From 7569920be0a7ff9119aaede0d2dcda8e9f4db2f0 Mon Sep 17 00:00:00 2001
From: cephi
Date: Wed, 18 Dec 2024 21:26:39 -0500
Subject: [PATCH] ahhhhh

---
 pytorch/batch.py | 2 +-
 ...c_7313p_1_coo_10_10_10_as-caida_G_010.json | 1 +
 ...7313p_1_coo_10_10_10_as-caida_G_010.output | 48 +-
 ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 +
 ...oo_10_10_10_synthetic_100000_0.0001.output | 73 +
 ...6_coo_10_10_10_synthetic_100000_0.001.json | 1 +
 ...coo_10_10_10_synthetic_100000_0.001.output | 56 +
 ...16_coo_10_10_10_synthetic_100000_0.01.json | 1 +
 ..._coo_10_10_10_synthetic_100000_0.01.output | 39 +
 ...6_coo_10_10_10_synthetic_100000_1e-05.json | 1 +
 ...coo_10_10_10_synthetic_100000_1e-05.output | 73 +
 ...6_coo_10_10_10_synthetic_100000_5e-05.json | 1 +
 ...coo_10_10_10_synthetic_100000_5e-05.output | 73 +
 ...6_coo_10_10_10_synthetic_10000_0.0001.json | 1 +
 ...coo_10_10_10_synthetic_10000_0.0001.output | 73 +
 ...16_coo_10_10_10_synthetic_10000_0.001.json | 1 +
 ..._coo_10_10_10_synthetic_10000_0.001.output | 73 +
 ..._16_coo_10_10_10_synthetic_10000_0.01.json | 1 +
 ...6_coo_10_10_10_synthetic_10000_0.01.output | 73 +
 ..._16_coo_10_10_10_synthetic_10000_0.05.json | 1 +
 ...6_coo_10_10_10_synthetic_10000_0.05.output | 56 +
 ...a_16_coo_10_10_10_synthetic_10000_0.1.json | 1 +
 ...16_coo_10_10_10_synthetic_10000_0.1.output | 56 +
 ...a_16_coo_10_10_10_synthetic_10000_0.2.json | 1 +
 ...16_coo_10_10_10_synthetic_10000_0.2.output | 56 +
 ...a_16_coo_10_10_10_synthetic_10000_0.3.json | 1 +
 ...16_coo_10_10_10_synthetic_10000_0.3.output | 56 +
 ...a_16_coo_10_10_10_synthetic_10000_0.4.json | 1 +
 ...16_coo_10_10_10_synthetic_10000_0.4.output | 56 +
 ...a_16_coo_10_10_10_synthetic_10000_0.5.json | 1 +
 ...16_coo_10_10_10_synthetic_10000_0.5.output | 39 +
 ...16_coo_10_10_10_synthetic_10000_1e-05.json | 1 +
 ..._coo_10_10_10_synthetic_10000_1e-05.output | 907 +++++
 ...16_coo_10_10_10_synthetic_10000_5e-05.json | 1 +
 ..._coo_10_10_10_synthetic_10000_5e-05.output | 73 +
 ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 +
 ...oo_10_10_10_synthetic_500000_0.0001.output | 39 +
 ...6_coo_10_10_10_synthetic_500000_1e-05.json | 1 +
 ...coo_10_10_10_synthetic_500000_1e-05.output | 56 +
 ...6_coo_10_10_10_synthetic_500000_5e-05.json | 1 +
 ...coo_10_10_10_synthetic_500000_5e-05.output | 56 +
 ...6_coo_10_10_10_synthetic_50000_0.0001.json | 1 +
 ...coo_10_10_10_synthetic_50000_0.0001.output | 73 +
 ...16_coo_10_10_10_synthetic_50000_0.001.json | 1 +
 ..._coo_10_10_10_synthetic_50000_0.001.output | 56 +
 ..._16_coo_10_10_10_synthetic_50000_0.01.json | 1 +
 ...6_coo_10_10_10_synthetic_50000_0.01.output | 73 +
 ...16_coo_10_10_10_synthetic_50000_1e-05.json | 1 +
 ..._coo_10_10_10_synthetic_50000_1e-05.output | 90 +
 ...16_coo_10_10_10_synthetic_50000_5e-05.json | 1 +
 ..._coo_10_10_10_synthetic_50000_5e-05.output | 73 +
 ...16_coo_10_10_10_synthetic_5000_0.0001.json | 1 +
 ..._coo_10_10_10_synthetic_5000_0.0001.output | 90 +
 ..._16_coo_10_10_10_synthetic_5000_0.001.json | 1 +
 ...6_coo_10_10_10_synthetic_5000_0.001.output | 73 +
 ...a_16_coo_10_10_10_synthetic_5000_0.01.json | 1 +
 ...16_coo_10_10_10_synthetic_5000_0.01.output | 73 +
 ...a_16_coo_10_10_10_synthetic_5000_0.05.json | 1 +
 ...16_coo_10_10_10_synthetic_5000_0.05.output | 56 +
 ...ra_16_coo_10_10_10_synthetic_5000_0.1.json | 1 +
 ..._16_coo_10_10_10_synthetic_5000_0.1.output | 56 +
 ...ra_16_coo_10_10_10_synthetic_5000_0.2.json | 1 +
 ..._16_coo_10_10_10_synthetic_5000_0.2.output | 56 +
 ...ra_16_coo_10_10_10_synthetic_5000_0.3.json | 1 +
 ..._16_coo_10_10_10_synthetic_5000_0.3.output | 56 +
...ra_16_coo_10_10_10_synthetic_5000_0.4.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.4.output | 56 + ...ra_16_coo_10_10_10_synthetic_5000_0.5.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.5.output | 56 + ..._16_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...6_coo_10_10_10_synthetic_5000_1e-05.output | 535 +++ ..._16_coo_10_10_10_synthetic_5000_5e-05.json | 1 + ...6_coo_10_10_10_synthetic_5000_5e-05.output | 90 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 76 +- ...6_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 58 +- ...16_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 45 + ...6_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 86 +- ...6_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 72 +- ...6_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ...16_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 66 +- ..._16_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...6_csr_10_10_10_synthetic_10000_0.01.output | 58 +- ..._16_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...6_csr_10_10_10_synthetic_10000_0.05.output | 42 +- ...a_16_csr_10_10_10_synthetic_10000_0.1.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.1.output | 42 +- ...a_16_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.2.output | 42 +- ...a_16_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.3.output | 42 +- ...a_16_csr_10_10_10_synthetic_10000_0.4.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.4.output | 45 + ...a_16_csr_10_10_10_synthetic_10000_0.5.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.5.output | 45 + ...16_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 2536 +++++++------- ...16_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 64 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 64 +- ...6_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 60 +- ...6_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 64 +- ...6_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 72 +- ...16_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 78 +- ..._16_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...6_csr_10_10_10_synthetic_50000_0.01.output | 42 +- ...16_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 66 +- ...16_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_5e-05.output | 72 +- ...16_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 91 +- ..._16_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...6_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ...a_16_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ...16_csr_10_10_10_synthetic_5000_0.01.output | 66 +- ...a_16_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ...16_csr_10_10_10_synthetic_5000_0.05.output | 58 +- ...ra_16_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.1.output | 58 +- 
...ra_16_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.2.output | 42 +- ...ra_16_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.3.output | 42 +- ...ra_16_csr_10_10_10_synthetic_5000_0.4.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.4.output | 42 +- ...ra_16_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.5.output | 42 +- ..._16_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...6_csr_10_10_10_synthetic_5000_1e-05.output | 585 ++-- ..._16_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...6_csr_10_10_10_synthetic_5000_5e-05.output | 66 +- ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 + ...oo_10_10_10_synthetic_100000_0.0001.output | 73 + ...6_coo_10_10_10_synthetic_100000_0.001.json | 1 + ...coo_10_10_10_synthetic_100000_0.001.output | 39 + ...16_coo_10_10_10_synthetic_100000_0.01.json | 1 + ..._coo_10_10_10_synthetic_100000_0.01.output | 39 + ...6_coo_10_10_10_synthetic_100000_1e-05.json | 1 + ...coo_10_10_10_synthetic_100000_1e-05.output | 73 + ...6_coo_10_10_10_synthetic_100000_5e-05.json | 1 + ...coo_10_10_10_synthetic_100000_5e-05.output | 56 + ...6_coo_10_10_10_synthetic_10000_0.0001.json | 1 + ...coo_10_10_10_synthetic_10000_0.0001.output | 73 + ...16_coo_10_10_10_synthetic_10000_0.001.json | 1 + ..._coo_10_10_10_synthetic_10000_0.001.output | 56 + ..._16_coo_10_10_10_synthetic_10000_0.01.json | 1 + ...6_coo_10_10_10_synthetic_10000_0.01.output | 56 + ..._16_coo_10_10_10_synthetic_10000_0.05.json | 1 + ...6_coo_10_10_10_synthetic_10000_0.05.output | 39 + ...p_16_coo_10_10_10_synthetic_10000_0.1.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.1.output | 39 + ...p_16_coo_10_10_10_synthetic_10000_0.2.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.2.output | 39 + ...p_16_coo_10_10_10_synthetic_10000_0.3.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.3.output | 39 + ...p_16_coo_10_10_10_synthetic_10000_0.4.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.4.output | 39 + ...p_16_coo_10_10_10_synthetic_10000_0.5.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.5.output | 39 + ...16_coo_10_10_10_synthetic_10000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_1e-05.output | 962 ++++++ ...16_coo_10_10_10_synthetic_10000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_5e-05.output | 73 + ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 + ...oo_10_10_10_synthetic_500000_0.0001.output | 39 + ...6_coo_10_10_10_synthetic_500000_1e-05.json | 1 + ...coo_10_10_10_synthetic_500000_1e-05.output | 39 + ...6_coo_10_10_10_synthetic_500000_5e-05.json | 1 + ...coo_10_10_10_synthetic_500000_5e-05.output | 39 + ...6_coo_10_10_10_synthetic_50000_0.0001.json | 1 + ...coo_10_10_10_synthetic_50000_0.0001.output | 56 + ...16_coo_10_10_10_synthetic_50000_0.001.json | 1 + ..._coo_10_10_10_synthetic_50000_0.001.output | 56 + ..._16_coo_10_10_10_synthetic_50000_0.01.json | 1 + ...6_coo_10_10_10_synthetic_50000_0.01.output | 39 + ..._16_coo_10_10_10_synthetic_50000_0.05.json | 1 + ...6_coo_10_10_10_synthetic_50000_0.05.output | 39 + ...16_coo_10_10_10_synthetic_50000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_1e-05.output | 73 + ...16_coo_10_10_10_synthetic_50000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_5e-05.output | 73 + ...16_coo_10_10_10_synthetic_5000_0.0001.json | 1 + ..._coo_10_10_10_synthetic_5000_0.0001.output | 73 + ..._16_coo_10_10_10_synthetic_5000_0.001.json | 1 + ...6_coo_10_10_10_synthetic_5000_0.001.output | 73 + ...p_16_coo_10_10_10_synthetic_5000_0.01.json | 1 + 
...16_coo_10_10_10_synthetic_5000_0.01.output | 56 + ...p_16_coo_10_10_10_synthetic_5000_0.05.json | 1 + ...16_coo_10_10_10_synthetic_5000_0.05.output | 56 + ...3p_16_coo_10_10_10_synthetic_5000_0.1.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.1.output | 56 + ...3p_16_coo_10_10_10_synthetic_5000_0.2.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.2.output | 39 + ...3p_16_coo_10_10_10_synthetic_5000_0.3.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.3.output | 39 + ...3p_16_coo_10_10_10_synthetic_5000_0.4.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.4.output | 39 + ...3p_16_coo_10_10_10_synthetic_5000_0.5.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.5.output | 39 + ..._16_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...6_coo_10_10_10_synthetic_5000_1e-05.output | 510 +++ ..._16_coo_10_10_10_synthetic_5000_5e-05.json | 1 + ...6_coo_10_10_10_synthetic_5000_5e-05.output | 73 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 120 +- ...6_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 74 +- ...16_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 65 + ...6_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 66 +- ...6_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 66 +- ...6_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ...16_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 80 +- ..._16_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...6_csr_10_10_10_synthetic_10000_0.01.output | 74 +- ..._16_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...6_csr_10_10_10_synthetic_10000_0.05.output | 74 +- ...p_16_csr_10_10_10_synthetic_10000_0.1.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.1.output | 78 +- ...p_16_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.2.output | 78 +- ...p_16_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.3.output | 78 +- ...p_16_csr_10_10_10_synthetic_10000_0.4.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.4.output | 65 + ...p_16_csr_10_10_10_synthetic_10000_0.5.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.5.output | 65 + ...16_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 2455 +++++++------- ...16_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 85 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 85 +- ...6_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 104 +- ...6_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 86 +- ...6_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 86 +- ...16_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 96 +- ..._16_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...6_csr_10_10_10_synthetic_50000_0.01.output | 74 +- ..._16_csr_10_10_10_synthetic_50000_0.05.json | 1 + ...6_csr_10_10_10_synthetic_50000_0.05.output | 65 + ...16_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 66 +- ...16_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- 
..._csr_10_10_10_synthetic_50000_5e-05.output | 66 +- ...16_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 66 +- ..._16_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...6_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ...p_16_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ...16_csr_10_10_10_synthetic_5000_0.01.output | 66 +- ...p_16_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ...16_csr_10_10_10_synthetic_5000_0.05.output | 74 +- ...3p_16_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.1.output | 74 +- ...3p_16_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.2.output | 74 +- ...3p_16_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.3.output | 94 +- ...3p_16_csr_10_10_10_synthetic_5000_0.4.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.4.output | 94 +- ...3p_16_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.5.output | 74 +- ..._16_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...6_csr_10_10_10_synthetic_5000_1e-05.output | 583 ++-- ..._16_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...6_csr_10_10_10_synthetic_5000_5e-05.output | 112 +- ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 + ...oo_10_10_10_synthetic_100000_0.0001.output | 56 + ...6_coo_10_10_10_synthetic_100000_0.001.json | 1 + ...coo_10_10_10_synthetic_100000_0.001.output | 39 + ...16_coo_10_10_10_synthetic_100000_0.01.json | 1 + ..._coo_10_10_10_synthetic_100000_0.01.output | 39 + ...6_coo_10_10_10_synthetic_100000_1e-05.json | 1 + ...coo_10_10_10_synthetic_100000_1e-05.output | 56 + ...6_coo_10_10_10_synthetic_100000_5e-05.json | 1 + ...coo_10_10_10_synthetic_100000_5e-05.output | 56 + ...6_coo_10_10_10_synthetic_10000_0.0001.json | 1 + ...coo_10_10_10_synthetic_10000_0.0001.output | 73 + ...16_coo_10_10_10_synthetic_10000_0.001.json | 1 + ..._coo_10_10_10_synthetic_10000_0.001.output | 56 + ..._16_coo_10_10_10_synthetic_10000_0.01.json | 1 + ...6_coo_10_10_10_synthetic_10000_0.01.output | 56 + ..._16_coo_10_10_10_synthetic_10000_0.05.json | 1 + ...6_coo_10_10_10_synthetic_10000_0.05.output | 39 + ...6_16_coo_10_10_10_synthetic_10000_0.1.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.1.output | 39 + ...6_16_coo_10_10_10_synthetic_10000_0.2.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.2.output | 39 + ...6_16_coo_10_10_10_synthetic_10000_0.3.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.3.output | 39 + ...6_16_coo_10_10_10_synthetic_10000_0.4.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.4.output | 39 + ...6_16_coo_10_10_10_synthetic_10000_0.5.json | 1 + ...16_coo_10_10_10_synthetic_10000_0.5.output | 39 + ...16_coo_10_10_10_synthetic_10000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_1e-05.output | 962 ++++++ ...16_coo_10_10_10_synthetic_10000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_5e-05.output | 73 + ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 + ...oo_10_10_10_synthetic_500000_0.0001.output | 39 + ...6_coo_10_10_10_synthetic_500000_1e-05.json | 1 + ...coo_10_10_10_synthetic_500000_1e-05.output | 39 + ...6_coo_10_10_10_synthetic_500000_5e-05.json | 1 + ...coo_10_10_10_synthetic_500000_5e-05.output | 39 + ...6_coo_10_10_10_synthetic_50000_0.0001.json | 1 + ...coo_10_10_10_synthetic_50000_0.0001.output | 56 + ...16_coo_10_10_10_synthetic_50000_0.001.json | 1 + ..._coo_10_10_10_synthetic_50000_0.001.output | 56 + ..._16_coo_10_10_10_synthetic_50000_0.01.json | 1 + ...6_coo_10_10_10_synthetic_50000_0.01.output | 39 + 
..._16_coo_10_10_10_synthetic_50000_0.05.json | 1 + ...6_coo_10_10_10_synthetic_50000_0.05.output | 39 + ...16_coo_10_10_10_synthetic_50000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_1e-05.output | 73 + ...16_coo_10_10_10_synthetic_50000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_5e-05.output | 56 + ...16_coo_10_10_10_synthetic_5000_0.0001.json | 1 + ..._coo_10_10_10_synthetic_5000_0.0001.output | 73 + ..._16_coo_10_10_10_synthetic_5000_0.001.json | 1 + ...6_coo_10_10_10_synthetic_5000_0.001.output | 73 + ...6_16_coo_10_10_10_synthetic_5000_0.01.json | 1 + ...16_coo_10_10_10_synthetic_5000_0.01.output | 56 + ...6_16_coo_10_10_10_synthetic_5000_0.05.json | 1 + ...16_coo_10_10_10_synthetic_5000_0.05.output | 56 + ...16_16_coo_10_10_10_synthetic_5000_0.1.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.1.output | 56 + ...16_16_coo_10_10_10_synthetic_5000_0.2.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.2.output | 39 + ...16_16_coo_10_10_10_synthetic_5000_0.3.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.3.output | 39 + ...16_16_coo_10_10_10_synthetic_5000_0.4.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.4.output | 39 + ...16_16_coo_10_10_10_synthetic_5000_0.5.json | 1 + ..._16_coo_10_10_10_synthetic_5000_0.5.output | 39 + ..._16_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...6_coo_10_10_10_synthetic_5000_1e-05.output | 429 +++ ..._16_coo_10_10_10_synthetic_5000_5e-05.json | 1 + ...6_coo_10_10_10_synthetic_5000_5e-05.output | 73 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 96 +- ...6_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 74 +- ...16_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 65 + ...6_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 66 +- ...6_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 66 +- ...6_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ...16_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 66 +- ..._16_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...6_csr_10_10_10_synthetic_10000_0.01.output | 92 +- ..._16_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...6_csr_10_10_10_synthetic_10000_0.05.output | 74 +- ...6_16_csr_10_10_10_synthetic_10000_0.1.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.1.output | 74 +- ...6_16_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.2.output | 58 +- ...6_16_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ...16_csr_10_10_10_synthetic_10000_0.3.output | 98 +- ...6_16_csr_10_10_10_synthetic_10000_0.4.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.4.output | 65 + ...6_16_csr_10_10_10_synthetic_10000_0.5.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.5.output | 65 + ...16_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 2645 ++++++++------- ...16_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 64 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 83 +- ...6_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 126 +- ...6_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 86 +- 
...6_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 66 +- ...16_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 75 +- ..._16_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...6_csr_10_10_10_synthetic_50000_0.01.output | 98 +- ..._16_csr_10_10_10_synthetic_50000_0.05.json | 1 + ...6_csr_10_10_10_synthetic_50000_0.05.output | 65 + ...16_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 66 +- ...16_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_5e-05.output | 66 +- ...16_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 86 +- ..._16_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...6_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ...6_16_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ...16_csr_10_10_10_synthetic_5000_0.01.output | 66 +- ...6_16_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ...16_csr_10_10_10_synthetic_5000_0.05.output | 74 +- ...16_16_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.1.output | 74 +- ...16_16_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.2.output | 74 +- ...16_16_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.3.output | 74 +- ...16_16_csr_10_10_10_synthetic_5000_0.4.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.4.output | 78 +- ...16_16_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ..._16_csr_10_10_10_synthetic_5000_0.5.output | 74 +- ..._16_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...6_csr_10_10_10_synthetic_5000_1e-05.output | 692 ++-- ..._16_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...6_csr_10_10_10_synthetic_5000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 65 + ...6_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 65 + ...6_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 85 + ...6_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 85 + ...6_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ...16_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 85 + ..._16_csr_10_10_10_synthetic_10000_0.01.json | 1 + ...6_csr_10_10_10_synthetic_10000_0.01.output | 65 + ..._16_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...6_csr_10_10_10_synthetic_10000_0.05.output | 45 + ...a_16_csr_10_10_10_synthetic_10000_0.1.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.1.output | 45 + ...a_16_csr_10_10_10_synthetic_10000_0.2.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.2.output | 45 + ...a_16_csr_10_10_10_synthetic_10000_0.3.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.3.output | 45 + ...16_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1307 +++++++ ...16_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 68 + ...6_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 68 + ...6_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 68 + ...6_csr_10_10_10_synthetic_50000_0.0001.json 
| 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 85 + ...16_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 85 + ..._16_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...6_csr_10_10_10_synthetic_50000_0.01.output | 45 + ...16_csr_10_10_10_synthetic_50000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_1e-05.output | 81 + ...16_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 85 + ...16_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 82 + ..._16_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...6_csr_10_10_10_synthetic_5000_0.001.output | 81 + ...a_16_csr_10_10_10_synthetic_5000_0.01.json | 1 + ...16_csr_10_10_10_synthetic_5000_0.01.output | 85 + ...a_16_csr_10_10_10_synthetic_5000_0.05.json | 1 + ...16_csr_10_10_10_synthetic_5000_0.05.output | 65 + ...ra_16_csr_10_10_10_synthetic_5000_0.1.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.1.output | 65 + ...ra_16_csr_10_10_10_synthetic_5000_0.2.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.2.output | 45 + ...ra_16_csr_10_10_10_synthetic_5000_0.3.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.3.output | 45 + ...ra_16_csr_10_10_10_synthetic_5000_0.4.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.4.output | 45 + ...ra_16_csr_10_10_10_synthetic_5000_0.5.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.5.output | 45 + ..._16_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...6_csr_10_10_10_synthetic_5000_1e-05.output | 329 ++ ..._16_csr_10_10_10_synthetic_5000_5e-05.json | 1 + ...6_csr_10_10_10_synthetic_5000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 85 + ...6_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 85 + ...6_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 85 + ...6_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 85 + ...6_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ...16_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 105 + ..._16_csr_10_10_10_synthetic_10000_0.01.json | 1 + ...6_csr_10_10_10_synthetic_10000_0.01.output | 85 + ..._16_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...6_csr_10_10_10_synthetic_10000_0.05.output | 85 + ...p_16_csr_10_10_10_synthetic_10000_0.1.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.1.output | 65 + ...p_16_csr_10_10_10_synthetic_10000_0.2.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.2.output | 85 + ...p_16_csr_10_10_10_synthetic_10000_0.3.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.3.output | 65 + ...16_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1307 +++++++ ...16_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 68 + ...6_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 89 + ...6_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 68 + ...6_csr_10_10_10_synthetic_50000_0.0001.json | 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 105 + ...16_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 85 + 
..._16_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...6_csr_10_10_10_synthetic_50000_0.01.output | 85 + ...16_csr_10_10_10_synthetic_50000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_1e-05.output | 81 + ...16_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 85 + ...16_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 81 + ..._16_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...6_csr_10_10_10_synthetic_5000_0.001.output | 81 + ...p_16_csr_10_10_10_synthetic_5000_0.01.json | 1 + ...16_csr_10_10_10_synthetic_5000_0.01.output | 85 + ...p_16_csr_10_10_10_synthetic_5000_0.05.json | 1 + ...16_csr_10_10_10_synthetic_5000_0.05.output | 85 + ...3p_16_csr_10_10_10_synthetic_5000_0.1.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.1.output | 85 + ...3p_16_csr_10_10_10_synthetic_5000_0.2.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.2.output | 85 + ...3p_16_csr_10_10_10_synthetic_5000_0.3.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.3.output | 85 + ...3p_16_csr_10_10_10_synthetic_5000_0.4.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.4.output | 85 + ...3p_16_csr_10_10_10_synthetic_5000_0.5.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.5.output | 85 + ..._16_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...6_csr_10_10_10_synthetic_5000_1e-05.output | 329 ++ ..._16_csr_10_10_10_synthetic_5000_5e-05.json | 1 + ...6_csr_10_10_10_synthetic_5000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 85 + ...6_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 85 + ...6_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 85 + ...6_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 85 + ...6_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ...16_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 85 + ..._16_csr_10_10_10_synthetic_10000_0.01.json | 1 + ...6_csr_10_10_10_synthetic_10000_0.01.output | 85 + ..._16_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...6_csr_10_10_10_synthetic_10000_0.05.output | 85 + ...6_16_csr_10_10_10_synthetic_10000_0.1.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.1.output | 85 + ...6_16_csr_10_10_10_synthetic_10000_0.2.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.2.output | 65 + ...6_16_csr_10_10_10_synthetic_10000_0.3.json | 1 + ...16_csr_10_10_10_synthetic_10000_0.3.output | 105 + ...16_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1307 +++++++ ...16_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 89 + ...6_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 89 + ...6_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 68 + ...6_csr_10_10_10_synthetic_50000_0.0001.json | 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 85 + ...16_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 86 + ..._16_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...6_csr_10_10_10_synthetic_50000_0.01.output | 105 + ...16_csr_10_10_10_synthetic_50000_1e-05.json | 1 + 
..._csr_10_10_10_synthetic_50000_1e-05.output | 81 + ...16_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 85 + ...16_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 82 + ..._16_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...6_csr_10_10_10_synthetic_5000_0.001.output | 81 + ...6_16_csr_10_10_10_synthetic_5000_0.01.json | 1 + ...16_csr_10_10_10_synthetic_5000_0.01.output | 85 + ...6_16_csr_10_10_10_synthetic_5000_0.05.json | 1 + ...16_csr_10_10_10_synthetic_5000_0.05.output | 85 + ...16_16_csr_10_10_10_synthetic_5000_0.1.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.1.output | 85 + ...16_16_csr_10_10_10_synthetic_5000_0.2.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.2.output | 85 + ...16_16_csr_10_10_10_synthetic_5000_0.3.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.3.output | 85 + ...16_16_csr_10_10_10_synthetic_5000_0.4.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.4.output | 65 + ...16_16_csr_10_10_10_synthetic_5000_0.5.json | 1 + ..._16_csr_10_10_10_synthetic_5000_0.5.output | 85 + ..._16_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...6_csr_10_10_10_synthetic_5000_1e-05.output | 356 ++ ..._16_csr_10_10_10_synthetic_5000_5e-05.json | 1 + ...6_csr_10_10_10_synthetic_5000_5e-05.output | 81 + ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 + ...oo_10_10_10_synthetic_100000_0.0001.output | 73 + ...1_coo_10_10_10_synthetic_100000_0.001.json | 1 + ...coo_10_10_10_synthetic_100000_0.001.output | 56 + ..._1_coo_10_10_10_synthetic_100000_0.01.json | 1 + ..._coo_10_10_10_synthetic_100000_0.01.output | 39 + ...1_coo_10_10_10_synthetic_100000_1e-05.json | 1 + ...coo_10_10_10_synthetic_100000_1e-05.output | 73 + ...1_coo_10_10_10_synthetic_100000_5e-05.json | 1 + ...coo_10_10_10_synthetic_100000_5e-05.output | 56 + ...1_coo_10_10_10_synthetic_10000_0.0001.json | 1 + ...coo_10_10_10_synthetic_10000_0.0001.output | 75 + ..._1_coo_10_10_10_synthetic_10000_0.001.json | 1 + ..._coo_10_10_10_synthetic_10000_0.001.output | 73 + ...a_1_coo_10_10_10_synthetic_10000_0.01.json | 1 + ...1_coo_10_10_10_synthetic_10000_0.01.output | 56 + ...a_1_coo_10_10_10_synthetic_10000_0.05.json | 1 + ...1_coo_10_10_10_synthetic_10000_0.05.output | 56 + ...ra_1_coo_10_10_10_synthetic_10000_0.1.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.1.output | 56 + ...ra_1_coo_10_10_10_synthetic_10000_0.2.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.2.output | 56 + ...ra_1_coo_10_10_10_synthetic_10000_0.3.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.3.output | 56 + ...ra_1_coo_10_10_10_synthetic_10000_0.4.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.4.output | 56 + ...ra_1_coo_10_10_10_synthetic_10000_0.5.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.5.output | 39 + ..._1_coo_10_10_10_synthetic_10000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_1e-05.output | 962 ++++++ ..._1_coo_10_10_10_synthetic_10000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_5e-05.output | 73 + ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 + ...oo_10_10_10_synthetic_500000_0.0001.output | 39 + ...1_coo_10_10_10_synthetic_500000_1e-05.json | 1 + ...coo_10_10_10_synthetic_500000_1e-05.output | 56 + ...1_coo_10_10_10_synthetic_500000_5e-05.json | 1 + ...coo_10_10_10_synthetic_500000_5e-05.output | 124 + ...1_coo_10_10_10_synthetic_50000_0.0001.json | 1 + ...coo_10_10_10_synthetic_50000_0.0001.output | 73 + ..._1_coo_10_10_10_synthetic_50000_0.001.json | 1 + ..._coo_10_10_10_synthetic_50000_0.001.output | 56 + 
...a_1_coo_10_10_10_synthetic_50000_0.01.json | 1 + ...1_coo_10_10_10_synthetic_50000_0.01.output | 56 + ..._1_coo_10_10_10_synthetic_50000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_1e-05.output | 73 + ..._1_coo_10_10_10_synthetic_50000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_5e-05.output | 73 + ..._1_coo_10_10_10_synthetic_5000_0.0001.json | 1 + ..._coo_10_10_10_synthetic_5000_0.0001.output | 73 + ...a_1_coo_10_10_10_synthetic_5000_0.001.json | 1 + ...1_coo_10_10_10_synthetic_5000_0.001.output | 73 + ...ra_1_coo_10_10_10_synthetic_5000_0.01.json | 1 + ..._1_coo_10_10_10_synthetic_5000_0.01.output | 73 + ...ra_1_coo_10_10_10_synthetic_5000_0.05.json | 1 + ..._1_coo_10_10_10_synthetic_5000_0.05.output | 73 + ...tra_1_coo_10_10_10_synthetic_5000_0.1.json | 1 + ...a_1_coo_10_10_10_synthetic_5000_0.1.output | 56 + ...tra_1_coo_10_10_10_synthetic_5000_0.2.json | 1 + ...a_1_coo_10_10_10_synthetic_5000_0.2.output | 56 + ...tra_1_coo_10_10_10_synthetic_5000_0.3.json | 1 + ...a_1_coo_10_10_10_synthetic_5000_0.3.output | 56 + ...tra_1_coo_10_10_10_synthetic_5000_0.4.json | 1 + ...a_1_coo_10_10_10_synthetic_5000_0.4.output | 56 + ...tra_1_coo_10_10_10_synthetic_5000_0.5.json | 1 + ...a_1_coo_10_10_10_synthetic_5000_0.5.output | 56 + ...a_1_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...1_coo_10_10_10_synthetic_5000_1e-05.output | 456 +++ ...a_1_coo_10_10_10_synthetic_5000_5e-05.json | 1 + ...1_coo_10_10_10_synthetic_5000_5e-05.output | 73 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 58 +- ...1_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 42 +- ..._1_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 45 + ...1_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 52 +- ...1_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 52 +- ...1_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ..._1_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 52 +- ...a_1_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...1_csr_10_10_10_synthetic_10000_0.01.output | 80 +- ...a_1_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...1_csr_10_10_10_synthetic_10000_0.05.output | 42 +- ...ra_1_csr_10_10_10_synthetic_10000_0.1.json | 2 +- ..._1_csr_10_10_10_synthetic_10000_0.1.output | 42 +- ...ra_1_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ..._1_csr_10_10_10_synthetic_10000_0.2.output | 42 +- ...ra_1_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ..._1_csr_10_10_10_synthetic_10000_0.3.output | 42 +- ...ra_1_csr_10_10_10_synthetic_10000_0.4.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.4.output | 45 + ...ra_1_csr_10_10_10_synthetic_10000_0.5.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.5.output | 45 + ..._1_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 2538 +++++++------- ..._1_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 46 +- ...1_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 46 +- ...1_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 46 +- 
...1_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 72 +- ..._1_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 80 +- ...a_1_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...1_csr_10_10_10_synthetic_50000_0.01.output | 42 +- ..._1_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 66 +- ..._1_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_5e-05.output | 52 +- ..._1_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 66 +- ...a_1_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...1_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ...ra_1_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ..._1_csr_10_10_10_synthetic_5000_0.01.output | 52 +- ...ra_1_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ..._1_csr_10_10_10_synthetic_5000_0.05.output | 58 +- ...tra_1_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ...a_1_csr_10_10_10_synthetic_5000_0.1.output | 56 +- ...tra_1_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ...a_1_csr_10_10_10_synthetic_5000_0.2.output | 42 +- ...tra_1_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ...a_1_csr_10_10_10_synthetic_5000_0.3.output | 42 +- ...tra_1_csr_10_10_10_synthetic_5000_0.4.json | 2 +- ...a_1_csr_10_10_10_synthetic_5000_0.4.output | 42 +- ...tra_1_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ...a_1_csr_10_10_10_synthetic_5000_0.5.output | 42 +- ...a_1_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...1_csr_10_10_10_synthetic_5000_1e-05.output | 664 ++-- ...a_1_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...1_csr_10_10_10_synthetic_5000_5e-05.output | 112 +- ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 + ...oo_10_10_10_synthetic_100000_0.0001.output | 56 + ...1_coo_10_10_10_synthetic_100000_0.001.json | 1 + ...coo_10_10_10_synthetic_100000_0.001.output | 39 + ..._1_coo_10_10_10_synthetic_100000_0.01.json | 1 + ..._coo_10_10_10_synthetic_100000_0.01.output | 39 + ...1_coo_10_10_10_synthetic_100000_1e-05.json | 1 + ...coo_10_10_10_synthetic_100000_1e-05.output | 56 + ...1_coo_10_10_10_synthetic_100000_5e-05.json | 1 + ...coo_10_10_10_synthetic_100000_5e-05.output | 57 + ...1_coo_10_10_10_synthetic_10000_0.0001.json | 1 + ...coo_10_10_10_synthetic_10000_0.0001.output | 73 + ..._1_coo_10_10_10_synthetic_10000_0.001.json | 1 + ..._coo_10_10_10_synthetic_10000_0.001.output | 56 + ...p_1_coo_10_10_10_synthetic_10000_0.01.json | 1 + ...1_coo_10_10_10_synthetic_10000_0.01.output | 56 + ...p_1_coo_10_10_10_synthetic_10000_0.05.json | 1 + ...1_coo_10_10_10_synthetic_10000_0.05.output | 39 + ...3p_1_coo_10_10_10_synthetic_10000_0.1.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.1.output | 39 + ...3p_1_coo_10_10_10_synthetic_10000_0.2.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.2.output | 39 + ...3p_1_coo_10_10_10_synthetic_10000_0.3.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.3.output | 39 + ...3p_1_coo_10_10_10_synthetic_10000_0.4.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.4.output | 39 + ...3p_1_coo_10_10_10_synthetic_10000_0.5.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.5.output | 39 + ..._1_coo_10_10_10_synthetic_10000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_1e-05.output | 962 ++++++ ..._1_coo_10_10_10_synthetic_10000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_5e-05.output | 73 + ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 + ...oo_10_10_10_synthetic_500000_0.0001.output | 39 + ...1_coo_10_10_10_synthetic_500000_1e-05.json | 
1 + ...coo_10_10_10_synthetic_500000_1e-05.output | 56 + ...1_coo_10_10_10_synthetic_500000_5e-05.json | 1 + ...coo_10_10_10_synthetic_500000_5e-05.output | 39 + ...1_coo_10_10_10_synthetic_50000_0.0001.json | 1 + ...coo_10_10_10_synthetic_50000_0.0001.output | 56 + ..._1_coo_10_10_10_synthetic_50000_0.001.json | 1 + ..._coo_10_10_10_synthetic_50000_0.001.output | 56 + ...p_1_coo_10_10_10_synthetic_50000_0.01.json | 1 + ...1_coo_10_10_10_synthetic_50000_0.01.output | 39 + ...p_1_coo_10_10_10_synthetic_50000_0.05.json | 1 + ...1_coo_10_10_10_synthetic_50000_0.05.output | 39 + ..._1_coo_10_10_10_synthetic_50000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_1e-05.output | 73 + ..._1_coo_10_10_10_synthetic_50000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_5e-05.output | 56 + ..._1_coo_10_10_10_synthetic_5000_0.0001.json | 1 + ..._coo_10_10_10_synthetic_5000_0.0001.output | 73 + ...p_1_coo_10_10_10_synthetic_5000_0.001.json | 1 + ...1_coo_10_10_10_synthetic_5000_0.001.output | 73 + ...3p_1_coo_10_10_10_synthetic_5000_0.01.json | 1 + ..._1_coo_10_10_10_synthetic_5000_0.01.output | 56 + ...3p_1_coo_10_10_10_synthetic_5000_0.05.json | 1 + ..._1_coo_10_10_10_synthetic_5000_0.05.output | 56 + ...13p_1_coo_10_10_10_synthetic_5000_0.1.json | 1 + ...p_1_coo_10_10_10_synthetic_5000_0.1.output | 56 + ...13p_1_coo_10_10_10_synthetic_5000_0.2.json | 1 + ...p_1_coo_10_10_10_synthetic_5000_0.2.output | 39 + ...13p_1_coo_10_10_10_synthetic_5000_0.3.json | 1 + ...p_1_coo_10_10_10_synthetic_5000_0.3.output | 39 + ...13p_1_coo_10_10_10_synthetic_5000_0.4.json | 1 + ...p_1_coo_10_10_10_synthetic_5000_0.4.output | 39 + ...13p_1_coo_10_10_10_synthetic_5000_0.5.json | 1 + ...p_1_coo_10_10_10_synthetic_5000_0.5.output | 39 + ...p_1_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...1_coo_10_10_10_synthetic_5000_1e-05.output | 456 +++ ...p_1_coo_10_10_10_synthetic_5000_5e-05.json | 1 + ...1_coo_10_10_10_synthetic_5000_5e-05.output | 73 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 96 +- ...1_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 58 +- ..._1_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 45 + ...1_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 66 +- ...1_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 66 +- ...1_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ..._1_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 86 +- ...p_1_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...1_csr_10_10_10_synthetic_10000_0.01.output | 74 +- ...p_1_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...1_csr_10_10_10_synthetic_10000_0.05.output | 58 +- ...3p_1_csr_10_10_10_synthetic_10000_0.1.json | 2 +- ..._1_csr_10_10_10_synthetic_10000_0.1.output | 58 +- ...3p_1_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ..._1_csr_10_10_10_synthetic_10000_0.2.output | 58 +- ...3p_1_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ..._1_csr_10_10_10_synthetic_10000_0.3.output | 58 +- ...3p_1_csr_10_10_10_synthetic_10000_0.4.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.4.output | 65 + ...3p_1_csr_10_10_10_synthetic_10000_0.5.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.5.output | 65 + ..._1_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 2988 
++++++++-------- ..._1_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 85 +- ...1_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 60 +- ...1_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 64 +- ...1_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 66 +- ..._1_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 56 +- ...p_1_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...1_csr_10_10_10_synthetic_50000_0.01.output | 56 +- ...p_1_csr_10_10_10_synthetic_50000_0.05.json | 1 + ...1_csr_10_10_10_synthetic_50000_0.05.output | 45 + ..._1_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 64 +- ..._1_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_5e-05.output | 66 +- ..._1_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 66 +- ...p_1_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...1_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ...3p_1_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ..._1_csr_10_10_10_synthetic_5000_0.01.output | 66 +- ...3p_1_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ..._1_csr_10_10_10_synthetic_5000_0.05.output | 120 +- ...13p_1_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ...p_1_csr_10_10_10_synthetic_5000_0.1.output | 58 +- ...13p_1_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ...p_1_csr_10_10_10_synthetic_5000_0.2.output | 58 +- ...13p_1_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ...p_1_csr_10_10_10_synthetic_5000_0.3.output | 58 +- ...13p_1_csr_10_10_10_synthetic_5000_0.4.json | 2 +- ...p_1_csr_10_10_10_synthetic_5000_0.4.output | 58 +- ...13p_1_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ...p_1_csr_10_10_10_synthetic_5000_0.5.output | 60 +- ...p_1_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...1_csr_10_10_10_synthetic_5000_1e-05.output | 558 +-- ...p_1_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...1_csr_10_10_10_synthetic_5000_5e-05.output | 88 +- ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 + ...oo_10_10_10_synthetic_100000_0.0001.output | 56 + ...1_coo_10_10_10_synthetic_100000_0.001.json | 1 + ...coo_10_10_10_synthetic_100000_0.001.output | 39 + ..._1_coo_10_10_10_synthetic_100000_0.01.json | 1 + ..._coo_10_10_10_synthetic_100000_0.01.output | 39 + ...1_coo_10_10_10_synthetic_100000_1e-05.json | 1 + ...coo_10_10_10_synthetic_100000_1e-05.output | 56 + ...1_coo_10_10_10_synthetic_100000_5e-05.json | 1 + ...coo_10_10_10_synthetic_100000_5e-05.output | 56 + ...1_coo_10_10_10_synthetic_10000_0.0001.json | 1 + ...coo_10_10_10_synthetic_10000_0.0001.output | 73 + ..._1_coo_10_10_10_synthetic_10000_0.001.json | 1 + ..._coo_10_10_10_synthetic_10000_0.001.output | 56 + ...6_1_coo_10_10_10_synthetic_10000_0.01.json | 1 + ...1_coo_10_10_10_synthetic_10000_0.01.output | 56 + ...6_1_coo_10_10_10_synthetic_10000_0.05.json | 1 + ...1_coo_10_10_10_synthetic_10000_0.05.output | 39 + ...16_1_coo_10_10_10_synthetic_10000_0.1.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.1.output | 39 + ...16_1_coo_10_10_10_synthetic_10000_0.2.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.2.output | 39 + ...16_1_coo_10_10_10_synthetic_10000_0.3.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.3.output | 39 + 
...16_1_coo_10_10_10_synthetic_10000_0.4.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.4.output | 39 + ...16_1_coo_10_10_10_synthetic_10000_0.5.json | 1 + ..._1_coo_10_10_10_synthetic_10000_0.5.output | 39 + ..._1_coo_10_10_10_synthetic_10000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_1e-05.output | 1069 ++++++ ..._1_coo_10_10_10_synthetic_10000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_5e-05.output | 73 + ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 + ...oo_10_10_10_synthetic_500000_0.0001.output | 39 + ...1_coo_10_10_10_synthetic_500000_1e-05.json | 1 + ...coo_10_10_10_synthetic_500000_1e-05.output | 39 + ...1_coo_10_10_10_synthetic_500000_5e-05.json | 1 + ...coo_10_10_10_synthetic_500000_5e-05.output | 39 + ...1_coo_10_10_10_synthetic_50000_0.0001.json | 1 + ...coo_10_10_10_synthetic_50000_0.0001.output | 56 + ..._1_coo_10_10_10_synthetic_50000_0.001.json | 1 + ..._coo_10_10_10_synthetic_50000_0.001.output | 56 + ...6_1_coo_10_10_10_synthetic_50000_0.01.json | 1 + ...1_coo_10_10_10_synthetic_50000_0.01.output | 39 + ...6_1_coo_10_10_10_synthetic_50000_0.05.json | 1 + ...1_coo_10_10_10_synthetic_50000_0.05.output | 39 + ..._1_coo_10_10_10_synthetic_50000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_1e-05.output | 73 + ..._1_coo_10_10_10_synthetic_50000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_5e-05.output | 56 + ..._1_coo_10_10_10_synthetic_5000_0.0001.json | 1 + ..._coo_10_10_10_synthetic_5000_0.0001.output | 73 + ...6_1_coo_10_10_10_synthetic_5000_0.001.json | 1 + ...1_coo_10_10_10_synthetic_5000_0.001.output | 73 + ...16_1_coo_10_10_10_synthetic_5000_0.01.json | 1 + ..._1_coo_10_10_10_synthetic_5000_0.01.output | 56 + ...16_1_coo_10_10_10_synthetic_5000_0.05.json | 1 + ..._1_coo_10_10_10_synthetic_5000_0.05.output | 56 + ...216_1_coo_10_10_10_synthetic_5000_0.1.json | 1 + ...6_1_coo_10_10_10_synthetic_5000_0.1.output | 56 + ...216_1_coo_10_10_10_synthetic_5000_0.2.json | 1 + ...6_1_coo_10_10_10_synthetic_5000_0.2.output | 39 + ...216_1_coo_10_10_10_synthetic_5000_0.3.json | 1 + ...6_1_coo_10_10_10_synthetic_5000_0.3.output | 39 + ...216_1_coo_10_10_10_synthetic_5000_0.4.json | 1 + ...6_1_coo_10_10_10_synthetic_5000_0.4.output | 39 + ...216_1_coo_10_10_10_synthetic_5000_0.5.json | 1 + ...6_1_coo_10_10_10_synthetic_5000_0.5.output | 39 + ...6_1_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...1_coo_10_10_10_synthetic_5000_1e-05.output | 429 +++ ...6_1_coo_10_10_10_synthetic_5000_5e-05.json | 1 + ...1_coo_10_10_10_synthetic_5000_5e-05.output | 73 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 96 +- ...1_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 80 +- ..._1_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 45 + ...1_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 66 +- ...1_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 66 +- ...1_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ..._1_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 66 +- ...6_1_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...1_csr_10_10_10_synthetic_10000_0.01.output | 74 +- ...6_1_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...1_csr_10_10_10_synthetic_10000_0.05.output | 58 +- ...16_1_csr_10_10_10_synthetic_10000_0.1.json | 2 +- 
..._1_csr_10_10_10_synthetic_10000_0.1.output | 58 +- ...16_1_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ..._1_csr_10_10_10_synthetic_10000_0.2.output | 58 +- ...16_1_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ..._1_csr_10_10_10_synthetic_10000_0.3.output | 62 +- ...16_1_csr_10_10_10_synthetic_10000_0.4.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.4.output | 45 + ...16_1_csr_10_10_10_synthetic_10000_0.5.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.5.output | 45 + ..._1_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 2617 +++++++------- ..._1_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 46 +- ...1_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 86 +- ...1_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 64 +- ...1_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 66 +- ..._1_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 96 +- ...6_1_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...1_csr_10_10_10_synthetic_50000_0.01.output | 58 +- ...6_1_csr_10_10_10_synthetic_50000_0.05.json | 1 + ...1_csr_10_10_10_synthetic_50000_0.05.output | 45 + ..._1_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 88 +- ..._1_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_5e-05.output | 66 +- ..._1_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 66 +- ...6_1_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...1_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ...16_1_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ..._1_csr_10_10_10_synthetic_5000_0.01.output | 66 +- ...16_1_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ..._1_csr_10_10_10_synthetic_5000_0.05.output | 74 +- ...216_1_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ...6_1_csr_10_10_10_synthetic_5000_0.1.output | 74 +- ...216_1_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ...6_1_csr_10_10_10_synthetic_5000_0.2.output | 58 +- ...216_1_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ...6_1_csr_10_10_10_synthetic_5000_0.3.output | 58 +- ...216_1_csr_10_10_10_synthetic_5000_0.4.json | 2 +- ...6_1_csr_10_10_10_synthetic_5000_0.4.output | 61 +- ...216_1_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ...6_1_csr_10_10_10_synthetic_5000_0.5.output | 58 +- ...6_1_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...1_csr_10_10_10_synthetic_5000_1e-05.output | 580 ++-- ...6_1_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...1_csr_10_10_10_synthetic_5000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 65 + ...1_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 45 + ...1_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 65 + ...1_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 65 + ...1_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ..._1_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 65 + ...a_1_csr_10_10_10_synthetic_10000_0.01.json | 1 + 
...1_csr_10_10_10_synthetic_10000_0.01.output | 65 + ...a_1_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...1_csr_10_10_10_synthetic_10000_0.05.output | 45 + ...ra_1_csr_10_10_10_synthetic_10000_0.1.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.1.output | 45 + ...ra_1_csr_10_10_10_synthetic_10000_0.2.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.2.output | 45 + ...ra_1_csr_10_10_10_synthetic_10000_0.3.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.3.output | 45 + ..._1_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1200 +++++++ ..._1_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 47 + ...1_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 47 + ...1_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 47 + ...1_csr_10_10_10_synthetic_50000_0.0001.json | 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 65 + ..._1_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 65 + ...a_1_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...1_csr_10_10_10_synthetic_50000_0.01.output | 45 + ..._1_csr_10_10_10_synthetic_50000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_1e-05.output | 81 + ..._1_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 65 + ..._1_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 81 + ...a_1_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...1_csr_10_10_10_synthetic_5000_0.001.output | 81 + ...ra_1_csr_10_10_10_synthetic_5000_0.01.json | 1 + ..._1_csr_10_10_10_synthetic_5000_0.01.output | 65 + ...ra_1_csr_10_10_10_synthetic_5000_0.05.json | 1 + ..._1_csr_10_10_10_synthetic_5000_0.05.output | 65 + ...tra_1_csr_10_10_10_synthetic_5000_0.1.json | 1 + ...a_1_csr_10_10_10_synthetic_5000_0.1.output | 65 + ...tra_1_csr_10_10_10_synthetic_5000_0.2.json | 1 + ...a_1_csr_10_10_10_synthetic_5000_0.2.output | 45 + ...tra_1_csr_10_10_10_synthetic_5000_0.3.json | 1 + ...a_1_csr_10_10_10_synthetic_5000_0.3.output | 45 + ...tra_1_csr_10_10_10_synthetic_5000_0.4.json | 1 + ...a_1_csr_10_10_10_synthetic_5000_0.4.output | 45 + ...tra_1_csr_10_10_10_synthetic_5000_0.5.json | 1 + ...a_1_csr_10_10_10_synthetic_5000_0.5.output | 45 + ...a_1_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...1_csr_10_10_10_synthetic_5000_1e-05.output | 329 ++ ...a_1_csr_10_10_10_synthetic_5000_5e-05.json | 1 + ...1_csr_10_10_10_synthetic_5000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 85 + ...1_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 65 + ...1_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 85 + ...1_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 85 + ...1_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ..._1_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 105 + ...p_1_csr_10_10_10_synthetic_10000_0.01.json | 1 + ...1_csr_10_10_10_synthetic_10000_0.01.output | 85 + ...p_1_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...1_csr_10_10_10_synthetic_10000_0.05.output | 65 + 
...3p_1_csr_10_10_10_synthetic_10000_0.1.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.1.output | 65 + ...3p_1_csr_10_10_10_synthetic_10000_0.2.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.2.output | 65 + ...3p_1_csr_10_10_10_synthetic_10000_0.3.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.3.output | 65 + ..._1_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1521 +++++++++ ..._1_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 89 + ...1_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 68 + ...1_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 68 + ...1_csr_10_10_10_synthetic_50000_0.0001.json | 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 85 + ..._1_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 65 + ...p_1_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...1_csr_10_10_10_synthetic_50000_0.01.output | 65 + ..._1_csr_10_10_10_synthetic_50000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_1e-05.output | 81 + ..._1_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 85 + ..._1_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 81 + ...p_1_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...1_csr_10_10_10_synthetic_5000_0.001.output | 81 + ...3p_1_csr_10_10_10_synthetic_5000_0.01.json | 1 + ..._1_csr_10_10_10_synthetic_5000_0.01.output | 85 + ...3p_1_csr_10_10_10_synthetic_5000_0.05.json | 1 + ..._1_csr_10_10_10_synthetic_5000_0.05.output | 85 + ...13p_1_csr_10_10_10_synthetic_5000_0.1.json | 1 + ...p_1_csr_10_10_10_synthetic_5000_0.1.output | 65 + ...13p_1_csr_10_10_10_synthetic_5000_0.2.json | 1 + ...p_1_csr_10_10_10_synthetic_5000_0.2.output | 65 + ...13p_1_csr_10_10_10_synthetic_5000_0.3.json | 1 + ...p_1_csr_10_10_10_synthetic_5000_0.3.output | 65 + ...13p_1_csr_10_10_10_synthetic_5000_0.4.json | 1 + ...p_1_csr_10_10_10_synthetic_5000_0.4.output | 65 + ...13p_1_csr_10_10_10_synthetic_5000_0.5.json | 1 + ...p_1_csr_10_10_10_synthetic_5000_0.5.output | 65 + ...p_1_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...1_csr_10_10_10_synthetic_5000_1e-05.output | 329 ++ ...p_1_csr_10_10_10_synthetic_5000_5e-05.json | 1 + ...1_csr_10_10_10_synthetic_5000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 85 + ...1_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 65 + ...1_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 85 + ...1_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 85 + ...1_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ..._1_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 85 + ...6_1_csr_10_10_10_synthetic_10000_0.01.json | 1 + ...1_csr_10_10_10_synthetic_10000_0.01.output | 85 + ...6_1_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...1_csr_10_10_10_synthetic_10000_0.05.output | 65 + ...16_1_csr_10_10_10_synthetic_10000_0.1.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.1.output | 65 + ...16_1_csr_10_10_10_synthetic_10000_0.2.json | 1 + 
..._1_csr_10_10_10_synthetic_10000_0.2.output | 65 + ...16_1_csr_10_10_10_synthetic_10000_0.3.json | 1 + ..._1_csr_10_10_10_synthetic_10000_0.3.output | 45 + ..._1_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1307 +++++++ ..._1_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 47 + ...1_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 68 + ...1_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 68 + ...1_csr_10_10_10_synthetic_50000_0.0001.json | 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 85 + ..._1_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 85 + ...6_1_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...1_csr_10_10_10_synthetic_50000_0.01.output | 65 + ..._1_csr_10_10_10_synthetic_50000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_1e-05.output | 81 + ..._1_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 85 + ..._1_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 81 + ...6_1_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...1_csr_10_10_10_synthetic_5000_0.001.output | 81 + ...16_1_csr_10_10_10_synthetic_5000_0.01.json | 1 + ..._1_csr_10_10_10_synthetic_5000_0.01.output | 85 + ...16_1_csr_10_10_10_synthetic_5000_0.05.json | 1 + ..._1_csr_10_10_10_synthetic_5000_0.05.output | 85 + ...216_1_csr_10_10_10_synthetic_5000_0.1.json | 1 + ...6_1_csr_10_10_10_synthetic_5000_0.1.output | 85 + ...216_1_csr_10_10_10_synthetic_5000_0.2.json | 1 + ...6_1_csr_10_10_10_synthetic_5000_0.2.output | 65 + ...216_1_csr_10_10_10_synthetic_5000_0.3.json | 1 + ...6_1_csr_10_10_10_synthetic_5000_0.3.output | 65 + ...216_1_csr_10_10_10_synthetic_5000_0.4.json | 1 + ...6_1_csr_10_10_10_synthetic_5000_0.4.output | 65 + ...216_1_csr_10_10_10_synthetic_5000_0.5.json | 1 + ...6_1_csr_10_10_10_synthetic_5000_0.5.output | 65 + ...6_1_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...1_csr_10_10_10_synthetic_5000_1e-05.output | 329 ++ ...6_1_csr_10_10_10_synthetic_5000_5e-05.json | 1 + ...1_csr_10_10_10_synthetic_5000_5e-05.output | 81 + ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 + ...oo_10_10_10_synthetic_100000_0.0001.output | 74 + ...x_coo_10_10_10_synthetic_100000_0.001.json | 1 + ...coo_10_10_10_synthetic_100000_0.001.output | 56 + ...ax_coo_10_10_10_synthetic_100000_0.01.json | 1 + ..._coo_10_10_10_synthetic_100000_0.01.output | 39 + ...x_coo_10_10_10_synthetic_100000_1e-05.json | 1 + ...coo_10_10_10_synthetic_100000_1e-05.output | 73 + ...x_coo_10_10_10_synthetic_100000_5e-05.json | 1 + ...coo_10_10_10_synthetic_100000_5e-05.output | 73 + ...x_coo_10_10_10_synthetic_10000_0.0001.json | 1 + ...coo_10_10_10_synthetic_10000_0.0001.output | 74 + ...ax_coo_10_10_10_synthetic_10000_0.001.json | 1 + ..._coo_10_10_10_synthetic_10000_0.001.output | 73 + ...max_coo_10_10_10_synthetic_10000_0.01.json | 1 + ...x_coo_10_10_10_synthetic_10000_0.01.output | 73 + ...max_coo_10_10_10_synthetic_10000_0.05.json | 1 + ...x_coo_10_10_10_synthetic_10000_0.05.output | 56 + ..._max_coo_10_10_10_synthetic_10000_0.1.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.1.output | 56 + ..._max_coo_10_10_10_synthetic_10000_0.2.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.2.output | 56 + 
..._max_coo_10_10_10_synthetic_10000_0.3.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.3.output | 56 + ..._max_coo_10_10_10_synthetic_10000_0.4.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.4.output | 58 + ..._max_coo_10_10_10_synthetic_10000_0.5.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.5.output | 39 + ...ax_coo_10_10_10_synthetic_10000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_1e-05.output | 855 +++++ ...ax_coo_10_10_10_synthetic_10000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_5e-05.output | 74 + ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 + ...oo_10_10_10_synthetic_500000_0.0001.output | 39 + ...x_coo_10_10_10_synthetic_500000_1e-05.json | 1 + ...coo_10_10_10_synthetic_500000_1e-05.output | 90 + ...x_coo_10_10_10_synthetic_500000_5e-05.json | 1 + ...coo_10_10_10_synthetic_500000_5e-05.output | 56 + ...x_coo_10_10_10_synthetic_50000_0.0001.json | 1 + ...coo_10_10_10_synthetic_50000_0.0001.output | 73 + ...ax_coo_10_10_10_synthetic_50000_0.001.json | 1 + ..._coo_10_10_10_synthetic_50000_0.001.output | 73 + ...max_coo_10_10_10_synthetic_50000_0.01.json | 1 + ...x_coo_10_10_10_synthetic_50000_0.01.output | 73 + ...ax_coo_10_10_10_synthetic_50000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_1e-05.output | 73 + ...ax_coo_10_10_10_synthetic_50000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_5e-05.output | 90 + ...ax_coo_10_10_10_synthetic_5000_0.0001.json | 1 + ..._coo_10_10_10_synthetic_5000_0.0001.output | 73 + ...max_coo_10_10_10_synthetic_5000_0.001.json | 1 + ...x_coo_10_10_10_synthetic_5000_0.001.output | 73 + ..._max_coo_10_10_10_synthetic_5000_0.01.json | 1 + ...ax_coo_10_10_10_synthetic_5000_0.01.output | 73 + ..._max_coo_10_10_10_synthetic_5000_0.05.json | 1 + ...ax_coo_10_10_10_synthetic_5000_0.05.output | 73 + ...a_max_coo_10_10_10_synthetic_5000_0.1.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.1.output | 56 + ...a_max_coo_10_10_10_synthetic_5000_0.2.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.2.output | 56 + ...a_max_coo_10_10_10_synthetic_5000_0.3.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.3.output | 56 + ...a_max_coo_10_10_10_synthetic_5000_0.4.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.4.output | 56 + ...a_max_coo_10_10_10_synthetic_5000_0.5.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.5.output | 56 + ...max_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...x_coo_10_10_10_synthetic_5000_1e-05.output | 429 +++ ...max_coo_10_10_10_synthetic_5000_5e-05.json | 1 + ...x_coo_10_10_10_synthetic_5000_5e-05.output | 73 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 60 +- ...x_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 64 +- ...ax_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 65 + ...x_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 68 +- ...x_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 74 +- ...x_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ...ax_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 72 +- ...max_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...x_csr_10_10_10_synthetic_10000_0.01.output | 58 +- ...max_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...x_csr_10_10_10_synthetic_10000_0.05.output | 42 +- ..._max_csr_10_10_10_synthetic_10000_0.1.json | 2 +- 
...ax_csr_10_10_10_synthetic_10000_0.1.output | 42 +- ..._max_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ...ax_csr_10_10_10_synthetic_10000_0.2.output | 42 +- ..._max_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ...ax_csr_10_10_10_synthetic_10000_0.3.output | 42 +- ..._max_csr_10_10_10_synthetic_10000_0.4.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.4.output | 45 + ..._max_csr_10_10_10_synthetic_10000_0.5.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.5.output | 45 + ...ax_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 2621 +++++++------- ...ax_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 90 +- ...x_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 106 +- ...x_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 69 +- ...x_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 68 +- ...ax_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 96 +- ...max_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...x_csr_10_10_10_synthetic_50000_0.01.output | 64 +- ...ax_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 100 +- ...ax_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_5e-05.output | 66 +- ...ax_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 88 +- ...max_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...x_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ..._max_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ...ax_csr_10_10_10_synthetic_5000_0.01.output | 66 +- ..._max_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ...ax_csr_10_10_10_synthetic_5000_0.05.output | 78 +- ...a_max_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.1.output | 80 +- ...a_max_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.2.output | 42 +- ...a_max_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.3.output | 42 +- ...a_max_csr_10_10_10_synthetic_5000_0.4.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.4.output | 42 +- ...a_max_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.5.output | 42 +- ...max_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...x_csr_10_10_10_synthetic_5000_1e-05.output | 580 ++-- ...max_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...x_csr_10_10_10_synthetic_5000_5e-05.output | 64 +- ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 + ...oo_10_10_10_synthetic_100000_0.0001.output | 56 + ...x_coo_10_10_10_synthetic_100000_0.001.json | 1 + ...coo_10_10_10_synthetic_100000_0.001.output | 39 + ...ax_coo_10_10_10_synthetic_100000_0.01.json | 1 + ..._coo_10_10_10_synthetic_100000_0.01.output | 39 + ...x_coo_10_10_10_synthetic_100000_1e-05.json | 1 + ...coo_10_10_10_synthetic_100000_1e-05.output | 56 + ...x_coo_10_10_10_synthetic_100000_5e-05.json | 1 + ...coo_10_10_10_synthetic_100000_5e-05.output | 56 + ...x_coo_10_10_10_synthetic_10000_0.0001.json | 1 + ...coo_10_10_10_synthetic_10000_0.0001.output | 73 + ...ax_coo_10_10_10_synthetic_10000_0.001.json | 1 + ..._coo_10_10_10_synthetic_10000_0.001.output | 56 + ...max_coo_10_10_10_synthetic_10000_0.01.json | 1 + 
...x_coo_10_10_10_synthetic_10000_0.01.output | 56 + ...max_coo_10_10_10_synthetic_10000_0.05.json | 1 + ...x_coo_10_10_10_synthetic_10000_0.05.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.1.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.1.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.2.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.2.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.3.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.3.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.4.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.4.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.5.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.5.output | 39 + ...ax_coo_10_10_10_synthetic_10000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_1e-05.output | 748 ++++ ...ax_coo_10_10_10_synthetic_10000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_5e-05.output | 73 + ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 + ...oo_10_10_10_synthetic_500000_0.0001.output | 39 + ...x_coo_10_10_10_synthetic_500000_1e-05.json | 1 + ...coo_10_10_10_synthetic_500000_1e-05.output | 56 + ...x_coo_10_10_10_synthetic_500000_5e-05.json | 1 + ...coo_10_10_10_synthetic_500000_5e-05.output | 39 + ...x_coo_10_10_10_synthetic_50000_0.0001.json | 1 + ...coo_10_10_10_synthetic_50000_0.0001.output | 56 + ...ax_coo_10_10_10_synthetic_50000_0.001.json | 1 + ..._coo_10_10_10_synthetic_50000_0.001.output | 56 + ...max_coo_10_10_10_synthetic_50000_0.01.json | 1 + ...x_coo_10_10_10_synthetic_50000_0.01.output | 39 + ...max_coo_10_10_10_synthetic_50000_0.05.json | 1 + ...x_coo_10_10_10_synthetic_50000_0.05.output | 39 + ...ax_coo_10_10_10_synthetic_50000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_1e-05.output | 73 + ...ax_coo_10_10_10_synthetic_50000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_5e-05.output | 73 + ...ax_coo_10_10_10_synthetic_5000_0.0001.json | 1 + ..._coo_10_10_10_synthetic_5000_0.0001.output | 73 + ...max_coo_10_10_10_synthetic_5000_0.001.json | 1 + ...x_coo_10_10_10_synthetic_5000_0.001.output | 73 + ..._max_coo_10_10_10_synthetic_5000_0.01.json | 1 + ...ax_coo_10_10_10_synthetic_5000_0.01.output | 56 + ..._max_coo_10_10_10_synthetic_5000_0.05.json | 1 + ...ax_coo_10_10_10_synthetic_5000_0.05.output | 56 + ...p_max_coo_10_10_10_synthetic_5000_0.1.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.1.output | 56 + ...p_max_coo_10_10_10_synthetic_5000_0.2.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.2.output | 39 + ...p_max_coo_10_10_10_synthetic_5000_0.3.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.3.output | 39 + ...p_max_coo_10_10_10_synthetic_5000_0.4.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.4.output | 39 + ...p_max_coo_10_10_10_synthetic_5000_0.5.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.5.output | 39 + ...max_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...x_coo_10_10_10_synthetic_5000_1e-05.output | 429 +++ ...max_coo_10_10_10_synthetic_5000_5e-05.json | 1 + ...x_coo_10_10_10_synthetic_5000_5e-05.output | 73 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 72 +- ...x_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 74 +- ...ax_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 65 + ...x_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 66 +- ...x_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 66 +- 
...x_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ...ax_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 66 +- ...max_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...x_csr_10_10_10_synthetic_10000_0.01.output | 94 +- ...max_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...x_csr_10_10_10_synthetic_10000_0.05.output | 74 +- ..._max_csr_10_10_10_synthetic_10000_0.1.json | 2 +- ...ax_csr_10_10_10_synthetic_10000_0.1.output | 74 +- ..._max_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ...ax_csr_10_10_10_synthetic_10000_0.2.output | 58 +- ..._max_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ...ax_csr_10_10_10_synthetic_10000_0.3.output | 94 +- ..._max_csr_10_10_10_synthetic_10000_0.4.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.4.output | 65 + ..._max_csr_10_10_10_synthetic_10000_0.5.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.5.output | 65 + ...ax_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 3000 +++++++---------- ...ax_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 64 +- ...x_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 104 +- ...x_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 64 +- ...x_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 66 +- ...ax_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 96 +- ...max_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...x_csr_10_10_10_synthetic_50000_0.01.output | 100 +- ...max_csr_10_10_10_synthetic_50000_0.05.json | 1 + ...x_csr_10_10_10_synthetic_50000_0.05.output | 65 + ...ax_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 88 +- ...ax_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_5e-05.output | 66 +- ...ax_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 66 +- ...max_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...x_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ..._max_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ...ax_csr_10_10_10_synthetic_5000_0.01.output | 86 +- ..._max_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ...ax_csr_10_10_10_synthetic_5000_0.05.output | 96 +- ...p_max_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.1.output | 74 +- ...p_max_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.2.output | 74 +- ...p_max_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.3.output | 74 +- ...p_max_csr_10_10_10_synthetic_5000_0.4.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.4.output | 78 +- ...p_max_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.5.output | 74 +- ...max_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...x_csr_10_10_10_synthetic_5000_1e-05.output | 554 +-- ...max_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...x_csr_10_10_10_synthetic_5000_5e-05.output | 85 +- ..._coo_10_10_10_synthetic_100000_0.0001.json | 1 + ...oo_10_10_10_synthetic_100000_0.0001.output | 56 + ...x_coo_10_10_10_synthetic_100000_0.001.json | 1 + ...coo_10_10_10_synthetic_100000_0.001.output | 39 + 
...ax_coo_10_10_10_synthetic_100000_0.01.json | 1 + ..._coo_10_10_10_synthetic_100000_0.01.output | 39 + ...x_coo_10_10_10_synthetic_100000_1e-05.json | 1 + ...coo_10_10_10_synthetic_100000_1e-05.output | 56 + ...x_coo_10_10_10_synthetic_100000_5e-05.json | 1 + ...coo_10_10_10_synthetic_100000_5e-05.output | 56 + ...x_coo_10_10_10_synthetic_10000_0.0001.json | 1 + ...coo_10_10_10_synthetic_10000_0.0001.output | 73 + ...ax_coo_10_10_10_synthetic_10000_0.001.json | 1 + ..._coo_10_10_10_synthetic_10000_0.001.output | 56 + ...max_coo_10_10_10_synthetic_10000_0.01.json | 1 + ...x_coo_10_10_10_synthetic_10000_0.01.output | 56 + ...max_coo_10_10_10_synthetic_10000_0.05.json | 1 + ...x_coo_10_10_10_synthetic_10000_0.05.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.1.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.1.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.2.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.2.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.3.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.3.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.4.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.4.output | 39 + ..._max_coo_10_10_10_synthetic_10000_0.5.json | 1 + ...ax_coo_10_10_10_synthetic_10000_0.5.output | 39 + ...ax_coo_10_10_10_synthetic_10000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_1e-05.output | 962 ++++++ ...ax_coo_10_10_10_synthetic_10000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_10000_5e-05.output | 73 + ..._coo_10_10_10_synthetic_500000_0.0001.json | 1 + ...oo_10_10_10_synthetic_500000_0.0001.output | 39 + ...x_coo_10_10_10_synthetic_500000_1e-05.json | 1 + ...coo_10_10_10_synthetic_500000_1e-05.output | 39 + ...x_coo_10_10_10_synthetic_500000_5e-05.json | 1 + ...coo_10_10_10_synthetic_500000_5e-05.output | 39 + ...x_coo_10_10_10_synthetic_50000_0.0001.json | 1 + ...coo_10_10_10_synthetic_50000_0.0001.output | 56 + ...ax_coo_10_10_10_synthetic_50000_0.001.json | 1 + ..._coo_10_10_10_synthetic_50000_0.001.output | 56 + ...max_coo_10_10_10_synthetic_50000_0.01.json | 1 + ...x_coo_10_10_10_synthetic_50000_0.01.output | 39 + ...max_coo_10_10_10_synthetic_50000_0.05.json | 1 + ...x_coo_10_10_10_synthetic_50000_0.05.output | 39 + ...ax_coo_10_10_10_synthetic_50000_1e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_1e-05.output | 73 + ...ax_coo_10_10_10_synthetic_50000_5e-05.json | 1 + ..._coo_10_10_10_synthetic_50000_5e-05.output | 56 + ...ax_coo_10_10_10_synthetic_5000_0.0001.json | 1 + ..._coo_10_10_10_synthetic_5000_0.0001.output | 73 + ...max_coo_10_10_10_synthetic_5000_0.001.json | 1 + ...x_coo_10_10_10_synthetic_5000_0.001.output | 73 + ..._max_coo_10_10_10_synthetic_5000_0.01.json | 1 + ...ax_coo_10_10_10_synthetic_5000_0.01.output | 56 + ..._max_coo_10_10_10_synthetic_5000_0.05.json | 1 + ...ax_coo_10_10_10_synthetic_5000_0.05.output | 56 + ...6_max_coo_10_10_10_synthetic_5000_0.1.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.1.output | 56 + ...6_max_coo_10_10_10_synthetic_5000_0.2.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.2.output | 39 + ...6_max_coo_10_10_10_synthetic_5000_0.3.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.3.output | 39 + ...6_max_coo_10_10_10_synthetic_5000_0.4.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.4.output | 39 + ...6_max_coo_10_10_10_synthetic_5000_0.5.json | 1 + ...max_coo_10_10_10_synthetic_5000_0.5.output | 39 + ...max_coo_10_10_10_synthetic_5000_1e-05.json | 1 + ...x_coo_10_10_10_synthetic_5000_1e-05.output | 456 +++ ...max_coo_10_10_10_synthetic_5000_5e-05.json | 1 + 
...x_coo_10_10_10_synthetic_5000_5e-05.output | 73 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_100000_0.0001.output | 96 +- ...x_csr_10_10_10_synthetic_100000_0.001.json | 2 +- ...csr_10_10_10_synthetic_100000_0.001.output | 58 +- ...ax_csr_10_10_10_synthetic_100000_0.01.json | 1 + ..._csr_10_10_10_synthetic_100000_0.01.output | 65 + ...x_csr_10_10_10_synthetic_100000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_1e-05.output | 64 +- ...x_csr_10_10_10_synthetic_100000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_100000_5e-05.output | 66 +- ...x_csr_10_10_10_synthetic_10000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_10000_0.0001.output | 66 +- ...ax_csr_10_10_10_synthetic_10000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_10000_0.001.output | 66 +- ...max_csr_10_10_10_synthetic_10000_0.01.json | 2 +- ...x_csr_10_10_10_synthetic_10000_0.01.output | 74 +- ...max_csr_10_10_10_synthetic_10000_0.05.json | 2 +- ...x_csr_10_10_10_synthetic_10000_0.05.output | 96 +- ..._max_csr_10_10_10_synthetic_10000_0.1.json | 2 +- ...ax_csr_10_10_10_synthetic_10000_0.1.output | 78 +- ..._max_csr_10_10_10_synthetic_10000_0.2.json | 2 +- ...ax_csr_10_10_10_synthetic_10000_0.2.output | 58 +- ..._max_csr_10_10_10_synthetic_10000_0.3.json | 2 +- ...ax_csr_10_10_10_synthetic_10000_0.3.output | 98 +- ..._max_csr_10_10_10_synthetic_10000_0.4.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.4.output | 65 + ..._max_csr_10_10_10_synthetic_10000_0.5.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.5.output | 65 + ...ax_csr_10_10_10_synthetic_10000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_1e-05.output | 2516 +++++++------- ...ax_csr_10_10_10_synthetic_10000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_10000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_500000_0.0001.json | 2 +- ...sr_10_10_10_synthetic_500000_0.0001.output | 85 +- ...x_csr_10_10_10_synthetic_500000_1e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_1e-05.output | 82 +- ...x_csr_10_10_10_synthetic_500000_5e-05.json | 2 +- ...csr_10_10_10_synthetic_500000_5e-05.output | 60 +- ...x_csr_10_10_10_synthetic_50000_0.0001.json | 2 +- ...csr_10_10_10_synthetic_50000_0.0001.output | 66 +- ...ax_csr_10_10_10_synthetic_50000_0.001.json | 2 +- ..._csr_10_10_10_synthetic_50000_0.001.output | 96 +- ...max_csr_10_10_10_synthetic_50000_0.01.json | 2 +- ...x_csr_10_10_10_synthetic_50000_0.01.output | 79 +- ...max_csr_10_10_10_synthetic_50000_0.05.json | 1 + ...x_csr_10_10_10_synthetic_50000_0.05.output | 65 + ...ax_csr_10_10_10_synthetic_50000_1e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_1e-05.output | 85 +- ...ax_csr_10_10_10_synthetic_50000_5e-05.json | 2 +- ..._csr_10_10_10_synthetic_50000_5e-05.output | 66 +- ...ax_csr_10_10_10_synthetic_5000_0.0001.json | 2 +- ..._csr_10_10_10_synthetic_5000_0.0001.output | 64 +- ...max_csr_10_10_10_synthetic_5000_0.001.json | 2 +- ...x_csr_10_10_10_synthetic_5000_0.001.output | 66 +- ..._max_csr_10_10_10_synthetic_5000_0.01.json | 2 +- ...ax_csr_10_10_10_synthetic_5000_0.01.output | 66 +- ..._max_csr_10_10_10_synthetic_5000_0.05.json | 2 +- ...ax_csr_10_10_10_synthetic_5000_0.05.output | 74 +- ...6_max_csr_10_10_10_synthetic_5000_0.1.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.1.output | 74 +- ...6_max_csr_10_10_10_synthetic_5000_0.2.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.2.output | 74 +- ...6_max_csr_10_10_10_synthetic_5000_0.3.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.3.output | 74 +- ...6_max_csr_10_10_10_synthetic_5000_0.4.json | 2 +- 
...max_csr_10_10_10_synthetic_5000_0.4.output | 100 +- ...6_max_csr_10_10_10_synthetic_5000_0.5.json | 2 +- ...max_csr_10_10_10_synthetic_5000_0.5.output | 78 +- ...max_csr_10_10_10_synthetic_5000_1e-05.json | 2 +- ...x_csr_10_10_10_synthetic_5000_1e-05.output | 583 ++-- ...max_csr_10_10_10_synthetic_5000_5e-05.json | 2 +- ...x_csr_10_10_10_synthetic_5000_5e-05.output | 66 +- ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 65 + ...x_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 45 + ...x_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 85 + ...x_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 65 + ...x_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ...ax_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 85 + ...max_csr_10_10_10_synthetic_10000_0.01.json | 1 + ...x_csr_10_10_10_synthetic_10000_0.01.output | 65 + ...max_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...x_csr_10_10_10_synthetic_10000_0.05.output | 45 + ..._max_csr_10_10_10_synthetic_10000_0.1.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.1.output | 45 + ..._max_csr_10_10_10_synthetic_10000_0.2.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.2.output | 45 + ..._max_csr_10_10_10_synthetic_10000_0.3.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.3.output | 45 + ...ax_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1200 +++++++ ...ax_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 47 + ...x_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 110 + ...x_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 47 + ...x_csr_10_10_10_synthetic_50000_0.0001.json | 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 85 + ...ax_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 85 + ...max_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...x_csr_10_10_10_synthetic_50000_0.01.output | 45 + ...ax_csr_10_10_10_synthetic_50000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_1e-05.output | 120 + ...ax_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 85 + ...ax_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 81 + ...max_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...x_csr_10_10_10_synthetic_5000_0.001.output | 81 + ..._max_csr_10_10_10_synthetic_5000_0.01.json | 1 + ...ax_csr_10_10_10_synthetic_5000_0.01.output | 85 + ..._max_csr_10_10_10_synthetic_5000_0.05.json | 1 + ...ax_csr_10_10_10_synthetic_5000_0.05.output | 85 + ...a_max_csr_10_10_10_synthetic_5000_0.1.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.1.output | 65 + ...a_max_csr_10_10_10_synthetic_5000_0.2.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.2.output | 45 + ...a_max_csr_10_10_10_synthetic_5000_0.3.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.3.output | 45 + ...a_max_csr_10_10_10_synthetic_5000_0.4.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.4.output | 45 + ...a_max_csr_10_10_10_synthetic_5000_0.5.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.5.output | 45 + 
...max_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...x_csr_10_10_10_synthetic_5000_1e-05.output | 329 ++ ...max_csr_10_10_10_synthetic_5000_5e-05.json | 1 + ...x_csr_10_10_10_synthetic_5000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 85 + ...x_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 85 + ...x_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 85 + ...x_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 85 + ...x_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ...ax_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 85 + ...max_csr_10_10_10_synthetic_10000_0.01.json | 1 + ...x_csr_10_10_10_synthetic_10000_0.01.output | 85 + ...max_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...x_csr_10_10_10_synthetic_10000_0.05.output | 85 + ..._max_csr_10_10_10_synthetic_10000_0.1.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.1.output | 85 + ..._max_csr_10_10_10_synthetic_10000_0.2.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.2.output | 65 + ..._max_csr_10_10_10_synthetic_10000_0.3.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.3.output | 85 + ...ax_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1793 ++++++++++ ...ax_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 68 + ...x_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 89 + ...x_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 68 + ...x_csr_10_10_10_synthetic_50000_0.0001.json | 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 85 + ...ax_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 85 + ...max_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...x_csr_10_10_10_synthetic_50000_0.01.output | 65 + ...ax_csr_10_10_10_synthetic_50000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_1e-05.output | 81 + ...ax_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 85 + ...ax_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 81 + ...max_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...x_csr_10_10_10_synthetic_5000_0.001.output | 81 + ..._max_csr_10_10_10_synthetic_5000_0.01.json | 1 + ...ax_csr_10_10_10_synthetic_5000_0.01.output | 85 + ..._max_csr_10_10_10_synthetic_5000_0.05.json | 1 + ...ax_csr_10_10_10_synthetic_5000_0.05.output | 85 + ...p_max_csr_10_10_10_synthetic_5000_0.1.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.1.output | 85 + ...p_max_csr_10_10_10_synthetic_5000_0.2.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.2.output | 85 + ...p_max_csr_10_10_10_synthetic_5000_0.3.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.3.output | 85 + ...p_max_csr_10_10_10_synthetic_5000_0.4.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.4.output | 65 + ...p_max_csr_10_10_10_synthetic_5000_0.5.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.5.output | 85 + ...max_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...x_csr_10_10_10_synthetic_5000_1e-05.output | 329 ++ ...max_csr_10_10_10_synthetic_5000_5e-05.json | 1 + 
...x_csr_10_10_10_synthetic_5000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_100000_0.0001.json | 1 + ...sr_10_10_10_synthetic_100000_0.0001.output | 85 + ...x_csr_10_10_10_synthetic_100000_0.001.json | 1 + ...csr_10_10_10_synthetic_100000_0.001.output | 65 + ...x_csr_10_10_10_synthetic_100000_1e-05.json | 1 + ...csr_10_10_10_synthetic_100000_1e-05.output | 85 + ...x_csr_10_10_10_synthetic_100000_5e-05.json | 1 + ...csr_10_10_10_synthetic_100000_5e-05.output | 85 + ...x_csr_10_10_10_synthetic_10000_0.0001.json | 1 + ...csr_10_10_10_synthetic_10000_0.0001.output | 81 + ...ax_csr_10_10_10_synthetic_10000_0.001.json | 1 + ..._csr_10_10_10_synthetic_10000_0.001.output | 85 + ...max_csr_10_10_10_synthetic_10000_0.01.json | 1 + ...x_csr_10_10_10_synthetic_10000_0.01.output | 85 + ...max_csr_10_10_10_synthetic_10000_0.05.json | 1 + ...x_csr_10_10_10_synthetic_10000_0.05.output | 85 + ..._max_csr_10_10_10_synthetic_10000_0.1.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.1.output | 85 + ..._max_csr_10_10_10_synthetic_10000_0.2.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.2.output | 65 + ..._max_csr_10_10_10_synthetic_10000_0.3.json | 1 + ...ax_csr_10_10_10_synthetic_10000_0.3.output | 105 + ...ax_csr_10_10_10_synthetic_10000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_1e-05.output | 1307 +++++++ ...ax_csr_10_10_10_synthetic_10000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_10000_5e-05.output | 81 + ..._csr_10_10_10_synthetic_500000_0.0001.json | 1 + ...sr_10_10_10_synthetic_500000_0.0001.output | 89 + ...x_csr_10_10_10_synthetic_500000_1e-05.json | 1 + ...csr_10_10_10_synthetic_500000_1e-05.output | 89 + ...x_csr_10_10_10_synthetic_500000_5e-05.json | 1 + ...csr_10_10_10_synthetic_500000_5e-05.output | 68 + ...x_csr_10_10_10_synthetic_50000_0.0001.json | 1 + ...csr_10_10_10_synthetic_50000_0.0001.output | 85 + ...ax_csr_10_10_10_synthetic_50000_0.001.json | 1 + ..._csr_10_10_10_synthetic_50000_0.001.output | 85 + ...max_csr_10_10_10_synthetic_50000_0.01.json | 1 + ...x_csr_10_10_10_synthetic_50000_0.01.output | 86 + ...ax_csr_10_10_10_synthetic_50000_1e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_1e-05.output | 100 + ...ax_csr_10_10_10_synthetic_50000_5e-05.json | 1 + ..._csr_10_10_10_synthetic_50000_5e-05.output | 85 + ...ax_csr_10_10_10_synthetic_5000_0.0001.json | 1 + ..._csr_10_10_10_synthetic_5000_0.0001.output | 81 + ...max_csr_10_10_10_synthetic_5000_0.001.json | 1 + ...x_csr_10_10_10_synthetic_5000_0.001.output | 81 + ..._max_csr_10_10_10_synthetic_5000_0.01.json | 1 + ...ax_csr_10_10_10_synthetic_5000_0.01.output | 85 + ..._max_csr_10_10_10_synthetic_5000_0.05.json | 1 + ...ax_csr_10_10_10_synthetic_5000_0.05.output | 85 + ...6_max_csr_10_10_10_synthetic_5000_0.1.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.1.output | 85 + ...6_max_csr_10_10_10_synthetic_5000_0.2.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.2.output | 85 + ...6_max_csr_10_10_10_synthetic_5000_0.3.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.3.output | 85 + ...6_max_csr_10_10_10_synthetic_5000_0.4.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.4.output | 85 + ...6_max_csr_10_10_10_synthetic_5000_0.5.json | 1 + ...max_csr_10_10_10_synthetic_5000_0.5.output | 85 + ...max_csr_10_10_10_synthetic_5000_1e-05.json | 1 + ...x_csr_10_10_10_synthetic_5000_1e-05.output | 329 ++ ...max_csr_10_10_10_synthetic_5000_5e-05.json | 1 + ...x_csr_10_10_10_synthetic_5000_5e-05.output | 81 + 1863 files changed, 91816 insertions(+), 24138 deletions(-) create mode 100644 
pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 
pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 
pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 
pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 
pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.01.json create 
mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_5e-05.json create 
mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 
pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 
pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 
pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 
pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 
pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 
pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 
pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.001.output create mode 
100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 
pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 
pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 
pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 
pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 
pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 
pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 
pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 
pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 
pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output create mode 100644 
pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json create mode 100644 
pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 
pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 
pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 
pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 
pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 
pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_1e-05.output create mode 100644 
pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.3.json create mode 100644 
pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.05.output create mode 100644 
pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.05.json create mode 100644 
pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 
pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output create mode 100644 
pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output 
create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json create mode 100644 
pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.json
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.output
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.json
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.output
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.json
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.output
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.json
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.output
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.json
 create mode 100644 pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.output

diff --git a/pytorch/batch.py b/pytorch/batch.py
index 8978d39..ff01165 100644
--- a/pytorch/batch.py
+++ b/pytorch/batch.py
@@ -117,7 +117,7 @@ elif args.matrix_type == MatrixType.SYNTHETIC:
     parameter_list = enumerate([(size, density)
                                 for size in args.synthetic_size
                                 for density in args.synthetic_density
-                                if size ** 2 * density <= 50000000])
+                                if size ** 2 * density <= 100000000])
 
     #for i, matrix in enumerate(glob.glob(f'{args.matrix_dir.rstrip("/")}/*.mtx')):
     for i, parameter in parameter_list:
diff --git a/pytorch/output_as-caida_1core/epyc_7313p_1_coo_10_10_10_as-caida_G_010.json b/pytorch/output_as-caida_1core/epyc_7313p_1_coo_10_10_10_as-caida_G_010.json
index e69de29..200315d 100644
--- a/pytorch/output_as-caida_1core/epyc_7313p_1_coo_10_10_10_as-caida_G_010.json
+++ b/pytorch/output_as-caida_1core/epyc_7313p_1_coo_10_10_10_as-caida_G_010.json
@@ -0,0 +1 @@
+{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 4950, "MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 10.512001991271973, "TIME_S_1KI": 2.12363676591353, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 850.9319224691392, "W": 64.26, "J_1KI": 171.90543888265438, "W_1KI": 12.981818181818182, "W_D": 29.119500000000002, "J_D": 385.600873270154, "W_D_1KI": 5.882727272727273, "J_D_1KI": 1.1884297520661158}
diff --git a/pytorch/output_as-caida_1core/epyc_7313p_1_coo_10_10_10_as-caida_G_010.output b/pytorch/output_as-caida_1core/epyc_7313p_1_coo_10_10_10_as-caida_G_010.output
index 3c8a67d..d142886 100644
---
a/pytorch/output_as-caida_1core/epyc_7313p_1_coo_10_10_10_as-caida_G_010.output +++ b/pytorch/output_as-caida_1core/epyc_7313p_1_coo_10_10_10_as-caida_G_010.output @@ -1,11 +1,11 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'coo', '100', '-m', 'matrices/as-caida_pruned/as-caida_G_010.mtx', '-c', '1'] -{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 0.2207937240600586} +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 0.23576855659484863} tensor(indices=tensor([[ 1040, 2020, 2054, ..., 160, 160, 12170], [ 0, 0, 0, ..., 31353, 31360, 31378]]), values=tensor([3., 1., 1., ..., 3., 3., 3.]), size=(31379, 31379), nnz=74994, layout=torch.sparse_coo) -tensor([0.7193, 0.7070, 0.1341, ..., 0.0458, 0.8165, 0.6087]) +tensor([0.0720, 0.8900, 0.2156, ..., 0.8187, 0.1034, 0.8459]) Matrix Type: SuiteSparse Matrix: as-caida_G_010 Matrix Format: coo @@ -14,16 +14,16 @@ Rows: 31379 Size: 984641641 NNZ: 74994 Density: 7.616375021864427e-05 -Time: 0.2207937240600586 seconds +Time: 0.23576855659484863 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'coo', '4755', '-m', 'matrices/as-caida_pruned/as-caida_G_010.mtx', '-c', '1'] -{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 10.089951038360596} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'coo', '4453', '-m', 'matrices/as-caida_pruned/as-caida_G_010.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 9.444213628768921} tensor(indices=tensor([[ 1040, 2020, 2054, ..., 160, 160, 12170], [ 0, 0, 0, ..., 31353, 31360, 31378]]), values=tensor([3., 1., 1., ..., 3., 3., 3.]), size=(31379, 31379), nnz=74994, layout=torch.sparse_coo) -tensor([0.1581, 0.9236, 0.8715, ..., 0.0128, 0.7061, 0.4474]) +tensor([0.5426, 0.3622, 0.1360, ..., 0.9710, 0.4622, 0.4763]) Matrix Type: SuiteSparse Matrix: as-caida_G_010 Matrix Format: coo @@ -32,13 +32,16 @@ Rows: 31379 Size: 984641641 NNZ: 74994 Density: 7.616375021864427e-05 -Time: 10.089951038360596 seconds +Time: 9.444213628768921 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'suitesparse', 'coo', '4950', '-m', 'matrices/as-caida_pruned/as-caida_G_010.mtx', '-c', '1'] +{"MATRIX_TYPE": "SuiteSparse", "MATRIX_FILE": "as-caida_G_010", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [31379, 31379], "MATRIX_ROWS": 31379, "MATRIX_SIZE": 984641641, "MATRIX_NNZ": 74994, "MATRIX_DENSITY": 7.616375021864427e-05, "TIME_S": 10.512001991271973} tensor(indices=tensor([[ 1040, 2020, 2054, ..., 
160, 160, 12170], [ 0, 0, 0, ..., 31353, 31360, 31378]]), values=tensor([3., 1., 1., ..., 3., 3., 3.]), size=(31379, 31379), nnz=74994, layout=torch.sparse_coo) -tensor([0.1581, 0.9236, 0.8715, ..., 0.0128, 0.7061, 0.4474]) +tensor([0.9719, 0.1948, 0.3482, ..., 0.1448, 0.3015, 0.3034]) Matrix Type: SuiteSparse Matrix: as-caida_G_010 Matrix Format: coo @@ -47,9 +50,28 @@ Rows: 31379 Size: 984641641 NNZ: 74994 Density: 7.616375021864427e-05 -Time: 10.089951038360596 seconds +Time: 10.512001991271973 seconds -[44.57, 39.27, 38.78, 39.37, 39.08, 39.0, 39.29, 40.29, 38.94, 38.6] -[64.64] -12.744337558746338 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 4755, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 10.089951038360596, 'TIME_S_1KI': 2.1219665695816183, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 823.7939797973632, 'W': 64.64} +tensor(indices=tensor([[ 1040, 2020, 2054, ..., 160, 160, 12170], + [ 0, 0, 0, ..., 31353, 31360, 31378]]), + values=tensor([3., 1., 1., ..., 3., 3., 3.]), + size=(31379, 31379), nnz=74994, layout=torch.sparse_coo) +tensor([0.9719, 0.1948, 0.3482, ..., 0.1448, 0.3015, 0.3034]) +Matrix Type: SuiteSparse +Matrix: as-caida_G_010 +Matrix Format: coo +Shape: torch.Size([31379, 31379]) +Rows: 31379 +Size: 984641641 +NNZ: 74994 +Density: 7.616375021864427e-05 +Time: 10.512001991271973 seconds + +[39.44, 39.17, 38.57, 39.04, 39.83, 39.07, 38.63, 38.66, 40.52, 38.52] +[64.26] +13.242015600204468 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 4950, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 10.512001991271973, 'TIME_S_1KI': 2.12363676591353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 850.9319224691392, 'W': 64.26} +[39.44, 39.17, 38.57, 39.04, 39.83, 39.07, 38.63, 38.66, 40.52, 38.52, 39.2, 38.77, 38.71, 38.46, 38.74, 39.68, 39.05, 39.32, 38.59, 38.84] +702.8100000000001 +35.1405 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 4950, 'MATRIX_TYPE': 'SuiteSparse', 'MATRIX_FILE': 'as-caida_G_010', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [31379, 31379], 'MATRIX_ROWS': 31379, 'MATRIX_SIZE': 984641641, 'MATRIX_NNZ': 74994, 'MATRIX_DENSITY': 7.616375021864427e-05, 'TIME_S': 10.512001991271973, 'TIME_S_1KI': 2.12363676591353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 850.9319224691392, 'W': 64.26, 'J_1KI': 171.90543888265438, 'W_1KI': 12.981818181818182, 'W_D': 29.119500000000002, 'J_D': 385.600873270154, 'W_D_1KI': 5.882727272727273, 'J_D_1KI': 1.1884297520661158} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..39f928f --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 367, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.419839143753052, "TIME_S_1KI": 28.39193227180668, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 397.99056369781505, "W": 
27.315883432832518, "J_1KI": 1084.4429528550818, "W_1KI": 74.43020008946189, "W_D": 12.377883432832519, "J_D": 180.34491972160353, "W_D_1KI": 33.727202814257545, "J_D_1KI": 91.89973518871265} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..fbabafc --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3841264247894287} + +tensor(indices=tensor([[12468, 7286, 42070, ..., 41435, 93609, 87646], + [40435, 70110, 80235, ..., 72237, 42645, 8439]]), + values=tensor([0.4478, 0.9727, 0.1236, ..., 0.3635, 0.6500, 0.0042]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.3810, 0.0198, 0.2872, ..., 0.7346, 0.9919, 0.8495]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.3841264247894287 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 273 -ss 100000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.802507400512695} + +tensor(indices=tensor([[66870, 12119, 38015, ..., 43745, 48193, 42393], + [24231, 38254, 74788, ..., 95676, 54345, 18633]]), + values=tensor([0.5489, 0.7321, 0.1915, ..., 0.2369, 0.8050, 0.8821]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.9003, 0.7844, 0.6361, ..., 0.6011, 0.0782, 0.7656]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 7.802507400512695 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 367 -ss 100000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.419839143753052} + +tensor(indices=tensor([[49924, 69461, 63269, ..., 50062, 11998, 67211], + [44612, 71174, 43698, ..., 38509, 65750, 86040]]), + values=tensor([0.0322, 0.9610, 0.0897, ..., 0.9817, 0.7794, 0.1450]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2779, 0.4359, 0.1613, ..., 0.8529, 0.4594, 0.0392]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.419839143753052 seconds + +tensor(indices=tensor([[49924, 69461, 63269, ..., 50062, 11998, 67211], + [44612, 71174, 43698, ..., 38509, 65750, 86040]]), + values=tensor([0.0322, 0.9610, 0.0897, ..., 0.9817, 0.7794, 0.1450]), + 
size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2779, 0.4359, 0.1613, ..., 0.8529, 0.4594, 0.0392]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.419839143753052 seconds + +[16.36, 16.36, 16.48, 16.6, 16.68, 16.72, 16.76, 16.64, 16.56, 16.68] +[16.88, 16.8, 17.76, 17.76, 19.72, 23.36, 28.64, 32.92, 36.04, 38.68, 38.72, 38.96, 38.72, 38.76] +14.569931983947754 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.419839143753052, 'TIME_S_1KI': 28.39193227180668, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 397.99056369781505, 'W': 27.315883432832518} +[16.36, 16.36, 16.48, 16.6, 16.68, 16.72, 16.76, 16.64, 16.56, 16.68, 16.4, 16.64, 16.6, 16.88, 16.76, 16.64, 16.64, 16.56, 16.28, 16.48] +298.76 +14.937999999999999 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.419839143753052, 'TIME_S_1KI': 28.39193227180668, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 397.99056369781505, 'W': 27.315883432832518, 'J_1KI': 1084.4429528550818, 'W_1KI': 74.43020008946189, 'W_D': 12.377883432832519, 'J_D': 180.34491972160353, 'W_D_1KI': 33.727202814257545, 'J_D_1KI': 91.89973518871265} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..b140396 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 34, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.180678606033325, "TIME_S_1KI": 299.4317237068625, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 422.515793838501, "W": 25.557128131432332, "J_1KI": 12426.935112897088, "W_1KI": 751.6802391597745, "W_D": 10.417128131432332, "J_D": 172.2181435775757, "W_D_1KI": 306.38612151271565, "J_D_1KI": 9011.356515079871} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..6364c6c --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.0579380989074707} + +tensor(indices=tensor([[40619, 25503, 5323, ..., 95949, 31390, 82914], + [17388, 8501, 29360, ..., 49103, 79345, 61915]]), + values=tensor([0.5711, 0.4325, 0.6219, ..., 0.4496, 0.6456, 0.2954]), + size=(100000, 100000), nnz=10000000, 
layout=torch.sparse_coo) +tensor([0.6293, 0.9496, 0.1593, ..., 0.6227, 0.5166, 0.2304]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 3.0579380989074707 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 34 -ss 100000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.180678606033325} + +tensor(indices=tensor([[22566, 11034, 23228, ..., 20288, 68913, 82958], + [93883, 53094, 89525, ..., 81254, 92542, 63317]]), + values=tensor([0.3468, 0.2008, 0.6088, ..., 0.6549, 0.5439, 0.6485]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.5288, 0.1483, 0.5754, ..., 0.9623, 0.5752, 0.3388]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.180678606033325 seconds + +tensor(indices=tensor([[22566, 11034, 23228, ..., 20288, 68913, 82958], + [93883, 53094, 89525, ..., 81254, 92542, 63317]]), + values=tensor([0.3468, 0.2008, 0.6088, ..., 0.6549, 0.5439, 0.6485]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.5288, 0.1483, 0.5754, ..., 0.9623, 0.5752, 0.3388]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.180678606033325 seconds + +[16.36, 16.68, 16.68, 16.96, 17.08, 17.24, 17.04, 16.76, 16.64, 16.48] +[16.32, 16.4, 19.4, 20.52, 22.24, 24.72, 24.72, 28.72, 29.0, 31.24, 33.04, 32.12, 32.0, 32.72, 33.04, 33.4] +16.532209396362305 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 34, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.180678606033325, 'TIME_S_1KI': 299.4317237068625, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 422.515793838501, 'W': 25.557128131432332} +[16.36, 16.68, 16.68, 16.96, 17.08, 17.24, 17.04, 16.76, 16.64, 16.48, 16.36, 16.44, 16.6, 16.4, 16.84, 17.16, 17.12, 17.12, 17.0, 16.88] +302.8 +15.14 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 34, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.180678606033325, 'TIME_S_1KI': 299.4317237068625, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 422.515793838501, 'W': 25.557128131432332, 'J_1KI': 12426.935112897088, 'W_1KI': 751.6802391597745, 'W_D': 10.417128131432332, 'J_D': 172.2181435775757, 'W_D_1KI': 306.38612151271565, 'J_D_1KI': 9011.356515079871} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..6e99db4 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, 
"MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 30.31324076652527, "TIME_S_1KI": 3031.324076652527, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1621.7109812927251, "W": 22.805805675157114, "J_1KI": 162171.09812927252, "W_1KI": 2280.580567515711, "W_D": 7.854805675157115, "J_D": 558.5518354740149, "W_D_1KI": 785.4805675157115, "J_D_1KI": 78548.05675157116} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..8174072 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 30.31324076652527} + +tensor(indices=tensor([[13635, 80432, 8009, ..., 1124, 10473, 6580], + [94305, 46771, 25673, ..., 59297, 21852, 23926]]), + values=tensor([0.3778, 0.8729, 0.5621, ..., 0.9145, 0.2094, 0.6532]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.3910, 0.8851, 0.6086, ..., 0.3028, 0.8290, 0.5528]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 30.31324076652527 seconds + +tensor(indices=tensor([[13635, 80432, 8009, ..., 1124, 10473, 6580], + [94305, 46771, 25673, ..., 59297, 21852, 23926]]), + values=tensor([0.3778, 0.8729, 0.5621, ..., 0.9145, 0.2094, 0.6532]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.3910, 0.8851, 0.6086, ..., 0.3028, 0.8290, 0.5528]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 30.31324076652527 seconds + +[16.72, 16.52, 16.56, 16.64, 16.96, 17.0, 17.08, 16.88, 16.72, 16.88] +[16.76, 17.08, 19.12, 19.12, 20.84, 22.12, 23.08, 23.8, 21.6, 20.96, 21.92, 23.96, 25.6, 26.32, 26.2, 26.2, 24.68, 22.8, 21.2, 22.24, 23.0, 24.88, 27.76, 27.72, 26.96, 26.2, 22.76, 23.16, 23.0, 23.0, 22.36, 22.92, 22.68, 22.08, 22.68, 22.6, 21.8, 22.76, 23.08, 23.12, 23.24, 22.6, 22.6, 23.04, 22.84, 22.24, 22.84, 22.92, 22.36, 22.92, 22.08, 23.0, 26.8, 26.72, 27.2, 27.8, 27.8, 24.12, 23.28, 22.8, 23.08, 23.16, 23.48, 23.84, 24.0, 24.48, 24.48, 24.64, 24.64, 24.56] +71.10956764221191 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 30.31324076652527, 'TIME_S_1KI': 3031.324076652527, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1621.7109812927251, 'W': 22.805805675157114} +[16.72, 16.52, 16.56, 16.64, 16.96, 17.0, 17.08, 16.88, 16.72, 16.88, 16.0, 16.04, 16.24, 16.4, 16.56, 16.44, 16.6, 16.52, 16.68, 16.76] +299.02 +14.950999999999999 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 30.31324076652527, 'TIME_S_1KI': 
3031.324076652527, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1621.7109812927251, 'W': 22.805805675157114, 'J_1KI': 162171.09812927252, 'W_1KI': 2280.580567515711, 'W_D': 7.854805675157115, 'J_D': 558.5518354740149, 'W_D_1KI': 785.4805675157115, 'J_D_1KI': 78548.05675157116} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..956e898 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3314, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.203248262405396, "TIME_S_1KI": 3.0788317025966796, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 375.60593753814703, "W": 27.842859053307212, "J_1KI": 113.33914832171003, "W_1KI": 8.401586920128912, "W_D": 12.732859053307209, "J_D": 171.76890681743623, "W_D_1KI": 3.8421421404065206, "J_D_1KI": 1.159366970551153} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..37d7d3d --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03758573532104492} + +tensor(indices=tensor([[76328, 20886, 32175, ..., 88815, 99390, 42238], + [62124, 81704, 42303, ..., 62938, 12925, 65352]]), + values=tensor([0.1394, 0.0752, 0.4473, ..., 0.7415, 0.9621, 0.2206]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.0051, 0.8634, 0.8851, ..., 0.0171, 0.3637, 0.0740]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.03758573532104492 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 2793 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.847030401229858} + +tensor(indices=tensor([[83955, 53828, 67250, ..., 26654, 58989, 94008], + [17882, 59451, 98038, ..., 31386, 2798, 66043]]), + values=tensor([0.7446, 0.4242, 0.1433, ..., 0.4259, 0.7075, 0.0137]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.7343, 0.5780, 0.7130, ..., 0.7518, 0.6791, 0.3911]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 8.847030401229858 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 
spmv.py synthetic coo 3314 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.203248262405396} + +tensor(indices=tensor([[60945, 91856, 78422, ..., 21207, 13896, 37102], + [ 3510, 19927, 52458, ..., 24413, 55529, 42984]]), + values=tensor([0.7731, 0.2319, 0.9132, ..., 0.7538, 0.6562, 0.8763]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9001, 0.5184, 0.9677, ..., 0.4892, 0.1066, 0.2961]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.203248262405396 seconds + +tensor(indices=tensor([[60945, 91856, 78422, ..., 21207, 13896, 37102], + [ 3510, 19927, 52458, ..., 24413, 55529, 42984]]), + values=tensor([0.7731, 0.2319, 0.9132, ..., 0.7538, 0.6562, 0.8763]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9001, 0.5184, 0.9677, ..., 0.4892, 0.1066, 0.2961]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.203248262405396 seconds + +[16.32, 16.52, 16.72, 16.8, 16.84, 17.12, 17.2, 17.12, 17.08, 16.84] +[16.96, 16.96, 16.68, 20.32, 21.64, 26.48, 30.96, 36.4, 37.12, 40.04, 40.28, 40.4, 40.24] +13.490207195281982 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3314, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.203248262405396, 'TIME_S_1KI': 3.0788317025966796, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 375.60593753814703, 'W': 27.842859053307212} +[16.32, 16.52, 16.72, 16.8, 16.84, 17.12, 17.2, 17.12, 17.08, 16.84, 16.88, 16.64, 16.56, 16.52, 16.76, 16.64, 16.64, 16.64, 16.92, 16.92] +302.20000000000005 +15.110000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3314, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.203248262405396, 'TIME_S_1KI': 3.0788317025966796, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 375.60593753814703, 'W': 27.842859053307212, 'J_1KI': 113.33914832171003, 'W_1KI': 8.401586920128912, 'W_D': 12.732859053307209, 'J_D': 171.76890681743623, 'W_D_1KI': 3.8421421404065206, 'J_D_1KI': 1.159366970551153} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..243c2a4 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 715, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.320415019989014, "TIME_S_1KI": 14.434146881103516, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 408.59843476295475, "W": 28.07372939695753, "J_1KI": 571.4663423258108, "W_1KI": 39.263957198541995, "W_D": 13.211729396957528, "J_D": 192.28980502653127, "W_D_1KI": 18.47794321252801, "J_D_1KI": 25.843277220318896} diff --git 
a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..40c7d73 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.19103288650512695} + +tensor(indices=tensor([[61371, 25679, 8054, ..., 88134, 1804, 13470], + [99010, 38769, 82799, ..., 8249, 8039, 50418]]), + values=tensor([0.7126, 0.6439, 0.5445, ..., 0.7563, 0.4214, 0.5013]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.0261, 0.4519, 0.6566, ..., 0.0991, 0.5006, 0.4804]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.19103288650512695 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 549 -ss 100000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.052725315093994} + +tensor(indices=tensor([[77539, 36814, 54376, ..., 37932, 50567, 94324], + [45152, 76258, 29226, ..., 85716, 88491, 37352]]), + values=tensor([0.9768, 0.4066, 0.7209, ..., 0.5181, 0.9076, 0.8507]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.2804, 0.1817, 0.9427, ..., 0.7550, 0.4651, 0.6315]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 8.052725315093994 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 715 -ss 100000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.320415019989014} + +tensor(indices=tensor([[ 4536, 78747, 27018, ..., 68410, 35059, 67322], + [38765, 55046, 93716, ..., 27462, 49408, 99832]]), + values=tensor([0.5387, 0.6165, 0.9464, ..., 0.2810, 0.4426, 0.6382]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.8650, 0.2994, 0.8365, ..., 0.8583, 0.9823, 0.7996]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.320415019989014 seconds + +tensor(indices=tensor([[ 4536, 78747, 27018, ..., 68410, 35059, 67322], + [38765, 55046, 93716, ..., 27462, 49408, 99832]]), + values=tensor([0.5387, 0.6165, 0.9464, ..., 0.2810, 0.4426, 0.6382]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.8650, 0.2994, 0.8365, ..., 0.8583, 0.9823, 0.7996]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 
10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.320415019989014 seconds + +[16.52, 16.32, 16.44, 16.72, 16.4, 16.56, 16.76, 16.76, 16.72, 16.72] +[16.92, 16.88, 16.84, 20.68, 21.88, 27.04, 31.28, 33.48, 36.68, 38.64, 38.8, 38.64, 38.68, 38.52] +14.554476499557495 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 715, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.320415019989014, 'TIME_S_1KI': 14.434146881103516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 408.59843476295475, 'W': 28.07372939695753} +[16.52, 16.32, 16.44, 16.72, 16.4, 16.56, 16.76, 16.76, 16.72, 16.72, 16.12, 16.28, 16.16, 16.44, 16.44, 16.68, 16.72, 16.64, 16.4, 16.24] +297.24 +14.862 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 715, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.320415019989014, 'TIME_S_1KI': 14.434146881103516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 408.59843476295475, 'W': 28.07372939695753, 'J_1KI': 571.4663423258108, 'W_1KI': 39.263957198541995, 'W_D': 13.211729396957528, 'J_D': 192.28980502653127, 'W_D_1KI': 18.47794321252801, 'J_D_1KI': 25.843277220318896} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..8f61976 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 47475, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.944029331207275, "TIME_S_1KI": 0.2305219448384892, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 260.57559970855715, "W": 18.35299474105064, "J_1KI": 5.488690883803205, "W_1KI": 0.386582301022657, "W_D": 2.1109947410506393, "J_D": 29.971878071784975, "W_D_1KI": 0.04446539738916565, "J_D_1KI": 0.0009366065800772122} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..5d9036e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.008498430252075195} + +tensor(indices=tensor([[6168, 2783, 304, ..., 7181, 3674, 7845], + [2081, 3093, 7875, ..., 7763, 2281, 9973]]), + values=tensor([0.3303, 0.9628, 0.1035, ..., 0.9481, 0.9467, 0.4079]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.8870, 0.6017, 0.4765, ..., 0.0324, 0.5601, 0.5440]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.008498430252075195 seconds + 
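The three commands in this file show the harness's calibration pattern: a 10-iteration probe, a scaled-up second run, and a final run that crosses the 10-second BASELINE_TIME_S target. The counts recorded here (10, then 12355, then 47475) are consistent with scaling the previous count by the ratio of the 10-second target to the measured time plus roughly a 5% margin. The sketch below is an illustrative reconstruction of that arithmetic, not the actual batch.py/spmv.py logic; the helper name next_iterations and the MARGIN constant are assumptions inferred from the recorded counts.

TARGET_S = 10.0   # matches BASELINE_TIME_S in the .json files
MARGIN = 1.05     # assumed overshoot factor, inferred from the recorded iteration counts

def next_iterations(prev_iters: int, elapsed_s: float) -> int:
    # Scale the previous count so the next run lands just past the 10-second target.
    return int(prev_iters * TARGET_S / elapsed_s * MARGIN)

# Values taken from this 10000 x 10000, density 0.0001 output:
print(next_iterations(10, 0.008498430252075195))   # 12355, the second run below
print(next_iterations(12355, 2.732515335083008))   # 47475, the final run below
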
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 12355 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.732515335083008} + +tensor(indices=tensor([[ 234, 3244, 2663, ..., 1564, 8779, 2237], + [6482, 5485, 8803, ..., 9220, 7760, 3534]]), + values=tensor([0.8332, 0.7642, 0.3774, ..., 0.6914, 0.1324, 0.7233]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.6346, 0.8402, 0.2794, ..., 0.9708, 0.9602, 0.1286]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 2.732515335083008 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 47475 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.944029331207275} + +tensor(indices=tensor([[9618, 2312, 3184, ..., 9849, 7099, 5767], + [8363, 6117, 6209, ..., 3323, 4797, 4014]]), + values=tensor([0.3820, 0.2164, 0.4818, ..., 0.1828, 0.9004, 0.2452]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7549, 0.9757, 0.2583, ..., 0.4859, 0.3771, 0.8348]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.944029331207275 seconds + +tensor(indices=tensor([[9618, 2312, 3184, ..., 9849, 7099, 5767], + [8363, 6117, 6209, ..., 3323, 4797, 4014]]), + values=tensor([0.3820, 0.2164, 0.4818, ..., 0.1828, 0.9004, 0.2452]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7549, 0.9757, 0.2583, ..., 0.4859, 0.3771, 0.8348]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.944029331207275 seconds + +[20.44, 20.48, 20.24, 20.32, 20.08, 20.08, 19.6, 18.44, 17.68, 16.88] +[16.4, 16.44, 17.0, 18.04, 20.36, 21.0, 21.76, 21.76, 21.44, 21.28, 19.68, 19.68, 19.92, 20.08] +14.197988033294678 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 47475, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.944029331207275, 'TIME_S_1KI': 0.2305219448384892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 260.57559970855715, 'W': 18.35299474105064} +[20.44, 20.48, 20.24, 20.32, 20.08, 20.08, 19.6, 18.44, 17.68, 16.88, 16.16, 16.16, 16.28, 16.44, 16.72, 16.84, 16.76, 16.76, 16.88, 16.68] +324.84000000000003 +16.242 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 47475, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.944029331207275, 'TIME_S_1KI': 0.2305219448384892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 260.57559970855715, 'W': 18.35299474105064, 'J_1KI': 5.488690883803205, 'W_1KI': 0.386582301022657, 'W_D': 2.1109947410506393, 'J_D': 29.971878071784975, 'W_D_1KI': 
0.04446539738916565, 'J_D_1KI': 0.0009366065800772122} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..27cd1bb --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 4810, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.547842979431152, "TIME_S_1KI": 2.192898748322485, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 276.70302688598633, "W": 19.38616370089356, "J_1KI": 57.52661681621338, "W_1KI": 4.030387463803235, "W_D": 4.429163700893563, "J_D": 63.21843875455857, "W_D_1KI": 0.9208240542398259, "J_D_1KI": 0.19143951231597212} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..faa7fdb --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02799510955810547} + +tensor(indices=tensor([[ 266, 5742, 6915, ..., 1668, 4137, 6263], + [5390, 3213, 7167, ..., 9658, 3658, 5142]]), + values=tensor([0.3984, 0.0693, 0.4181, ..., 0.1044, 0.5212, 0.3869]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.7188, 0.6598, 0.8343, ..., 0.0225, 0.4378, 0.3796]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.02799510955810547 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 3750 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.18604826927185} + +tensor(indices=tensor([[7560, 7798, 937, ..., 3206, 7915, 3876], + [6246, 9875, 3448, ..., 1615, 3380, 5979]]), + values=tensor([0.6216, 0.7110, 0.4328, ..., 0.6404, 0.8377, 0.3045]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.1658, 0.2435, 0.8360, ..., 0.5002, 0.6405, 0.4177]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 8.18604826927185 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 4810 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.547842979431152} + 
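The tail of each of these .output files carries the power accounting behind the derived fields in the matching .json: a ten-sample idle power list, a list of samples taken during the timed window, the wall-clock seconds of that window, a dict up through 'W', and then a twenty-sample idle list followed by two scalars (the second is the first divided by 20 and acts as the idle-power estimate) and the final dict. The values recorded in this patch are consistent with the relations sketched below; derived_fields is a hypothetical helper, and the formulas are inferred from the recorded numbers rather than taken from the benchmark source.

def derived_fields(iterations, time_s, w, elapsed_s, w_idle):
    # Reproduces the derived entries of the .json files from the raw values
    # printed at the end of the matching .output file.
    kilo_iters = iterations / 1000
    out = {"J": w * elapsed_s,          # energy over the measured window
           "W_D": w - w_idle}           # power above the idle estimate
    out["J_D"] = out["W_D"] * elapsed_s
    out["TIME_S_1KI"] = time_s / kilo_iters
    out["J_1KI"] = out["J"] / kilo_iters
    out["W_1KI"] = w / kilo_iters
    out["W_D_1KI"] = out["W_D"] / kilo_iters
    # The recorded J_D_1KI values match W_D_1KI / kilo_iters, not J_D / kilo_iters.
    out["J_D_1KI"] = out["W_D_1KI"] / kilo_iters
    return out

# Values from this 10000 x 10000, density 0.001 run (raw numbers follow below):
print(derived_fields(4810, 10.547842979431152, 19.38616370089356,
                     14.27322244644165, 14.957))
# J ~= 276.70, W_D ~= 4.43, J_D ~= 63.22, W_1KI ~= 4.03 -- matching the .json above.
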
+tensor(indices=tensor([[9492, 943, 366, ..., 7541, 3419, 8520], + [3163, 6992, 6675, ..., 104, 3723, 9842]]), + values=tensor([0.3216, 0.9704, 0.2426, ..., 0.1027, 0.1369, 0.4280]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9840, 0.1417, 0.0398, ..., 0.7890, 0.7444, 0.3956]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.547842979431152 seconds + +tensor(indices=tensor([[9492, 943, 366, ..., 7541, 3419, 8520], + [3163, 6992, 6675, ..., 104, 3723, 9842]]), + values=tensor([0.3216, 0.9704, 0.2426, ..., 0.1027, 0.1369, 0.4280]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9840, 0.1417, 0.0398, ..., 0.7890, 0.7444, 0.3956]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.547842979431152 seconds + +[16.24, 16.48, 16.6, 16.52, 16.8, 16.64, 16.6, 16.6, 16.6, 16.6] +[16.44, 16.32, 16.4, 20.52, 22.56, 24.04, 25.44, 23.28, 22.24, 20.4, 20.4, 20.4, 20.4, 20.8] +14.27322244644165 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4810, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.547842979431152, 'TIME_S_1KI': 2.192898748322485, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 276.70302688598633, 'W': 19.38616370089356} +[16.24, 16.48, 16.6, 16.52, 16.8, 16.64, 16.6, 16.6, 16.6, 16.6, 16.28, 16.44, 16.48, 16.56, 16.6, 16.68, 16.76, 16.84, 16.88, 17.0] +299.14 +14.956999999999999 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4810, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.547842979431152, 'TIME_S_1KI': 2.192898748322485, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 276.70302688598633, 'W': 19.38616370089356, 'J_1KI': 57.52661681621338, 'W_1KI': 4.030387463803235, 'W_D': 4.429163700893563, 'J_D': 63.21843875455857, 'W_D_1KI': 0.9208240542398259, 'J_D_1KI': 0.19143951231597212} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..de8892c --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 483, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.741916418075562, "TIME_S_1KI": 22.239992584007375, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 276.1104020309449, "W": 19.461832909928876, "J_1KI": 571.6571470619976, "W_1KI": 40.29364991703701, "W_D": 4.5208329099288775, "J_D": 64.13830588579185, "W_D_1KI": 9.359902505028732, "J_D_1KI": 19.378680134635054} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..ac4464e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 
'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.2565734386444092} + +tensor(indices=tensor([[3040, 1975, 8185, ..., 4197, 2982, 9717], + [2548, 7576, 6395, ..., 4840, 4908, 516]]), + values=tensor([0.8984, 0.8219, 0.3232, ..., 0.0275, 0.0542, 0.2107]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.4740, 0.4212, 0.7772, ..., 0.6354, 0.9914, 0.8150]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.2565734386444092 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 409 -ss 10000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.887248516082764} + +tensor(indices=tensor([[ 909, 4395, 9375, ..., 4784, 538, 5967], + [9525, 7935, 62, ..., 8942, 8068, 5683]]), + values=tensor([0.2394, 0.6993, 0.1720, ..., 0.6730, 0.4942, 0.9458]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.0813, 0.6209, 0.8868, ..., 0.0609, 0.7307, 0.2857]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 8.887248516082764 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 483 -ss 10000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.741916418075562} + +tensor(indices=tensor([[3476, 8500, 6569, ..., 1163, 7732, 5509], + [ 139, 5237, 3565, ..., 4894, 7373, 9358]]), + values=tensor([0.0056, 0.8699, 0.9380, ..., 0.1444, 0.5055, 0.8646]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.6523, 0.3723, 0.1460, ..., 0.5424, 0.6615, 0.0631]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.741916418075562 seconds + +tensor(indices=tensor([[3476, 8500, 6569, ..., 1163, 7732, 5509], + [ 139, 5237, 3565, ..., 4894, 7373, 9358]]), + values=tensor([0.0056, 0.8699, 0.9380, ..., 0.1444, 0.5055, 0.8646]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.6523, 0.3723, 0.1460, ..., 0.5424, 0.6615, 0.0631]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.741916418075562 seconds + +[17.12, 16.84, 16.88, 16.92, 16.72, 16.44, 16.6, 16.64, 16.64, 16.72] +[16.92, 16.92, 16.6, 20.12, 21.16, 23.2, 24.28, 25.04, 22.44, 21.72, 20.76, 20.72, 20.56, 20.56] +14.187276363372803 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 483, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 
'TIME_S': 10.741916418075562, 'TIME_S_1KI': 22.239992584007375, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 276.1104020309449, 'W': 19.461832909928876} +[17.12, 16.84, 16.88, 16.92, 16.72, 16.44, 16.6, 16.64, 16.64, 16.72, 16.2, 16.12, 16.16, 16.52, 16.52, 16.6, 16.6, 16.64, 16.64, 16.64] +298.82 +14.940999999999999 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 483, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.741916418075562, 'TIME_S_1KI': 22.239992584007375, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 276.1104020309449, 'W': 19.461832909928876, 'J_1KI': 571.6571470619976, 'W_1KI': 40.29364991703701, 'W_D': 4.5208329099288775, 'J_D': 64.13830588579185, 'W_D_1KI': 9.359902505028732, 'J_D_1KI': 19.378680134635054} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..dc7bd78 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 95, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.398029565811157, "TIME_S_1KI": 109.45294279801219, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 302.1709572029114, "W": 19.739419615702108, "J_1KI": 3180.746917925383, "W_1KI": 207.78336437581166, "W_D": 5.013419615702105, "J_D": 76.74540759706494, "W_D_1KI": 52.77283806002216, "J_D_1KI": 555.5035585265491} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..0d64315 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.1015262603759766} + +tensor(indices=tensor([[5975, 1448, 909, ..., 2303, 64, 4395], + [3014, 2842, 4837, ..., 5281, 7605, 6977]]), + values=tensor([0.1469, 0.9843, 0.0934, ..., 0.3480, 0.5731, 0.6993]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.4636, 0.4700, 0.7576, ..., 0.5837, 0.3639, 0.2563]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 1.1015262603759766 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 95 -ss 10000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.398029565811157} + +tensor(indices=tensor([[5766, 7593, 1135, ..., 8989, 4526, 639], + [2637, 6322, 1951, ..., 9020, 3345, 4258]]), + 
values=tensor([0.3933, 0.8952, 0.2235, ..., 0.1950, 0.5530, 0.4784]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.5471, 0.2753, 0.9099, ..., 0.8421, 0.2533, 0.6665]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.398029565811157 seconds + +tensor(indices=tensor([[5766, 7593, 1135, ..., 8989, 4526, 639], + [2637, 6322, 1951, ..., 9020, 3345, 4258]]), + values=tensor([0.3933, 0.8952, 0.2235, ..., 0.1950, 0.5530, 0.4784]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.5471, 0.2753, 0.9099, ..., 0.8421, 0.2533, 0.6665]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.398029565811157 seconds + +[16.52, 16.28, 16.48, 16.72, 16.84, 16.6, 16.56, 16.4, 16.4, 16.52] +[16.36, 16.52, 16.52, 19.76, 20.96, 24.16, 24.84, 25.6, 22.84, 21.8, 20.96, 21.08, 21.04, 21.04, 20.88] +15.307996034622192 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 95, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.398029565811157, 'TIME_S_1KI': 109.45294279801219, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 302.1709572029114, 'W': 19.739419615702108} +[16.52, 16.28, 16.48, 16.72, 16.84, 16.6, 16.56, 16.4, 16.4, 16.52, 15.96, 15.96, 16.44, 16.6, 16.4, 16.28, 16.28, 15.84, 15.84, 16.2] +294.52000000000004 +14.726000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 95, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.398029565811157, 'TIME_S_1KI': 109.45294279801219, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 302.1709572029114, 'W': 19.739419615702108, 'J_1KI': 3180.746917925383, 'W_1KI': 207.78336437581166, 'W_D': 5.013419615702105, 'J_D': 76.74540759706494, 'W_D_1KI': 52.77283806002216, 'J_D_1KI': 555.5035585265491} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..d434265 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 48, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.384194135665894, "TIME_S_1KI": 216.33737782637277, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 329.3590560531616, "W": 20.245232558826288, "J_1KI": 6861.647001107534, "W_1KI": 421.775678308881, "W_D": 4.979232558826286, "J_D": 81.00451949262619, "W_D_1KI": 103.7340116422143, "J_D_1KI": 2161.125242546131} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..dbb56c5 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py 
synthetic coo 10 -ss 10000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.1636908054351807} + +tensor(indices=tensor([[9377, 3039, 357, ..., 252, 8928, 7869], + [5085, 6961, 3696, ..., 1339, 3476, 6424]]), + values=tensor([0.9709, 0.1419, 0.4987, ..., 0.9197, 0.7910, 0.9313]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8336, 0.3321, 0.0806, ..., 0.9733, 0.5686, 0.4957]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 2.1636908054351807 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 48 -ss 10000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.384194135665894} + +tensor(indices=tensor([[9257, 192, 6875, ..., 5776, 6405, 6896], + [3742, 49, 4010, ..., 1461, 8112, 474]]), + values=tensor([0.4683, 0.1972, 0.1221, ..., 0.3478, 0.8050, 0.6367]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.4396, 0.1326, 0.6146, ..., 0.3496, 0.5544, 0.0468]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.384194135665894 seconds + +tensor(indices=tensor([[9257, 192, 6875, ..., 5776, 6405, 6896], + [3742, 49, 4010, ..., 1461, 8112, 474]]), + values=tensor([0.4683, 0.1972, 0.1221, ..., 0.3478, 0.8050, 0.6367]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.4396, 0.1326, 0.6146, ..., 0.3496, 0.5544, 0.0468]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.384194135665894 seconds + +[16.52, 16.52, 16.68, 16.8, 16.8, 17.12, 17.2, 17.36, 17.36, 17.12] +[17.04, 16.8, 16.88, 20.24, 22.12, 24.52, 26.52, 24.36, 24.36, 22.88, 21.24, 21.04, 21.04, 21.0, 21.24, 21.6] +16.268474817276 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.384194135665894, 'TIME_S_1KI': 216.33737782637277, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.3590560531616, 'W': 20.245232558826288} +[16.52, 16.52, 16.68, 16.8, 16.8, 17.12, 17.2, 17.36, 17.36, 17.12, 16.96, 16.92, 16.88, 17.08, 16.48, 16.76, 16.88, 17.16, 17.36, 17.32] +305.32000000000005 +15.266000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.384194135665894, 'TIME_S_1KI': 216.33737782637277, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.3590560531616, 'W': 20.245232558826288, 'J_1KI': 6861.647001107534, 'W_1KI': 421.775678308881, 'W_D': 4.979232558826286, 'J_D': 81.00451949262619, 'W_D_1KI': 103.7340116422143, 'J_D_1KI': 2161.125242546131} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.2.json 
b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..3235224 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 24, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.480116844177246, "TIME_S_1KI": 436.67153517405194, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 396.95239097595214, "W": 20.533611291083126, "J_1KI": 16539.68295733134, "W_1KI": 855.5671371284635, "W_D": 5.676611291083127, "J_D": 109.73931437063219, "W_D_1KI": 236.52547046179694, "J_D_1KI": 9855.227935908206} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..6d19006 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.2 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 4.354996681213379} + +tensor(indices=tensor([[8500, 1795, 5814, ..., 695, 3146, 664], + [5432, 8646, 1435, ..., 1579, 7665, 2705]]), + values=tensor([0.0930, 0.5357, 0.7932, ..., 0.0365, 0.5764, 0.7419]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.3474, 0.9695, 0.8806, ..., 0.9277, 0.9905, 0.4679]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 4.354996681213379 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 24 -ss 10000 -sd 0.2 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.480116844177246} + +tensor(indices=tensor([[8041, 8888, 2406, ..., 7710, 9509, 6677], + [5770, 4000, 6589, ..., 8858, 4258, 4777]]), + values=tensor([0.7909, 0.1567, 0.4983, ..., 0.2753, 0.5091, 0.1220]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.8824, 0.5060, 0.3659, ..., 0.3367, 0.8636, 0.1147]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.480116844177246 seconds + +tensor(indices=tensor([[8041, 8888, 2406, ..., 7710, 9509, 6677], + [5770, 4000, 6589, ..., 8858, 4258, 4777]]), + values=tensor([0.7909, 0.1567, 0.4983, ..., 0.2753, 0.5091, 0.1220]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.8824, 0.5060, 0.3659, ..., 0.3367, 0.8636, 0.1147]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.480116844177246 seconds + +[15.92, 15.76, 15.96, 16.08, 16.24, 16.52, 16.92, 17.0, 17.0, 16.92] +[16.64, 16.28, 16.24, 20.96, 22.64, 23.72, 26.8, 24.48, 
23.28, 22.52, 22.24, 22.24, 20.92, 21.04, 21.12, 21.24, 21.2, 22.68, 22.88] +19.33183526992798 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 24, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.480116844177246, 'TIME_S_1KI': 436.67153517405194, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 396.95239097595214, 'W': 20.533611291083126} +[15.92, 15.76, 15.96, 16.08, 16.24, 16.52, 16.92, 17.0, 17.0, 16.92, 16.12, 16.28, 16.64, 16.76, 16.76, 16.6, 16.6, 16.64, 16.64, 16.52] +297.14 +14.857 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 24, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.480116844177246, 'TIME_S_1KI': 436.67153517405194, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 396.95239097595214, 'W': 20.533611291083126, 'J_1KI': 16539.68295733134, 'W_1KI': 855.5671371284635, 'W_D': 5.676611291083127, 'J_D': 109.73931437063219, 'W_D_1KI': 236.52547046179694, 'J_D_1KI': 9855.227935908206} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..1fd20d4 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 16, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.457778215408325, "TIME_S_1KI": 653.6111384630203, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 462.69632364273076, "W": 20.647427058166027, "J_1KI": 28918.520227670673, "W_1KI": 1290.4641911353767, "W_D": 5.6004270581660265, "J_D": 125.50217532396319, "W_D_1KI": 350.02669113537667, "J_D_1KI": 21876.668195961043} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..7631221 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.3 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.5346596240997314} + +tensor(indices=tensor([[5669, 6778, 47, ..., 9404, 1949, 3404], + [3626, 9686, 5386, ..., 3395, 5291, 3142]]), + values=tensor([0.3625, 0.1114, 0.3966, ..., 0.2089, 0.4672, 0.6258]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.4665, 0.2779, 0.7138, ..., 0.8048, 0.7923, 0.8202]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 6.5346596240997314 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 10000 -sd 0.3 -c 16'] +{"MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.457778215408325} + +tensor(indices=tensor([[5629, 1798, 9970, ..., 2553, 1781, 4827], + [9429, 534, 1490, ..., 3534, 9871, 9955]]), + values=tensor([0.8994, 0.0069, 0.6906, ..., 0.8855, 0.9801, 0.0930]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.1561, 0.3409, 0.9105, ..., 0.7284, 0.1370, 0.4812]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.457778215408325 seconds + +tensor(indices=tensor([[5629, 1798, 9970, ..., 2553, 1781, 4827], + [9429, 534, 1490, ..., 3534, 9871, 9955]]), + values=tensor([0.8994, 0.0069, 0.6906, ..., 0.8855, 0.9801, 0.0930]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.1561, 0.3409, 0.9105, ..., 0.7284, 0.1370, 0.4812]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.457778215408325 seconds + +[16.4, 16.24, 16.24, 16.56, 16.6, 16.76, 16.72, 16.72, 16.64, 16.68] +[16.6, 16.52, 16.92, 17.8, 19.88, 20.8, 22.88, 24.76, 24.76, 23.96, 23.88, 23.88, 21.56, 20.84, 21.04, 20.92, 21.36, 21.56, 23.28, 23.08, 23.0, 23.88] +22.409393787384033 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 16, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.457778215408325, 'TIME_S_1KI': 653.6111384630203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 462.69632364273076, 'W': 20.647427058166027} +[16.4, 16.24, 16.24, 16.56, 16.6, 16.76, 16.72, 16.72, 16.64, 16.68, 16.72, 16.84, 17.04, 17.04, 17.0, 16.88, 16.76, 16.84, 16.76, 16.8] +300.94 +15.047 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 16, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.457778215408325, 'TIME_S_1KI': 653.6111384630203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 462.69632364273076, 'W': 20.647427058166027, 'J_1KI': 28918.520227670673, 'W_1KI': 1290.4641911353767, 'W_D': 5.6004270581660265, 'J_D': 125.50217532396319, 'W_D_1KI': 350.02669113537667, 'J_D_1KI': 21876.668195961043} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..7ffe2f3 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 12, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.456308841705322, "TIME_S_1KI": 871.3590701421102, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 534.4857844734192, "W": 21.000350357084837, "J_1KI": 44540.48203945161, "W_1KI": 1750.0291964237365, "W_D": 6.247350357084837, "J_D": 159.00305945897108, "W_D_1KI": 520.6125297570698, "J_D_1KI": 43384.377479755814} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.4.output 
b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..87d80ed --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.4 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 8.726817607879639} + +tensor(indices=tensor([[ 704, 506, 9718, ..., 6139, 4735, 9511], + [5997, 6274, 6063, ..., 6327, 874, 3023]]), + values=tensor([0.9143, 0.7056, 0.2664, ..., 0.4979, 0.2124, 0.5956]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.4532, 0.5300, 0.4786, ..., 0.4756, 0.7807, 0.1145]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 8.726817607879639 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 12 -ss 10000 -sd 0.4 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.456308841705322} + +tensor(indices=tensor([[5636, 16, 8363, ..., 6222, 947, 1070], + [1231, 9841, 6646, ..., 2435, 9856, 5444]]), + values=tensor([0.9340, 0.6884, 0.3604, ..., 0.1109, 0.1674, 0.1581]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.9860, 0.0884, 0.3478, ..., 0.7535, 0.7854, 0.7338]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.456308841705322 seconds + +tensor(indices=tensor([[5636, 16, 8363, ..., 6222, 947, 1070], + [1231, 9841, 6646, ..., 2435, 9856, 5444]]), + values=tensor([0.9340, 0.6884, 0.3604, ..., 0.1109, 0.1674, 0.1581]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.9860, 0.0884, 0.3478, ..., 0.7535, 0.7854, 0.7338]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.456308841705322 seconds + +[16.16, 16.28, 16.24, 16.16, 16.4, 16.4, 16.36, 16.36, 16.36, 16.32] +[16.36, 16.52, 16.48, 18.56, 19.2, 21.04, 21.88, 24.12, 25.24, 24.72, 24.64, 23.96, 23.96, 21.72, 21.0, 20.96, 20.72, 20.68, 20.56, 22.76, 23.44, 24.16, 24.56, 23.28, 23.28] +25.451279401779175 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 12, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.456308841705322, 'TIME_S_1KI': 871.3590701421102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 534.4857844734192, 'W': 21.000350357084837} +[16.16, 16.28, 16.24, 16.16, 16.4, 16.4, 16.36, 16.36, 16.36, 16.32, 16.36, 16.36, 16.48, 16.48, 16.44, 16.48, 16.52, 16.44, 16.64, 16.48] +295.06 +14.753 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 12, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 
'MATRIX_DENSITY': 0.4, 'TIME_S': 10.456308841705322, 'TIME_S_1KI': 871.3590701421102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 534.4857844734192, 'W': 21.000350357084837, 'J_1KI': 44540.48203945161, 'W_1KI': 1750.0291964237365, 'W_D': 6.247350357084837, 'J_D': 159.00305945897108, 'W_D_1KI': 520.6125297570698, 'J_D_1KI': 43384.377479755814} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..de2a970 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.909107446670532, "TIME_S_1KI": 1090.9107446670532, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 612.7037763309478, "W": 21.449824734352383, "J_1KI": 61270.37763309478, "W_1KI": 2144.9824734352383, "W_D": 6.421824734352382, "J_D": 183.43628977870932, "W_D_1KI": 642.1824734352383, "J_D_1KI": 64218.24734352383} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..8ba8261 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.5 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.909107446670532} + +tensor(indices=tensor([[4804, 7773, 4408, ..., 1833, 4291, 2965], + [8671, 9730, 4445, ..., 1398, 3205, 9692]]), + values=tensor([0.1559, 0.5245, 0.4425, ..., 0.4269, 0.1155, 0.3969]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.3163, 0.5754, 0.1899, ..., 0.5042, 0.8957, 0.5433]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.909107446670532 seconds + +tensor(indices=tensor([[4804, 7773, 4408, ..., 1833, 4291, 2965], + [8671, 9730, 4445, ..., 1398, 3205, 9692]]), + values=tensor([0.1559, 0.5245, 0.4425, ..., 0.4269, 0.1155, 0.3969]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.3163, 0.5754, 0.1899, ..., 0.5042, 0.8957, 0.5433]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.909107446670532 seconds + +[16.6, 16.72, 16.68, 16.68, 16.68, 16.68, 16.88, 16.84, 16.84, 16.96] +[17.0, 16.72, 17.76, 18.56, 20.4, 21.08, 22.24, 22.64, 22.64, 24.88, 24.8, 24.96, 24.84, 23.04, 21.04, 20.8, 20.88, 20.92, 21.16, 21.12, 23.88, 24.24, 24.24, 25.2, 25.16, 23.8, 22.72, 23.0] +28.564512014389038 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.909107446670532, 'TIME_S_1KI': 1090.9107446670532, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 612.7037763309478, 'W': 21.449824734352383} +[16.6, 16.72, 16.68, 16.68, 16.68, 16.68, 16.88, 16.84, 16.84, 16.96, 16.4, 16.44, 16.64, 16.76, 16.84, 16.68, 16.56, 16.6, 16.6, 16.92] +300.56 +15.028 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.909107446670532, 'TIME_S_1KI': 1090.9107446670532, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 612.7037763309478, 'W': 21.449824734352383, 'J_1KI': 61270.37763309478, 'W_1KI': 2144.9824734352383, 'W_D': 6.421824734352382, 'J_D': 183.43628977870932, 'W_D_1KI': 642.1824734352383, 'J_D_1KI': 64218.24734352383} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..8aedb00 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 329192, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.653273582458496, "TIME_S_1KI": 0.03236188480418265, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 261.9017709636688, "W": 18.418336412038514, "J_1KI": 0.7955897195669057, "W_1KI": 0.05595013369716917, "W_D": 3.6753364120385132, "J_D": 52.261892369985574, "W_D_1KI": 0.01116471971384029, "J_D_1KI": 3.39155256319725e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..8728a5e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,907 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.006676435470581055} + +tensor(indices=tensor([[6770, 3309, 8056, ..., 269, 5308, 6845], + [6596, 3896, 4483, ..., 3312, 7741, 330]]), + values=tensor([0.1023, 0.1531, 0.6001, 0.2569, 0.8879, 0.3178, 0.0588, + 0.7220, 0.5004, 0.0771, 0.8327, 0.2209, 0.7456, 0.4977, + 0.9388, 0.7409, 0.8505, 0.5803, 0.7063, 0.2449, 0.6893, + 0.6549, 0.4784, 0.8221, 0.6025, 0.6859, 0.3434, 0.3615, + 0.1899, 0.6549, 0.1927, 0.3058, 0.8434, 0.2387, 0.3273, + 0.1260, 0.6557, 0.7775, 0.6353, 0.9011, 0.4828, 0.8487, + 0.9500, 0.0105, 0.2585, 0.6139, 0.0980, 0.9828, 0.4176, + 0.2034, 0.4170, 0.7998, 0.5720, 0.5705, 0.1784, 0.2098, + 0.5546, 0.4464, 0.1374, 0.4944, 0.5975, 0.8994, 0.0677, + 0.2679, 0.4587, 0.2233, 0.4248, 0.7000, 0.3029, 0.3320, + 0.7888, 0.3333, 0.2295, 0.8504, 0.1785, 0.9459, 0.5524, + 0.2551, 0.8953, 0.8049, 0.6596, 0.5606, 0.2965, 0.2479, + 0.2781, 0.1142, 0.4258, 0.4613, 0.9717, 0.5015, 0.2721, + 0.6300, 0.9621, 0.6811, 0.9932, 0.1516, 0.3428, 0.3082, + 0.0303, 0.0020, 0.8342, 0.3221, 0.3840, 0.7772, 0.3399, + 0.5576, 0.3917, 0.8455, 0.5888, 0.1935, 0.0197, 0.4475, + 0.1537, 0.7377, 
0.4036, 0.3015, 0.6625, 0.0417, 0.7806, + 0.3857, 0.5506, 0.8312, 0.8762, 0.8452, 0.0474, 0.2292, + 0.8717, 0.2710, 0.9590, 0.0681, 0.2979, 0.7234, 0.6270, + 0.1104, 0.3087, 0.0430, 0.5470, 0.1557, 0.4961, 0.9061, + 0.6235, 0.4211, 0.9135, 0.4972, 0.8555, 0.7099, 0.6221, + 0.9813, 0.2940, 0.4521, 0.6207, 0.0727, 0.0949, 0.3432, + 0.1887, 0.4108, 0.2864, 0.7749, 0.0713, 0.1034, 0.5038, + 0.9156, 0.0769, 0.9562, 0.4637, 0.3135, 0.6304, 0.7515, + 0.9656, 0.1567, 0.8623, 0.9941, 0.3021, 0.7095, 0.3538, + 0.1253, 0.8088, 0.2189, 0.9596, 0.5312, 0.0912, 0.4733, + 0.7859, 0.4946, 0.5464, 0.6159, 0.7845, 0.2800, 0.8822, + 0.5266, 0.1573, 0.5551, 0.2534, 0.4929, 0.5086, 0.1526, + 0.6064, 0.9270, 0.2373, 0.9056, 0.3371, 0.6907, 0.7879, + 0.3012, 0.7749, 0.1708, 0.5713, 0.2177, 0.4326, 0.7511, + 0.3103, 0.8643, 0.8960, 0.5887, 0.4691, 0.6828, 0.8028, + 0.5708, 0.9180, 0.1604, 0.4563, 0.3732, 0.3762, 0.4692, + 0.2871, 0.7436, 0.5209, 0.9136, 0.0372, 0.7880, 0.7813, + 0.3825, 0.3799, 0.3659, 0.2722, 0.6256, 0.4604, 0.8030, + 0.1340, 0.2432, 0.1959, 0.0078, 0.7646, 0.7935, 0.1100, + 0.1739, 0.1750, 0.5373, 0.6310, 0.8905, 0.9681, 0.1020, + 0.6648, 0.0226, 0.5700, 0.9770, 0.0769, 0.5235, 0.6901, + 0.4191, 0.9363, 0.5673, 0.8175, 0.0357, 0.7512, 0.5221, + 0.5730, 0.7436, 0.8484, 0.9581, 0.0187, 0.0343, 0.7737, + 0.2606, 0.8287, 0.8887, 0.9505, 0.6400, 0.8450, 0.8621, + 0.1190, 0.6660, 0.2121, 0.8631, 0.5767, 0.7835, 0.7158, + 0.2229, 0.4717, 0.6910, 0.3706, 0.9403, 0.8863, 0.9304, + 0.0366, 0.0870, 0.0157, 0.9863, 0.7674, 0.6857, 0.3477, + 0.5374, 0.0938, 0.1143, 0.5268, 0.7218, 0.5709, 0.2182, + 0.7617, 0.1398, 0.2077, 0.5668, 0.1962, 0.8572, 0.8711, + 0.3395, 0.4739, 0.7478, 0.1105, 0.9099, 0.9201, 0.8443, + 0.5469, 0.0894, 0.9226, 0.2893, 0.5709, 0.9044, 0.6669, + 0.9141, 0.9257, 0.4084, 0.5283, 0.8227, 0.7906, 0.8554, + 0.9499, 0.0669, 0.7159, 0.1996, 0.9696, 0.4190, 0.2562, + 0.9704, 0.4652, 0.8458, 0.9194, 0.5097, 0.0655, 0.5671, + 0.5785, 0.4778, 0.4898, 0.8257, 0.6192, 0.8732, 0.7207, + 0.7511, 0.9590, 0.7906, 0.7548, 0.1667, 0.2615, 0.4314, + 0.2573, 0.2685, 0.6847, 0.0869, 0.5622, 0.1166, 0.9602, + 0.3127, 0.1640, 0.2691, 0.3227, 0.3403, 0.3489, 0.9388, + 0.3359, 0.0104, 0.1872, 0.7207, 0.1433, 0.1697, 0.6258, + 0.5318, 0.6168, 0.0333, 0.5280, 0.1766, 0.0232, 0.8446, + 0.7174, 0.8676, 0.2986, 0.0254, 0.4912, 0.1416, 0.1864, + 0.8933, 0.2387, 0.2060, 0.3968, 0.5610, 0.8316, 0.4771, + 0.1737, 0.0233, 0.2262, 0.0360, 0.1716, 0.0743, 0.3818, + 0.0949, 0.8673, 0.3754, 0.0249, 0.5028, 0.4749, 0.3543, + 0.9859, 0.9278, 0.5978, 0.8953, 0.4979, 0.6560, 0.5495, + 0.7096, 0.3474, 0.0889, 0.0510, 0.3640, 0.7458, 0.3563, + 0.3826, 0.6718, 0.6610, 0.6192, 0.6977, 0.5845, 0.2152, + 0.2554, 0.6164, 0.8918, 0.1642, 0.6237, 0.3843, 0.0132, + 0.4741, 0.5792, 0.9091, 0.6428, 0.1858, 0.3573, 0.3463, + 0.1552, 0.5504, 0.1943, 0.2234, 0.2346, 0.8493, 0.6135, + 0.5592, 0.6828, 0.3203, 0.3439, 0.3889, 0.5951, 0.7530, + 0.1606, 0.7771, 0.0552, 0.3138, 0.5919, 0.3985, 0.1135, + 0.0145, 0.2608, 0.5378, 0.7628, 0.2140, 0.6344, 0.1008, + 0.2826, 0.2001, 0.5116, 0.3959, 0.0813, 0.3690, 0.5839, + 0.3965, 0.4057, 0.8129, 0.7995, 0.1943, 0.0421, 0.5110, + 0.2449, 0.2264, 0.7449, 0.6906, 0.8488, 0.9227, 0.6656, + 0.6030, 0.8953, 0.6528, 0.0208, 0.6823, 0.5650, 0.8841, + 0.5718, 0.8234, 0.0964, 0.4219, 0.0789, 0.9155, 0.5740, + 0.6886, 0.9639, 0.5681, 0.1347, 0.5580, 0.4367, 0.3059, + 0.6761, 0.1251, 0.4489, 0.3920, 0.3020, 0.9654, 0.4819, + 0.8240, 0.8373, 0.2342, 0.4830, 0.8246, 0.2214, 0.3556, + 0.7149, 0.7024, 0.8671, 0.1276, 
0.1141, 0.3165, 0.7614, + 0.5889, 0.6224, 0.3031, 0.3783, 0.2226, 0.5218, 0.0838, + 0.9392, 0.1966, 0.2896, 0.7253, 0.0139, 0.2465, 0.3701, + 0.6313, 0.8124, 0.5468, 0.7505, 0.7624, 0.0648, 0.8551, + 0.6897, 0.1781, 0.2765, 0.3457, 0.8610, 0.7567, 0.6316, + 0.2397, 0.0571, 0.3295, 0.9508, 0.8703, 0.1627, 0.3613, + 0.7606, 0.9244, 0.4570, 0.7809, 0.5614, 0.5541, 0.9587, + 0.2271, 0.9562, 0.6510, 0.4824, 0.3961, 0.5145, 0.8223, + 0.4562, 0.4107, 0.4659, 0.5773, 0.9154, 0.1674, 0.4458, + 0.0248, 0.6587, 0.4359, 0.3442, 0.6329, 0.7569, 0.9324, + 0.4947, 0.1260, 0.6087, 0.0494, 0.7224, 0.2718, 0.1531, + 0.4055, 0.8103, 0.0478, 0.1783, 0.6969, 0.2297, 0.2169, + 0.2122, 0.4943, 0.9244, 0.1928, 0.5317, 0.8160, 0.8999, + 0.3914, 0.0630, 0.3465, 0.7683, 0.9804, 0.0030, 0.4673, + 0.9302, 0.5249, 0.9988, 0.3048, 0.3348, 0.3355, 0.5781, + 0.1978, 0.8385, 0.4533, 0.7234, 0.4016, 0.8804, 0.3614, + 0.1809, 0.4865, 0.6793, 0.0872, 0.6248, 0.7749, 0.4694, + 0.1958, 0.0521, 0.9280, 0.0707, 0.6716, 0.2640, 0.1670, + 0.6301, 0.8944, 0.5261, 0.6254, 0.1766, 0.4565, 0.7198, + 0.1736, 0.6620, 0.0783, 0.9167, 0.7657, 0.1338, 0.6012, + 0.1281, 0.9651, 0.3900, 0.4378, 0.8072, 0.2697, 0.5110, + 0.1771, 0.0932, 0.3540, 0.9945, 0.7623, 0.4936, 0.9329, + 0.2020, 0.3024, 0.3539, 0.0427, 0.8725, 0.2306, 0.2966, + 0.6240, 0.2871, 0.2186, 0.3803, 0.9852, 0.1091, 0.7926, + 0.2193, 0.1113, 0.5193, 0.4920, 0.3036, 0.2979, 0.6556, + 0.7902, 0.7311, 0.0180, 0.5820, 0.5368, 0.6908, 0.5439, + 0.3314, 0.8313, 0.5965, 0.4499, 0.7466, 0.3992, 0.1825, + 0.4001, 0.1999, 0.2782, 0.5872, 0.5586, 0.3500, 0.0050, + 0.8385, 0.5606, 0.4875, 0.0503, 0.3778, 0.3731, 0.9350, + 0.8898, 0.0026, 0.4690, 0.4004, 0.7593, 0.2474, 0.2148, + 0.3787, 0.4063, 0.6691, 0.7552, 0.8265, 0.5458, 0.4045, + 0.2220, 0.2784, 0.2028, 0.0741, 0.9922, 0.5143, 0.4777, + 0.6227, 0.0477, 0.0359, 0.6085, 0.7970, 0.6946, 0.5808, + 0.8463, 0.8883, 0.5179, 0.9814, 0.9726, 0.1882, 0.9600, + 0.3405, 0.7404, 0.7173, 0.4994, 0.8012, 0.2596, 0.6001, + 0.6249, 0.4505, 0.0717, 0.3653, 0.3722, 0.7356, 0.7757, + 0.1010, 0.6916, 0.0587, 0.5943, 0.2406, 0.2035, 0.9767, + 0.0245, 0.3666, 0.3241, 0.5108, 0.7887, 0.8135, 0.8593, + 0.2908, 0.7582, 0.8540, 0.0369, 0.3636, 0.0663, 0.7726, + 0.8034, 0.2493, 0.8409, 0.3698, 0.2521, 0.2834, 0.1384, + 0.9298, 0.2136, 0.3549, 0.2335, 0.9603, 0.1264, 0.8569, + 0.6410, 0.0946, 0.4658, 0.0414, 0.7013, 0.9159, 0.8287, + 0.1496, 0.5704, 0.4617, 0.8996, 0.7122, 0.3732, 0.2144, + 0.6662, 0.5916, 0.3690, 0.9740, 0.2777, 0.9072, 0.7046, + 0.4843, 0.9980, 0.2662, 0.2796, 0.5916, 0.9107, 0.6790, + 0.3320, 0.5968, 0.3695, 0.4508, 0.3383, 0.4398, 0.6214, + 0.0746, 0.1184, 0.5978, 0.3906, 0.7729, 0.1297, 0.5391, + 0.6557, 0.9282, 0.8182, 0.4721, 0.2111, 0.0576, 0.9989, + 0.3069, 0.6847, 0.7666, 0.0948, 0.6698, 0.9817, 0.9605, + 0.0202, 0.3853, 0.2590, 0.4869, 0.1853, 0.8502, 0.1858, + 0.2325, 0.7441, 0.7522, 0.0034, 0.8486, 0.0311, 0.6441, + 0.4989, 0.8010, 0.1386, 0.3781, 0.0569, 0.0390, 0.4517, + 0.2811, 0.2498, 0.9078, 0.0281, 0.3113, 0.2553, 0.4893, + 0.9806, 0.3949, 0.9066, 0.4365, 0.3996, 0.4551, 0.4604, + 0.4558, 0.0431, 0.5314, 0.7681, 0.7826, 0.8589, 0.0182, + 0.8651, 0.2373, 0.6694, 0.6111, 0.9888, 0.9290, 0.2136, + 0.1452, 0.1554, 0.8183, 0.0487, 0.0098, 0.7941, 0.6208, + 0.8470, 0.7516, 0.9154, 0.9329, 0.1755, 0.4850, 0.6580, + 0.6590, 0.6896, 0.3066, 0.0936, 0.9599, 0.0037, 0.8438, + 0.6809, 0.0368, 0.6345, 0.9572, 0.3672, 0.9294, 0.7582, + 0.7453, 0.0548, 0.5323, 0.9008, 0.0849, 0.7057, 0.9715, + 0.8649, 0.8472, 0.3184, 0.2070, 0.2540, 0.8347, 
0.1823, + 0.6558, 0.5421, 0.5100, 0.2470, 0.1989, 0.2160, 0.6218, + 0.5054, 0.6662, 0.0814, 0.2003, 0.6354, 0.9565, 0.6935, + 0.9381, 0.3487, 0.0722, 0.0644, 0.9236, 0.6484, 0.8955, + 0.8630, 0.8973, 0.1106, 0.1243, 0.9820, 0.5929]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.2430, 0.4800, 0.1603, ..., 0.2671, 0.3314, 0.3631]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.006676435470581055 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 15726 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5300266742706299} + +tensor(indices=tensor([[5997, 1278, 1062, ..., 9685, 5347, 4668], + [3395, 3427, 4627, ..., 7441, 3913, 814]]), + values=tensor([6.2407e-01, 1.2538e-01, 4.1784e-01, 9.8965e-01, + 7.2891e-01, 3.5778e-01, 1.7063e-01, 4.5992e-01, + 8.6162e-01, 6.4424e-01, 7.9538e-01, 4.7107e-01, + 5.7685e-01, 8.2889e-01, 3.4635e-01, 8.9488e-01, + 8.5300e-01, 6.3658e-01, 8.3929e-01, 4.4403e-01, + 5.4296e-01, 4.8266e-01, 8.9874e-01, 9.8882e-01, + 2.1280e-01, 8.7531e-03, 3.7347e-01, 6.5496e-01, + 4.5754e-01, 6.2680e-01, 9.0112e-01, 3.8784e-01, + 3.1732e-03, 9.9271e-01, 6.8891e-01, 3.6098e-01, + 7.2754e-01, 6.0428e-01, 4.8622e-01, 7.4485e-01, + 9.0766e-01, 3.3373e-02, 5.1747e-01, 6.2127e-01, + 2.5330e-01, 6.7114e-01, 6.9123e-01, 6.8898e-01, + 9.6021e-01, 7.5899e-01, 7.8065e-01, 4.4694e-01, + 3.8022e-01, 3.3948e-01, 4.7395e-01, 6.1292e-01, + 6.8791e-01, 8.1816e-01, 7.5088e-01, 7.7661e-01, + 5.6991e-01, 9.5073e-03, 2.0611e-01, 2.7890e-02, + 5.5340e-01, 8.7299e-01, 3.4812e-01, 1.6708e-02, + 9.4747e-01, 1.8317e-01, 5.0613e-01, 6.2610e-01, + 2.2833e-01, 1.2633e-01, 1.8158e-01, 9.1466e-01, + 3.3389e-01, 5.0335e-01, 8.4599e-01, 1.8027e-01, + 6.2290e-01, 8.2216e-01, 8.3133e-01, 2.6503e-03, + 2.2292e-01, 2.8347e-01, 9.2290e-01, 6.6479e-01, + 3.1394e-01, 9.1359e-01, 6.9253e-01, 4.2741e-01, + 3.2298e-01, 2.6820e-01, 3.3018e-01, 2.3057e-02, + 2.2186e-01, 6.4942e-01, 3.2413e-01, 3.1178e-01, + 8.1188e-01, 5.2483e-02, 9.0385e-01, 4.9963e-01, + 3.7885e-01, 9.4741e-01, 2.9983e-01, 5.1702e-01, + 8.0334e-01, 8.9488e-01, 8.8164e-02, 6.8859e-01, + 7.3678e-01, 1.6806e-01, 5.7900e-01, 6.3113e-01, + 4.0424e-01, 2.4239e-02, 5.0892e-01, 5.3805e-01, + 2.9064e-01, 9.1268e-01, 1.2101e-01, 4.1877e-01, + 2.0852e-01, 1.3413e-01, 7.8301e-01, 4.4682e-01, + 2.9015e-01, 3.8678e-01, 6.6147e-01, 4.9515e-01, + 1.4574e-01, 1.4710e-01, 7.0317e-01, 6.4034e-03, + 8.5960e-01, 2.7000e-01, 7.5114e-01, 3.8215e-01, + 4.1886e-01, 4.1581e-01, 2.0558e-01, 6.0559e-01, + 8.2907e-01, 1.2591e-01, 9.0277e-01, 5.2279e-01, + 6.5799e-01, 7.4533e-01, 4.2842e-01, 6.3123e-01, + 5.3794e-01, 3.1537e-01, 4.1977e-01, 4.9710e-01, + 8.4075e-01, 6.7790e-01, 2.2796e-01, 8.7767e-01, + 6.1881e-01, 1.1055e-01, 9.1309e-01, 7.5453e-01, + 8.0830e-01, 9.9232e-01, 3.3055e-01, 5.1687e-01, + 9.2719e-01, 5.0786e-01, 9.5483e-03, 3.7840e-01, + 3.6661e-01, 1.4995e-01, 2.9554e-01, 3.1821e-01, + 4.9824e-01, 9.9963e-01, 1.5045e-01, 1.9130e-01, + 6.6253e-01, 1.7195e-01, 3.4380e-01, 7.0945e-01, + 4.9941e-03, 2.6078e-01, 7.9163e-01, 1.9209e-03, + 5.9859e-01, 7.7841e-01, 9.3660e-01, 2.4402e-01, + 6.1518e-01, 3.7168e-01, 8.8917e-01, 2.6245e-01, + 1.7135e-01, 3.5749e-01, 
4.0070e-01, 1.6442e-01, + 7.3401e-01, 2.6389e-01, 7.0527e-02, 5.3142e-01, + 5.0891e-01, 5.3022e-01, 3.4815e-01, 2.2142e-01, + 6.6437e-01, 5.2212e-01, 5.3383e-01, 9.7662e-01, + 1.1557e-01, 2.1160e-01, 6.6142e-01, 8.7895e-01, + 2.7760e-01, 4.1657e-01, 6.2405e-01, 1.5318e-01, + 7.4504e-02, 3.3979e-01, 2.8786e-01, 8.8736e-01, + 1.5587e-01, 6.7580e-01, 1.4225e-01, 9.5988e-01, + 2.5704e-01, 6.5241e-01, 1.5267e-01, 9.4210e-01, + 3.4100e-01, 3.8842e-01, 4.0411e-01, 1.8129e-01, + 5.6850e-01, 6.3043e-01, 2.1361e-01, 2.0506e-01, + 4.8263e-01, 5.3470e-02, 4.0666e-01, 6.5519e-01, + 5.8061e-01, 1.6086e-01, 1.7339e-01, 5.5943e-01, + 3.5272e-01, 7.5839e-01, 2.2056e-01, 4.3755e-01, + 9.8077e-01, 4.7334e-01, 6.8665e-02, 2.0600e-01, + 8.0901e-01, 7.2677e-01, 6.0555e-01, 2.6328e-02, + 8.3332e-01, 4.3899e-01, 3.8217e-01, 4.6991e-01, + 3.7185e-01, 1.4837e-01, 2.7407e-01, 8.9076e-01, + 1.7846e-01, 1.8808e-01, 7.2514e-03, 4.0515e-01, + 7.6511e-01, 1.7467e-01, 3.6400e-01, 2.7694e-03, + 9.6128e-02, 1.3602e-01, 2.1718e-01, 5.7225e-01, + 6.1917e-02, 1.4092e-01, 5.6339e-01, 6.3842e-01, + 4.2242e-01, 2.6277e-01, 3.1191e-01, 5.6174e-01, + 2.9405e-01, 1.2122e-01, 5.3808e-01, 9.7700e-01, + 8.7333e-01, 2.3189e-01, 2.9897e-01, 7.4085e-01, + 7.8363e-01, 8.2872e-01, 2.6890e-01, 8.8718e-01, + 1.3638e-01, 3.9265e-01, 4.5839e-01, 5.7029e-01, + 5.9501e-01, 3.4062e-02, 8.7577e-01, 6.0436e-02, + 7.1961e-04, 9.3104e-03, 3.5666e-01, 6.4799e-01, + 1.7543e-01, 1.9171e-01, 2.6842e-01, 3.2403e-01, + 3.0434e-01, 7.1624e-01, 1.4627e-01, 3.0650e-01, + 6.0198e-01, 3.0517e-01, 1.9671e-01, 3.2044e-01, + 4.0399e-01, 1.0826e-01, 4.9949e-01, 5.3610e-01, + 3.0624e-01, 3.4837e-01, 7.9623e-01, 5.6877e-01, + 7.4306e-01, 5.1520e-01, 8.6672e-01, 9.6822e-01, + 4.9352e-01, 8.6554e-01, 3.6502e-01, 7.8620e-01, + 8.1046e-01, 8.2074e-01, 7.3968e-01, 1.6987e-01, + 7.3079e-01, 5.3901e-01, 3.6955e-01, 3.9876e-01, + 4.5200e-01, 3.5431e-01, 5.8412e-01, 6.1950e-01, + 1.6328e-01, 3.2130e-01, 7.8682e-01, 1.0915e-01, + 6.4928e-01, 3.6801e-01, 2.8330e-01, 2.6997e-01, + 2.8309e-01, 5.1312e-01, 6.8854e-01, 1.8489e-03, + 6.4053e-01, 1.2789e-01, 1.3003e-01, 5.0110e-01, + 3.4187e-01, 4.8386e-01, 3.8331e-01, 8.4190e-01, + 1.2117e-02, 6.0937e-01, 6.6705e-02, 9.1317e-01, + 7.9748e-01, 3.5053e-01, 9.2929e-01, 6.8545e-01, + 3.9395e-01, 2.3692e-01, 7.8114e-01, 6.3460e-01, + 5.1220e-01, 5.2642e-01, 4.0452e-01, 6.5564e-02, + 6.3324e-01, 4.5916e-01, 7.4220e-01, 1.6661e-01, + 4.9083e-01, 1.2483e-01, 8.9627e-01, 1.3694e-01, + 3.8703e-01, 7.7753e-01, 4.7431e-01, 6.3878e-01, + 4.7429e-01, 9.2205e-01, 6.0183e-01, 8.1714e-01, + 2.6882e-01, 8.4049e-01, 8.0980e-01, 1.9838e-01, + 3.8976e-01, 4.1516e-01, 3.0307e-01, 1.8767e-01, + 9.9624e-01, 9.0763e-01, 7.3573e-01, 3.4575e-01, + 1.4533e-01, 9.7815e-01, 9.6509e-01, 4.9655e-01, + 9.4037e-01, 9.2345e-01, 6.8192e-01, 7.7264e-01, + 1.4952e-01, 8.7138e-01, 9.5731e-01, 2.3986e-01, + 9.0293e-01, 4.4668e-01, 9.6129e-01, 3.1186e-01, + 2.1951e-02, 5.3612e-01, 6.8536e-01, 6.7360e-01, + 9.6969e-01, 8.3986e-01, 1.1752e-01, 8.8199e-01, + 2.7328e-01, 4.1212e-01, 6.7085e-01, 2.6782e-02, + 4.9269e-01, 9.3860e-01, 9.8616e-01, 6.3886e-01, + 1.2650e-01, 9.6115e-01, 6.3708e-01, 6.5410e-01, + 9.0410e-01, 2.5286e-01, 5.9933e-02, 2.6397e-01, + 7.0771e-01, 9.2008e-01, 4.1411e-01, 3.8053e-01, + 8.9365e-01, 5.5154e-02, 8.4580e-01, 1.7546e-01, + 5.3564e-02, 1.9970e-01, 5.4910e-01, 5.8737e-01, + 8.1043e-01, 1.1794e-01, 1.4575e-01, 2.6790e-01, + 9.4283e-01, 5.8374e-01, 2.2216e-01, 4.6900e-02, + 9.7030e-01, 6.4936e-01, 7.2421e-01, 1.4546e-01, + 9.3333e-01, 6.2608e-01, 
7.4945e-02, 8.0587e-01, + 1.0057e-01, 7.8694e-01, 5.6397e-01, 3.2940e-01, + 1.7018e-01, 2.1334e-01, 2.9558e-02, 7.4709e-01, + 1.5956e-01, 2.1743e-01, 1.6044e-01, 1.0934e-01, + 4.1767e-01, 7.8993e-01, 9.5229e-01, 6.1846e-01, + 2.0035e-01, 1.6408e-01, 3.6232e-01, 7.8733e-01, + 1.7485e-01, 7.5996e-01, 7.9968e-01, 7.4437e-01, + 3.8109e-01, 3.2473e-01, 6.4219e-01, 5.2608e-01, + 3.3553e-01, 9.5822e-01, 7.2834e-01, 3.9680e-01, + 2.2607e-01, 1.5112e-01, 1.2638e-01, 4.0057e-01, + 3.5572e-01, 2.3048e-01, 3.1399e-01, 3.2664e-01, + 4.2918e-01, 9.2793e-01, 5.6687e-01, 4.2255e-01, + 1.8980e-01, 7.2494e-01, 3.8036e-01, 8.7577e-01, + 7.1411e-01, 3.7312e-01, 1.2070e-01, 1.2025e-01, + 4.6491e-01, 6.7677e-01, 9.7232e-01, 4.8362e-02, + 3.7182e-01, 7.8297e-01, 3.8148e-01, 2.8425e-01, + 7.5418e-01, 4.9069e-01, 4.5503e-01, 9.2657e-01, + 5.2803e-01, 3.1407e-01, 3.7168e-01, 7.1866e-01, + 1.4714e-01, 7.3509e-01, 2.8026e-01, 9.9717e-01, + 5.3740e-01, 1.3734e-01, 1.6903e-01, 3.0765e-01, + 6.8975e-01, 2.9802e-01, 6.6934e-01, 8.4892e-01, + 7.7295e-01, 7.1347e-01, 6.2621e-01, 8.8409e-01, + 7.0916e-01, 8.2947e-01, 1.8425e-01, 8.8261e-01, + 9.1764e-01, 6.3230e-01, 4.6121e-01, 6.2179e-01, + 6.8532e-01, 3.5350e-01, 7.5911e-01, 1.0239e-01, + 6.7434e-01, 5.4930e-01, 7.9796e-01, 4.2234e-02, + 8.3696e-01, 9.2378e-01, 5.6462e-02, 6.3795e-01, + 1.5837e-01, 7.1053e-01, 8.4872e-01, 9.0589e-01, + 9.2907e-01, 5.5405e-01, 3.5422e-01, 6.8767e-01, + 4.8460e-01, 2.3444e-01, 1.4530e-01, 6.1238e-01, + 4.8763e-01, 6.4096e-01, 4.3887e-02, 8.4372e-01, + 1.5050e-01, 8.7181e-01, 5.4819e-01, 5.1835e-01, + 3.0208e-01, 6.8904e-01, 7.8332e-03, 7.8221e-01, + 5.1838e-01, 3.2329e-02, 5.3868e-01, 3.1053e-01, + 9.0127e-01, 1.6066e-01, 5.3207e-01, 8.6322e-01, + 9.4785e-01, 6.6579e-01, 1.0125e-01, 7.3424e-01, + 3.1315e-01, 2.8608e-02, 7.5876e-01, 5.9681e-01, + 9.3647e-01, 3.6785e-01, 2.2612e-01, 9.9714e-01, + 1.2672e-01, 6.5794e-01, 1.1796e-01, 1.4216e-01, + 2.4559e-01, 9.5884e-01, 9.0221e-01, 8.0889e-01, + 8.5165e-01, 8.4272e-01, 8.2956e-01, 6.4154e-01, + 5.7936e-01, 7.3605e-01, 4.8948e-01, 4.8941e-01, + 3.2076e-01, 1.8599e-01, 5.2359e-01, 5.1704e-01, + 7.8842e-01, 9.0260e-01, 1.1817e-01, 6.1870e-02, + 1.8319e-01, 5.9255e-01, 3.6822e-01, 6.7181e-01, + 2.7303e-01, 6.0760e-01, 4.2469e-01, 9.9876e-01, + 5.6797e-01, 8.3794e-01, 6.1265e-01, 9.7684e-02, + 5.3432e-01, 5.6442e-01, 7.4580e-01, 1.7397e-01, + 6.6385e-01, 3.5452e-01, 1.0516e-01, 2.7100e-01, + 8.5023e-01, 2.4203e-01, 9.3199e-01, 4.4348e-01, + 5.8310e-01, 8.1298e-01, 8.6035e-01, 6.8515e-01, + 6.2755e-01, 3.0992e-01, 9.9940e-01, 6.6833e-01, + 6.1562e-01, 1.9145e-01, 5.4221e-01, 8.8453e-01, + 7.4263e-01, 6.5658e-01, 2.4534e-01, 9.6310e-01, + 3.9416e-01, 2.3583e-01, 5.6141e-01, 4.5871e-01, + 6.8509e-01, 7.8195e-01, 3.3198e-01, 1.9099e-01, + 5.0332e-01, 5.7085e-01, 2.9725e-01, 3.6297e-01, + 4.1326e-01, 1.8705e-01, 1.4135e-01, 1.7254e-01, + 1.2905e-01, 4.3934e-01, 5.0844e-01, 1.2112e-01, + 3.8574e-01, 7.0878e-01, 5.0355e-01, 2.1318e-01, + 3.8091e-01, 4.3161e-01, 1.6707e-01, 4.4372e-02, + 7.3388e-01, 3.6989e-01, 1.5159e-01, 3.4609e-01, + 3.7557e-01, 2.6034e-01, 6.7888e-01, 5.4854e-01, + 2.4233e-01, 2.0606e-01, 6.0941e-01, 1.7158e-01, + 9.6916e-01, 3.7251e-01, 8.1107e-02, 1.9139e-01, + 2.8768e-02, 7.1590e-01, 3.1600e-01, 2.5095e-01, + 2.5028e-01, 6.9803e-01, 4.6188e-01, 5.2031e-01, + 9.9823e-01, 9.7865e-01, 7.9610e-01, 2.8568e-01, + 7.3358e-01, 9.5757e-01, 5.2937e-01, 3.6140e-01, + 4.8958e-01, 1.1323e-01, 1.3246e-01, 3.5362e-01, + 6.2577e-01, 7.6715e-01, 5.6947e-01, 4.9386e-01, + 6.1593e-01, 7.3663e-01, 
2.0667e-01, 4.1311e-02, + 3.7104e-01, 1.2796e-01, 2.8536e-02, 5.1991e-01, + 4.0647e-01, 7.5064e-01, 7.7934e-01, 7.7913e-02, + 5.8566e-01, 6.5901e-01, 3.3381e-01, 9.0677e-01, + 8.0619e-01, 8.3290e-01, 9.2258e-01, 4.1665e-01, + 4.5517e-01, 8.7791e-01, 2.3793e-01, 8.5328e-01, + 5.3735e-01, 6.9742e-01, 4.8220e-01, 1.8653e-01, + 7.6875e-01, 7.4601e-01, 8.5552e-03, 9.4398e-01, + 4.9527e-02, 6.7147e-01, 7.9374e-01, 5.5156e-01, + 6.9347e-01, 9.2732e-01, 2.9101e-01, 6.1512e-01, + 6.7840e-01, 6.8199e-01, 3.7666e-01, 5.0376e-01, + 7.3021e-01, 5.8192e-01, 6.0438e-01, 9.8176e-01, + 6.0000e-01, 4.1374e-02, 9.6159e-01, 9.6217e-01, + 3.1469e-01, 1.4579e-01, 8.1938e-01, 1.9643e-01, + 4.0646e-02, 4.0573e-01, 1.4437e-01, 3.4342e-01, + 1.6384e-01, 1.4267e-01, 8.5850e-01, 4.1913e-01, + 7.7964e-01, 5.0587e-02, 5.4522e-01, 9.1170e-02, + 4.7335e-01, 1.5569e-02, 7.1191e-01, 9.4130e-01, + 9.5234e-02, 7.5346e-01, 5.9941e-01, 4.9381e-01, + 8.2175e-01, 5.4603e-01, 5.0709e-01, 3.1483e-01, + 1.8385e-01, 5.3932e-01, 3.8007e-01, 2.3323e-01, + 1.3159e-01, 2.2909e-01, 6.0914e-01, 2.1429e-01, + 3.4677e-01, 3.6510e-01, 4.6788e-01, 7.5851e-02, + 6.2342e-01, 3.9178e-01, 8.0332e-01, 2.8579e-02, + 2.2596e-01, 3.9186e-01, 9.5848e-01, 6.6292e-01, + 3.9079e-01, 6.4023e-01, 5.1926e-01, 5.3093e-01, + 3.9483e-01, 6.2126e-01, 7.8066e-01, 7.1039e-01, + 8.0931e-01, 1.5888e-01, 1.1579e-01, 8.2985e-02, + 7.6645e-01, 5.3384e-01, 3.8176e-01, 7.2784e-01, + 6.3820e-02, 8.9909e-01, 2.8961e-01, 4.3128e-01, + 8.4458e-01, 8.0799e-01, 9.6677e-01, 4.3147e-01, + 8.5521e-01, 4.2834e-01, 4.1613e-01, 9.0932e-01, + 8.9002e-01, 2.7915e-01, 8.7947e-02, 3.0907e-01, + 7.4248e-01, 4.7374e-01, 3.8687e-01, 5.7613e-01, + 4.4292e-02, 4.6982e-01, 2.8335e-01, 5.7595e-01, + 1.3222e-01, 8.0014e-01, 4.9960e-01, 3.6663e-01, + 8.3945e-01, 6.4090e-01, 4.8181e-01, 4.7471e-01, + 5.9235e-01, 8.6579e-02, 7.1166e-01, 7.0863e-01, + 4.3654e-01, 1.2795e-01, 6.7865e-01, 5.2378e-01, + 9.9089e-02, 3.8994e-01, 6.8144e-01, 1.6013e-01, + 6.8525e-02, 6.2662e-01, 9.8279e-01, 6.2973e-02, + 8.5620e-01, 5.1255e-01, 4.1039e-01, 8.2570e-01, + 9.1847e-01, 6.1354e-02, 6.5377e-01, 7.7310e-01, + 8.3012e-01, 8.6661e-01, 3.0115e-01, 6.7939e-01, + 9.0035e-01, 3.7404e-01, 7.2569e-01, 2.8425e-01, + 6.9314e-01, 3.2587e-01, 8.9884e-01, 2.9391e-01, + 2.3869e-01, 8.9970e-01, 9.0712e-01, 2.4140e-01, + 1.0854e-01, 2.8831e-01, 2.9758e-01, 4.7286e-01, + 1.5410e-01, 6.2718e-01, 8.3891e-01, 4.9411e-01, + 7.2681e-01, 5.1884e-01, 3.4066e-01, 3.4069e-01, + 6.7893e-01, 6.2099e-01, 1.6795e-01, 3.5633e-01, + 5.4807e-01, 3.0646e-01, 3.9730e-01, 9.8662e-01, + 5.0149e-02, 4.8717e-01, 9.9230e-01, 5.6152e-01, + 1.4681e-01, 9.7890e-01, 1.2282e-01, 4.0130e-01, + 3.5790e-01, 6.9318e-01, 7.7732e-01, 5.1771e-01, + 8.0221e-01, 1.7685e-01, 3.7493e-01, 3.0565e-01, + 8.9442e-01, 8.6437e-01, 5.0614e-01, 3.2743e-01, + 4.1584e-02, 1.5142e-01, 8.1169e-01, 1.0022e-01, + 1.8970e-01, 7.7451e-01, 8.9922e-01, 1.8975e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.0173, 0.9884, 0.5080, ..., 0.4806, 0.4208, 0.0837]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.5300266742706299 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 311537 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 
100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.936841487884521} + +tensor(indices=tensor([[9449, 1983, 8604, ..., 9079, 441, 9488], + [ 379, 3653, 8275, ..., 76, 2740, 1704]]), + values=tensor([0.4852, 0.7456, 0.6746, 0.1995, 0.1009, 0.4793, 0.6989, + 0.8157, 0.3591, 0.3036, 0.7392, 0.5362, 0.2173, 0.9942, + 0.5412, 0.2497, 0.7900, 0.8030, 0.6591, 0.9085, 0.5926, + 0.1392, 0.5048, 0.6661, 0.5178, 0.4216, 0.7460, 0.5899, + 0.5811, 0.7930, 0.0027, 0.6542, 0.3589, 0.3222, 0.0919, + 0.4255, 0.9995, 0.7327, 0.4068, 0.8307, 0.2022, 0.8956, + 0.8676, 0.7948, 0.1826, 0.7101, 0.1341, 0.1712, 0.4737, + 0.2177, 0.4263, 0.9124, 0.6689, 0.0389, 0.7267, 0.7015, + 0.8665, 0.1742, 0.3336, 0.9406, 0.9724, 0.3904, 0.7599, + 0.0814, 0.8305, 0.1901, 0.0554, 0.6683, 0.3472, 0.6595, + 0.8622, 0.9735, 0.2275, 0.3053, 0.1272, 0.7269, 0.9531, + 0.6465, 0.6439, 0.9211, 0.7760, 0.0794, 0.3314, 0.9024, + 0.8993, 0.1186, 0.9071, 0.2108, 0.0485, 0.2647, 0.8400, + 0.7023, 0.8418, 0.2426, 0.6013, 0.8496, 0.7980, 0.9617, + 0.6286, 0.4515, 0.6201, 0.0544, 0.1048, 0.8330, 0.0134, + 0.7385, 0.1222, 0.2903, 0.9429, 0.2839, 0.2682, 0.3453, + 0.0243, 0.9923, 0.0231, 0.5482, 0.5942, 0.3786, 0.1047, + 0.5445, 0.7316, 0.1798, 0.9223, 0.5525, 0.2892, 0.9262, + 0.2671, 0.1566, 0.5654, 0.1653, 0.7942, 0.5858, 0.4767, + 0.1123, 0.9356, 0.9940, 0.5222, 0.9677, 0.5735, 0.4695, + 0.3704, 0.5280, 0.2999, 0.5746, 0.7917, 0.4402, 0.3754, + 0.4109, 0.9118, 0.5183, 0.7820, 0.9995, 0.0593, 0.4479, + 0.2289, 0.8858, 0.7250, 0.4418, 0.1500, 0.1326, 0.6218, + 0.3959, 0.7546, 0.4617, 0.8757, 0.5567, 0.5667, 0.8284, + 0.7029, 0.4684, 0.6413, 0.6062, 0.0304, 0.5222, 0.9231, + 0.6230, 0.9575, 0.5842, 0.3697, 0.3495, 0.7724, 0.0770, + 0.4702, 0.7248, 0.6694, 0.0962, 0.6605, 0.3335, 0.4235, + 0.0213, 0.8290, 0.9880, 0.9364, 0.0258, 0.6612, 0.9191, + 0.7033, 0.9526, 0.4977, 0.4658, 0.9569, 0.4742, 0.3591, + 0.3812, 0.6862, 0.5362, 0.7735, 0.5023, 0.9105, 0.2276, + 0.7709, 0.0463, 0.8514, 0.4990, 0.1399, 0.8960, 0.0896, + 0.5102, 0.2557, 0.8461, 0.2458, 0.0427, 0.0867, 0.5415, + 0.5708, 0.9042, 0.4496, 0.6420, 0.8783, 0.8222, 0.5298, + 0.6747, 0.8973, 0.4873, 0.9680, 0.8983, 0.1539, 0.6048, + 0.4821, 0.0213, 0.5180, 0.4017, 0.8770, 0.3389, 0.7046, + 0.1742, 0.2085, 0.4764, 0.4352, 0.7132, 0.7906, 0.3089, + 0.7015, 0.4204, 0.3337, 0.3147, 0.1107, 0.6784, 0.5015, + 0.0549, 0.2294, 0.9551, 0.3621, 0.3400, 0.9947, 0.5778, + 0.3669, 0.9387, 0.6661, 0.3792, 0.7299, 0.7109, 0.3595, + 0.7741, 0.5866, 0.1149, 0.8338, 0.5018, 0.6483, 0.7764, + 0.7751, 0.0053, 0.6496, 0.9392, 0.3239, 0.0708, 0.1991, + 0.9338, 0.7504, 0.7297, 0.6946, 0.1742, 0.8873, 0.9167, + 0.2884, 0.6010, 0.0437, 0.0767, 0.9217, 0.0656, 0.5786, + 0.0441, 0.2633, 0.0835, 0.1193, 0.1738, 0.5494, 0.1836, + 0.0332, 0.6421, 0.8720, 0.0437, 0.5397, 0.8110, 0.2584, + 0.9588, 0.0320, 0.3449, 0.0691, 0.6464, 0.3356, 0.4672, + 0.0635, 0.1050, 0.3464, 0.6712, 0.0015, 0.1853, 0.5642, + 0.0408, 0.9961, 0.7748, 0.0685, 0.2047, 0.6197, 0.3278, + 0.7583, 0.2733, 0.8271, 0.2729, 0.8148, 0.0368, 0.1332, + 0.5337, 0.5162, 0.6468, 0.9458, 0.6451, 0.7904, 0.3550, + 0.3149, 0.0622, 0.9385, 0.6218, 0.1723, 0.9213, 0.1713, + 0.8356, 0.1100, 0.1981, 0.1793, 0.4669, 0.1647, 0.9325, + 0.4413, 0.7915, 0.3851, 0.4090, 0.3309, 0.0755, 0.8133, + 0.7755, 0.7487, 0.8718, 0.6282, 0.2601, 0.2132, 0.7562, + 0.2011, 0.0172, 0.7932, 0.8234, 0.8568, 0.3250, 0.4598, + 0.6212, 0.9336, 0.7698, 0.2202, 0.3754, 0.5239, 0.8287, + 0.3539, 0.1326, 0.9379, 0.0536, 0.6327, 0.7754, 0.8015, + 0.8595, 0.3997, 0.9275, 0.3337, 
0.8963, 0.0946, 0.4996, + 0.7186, 0.0069, 0.0724, 0.2769, 0.3360, 0.5500, 0.2495, + 0.8925, 0.1549, 0.7257, 0.7732, 0.9815, 0.4727, 0.3748, + 0.3093, 0.7225, 0.1997, 0.5055, 0.1363, 0.7904, 0.1816, + 0.3599, 0.0027, 0.6202, 0.9946, 0.0782, 0.4728, 0.9528, + 0.1654, 0.9199, 0.5981, 0.1050, 0.0695, 0.9475, 0.6707, + 0.5208, 0.2893, 0.6563, 0.0969, 0.6972, 0.8404, 0.8738, + 0.4173, 0.0037, 0.8329, 0.2460, 0.7749, 0.0633, 0.5881, + 0.7518, 0.7542, 0.8105, 0.1156, 0.6835, 0.2620, 0.0244, + 0.2257, 0.2749, 0.6711, 0.9895, 0.1034, 0.4706, 0.4458, + 0.4360, 0.5373, 0.8291, 0.5069, 0.8022, 0.3949, 0.9910, + 0.7216, 0.9302, 0.1221, 0.7635, 0.2340, 0.5895, 0.2299, + 0.7934, 0.9815, 0.7163, 0.6735, 0.4088, 0.6104, 0.5671, + 0.3594, 0.2926, 0.8253, 0.6302, 0.3483, 0.5853, 0.8522, + 0.0274, 0.4013, 0.4136, 0.7071, 0.8192, 0.7581, 0.8372, + 0.2805, 0.0304, 0.2594, 0.8125, 0.2690, 0.8014, 0.2972, + 0.9536, 0.8553, 0.0263, 0.9506, 0.8009, 0.2123, 0.4994, + 0.0967, 0.4292, 0.8558, 0.9874, 0.2902, 0.2838, 0.4256, + 0.7640, 0.5011, 0.7127, 0.0227, 0.8603, 0.5746, 0.6935, + 0.8273, 0.8493, 0.2221, 0.7469, 0.8481, 0.1433, 0.2126, + 0.0609, 0.2633, 0.1045, 0.0344, 0.9965, 0.1423, 0.8976, + 0.0645, 0.6131, 0.8452, 0.8508, 0.8462, 0.1697, 0.1348, + 0.2804, 0.0906, 0.6075, 0.1640, 0.5120, 0.4907, 0.6374, + 0.4293, 0.9539, 0.4440, 0.2977, 0.5979, 0.2534, 0.2416, + 0.0863, 0.1093, 0.0024, 0.6833, 0.4643, 0.6603, 0.5636, + 0.7695, 0.5315, 0.0298, 0.3169, 0.0039, 0.2062, 0.6618, + 0.6958, 0.6166, 0.1840, 0.1331, 0.6205, 0.2864, 0.4641, + 0.2037, 0.5126, 0.5366, 0.1584, 0.8256, 0.6681, 0.3504, + 0.4197, 0.1706, 0.9348, 0.3533, 0.0191, 0.3668, 0.4875, + 0.9158, 0.3382, 0.8800, 0.4014, 0.9078, 0.3361, 0.2503, + 0.5280, 0.6507, 0.2868, 0.2080, 0.5840, 0.2180, 0.9088, + 0.9062, 0.5048, 0.2187, 0.9797, 0.8531, 0.7473, 0.3583, + 0.7500, 0.8813, 0.7021, 0.1034, 0.5837, 0.7993, 0.0321, + 0.8041, 0.9711, 0.3426, 0.0569, 0.2130, 0.7519, 0.7420, + 0.6780, 0.0354, 0.6662, 0.1161, 0.5677, 0.8211, 0.2188, + 0.1319, 0.0503, 0.3529, 0.2317, 0.6917, 0.7119, 0.0328, + 0.4702, 0.8651, 0.0378, 0.9488, 0.1620, 0.7004, 0.1993, + 0.6881, 0.6031, 0.9878, 0.6244, 0.8395, 0.2876, 0.4623, + 0.5688, 0.9568, 0.3583, 0.3332, 0.2481, 0.3490, 0.1522, + 0.4169, 0.9565, 0.4692, 0.9341, 0.1455, 0.6058, 0.3256, + 0.7732, 0.4596, 0.4758, 0.3239, 0.6178, 0.2159, 0.9497, + 0.4460, 0.2619, 0.5366, 0.2807, 0.3747, 0.5316, 0.6398, + 0.5014, 0.9129, 0.6511, 0.9745, 0.3450, 0.4354, 0.7145, + 0.3963, 0.1018, 0.2370, 0.0579, 0.6296, 0.5376, 0.4212, + 0.7341, 0.4723, 0.5945, 0.4571, 0.9799, 0.3736, 0.2755, + 0.5768, 0.2102, 0.0193, 0.0556, 0.3506, 0.8902, 0.7765, + 0.5524, 0.2526, 0.5750, 0.7180, 0.0332, 0.4840, 0.5667, + 0.3916, 0.7413, 0.7587, 0.3881, 0.9480, 0.2334, 0.3730, + 0.4054, 0.1993, 0.9826, 0.0595, 0.9834, 0.3868, 0.7498, + 0.7383, 0.1696, 0.3770, 0.4540, 0.6246, 0.0988, 0.1197, + 0.2522, 0.3898, 0.3747, 0.5319, 0.9028, 0.2064, 0.9789, + 0.2022, 0.1043, 0.1360, 0.0178, 0.4510, 0.2510, 0.5015, + 0.3519, 0.4152, 0.8545, 0.4783, 0.6235, 1.0000, 0.3091, + 0.2701, 0.8880, 0.8861, 0.3516, 0.1603, 0.3569, 0.0521, + 0.5316, 0.2367, 0.9484, 0.4276, 0.8079, 0.1098, 0.0901, + 0.8197, 0.3460, 0.4770, 0.1628, 0.4097, 0.4827, 0.4121, + 0.4955, 0.7300, 0.1991, 0.8566, 0.7018, 0.8926, 0.4044, + 0.9527, 0.1783, 0.8023, 0.0037, 0.2145, 0.3690, 0.6856, + 0.1160, 0.7008, 0.8968, 0.8291, 0.2190, 0.5082, 0.7170, + 0.5545, 0.6468, 0.4032, 0.8766, 0.9935, 0.6206, 0.9196, + 0.2708, 0.6010, 0.3566, 0.5655, 0.1334, 0.0317, 0.9089, + 0.4883, 0.3129, 0.2375, 0.0245, 0.9931, 0.8682, 
0.2100, + 0.8278, 0.2357, 0.8186, 0.0746, 0.9512, 0.5988, 0.4698, + 0.7326, 0.2072, 0.9661, 0.2194, 0.0909, 0.3693, 0.2241, + 0.3486, 0.9000, 0.7653, 0.3210, 0.1106, 0.7392, 0.9930, + 0.5615, 0.3160, 0.6996, 0.7221, 0.4820, 0.2912, 0.4774, + 0.5757, 0.4963, 0.0242, 0.3244, 0.5335, 0.2241, 0.8257, + 0.5038, 0.5392, 0.0582, 0.5343, 0.1811, 0.6934, 0.8311, + 0.1849, 0.1453, 0.4364, 0.6098, 0.2830, 0.6671, 0.7649, + 0.9963, 0.2240, 0.9808, 0.1730, 0.9039, 0.1287, 0.6533, + 0.4767, 0.4074, 0.4945, 0.9337, 0.3231, 0.5145, 0.6775, + 0.2409, 0.7241, 0.5970, 0.3235, 0.3946, 0.9823, 0.1307, + 0.5413, 0.2118, 0.5875, 0.2021, 0.2299, 0.2350, 0.7436, + 0.6276, 0.1237, 0.4474, 0.3800, 0.0240, 0.6668, 0.7029, + 0.2403, 0.8558, 0.2573, 0.5023, 0.7296, 0.9904, 0.0610, + 0.4926, 0.0911, 0.8133, 0.9773, 0.6748, 0.1779, 0.3645, + 0.3936, 0.6101, 0.2505, 0.3746, 0.8904, 0.9525, 0.2954, + 0.6249, 0.6218, 0.6388, 0.0360, 0.5495, 0.3338, 0.7487, + 0.6226, 0.0604, 0.0264, 0.4848, 0.4942, 0.4977, 0.3416, + 0.3983, 0.2565, 0.3809, 0.0121, 0.7773, 0.7943, 0.6463, + 0.0336, 0.0322, 0.0067, 0.6148, 0.7752, 0.2595, 0.7560, + 0.9935, 0.3053, 0.7422, 0.2791, 0.8499, 0.9851, 0.8966, + 0.9209, 0.4825, 0.6859, 0.1353, 0.4816, 0.2144, 0.0737, + 0.2418, 0.5632, 0.1795, 0.4575, 0.0939, 0.5427, 0.8005, + 0.1748, 0.3050, 0.9237, 0.4077, 0.6380, 0.6156, 0.9027, + 0.8978, 0.5298, 0.0987, 0.4984, 0.3592, 0.4138]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.9181, 0.5008, 0.3382, ..., 0.4200, 0.0196, 0.8754]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 9.936841487884521 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 329192 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.653273582458496} + +tensor(indices=tensor([[4854, 1972, 1654, ..., 649, 6272, 4588], + [9719, 4284, 6068, ..., 2523, 230, 5289]]), + values=tensor([0.0546, 0.9618, 0.5019, 0.3688, 0.2823, 0.2385, 0.3981, + 0.3311, 0.3196, 0.0360, 0.2973, 0.2967, 0.8582, 0.3259, + 0.3539, 0.6753, 0.6200, 0.0686, 0.7919, 0.5337, 0.7565, + 0.3645, 0.2896, 0.2953, 0.1535, 0.6412, 0.0942, 0.1284, + 0.3823, 0.2412, 0.8320, 0.6559, 0.7567, 0.3082, 0.6391, + 0.6003, 0.9169, 0.6491, 0.8636, 0.7921, 0.9829, 0.2122, + 0.0234, 0.5027, 0.6219, 0.4056, 0.6024, 0.9485, 0.7115, + 0.3451, 0.2106, 0.4603, 0.5533, 0.0486, 0.7718, 0.2406, + 0.7476, 0.2536, 0.6648, 0.8494, 0.3832, 0.2494, 0.3734, + 0.2158, 0.8473, 0.7907, 0.1672, 0.4406, 0.9048, 0.0481, + 0.2471, 0.3739, 0.4422, 0.8044, 0.6496, 0.9822, 0.0162, + 0.4389, 0.2354, 0.0506, 0.9853, 0.6680, 0.1153, 0.2338, + 0.3680, 0.0319, 0.9285, 0.7733, 0.1113, 0.8243, 0.7520, + 0.1949, 0.9210, 0.6430, 0.2592, 0.2631, 0.6721, 0.9714, + 0.4340, 0.0745, 0.9262, 0.1076, 0.5913, 0.8244, 0.9226, + 0.2082, 0.8533, 0.8589, 0.7031, 0.4953, 0.7932, 0.6515, + 0.4222, 0.8735, 0.6638, 0.7179, 0.5208, 0.4634, 0.6346, + 0.8158, 0.2966, 0.6425, 0.5722, 0.9826, 0.0998, 0.1347, + 0.2221, 0.5267, 0.5951, 0.5359, 0.4663, 0.2881, 0.3374, + 0.0562, 0.6246, 0.5624, 0.6535, 0.8938, 0.9675, 0.6807, + 0.1069, 0.2400, 0.9511, 0.5834, 0.8067, 0.7875, 0.9192, + 0.9314, 0.6987, 0.1271, 0.6400, 0.5017, 0.4446, 0.3635, + 0.6910, 0.3672, 0.2439, 0.2649, 0.1640, 
0.7850, 0.7337, + 0.8578, 0.6221, 0.0166, 0.6421, 0.3661, 0.7555, 0.0663, + 0.4310, 0.3023, 0.6408, 0.1570, 0.4641, 0.5173, 0.7738, + 0.4925, 0.6237, 0.6803, 0.4675, 0.7691, 0.6378, 0.1895, + 0.9558, 0.1102, 0.5469, 0.1449, 0.3673, 0.0670, 0.6119, + 0.7794, 0.6773, 0.2571, 0.4058, 0.7148, 0.9526, 0.2736, + 0.5870, 0.9203, 0.7770, 0.6000, 0.9050, 0.8928, 0.6043, + 0.2173, 0.8021, 0.3987, 0.9319, 0.1321, 0.6419, 0.8805, + 0.5762, 0.9898, 0.7741, 0.0014, 0.4154, 0.8250, 0.9940, + 0.1492, 0.2679, 0.5259, 0.8599, 0.3844, 0.0824, 0.9404, + 0.1299, 0.4514, 0.9935, 0.0880, 0.7071, 0.9205, 0.6893, + 0.7071, 0.6541, 0.2951, 0.6104, 0.0685, 0.8105, 0.5565, + 0.3738, 0.4508, 0.2772, 0.8218, 0.3212, 0.1522, 0.9104, + 0.8334, 0.9032, 0.6331, 0.1635, 0.9594, 0.1631, 0.2170, + 0.2663, 0.0335, 0.0622, 0.5555, 0.0460, 0.2288, 0.5383, + 0.2672, 0.1372, 0.4498, 0.4359, 0.1932, 0.7826, 0.7543, + 0.2390, 0.9424, 0.7304, 0.1308, 0.9979, 0.2674, 0.2207, + 0.4649, 0.0767, 0.0635, 0.7269, 0.8666, 0.4565, 0.8178, + 0.9247, 0.5805, 0.7682, 0.1326, 0.1208, 0.8191, 0.5636, + 0.6199, 0.7422, 0.3247, 0.7303, 0.9963, 0.1741, 0.6580, + 0.2115, 0.2373, 0.1476, 0.0673, 0.2244, 0.3259, 0.7683, + 0.9841, 0.6551, 0.9793, 0.6693, 0.3136, 0.1028, 0.2181, + 0.6644, 0.2183, 0.2541, 0.6732, 0.8967, 0.8898, 0.9288, + 0.3995, 0.1632, 0.8336, 0.9092, 0.7542, 0.3277, 0.5676, + 0.7198, 0.6792, 0.1867, 0.2225, 0.9889, 0.6153, 0.2516, + 0.8157, 0.6179, 0.7287, 0.0399, 0.7523, 0.9450, 0.2052, + 0.4578, 0.9994, 0.9638, 0.1661, 0.1734, 0.6150, 0.7844, + 0.6947, 0.6125, 0.2980, 0.3428, 0.3989, 0.5310, 0.5801, + 0.5691, 0.3263, 0.4789, 0.8018, 0.9784, 0.1424, 0.7686, + 0.7759, 0.5730, 0.6912, 0.9861, 0.4485, 0.3292, 0.8811, + 0.8909, 0.7811, 0.0343, 0.3755, 0.9717, 0.1991, 0.2348, + 0.0415, 0.2611, 0.5831, 0.4307, 0.7342, 0.3183, 0.5113, + 0.6527, 0.8104, 0.1210, 0.4177, 0.9794, 0.8917, 0.7427, + 0.6576, 0.4750, 0.3355, 0.7430, 0.3699, 0.7745, 0.6011, + 0.6918, 0.7276, 0.2705, 0.0994, 0.8232, 0.5870, 0.1146, + 0.9875, 0.6768, 0.1684, 0.0647, 0.5265, 0.5565, 0.9341, + 0.1991, 0.4732, 0.5823, 0.4300, 0.1062, 0.9316, 0.2859, + 0.6377, 0.1954, 0.8861, 0.4825, 0.4980, 0.0519, 0.4206, + 0.9348, 0.0941, 0.7470, 0.9779, 0.4837, 0.1233, 0.9046, + 0.8605, 0.6756, 0.3581, 0.5206, 0.7869, 0.4967, 0.5488, + 0.0842, 0.7040, 0.5311, 0.2386, 0.6312, 0.1512, 0.4338, + 0.4572, 0.8489, 0.6417, 0.4462, 0.8036, 0.6098, 0.1997, + 0.8349, 0.4730, 0.5787, 0.2206, 0.3643, 0.5429, 0.2836, + 0.3462, 0.3657, 0.5533, 0.6283, 0.5639, 0.6018, 0.4269, + 0.3299, 0.7280, 0.8104, 0.6276, 0.0662, 0.1279, 0.8930, + 0.6281, 0.9424, 0.3968, 0.2508, 0.2171, 0.5602, 0.1197, + 0.7457, 0.3991, 0.4626, 0.0622, 0.6755, 0.7264, 0.9811, + 0.1693, 0.9437, 0.6089, 0.9176, 0.1836, 0.8271, 0.8856, + 0.9443, 0.7809, 0.0108, 0.7203, 0.5947, 0.8905, 0.0445, + 0.0699, 0.4006, 0.6961, 0.4831, 0.7407, 0.2880, 0.4599, + 0.4693, 0.3042, 0.8173, 0.5769, 0.3609, 0.4964, 0.0841, + 0.2442, 0.2771, 0.0993, 0.4282, 0.1780, 0.8141, 0.8384, + 0.3696, 0.7660, 0.0872, 0.9099, 0.4227, 0.5207, 0.1521, + 0.1139, 0.2068, 0.0133, 0.4689, 0.3938, 0.4969, 0.2244, + 0.6359, 0.6660, 0.3449, 0.2158, 0.5108, 0.7421, 0.2597, + 0.3621, 0.0177, 0.3102, 0.4907, 0.4913, 0.5419, 0.2531, + 0.4241, 0.4212, 0.4517, 0.6030, 0.7879, 0.2272, 0.2337, + 0.8575, 0.1110, 0.0202, 0.1069, 0.6565, 0.2442, 0.0772, + 0.4766, 0.1118, 0.2209, 0.2595, 0.6296, 0.7056, 0.6889, + 0.6495, 0.8593, 0.1117, 0.4068, 0.6759, 0.6510, 0.5487, + 0.1141, 0.3036, 0.8762, 0.2552, 0.4586, 0.9505, 0.1165, + 0.6656, 0.9939, 0.7003, 0.4030, 0.4376, 0.4250, 0.2459, 
+ 0.7688, 0.3425, 0.0164, 0.3476, 0.7114, 0.3631, 0.1853, + 0.0197, 0.1895, 0.1403, 0.8574, 0.3452, 0.8160, 0.9020, + 0.9156, 0.1928, 0.7611, 0.1365, 0.1221, 0.0355, 0.4036, + 0.7580, 0.6105, 0.4257, 0.3053, 0.6391, 0.2544, 0.2600, + 0.6769, 0.1254, 0.3276, 0.1147, 0.5396, 0.7679, 0.4043, + 0.3654, 0.7254, 0.4094, 0.7651, 0.5771, 0.6880, 0.9243, + 0.5574, 0.9087, 0.9140, 0.3571, 0.8742, 0.0448, 0.5066, + 0.1946, 0.2322, 0.9189, 0.3201, 0.8150, 0.4373, 0.9521, + 0.4274, 0.7234, 0.2347, 0.4717, 0.0954, 0.3797, 0.3487, + 0.1178, 0.7890, 0.1625, 0.0516, 0.4215, 0.9427, 0.7114, + 0.0679, 0.8178, 0.9847, 0.4645, 0.1250, 0.7080, 0.6793, + 0.2496, 0.5513, 0.2581, 0.7638, 0.5884, 0.1229, 0.7480, + 0.3331, 0.7886, 0.3105, 0.9359, 0.5589, 0.6762, 0.1715, + 0.3992, 0.3033, 0.1567, 0.6501, 0.3727, 0.3524, 0.2620, + 0.3809, 0.4793, 0.4363, 0.8401, 0.1721, 0.4691, 0.1140, + 0.0769, 0.5503, 0.6917, 0.9551, 0.9730, 0.7478, 0.9795, + 0.3056, 0.9739, 0.0738, 0.3801, 0.2976, 0.9665, 0.5065, + 0.5275, 0.5874, 0.1639, 0.0384, 0.9320, 0.6764, 0.5528, + 0.7685, 0.3392, 0.7970, 0.3212, 0.3294, 0.7715, 0.7566, + 0.0845, 0.7230, 0.9756, 0.7438, 0.0730, 0.0140, 0.2442, + 0.3170, 0.1531, 0.6572, 0.2241, 0.6559, 0.7594, 0.5163, + 0.0770, 0.8216, 0.1121, 0.2414, 0.3304, 0.4292, 0.6703, + 0.8489, 0.1680, 0.1843, 0.7202, 0.8341, 0.2089, 0.9189, + 0.3321, 0.2930, 0.6854, 0.2746, 0.2375, 0.1258, 0.9921, + 0.2697, 0.2706, 0.4161, 0.9534, 0.4156, 0.0203, 0.7281, + 0.8287, 0.1664, 0.6949, 0.3616, 0.7643, 0.8454, 0.7725, + 0.2379, 0.0818, 0.3522, 0.8770, 0.2477, 0.8016, 0.9264, + 0.5983, 0.8318, 0.9309, 0.1421, 0.5214, 0.4590, 0.4466, + 0.8836, 0.1506, 0.6918, 0.5040, 0.0693, 0.8909, 0.8473, + 0.3000, 0.4501, 0.0317, 0.6074, 0.6142, 0.8717, 0.1123, + 0.2734, 0.6236, 0.7128, 0.9839, 0.1268, 0.4137, 0.8249, + 0.9714, 0.2626, 0.4457, 0.7949, 0.0601, 0.1474, 0.7151, + 0.5980, 0.4383, 0.8719, 0.8532, 0.1146, 0.2021, 0.3510, + 0.7357, 0.8046, 0.4561, 0.1123, 0.5495, 0.8039, 0.9162, + 0.1794, 0.4379, 0.0797, 0.6351, 0.2957, 0.0267, 0.1551, + 0.6665, 0.5637, 0.9045, 0.5240, 0.2534, 0.4744, 0.8888, + 0.7171, 0.0746, 0.9509, 0.9155, 0.0161, 0.4715, 0.7253, + 0.2094, 0.9385, 0.4007, 0.7703, 0.4403, 0.4696, 0.0635, + 0.6452, 0.9162, 0.8772, 0.6423, 0.8830, 0.1271, 0.2269, + 0.8334, 0.5840, 0.7807, 0.8628, 0.2077, 0.7466, 0.6649, + 0.4775, 0.5364, 0.9753, 0.9333, 0.8689, 0.3676, 0.6997, + 0.6413, 0.1514, 0.4442, 0.4757, 0.6288, 0.8351, 0.5958, + 0.7445, 0.1575, 0.2611, 0.9676, 0.0288, 0.8028, 0.9286, + 0.3416, 0.8684, 0.9159, 0.0786, 0.1630, 0.6937, 0.0573, + 0.2736, 0.8433, 0.1001, 0.2551, 0.8051, 0.8270, 0.2224, + 0.8216, 0.0075, 0.0447, 0.1405, 0.2786, 0.7581, 0.5283, + 0.0187, 0.6181, 0.6165, 0.5798, 0.8513, 0.6720, 0.7857, + 0.0688, 0.4059, 0.2379, 0.9488, 0.1719, 0.2302, 0.7356, + 0.6534, 0.7287, 0.0684, 0.5311, 0.2033, 0.2442, 0.0088, + 0.4083, 0.8184, 0.3400, 0.1257, 0.4984, 0.2301, 0.1742, + 0.7550, 0.8626, 0.6692, 0.1226, 0.7922, 0.4923, 0.4013, + 0.8096, 0.3364, 0.2426, 0.0395, 0.3774, 0.7735, 0.1798, + 0.7857, 0.9528, 0.7629, 0.4383, 0.4673, 0.4848, 0.4766, + 0.7546, 0.5048, 0.0907, 0.0024, 0.6060, 0.4046, 0.6692, + 0.0093, 0.9058, 0.9962, 0.9902, 0.8031, 0.7622, 0.7882, + 0.4125, 0.7561, 0.2878, 0.1422, 0.9781, 0.5586, 0.9705, + 0.5239, 0.5728, 0.1892, 0.6013, 0.2173, 0.0813, 0.3227, + 0.5416, 0.2267, 0.9476, 0.6435, 0.2364, 0.6455, 0.7828, + 0.9169, 0.5877, 0.7036, 0.7500, 0.4633, 0.7657]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.3315, 0.1043, 0.8974, ..., 0.8556, 0.8987, 0.2347]) +Matrix Type: 
synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.653273582458496 seconds + +tensor(indices=tensor([[4854, 1972, 1654, ..., 649, 6272, 4588], + [9719, 4284, 6068, ..., 2523, 230, 5289]]), + values=tensor([0.0546, 0.9618, 0.5019, 0.3688, 0.2823, 0.2385, 0.3981, + 0.3311, 0.3196, 0.0360, 0.2973, 0.2967, 0.8582, 0.3259, + 0.3539, 0.6753, 0.6200, 0.0686, 0.7919, 0.5337, 0.7565, + 0.3645, 0.2896, 0.2953, 0.1535, 0.6412, 0.0942, 0.1284, + 0.3823, 0.2412, 0.8320, 0.6559, 0.7567, 0.3082, 0.6391, + 0.6003, 0.9169, 0.6491, 0.8636, 0.7921, 0.9829, 0.2122, + 0.0234, 0.5027, 0.6219, 0.4056, 0.6024, 0.9485, 0.7115, + 0.3451, 0.2106, 0.4603, 0.5533, 0.0486, 0.7718, 0.2406, + 0.7476, 0.2536, 0.6648, 0.8494, 0.3832, 0.2494, 0.3734, + 0.2158, 0.8473, 0.7907, 0.1672, 0.4406, 0.9048, 0.0481, + 0.2471, 0.3739, 0.4422, 0.8044, 0.6496, 0.9822, 0.0162, + 0.4389, 0.2354, 0.0506, 0.9853, 0.6680, 0.1153, 0.2338, + 0.3680, 0.0319, 0.9285, 0.7733, 0.1113, 0.8243, 0.7520, + 0.1949, 0.9210, 0.6430, 0.2592, 0.2631, 0.6721, 0.9714, + 0.4340, 0.0745, 0.9262, 0.1076, 0.5913, 0.8244, 0.9226, + 0.2082, 0.8533, 0.8589, 0.7031, 0.4953, 0.7932, 0.6515, + 0.4222, 0.8735, 0.6638, 0.7179, 0.5208, 0.4634, 0.6346, + 0.8158, 0.2966, 0.6425, 0.5722, 0.9826, 0.0998, 0.1347, + 0.2221, 0.5267, 0.5951, 0.5359, 0.4663, 0.2881, 0.3374, + 0.0562, 0.6246, 0.5624, 0.6535, 0.8938, 0.9675, 0.6807, + 0.1069, 0.2400, 0.9511, 0.5834, 0.8067, 0.7875, 0.9192, + 0.9314, 0.6987, 0.1271, 0.6400, 0.5017, 0.4446, 0.3635, + 0.6910, 0.3672, 0.2439, 0.2649, 0.1640, 0.7850, 0.7337, + 0.8578, 0.6221, 0.0166, 0.6421, 0.3661, 0.7555, 0.0663, + 0.4310, 0.3023, 0.6408, 0.1570, 0.4641, 0.5173, 0.7738, + 0.4925, 0.6237, 0.6803, 0.4675, 0.7691, 0.6378, 0.1895, + 0.9558, 0.1102, 0.5469, 0.1449, 0.3673, 0.0670, 0.6119, + 0.7794, 0.6773, 0.2571, 0.4058, 0.7148, 0.9526, 0.2736, + 0.5870, 0.9203, 0.7770, 0.6000, 0.9050, 0.8928, 0.6043, + 0.2173, 0.8021, 0.3987, 0.9319, 0.1321, 0.6419, 0.8805, + 0.5762, 0.9898, 0.7741, 0.0014, 0.4154, 0.8250, 0.9940, + 0.1492, 0.2679, 0.5259, 0.8599, 0.3844, 0.0824, 0.9404, + 0.1299, 0.4514, 0.9935, 0.0880, 0.7071, 0.9205, 0.6893, + 0.7071, 0.6541, 0.2951, 0.6104, 0.0685, 0.8105, 0.5565, + 0.3738, 0.4508, 0.2772, 0.8218, 0.3212, 0.1522, 0.9104, + 0.8334, 0.9032, 0.6331, 0.1635, 0.9594, 0.1631, 0.2170, + 0.2663, 0.0335, 0.0622, 0.5555, 0.0460, 0.2288, 0.5383, + 0.2672, 0.1372, 0.4498, 0.4359, 0.1932, 0.7826, 0.7543, + 0.2390, 0.9424, 0.7304, 0.1308, 0.9979, 0.2674, 0.2207, + 0.4649, 0.0767, 0.0635, 0.7269, 0.8666, 0.4565, 0.8178, + 0.9247, 0.5805, 0.7682, 0.1326, 0.1208, 0.8191, 0.5636, + 0.6199, 0.7422, 0.3247, 0.7303, 0.9963, 0.1741, 0.6580, + 0.2115, 0.2373, 0.1476, 0.0673, 0.2244, 0.3259, 0.7683, + 0.9841, 0.6551, 0.9793, 0.6693, 0.3136, 0.1028, 0.2181, + 0.6644, 0.2183, 0.2541, 0.6732, 0.8967, 0.8898, 0.9288, + 0.3995, 0.1632, 0.8336, 0.9092, 0.7542, 0.3277, 0.5676, + 0.7198, 0.6792, 0.1867, 0.2225, 0.9889, 0.6153, 0.2516, + 0.8157, 0.6179, 0.7287, 0.0399, 0.7523, 0.9450, 0.2052, + 0.4578, 0.9994, 0.9638, 0.1661, 0.1734, 0.6150, 0.7844, + 0.6947, 0.6125, 0.2980, 0.3428, 0.3989, 0.5310, 0.5801, + 0.5691, 0.3263, 0.4789, 0.8018, 0.9784, 0.1424, 0.7686, + 0.7759, 0.5730, 0.6912, 0.9861, 0.4485, 0.3292, 0.8811, + 0.8909, 0.7811, 0.0343, 0.3755, 0.9717, 0.1991, 0.2348, + 0.0415, 0.2611, 0.5831, 0.4307, 0.7342, 0.3183, 0.5113, + 0.6527, 0.8104, 0.1210, 0.4177, 0.9794, 0.8917, 0.7427, + 0.6576, 0.4750, 0.3355, 0.7430, 0.3699, 0.7745, 0.6011, + 0.6918, 0.7276, 
0.2705, 0.0994, 0.8232, 0.5870, 0.1146, + 0.9875, 0.6768, 0.1684, 0.0647, 0.5265, 0.5565, 0.9341, + 0.1991, 0.4732, 0.5823, 0.4300, 0.1062, 0.9316, 0.2859, + 0.6377, 0.1954, 0.8861, 0.4825, 0.4980, 0.0519, 0.4206, + 0.9348, 0.0941, 0.7470, 0.9779, 0.4837, 0.1233, 0.9046, + 0.8605, 0.6756, 0.3581, 0.5206, 0.7869, 0.4967, 0.5488, + 0.0842, 0.7040, 0.5311, 0.2386, 0.6312, 0.1512, 0.4338, + 0.4572, 0.8489, 0.6417, 0.4462, 0.8036, 0.6098, 0.1997, + 0.8349, 0.4730, 0.5787, 0.2206, 0.3643, 0.5429, 0.2836, + 0.3462, 0.3657, 0.5533, 0.6283, 0.5639, 0.6018, 0.4269, + 0.3299, 0.7280, 0.8104, 0.6276, 0.0662, 0.1279, 0.8930, + 0.6281, 0.9424, 0.3968, 0.2508, 0.2171, 0.5602, 0.1197, + 0.7457, 0.3991, 0.4626, 0.0622, 0.6755, 0.7264, 0.9811, + 0.1693, 0.9437, 0.6089, 0.9176, 0.1836, 0.8271, 0.8856, + 0.9443, 0.7809, 0.0108, 0.7203, 0.5947, 0.8905, 0.0445, + 0.0699, 0.4006, 0.6961, 0.4831, 0.7407, 0.2880, 0.4599, + 0.4693, 0.3042, 0.8173, 0.5769, 0.3609, 0.4964, 0.0841, + 0.2442, 0.2771, 0.0993, 0.4282, 0.1780, 0.8141, 0.8384, + 0.3696, 0.7660, 0.0872, 0.9099, 0.4227, 0.5207, 0.1521, + 0.1139, 0.2068, 0.0133, 0.4689, 0.3938, 0.4969, 0.2244, + 0.6359, 0.6660, 0.3449, 0.2158, 0.5108, 0.7421, 0.2597, + 0.3621, 0.0177, 0.3102, 0.4907, 0.4913, 0.5419, 0.2531, + 0.4241, 0.4212, 0.4517, 0.6030, 0.7879, 0.2272, 0.2337, + 0.8575, 0.1110, 0.0202, 0.1069, 0.6565, 0.2442, 0.0772, + 0.4766, 0.1118, 0.2209, 0.2595, 0.6296, 0.7056, 0.6889, + 0.6495, 0.8593, 0.1117, 0.4068, 0.6759, 0.6510, 0.5487, + 0.1141, 0.3036, 0.8762, 0.2552, 0.4586, 0.9505, 0.1165, + 0.6656, 0.9939, 0.7003, 0.4030, 0.4376, 0.4250, 0.2459, + 0.7688, 0.3425, 0.0164, 0.3476, 0.7114, 0.3631, 0.1853, + 0.0197, 0.1895, 0.1403, 0.8574, 0.3452, 0.8160, 0.9020, + 0.9156, 0.1928, 0.7611, 0.1365, 0.1221, 0.0355, 0.4036, + 0.7580, 0.6105, 0.4257, 0.3053, 0.6391, 0.2544, 0.2600, + 0.6769, 0.1254, 0.3276, 0.1147, 0.5396, 0.7679, 0.4043, + 0.3654, 0.7254, 0.4094, 0.7651, 0.5771, 0.6880, 0.9243, + 0.5574, 0.9087, 0.9140, 0.3571, 0.8742, 0.0448, 0.5066, + 0.1946, 0.2322, 0.9189, 0.3201, 0.8150, 0.4373, 0.9521, + 0.4274, 0.7234, 0.2347, 0.4717, 0.0954, 0.3797, 0.3487, + 0.1178, 0.7890, 0.1625, 0.0516, 0.4215, 0.9427, 0.7114, + 0.0679, 0.8178, 0.9847, 0.4645, 0.1250, 0.7080, 0.6793, + 0.2496, 0.5513, 0.2581, 0.7638, 0.5884, 0.1229, 0.7480, + 0.3331, 0.7886, 0.3105, 0.9359, 0.5589, 0.6762, 0.1715, + 0.3992, 0.3033, 0.1567, 0.6501, 0.3727, 0.3524, 0.2620, + 0.3809, 0.4793, 0.4363, 0.8401, 0.1721, 0.4691, 0.1140, + 0.0769, 0.5503, 0.6917, 0.9551, 0.9730, 0.7478, 0.9795, + 0.3056, 0.9739, 0.0738, 0.3801, 0.2976, 0.9665, 0.5065, + 0.5275, 0.5874, 0.1639, 0.0384, 0.9320, 0.6764, 0.5528, + 0.7685, 0.3392, 0.7970, 0.3212, 0.3294, 0.7715, 0.7566, + 0.0845, 0.7230, 0.9756, 0.7438, 0.0730, 0.0140, 0.2442, + 0.3170, 0.1531, 0.6572, 0.2241, 0.6559, 0.7594, 0.5163, + 0.0770, 0.8216, 0.1121, 0.2414, 0.3304, 0.4292, 0.6703, + 0.8489, 0.1680, 0.1843, 0.7202, 0.8341, 0.2089, 0.9189, + 0.3321, 0.2930, 0.6854, 0.2746, 0.2375, 0.1258, 0.9921, + 0.2697, 0.2706, 0.4161, 0.9534, 0.4156, 0.0203, 0.7281, + 0.8287, 0.1664, 0.6949, 0.3616, 0.7643, 0.8454, 0.7725, + 0.2379, 0.0818, 0.3522, 0.8770, 0.2477, 0.8016, 0.9264, + 0.5983, 0.8318, 0.9309, 0.1421, 0.5214, 0.4590, 0.4466, + 0.8836, 0.1506, 0.6918, 0.5040, 0.0693, 0.8909, 0.8473, + 0.3000, 0.4501, 0.0317, 0.6074, 0.6142, 0.8717, 0.1123, + 0.2734, 0.6236, 0.7128, 0.9839, 0.1268, 0.4137, 0.8249, + 0.9714, 0.2626, 0.4457, 0.7949, 0.0601, 0.1474, 0.7151, + 0.5980, 0.4383, 0.8719, 0.8532, 0.1146, 0.2021, 0.3510, + 0.7357, 0.8046, 0.4561, 0.1123, 
0.5495, 0.8039, 0.9162, + 0.1794, 0.4379, 0.0797, 0.6351, 0.2957, 0.0267, 0.1551, + 0.6665, 0.5637, 0.9045, 0.5240, 0.2534, 0.4744, 0.8888, + 0.7171, 0.0746, 0.9509, 0.9155, 0.0161, 0.4715, 0.7253, + 0.2094, 0.9385, 0.4007, 0.7703, 0.4403, 0.4696, 0.0635, + 0.6452, 0.9162, 0.8772, 0.6423, 0.8830, 0.1271, 0.2269, + 0.8334, 0.5840, 0.7807, 0.8628, 0.2077, 0.7466, 0.6649, + 0.4775, 0.5364, 0.9753, 0.9333, 0.8689, 0.3676, 0.6997, + 0.6413, 0.1514, 0.4442, 0.4757, 0.6288, 0.8351, 0.5958, + 0.7445, 0.1575, 0.2611, 0.9676, 0.0288, 0.8028, 0.9286, + 0.3416, 0.8684, 0.9159, 0.0786, 0.1630, 0.6937, 0.0573, + 0.2736, 0.8433, 0.1001, 0.2551, 0.8051, 0.8270, 0.2224, + 0.8216, 0.0075, 0.0447, 0.1405, 0.2786, 0.7581, 0.5283, + 0.0187, 0.6181, 0.6165, 0.5798, 0.8513, 0.6720, 0.7857, + 0.0688, 0.4059, 0.2379, 0.9488, 0.1719, 0.2302, 0.7356, + 0.6534, 0.7287, 0.0684, 0.5311, 0.2033, 0.2442, 0.0088, + 0.4083, 0.8184, 0.3400, 0.1257, 0.4984, 0.2301, 0.1742, + 0.7550, 0.8626, 0.6692, 0.1226, 0.7922, 0.4923, 0.4013, + 0.8096, 0.3364, 0.2426, 0.0395, 0.3774, 0.7735, 0.1798, + 0.7857, 0.9528, 0.7629, 0.4383, 0.4673, 0.4848, 0.4766, + 0.7546, 0.5048, 0.0907, 0.0024, 0.6060, 0.4046, 0.6692, + 0.0093, 0.9058, 0.9962, 0.9902, 0.8031, 0.7622, 0.7882, + 0.4125, 0.7561, 0.2878, 0.1422, 0.9781, 0.5586, 0.9705, + 0.5239, 0.5728, 0.1892, 0.6013, 0.2173, 0.0813, 0.3227, + 0.5416, 0.2267, 0.9476, 0.6435, 0.2364, 0.6455, 0.7828, + 0.9169, 0.5877, 0.7036, 0.7500, 0.4633, 0.7657]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.3315, 0.1043, 0.8974, ..., 0.8556, 0.8987, 0.2347]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.653273582458496 seconds + +[16.56, 16.36, 16.24, 15.96, 16.0, 16.12, 16.36, 16.32, 16.32, 16.52] +[16.28, 16.12, 16.4, 18.36, 20.08, 21.52, 22.48, 22.28, 21.04, 19.88, 20.12, 20.12, 20.48, 20.68] +14.219621419906616 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 329192, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.653273582458496, 'TIME_S_1KI': 0.03236188480418265, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 261.9017709636688, 'W': 18.418336412038514} +[16.56, 16.36, 16.24, 15.96, 16.0, 16.12, 16.36, 16.32, 16.32, 16.52, 16.72, 16.56, 16.56, 16.52, 16.56, 16.48, 16.52, 16.4, 16.4, 16.56] +294.86 +14.743 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 329192, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.653273582458496, 'TIME_S_1KI': 0.03236188480418265, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 261.9017709636688, 'W': 18.418336412038514, 'J_1KI': 0.7955897195669057, 'W_1KI': 0.05595013369716917, 'W_D': 3.6753364120385132, 'J_D': 52.261892369985574, 'W_D_1KI': 0.01116471971384029, 'J_D_1KI': 3.39155256319725e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..b6e1857 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 88995, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 
10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.443303108215332, "TIME_S_1KI": 0.11734707689437981, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 270.51667034149176, "W": 19.057403527564766, "J_1KI": 3.039683918663877, "W_1KI": 0.2141401598692597, "W_D": 3.6324035275647653, "J_D": 51.56136334061629, "W_D_1KI": 0.0408158158049864, "J_D_1KI": 0.0004586304377210675} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..635beaf --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.007787942886352539} + +tensor(indices=tensor([[ 81, 6169, 4982, ..., 178, 4013, 1961], + [7045, 7394, 3645, ..., 7651, 4390, 2217]]), + values=tensor([0.8049, 0.4289, 0.7593, ..., 0.5596, 0.6808, 0.2640]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.5931, 0.9373, 0.6013, ..., 0.5668, 0.9421, 0.1475]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.007787942886352539 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 13482 -ss 10000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.5906615257263184} + +tensor(indices=tensor([[3234, 8693, 2147, ..., 5688, 1534, 3090], + [9159, 9880, 3224, ..., 8340, 9421, 7104]]), + values=tensor([0.7853, 0.2358, 0.3716, ..., 0.2457, 0.7101, 0.4461]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.9742, 0.9802, 0.6557, ..., 0.5070, 0.9590, 0.3935]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 1.5906615257263184 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 88995 -ss 10000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.443303108215332} + +tensor(indices=tensor([[5607, 79, 4390, ..., 1599, 3214, 3963], + [3275, 1974, 816, ..., 8032, 506, 939]]), + values=tensor([0.0722, 0.6184, 0.1263, ..., 0.7728, 0.1203, 0.7380]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.1874, 0.1725, 0.9599, ..., 0.6031, 0.8165, 0.4528]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.443303108215332 seconds + +tensor(indices=tensor([[5607, 79, 4390, ..., 1599, 3214, 3963], + [3275, 1974, 816, ..., 8032, 
506, 939]]), + values=tensor([0.0722, 0.6184, 0.1263, ..., 0.7728, 0.1203, 0.7380]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.1874, 0.1725, 0.9599, ..., 0.6031, 0.8165, 0.4528]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.443303108215332 seconds + +[16.72, 16.4, 16.56, 16.64, 16.76, 17.16, 17.32, 17.12, 16.88, 16.68] +[16.28, 16.28, 16.32, 19.72, 20.96, 23.0, 24.12, 24.64, 21.84, 21.0, 20.32, 20.2, 20.08, 20.0] +14.194833517074585 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 88995, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.443303108215332, 'TIME_S_1KI': 0.11734707689437981, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 270.51667034149176, 'W': 19.057403527564766} +[16.72, 16.4, 16.56, 16.64, 16.76, 17.16, 17.32, 17.12, 16.88, 16.68, 16.76, 16.76, 16.76, 16.72, 16.28, 16.64, 17.52, 18.2, 19.48, 20.44] +308.5 +15.425 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 88995, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.443303108215332, 'TIME_S_1KI': 0.11734707689437981, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 270.51667034149176, 'W': 19.057403527564766, 'J_1KI': 3.039683918663877, 'W_1KI': 0.2141401598692597, 'W_D': 3.6324035275647653, 'J_D': 51.56136334061629, 'W_D_1KI': 0.0408158158049864, 'J_D_1KI': 0.0004586304377210675} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..e0dbcb2 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.64857006072998, "TIME_S_1KI": 1264.857006072998, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 612.8634984588622, "W": 24.05277526457356, "J_1KI": 61286.34984588622, "W_1KI": 2405.277526457356, "W_D": 9.193775264573558, "J_D": 234.25692921972256, "W_D_1KI": 919.3775264573558, "J_D_1KI": 91937.75264573557} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..8dab665 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.64857006072998} + +tensor(indices=tensor([[490436, 434679, 436915, ..., 139734, 282058, 357713], + [274216, 175918, 137273, ..., 106291, 40355, 344584]]), + values=tensor([0.5214, 0.8195, 0.0840, ..., 0.5932, 
0.8280, 0.8598]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3817, 0.5445, 0.8793, ..., 0.8491, 0.3699, 0.3958]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 12.64857006072998 seconds + +tensor(indices=tensor([[490436, 434679, 436915, ..., 139734, 282058, 357713], + [274216, 175918, 137273, ..., 106291, 40355, 344584]]), + values=tensor([0.5214, 0.8195, 0.0840, ..., 0.5932, 0.8280, 0.8598]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3817, 0.5445, 0.8793, ..., 0.8491, 0.3699, 0.3958]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 12.64857006072998 seconds + +[16.8, 16.6, 16.64, 16.64, 16.6, 16.64, 16.76, 16.84, 16.84, 16.92] +[16.92, 16.96, 16.84, 20.04, 22.04, 23.48, 25.72, 26.48, 26.04, 26.16, 27.4, 28.0, 28.52, 28.52, 26.8, 25.6, 24.84, 24.76, 25.76, 26.56, 27.0, 26.56, 28.36, 26.72, 25.76] +25.479949474334717 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.64857006072998, 'TIME_S_1KI': 1264.857006072998, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 612.8634984588622, 'W': 24.05277526457356} +[16.8, 16.6, 16.64, 16.64, 16.6, 16.64, 16.76, 16.84, 16.84, 16.92, 16.36, 16.44, 16.12, 16.12, 16.2, 16.48, 16.28, 16.4, 16.48, 16.12] +297.18000000000006 +14.859000000000004 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.64857006072998, 'TIME_S_1KI': 1264.857006072998, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 612.8634984588622, 'W': 24.05277526457356, 'J_1KI': 61286.34984588622, 'W_1KI': 2405.277526457356, 'W_D': 9.193775264573558, 'J_D': 234.25692921972256, 'W_D_1KI': 919.3775264573558, 'J_D_1KI': 91937.75264573557} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..6ad14ae --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 85, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.22414517402649, "TIME_S_1KI": 120.28406087089988, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 441.509245891571, "W": 30.226765643160533, "J_1KI": 5194.226422253776, "W_1KI": 355.6090075665945, "W_D": 15.141765643160534, "J_D": 221.1691985011101, "W_D_1KI": 178.13841933130038, "J_D_1KI": 2095.746109780004} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..6f69b65 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 
'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.2221043109893799} + +tensor(indices=tensor([[453456, 92215, 427412, ..., 418685, 103637, 233573], + [ 33636, 48718, 269177, ..., 336146, 311811, 477976]]), + values=tensor([0.0230, 0.0242, 0.1102, ..., 0.5701, 0.0679, 0.6937]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.0691, 0.5190, 0.0132, ..., 0.3549, 0.1733, 0.7132]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 1.2221043109893799 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 85 -ss 500000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.22414517402649} + +tensor(indices=tensor([[260240, 85351, 123903, ..., 321649, 47553, 249584], + [344184, 213565, 477901, ..., 246665, 325062, 34759]]), + values=tensor([0.5697, 0.5352, 0.0767, ..., 0.7613, 0.3405, 0.0952]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.6035, 0.7626, 0.1866, ..., 0.6682, 0.2071, 0.1192]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.22414517402649 seconds + +tensor(indices=tensor([[260240, 85351, 123903, ..., 321649, 47553, 249584], + [344184, 213565, 477901, ..., 246665, 325062, 34759]]), + values=tensor([0.5697, 0.5352, 0.0767, ..., 0.7613, 0.3405, 0.0952]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.6035, 0.7626, 0.1866, ..., 0.6682, 0.2071, 0.1192]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.22414517402649 seconds + +[16.68, 16.56, 16.32, 16.68, 16.72, 16.88, 16.88, 16.8, 16.92, 16.84] +[16.84, 16.72, 16.88, 18.28, 18.96, 23.16, 28.72, 34.92, 39.92, 45.28, 46.4, 46.72, 46.36, 45.88] +14.606565952301025 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 85, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.22414517402649, 'TIME_S_1KI': 120.28406087089988, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 441.509245891571, 'W': 30.226765643160533} +[16.68, 16.56, 16.32, 16.68, 16.72, 16.88, 16.88, 16.8, 16.92, 16.84, 17.0, 16.96, 16.84, 16.84, 16.68, 16.68, 16.68, 16.88, 16.68, 16.88] +301.7 +15.084999999999999 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 85, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.22414517402649, 'TIME_S_1KI': 120.28406087089988, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 441.509245891571, 'W': 30.226765643160533, 'J_1KI': 5194.226422253776, 'W_1KI': 355.6090075665945, 'W_D': 
15.141765643160534, 'J_D': 221.1691985011101, 'W_D_1KI': 178.13841933130038, 'J_D_1KI': 2095.746109780004} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..0e74a30 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 16, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.14887547492981, "TIME_S_1KI": 634.3047171831131, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 446.6620703125, "W": 24.244680563469874, "J_1KI": 27916.37939453125, "W_1KI": 1515.2925352168672, "W_D": 9.130680563469873, "J_D": 168.21540185546877, "W_D_1KI": 570.6675352168671, "J_D_1KI": 35666.720951054194} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..867dfb1 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.252246856689453} + +tensor(indices=tensor([[168482, 109157, 139135, ..., 23544, 376992, 400952], + [430905, 452074, 310643, ..., 282410, 427036, 174169]]), + values=tensor([0.7070, 0.6863, 0.4236, ..., 0.8628, 0.0765, 0.6124]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3127, 0.2848, 0.1569, ..., 0.6228, 0.9606, 0.0654]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 6.252246856689453 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 500000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.14887547492981} + +tensor(indices=tensor([[416108, 2264, 471967, ..., 939, 148738, 423683], + [ 3445, 337531, 280778, ..., 50285, 129885, 10398]]), + values=tensor([0.8474, 0.6306, 0.8958, ..., 0.7968, 0.8541, 0.8366]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.5104, 0.4137, 0.5956, ..., 0.5821, 0.0614, 0.8299]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.14887547492981 seconds + +tensor(indices=tensor([[416108, 2264, 471967, ..., 939, 148738, 423683], + [ 3445, 337531, 280778, ..., 50285, 129885, 10398]]), + values=tensor([0.8474, 0.6306, 0.8958, ..., 0.7968, 0.8541, 0.8366]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.5104, 0.4137, 0.5956, ..., 0.5821, 
0.0614, 0.8299]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.14887547492981 seconds + +[17.48, 17.16, 17.32, 16.84, 16.44, 16.72, 17.0, 17.0, 17.16, 17.2] +[16.96, 16.72, 19.72, 19.72, 21.8, 23.0, 25.28, 28.4, 26.32, 28.6, 30.04, 29.48, 29.0, 29.36, 28.12, 28.16, 28.32, 28.32] +18.423095703125 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 16, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.14887547492981, 'TIME_S_1KI': 634.3047171831131, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 446.6620703125, 'W': 24.244680563469874} +[17.48, 17.16, 17.32, 16.84, 16.44, 16.72, 17.0, 17.0, 17.16, 17.2, 16.28, 16.28, 16.52, 16.52, 16.6, 16.84, 16.72, 16.68, 16.68, 16.64] +302.28000000000003 +15.114 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 16, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.14887547492981, 'TIME_S_1KI': 634.3047171831131, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 446.6620703125, 'W': 24.244680563469874, 'J_1KI': 27916.37939453125, 'W_1KI': 1515.2925352168672, 'W_D': 9.130680563469873, 'J_D': 168.21540185546877, 'W_D_1KI': 570.6675352168671, 'J_D_1KI': 35666.720951054194} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..55dea33 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1778, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.44342303276062, "TIME_S_1KI": 5.873691244522284, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 359.84609330177307, "W": 26.690882073452276, "J_1KI": 202.3881289661266, "W_1KI": 15.011744698229625, "W_D": 11.780882073452275, "J_D": 158.82968491315842, "W_D_1KI": 6.625917926576083, "J_D_1KI": 3.7266130070731625} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..23ed816 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.09349679946899414} + +tensor(indices=tensor([[11244, 30890, 16389, ..., 29148, 5026, 856], + [10492, 33035, 45125, ..., 39727, 41928, 8324]]), + values=tensor([0.4818, 0.4973, 0.6152, ..., 0.1205, 0.4026, 0.7356]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.5690, 0.0428, 0.6445, ..., 0.6439, 0.9256, 0.1962]) +Matrix Type: synthetic +Matrix 
Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.09349679946899414 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1123 -ss 50000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.628721237182617} + +tensor(indices=tensor([[13208, 18123, 49772, ..., 18808, 48208, 10205], + [37715, 8739, 28481, ..., 39488, 2329, 6895]]), + values=tensor([0.7313, 0.0416, 0.7748, ..., 0.4902, 0.4676, 0.4179]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.8552, 0.4811, 0.9521, ..., 0.6838, 0.6988, 0.7549]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 6.628721237182617 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1778 -ss 50000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.44342303276062} + +tensor(indices=tensor([[21532, 45538, 16871, ..., 24971, 35256, 43346], + [38506, 15721, 34837, ..., 48627, 30893, 13903]]), + values=tensor([0.0933, 0.7831, 0.5713, ..., 0.5170, 0.2974, 0.2342]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.2432, 0.4339, 0.6151, ..., 0.5604, 0.5014, 0.9717]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.44342303276062 seconds + +tensor(indices=tensor([[21532, 45538, 16871, ..., 24971, 35256, 43346], + [38506, 15721, 34837, ..., 48627, 30893, 13903]]), + values=tensor([0.0933, 0.7831, 0.5713, ..., 0.5170, 0.2974, 0.2342]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.2432, 0.4339, 0.6151, ..., 0.5604, 0.5014, 0.9717]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.44342303276062 seconds + +[16.72, 16.92, 16.64, 16.64, 16.2, 16.24, 16.2, 16.44, 16.68, 16.84] +[17.0, 17.04, 17.4, 18.68, 21.68, 26.12, 31.28, 31.28, 34.56, 38.6, 38.56, 38.32, 38.52] +13.481985807418823 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1778, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.44342303276062, 'TIME_S_1KI': 5.873691244522284, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 359.84609330177307, 'W': 26.690882073452276} +[16.72, 16.92, 16.64, 16.64, 16.2, 16.24, 16.2, 16.44, 16.68, 16.84, 16.68, 16.4, 16.36, 16.32, 16.48, 16.96, 16.68, 16.72, 16.8, 16.8] +298.2 +14.91 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1778, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.44342303276062, 'TIME_S_1KI': 5.873691244522284, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
359.84609330177307, 'W': 26.690882073452276, 'J_1KI': 202.3881289661266, 'W_1KI': 15.011744698229625, 'W_D': 11.780882073452275, 'J_D': 158.82968491315842, 'W_D_1KI': 6.625917926576083, 'J_D_1KI': 3.7266130070731625} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..d13001e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 184, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.326260328292847, "TIME_S_1KI": 56.120980045069814, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 411.99043563842775, "W": 28.23306413648923, "J_1KI": 2239.0784545566726, "W_1KI": 153.4405659591806, "W_D": 13.442064136489229, "J_D": 196.15305773043633, "W_D_1KI": 73.0546963939632, "J_D_1KI": 397.0363934454522} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..9a3e8b7 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.5684177875518799} + +tensor(indices=tensor([[41162, 13603, 11375, ..., 40680, 4766, 38183], + [45410, 46762, 37555, ..., 26435, 44149, 44106]]), + values=tensor([0.8659, 0.4595, 0.1004, ..., 0.5826, 0.6457, 0.9200]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5708, 0.4094, 0.8205, ..., 0.4535, 0.1673, 0.5749]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 0.5684177875518799 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 184 -ss 50000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.326260328292847} + +tensor(indices=tensor([[12504, 44129, 13122, ..., 38229, 48871, 14747], + [19615, 42550, 37015, ..., 26953, 44143, 28147]]), + values=tensor([0.5805, 0.8597, 0.6297, ..., 0.9023, 0.5896, 0.7278]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.6769, 0.6786, 0.2415, ..., 0.7515, 0.6628, 0.7388]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.326260328292847 seconds + +tensor(indices=tensor([[12504, 44129, 13122, ..., 38229, 48871, 14747], + [19615, 42550, 37015, ..., 26953, 44143, 28147]]), + values=tensor([0.5805, 0.8597, 0.6297, ..., 0.9023, 0.5896, 0.7278]), + size=(50000, 50000), nnz=2500000, 
layout=torch.sparse_coo) +tensor([0.6769, 0.6786, 0.2415, ..., 0.7515, 0.6628, 0.7388]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.326260328292847 seconds + +[16.4, 16.44, 16.28, 16.4, 16.56, 16.32, 16.56, 16.96, 16.88, 16.84] +[16.84, 16.56, 19.8, 19.8, 22.36, 25.28, 30.6, 35.08, 35.84, 38.48, 39.2, 39.0, 38.92, 39.04] +14.592480421066284 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 184, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.326260328292847, 'TIME_S_1KI': 56.120980045069814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 411.99043563842775, 'W': 28.23306413648923} +[16.4, 16.44, 16.28, 16.4, 16.56, 16.32, 16.56, 16.96, 16.88, 16.84, 16.44, 16.6, 16.4, 16.64, 16.32, 16.08, 16.16, 16.16, 16.08, 16.28] +295.82 +14.791 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 184, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.326260328292847, 'TIME_S_1KI': 56.120980045069814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 411.99043563842775, 'W': 28.23306413648923, 'J_1KI': 2239.0784545566726, 'W_1KI': 153.4405659591806, 'W_D': 13.442064136489229, 'J_D': 196.15305773043633, 'W_D_1KI': 73.0546963939632, 'J_D_1KI': 397.0363934454522} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..3eecef1 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 18, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.342544078826904, "TIME_S_1KI": 574.5857821570503, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 530.0986167526245, "W": 23.47686829958945, "J_1KI": 29449.92315292358, "W_1KI": 1304.270461088303, "W_D": 8.53086829958945, "J_D": 192.62371060752864, "W_D_1KI": 473.9371277549694, "J_D_1KI": 26329.840430831635} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..f5624bc --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.938621759414673} + +tensor(indices=tensor([[33962, 4303, 19644, ..., 22831, 19183, 15150], + [16907, 48572, 8707, ..., 42635, 21437, 36778]]), + values=tensor([0.1564, 0.4173, 0.0935, ..., 0.7954, 0.7396, 0.5651]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.1827, 0.1798, 0.1712, ..., 0.4482, 0.1689, 0.7752]) +Matrix Type: synthetic 
+Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 5.938621759414673 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 17 -ss 50000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.826071500778198} + +tensor(indices=tensor([[15032, 14866, 34006, ..., 33183, 39278, 1887], + [31823, 10906, 17965, ..., 39818, 15705, 29697]]), + values=tensor([0.0656, 0.1086, 0.8160, ..., 0.5944, 0.0846, 0.1136]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.0674, 0.8957, 0.9270, ..., 0.0566, 0.4698, 0.5164]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 9.826071500778198 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 18 -ss 50000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.342544078826904} + +tensor(indices=tensor([[11244, 18386, 20275, ..., 48502, 35178, 23903], + [28265, 10496, 16525, ..., 34799, 38517, 26200]]), + values=tensor([0.9523, 0.0079, 0.4231, ..., 0.8081, 0.4550, 0.9210]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3725, 0.3516, 0.7851, ..., 0.0259, 0.1462, 0.4294]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.342544078826904 seconds + +tensor(indices=tensor([[11244, 18386, 20275, ..., 48502, 35178, 23903], + [28265, 10496, 16525, ..., 34799, 38517, 26200]]), + values=tensor([0.9523, 0.0079, 0.4231, ..., 0.8081, 0.4550, 0.9210]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3725, 0.3516, 0.7851, ..., 0.0259, 0.1462, 0.4294]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.342544078826904 seconds + +[16.4, 16.36, 16.2, 16.16, 16.04, 16.32, 16.56, 16.56, 16.24, 16.4] +[16.56, 16.4, 16.64, 18.24, 20.36, 21.12, 23.24, 23.36, 25.4, 25.52, 27.04, 27.04, 27.36, 28.08, 27.72, 27.6, 27.68, 28.04, 26.96, 28.32, 27.24, 26.36] +22.57961368560791 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 18, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.342544078826904, 'TIME_S_1KI': 574.5857821570503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 530.0986167526245, 'W': 23.47686829958945} +[16.4, 16.36, 16.2, 16.16, 16.04, 16.32, 16.56, 16.56, 16.24, 16.4, 17.28, 16.76, 16.88, 16.8, 16.72, 16.72, 16.84, 17.16, 17.08, 16.96] +298.92 +14.946000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 18, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.342544078826904, 
'TIME_S_1KI': 574.5857821570503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 530.0986167526245, 'W': 23.47686829958945, 'J_1KI': 29449.92315292358, 'W_1KI': 1304.270461088303, 'W_D': 8.53086829958945, 'J_D': 192.62371060752864, 'W_D_1KI': 473.9371277549694, 'J_D_1KI': 26329.840430831635} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..c4000b3 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 15157, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.835047960281372, "TIME_S_1KI": 0.714854388090082, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 415.2423467063904, "W": 28.507699379354285, "J_1KI": 27.396077502565838, "W_1KI": 1.8808272995549438, "W_D": 13.533699379354285, "J_D": 197.1314842042923, "W_D_1KI": 0.8929009289011206, "J_D_1KI": 0.0589101358383005} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..1315030 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,90 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.013768911361694336} + +tensor(indices=tensor([[33033, 11109, 18051, ..., 27132, 1656, 18047], + [ 5955, 40028, 25745, ..., 29029, 19940, 42153]]), + values=tensor([0.4677, 0.6824, 0.9028, ..., 0.7333, 0.7294, 0.5422]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.8429, 0.2047, 0.7114, ..., 0.4629, 0.9550, 0.3146]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.013768911361694336 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 7625 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.658907651901245} + +tensor(indices=tensor([[29559, 32038, 25388, ..., 565, 31919, 43432], + [ 9509, 32743, 47185, ..., 35573, 4789, 3963]]), + values=tensor([0.3165, 0.7586, 0.5704, ..., 0.3999, 0.7393, 0.5699]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.1032, 0.7630, 0.8261, ..., 0.4542, 0.5917, 0.7318]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 5.658907651901245 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 14148 
-ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.800445079803467} + +tensor(indices=tensor([[ 6578, 2190, 46333, ..., 24615, 28424, 8843], + [42402, 36, 29812, ..., 27551, 4856, 29012]]), + values=tensor([0.2368, 0.8745, 0.6718, ..., 0.0545, 0.9603, 0.8446]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.2697, 0.1796, 0.1572, ..., 0.6469, 0.0377, 0.0527]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.800445079803467 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 15157 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.835047960281372} + +tensor(indices=tensor([[42461, 6468, 45164, ..., 16329, 24795, 38528], + [25076, 47168, 16720, ..., 19861, 7503, 2163]]), + values=tensor([0.0734, 0.7829, 0.7930, ..., 0.6312, 0.4098, 0.4089]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5159, 0.5899, 0.0471, ..., 0.7528, 0.0753, 0.7335]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.835047960281372 seconds + +tensor(indices=tensor([[42461, 6468, 45164, ..., 16329, 24795, 38528], + [25076, 47168, 16720, ..., 19861, 7503, 2163]]), + values=tensor([0.0734, 0.7829, 0.7930, ..., 0.6312, 0.4098, 0.4089]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5159, 0.5899, 0.0471, ..., 0.7528, 0.0753, 0.7335]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.835047960281372 seconds + +[16.48, 16.52, 16.72, 16.72, 16.72, 16.76, 16.96, 16.96, 16.96, 16.76] +[16.48, 16.8, 17.4, 19.16, 23.32, 27.56, 32.6, 36.2, 36.2, 39.84, 38.96, 38.68, 38.88, 38.84] +14.565971851348877 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 15157, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.835047960281372, 'TIME_S_1KI': 0.714854388090082, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 415.2423467063904, 'W': 28.507699379354285} +[16.48, 16.52, 16.72, 16.72, 16.72, 16.76, 16.96, 16.96, 16.96, 16.76, 16.08, 16.12, 16.2, 16.12, 16.28, 16.72, 16.68, 16.88, 17.04, 16.92] +299.48 +14.974 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 15157, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.835047960281372, 'TIME_S_1KI': 0.714854388090082, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 415.2423467063904, 'W': 28.507699379354285, 'J_1KI': 27.396077502565838, 'W_1KI': 1.8808272995549438, 'W_D': 13.533699379354285, 'J_D': 197.1314842042923, 'W_D_1KI': 0.8929009289011206, 'J_D_1KI': 0.0589101358383005} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_5e-05.json 
b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..d19b064 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3481, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.402421712875366, "TIME_S_1KI": 2.9883429224002773, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 353.7775730323791, "W": 26.24712784244181, "J_1KI": 101.63101782027553, "W_1KI": 7.540111416961163, "W_D": 11.136127842441809, "J_D": 150.10070072150228, "W_D_1KI": 3.199117449710373, "J_D_1KI": 0.9190225365442037} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..9f73158 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.036684274673461914} + +tensor(indices=tensor([[18565, 37317, 37229, ..., 15515, 8032, 29397], + [22786, 29306, 38320, ..., 30817, 20212, 40410]]), + values=tensor([0.1056, 0.5534, 0.3575, ..., 0.6610, 0.1768, 0.9441]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.4641, 0.9395, 0.1594, ..., 0.6973, 0.4970, 0.2160]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.036684274673461914 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 2862 -ss 50000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.631719589233398} + +tensor(indices=tensor([[42858, 27308, 9084, ..., 37217, 18931, 37873], + [ 81, 3375, 13530, ..., 20114, 31496, 1187]]), + values=tensor([0.8565, 0.1635, 0.7362, ..., 0.0710, 0.2247, 0.4334]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.4135, 0.1279, 0.8547, ..., 0.5297, 0.7620, 0.4742]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 8.631719589233398 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 3481 -ss 50000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.402421712875366} + +tensor(indices=tensor([[33914, 31948, 14748, ..., 14390, 6168, 12016], + [40115, 2160, 12920, ..., 12525, 42484, 30064]]), + 
values=tensor([0.4182, 0.9936, 0.1094, ..., 0.4626, 0.3863, 0.9938]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.8014, 0.1671, 0.7924, ..., 0.4574, 0.3021, 0.5942]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.402421712875366 seconds + +tensor(indices=tensor([[33914, 31948, 14748, ..., 14390, 6168, 12016], + [40115, 2160, 12920, ..., 12525, 42484, 30064]]), + values=tensor([0.4182, 0.9936, 0.1094, ..., 0.4626, 0.3863, 0.9938]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.8014, 0.1671, 0.7924, ..., 0.4574, 0.3021, 0.5942]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.402421712875366 seconds + +[17.04, 17.0, 17.0, 17.04, 16.72, 16.36, 16.56, 16.68, 16.68, 16.76] +[16.56, 16.44, 18.44, 19.4, 22.52, 22.52, 26.96, 32.0, 34.36, 37.84, 38.6, 38.6, 38.64] +13.478715658187866 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3481, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.402421712875366, 'TIME_S_1KI': 2.9883429224002773, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.7775730323791, 'W': 26.24712784244181} +[17.04, 17.0, 17.0, 17.04, 16.72, 16.36, 16.56, 16.68, 16.68, 16.76, 16.28, 16.48, 16.64, 16.6, 16.8, 17.0, 17.0, 17.04, 17.04, 17.08] +302.22 +15.111 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3481, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.402421712875366, 'TIME_S_1KI': 2.9883429224002773, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.7775730323791, 'W': 26.24712784244181, 'J_1KI': 101.63101782027553, 'W_1KI': 7.540111416961163, 'W_D': 11.136127842441809, 'J_D': 150.10070072150228, 'W_D_1KI': 3.199117449710373, 'J_D_1KI': 0.9190225365442037} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..df2aa61 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 168281, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.558050394058228, "TIME_S_1KI": 0.06274059694236561, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 258.021646194458, "W": 18.156079475026804, "J_1KI": 1.5332785412165248, "W_1KI": 0.10789144035884506, "W_D": 3.2230794750268057, "J_D": 45.80417667293551, "W_D_1KI": 0.01915296126732552, "J_D_1KI": 0.00011381535210347883} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..f840f81 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,90 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py 
synthetic coo 10 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0070648193359375} + +tensor(indices=tensor([[3956, 2230, 2243, ..., 553, 2941, 3327], + [ 100, 3363, 3805, ..., 1214, 2035, 3498]]), + values=tensor([0.6449, 0.6155, 0.6951, ..., 0.1054, 0.5097, 0.8278]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.7934, 0.1148, 0.8056, ..., 0.2671, 0.8625, 0.6224]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.0070648193359375 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 14862 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.0316338539123535} + +tensor(indices=tensor([[1030, 4118, 1650, ..., 2883, 3187, 2353], + [ 667, 828, 1877, ..., 3989, 4580, 4263]]), + values=tensor([0.1309, 0.0428, 0.8693, ..., 0.4535, 0.1348, 0.0724]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.9292, 0.7381, 0.6360, ..., 0.7728, 0.3424, 0.9150]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 1.0316338539123535 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 151265 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.438246726989746} + +tensor(indices=tensor([[3854, 4647, 3797, ..., 4290, 1182, 2413], + [2979, 3440, 283, ..., 2311, 305, 3110]]), + values=tensor([0.3777, 0.1018, 0.5825, ..., 0.5209, 0.5950, 0.9338]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.5017, 0.1845, 0.8952, ..., 0.2007, 0.0112, 0.1540]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 9.438246726989746 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 168281 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.558050394058228} + +tensor(indices=tensor([[3609, 3388, 2152, ..., 226, 3809, 171], + [2468, 2509, 1869, ..., 1996, 758, 1558]]), + values=tensor([0.0830, 0.3902, 0.7345, ..., 0.8874, 0.6618, 0.8227]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.9648, 0.1928, 0.6569, ..., 0.1801, 0.7572, 0.2717]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.558050394058228 seconds + +tensor(indices=tensor([[3609, 3388, 2152, ..., 226, 3809, 171], + [2468, 2509, 1869, ..., 1996, 758, 1558]]), + values=tensor([0.0830, 0.3902, 0.7345, 
..., 0.8874, 0.6618, 0.8227]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.9648, 0.1928, 0.6569, ..., 0.1801, 0.7572, 0.2717]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.558050394058228 seconds + +[16.48, 16.36, 16.52, 16.56, 16.68, 16.6, 16.88, 17.04, 16.92, 16.8] +[16.56, 16.64, 16.64, 17.64, 19.2, 21.16, 22.0, 22.24, 21.16, 19.8, 19.84, 19.76, 19.72, 19.6] +14.211308479309082 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 168281, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.558050394058228, 'TIME_S_1KI': 0.06274059694236561, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 258.021646194458, 'W': 18.156079475026804} +[16.48, 16.36, 16.52, 16.56, 16.68, 16.6, 16.88, 17.04, 16.92, 16.8, 16.88, 16.6, 16.6, 16.68, 16.6, 16.4, 16.4, 16.28, 16.24, 16.44] +298.65999999999997 +14.932999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 168281, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.558050394058228, 'TIME_S_1KI': 0.06274059694236561, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 258.021646194458, 'W': 18.156079475026804, 'J_1KI': 1.5332785412165248, 'W_1KI': 0.10789144035884506, 'W_D': 3.2230794750268057, 'J_D': 45.80417667293551, 'W_D_1KI': 0.01915296126732552, 'J_D_1KI': 0.00011381535210347883} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..4a3f027 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 19579, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.580077886581421, "TIME_S_1KI": 0.5403788695327351, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 279.89611061096195, "W": 19.667352009451033, "J_1KI": 14.295730660961334, "W_1KI": 1.004512590502632, "W_D": 4.875352009451035, "J_D": 69.38361934280404, "W_D_1KI": 0.2490092450815177, "J_D_1KI": 0.012718179941851867} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..e1ac0b4 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.011558294296264648} + +tensor(indices=tensor([[3102, 170, 623, ..., 4318, 3355, 165], + [2789, 3644, 4439, ..., 3042, 2569, 785]]), + values=tensor([0.9058, 0.9769, 0.0114, ..., 0.8908, 0.0153, 0.7725]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5476, 0.7239, 
0.9140, ..., 0.1881, 0.6789, 0.5908]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.011558294296264648 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 9084 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 4.8714423179626465} + +tensor(indices=tensor([[3623, 2606, 2418, ..., 4829, 281, 3438], + [4015, 2226, 570, ..., 2552, 1179, 1373]]), + values=tensor([0.3460, 0.2971, 0.5222, ..., 0.0239, 0.0798, 0.1975]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.1235, 0.5688, 0.3759, ..., 0.8307, 0.0212, 0.3099]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 4.8714423179626465 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 19579 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.580077886581421} + +tensor(indices=tensor([[2638, 2285, 4642, ..., 1060, 4675, 1349], + [4205, 3626, 278, ..., 1169, 1702, 4020]]), + values=tensor([0.7579, 0.0941, 0.0015, ..., 0.8515, 0.3315, 0.3835]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.3468, 0.9990, 0.4644, ..., 0.0220, 0.7046, 0.8273]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.580077886581421 seconds + +tensor(indices=tensor([[2638, 2285, 4642, ..., 1060, 4675, 1349], + [4205, 3626, 278, ..., 1169, 1702, 4020]]), + values=tensor([0.7579, 0.0941, 0.0015, ..., 0.8515, 0.3315, 0.3835]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.3468, 0.9990, 0.4644, ..., 0.0220, 0.7046, 0.8273]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.580077886581421 seconds + +[16.72, 16.6, 16.6, 16.48, 16.04, 16.2, 16.68, 16.64, 16.8, 16.8] +[16.52, 15.96, 19.0, 21.24, 23.64, 24.6, 24.6, 25.52, 21.72, 21.4, 19.92, 19.88, 19.76, 19.68] +14.231509685516357 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 19579, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.580077886581421, 'TIME_S_1KI': 0.5403788695327351, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 279.89611061096195, 'W': 19.667352009451033} +[16.72, 16.6, 16.6, 16.48, 16.04, 16.2, 16.68, 16.64, 16.8, 16.8, 16.4, 16.28, 16.4, 16.2, 16.24, 16.24, 16.28, 16.32, 16.6, 16.56] +295.84 +14.791999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 19579, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.580077886581421, 'TIME_S_1KI': 0.5403788695327351, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
279.89611061096195, 'W': 19.667352009451033, 'J_1KI': 14.295730660961334, 'W_1KI': 1.004512590502632, 'W_D': 4.875352009451035, 'J_D': 69.38361934280404, 'W_D_1KI': 0.2490092450815177, 'J_D_1KI': 0.012718179941851867} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..77e8d55 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1968, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.277064561843872, "TIME_S_1KI": 5.222085651343431, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 273.486970872879, "W": 19.269726615017994, "J_1KI": 138.9669567443491, "W_1KI": 9.791527751533533, "W_D": 4.205726615017992, "J_D": 59.6900752792358, "W_D_1KI": 2.1370562068180856, "J_D_1KI": 1.0859025441148809} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..c0a6151 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.0581815242767334} + +tensor(indices=tensor([[1705, 1843, 4243, ..., 1472, 574, 2171], + [ 999, 1582, 2374, ..., 1857, 3396, 926]]), + values=tensor([0.2793, 0.4750, 0.9003, ..., 0.4581, 0.2120, 0.3581]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.4895, 0.6805, 0.6750, ..., 0.8029, 0.3171, 0.4262]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.0581815242767334 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1804 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.62302303314209} + +tensor(indices=tensor([[3263, 2220, 2592, ..., 2483, 3191, 2817], + [4244, 110, 4304, ..., 978, 1265, 521]]), + values=tensor([0.5038, 0.7305, 0.7346, ..., 0.1644, 0.1385, 0.4258]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.1558, 0.5415, 0.2232, ..., 0.0177, 0.6458, 0.0827]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 9.62302303314209 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1968 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, 
"MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.277064561843872} + +tensor(indices=tensor([[4412, 41, 1508, ..., 2890, 4838, 4018], + [4820, 1611, 2655, ..., 2263, 2126, 3753]]), + values=tensor([0.2794, 0.8020, 0.5426, ..., 0.2909, 0.4609, 0.7425]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.1277, 0.2756, 0.3462, ..., 0.0284, 0.7824, 0.0281]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.277064561843872 seconds + +tensor(indices=tensor([[4412, 41, 1508, ..., 2890, 4838, 4018], + [4820, 1611, 2655, ..., 2263, 2126, 3753]]), + values=tensor([0.2794, 0.8020, 0.5426, ..., 0.2909, 0.4609, 0.7425]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.1277, 0.2756, 0.3462, ..., 0.0284, 0.7824, 0.0281]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.277064561843872 seconds + +[16.44, 16.4, 16.68, 17.16, 17.04, 17.0, 16.84, 16.64, 16.64, 16.2] +[16.2, 16.2, 16.4, 20.48, 22.72, 24.28, 25.16, 22.6, 21.8, 20.16, 20.4, 20.4, 20.52, 20.6] +14.19257140159607 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1968, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.277064561843872, 'TIME_S_1KI': 5.222085651343431, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 273.486970872879, 'W': 19.269726615017994} +[16.44, 16.4, 16.68, 17.16, 17.04, 17.0, 16.84, 16.64, 16.64, 16.2, 16.44, 16.8, 16.68, 16.8, 17.0, 16.8, 16.72, 16.68, 16.52, 16.68] +301.28000000000003 +15.064000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1968, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.277064561843872, 'TIME_S_1KI': 5.222085651343431, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 273.486970872879, 'W': 19.269726615017994, 'J_1KI': 138.9669567443491, 'W_1KI': 9.791527751533533, 'W_D': 4.205726615017992, 'J_D': 59.6900752792358, 'W_D_1KI': 2.1370562068180856, 'J_D_1KI': 1.0859025441148809} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..24be72e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 370, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.596680641174316, "TIME_S_1KI": 28.639677408579235, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 258.78833953857423, "W": 19.654690540248374, "J_1KI": 699.4279446988493, "W_1KI": 53.120785243914526, "W_D": 4.792690540248376, "J_D": 63.10414423942572, "W_D_1KI": 12.953217676346963, "J_D_1KI": 35.00869642255936} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..42ea30e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,56 @@ 
+['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.2832469940185547} + +tensor(indices=tensor([[4216, 4421, 4642, ..., 4167, 2972, 4683], + [ 724, 4244, 4649, ..., 3333, 3645, 147]]), + values=tensor([0.8581, 0.8007, 0.7199, ..., 0.3825, 0.8322, 0.5968]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.5296, 0.9285, 0.1851, ..., 0.3657, 0.2915, 0.3561]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.2832469940185547 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 370 -ss 5000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.596680641174316} + +tensor(indices=tensor([[ 903, 730, 2755, ..., 910, 2642, 1332], + [1147, 2772, 1382, ..., 3911, 708, 3773]]), + values=tensor([0.2771, 0.3229, 0.1452, ..., 0.5103, 0.2731, 0.0454]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.0202, 0.9007, 0.6371, ..., 0.8745, 0.2608, 0.1412]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.596680641174316 seconds + +tensor(indices=tensor([[ 903, 730, 2755, ..., 910, 2642, 1332], + [1147, 2772, 1382, ..., 3911, 708, 3773]]), + values=tensor([0.2771, 0.3229, 0.1452, ..., 0.5103, 0.2731, 0.0454]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.0202, 0.9007, 0.6371, ..., 0.8745, 0.2608, 0.1412]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.596680641174316 seconds + +[16.28, 16.2, 16.32, 16.6, 16.8, 16.72, 16.72, 16.84, 16.6, 16.48] +[16.24, 16.28, 16.56, 21.04, 22.72, 24.72, 25.6, 23.56, 22.36, 22.36, 20.8, 20.8, 20.8] +13.166747093200684 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 370, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.596680641174316, 'TIME_S_1KI': 28.639677408579235, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 258.78833953857423, 'W': 19.654690540248374} +[16.28, 16.2, 16.32, 16.6, 16.8, 16.72, 16.72, 16.84, 16.6, 16.48, 16.6, 16.72, 16.48, 16.4, 16.52, 16.56, 16.32, 16.32, 16.32, 16.24] +297.23999999999995 +14.861999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 370, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.596680641174316, 'TIME_S_1KI': 28.639677408579235, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 258.78833953857423, 'W': 19.654690540248374, 'J_1KI': 699.4279446988493, 'W_1KI': 53.120785243914526, 'W_D': 4.792690540248376, 'J_D': 63.10414423942572, 'W_D_1KI': 12.953217676346963, 'J_D_1KI': 35.00869642255936} 
diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..7cb3694 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 188, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.026696681976318, "TIME_S_1KI": 53.33349298923574, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 278.07759874343867, "W": 19.571934998478163, "J_1KI": 1479.136163528929, "W_1KI": 104.10603722594767, "W_D": 4.928934998478162, "J_D": 70.03019419622416, "W_D_1KI": 26.217739353607246, "J_D_1KI": 139.45606039152793} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..8a03cf4 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.5578980445861816} + +tensor(indices=tensor([[3844, 4051, 1353, ..., 2707, 1556, 4908], + [1304, 3255, 2512, ..., 2111, 1142, 1638]]), + values=tensor([0.4560, 0.5837, 0.9413, ..., 0.9865, 0.1177, 0.2599]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1895, 0.4537, 0.0116, ..., 0.1378, 0.1077, 0.1212]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.5578980445861816 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 188 -ss 5000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.026696681976318} + +tensor(indices=tensor([[ 334, 4292, 235, ..., 2755, 3063, 332], + [2623, 4026, 3385, ..., 2226, 3224, 3278]]), + values=tensor([0.5389, 0.7778, 0.1906, ..., 0.1883, 0.4089, 0.2607]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1524, 0.2872, 0.6357, ..., 0.5048, 0.7270, 0.2381]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.026696681976318 seconds + +tensor(indices=tensor([[ 334, 4292, 235, ..., 2755, 3063, 332], + [2623, 4026, 3385, ..., 2226, 3224, 3278]]), + values=tensor([0.5389, 0.7778, 0.1906, ..., 0.1883, 0.4089, 0.2607]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1524, 0.2872, 0.6357, ..., 0.5048, 0.7270, 0.2381]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.026696681976318 seconds + +[16.16, 15.92, 16.0, 16.08, 16.24, 16.52, 16.6, 16.52, 16.52, 16.32] +[15.96, 
16.0, 16.04, 21.04, 22.84, 24.8, 26.08, 23.44, 22.28, 21.04, 20.6, 20.6, 20.64, 20.76] +14.207976818084717 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 188, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.026696681976318, 'TIME_S_1KI': 53.33349298923574, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 278.07759874343867, 'W': 19.571934998478163} +[16.16, 15.92, 16.0, 16.08, 16.24, 16.52, 16.6, 16.52, 16.52, 16.32, 16.56, 16.24, 16.28, 16.12, 15.96, 16.32, 16.32, 16.32, 16.28, 16.2] +292.86 +14.643 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 188, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.026696681976318, 'TIME_S_1KI': 53.33349298923574, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 278.07759874343867, 'W': 19.571934998478163, 'J_1KI': 1479.136163528929, 'W_1KI': 104.10603722594767, 'W_D': 4.928934998478162, 'J_D': 70.03019419622416, 'W_D_1KI': 26.217739353607246, 'J_D_1KI': 139.45606039152793} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..cbbfff1 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 96, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.222102403640747, "TIME_S_1KI": 106.48023337125778, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 283.6666414546967, "W": 18.589544841781695, "J_1KI": 2954.860848486424, "W_1KI": 193.64109210189264, "W_D": 3.844544841781696, "J_D": 58.66572487235075, "W_D_1KI": 40.04734210189267, "J_D_1KI": 417.15981356138195} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..23cc18c --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.2 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 1.0914530754089355} + +tensor(indices=tensor([[1303, 2160, 4369, ..., 4435, 3810, 417], + [3400, 4600, 1739, ..., 224, 551, 4286]]), + values=tensor([0.1494, 0.6110, 0.0642, ..., 0.9959, 0.2030, 0.5785]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.4794, 0.4904, 0.5821, ..., 0.5127, 0.0773, 0.9625]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 1.0914530754089355 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 96 -ss 5000 -sd 0.2 -c 16'] +{"MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.222102403640747} + +tensor(indices=tensor([[ 6, 2868, 3797, ..., 4974, 4960, 931], + [ 444, 3717, 4556, ..., 2406, 4953, 659]]), + values=tensor([0.2759, 0.5310, 0.4190, ..., 0.5786, 0.7798, 0.2127]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.6123, 0.1543, 0.6655, ..., 0.1791, 0.8116, 0.2360]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.222102403640747 seconds + +tensor(indices=tensor([[ 6, 2868, 3797, ..., 4974, 4960, 931], + [ 444, 3717, 4556, ..., 2406, 4953, 659]]), + values=tensor([0.2759, 0.5310, 0.4190, ..., 0.5786, 0.7798, 0.2127]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.6123, 0.1543, 0.6655, ..., 0.1791, 0.8116, 0.2360]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.222102403640747 seconds + +[16.56, 16.44, 16.32, 16.28, 16.72, 16.44, 16.48, 16.44, 16.08, 15.92] +[16.12, 16.12, 16.24, 17.36, 19.12, 21.0, 21.92, 22.48, 21.44, 21.36, 20.68, 20.72, 20.64, 20.76, 20.76] +15.259472131729126 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 96, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.222102403640747, 'TIME_S_1KI': 106.48023337125778, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 283.6666414546967, 'W': 18.589544841781695} +[16.56, 16.44, 16.32, 16.28, 16.72, 16.44, 16.48, 16.44, 16.08, 15.92, 16.52, 16.28, 16.32, 16.12, 16.08, 16.36, 16.6, 16.56, 16.6, 16.56] +294.9 +14.745 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 96, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.222102403640747, 'TIME_S_1KI': 106.48023337125778, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 283.6666414546967, 'W': 18.589544841781695, 'J_1KI': 2954.860848486424, 'W_1KI': 193.64109210189264, 'W_D': 3.844544841781696, 'J_D': 58.66572487235075, 'W_D_1KI': 40.04734210189267, 'J_D_1KI': 417.15981356138195} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..b356e55 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 63, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.021673440933228, "TIME_S_1KI": 159.07418160211472, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 296.1811933517456, "W": 19.37098300965613, "J_1KI": 4701.288783361042, "W_1KI": 307.4759207881925, "W_D": 4.480983009656132, "J_D": 68.51396723270419, "W_D_1KI": 71.12671443898623, "J_D_1KI": 1128.9954672854956} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..fa8e0f5 --- /dev/null +++ 
b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.3 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.6492033004760742} + +tensor(indices=tensor([[ 945, 4589, 2774, ..., 2909, 736, 2667], + [2560, 3363, 4131, ..., 2105, 3960, 2928]]), + values=tensor([0.2201, 0.4510, 0.7924, ..., 0.8323, 0.0651, 0.5160]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.2493, 0.4630, 0.6009, ..., 0.6971, 0.1027, 0.3339]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 1.6492033004760742 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 63 -ss 5000 -sd 0.3 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.021673440933228} + +tensor(indices=tensor([[4166, 4477, 4564, ..., 264, 2117, 2546], + [1294, 2850, 2607, ..., 362, 1048, 4735]]), + values=tensor([0.0149, 0.1469, 0.7394, ..., 0.7181, 0.3269, 0.1660]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.8789, 0.0830, 0.2305, ..., 0.2880, 0.6985, 0.6729]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.021673440933228 seconds + +tensor(indices=tensor([[4166, 4477, 4564, ..., 264, 2117, 2546], + [1294, 2850, 2607, ..., 362, 1048, 4735]]), + values=tensor([0.0149, 0.1469, 0.7394, ..., 0.7181, 0.3269, 0.1660]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.8789, 0.0830, 0.2305, ..., 0.2880, 0.6985, 0.6729]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.021673440933228 seconds + +[17.32, 17.0, 16.68, 16.6, 16.64, 16.76, 16.8, 16.76, 16.76, 16.64] +[16.52, 16.4, 16.68, 18.28, 19.96, 23.0, 23.96, 24.08, 23.76, 22.16, 21.0, 21.0, 20.8, 20.6, 20.56] +15.289941310882568 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 63, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.021673440933228, 'TIME_S_1KI': 159.07418160211472, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.1811933517456, 'W': 19.37098300965613} +[17.32, 17.0, 16.68, 16.6, 16.64, 16.76, 16.8, 16.76, 16.76, 16.64, 16.32, 16.2, 16.4, 16.32, 16.56, 16.16, 16.28, 16.08, 16.36, 16.6] +297.79999999999995 +14.889999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 63, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.021673440933228, 'TIME_S_1KI': 159.07418160211472, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.1811933517456, 'W': 19.37098300965613, 'J_1KI': 4701.288783361042, 'W_1KI': 307.4759207881925, 'W_D': 
4.480983009656132, 'J_D': 68.51396723270419, 'W_D_1KI': 71.12671443898623, 'J_D_1KI': 1128.9954672854956} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..a8b662b --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 48, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.146132707595825, "TIME_S_1KI": 211.37776474157968, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 325.9102397632598, "W": 20.01824681068372, "J_1KI": 6789.79666173458, "W_1KI": 417.04680855591084, "W_D": 5.35524681068372, "J_D": 87.18694441962234, "W_D_1KI": 111.56764188924417, "J_D_1KI": 2324.325872692587} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..c604cad --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.4 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 2.167170524597168} + +tensor(indices=tensor([[ 488, 4773, 4527, ..., 2629, 4924, 3969], + [ 860, 3190, 4868, ..., 1859, 4874, 4035]]), + values=tensor([0.1016, 0.7267, 0.8882, ..., 0.9839, 0.5575, 0.6801]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8969, 0.8054, 0.0829, ..., 0.5187, 0.3613, 0.7237]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 2.167170524597168 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 48 -ss 5000 -sd 0.4 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.146132707595825} + +tensor(indices=tensor([[1697, 3818, 4103, ..., 3079, 1289, 2714], + [ 103, 2605, 2675, ..., 1234, 3413, 938]]), + values=tensor([0.9440, 0.3445, 0.4659, ..., 0.6576, 0.2575, 0.7871]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.3655, 0.4478, 0.5552, ..., 0.4179, 0.5971, 0.2546]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.146132707595825 seconds + +tensor(indices=tensor([[1697, 3818, 4103, ..., 3079, 1289, 2714], + [ 103, 2605, 2675, ..., 1234, 3413, 938]]), + values=tensor([0.9440, 0.3445, 0.4659, ..., 0.6576, 0.2575, 0.7871]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.3655, 0.4478, 0.5552, ..., 0.4179, 0.5971, 0.2546]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 
10.146132707595825 seconds + +[16.32, 16.12, 16.52, 16.4, 16.48, 16.56, 16.72, 16.32, 16.24, 16.12] +[15.92, 15.8, 19.4, 19.4, 20.44, 23.08, 25.84, 26.64, 23.72, 22.56, 20.6, 20.48, 20.48, 20.72, 20.96, 22.84] +16.28065848350525 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.146132707595825, 'TIME_S_1KI': 211.37776474157968, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.9102397632598, 'W': 20.01824681068372} +[16.32, 16.12, 16.52, 16.4, 16.48, 16.56, 16.72, 16.32, 16.24, 16.12, 16.2, 16.2, 15.96, 16.0, 16.2, 16.28, 16.28, 16.32, 16.32, 16.04] +293.26 +14.663 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.146132707595825, 'TIME_S_1KI': 211.37776474157968, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.9102397632598, 'W': 20.01824681068372, 'J_1KI': 6789.79666173458, 'W_1KI': 417.04680855591084, 'W_D': 5.35524681068372, 'J_D': 87.18694441962234, 'W_D_1KI': 111.56764188924417, 'J_D_1KI': 2324.325872692587} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..1c8d177 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 39, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.389892101287842, "TIME_S_1KI": 266.4074897766113, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 356.30051391601563, "W": 20.603235750156045, "J_1KI": 9135.91061323117, "W_1KI": 528.2880961578473, "W_D": 5.519235750156046, "J_D": 95.44648996162417, "W_D_1KI": 141.51886538861655, "J_D_1KI": 3628.688856118373} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..197ce63 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.5 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 2.680294990539551} + +tensor(indices=tensor([[ 232, 2268, 3564, ..., 3025, 1104, 2113], + [4930, 4710, 1043, ..., 496, 3241, 1795]]), + values=tensor([0.3745, 0.9505, 0.0317, ..., 0.4463, 0.4365, 0.6949]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.5118, 0.3866, 0.4458, ..., 0.5438, 0.2065, 0.9705]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 2.680294990539551 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl 
--cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 39 -ss 5000 -sd 0.5 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.389892101287842} + +tensor(indices=tensor([[ 646, 3657, 399, ..., 3262, 602, 2820], + [4655, 3729, 1355, ..., 2889, 4562, 2578]]), + values=tensor([0.7272, 0.5338, 0.0354, ..., 0.0316, 0.0331, 0.9529]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.7082, 0.9992, 0.2922, ..., 0.9082, 0.7559, 0.2396]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.389892101287842 seconds + +tensor(indices=tensor([[ 646, 3657, 399, ..., 3262, 602, 2820], + [4655, 3729, 1355, ..., 2889, 4562, 2578]]), + values=tensor([0.7272, 0.5338, 0.0354, ..., 0.0316, 0.0331, 0.9529]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.7082, 0.9992, 0.2922, ..., 0.9082, 0.7559, 0.2396]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.389892101287842 seconds + +[16.16, 16.32, 16.4, 17.08, 17.04, 17.12, 17.12, 17.08, 17.0, 16.76] +[16.52, 16.28, 16.4, 21.36, 23.04, 25.68, 27.48, 25.2, 23.6, 23.6, 23.24, 20.76, 20.56, 20.76, 20.96, 20.84, 23.04] +17.2934250831604 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 39, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.389892101287842, 'TIME_S_1KI': 266.4074897766113, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 356.30051391601563, 'W': 20.603235750156045} +[16.16, 16.32, 16.4, 17.08, 17.04, 17.12, 17.12, 17.08, 17.0, 16.76, 16.28, 16.28, 16.44, 16.6, 16.52, 16.88, 16.88, 16.96, 17.0, 16.72] +301.68 +15.084 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 39, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.389892101287842, 'TIME_S_1KI': 266.4074897766113, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 356.30051391601563, 'W': 20.603235750156045, 'J_1KI': 9135.91061323117, 'W_1KI': 528.2880961578473, 'W_D': 5.519235750156046, 'J_D': 95.44648996162417, 'W_D_1KI': 141.51886538861655, 'J_D_1KI': 3628.688856118373} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..0081da2 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 652466, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.900923252105713, "TIME_S_1KI": 0.016707266358868836, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 277.6165154457092, "W": 19.57926332943202, "J_1KI": 0.42548809508190344, "W_1KI": 0.030008097478538377, "W_D": 4.625263329432018, "J_D": 65.58211445093146, "W_D_1KI": 0.007088895558438322, "J_D_1KI": 1.0864773886207592e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_1e-05.output 
b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..f9929ad --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,535 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.008069992065429688} + +tensor(indices=tensor([[ 664, 4930, 1798, 3252, 3946, 4295, 4519, 4181, 2621, + 4985, 1156, 2770, 3847, 1146, 3512, 4324, 2076, 387, + 2436, 4560, 4421, 4032, 1885, 152, 1276, 1223, 3551, + 174, 1494, 158, 4266, 839, 2702, 540, 4136, 803, + 1669, 3838, 1784, 2329, 832, 2111, 4262, 2157, 3766, + 2817, 4291, 4273, 2069, 490, 2744, 4001, 1392, 4428, + 2000, 2415, 1622, 1061, 110, 1089, 4013, 198, 4355, + 2043, 4800, 4574, 131, 1117, 1530, 4360, 4135, 812, + 1527, 895, 657, 3188, 832, 2835, 2204, 198, 4539, + 4408, 1046, 4693, 4806, 3475, 4407, 3488, 4953, 1447, + 1254, 650, 1870, 3116, 2840, 3510, 2077, 3496, 4472, + 4651, 4861, 3153, 2, 3805, 2354, 3821, 996, 517, + 1357, 2359, 2581, 2352, 2227, 3670, 4367, 2835, 1544, + 4185, 2880, 1672, 4926, 3664, 40, 3647, 3941, 2471, + 4560, 4299, 3143, 3787, 2060, 1368, 930, 147, 3056, + 2878, 2010, 2941, 917, 1586, 1955, 1190, 574, 4502, + 388, 4230, 1939, 2848, 2507, 2644, 287, 1344, 1990, + 1381, 2601, 747, 315, 3944, 1715, 4518, 3240, 2637, + 232, 1184, 339, 1787, 2689, 4217, 4072, 1542, 344, + 1976, 3343, 2848, 2050, 2076, 4989, 1633, 2082, 3518, + 2016, 2530, 4677, 1397, 1211, 936, 475, 4678, 1559, + 2188, 1342, 3728, 1251, 4152, 4891, 3770, 4813, 3981, + 1284, 2510, 2944, 3149, 2021, 51, 2605, 103, 1642, + 3101, 1191, 4960, 2035, 4340, 3620, 1056, 2819, 3689, + 3709, 2214, 1301, 4580, 928, 1032, 1003, 1749, 4113, + 4664, 4292, 950, 4925, 1595, 85, 3965, 4692, 2978, + 4965, 2232, 1708, 1082, 508, 2089, 3354, 4680, 360, + 1274, 3513, 2939, 1911, 1726, 1045, 2181], + [ 227, 1685, 1834, 4760, 2477, 1206, 1370, 4969, 2285, + 4154, 3537, 2882, 476, 1657, 1264, 2936, 4505, 655, + 172, 2416, 1928, 1148, 3342, 3077, 1196, 1301, 3298, + 1659, 693, 3211, 339, 2330, 2725, 4934, 4983, 4525, + 289, 1742, 1273, 2290, 3687, 518, 4074, 2495, 2222, + 487, 4458, 73, 1464, 1309, 2746, 3413, 3337, 2501, + 3677, 4544, 2684, 2022, 4890, 1086, 1022, 755, 2964, + 474, 1304, 3752, 1417, 3331, 4538, 2615, 1830, 4470, + 3609, 1871, 4471, 779, 1324, 3166, 3672, 2820, 321, + 4659, 35, 1380, 210, 4203, 1645, 1296, 2662, 3090, + 1042, 103, 576, 2454, 1215, 824, 1013, 1864, 4248, + 307, 2509, 4386, 3, 1985, 3256, 3802, 943, 3714, + 4522, 3576, 4210, 2090, 3438, 3967, 1564, 1863, 3263, + 2286, 3440, 429, 2419, 1905, 907, 4188, 940, 3906, + 4449, 3348, 3278, 3763, 2007, 3964, 573, 4214, 3591, + 131, 2017, 1029, 1697, 1756, 4918, 2743, 4252, 1352, + 3147, 2118, 2967, 451, 4607, 3947, 3655, 3938, 774, + 1101, 1407, 1241, 3001, 2930, 352, 4751, 4101, 4416, + 151, 3273, 2821, 2603, 2320, 491, 1042, 1599, 4185, + 4810, 2192, 3559, 377, 3404, 1890, 1851, 4775, 1133, + 384, 2066, 1786, 2265, 3514, 2058, 3787, 14, 2629, + 4222, 1405, 1854, 2078, 1355, 3521, 1677, 4776, 3582, + 574, 1847, 226, 4790, 4975, 361, 487, 2247, 1941, + 3334, 1667, 3248, 4500, 229, 1484, 2425, 3025, 3524, + 2889, 4298, 4920, 316, 3597, 1411, 1029, 733, 257, + 
2753, 1484, 436, 235, 2687, 2908, 3209, 4767, 1879, + 2378, 4394, 4010, 541, 2114, 976, 2309, 1786, 904, + 3958, 1974, 4912, 3796, 3066, 2802, 3789]]), + values=tensor([0.5478, 0.7056, 0.7047, 0.3907, 0.1936, 0.1434, 0.5362, + 0.7092, 0.4309, 0.4755, 0.1170, 0.1815, 0.8437, 0.1320, + 0.4893, 0.7832, 0.8076, 0.6627, 0.4173, 0.4168, 0.9147, + 0.0305, 0.1742, 0.8991, 0.9110, 0.1332, 0.2154, 0.9640, + 0.7797, 0.7831, 0.7187, 0.3847, 0.4541, 0.7475, 0.8114, + 0.2441, 0.4632, 0.9117, 0.9441, 0.5983, 0.4901, 0.3437, + 0.2617, 0.2485, 0.7118, 0.2219, 0.1317, 0.5238, 0.3914, + 0.2178, 0.6886, 0.5681, 0.0202, 0.8330, 0.7685, 0.9526, + 0.4855, 0.6656, 0.6872, 0.0441, 0.8700, 0.2680, 0.3375, + 0.9234, 0.0170, 0.2131, 0.7436, 0.8296, 0.0546, 0.1471, + 0.6863, 0.8161, 0.4032, 0.4274, 0.3899, 0.6303, 0.1371, + 0.7743, 0.1286, 0.0226, 0.5868, 0.5392, 0.3803, 0.7072, + 0.9965, 0.1328, 0.9937, 0.4945, 0.6532, 0.0676, 0.1266, + 0.3365, 0.0678, 0.8188, 0.2290, 0.9214, 0.5333, 0.9066, + 0.6876, 0.0305, 0.0055, 0.2622, 0.6721, 0.9806, 0.5098, + 0.6957, 0.8504, 0.8837, 0.2222, 0.3826, 0.1748, 0.2531, + 0.2562, 0.2514, 0.5238, 0.2018, 0.1344, 0.4871, 0.4122, + 0.7600, 0.0932, 0.1621, 0.9619, 0.7573, 0.2498, 0.7948, + 0.6398, 0.8321, 0.9915, 0.2662, 0.2166, 0.1111, 0.0943, + 0.3203, 0.9544, 0.3327, 0.4141, 0.4955, 0.2020, 0.5638, + 0.1565, 0.2953, 0.3244, 0.9269, 0.8990, 0.8711, 0.6869, + 0.8622, 0.7318, 0.3443, 0.7774, 0.1845, 0.8328, 0.4849, + 0.9562, 0.1240, 0.9302, 0.4756, 0.1352, 0.9399, 0.0781, + 0.9452, 0.3237, 0.5029, 0.6363, 0.9118, 0.5470, 0.1844, + 0.8554, 0.2034, 0.6470, 0.4447, 0.0686, 0.4577, 0.4534, + 0.0832, 0.7085, 0.7166, 0.0447, 0.2665, 0.5048, 0.2450, + 0.5307, 0.7659, 0.4786, 0.1418, 0.8720, 0.3272, 0.0125, + 0.9431, 0.2795, 0.5223, 0.7190, 0.0966, 0.6423, 0.1997, + 0.9068, 0.3006, 0.0762, 0.8873, 0.9474, 0.7672, 0.1354, + 0.9120, 0.8878, 0.8157, 0.8469, 0.4956, 0.1563, 0.0011, + 0.1351, 0.0633, 0.9698, 0.0620, 0.3840, 0.8019, 0.9943, + 0.4723, 0.4214, 0.0447, 0.0477, 0.1476, 0.9014, 0.0753, + 0.6803, 0.8515, 0.4763, 0.0839, 0.4336, 0.6019, 0.6703, + 0.6200, 0.4603, 0.8676, 0.2675, 0.9946, 0.4776, 0.9099, + 0.5130, 0.0339, 0.4682, 0.1878, 0.2460, 0.2344, 0.1080, + 0.4357, 0.2186, 0.2272, 0.8348, 0.3487]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.1360, 0.1704, 0.2893, ..., 0.2178, 0.6550, 0.7906]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.008069992065429688 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 13011 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.2877483367919922} + +tensor(indices=tensor([[3477, 3611, 2655, 840, 3833, 1914, 956, 1511, 3911, + 884, 3345, 3818, 4383, 4925, 4096, 3582, 352, 1510, + 1817, 2342, 2028, 1179, 4899, 1626, 597, 2720, 4264, + 3894, 3495, 3809, 3664, 4577, 278, 2036, 2714, 2200, + 4236, 529, 538, 3387, 2708, 4749, 1007, 1753, 3744, + 2256, 2975, 2981, 2337, 481, 4337, 3842, 3461, 4171, + 4662, 1666, 3868, 4324, 795, 2350, 3688, 2699, 1550, + 2805, 1375, 3083, 2253, 3037, 3407, 2844, 2045, 2368, + 2868, 1010, 3970, 3854, 4572, 452, 412, 3247, 1514, + 2750, 1913, 2411, 3285, 2541, 1797, 3693, 1850, 2098, + 3274, 1588, 2513, 2196, 825, 946, 
191, 226, 1537, + 698, 4482, 1724, 2460, 3525, 4941, 2616, 673, 4902, + 2240, 2520, 3971, 2955, 3289, 3082, 4012, 4755, 20, + 4055, 2643, 237, 3730, 3380, 123, 4560, 882, 4345, + 885, 76, 241, 3928, 3630, 3159, 3052, 4972, 261, + 255, 2327, 3533, 1748, 641, 2130, 2354, 3894, 587, + 4821, 3536, 3295, 2694, 1037, 1697, 1886, 2527, 58, + 4855, 892, 2529, 2236, 1336, 4868, 3733, 814, 243, + 466, 3479, 3667, 1561, 4744, 3303, 1471, 3848, 3874, + 365, 433, 1399, 1624, 4306, 1907, 3778, 382, 2928, + 3349, 1707, 4473, 1136, 4368, 1128, 3876, 2055, 3864, + 4828, 1769, 81, 2873, 274, 2862, 4202, 542, 3896, + 4098, 4145, 4002, 4370, 4489, 1904, 3962, 1620, 4959, + 215, 3356, 1895, 2259, 2950, 4756, 1621, 3272, 1979, + 2572, 1532, 4609, 3652, 761, 2920, 331, 4467, 2688, + 3913, 1589, 2065, 1637, 938, 2765, 4072, 4359, 68, + 2235, 3545, 3455, 4372, 279, 153, 4677, 4057, 1588, + 4985, 4988, 70, 1246, 3453, 2048, 555], + [3538, 1727, 1017, 804, 1700, 1693, 5, 465, 2603, + 368, 2512, 2704, 3341, 3367, 4901, 4432, 4229, 2409, + 3141, 4227, 3113, 267, 1697, 4704, 4478, 3278, 247, + 259, 704, 1427, 1985, 3571, 414, 88, 1244, 1457, + 79, 1494, 1626, 405, 2243, 1008, 3738, 909, 1636, + 2270, 4838, 659, 4205, 2159, 2733, 4564, 56, 3545, + 348, 2456, 4390, 4016, 2848, 525, 2051, 1388, 4760, + 3578, 519, 2007, 3605, 3446, 2256, 3544, 4952, 3452, + 4366, 2681, 3523, 4612, 230, 1656, 4828, 2657, 903, + 501, 1509, 1560, 2926, 2952, 142, 4006, 58, 3964, + 193, 2277, 1385, 4874, 1339, 1120, 4212, 3420, 4688, + 3990, 2941, 3766, 890, 544, 904, 3739, 1438, 1074, + 3937, 4631, 2947, 4344, 3049, 4861, 2867, 3327, 4296, + 830, 2177, 2210, 4988, 3572, 4758, 4428, 1837, 4461, + 3306, 2048, 1010, 3309, 4840, 4840, 333, 2154, 405, + 3347, 3665, 1971, 537, 2725, 3655, 4408, 4233, 3801, + 4950, 3463, 2921, 2808, 4651, 4924, 1327, 3037, 4194, + 2513, 2626, 2649, 807, 3666, 2803, 1061, 2547, 4325, + 3080, 4517, 975, 1092, 4070, 702, 4225, 2654, 2527, + 438, 4951, 4056, 1825, 1671, 4466, 148, 1233, 2403, + 1774, 2616, 1478, 1750, 1212, 4313, 4661, 3389, 1028, + 4344, 2123, 1543, 3882, 3206, 1687, 3088, 4888, 4676, + 260, 565, 1746, 231, 707, 2988, 130, 1913, 4964, + 4281, 1591, 1869, 2007, 1608, 755, 1759, 604, 2415, + 3407, 2991, 1405, 4524, 4051, 4594, 3896, 428, 3749, + 2879, 1324, 628, 2281, 3559, 196, 2359, 4566, 3521, + 2459, 4570, 4032, 3195, 2550, 620, 3334, 3490, 562, + 3322, 4534, 397, 4728, 1685, 1482, 4969]]), + values=tensor([0.9849, 0.8818, 0.8584, 0.4875, 0.5177, 0.9192, 0.6572, + 0.3662, 0.9670, 0.3872, 0.3036, 0.5200, 0.7714, 0.2232, + 0.9006, 0.5557, 0.6312, 0.9320, 0.0165, 0.2735, 0.1932, + 0.1355, 0.9851, 0.9431, 0.4292, 0.8489, 0.9178, 0.6777, + 0.0607, 0.2320, 0.0845, 0.6071, 0.3885, 0.7081, 0.0452, + 0.0999, 0.7447, 0.9400, 0.9739, 0.9473, 0.1005, 0.8684, + 0.7080, 0.9705, 0.8679, 0.3477, 0.0486, 0.2863, 0.6324, + 0.1179, 0.7374, 0.4103, 0.2815, 0.4975, 0.1108, 0.4681, + 0.3680, 0.2232, 0.7214, 0.4334, 0.4380, 0.7473, 0.5234, + 0.0365, 0.6777, 0.2226, 0.7133, 0.1178, 0.0666, 0.4798, + 0.4167, 0.3609, 0.2409, 0.4480, 0.9400, 0.5244, 0.1724, + 0.0385, 0.9391, 0.8581, 0.1877, 0.0217, 0.5553, 0.7351, + 0.1314, 0.1859, 0.2337, 0.5785, 0.7907, 0.5077, 0.9739, + 0.9648, 0.8810, 0.6052, 0.1608, 0.9813, 0.7701, 0.9916, + 0.2638, 0.0745, 0.6661, 0.7738, 0.0039, 0.6546, 0.6118, + 0.7432, 0.5169, 0.1005, 0.8166, 0.6080, 0.0207, 0.3868, + 0.8981, 0.3719, 0.5138, 0.3947, 0.1610, 0.4791, 0.5326, + 0.4433, 0.7155, 0.8163, 0.2183, 0.3885, 0.9623, 0.8678, + 0.1647, 0.0423, 0.3487, 0.3285, 0.6303, 0.3733, 0.5618, + 0.7513, 
0.0310, 0.2902, 0.7791, 0.1040, 0.4654, 0.0998, + 0.4001, 0.8015, 0.0754, 0.1587, 0.3364, 0.2020, 0.4977, + 0.7455, 0.6895, 0.6846, 0.1977, 0.4137, 0.9377, 0.5152, + 0.9768, 0.6665, 0.1917, 0.5539, 0.5968, 0.2984, 0.7945, + 0.2705, 0.2146, 0.1598, 0.1116, 0.1109, 0.4816, 0.7969, + 0.7859, 0.8810, 0.7142, 0.2551, 0.6023, 0.2201, 0.2197, + 0.2500, 0.5558, 0.2337, 0.8018, 0.2552, 0.0327, 0.2900, + 0.3396, 0.7355, 0.0700, 0.7093, 0.9055, 0.7784, 0.7197, + 0.4975, 0.1554, 0.8521, 0.8334, 0.2641, 0.9429, 0.0535, + 0.7627, 0.8737, 0.5579, 0.4793, 0.1122, 0.8415, 0.0910, + 0.5006, 0.1759, 0.2400, 0.4011, 0.5869, 0.2358, 0.3276, + 0.6846, 0.9205, 0.5225, 0.4563, 0.2238, 0.6037, 0.3599, + 0.6425, 0.8924, 0.5998, 0.0085, 0.0645, 0.5407, 0.2890, + 0.4224, 0.1710, 0.8487, 0.9686, 0.5352, 0.4874, 0.4820, + 0.9833, 0.7249, 0.2196, 0.8624, 0.3360, 0.1483, 0.9238, + 0.2273, 0.2954, 0.7485, 0.7151, 0.2174, 0.2781, 0.2399, + 0.4069, 0.0407, 0.6707, 0.7104, 0.3223]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.3518, 0.5120, 0.5532, ..., 0.9536, 0.9900, 0.0346]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.2877483367919922 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 474774 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.640431642532349} + +tensor(indices=tensor([[3128, 1360, 2394, 4783, 3296, 550, 4287, 4097, 4305, + 3222, 539, 2195, 255, 3769, 4633, 2927, 467, 1981, + 2952, 4258, 1766, 3479, 2132, 1614, 773, 112, 762, + 1280, 1192, 3396, 1844, 2937, 2058, 896, 4410, 129, + 4860, 632, 2990, 1331, 2487, 4884, 2796, 359, 2944, + 803, 804, 857, 4919, 551, 1227, 4351, 3706, 39, + 1971, 1389, 4682, 1805, 3061, 1002, 1014, 1954, 4647, + 2776, 2316, 2127, 4708, 4448, 1961, 3078, 4508, 559, + 829, 4928, 674, 2737, 4156, 3918, 193, 3455, 2050, + 1763, 2883, 2547, 2447, 2878, 2512, 1207, 2204, 4273, + 798, 962, 4049, 2494, 2221, 2723, 4750, 4477, 90, + 2367, 4333, 2150, 1252, 3420, 3526, 4605, 3016, 692, + 943, 57, 4814, 3539, 4852, 1466, 3525, 2826, 899, + 2399, 1213, 1803, 4358, 4286, 3110, 1692, 4248, 1451, + 1641, 2240, 2931, 4366, 496, 2722, 1139, 14, 65, + 720, 4347, 4775, 2208, 4931, 3807, 486, 1484, 1857, + 1750, 2218, 1019, 3570, 4610, 3036, 1184, 3052, 1954, + 4860, 3992, 412, 1700, 2612, 2714, 1969, 1407, 3493, + 3595, 4128, 1919, 4929, 552, 2250, 2673, 2312, 2320, + 4591, 4431, 2648, 214, 1597, 421, 3408, 4252, 4243, + 3355, 3073, 3936, 3223, 1359, 3707, 3567, 3159, 2019, + 4828, 3268, 2935, 4514, 4923, 4963, 3259, 4691, 4746, + 2316, 270, 334, 3101, 1256, 3463, 1377, 3886, 3568, + 4663, 2994, 4107, 136, 98, 4583, 4473, 474, 2012, + 2891, 183, 1629, 3889, 2890, 2739, 1085, 1019, 3383, + 1248, 2505, 3757, 2299, 359, 1455, 4123, 2780, 925, + 4232, 2672, 3760, 3259, 2415, 1338, 661, 1893, 230, + 4699, 2801, 732, 3278, 83, 1911, 4271], + [2148, 916, 1520, 3310, 533, 3648, 1000, 2390, 178, + 3718, 2354, 4382, 2079, 3051, 450, 3204, 3703, 4654, + 1669, 1762, 2932, 4623, 2932, 371, 1013, 896, 747, + 640, 441, 468, 4738, 4669, 4531, 3002, 4060, 1216, + 1248, 2099, 3249, 1341, 706, 1784, 1815, 25, 2257, + 3272, 4803, 1550, 2752, 1286, 552, 1626, 1833, 1341, + 4685, 308, 3088, 2049, 4025, 849, 3902, 
2534, 3494, + 671, 1445, 4403, 4845, 4776, 2718, 4158, 1233, 4790, + 2405, 2412, 4997, 4687, 4965, 2523, 4558, 3964, 4188, + 462, 49, 2496, 1786, 4613, 4653, 1063, 4817, 3569, + 1542, 4633, 77, 3959, 902, 1334, 4376, 3566, 2377, + 1262, 2517, 71, 1403, 4185, 4906, 4064, 886, 3798, + 3191, 4659, 4033, 4413, 2828, 2728, 1047, 505, 3040, + 2255, 1567, 1757, 3892, 3131, 749, 4135, 4774, 2230, + 155, 4929, 4009, 1085, 1598, 4579, 4790, 3694, 3870, + 2111, 3323, 4183, 124, 137, 1520, 3236, 249, 581, + 4999, 840, 3719, 3410, 1837, 3674, 1878, 1931, 3905, + 808, 1006, 4889, 1709, 2521, 3711, 704, 879, 1980, + 4837, 2598, 3631, 3332, 4759, 1667, 507, 2134, 1456, + 287, 1422, 4217, 602, 3844, 896, 2645, 2976, 2949, + 3621, 4416, 4041, 3032, 315, 1118, 1139, 1725, 3908, + 3916, 4545, 4809, 2259, 3154, 3030, 1461, 1757, 4318, + 825, 1984, 1875, 2301, 972, 1996, 233, 74, 1306, + 2544, 1643, 789, 4865, 3177, 3739, 4070, 4214, 4807, + 4078, 2821, 38, 4989, 2384, 4387, 1742, 3309, 4631, + 1148, 1011, 4241, 3356, 4276, 367, 573, 4141, 84, + 2854, 1738, 4498, 1730, 2488, 2149, 4208, 1523, 2615, + 2804, 4126, 4596, 3860, 2512, 1604, 3064]]), + values=tensor([0.9822, 0.1534, 0.1686, 0.5947, 0.7246, 0.2474, 0.9544, + 0.9002, 0.5549, 0.6022, 0.3601, 0.5476, 0.4151, 0.4654, + 0.1566, 0.5469, 0.0781, 0.5394, 0.9966, 0.1074, 0.0973, + 0.4397, 0.2161, 0.2235, 0.4113, 0.3325, 0.7980, 0.4203, + 0.2621, 0.8819, 0.5377, 0.2702, 0.0875, 0.7708, 0.1628, + 0.4825, 0.4567, 0.3855, 0.7640, 0.8886, 0.7914, 0.2717, + 0.1986, 0.0231, 0.0461, 0.7087, 0.1190, 0.0307, 0.4844, + 0.9855, 0.2634, 0.1760, 0.2304, 0.2753, 0.9429, 0.3134, + 0.0284, 0.7189, 0.1069, 0.7654, 0.8805, 0.7903, 0.2783, + 0.5666, 0.9545, 0.3065, 0.2293, 0.8232, 0.0688, 0.6704, + 0.2342, 0.3965, 0.8513, 0.5908, 0.7413, 0.1564, 0.6546, + 0.7975, 0.5058, 0.6244, 0.6420, 0.5084, 0.5303, 0.2070, + 0.8040, 0.6628, 0.7549, 0.0951, 0.2973, 0.8999, 0.6411, + 0.4591, 0.4369, 0.8794, 0.6863, 0.0289, 0.1288, 0.4316, + 0.3599, 0.2581, 0.8396, 0.5147, 0.3632, 0.9620, 0.5698, + 0.6621, 0.6122, 0.8311, 0.1953, 0.9165, 0.4396, 0.8153, + 0.8396, 0.0337, 0.3298, 0.1881, 0.3038, 0.3242, 0.1854, + 0.2863, 0.8736, 0.0755, 0.0821, 0.3582, 0.3870, 0.9219, + 0.1419, 0.3133, 0.3257, 0.1205, 0.8028, 0.1316, 0.0697, + 0.2440, 0.2572, 0.2460, 0.4508, 0.4553, 0.5597, 0.3545, + 0.0693, 0.2173, 0.2814, 0.5150, 0.0483, 0.2946, 0.5622, + 0.0764, 0.9745, 0.2487, 0.5250, 0.7227, 0.3393, 0.2127, + 0.2007, 0.3635, 0.5301, 0.8381, 0.3362, 0.9981, 0.8321, + 0.8542, 0.9747, 0.1981, 0.7535, 0.9735, 0.2741, 0.6191, + 0.1461, 0.7833, 0.1997, 0.9947, 0.4343, 0.0393, 0.6682, + 0.8420, 0.5376, 0.8536, 0.1142, 0.2691, 0.2629, 0.4109, + 0.7381, 0.2761, 0.3793, 0.6378, 0.4568, 0.7861, 0.9356, + 0.3378, 0.7598, 0.0671, 0.4736, 0.0503, 0.6163, 0.6588, + 0.6120, 0.3661, 0.0194, 0.7332, 0.2584, 0.7912, 0.3036, + 0.5970, 0.5204, 0.0686, 0.8139, 0.4411, 0.0793, 0.3333, + 0.0064, 0.7947, 0.2414, 0.1441, 0.2754, 0.0165, 0.0787, + 0.1391, 0.2265, 0.9901, 0.8627, 0.7933, 0.9161, 0.3608, + 0.2697, 0.1733, 0.5878, 0.6437, 0.0039, 0.7442, 0.5785, + 0.7495, 0.0770, 0.2339, 0.4228, 0.2513, 0.7691, 0.7740, + 0.2943, 0.1347, 0.0357, 0.4640, 0.3174, 0.1347, 0.9333, + 0.1092, 0.1714, 0.0601, 0.7413, 0.1599]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.8038, 0.3754, 0.8168, ..., 0.2215, 0.9556, 0.7170]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 7.640431642532349 seconds + +['apptainer', 'run', '--env', 
'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 652466 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.900923252105713} + +tensor(indices=tensor([[3119, 4218, 1197, 1728, 1966, 3162, 471, 4398, 689, + 426, 2339, 1139, 4483, 1325, 824, 1233, 4344, 1543, + 2756, 2312, 4288, 4669, 4399, 2423, 4473, 615, 304, + 1203, 3595, 1240, 800, 3659, 1540, 2507, 345, 3633, + 153, 2776, 1885, 2482, 4846, 2900, 1825, 4930, 4211, + 1775, 476, 952, 2204, 4155, 3700, 823, 3109, 1161, + 1091, 4731, 1731, 4714, 1569, 1379, 307, 3976, 583, + 2300, 2804, 2699, 3175, 4892, 4865, 843, 4590, 2689, + 616, 492, 4924, 1583, 3298, 2812, 3192, 1200, 1137, + 4209, 330, 2831, 4340, 2689, 543, 1790, 4159, 2874, + 761, 2646, 4714, 4800, 881, 3578, 4887, 1661, 2706, + 1859, 136, 3424, 105, 1029, 3975, 1286, 2965, 4856, + 1154, 2510, 4220, 3016, 1134, 1479, 1448, 307, 462, + 934, 2336, 3750, 1790, 1575, 3443, 939, 1241, 2837, + 3900, 4291, 203, 1741, 859, 4951, 4488, 3294, 2886, + 2610, 2662, 3452, 1898, 44, 4857, 213, 942, 4363, + 796, 4218, 1609, 456, 2688, 2971, 4107, 2034, 3965, + 3593, 4002, 3087, 358, 1630, 3151, 4014, 4838, 2938, + 3540, 1767, 663, 1893, 1392, 1160, 2907, 763, 922, + 3419, 1169, 2783, 4882, 4571, 4127, 1386, 4947, 625, + 2376, 1666, 569, 1691, 2369, 4667, 836, 61, 3214, + 4560, 3603, 3056, 1904, 4594, 1650, 644, 4894, 3393, + 2437, 1873, 2135, 1128, 1722, 2256, 2848, 4414, 3755, + 831, 4027, 3556, 1089, 181, 1511, 1830, 3582, 3749, + 3611, 2411, 3124, 56, 3265, 2430, 3858, 1720, 2053, + 3351, 984, 4170, 1134, 3745, 1630, 4345, 1473, 2765, + 2640, 3357, 3473, 2386, 2178, 2533, 48, 3140, 4470, + 2988, 4096, 883, 792, 1071, 1011, 3499], + [4549, 2279, 3645, 237, 3317, 4582, 4171, 4232, 2706, + 92, 94, 3750, 2848, 484, 4218, 1986, 3696, 1672, + 930, 2374, 1967, 3270, 4895, 3262, 2094, 4808, 222, + 1753, 1069, 3932, 4014, 1224, 2176, 3990, 2419, 4813, + 829, 4291, 636, 2217, 3375, 202, 1147, 3267, 983, + 702, 4405, 444, 3639, 2242, 4305, 279, 2856, 2308, + 2313, 4364, 2957, 1407, 1421, 4556, 4301, 895, 1516, + 603, 3489, 2646, 3581, 1836, 1680, 3397, 4887, 1550, + 407, 366, 3995, 4149, 1941, 3739, 4440, 1091, 2963, + 3749, 3460, 4903, 455, 1704, 2786, 2508, 1940, 2760, + 1348, 3769, 1810, 3657, 2796, 4348, 2607, 4542, 4804, + 3615, 1544, 3108, 2347, 1683, 2847, 4550, 3462, 2759, + 1848, 3686, 3005, 4294, 1915, 2265, 1694, 1489, 662, + 303, 3738, 2807, 4571, 543, 4829, 4377, 4537, 1035, + 183, 3822, 4930, 4024, 912, 4848, 4748, 2489, 3065, + 3812, 3351, 4108, 63, 2447, 1957, 2692, 150, 969, + 1308, 4258, 3115, 3689, 3796, 896, 4478, 540, 3953, + 61, 3052, 489, 1814, 2160, 3810, 261, 467, 1597, + 3910, 638, 33, 3095, 2586, 1546, 1220, 1154, 2606, + 1129, 3854, 3262, 2689, 3172, 832, 338, 4232, 4949, + 3068, 1177, 4860, 1777, 3990, 918, 4219, 4283, 2139, + 3787, 2811, 4271, 4798, 334, 1313, 460, 1978, 4805, + 793, 994, 1327, 514, 560, 4414, 341, 1150, 3312, + 3806, 4496, 1658, 2567, 2306, 3480, 1642, 1603, 2916, + 4101, 1615, 2278, 4133, 2593, 1278, 473, 2698, 3950, + 2941, 696, 1261, 4941, 4572, 556, 2204, 2050, 270, + 4547, 633, 2343, 3078, 2536, 1860, 1208, 4708, 157, + 2124, 1314, 2371, 1568, 847, 587, 65]]), + values=tensor([0.6355, 0.0837, 0.8410, 0.0418, 0.6736, 0.2750, 0.7118, + 0.4504, 0.1192, 0.0944, 0.6430, 0.2386, 0.7967, 0.9967, + 0.2745, 
0.6488, 0.9204, 0.4287, 0.2051, 0.7098, 0.2228, + 0.2206, 0.1190, 0.9221, 0.7121, 0.2346, 0.9582, 0.8038, + 0.3691, 0.7817, 0.5756, 0.9334, 0.1738, 0.9079, 0.0606, + 0.1776, 0.8951, 0.7399, 0.1055, 0.2805, 0.8924, 0.5937, + 0.5317, 0.1397, 0.5770, 0.6960, 0.2097, 0.9764, 0.2725, + 0.5061, 0.8215, 0.1246, 0.3881, 0.4674, 0.9528, 0.1634, + 0.9358, 0.4536, 0.4361, 0.4732, 0.8428, 0.2144, 0.1339, + 0.4100, 0.7414, 0.9882, 0.2160, 0.9020, 0.2304, 0.2481, + 0.2533, 0.3484, 0.0311, 0.6948, 0.2407, 0.9490, 0.7932, + 0.3976, 0.2464, 0.4584, 0.9675, 0.8166, 0.9928, 0.6982, + 0.3607, 0.7689, 0.6945, 0.7953, 0.6452, 0.3603, 0.3558, + 0.7032, 0.9827, 0.4366, 0.6723, 0.9128, 0.5394, 0.4372, + 0.0095, 0.3611, 0.0511, 0.1433, 0.3252, 0.8166, 0.8777, + 0.6930, 0.4048, 0.8859, 0.5054, 0.6514, 0.1121, 0.9778, + 0.5674, 0.2770, 0.9926, 0.0302, 0.4015, 0.3543, 0.5096, + 0.8317, 0.1443, 0.4937, 0.3506, 0.4212, 0.3419, 0.3912, + 0.7549, 0.8208, 0.9482, 0.2823, 0.8145, 0.4589, 0.9038, + 0.4967, 0.3645, 0.4486, 0.5036, 0.0275, 0.9454, 0.6409, + 0.3598, 0.2317, 0.8481, 0.9516, 0.0826, 0.9137, 0.2611, + 0.3765, 0.8478, 0.3309, 0.5264, 0.5475, 0.5680, 0.8134, + 0.2741, 0.2839, 0.1742, 0.4681, 0.2521, 0.8332, 0.6563, + 0.6688, 0.2858, 0.7436, 0.0563, 0.1330, 0.9614, 0.0077, + 0.6823, 0.2785, 0.9746, 0.6623, 0.6318, 0.0722, 0.6669, + 0.0013, 0.0788, 0.6422, 0.9487, 0.2169, 0.6363, 0.1344, + 0.4209, 0.4347, 0.7185, 0.6101, 0.5517, 0.0476, 0.4783, + 0.6344, 0.5806, 0.9366, 0.6038, 0.3083, 0.8057, 0.9261, + 0.5997, 0.6963, 0.8340, 0.9476, 0.8282, 0.3252, 0.7333, + 0.9336, 0.5398, 0.0321, 0.0697, 0.3161, 0.9531, 0.8266, + 0.3184, 0.3181, 0.1843, 0.8599, 0.1487, 0.2590, 0.4247, + 0.1841, 0.2357, 0.9877, 0.7316, 0.0964, 0.5700, 0.6667, + 0.7404, 0.7193, 0.6826, 0.4300, 0.2584, 0.8033, 0.6500, + 0.0540, 0.9587, 0.2863, 0.3374, 0.3943, 0.8482, 0.2479, + 0.5712, 0.6593, 0.4789, 0.9350, 0.8089, 0.3627, 0.4241, + 0.3935, 0.4552, 0.1829, 0.8315, 0.0492]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.6042, 0.7803, 0.5228, ..., 0.2007, 0.5476, 0.8431]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.900923252105713 seconds + +tensor(indices=tensor([[3119, 4218, 1197, 1728, 1966, 3162, 471, 4398, 689, + 426, 2339, 1139, 4483, 1325, 824, 1233, 4344, 1543, + 2756, 2312, 4288, 4669, 4399, 2423, 4473, 615, 304, + 1203, 3595, 1240, 800, 3659, 1540, 2507, 345, 3633, + 153, 2776, 1885, 2482, 4846, 2900, 1825, 4930, 4211, + 1775, 476, 952, 2204, 4155, 3700, 823, 3109, 1161, + 1091, 4731, 1731, 4714, 1569, 1379, 307, 3976, 583, + 2300, 2804, 2699, 3175, 4892, 4865, 843, 4590, 2689, + 616, 492, 4924, 1583, 3298, 2812, 3192, 1200, 1137, + 4209, 330, 2831, 4340, 2689, 543, 1790, 4159, 2874, + 761, 2646, 4714, 4800, 881, 3578, 4887, 1661, 2706, + 1859, 136, 3424, 105, 1029, 3975, 1286, 2965, 4856, + 1154, 2510, 4220, 3016, 1134, 1479, 1448, 307, 462, + 934, 2336, 3750, 1790, 1575, 3443, 939, 1241, 2837, + 3900, 4291, 203, 1741, 859, 4951, 4488, 3294, 2886, + 2610, 2662, 3452, 1898, 44, 4857, 213, 942, 4363, + 796, 4218, 1609, 456, 2688, 2971, 4107, 2034, 3965, + 3593, 4002, 3087, 358, 1630, 3151, 4014, 4838, 2938, + 3540, 1767, 663, 1893, 1392, 1160, 2907, 763, 922, + 3419, 1169, 2783, 4882, 4571, 4127, 1386, 4947, 625, + 2376, 1666, 569, 1691, 2369, 4667, 836, 61, 3214, + 4560, 3603, 3056, 1904, 4594, 1650, 644, 4894, 3393, + 2437, 1873, 2135, 1128, 1722, 2256, 2848, 4414, 3755, + 831, 4027, 3556, 1089, 181, 1511, 1830, 3582, 3749, + 
3611, 2411, 3124, 56, 3265, 2430, 3858, 1720, 2053, + 3351, 984, 4170, 1134, 3745, 1630, 4345, 1473, 2765, + 2640, 3357, 3473, 2386, 2178, 2533, 48, 3140, 4470, + 2988, 4096, 883, 792, 1071, 1011, 3499], + [4549, 2279, 3645, 237, 3317, 4582, 4171, 4232, 2706, + 92, 94, 3750, 2848, 484, 4218, 1986, 3696, 1672, + 930, 2374, 1967, 3270, 4895, 3262, 2094, 4808, 222, + 1753, 1069, 3932, 4014, 1224, 2176, 3990, 2419, 4813, + 829, 4291, 636, 2217, 3375, 202, 1147, 3267, 983, + 702, 4405, 444, 3639, 2242, 4305, 279, 2856, 2308, + 2313, 4364, 2957, 1407, 1421, 4556, 4301, 895, 1516, + 603, 3489, 2646, 3581, 1836, 1680, 3397, 4887, 1550, + 407, 366, 3995, 4149, 1941, 3739, 4440, 1091, 2963, + 3749, 3460, 4903, 455, 1704, 2786, 2508, 1940, 2760, + 1348, 3769, 1810, 3657, 2796, 4348, 2607, 4542, 4804, + 3615, 1544, 3108, 2347, 1683, 2847, 4550, 3462, 2759, + 1848, 3686, 3005, 4294, 1915, 2265, 1694, 1489, 662, + 303, 3738, 2807, 4571, 543, 4829, 4377, 4537, 1035, + 183, 3822, 4930, 4024, 912, 4848, 4748, 2489, 3065, + 3812, 3351, 4108, 63, 2447, 1957, 2692, 150, 969, + 1308, 4258, 3115, 3689, 3796, 896, 4478, 540, 3953, + 61, 3052, 489, 1814, 2160, 3810, 261, 467, 1597, + 3910, 638, 33, 3095, 2586, 1546, 1220, 1154, 2606, + 1129, 3854, 3262, 2689, 3172, 832, 338, 4232, 4949, + 3068, 1177, 4860, 1777, 3990, 918, 4219, 4283, 2139, + 3787, 2811, 4271, 4798, 334, 1313, 460, 1978, 4805, + 793, 994, 1327, 514, 560, 4414, 341, 1150, 3312, + 3806, 4496, 1658, 2567, 2306, 3480, 1642, 1603, 2916, + 4101, 1615, 2278, 4133, 2593, 1278, 473, 2698, 3950, + 2941, 696, 1261, 4941, 4572, 556, 2204, 2050, 270, + 4547, 633, 2343, 3078, 2536, 1860, 1208, 4708, 157, + 2124, 1314, 2371, 1568, 847, 587, 65]]), + values=tensor([0.6355, 0.0837, 0.8410, 0.0418, 0.6736, 0.2750, 0.7118, + 0.4504, 0.1192, 0.0944, 0.6430, 0.2386, 0.7967, 0.9967, + 0.2745, 0.6488, 0.9204, 0.4287, 0.2051, 0.7098, 0.2228, + 0.2206, 0.1190, 0.9221, 0.7121, 0.2346, 0.9582, 0.8038, + 0.3691, 0.7817, 0.5756, 0.9334, 0.1738, 0.9079, 0.0606, + 0.1776, 0.8951, 0.7399, 0.1055, 0.2805, 0.8924, 0.5937, + 0.5317, 0.1397, 0.5770, 0.6960, 0.2097, 0.9764, 0.2725, + 0.5061, 0.8215, 0.1246, 0.3881, 0.4674, 0.9528, 0.1634, + 0.9358, 0.4536, 0.4361, 0.4732, 0.8428, 0.2144, 0.1339, + 0.4100, 0.7414, 0.9882, 0.2160, 0.9020, 0.2304, 0.2481, + 0.2533, 0.3484, 0.0311, 0.6948, 0.2407, 0.9490, 0.7932, + 0.3976, 0.2464, 0.4584, 0.9675, 0.8166, 0.9928, 0.6982, + 0.3607, 0.7689, 0.6945, 0.7953, 0.6452, 0.3603, 0.3558, + 0.7032, 0.9827, 0.4366, 0.6723, 0.9128, 0.5394, 0.4372, + 0.0095, 0.3611, 0.0511, 0.1433, 0.3252, 0.8166, 0.8777, + 0.6930, 0.4048, 0.8859, 0.5054, 0.6514, 0.1121, 0.9778, + 0.5674, 0.2770, 0.9926, 0.0302, 0.4015, 0.3543, 0.5096, + 0.8317, 0.1443, 0.4937, 0.3506, 0.4212, 0.3419, 0.3912, + 0.7549, 0.8208, 0.9482, 0.2823, 0.8145, 0.4589, 0.9038, + 0.4967, 0.3645, 0.4486, 0.5036, 0.0275, 0.9454, 0.6409, + 0.3598, 0.2317, 0.8481, 0.9516, 0.0826, 0.9137, 0.2611, + 0.3765, 0.8478, 0.3309, 0.5264, 0.5475, 0.5680, 0.8134, + 0.2741, 0.2839, 0.1742, 0.4681, 0.2521, 0.8332, 0.6563, + 0.6688, 0.2858, 0.7436, 0.0563, 0.1330, 0.9614, 0.0077, + 0.6823, 0.2785, 0.9746, 0.6623, 0.6318, 0.0722, 0.6669, + 0.0013, 0.0788, 0.6422, 0.9487, 0.2169, 0.6363, 0.1344, + 0.4209, 0.4347, 0.7185, 0.6101, 0.5517, 0.0476, 0.4783, + 0.6344, 0.5806, 0.9366, 0.6038, 0.3083, 0.8057, 0.9261, + 0.5997, 0.6963, 0.8340, 0.9476, 0.8282, 0.3252, 0.7333, + 0.9336, 0.5398, 0.0321, 0.0697, 0.3161, 0.9531, 0.8266, + 0.3184, 0.3181, 0.1843, 0.8599, 0.1487, 0.2590, 0.4247, + 0.1841, 0.2357, 0.9877, 0.7316, 
0.0964, 0.5700, 0.6667, + 0.7404, 0.7193, 0.6826, 0.4300, 0.2584, 0.8033, 0.6500, + 0.0540, 0.9587, 0.2863, 0.3374, 0.3943, 0.8482, 0.2479, + 0.5712, 0.6593, 0.4789, 0.9350, 0.8089, 0.3627, 0.4241, + 0.3935, 0.4552, 0.1829, 0.8315, 0.0492]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.6042, 0.7803, 0.5228, ..., 0.2007, 0.5476, 0.8431]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.900923252105713 seconds + +[16.6, 16.56, 16.6, 16.76, 16.76, 16.8, 17.28, 17.24, 17.12, 16.92] +[16.84, 16.56, 16.88, 21.2, 23.4, 24.16, 24.8, 24.8, 22.32, 21.2, 20.0, 20.12, 20.12, 20.08] +14.179109334945679 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 652466, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.900923252105713, 'TIME_S_1KI': 0.016707266358868836, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 277.6165154457092, 'W': 19.57926332943202} +[16.6, 16.56, 16.6, 16.76, 16.76, 16.8, 17.28, 17.24, 17.12, 16.92, 16.44, 16.6, 16.48, 16.44, 16.36, 16.28, 16.2, 16.2, 16.2, 16.44] +299.08000000000004 +14.954000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 652466, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.900923252105713, 'TIME_S_1KI': 0.016707266358868836, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 277.6165154457092, 'W': 19.57926332943202, 'J_1KI': 0.42548809508190344, 'W_1KI': 0.030008097478538377, 'W_D': 4.625263329432018, 'J_D': 65.58211445093146, 'W_D_1KI': 0.007088895558438322, 'J_D_1KI': 1.0864773886207592e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..a5b1d25 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 286349, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.45237112045288, "TIME_S_1KI": 0.03650220926370576, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 261.584655122757, "W": 18.391800548576306, "J_1KI": 0.9135169151027488, "W_1KI": 0.06422861804503004, "W_D": 3.7758005485763064, "J_D": 53.702816192627004, "W_D_1KI": 0.013186009200577989, "J_D_1KI": 4.604873493735961e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..70417c4 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,90 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.0067138671875} + +tensor(indices=tensor([[2960, 3352, 2272, ..., 4354, 3613, 
3081], + [1683, 910, 4825, ..., 4503, 3569, 3443]]), + values=tensor([0.7470, 0.7400, 0.2423, ..., 0.7452, 0.6241, 0.1819]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.8861, 0.1443, 0.1411, ..., 0.6596, 0.1915, 0.7341]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.0067138671875 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 15639 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.6672103404998779} + +tensor(indices=tensor([[2657, 10, 213, ..., 3459, 1898, 1412], + [1570, 991, 1711, ..., 3008, 4849, 3361]]), + values=tensor([0.0477, 0.3374, 0.7312, ..., 0.1165, 0.6978, 0.1653]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.8830, 0.3419, 0.0938, ..., 0.1921, 0.2616, 0.4458]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.6672103404998779 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 246113 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.024578332901001} + +tensor(indices=tensor([[3242, 153, 953, ..., 3164, 1173, 3828], + [1636, 461, 1866, ..., 3350, 1390, 1427]]), + values=tensor([0.1174, 0.8189, 0.7761, ..., 0.6490, 0.4361, 0.8132]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.9981, 0.3203, 0.0360, ..., 0.8201, 0.6163, 0.8541]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 9.024578332901001 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 286349 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.45237112045288} + +tensor(indices=tensor([[4546, 268, 2475, ..., 4578, 2693, 1114], + [2810, 1887, 1881, ..., 2898, 4458, 2685]]), + values=tensor([0.3289, 0.4846, 0.8504, ..., 0.7188, 0.8266, 0.2598]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.3695, 0.7282, 0.1314, ..., 0.6685, 0.6494, 0.9188]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.45237112045288 seconds + +tensor(indices=tensor([[4546, 268, 2475, ..., 4578, 2693, 1114], + [2810, 1887, 1881, ..., 2898, 4458, 2685]]), + values=tensor([0.3289, 0.4846, 0.8504, ..., 0.7188, 0.8266, 0.2598]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.3695, 0.7282, 0.1314, ..., 0.6685, 0.6494, 0.9188]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.45237112045288 seconds + +[16.56, 
16.44, 16.2, 16.36, 16.12, 16.08, 16.36, 16.4, 16.48, 16.4] +[16.4, 16.28, 16.4, 17.72, 18.88, 21.72, 22.28, 22.24, 22.04, 20.92, 20.2, 20.04, 20.12, 20.12] +14.222895383834839 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 286349, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.45237112045288, 'TIME_S_1KI': 0.03650220926370576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 261.584655122757, 'W': 18.391800548576306} +[16.56, 16.44, 16.2, 16.36, 16.12, 16.08, 16.36, 16.4, 16.48, 16.4, 16.36, 16.08, 15.96, 16.04, 15.84, 16.12, 16.32, 16.16, 16.44, 16.52] +292.32 +14.616 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 286349, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.45237112045288, 'TIME_S_1KI': 0.03650220926370576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 261.584655122757, 'W': 18.391800548576306, 'J_1KI': 0.9135169151027488, 'W_1KI': 0.06422861804503004, 'W_D': 3.7758005485763064, 'J_D': 53.702816192627004, 'W_D_1KI': 0.013186009200577989, 'J_D_1KI': 4.604873493735961e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.json index e9b3498..ddab6be 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1748, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.143786191940308, "TIME_S_1KI": 5.8030813455036085, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 530.3414198303223, "W": 36.295146632325824, "J_1KI": 303.3989815962942, "W_1KI": 20.763813862886625, "W_D": 17.602146632325823, "J_D": 257.20098424220083, "W_D_1KI": 10.069877936113171, "J_D_1KI": 5.760799734618519} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1752, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.485095500946045, "TIME_S_1KI": 5.984643550768291, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 512.186183013916, "W": 34.91048158652564, "J_1KI": 292.3437117659338, "W_1KI": 19.926073964911897, "W_D": 16.22748158652564, "J_D": 238.08012596821786, "W_D_1KI": 9.262261179523769, "J_D_1KI": 5.286678755435941} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.output index 2da7ef7..89c6336 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,34 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, 
"MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6005632877349854} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6985688209533691} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 17, ..., 999970, +tensor(crow_indices=tensor([ 0, 13, 22, ..., 999981, + 999991, 1000000]), + col_indices=tensor([ 3841, 8582, 8659, ..., 46850, 51232, 80903]), + values=tensor([0.4815, 0.2359, 0.4039, ..., 0.8218, 0.1508, 0.2420]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.4121, 0.4277, 0.6441, ..., 0.2185, 0.3156, 0.3986]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.6985688209533691 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1503 -ss 100000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.003351211547852} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 22, ..., 999979, 999989, 1000000]), - col_indices=tensor([27708, 32922, 35240, ..., 82805, 88487, 98517]), - values=tensor([0.0088, 0.7733, 0.0012, ..., 0.6420, 0.7382, 0.2177]), + col_indices=tensor([16506, 20710, 37506, ..., 59060, 78382, 91823]), + values=tensor([0.6315, 0.8392, 0.3677, ..., 0.4353, 0.5265, 0.8972]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1129, 0.5965, 0.7496, ..., 0.0902, 0.9107, 0.7724]) +tensor([0.3202, 0.2384, 0.4035, ..., 0.5180, 0.1753, 0.3933]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 0.6005632877349854 seconds +Time: 9.003351211547852 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1748 -ss 100000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.143786191940308} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1752 -ss 100000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.485095500946045} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 23, ..., 999980, - 999992, 1000000]), - col_indices=tensor([ 8017, 17251, 18992, ..., 72823, 91334, 91663]), - values=tensor([0.4596, 0.1797, 0.9797, ..., 0.0499, 0.7967, 0.0183]), +tensor(crow_indices=tensor([ 0, 10, 27, ..., 999981, + 999986, 1000000]), + col_indices=tensor([27575, 38852, 42502, ..., 91134, 92148, 97111]), + values=tensor([0.1986, 0.8938, 0.8330, ..., 0.0983, 0.0891, 0.3928]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8873, 0.5523, 0.3791, ..., 0.8812, 0.4027, 0.2259]) +tensor([0.2333, 0.4383, 0.7667, ..., 0.5123, 0.4425, 0.6550]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.143786191940308 seconds +Time: 10.485095500946045 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 23, ..., 999980, - 999992, 1000000]), - col_indices=tensor([ 8017, 17251, 18992, ..., 72823, 91334, 91663]), - values=tensor([0.4596, 0.1797, 0.9797, ..., 0.0499, 0.7967, 0.0183]), +tensor(crow_indices=tensor([ 0, 10, 27, ..., 999981, + 999986, 1000000]), + col_indices=tensor([27575, 38852, 42502, ..., 91134, 92148, 97111]), + values=tensor([0.1986, 0.8938, 0.8330, ..., 0.0983, 0.0891, 0.3928]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8873, 0.5523, 0.3791, ..., 0.8812, 0.4027, 0.2259]) +tensor([0.2333, 0.4383, 0.7667, ..., 0.5123, 0.4425, 0.6550]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.143786191940308 seconds +Time: 10.485095500946045 seconds -[20.48, 20.6, 20.64, 20.68, 21.12, 21.12, 21.24, 21.12, 21.04, 21.04] -[20.76, 20.76, 21.04, 22.36, 24.68, 32.36, 38.56, 45.04, 50.12, 51.88, 51.52, 52.0, 52.0, 51.84] -14.611910104751587 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.143786191940308, 'TIME_S_1KI': 5.8030813455036085, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 530.3414198303223, 'W': 36.295146632325824} -[20.48, 20.6, 20.64, 20.68, 21.12, 21.12, 21.24, 21.12, 21.04, 21.04, 20.36, 20.64, 20.56, 20.72, 20.68, 20.52, 20.72, 20.76, 20.52, 20.48] -373.86 -18.693 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.143786191940308, 'TIME_S_1KI': 5.8030813455036085, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 530.3414198303223, 'W': 36.295146632325824, 'J_1KI': 303.3989815962942, 'W_1KI': 20.763813862886625, 'W_D': 17.602146632325823, 'J_D': 257.20098424220083, 'W_D_1KI': 10.069877936113171, 'J_D_1KI': 5.760799734618519} +[21.12, 20.96, 20.8, 20.6, 20.8, 20.8, 20.8, 20.84, 20.72, 20.68] +[20.76, 20.8, 21.92, 21.92, 22.88, 27.32, 33.44, 40.64, 45.96, 51.28, 52.0, 51.52, 51.4, 51.84] +14.671415567398071 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1752, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.485095500946045, 'TIME_S_1KI': 5.984643550768291, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 512.186183013916, 'W': 34.91048158652564} +[21.12, 20.96, 20.8, 20.6, 20.8, 20.8, 20.8, 20.84, 20.72, 20.68, 21.0, 21.04, 20.92, 20.76, 20.48, 20.48, 20.48, 20.64, 20.64, 21.0] +373.65999999999997 +18.683 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1752, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.485095500946045, 'TIME_S_1KI': 5.984643550768291, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 512.186183013916, 'W': 34.91048158652564, 'J_1KI': 292.3437117659338, 'W_1KI': 19.926073964911897, 'W_D': 16.22748158652564, 'J_D': 238.08012596821786, 'W_D_1KI': 9.262261179523769, 'J_D_1KI': 5.286678755435941} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.json index fdbe85f..c7f2b4c 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 175, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.183927774429321, "TIME_S_1KI": 63.90815871102469, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 628.162894821167, "W": 35.42748603907242, "J_1KI": 3589.5022561209544, "W_1KI": 202.44277736612813, "W_D": 16.71548603907242, "J_D": 296.38140530395515, "W_D_1KI": 95.51706308041383, "J_D_1KI": 545.8117890309362} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 183, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.392640113830566, "TIME_S_1KI": 56.790383135686156, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 621.8855728912355, "W": 35.106665438111044, "J_1KI": 3398.2818190777894, "W_1KI": 191.83970184760133, "W_D": 16.547665438111046, "J_D": 293.1282214522363, "W_D_1KI": 90.42440130115325, "J_D_1KI": 494.12241148171177} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.output index 887a602..676e0dc 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.9738054275512695} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.7204060554504395} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 100, 229, ..., 9999825, - 9999913, 10000000]), - col_indices=tensor([ 2839, 3131, 5153, ..., 92533, 94576, 98932]), - values=tensor([0.4697, 0.9996, 0.7875, ..., 0.5192, 0.5202, 0.9540]), +tensor(crow_indices=tensor([ 0, 98, 204, ..., 9999786, + 9999897, 10000000]), + col_indices=tensor([ 2168, 2221, 3670, ..., 97171, 97920, 99173]), + values=tensor([0.5868, 0.2768, 0.5813, ..., 0.2211, 0.2231, 0.3014]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.9598, 0.0952, 0.8851, ..., 0.3844, 0.8104, 0.5939]) +tensor([0.6888, 0.8719, 0.0407, ..., 0.0271, 0.8141, 0.6850]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 5.9738054275512695 seconds +Time: 5.7204060554504395 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 175 -ss 100000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.183927774429321} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 183 -ss 100000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.392640113830566} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 95, 193, ..., 9999801, - 9999901, 10000000]), - col_indices=tensor([ 2869, 4015, 6080, ..., 94953, 95635, 98117]), - values=tensor([0.0857, 0.9758, 0.7363, ..., 0.8151, 0.8595, 0.7723]), +tensor(crow_indices=tensor([ 0, 97, 203, ..., 9999791, + 9999906, 10000000]), + col_indices=tensor([ 466, 1594, 2031, ..., 98883, 99435, 99456]), + values=tensor([0.5174, 0.4660, 0.4037, ..., 0.7630, 0.3193, 0.8740]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7586, 0.5970, 0.0221, ..., 0.6721, 0.2659, 0.4588]) +tensor([0.4869, 0.9209, 0.1121, ..., 0.4031, 0.7408, 0.7156]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 11.183927774429321 seconds +Time: 10.392640113830566 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 95, 193, ..., 9999801, - 9999901, 10000000]), - col_indices=tensor([ 2869, 4015, 6080, ..., 94953, 95635, 98117]), - values=tensor([0.0857, 0.9758, 0.7363, ..., 0.8151, 0.8595, 0.7723]), +tensor(crow_indices=tensor([ 0, 97, 203, ..., 9999791, + 9999906, 10000000]), + col_indices=tensor([ 466, 1594, 2031, ..., 98883, 99435, 99456]), + values=tensor([0.5174, 0.4660, 0.4037, ..., 0.7630, 0.3193, 0.8740]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7586, 0.5970, 0.0221, ..., 0.6721, 0.2659, 0.4588]) +tensor([0.4869, 0.9209, 0.1121, ..., 0.4031, 0.7408, 0.7156]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 11.183927774429321 seconds +Time: 10.392640113830566 seconds -[20.52, 20.84, 20.72, 20.68, 20.8, 20.68, 20.68, 20.72, 20.64, 20.64] -[20.64, 20.76, 20.84, 24.4, 27.0, 28.6, 31.96, 32.96, 34.0, 38.76, 44.2, 47.96, 51.6, 50.84, 50.88, 50.6, 50.76] -17.730947494506836 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.183927774429321, 'TIME_S_1KI': 63.90815871102469, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 628.162894821167, 'W': 35.42748603907242} -[20.52, 20.84, 20.72, 20.68, 20.8, 20.68, 20.68, 20.72, 20.64, 20.64, 20.64, 20.64, 20.64, 20.72, 20.76, 20.76, 21.0, 20.92, 21.36, 21.56] -374.24 -18.712 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.183927774429321, 'TIME_S_1KI': 63.90815871102469, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 628.162894821167, 'W': 35.42748603907242, 'J_1KI': 3589.5022561209544, 'W_1KI': 202.44277736612813, 'W_D': 16.71548603907242, 'J_D': 296.38140530395515, 'W_D_1KI': 95.51706308041383, 'J_D_1KI': 545.8117890309362} +[20.4, 20.28, 20.32, 20.28, 20.48, 20.48, 20.88, 20.84, 20.76, 20.76] +[20.72, 20.64, 21.04, 22.32, 24.12, 25.84, 28.68, 31.24, 36.6, 42.2, 42.2, 45.8, 51.08, 52.16, 52.0, 52.08, 52.32] +17.714173793792725 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 183, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.392640113830566, 'TIME_S_1KI': 56.790383135686156, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 621.8855728912355, 'W': 35.106665438111044} +[20.4, 20.28, 20.32, 20.28, 20.48, 20.48, 20.88, 20.84, 20.76, 20.76, 20.8, 20.68, 20.96, 21.0, 20.84, 20.72, 20.64, 20.56, 20.32, 20.32] +371.17999999999995 +18.558999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 183, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.392640113830566, 'TIME_S_1KI': 56.790383135686156, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 621.8855728912355, 'W': 35.106665438111044, 'J_1KI': 3398.2818190777894, 'W_1KI': 191.83970184760133, 'W_D': 16.547665438111046, 'J_D': 293.1282214522363, 'W_D_1KI': 90.42440130115325, 'J_D_1KI': 
494.12241148171177} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..4c7e6de --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 55.23988056182861, "TIME_S_1KI": 552.3988056182861, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3392.229231271745, "W": 34.21525226903433, "J_1KI": 33922.29231271745, "W_1KI": 342.1525226903433, "W_D": 15.519252269034329, "J_D": 1538.6372364158642, "W_D_1KI": 155.19252269034328, "J_D_1KI": 1551.9252269034328} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..ca8594f --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 55.23988056182861} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 975, 2002, ..., + 99998038, 99999032, 100000000]), + col_indices=tensor([ 193, 475, 708, ..., 99676, 99979, 99985]), + values=tensor([0.3846, 0.7039, 0.8140, ..., 0.8966, 0.5567, 0.1696]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.9784, 0.7882, 0.1982, ..., 0.8110, 0.1934, 0.3487]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 55.23988056182861 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 975, 2002, ..., + 99998038, 99999032, 100000000]), + col_indices=tensor([ 193, 475, 708, ..., 99676, 99979, 99985]), + values=tensor([0.3846, 0.7039, 0.8140, ..., 0.8966, 0.5567, 0.1696]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.9784, 0.7882, 0.1982, ..., 0.8110, 0.1934, 0.3487]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 55.23988056182861 seconds + +[21.08, 21.04, 20.84, 21.08, 21.04, 21.04, 20.92, 20.88, 20.88, 20.72] +[20.8, 20.76, 20.92, 22.24, 24.44, 26.24, 27.2, 27.2, 26.72, 25.88, 27.64, 27.64, 29.6, 30.76, 30.64, 29.12, 27.36, 25.36, 26.56, 29.12, 32.64, 36.12, 36.04, 35.4, 34.08, 34.08, 28.52, 28.84, 28.6, 28.12, 27.72, 28.28, 28.12, 28.84, 29.32, 29.84, 29.72, 29.68, 29.68, 29.48, 29.92, 33.16, 35.88, 39.28, 42.08, 43.0, 42.28, 41.44, 40.48, 40.64, 40.88, 40.72, 40.84, 41.68, 41.32, 41.48, 41.32, 41.32, 41.4, 41.6, 41.04, 40.84, 40.52, 40.48, 41.32, 41.8, 41.56, 41.52, 40.16, 39.76, 39.12, 38.96, 39.96, 40.84, 40.92, 40.92, 41.64, 40.48, 41.04, 40.68, 40.48, 41.48, 42.2, 42.24, 42.56, 42.28, 41.8, 41.24, 40.48, 40.48, 40.96, 41.72, 41.92, 41.4, 41.24, 41.24] +99.1437737941742 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 55.23988056182861, 'TIME_S_1KI': 552.3988056182861, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3392.229231271745, 'W': 34.21525226903433} +[21.08, 21.04, 20.84, 21.08, 21.04, 21.04, 20.92, 20.88, 20.88, 20.72, 20.16, 20.16, 20.4, 20.36, 20.56, 20.68, 20.84, 20.88, 20.84, 21.0] +373.92 +18.696 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 55.23988056182861, 'TIME_S_1KI': 552.3988056182861, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3392.229231271745, 'W': 34.21525226903433, 'J_1KI': 33922.29231271745, 'W_1KI': 342.1525226903433, 'W_D': 15.519252269034329, 'J_D': 1538.6372364158642, 'W_D_1KI': 155.19252269034328, 'J_D_1KI': 1551.9252269034328} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.json index eff1e86..f4c211f 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 11597, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.436057329177856, "TIME_S_1KI": 0.8998928454926151, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 483.6025880336762, "W": 33.00177532885384, "J_1KI": 41.700662932971994, "W_1KI": 2.845716592985586, "W_D": 14.225775328853839, "J_D": 208.462172027588, "W_D_1KI": 1.2266771862424626, "J_D_1KI": 0.10577538900081596} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 11845, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], 
"MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.452897548675537, "TIME_S_1KI": 0.8824734106100073, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 491.721949520111, "W": 33.55721565186675, "J_1KI": 41.51303921655644, "W_1KI": 2.833027914889552, "W_D": 15.062215651866751, "J_D": 220.71026754021645, "W_D_1KI": 1.2716095949233222, "J_D_1KI": 0.10735412367440457} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.output index cdfe200..42e30bb 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.12187767028808594} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.19909358024597168} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 99998, 99999, +tensor(crow_indices=tensor([ 0, 4, 5, ..., 99997, 99999, 100000]), - col_indices=tensor([99237, 81965, 52149, ..., 94819, 50598, 82628]), - values=tensor([0.3300, 0.8237, 0.5005, ..., 0.6469, 0.1010, 0.4687]), + col_indices=tensor([12266, 41353, 64119, ..., 57579, 58990, 6971]), + values=tensor([0.6227, 0.7944, 0.7450, ..., 0.9056, 0.8637, 0.0316]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0038, 0.2456, 0.3182, ..., 0.7163, 0.7510, 0.9775]) +tensor([0.5310, 0.7756, 0.0968, ..., 0.3911, 0.0764, 0.5885]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.12187767028808594 seconds +Time: 0.19909358024597168 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 8615 -ss 100000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.799410104751587} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 5273 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.112189292907715} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99999, +tensor(crow_indices=tensor([ 0, 0, 2, ..., 99998, 99999, 100000]), - col_indices=tensor([88588, 42232, 90125, ..., 27244, 80106, 39636]), - values=tensor([0.8018, 0.8315, 0.5597, ..., 0.5532, 0.0030, 0.5793]), + col_indices=tensor([18500, 89431, 21652, ..., 1449, 96967, 19441]), + values=tensor([0.9093, 0.3424, 0.7088, ..., 0.3078, 0.7479, 0.8254]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1929, 0.1411, 0.4568, ..., 0.6294, 0.2188, 0.4350]) +tensor([0.7902, 0.1848, 0.8411, ..., 0.5530, 0.0625, 0.5516]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 7.799410104751587 seconds +Time: 5.112189292907715 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 11597 -ss 100000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.436057329177856} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10830 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.599655628204346} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99998, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99999, 100000, 100000]), - col_indices=tensor([24172, 83350, 29274, ..., 76990, 53592, 71081]), - values=tensor([0.7302, 0.8346, 0.3553, ..., 0.4222, 0.8183, 0.0288]), + col_indices=tensor([ 4135, 30090, 75785, ..., 75263, 77636, 9635]), + values=tensor([0.5430, 0.9022, 0.0393, ..., 0.9343, 0.9359, 0.0313]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8537, 0.9431, 0.0277, ..., 0.1357, 0.7019, 0.9196]) +tensor([0.4424, 0.3111, 0.3025, ..., 0.8284, 0.3246, 0.9597]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.436057329177856 seconds +Time: 9.599655628204346 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 11845 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.452897548675537} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99998, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99997, 100000, 100000]), - col_indices=tensor([24172, 83350, 29274, ..., 76990, 53592, 71081]), - values=tensor([0.7302, 0.8346, 0.3553, ..., 0.4222, 0.8183, 0.0288]), + col_indices=tensor([26237, 4052, 39558, ..., 16301, 35459, 98674]), + values=tensor([0.0699, 0.2116, 0.3702, ..., 0.5467, 0.2088, 0.3545]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8537, 0.9431, 0.0277, ..., 0.1357, 0.7019, 0.9196]) +tensor([0.6982, 0.4327, 0.4762, ..., 0.0773, 0.8958, 0.0557]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +76,30 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.436057329177856 seconds +Time: 10.452897548675537 seconds -[21.08, 20.64, 20.76, 20.76, 20.92, 21.32, 21.32, 21.36, 21.0, 20.8] -[20.64, 20.36, 20.72, 23.08, 24.72, 29.44, 35.88, 40.08, 43.56, 45.4, 45.96, 45.6, 45.36, 46.04] -14.653835535049438 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11597, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.436057329177856, 'TIME_S_1KI': 0.8998928454926151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 483.6025880336762, 'W': 33.00177532885384} -[21.08, 20.64, 20.76, 20.76, 20.92, 21.32, 21.32, 21.36, 21.0, 20.8, 20.36, 20.32, 20.32, 20.44, 20.64, 20.8, 21.0, 21.16, 21.12, 21.04] -375.52 -18.776 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11597, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.436057329177856, 'TIME_S_1KI': 0.8998928454926151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 483.6025880336762, 'W': 33.00177532885384, 'J_1KI': 41.700662932971994, 'W_1KI': 2.845716592985586, 'W_D': 14.225775328853839, 'J_D': 208.462172027588, 'W_D_1KI': 1.2266771862424626, 'J_D_1KI': 0.10577538900081596} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99997, 100000, + 100000]), + col_indices=tensor([26237, 4052, 39558, ..., 16301, 35459, 98674]), + values=tensor([0.0699, 0.2116, 0.3702, ..., 0.5467, 0.2088, 0.3545]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6982, 0.4327, 0.4762, ..., 0.0773, 0.8958, 0.0557]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.452897548675537 seconds + +[20.44, 20.4, 20.64, 20.88, 20.76, 20.76, 20.88, 20.8, 20.96, 21.04] +[20.84, 20.88, 21.72, 26.12, 28.0, 32.48, 37.88, 39.16, 42.96, 44.8, 45.0, 45.32, 45.04, 45.04] +14.653240442276001 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11845, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.452897548675537, 'TIME_S_1KI': 0.8824734106100073, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 491.721949520111, 'W': 33.55721565186675} +[20.44, 20.4, 20.64, 20.88, 20.76, 20.76, 20.88, 20.8, 20.96, 21.04, 20.68, 20.68, 20.52, 20.24, 20.2, 20.04, 20.28, 20.24, 20.36, 20.36] +369.9 +18.494999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11845, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.452897548675537, 'TIME_S_1KI': 0.8824734106100073, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 491.721949520111, 'W': 33.55721565186675, 'J_1KI': 41.51303921655644, 'W_1KI': 2.833027914889552, 'W_D': 15.062215651866751, 'J_D': 220.71026754021645, 'W_D_1KI': 1.2716095949233222, 'J_D_1KI': 0.10735412367440457} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_5e-05.json index 67cd5e0..5fb8701 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3297, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.891435861587524, "TIME_S_1KI": 3.3034382352403777, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 528.672490940094, "W": 36.05478479295784, "J_1KI": 160.34955745832394, "W_1KI": 10.935633846817664, "W_D": 17.45578479295784, "J_D": 255.95474444794658, "W_D_1KI": 5.2944448871573675, "J_D_1KI": 1.605837090432929} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3160, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.255688428878784, "TIME_S_1KI": 3.2454710217970835, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 463.90646287918094, "W": 34.111621125184975, "J_1KI": 146.8058426832851, "W_1KI": 10.794816811767397, "W_D": 15.559621125184975, "J_D": 211.6055632019043, "W_D_1KI": 4.92393073581803, "J_D_1KI": 1.5582059290563386} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_5e-05.output 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_5e-05.output index 3c3d8e5..7c7ecd3 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.4573814868927002} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.332211971282959} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 16, ..., 499990, 499995, +tensor(crow_indices=tensor([ 0, 10, 13, ..., 499990, 499997, 500000]), - col_indices=tensor([ 5164, 6869, 8448, ..., 29154, 68140, 97893]), - values=tensor([0.8386, 0.0921, 0.7067, ..., 0.9232, 0.1449, 0.6848]), + col_indices=tensor([16831, 31700, 33476, ..., 20126, 37524, 56641]), + values=tensor([0.4034, 0.0732, 0.2390, ..., 0.1660, 0.9005, 0.3603]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.0246, 0.8160, 0.3295, ..., 0.0588, 0.6998, 0.9868]) +tensor([0.3728, 0.2661, 0.0172, ..., 0.8787, 0.3705, 0.6094]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 0.4573814868927002 seconds +Time: 0.332211971282959 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2295 -ss 100000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.306779146194458} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3160 -ss 100000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.255688428878784} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 499989, 499995, +tensor(crow_indices=tensor([ 0, 1, 7, ..., 499992, 499996, 500000]), - col_indices=tensor([ 2059, 19971, 54406, ..., 65065, 65922, 83323]), - values=tensor([0.5530, 0.6181, 0.7781, ..., 0.5380, 0.6243, 0.8378]), + col_indices=tensor([64602, 478, 27899, ..., 42044, 53218, 73264]), + values=tensor([0.8097, 0.5983, 0.2516, ..., 0.0269, 0.0458, 0.5960]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.4055, 0.5945, 0.9428, ..., 0.6446, 0.1456, 0.3700]) +tensor([0.5290, 0.0200, 0.8761, ..., 0.9070, 0.8739, 0.1739]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 7.306779146194458 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3297 -ss 100000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.891435861587524} +Time: 10.255688428878784 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 11, ..., 499995, 499995, +tensor(crow_indices=tensor([ 0, 1, 7, ..., 499992, 499996, 500000]), - col_indices=tensor([ 8913, 22689, 49331, ..., 65321, 72756, 72788]), - values=tensor([0.7511, 0.0782, 0.7533, ..., 0.6341, 0.1803, 0.2288]), + col_indices=tensor([64602, 478, 27899, ..., 42044, 53218, 73264]), + values=tensor([0.8097, 0.5983, 0.2516, ..., 0.0269, 0.0458, 0.5960]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.5601, 0.4293, 0.2285, ..., 0.5137, 0.5400, 0.5797]) +tensor([0.5290, 0.0200, 0.8761, ..., 0.9070, 0.8739, 0.1739]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,30 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.891435861587524 seconds +Time: 10.255688428878784 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 11, ..., 499995, 499995, - 500000]), - col_indices=tensor([ 8913, 22689, 49331, ..., 65321, 72756, 72788]), - values=tensor([0.7511, 0.0782, 0.7533, ..., 0.6341, 0.1803, 0.2288]), - size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.5601, 0.4293, 0.2285, ..., 0.5137, 0.5400, 0.5797]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 500000 -Density: 5e-05 -Time: 10.891435861587524 seconds - -[20.84, 20.48, 20.44, 20.64, 20.64, 20.68, 20.72, 20.64, 20.64, 20.92] -[20.6, 20.48, 20.72, 25.12, 26.88, 32.4, 38.84, 42.28, 46.96, 50.12, 51.2, 51.8, 51.56, 51.6] -14.66303277015686 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.891435861587524, 'TIME_S_1KI': 3.3034382352403777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 528.672490940094, 'W': 36.05478479295784} -[20.84, 20.48, 20.44, 20.64, 20.64, 20.68, 20.72, 20.64, 20.64, 20.92, 20.84, 21.04, 20.84, 20.84, 20.76, 20.8, 20.68, 20.44, 20.28, 20.24] -371.98 -18.599 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.891435861587524, 'TIME_S_1KI': 3.3034382352403777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 528.672490940094, 'W': 36.05478479295784, 'J_1KI': 160.34955745832394, 'W_1KI': 10.935633846817664, 'W_D': 17.45578479295784, 'J_D': 255.95474444794658, 'W_D_1KI': 5.2944448871573675, 'J_D_1KI': 1.605837090432929} +[20.4, 20.4, 20.36, 20.48, 20.48, 20.56, 20.72, 20.68, 20.88, 20.92] +[20.8, 20.96, 22.0, 23.08, 26.64, 32.64, 32.64, 39.84, 44.88, 50.64, 51.68, 51.72, 51.64] +13.599660396575928 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3160, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.255688428878784, 'TIME_S_1KI': 3.2454710217970835, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 463.90646287918094, 'W': 34.111621125184975} +[20.4, 20.4, 20.36, 20.48, 20.48, 20.56, 20.72, 20.68, 20.88, 20.92, 20.52, 20.4, 20.36, 20.68, 20.72, 20.8, 20.88, 20.68, 20.56, 20.96] +371.03999999999996 +18.552 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3160, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.255688428878784, 'TIME_S_1KI': 3.2454710217970835, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 463.90646287918094, 'W': 34.111621125184975, 'J_1KI': 146.8058426832851, 'W_1KI': 10.794816811767397, 'W_D': 15.559621125184975, 'J_D': 211.6055632019043, 'W_D_1KI': 4.92393073581803, 'J_D_1KI': 1.5582059290563386} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.json index 6fa3845..ce06cb3 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.json +++ 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 32636, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.47010850906372, "TIME_S_1KI": 0.32081469877018387, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 319.63640854835506, "W": 22.48260624860275, "J_1KI": 9.793982367580435, "W_1KI": 0.6888897612637195, "W_D": 3.984606248602752, "J_D": 56.64935891771315, "W_D_1KI": 0.12209235962136145, "J_D_1KI": 0.003741033203252894} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 32993, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.722160577774048, "TIME_S_1KI": 0.32498289266735514, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 325.13037155151375, "W": 22.887281127943623, "J_1KI": 9.854525855530376, "W_1KI": 0.6937011222969607, "W_D": 4.493281127943625, "J_D": 63.8303062057496, "W_D_1KI": 0.13618892273947883, "J_D_1KI": 0.0041278126493340655} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.output index 21cdb17..f6eb827 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.050879478454589844} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04047083854675293} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9998, 9999, 10000]), - col_indices=tensor([5382, 2827, 5658, ..., 9195, 8647, 1137]), - values=tensor([0.6423, 0.5656, 0.8194, ..., 0.3825, 0.7281, 0.0248]), +tensor(crow_indices=tensor([ 0, 3, 4, ..., 10000, 10000, 10000]), + col_indices=tensor([2549, 9361, 9970, ..., 704, 4011, 7891]), + values=tensor([0.5892, 0.1476, 0.4892, ..., 0.9338, 0.1639, 0.4664]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.6609, 0.7541, 0.4159, ..., 0.2180, 0.3481, 0.0053]) +tensor([0.8574, 0.7762, 0.9018, ..., 0.6074, 0.6936, 0.9938]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.050879478454589844 seconds +Time: 0.04047083854675293 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 20637 -ss 10000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.639445781707764} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 25944 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.256449699401855} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9998, 10000, 10000]), - col_indices=tensor([1538, 6690, 5733, ..., 9607, 7438, 7782]), - values=tensor([0.7222, 0.1089, 0.5631, ..., 0.3116, 0.0243, 0.6999]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 10000, 10000]), + col_indices=tensor([2764, 9413, 5263, ..., 1959, 4242, 4549]), + values=tensor([0.9684, 0.2656, 0.2250, ..., 0.3440, 0.8382, 0.0353]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.2878, 0.8940, 0.0961, ..., 0.0631, 0.2895, 0.2219]) +tensor([0.2298, 0.9239, 0.9999, ..., 0.9160, 0.4053, 0.1195]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 6.639445781707764 seconds +Time: 8.256449699401855 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32636 -ss 10000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.47010850906372} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32993 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.722160577774048} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9999, 10000, 10000]), - col_indices=tensor([7014, 8766, 3433, ..., 9466, 1431, 7728]), - values=tensor([0.0370, 0.3747, 0.2051, ..., 0.2901, 0.3737, 0.7201]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9998, 10000]), + col_indices=tensor([5965, 6204, 1451, ..., 9267, 1058, 9600]), + values=tensor([0.3937, 0.8327, 0.3110, ..., 0.9645, 0.7301, 0.2055]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.8451, 0.4833, 0.4298, ..., 0.9015, 0.0937, 0.6764]) +tensor([0.5302, 0.5425, 0.3526, ..., 0.3577, 0.4587, 0.7959]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.47010850906372 seconds +Time: 10.722160577774048 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9999, 10000, 10000]), - col_indices=tensor([7014, 8766, 3433, ..., 9466, 1431, 7728]), - values=tensor([0.0370, 0.3747, 0.2051, ..., 0.2901, 0.3737, 0.7201]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9998, 10000]), + col_indices=tensor([5965, 6204, 1451, ..., 9267, 1058, 9600]), + values=tensor([0.3937, 0.8327, 0.3110, ..., 0.9645, 0.7301, 0.2055]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.8451, 0.4833, 0.4298, ..., 0.9015, 0.0937, 0.6764]) +tensor([0.5302, 0.5425, 0.3526, ..., 0.3577, 0.4587, 0.7959]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.47010850906372 seconds +Time: 10.722160577774048 seconds -[20.64, 20.64, 20.48, 20.48, 20.48, 20.4, 20.32, 20.16, 20.24, 20.4] -[20.72, 20.64, 21.24, 23.32, 25.32, 26.04, 26.72, 26.72, 26.48, 25.08, 23.72, 23.6, 23.56, 23.48] -14.217053174972534 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 32636, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.47010850906372, 'TIME_S_1KI': 0.32081469877018387, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 319.63640854835506, 'W': 22.48260624860275} -[20.64, 20.64, 20.48, 20.48, 20.48, 20.4, 20.32, 20.16, 20.24, 20.4, 20.64, 20.56, 20.48, 20.52, 20.68, 20.84, 20.88, 20.88, 20.84, 20.48] -369.96 -18.497999999999998 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 32636, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.47010850906372, 'TIME_S_1KI': 0.32081469877018387, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 319.63640854835506, 'W': 22.48260624860275, 'J_1KI': 9.793982367580435, 'W_1KI': 0.6888897612637195, 'W_D': 3.984606248602752, 'J_D': 56.64935891771315, 'W_D_1KI': 0.12209235962136145, 'J_D_1KI': 0.003741033203252894} +[20.24, 20.2, 20.16, 20.24, 20.32, 20.88, 20.84, 20.52, 20.88, 20.36] +[20.32, 20.56, 20.56, 23.56, 25.76, 28.24, 29.12, 29.28, 25.72, 24.52, 23.72, 23.72, 23.64, 23.48] +14.20572280883789 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 32993, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.722160577774048, 'TIME_S_1KI': 0.32498289266735514, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.13037155151375, 'W': 22.887281127943623} +[20.24, 20.2, 20.16, 20.24, 20.32, 20.88, 20.84, 20.52, 20.88, 20.36, 20.28, 20.32, 20.32, 20.44, 20.4, 20.4, 20.4, 20.32, 20.56, 20.48] +367.88 +18.394 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 32993, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.722160577774048, 'TIME_S_1KI': 0.32498289266735514, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.13037155151375, 'W': 22.887281127943623, 'J_1KI': 9.854525855530376, 'W_1KI': 0.6937011222969607, 'W_D': 4.493281127943625, 'J_D': 63.8303062057496, 'W_D_1KI': 0.13618892273947883, 'J_D_1KI': 0.0041278126493340655} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.json index c0d7ab3..c72c25e 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 4519, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.300970077514648, "TIME_S_1KI": 2.279479990598506, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 332.2094431686402, "W": 23.392743273719635, "J_1KI": 73.51392856132777, "W_1KI": 5.176530930232272, "W_D": 4.8837432737196345, "J_D": 69.35593720483783, "W_D_1KI": 1.0807132714582062, "J_D_1KI": 0.2391487655362262} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 4704, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.513959884643555, "TIME_S_1KI": 2.235110519694633, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 329.1826779842377, "W": 23.013408423661655, "J_1KI": 69.9793107959689, "W_1KI": 4.892306212513107, "W_D": 4.506408423661657, "J_D": 64.45944753956805, "W_D_1KI": 0.9579949880233115, "J_D_1KI": 0.2036553971137992} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.output index f0ce63e..e728837 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2727935314178467} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.28493499755859375} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 18, ..., 99975, 99993, +tensor(crow_indices=tensor([ 0, 6, 15, ..., 99974, 99987, 100000]), - col_indices=tensor([2872, 4034, 5620, ..., 6357, 6556, 9590]), - values=tensor([0.7995, 0.0045, 0.2448, ..., 0.5761, 0.7842, 0.1546]), + col_indices=tensor([1008, 1745, 2458, ..., 8180, 8309, 8725]), + values=tensor([0.4541, 0.1167, 0.6157, ..., 0.2339, 0.7280, 0.6670]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8077, 0.7130, 0.7281, ..., 0.3829, 0.9486, 0.9162]) +tensor([0.9535, 0.6938, 0.6793, ..., 0.3504, 0.5915, 0.1345]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.2727935314178467 seconds +Time: 0.28493499755859375 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3849 -ss 10000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.942286252975464} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3685 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.224288940429688} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 21, ..., 99991, 99998, +tensor(crow_indices=tensor([ 0, 8, 23, ..., 99975, 99993, 100000]), - col_indices=tensor([ 425, 574, 695, ..., 9570, 6024, 9715]), - values=tensor([0.7410, 0.8879, 0.5840, ..., 0.6995, 0.9280, 0.9465]), + col_indices=tensor([2342, 2426, 3411, ..., 3261, 4460, 9472]), + values=tensor([0.8447, 0.2534, 0.2074, ..., 0.5724, 0.1389, 0.7449]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2929, 0.5164, 0.5482, ..., 0.5103, 0.5008, 0.9557]) +tensor([0.2993, 0.9441, 0.0750, ..., 0.0171, 0.8286, 0.1160]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 8.942286252975464 seconds +Time: 8.224288940429688 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4519 -ss 10000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.300970077514648} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4704 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.513959884643555} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 16, ..., 99974, 99990, +tensor(crow_indices=tensor([ 0, 7, 18, ..., 99977, 99987, 100000]), - col_indices=tensor([ 582, 1691, 2515, ..., 7345, 7996, 8295]), - values=tensor([0.8177, 0.9283, 0.6030, ..., 0.2647, 0.3717, 0.8633]), + col_indices=tensor([ 68, 2486, 2822, ..., 8793, 8847, 9684]), + values=tensor([0.4423, 0.5768, 0.9908, ..., 0.4103, 0.7568, 0.2801]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2209, 0.7260, 0.0429, ..., 0.4887, 0.7834, 0.0043]) +tensor([0.3663, 0.2060, 0.8473, ..., 0.8925, 0.9991, 0.3035]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.300970077514648 seconds +Time: 10.513959884643555 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 16, ..., 99974, 99990, +tensor(crow_indices=tensor([ 0, 7, 18, ..., 99977, 99987, 100000]), - col_indices=tensor([ 582, 1691, 2515, ..., 7345, 7996, 8295]), - values=tensor([0.8177, 0.9283, 0.6030, ..., 0.2647, 0.3717, 0.8633]), + col_indices=tensor([ 68, 2486, 2822, ..., 8793, 8847, 9684]), + values=tensor([0.4423, 0.5768, 0.9908, ..., 0.4103, 0.7568, 0.2801]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2209, 0.7260, 0.0429, ..., 0.4887, 0.7834, 0.0043]) +tensor([0.3663, 0.2060, 0.8473, ..., 0.8925, 0.9991, 0.3035]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.300970077514648 seconds +Time: 10.513959884643555 seconds -[20.48, 20.64, 20.44, 20.28, 20.28, 20.6, 20.72, 20.68, 20.68, 20.48] -[20.4, 20.36, 23.16, 24.08, 27.0, 27.6, 28.72, 28.72, 26.24, 26.28, 24.44, 24.28, 24.24, 24.08] -14.201388835906982 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.300970077514648, 'TIME_S_1KI': 2.279479990598506, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.2094431686402, 'W': 23.392743273719635} -[20.48, 20.64, 20.44, 20.28, 20.28, 20.6, 20.72, 20.68, 20.68, 20.48, 20.52, 20.6, 20.68, 20.64, 20.64, 20.52, 20.52, 20.48, 20.68, 20.72] -370.18 -18.509 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.300970077514648, 'TIME_S_1KI': 2.279479990598506, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.2094431686402, 'W': 23.392743273719635, 'J_1KI': 73.51392856132777, 'W_1KI': 5.176530930232272, 'W_D': 4.8837432737196345, 'J_D': 69.35593720483783, 'W_D_1KI': 1.0807132714582062, 'J_D_1KI': 0.2391487655362262} +[20.44, 20.44, 20.44, 20.4, 20.24, 20.32, 20.48, 20.48, 20.52, 20.68] +[20.64, 20.56, 20.48, 24.2, 26.08, 28.48, 29.28, 27.2, 26.72, 24.2, 24.2, 24.12, 24.08, 23.96] +14.303951501846313 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4704, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.513959884643555, 'TIME_S_1KI': 2.235110519694633, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.1826779842377, 'W': 23.013408423661655} +[20.44, 20.44, 20.44, 20.4, 20.24, 20.32, 20.48, 20.48, 20.52, 20.68, 20.24, 20.44, 20.28, 20.72, 20.96, 21.08, 20.96, 21.0, 20.48, 20.44] +370.14 +18.506999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4704, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.513959884643555, 'TIME_S_1KI': 2.235110519694633, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.1826779842377, 'W': 23.013408423661655, 'J_1KI': 69.9793107959689, 'W_1KI': 4.892306212513107, 'W_D': 4.506408423661657, 'J_D': 64.45944753956805, 'W_D_1KI': 0.9579949880233115, 'J_D_1KI': 0.2036553971137992} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.json 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.json index 6d63757..5526a0d 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 490, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.49430251121521, "TIME_S_1KI": 21.416943900439204, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 322.6961988353729, "W": 22.678695903983957, "J_1KI": 658.5636710925977, "W_1KI": 46.283052865273376, "W_D": 4.3196959039839555, "J_D": 61.46515012335775, "W_D_1KI": 8.815705926497868, "J_D_1KI": 17.991236584689528} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 466, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.031994819641113, "TIME_S_1KI": 21.527885879058182, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 325.8022685432434, "W": 22.86777901563946, "J_1KI": 699.1464990198356, "W_1KI": 49.07248715802459, "W_D": 4.6127790156394575, "J_D": 65.71927542924874, "W_D_1KI": 9.898667415535316, "J_D_1KI": 21.241775569818273} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.output index 8697ed9..89008b5 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.01 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.140977382659912} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.2486960887908936} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 103, 212, ..., 999798, - 999901, 1000000]), - col_indices=tensor([ 63, 140, 146, ..., 9691, 9771, 9918]), - values=tensor([0.8748, 0.2571, 0.8906, ..., 0.1504, 0.2890, 0.7825]), +tensor(crow_indices=tensor([ 0, 77, 175, ..., 999811, + 999910, 1000000]), + col_indices=tensor([ 35, 141, 347, ..., 9617, 9713, 9775]), + values=tensor([0.5684, 0.4118, 0.8956, ..., 0.8300, 0.5668, 0.8186]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.5882, 0.3416, 0.1892, ..., 0.3016, 0.5220, 0.0626]) +tensor([0.7862, 0.9671, 0.1334, ..., 0.0132, 0.8938, 0.7920]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 2.140977382659912 seconds +Time: 2.2486960887908936 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 490 -ss 10000 -sd 0.01 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.49430251121521} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 466 -ss 10000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.031994819641113} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 113, 202, ..., 999820, - 999916, 1000000]), - col_indices=tensor([ 2, 35, 39, ..., 9519, 9605, 9656]), - values=tensor([0.0992, 0.7724, 0.9238, ..., 0.3639, 0.9758, 0.0697]), +tensor(crow_indices=tensor([ 0, 101, 225, ..., 999830, + 999902, 1000000]), + col_indices=tensor([ 154, 165, 205, ..., 9812, 9815, 9915]), + values=tensor([0.8739, 0.8341, 0.5427, ..., 0.3042, 0.6360, 0.3675]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2199, 0.1288, 0.7757, ..., 0.1449, 0.2950, 0.2928]) +tensor([0.1667, 0.6012, 0.2305, ..., 0.2181, 0.2842, 0.9004]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.49430251121521 seconds +Time: 10.031994819641113 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 113, 202, ..., 999820, - 999916, 1000000]), - col_indices=tensor([ 2, 35, 39, ..., 9519, 9605, 9656]), - values=tensor([0.0992, 0.7724, 0.9238, ..., 0.3639, 0.9758, 0.0697]), +tensor(crow_indices=tensor([ 0, 101, 225, ..., 999830, + 999902, 1000000]), + col_indices=tensor([ 154, 165, 205, ..., 9812, 9815, 9915]), + values=tensor([0.8739, 0.8341, 0.5427, ..., 0.3042, 0.6360, 0.3675]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2199, 0.1288, 0.7757, ..., 0.1449, 0.2950, 0.2928]) +tensor([0.1667, 0.6012, 0.2305, ..., 0.2181, 0.2842, 0.9004]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.49430251121521 seconds +Time: 10.031994819641113 seconds -[20.68, 20.72, 20.56, 20.52, 20.48, 20.2, 19.92, 19.96, 19.92, 19.92] -[19.96, 20.36, 20.52, 22.04, 24.08, 26.92, 27.96, 27.96, 26.64, 25.04, 24.52, 24.4, 24.4, 24.52] -14.229045629501343 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 490, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.49430251121521, 'TIME_S_1KI': 21.416943900439204, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.6961988353729, 'W': 22.678695903983957} -[20.68, 20.72, 20.56, 20.52, 20.48, 20.2, 19.92, 19.96, 19.92, 19.92, 20.44, 20.36, 20.4, 20.52, 20.4, 20.64, 20.76, 20.52, 20.52, 20.52] -367.18 -18.359 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 490, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.49430251121521, 'TIME_S_1KI': 21.416943900439204, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.6961988353729, 'W': 22.678695903983957, 'J_1KI': 658.5636710925977, 'W_1KI': 46.283052865273376, 'W_D': 4.3196959039839555, 'J_D': 61.46515012335775, 'W_D_1KI': 8.815705926497868, 'J_D_1KI': 17.991236584689528} +[20.0, 20.04, 20.24, 20.28, 20.28, 20.28, 20.44, 20.2, 20.12, 20.16] +[20.16, 20.4, 20.28, 22.2, 24.0, 27.88, 28.52, 28.36, 27.2, 25.84, 24.16, 24.32, 24.36, 24.36] +14.247219562530518 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 466, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.031994819641113, 'TIME_S_1KI': 21.527885879058182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.8022685432434, 'W': 22.86777901563946} +[20.0, 20.04, 20.24, 20.28, 20.28, 20.28, 20.44, 20.2, 20.12, 20.16, 20.4, 20.44, 20.32, 20.44, 20.56, 20.32, 20.4, 20.36, 20.08, 20.04] +365.1 +18.255000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 466, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.031994819641113, 'TIME_S_1KI': 21.527885879058182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.8022685432434, 'W': 22.86777901563946, 'J_1KI': 699.1464990198356, 'W_1KI': 49.07248715802459, 'W_D': 4.6127790156394575, 'J_D': 65.71927542924874, 'W_D_1KI': 9.898667415535316, 'J_D_1KI': 21.241775569818273} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.json index d53721b..e3b0887 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.575754880905151, "TIME_S_1KI": 105.75754880905151, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 357.48722974777223, "W": 23.357767084816274, "J_1KI": 3574.8722974777224, "W_1KI": 233.57767084816274, "W_D": 4.891767084816273, "J_D": 74.86778412389756, "W_D_1KI": 48.91767084816273, "J_D_1KI": 489.1767084816273} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.582204103469849, "TIME_S_1KI": 105.82204103469849, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 394.3855400466919, "W": 24.027936546671576, "J_1KI": 3943.8554004669195, "W_1KI": 240.27936546671577, "W_D": 5.732936546671578, "J_D": 94.09827063679698, "W_D_1KI": 57.32936546671578, "J_D_1KI": 573.2936546671577} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.output index 5bc40f5..9eadab5 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.575754880905151} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.582204103469849} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 505, 1011, ..., 4998999, - 4999505, 5000000]), - col_indices=tensor([ 17, 34, 93, ..., 9927, 9945, 9977]), - values=tensor([0.3942, 0.9668, 0.2842, ..., 0.3748, 0.5474, 0.6270]), +tensor(crow_indices=tensor([ 0, 506, 1036, ..., 4999012, + 4999522, 5000000]), + col_indices=tensor([ 2, 6, 14, ..., 9953, 9962, 9983]), + values=tensor([0.7970, 0.3700, 0.8324, ..., 0.2223, 0.8075, 0.1339]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.3102, 0.7326, 0.1847, ..., 0.2267, 0.2009, 0.0941]) +tensor([0.3881, 0.8882, 0.8978, ..., 0.9188, 0.6267, 0.4542]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.575754880905151 seconds +Time: 10.582204103469849 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 505, 1011, ..., 4998999, - 4999505, 5000000]), - col_indices=tensor([ 17, 34, 93, ..., 9927, 9945, 9977]), - values=tensor([0.3942, 0.9668, 0.2842, ..., 0.3748, 0.5474, 0.6270]), +tensor(crow_indices=tensor([ 0, 506, 1036, ..., 4999012, + 4999522, 5000000]), + col_indices=tensor([ 2, 6, 14, ..., 9953, 9962, 9983]), + values=tensor([0.7970, 0.3700, 0.8324, ..., 0.2223, 0.8075, 0.1339]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.3102, 0.7326, 0.1847, ..., 0.2267, 0.2009, 0.0941]) +tensor([0.3881, 0.8882, 0.8978, ..., 0.9188, 0.6267, 0.4542]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.575754880905151 seconds +Time: 10.582204103469849 seconds -[20.44, 20.44, 20.44, 20.52, 20.44, 20.4, 20.32, 20.2, 20.08, 20.32] -[20.32, 20.16, 21.08, 22.56, 24.44, 27.6, 27.6, 29.28, 28.88, 28.12, 25.16, 24.16, 24.12, 24.4, 24.56] -15.30485463142395 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.575754880905151, 'TIME_S_1KI': 105.75754880905151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.48722974777223, 'W': 23.357767084816274} -[20.44, 20.44, 20.44, 20.52, 20.44, 20.4, 20.32, 20.2, 20.08, 20.32, 20.52, 20.56, 20.76, 20.76, 20.84, 20.84, 20.72, 20.56, 20.56, 20.48] -369.32 -18.466 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.575754880905151, 'TIME_S_1KI': 105.75754880905151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.48722974777223, 'W': 23.357767084816274, 'J_1KI': 3574.8722974777224, 'W_1KI': 233.57767084816274, 'W_D': 4.891767084816273, 'J_D': 74.86778412389756, 'W_D_1KI': 48.91767084816273, 'J_D_1KI': 489.1767084816273} +[20.0, 20.2, 20.08, 20.2, 20.2, 20.12, 20.4, 20.28, 20.44, 20.72] +[20.6, 20.72, 21.08, 24.84, 27.04, 29.48, 31.2, 28.84, 27.88, 
27.88, 26.0, 23.84, 24.0, 24.32, 24.6, 24.48] +16.41362500190735 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.582204103469849, 'TIME_S_1KI': 105.82204103469849, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 394.3855400466919, 'W': 24.027936546671576} +[20.0, 20.2, 20.08, 20.2, 20.2, 20.12, 20.4, 20.28, 20.44, 20.72, 20.56, 20.44, 20.2, 20.24, 20.08, 20.28, 20.68, 20.68, 20.44, 20.6] +365.9 +18.294999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.582204103469849, 'TIME_S_1KI': 105.82204103469849, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 394.3855400466919, 'W': 24.027936546671576, 'J_1KI': 3943.8554004669195, 'W_1KI': 240.27936546671577, 'W_D': 5.732936546671578, 'J_D': 94.09827063679698, 'W_D_1KI': 57.32936546671578, 'J_D_1KI': 573.2936546671577} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.json index d4908f3..33e4608 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.58586049079895, "TIME_S_1KI": 215.8586049079895, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 695.2596657943726, "W": 24.42843676698333, "J_1KI": 6952.596657943726, "W_1KI": 244.2843676698333, "W_D": 6.1854367669833294, "J_D": 176.04420374608034, "W_D_1KI": 61.854367669833294, "J_D_1KI": 618.543676698333} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.590675115585327, "TIME_S_1KI": 215.90675115585327, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 698.2714468479157, "W": 24.533351073497297, "J_1KI": 6982.714468479157, "W_1KI": 245.33351073497298, "W_D": 6.067351073497296, "J_D": 172.68973977231983, "W_D_1KI": 60.67351073497296, "J_D_1KI": 606.7351073497296} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.output index b9e17f1..3058f8c 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.1 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.58586049079895} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.590675115585327} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 983, 1945, ..., 9997995, - 9998975, 10000000]), - col_indices=tensor([ 7, 29, 32, ..., 9986, 9994, 9999]), - values=tensor([0.5805, 0.0545, 0.7779, ..., 0.8799, 0.6314, 0.5149]), +tensor(crow_indices=tensor([ 0, 953, 1990, ..., 9998028, + 9999002, 10000000]), + col_indices=tensor([ 3, 5, 31, ..., 9963, 9981, 9996]), + values=tensor([0.0177, 0.1736, 0.9045, ..., 0.8807, 0.7948, 0.3225]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1246, 0.2739, 0.0084, ..., 0.7975, 0.3318, 0.0977]) +tensor([0.5695, 0.4055, 0.5858, ..., 0.6808, 0.9483, 0.6795]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 21.58586049079895 seconds +Time: 21.590675115585327 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 983, 1945, ..., 9997995, - 9998975, 10000000]), - col_indices=tensor([ 7, 29, 32, ..., 9986, 9994, 9999]), - values=tensor([0.5805, 0.0545, 0.7779, ..., 0.8799, 0.6314, 0.5149]), +tensor(crow_indices=tensor([ 0, 953, 1990, ..., 9998028, + 9999002, 10000000]), + col_indices=tensor([ 3, 5, 31, ..., 9963, 9981, 9996]), + values=tensor([0.0177, 0.1736, 0.9045, ..., 0.8807, 0.7948, 0.3225]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1246, 0.2739, 0.0084, ..., 0.7975, 0.3318, 0.0977]) +tensor([0.5695, 0.4055, 0.5858, ..., 0.6808, 0.9483, 0.6795]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 21.58586049079895 seconds +Time: 21.590675115585327 seconds -[20.44, 20.24, 20.32, 20.24, 20.16, 20.52, 20.56, 20.68, 20.88, 21.04] -[20.8, 20.6, 23.72, 23.72, 26.04, 27.56, 30.88, 32.68, 29.8, 29.08, 27.52, 26.28, 24.44, 24.48, 24.48, 24.2, 24.2, 24.2, 24.12, 24.12, 24.28, 24.28, 24.4, 24.32, 24.16, 24.0, 24.08, 24.16] -28.461078882217407 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.58586049079895, 'TIME_S_1KI': 215.8586049079895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 695.2596657943726, 'W': 24.42843676698333} -[20.44, 20.24, 20.32, 20.24, 20.16, 20.52, 20.56, 20.68, 20.88, 21.04, 19.8, 19.8, 19.8, 19.96, 20.0, 20.08, 20.12, 20.36, 20.32, 20.36] -364.86 -18.243000000000002 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 
'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.58586049079895, 'TIME_S_1KI': 215.8586049079895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 695.2596657943726, 'W': 24.42843676698333, 'J_1KI': 6952.596657943726, 'W_1KI': 244.2843676698333, 'W_D': 6.1854367669833294, 'J_D': 176.04420374608034, 'W_D_1KI': 61.854367669833294, 'J_D_1KI': 618.543676698333} +[20.76, 20.76, 20.76, 20.76, 20.48, 20.8, 20.84, 20.8, 20.76, 20.6] +[20.24, 20.28, 23.72, 25.04, 26.96, 30.6, 31.88, 31.88, 29.52, 29.44, 27.04, 25.04, 23.8, 23.76, 24.04, 24.2, 24.36, 24.36, 24.28, 24.28, 24.28, 24.2, 24.12, 24.32, 24.28, 24.52, 24.36, 24.52] +28.462130784988403 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.590675115585327, 'TIME_S_1KI': 215.90675115585327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 698.2714468479157, 'W': 24.533351073497297} +[20.76, 20.76, 20.76, 20.76, 20.48, 20.8, 20.84, 20.8, 20.76, 20.6, 20.52, 20.48, 20.64, 20.56, 20.36, 20.36, 20.0, 20.12, 19.88, 20.04] +369.32000000000005 +18.466 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.590675115585327, 'TIME_S_1KI': 215.90675115585327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 698.2714468479157, 'W': 24.533351073497297, 'J_1KI': 6982.714468479157, 'W_1KI': 245.33351073497298, 'W_D': 6.067351073497296, 'J_D': 172.68973977231983, 'W_D_1KI': 60.67351073497296, 'J_D_1KI': 606.7351073497296} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.2.json index 00303c3..4bc8860 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.2.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.006776571273804, "TIME_S_1KI": 420.06776571273804, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1323.3857918548583, "W": 24.65839535462019, "J_1KI": 13233.857918548583, "W_1KI": 246.58395354620188, "W_D": 6.256395354620192, "J_D": 335.77305422592167, "W_D_1KI": 62.56395354620192, "J_D_1KI": 625.6395354620192} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.53456234931946, "TIME_S_1KI": 425.3456234931946, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1258.7017677497863, "W": 24.331627673949086, "J_1KI": 12587.017677497863, "W_1KI": 243.31627673949086, "W_D": 5.892627673949086, "J_D": 304.83208806586254, "W_D_1KI": 58.92627673949086, "J_D_1KI": 589.2627673949086} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.2.output 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.2.output index 7c8bead..d93254b 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.2 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.006776571273804} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.53456234931946} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1950, 3929, ..., 19995954, - 19997973, 20000000]), - col_indices=tensor([ 0, 10, 17, ..., 9977, 9980, 9990]), - values=tensor([0.1470, 0.8510, 0.9446, ..., 0.3735, 0.6466, 0.3885]), +tensor(crow_indices=tensor([ 0, 1981, 3961, ..., 19996043, + 19998038, 20000000]), + col_indices=tensor([ 1, 3, 6, ..., 9979, 9991, 9993]), + values=tensor([0.0058, 0.7086, 0.6623, ..., 0.9502, 0.1257, 0.5097]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.1499, 0.7404, 0.4886, ..., 0.1182, 0.4158, 0.3615]) +tensor([0.3594, 0.8224, 0.5071, ..., 0.7554, 0.0445, 0.2812]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 42.006776571273804 seconds +Time: 42.53456234931946 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1950, 3929, ..., 19995954, - 19997973, 20000000]), - col_indices=tensor([ 0, 10, 17, ..., 9977, 9980, 9990]), - values=tensor([0.1470, 0.8510, 0.9446, ..., 0.3735, 0.6466, 0.3885]), +tensor(crow_indices=tensor([ 0, 1981, 3961, ..., 19996043, + 19998038, 20000000]), + col_indices=tensor([ 1, 3, 6, ..., 9979, 9991, 9993]), + values=tensor([0.0058, 0.7086, 0.6623, ..., 0.9502, 0.1257, 0.5097]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.1499, 0.7404, 0.4886, ..., 0.1182, 0.4158, 0.3615]) +tensor([0.3594, 0.8224, 0.5071, ..., 0.7554, 0.0445, 0.2812]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 42.006776571273804 seconds +Time: 42.53456234931946 seconds -[20.44, 20.28, 20.36, 20.28, 20.32, 20.16, 20.32, 20.32, 20.52, 20.56] -[20.6, 20.56, 20.76, 24.64, 26.0, 26.92, 30.76, 29.04, 28.68, 29.52, 30.24, 30.24, 27.76, 27.48, 26.4, 24.64, 24.48, 24.32, 24.28, 24.28, 24.28, 24.08, 24.28, 24.28, 24.36, 24.2, 24.24, 24.6, 24.64, 24.52, 24.72, 24.48, 24.64, 24.4, 24.52, 24.52, 24.36, 24.4, 24.4, 24.4, 24.48, 24.68, 24.76, 24.56, 24.36, 24.16, 24.24, 24.4, 24.4, 24.76, 24.88, 24.96, 24.92] -53.668771743774414 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.006776571273804, 'TIME_S_1KI': 420.06776571273804, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1323.3857918548583, 'W': 24.65839535462019} -[20.44, 20.28, 20.36, 20.28, 20.32, 20.16, 20.32, 20.32, 20.52, 20.56, 20.12, 19.92, 20.0, 20.28, 20.44, 20.72, 20.96, 21.04, 21.04, 21.04] -368.03999999999996 -18.401999999999997 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.006776571273804, 'TIME_S_1KI': 420.06776571273804, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1323.3857918548583, 'W': 24.65839535462019, 'J_1KI': 13233.857918548583, 'W_1KI': 246.58395354620188, 'W_D': 6.256395354620192, 'J_D': 335.77305422592167, 'W_D_1KI': 62.56395354620192, 'J_D_1KI': 625.6395354620192} +[20.48, 20.48, 20.28, 20.32, 20.32, 20.64, 20.72, 20.8, 20.8, 20.6] +[20.68, 20.52, 20.64, 22.6, 23.52, 25.2, 28.36, 29.8, 29.56, 30.0, 31.0, 27.24, 27.24, 27.16, 25.84, 24.08, 24.08, 24.32, 24.32, 24.2, 24.04, 24.08, 24.12, 24.16, 24.16, 24.28, 24.44, 24.36, 24.12, 23.96, 23.8, 24.08, 24.2, 24.32, 24.56, 24.56, 24.32, 24.32, 24.4, 24.28, 24.48, 24.4, 24.4, 24.36, 24.6, 24.52, 24.4, 24.52, 24.36, 24.24, 24.24] +51.731096029281616 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.53456234931946, 'TIME_S_1KI': 425.3456234931946, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1258.7017677497863, 'W': 24.331627673949086} +[20.48, 20.48, 20.28, 20.32, 20.32, 20.64, 20.72, 20.8, 20.8, 20.6, 19.76, 19.96, 20.32, 20.4, 20.48, 20.2, 20.28, 20.84, 20.88, 21.28] +368.78 +18.439 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 
'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.53456234931946, 'TIME_S_1KI': 425.3456234931946, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1258.7017677497863, 'W': 24.331627673949086, 'J_1KI': 12587.017677497863, 'W_1KI': 243.31627673949086, 'W_D': 5.892627673949086, 'J_D': 304.83208806586254, 'W_D_1KI': 58.92627673949086, 'J_D_1KI': 589.2627673949086} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.3.json index efd7bc8..a851bf2 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.3.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 67.20304369926453, "TIME_S_1KI": 672.0304369926453, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1846.4787199783327, "W": 24.29422238106021, "J_1KI": 18464.787199783328, "W_1KI": 242.9422238106021, "W_D": 5.818222381060206, "J_D": 442.2131174325942, "W_D_1KI": 58.18222381060206, "J_D_1KI": 581.8222381060207} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 63.5993595123291, "TIME_S_1KI": 635.993595123291, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1830.1276798915862, "W": 24.37217906347355, "J_1KI": 18301.27679891586, "W_1KI": 243.72179063473547, "W_D": 6.110179063473549, "J_D": 458.81854897069934, "W_D_1KI": 61.10179063473549, "J_D_1KI": 611.0179063473548} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.3.output index f1c9a57..8e00d2d 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.3 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 67.20304369926453} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 63.5993595123291} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2926, 5920, ..., 29993999, - 29997022, 30000000]), - col_indices=tensor([ 1, 4, 6, ..., 9978, 9982, 9992]), - values=tensor([0.3929, 0.6592, 0.7367, ..., 0.3321, 0.3012, 0.1502]), +tensor(crow_indices=tensor([ 0, 3020, 6019, ..., 29993935, + 29996918, 30000000]), + col_indices=tensor([ 3, 10, 12, ..., 9996, 9998, 9999]), + values=tensor([0.4272, 0.9443, 0.2889, ..., 0.0892, 0.8844, 0.9121]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.6782, 0.5388, 0.0901, ..., 0.7339, 0.4235, 0.1483]) +tensor([0.7267, 0.4804, 0.8266, ..., 0.7945, 0.0876, 0.7736]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 67.20304369926453 seconds +Time: 63.5993595123291 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2926, 5920, ..., 29993999, - 29997022, 30000000]), - col_indices=tensor([ 1, 4, 6, ..., 9978, 9982, 9992]), - values=tensor([0.3929, 0.6592, 0.7367, ..., 0.3321, 0.3012, 0.1502]), +tensor(crow_indices=tensor([ 0, 3020, 6019, ..., 29993935, + 29996918, 30000000]), + col_indices=tensor([ 3, 10, 12, ..., 9996, 9998, 9999]), + values=tensor([0.4272, 0.9443, 0.2889, ..., 0.0892, 0.8844, 0.9121]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.6782, 0.5388, 0.0901, ..., 0.7339, 0.4235, 0.1483]) +tensor([0.7267, 0.4804, 0.8266, ..., 0.7945, 0.0876, 0.7736]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 67.20304369926453 seconds +Time: 63.5993595123291 seconds -[20.6, 20.8, 20.76, 20.64, 20.32, 20.48, 20.48, 20.52, 20.8, 20.6] -[20.64, 20.6, 20.6, 21.32, 22.6, 24.24, 25.68, 28.84, 30.4, 30.64, 31.4, 31.32, 28.4, 28.16, 27.72, 26.8, 26.8, 25.96, 24.6, 24.4, 24.12, 24.12, 24.12, 24.08, 24.36, 24.56, 24.8, 24.88, 24.88, 24.92, 24.92, 24.72, 24.64, 24.44, 24.28, 24.52, 24.68, 24.64, 24.68, 24.64, 24.64, 24.52, 24.76, 24.84, 24.68, 24.72, 24.68, 24.68, 24.76, 24.68, 24.52, 24.2, 24.12, 24.12, 24.24, 24.48, 24.64, 24.76, 24.76, 24.52, 24.28, 24.32, 24.12, 24.04, 24.32, 24.32, 24.6, 24.52, 24.76, 24.72, 24.36, 24.12, 24.16, 24.36, 24.48] -76.0048496723175 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 67.20304369926453, 'TIME_S_1KI': 672.0304369926453, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1846.4787199783327, 'W': 24.29422238106021} -[20.6, 20.8, 20.76, 20.64, 20.32, 20.48, 20.48, 20.52, 20.8, 20.6, 20.04, 20.16, 20.28, 20.16, 20.16, 20.64, 20.72, 20.72, 20.92, 20.68] -369.52000000000004 -18.476000000000003 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 
67.20304369926453, 'TIME_S_1KI': 672.0304369926453, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1846.4787199783327, 'W': 24.29422238106021, 'J_1KI': 18464.787199783328, 'W_1KI': 242.9422238106021, 'W_D': 5.818222381060206, 'J_D': 442.2131174325942, 'W_D_1KI': 58.18222381060206, 'J_D_1KI': 581.8222381060207} +[20.12, 20.36, 20.28, 20.24, 20.4, 20.44, 20.32, 20.4, 20.48, 20.56] +[20.56, 20.48, 20.76, 22.32, 24.68, 26.0, 27.64, 30.32, 30.48, 30.52, 31.08, 31.48, 29.28, 29.36, 29.36, 28.84, 27.6, 26.24, 24.72, 24.8, 24.56, 24.56, 24.52, 24.24, 24.12, 24.36, 24.52, 24.52, 24.36, 24.48, 24.12, 23.72, 23.84, 24.24, 24.16, 24.24, 24.16, 24.2, 24.0, 24.0, 24.16, 24.32, 24.36, 24.48, 24.4, 24.32, 24.28, 24.16, 24.2, 24.08, 24.28, 24.36, 24.36, 24.4, 24.2, 24.28, 24.16, 24.28, 24.4, 24.48, 24.68, 24.72, 24.72, 24.56, 24.56, 24.48, 24.64, 24.44, 24.4, 24.44, 24.28, 24.24, 24.36, 24.52] +75.09085154533386 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 63.5993595123291, 'TIME_S_1KI': 635.993595123291, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1830.1276798915862, 'W': 24.37217906347355} +[20.12, 20.36, 20.28, 20.24, 20.4, 20.44, 20.32, 20.4, 20.48, 20.56, 20.2, 20.2, 20.36, 20.16, 20.08, 20.08, 20.12, 20.24, 20.4, 20.48] +365.24 +18.262 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 63.5993595123291, 'TIME_S_1KI': 635.993595123291, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1830.1276798915862, 'W': 24.37217906347355, 'J_1KI': 18301.27679891586, 'W_1KI': 243.72179063473547, 'W_D': 6.110179063473549, 'J_D': 458.81854897069934, 'W_D_1KI': 61.10179063473549, 'J_D_1KI': 611.0179063473548} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..8143691 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 83.99711680412292, "TIME_S_1KI": 839.9711680412292, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2478.5038306999204, "W": 24.954187412594408, "J_1KI": 24785.038306999206, "W_1KI": 249.54187412594408, "W_D": 6.705187412594405, "J_D": 665.9737066528793, "W_D_1KI": 67.05187412594405, "J_D_1KI": 670.5187412594405} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..0e62c4b --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.4 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, 
"MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 83.99711680412292} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3989, 7950, ..., 39991913, + 39995922, 40000000]), + col_indices=tensor([ 0, 1, 6, ..., 9996, 9997, 9999]), + values=tensor([0.4556, 0.9367, 0.5980, ..., 0.0177, 0.8725, 0.2828]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.5334, 0.8652, 0.9709, ..., 0.8156, 0.3004, 0.2949]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 83.99711680412292 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3989, 7950, ..., 39991913, + 39995922, 40000000]), + col_indices=tensor([ 0, 1, 6, ..., 9996, 9997, 9999]), + values=tensor([0.4556, 0.9367, 0.5980, ..., 0.0177, 0.8725, 0.2828]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.5334, 0.8652, 0.9709, ..., 0.8156, 0.3004, 0.2949]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 83.99711680412292 seconds + +[20.2, 20.04, 20.04, 20.04, 20.28, 20.2, 20.28, 20.64, 20.48, 20.64] +[20.92, 20.76, 21.2, 24.8, 26.64, 27.52, 29.04, 30.4, 30.4, 31.44, 31.2, 31.8, 30.56, 27.28, 27.84, 27.44, 28.2, 27.6, 26.92, 25.92, 25.92, 24.76, 24.68, 24.72, 24.84, 24.84, 24.92, 25.08, 25.16, 25.12, 25.12, 24.8, 24.68, 24.68, 24.68, 24.76, 24.92, 24.84, 24.8, 25.0, 25.08, 25.08, 25.48, 25.56, 25.56, 25.56, 25.32, 25.2, 24.96, 24.6, 24.72, 24.72, 24.76, 25.04, 25.04, 24.96, 24.8, 24.72, 24.72, 24.6, 24.64, 24.8, 25.04, 24.96, 25.04, 25.08, 24.88, 24.68, 24.76, 24.8, 24.8, 24.88, 24.92, 25.12, 25.08, 25.16, 25.36, 25.28, 25.32, 25.4, 25.2, 24.96, 24.96, 24.96, 24.8, 25.0, 25.04, 25.0, 24.92, 24.88, 24.84, 24.8, 24.6, 24.44, 24.48, 24.48, 24.6, 24.72] +99.32216143608093 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 83.99711680412292, 'TIME_S_1KI': 839.9711680412292, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2478.5038306999204, 'W': 24.954187412594408} +[20.2, 20.04, 20.04, 20.04, 20.28, 20.2, 20.28, 20.64, 20.48, 20.64, 20.4, 20.44, 20.36, 20.32, 20.24, 20.24, 20.24, 20.16, 20.16, 20.4] +364.98 +18.249000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 83.99711680412292, 'TIME_S_1KI': 
839.9711680412292, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2478.5038306999204, 'W': 24.954187412594408, 'J_1KI': 24785.038306999206, 'W_1KI': 249.54187412594408, 'W_D': 6.705187412594405, 'J_D': 665.9737066528793, 'W_D_1KI': 67.05187412594405, 'J_D_1KI': 670.5187412594405} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..d4aa07e --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 122.3178551197052, "TIME_S_1KI": 1223.178551197052, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3064.272414226532, "W": 24.76047197717133, "J_1KI": 30642.72414226532, "W_1KI": 247.6047197717133, "W_D": 6.469471977171327, "J_D": 800.6400093073842, "W_D_1KI": 64.69471977171328, "J_D_1KI": 646.9471977171328} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..e82eda2 --- /dev/null +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.5 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 122.3178551197052} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4994, 10005, ..., 49990059, + 49994955, 50000000]), + col_indices=tensor([ 0, 1, 3, ..., 9993, 9994, 9997]), + values=tensor([0.7680, 0.8457, 0.0556, ..., 0.3907, 0.5360, 0.5578]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.8026, 0.6854, 0.4524, ..., 0.3540, 0.7807, 0.8385]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 122.3178551197052 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4994, 10005, ..., 49990059, + 49994955, 50000000]), + col_indices=tensor([ 0, 1, 3, ..., 9993, 9994, 9997]), + values=tensor([0.7680, 0.8457, 0.0556, ..., 0.3907, 0.5360, 0.5578]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.8026, 0.6854, 0.4524, ..., 0.3540, 0.7807, 0.8385]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 122.3178551197052 seconds + +[20.72, 20.68, 20.44, 20.36, 20.16, 20.16, 19.92, 20.0, 20.04, 20.16] +[20.12, 20.36, 20.6, 24.52, 26.2, 27.84, 28.8, 27.28, 29.52, 31.56, 31.56, 31.64, 32.56, 31.84, 27.64, 27.84, 28.52, 29.04, 29.32, 29.24, 28.2, 27.12, 26.12, 26.12, 25.08, 24.96, 24.96, 24.84, 24.88, 24.8, 24.56, 24.44, 24.2, 24.56, 24.68, 24.68, 24.68, 25.0, 24.96, 24.84, 24.76, 24.72, 24.44, 24.56, 24.52, 24.56, 24.96, 24.84, 24.84, 24.68, 24.76, 24.72, 24.6, 24.56, 24.4, 24.48, 24.36, 24.48, 24.44, 24.4, 24.4, 24.48, 24.24, 24.24, 24.28, 24.28, 24.16, 24.48, 24.52, 24.64, 25.0, 24.8, 24.88, 24.88, 24.84, 24.6, 24.48, 24.56, 24.56, 24.44, 24.36, 24.08, 24.2, 24.16, 24.32, 24.44, 24.44, 24.56, 24.64, 24.6, 24.6, 24.2, 24.04, 24.32, 24.36, 24.32, 24.64, 24.36, 24.36, 24.52, 24.44, 24.24, 24.24, 24.44, 24.4, 24.32, 24.68, 24.6, 24.6, 24.64, 24.72, 24.72, 24.36, 24.4, 24.16, 24.28, 24.44, 24.32, 24.4, 24.56, 24.52, 24.52] +123.7566237449646 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 122.3178551197052, 'TIME_S_1KI': 1223.178551197052, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3064.272414226532, 'W': 24.76047197717133} +[20.72, 20.68, 20.44, 20.36, 20.16, 20.16, 19.92, 20.0, 20.04, 20.16, 20.48, 20.52, 20.6, 20.6, 20.44, 20.44, 20.48, 20.28, 20.0, 20.04] +365.82000000000005 +18.291000000000004 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 122.3178551197052, 'TIME_S_1KI': 1223.178551197052, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3064.272414226532, 'W': 24.76047197717133, 'J_1KI': 30642.72414226532, 'W_1KI': 247.6047197717133, 'W_D': 6.469471977171327, 'J_D': 800.6400093073842, 'W_D_1KI': 64.69471977171328, 'J_D_1KI': 646.9471977171328} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.json index e0e6919..3b59597 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 145476, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.560847282409668, "TIME_S_1KI": 0.07259511728676668, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 314.8679765701293, "W": 22.125839021878733, "J_1KI": 2.1643980902013342, "W_1KI": 0.15209270960075016, "W_D": 3.6298390218787304, "J_D": 51.65544533538807, "W_D_1KI": 0.024951462934633413, "J_D_1KI": 
0.00017151600906426773} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 138585, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.35036301612854, "TIME_S_1KI": 0.07468602674263838, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 330.615868434906, "W": 23.22132044200209, "J_1KI": 2.385654063822968, "W_1KI": 0.16756012874410714, "W_D": 4.727320442002089, "J_D": 67.30569681453706, "W_D_1KI": 0.034111342800462456, "J_D_1KI": 0.0002461402229711906} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.output index 29cdb63..8b339c8 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,373 +1,373 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.015525579452514648} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.01529836654663086} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([3898, 1482, 5519, 8868, 7883, 9892, 3663, 8094, 3265, - 5214, 2679, 8390, 8300, 1254, 5771, 2330, 2277, 5567, - 3590, 9876, 9425, 1312, 3675, 250, 2676, 6405, 193, - 4817, 1118, 5220, 848, 1258, 8124, 2361, 9824, 8605, - 9110, 7791, 9707, 7161, 2008, 5741, 2334, 3730, 757, - 2296, 932, 6682, 8936, 2942, 9026, 5748, 1994, 1085, - 8416, 9758, 9650, 6359, 2155, 2494, 3974, 6463, 3581, - 5365, 2085, 839, 8439, 7015, 1395, 6847, 8399, 8014, - 2130, 4017, 54, 2801, 9388, 5676, 8884, 4034, 7935, - 8947, 2234, 5545, 9207, 3191, 9648, 2875, 4561, 7931, - 3725, 5272, 5784, 4597, 9489, 9250, 7919, 1669, 9885, - 3310, 3149, 6923, 5545, 6508, 9800, 2657, 407, 655, - 406, 5436, 7564, 5016, 8520, 4483, 4450, 4851, 2131, - 3975, 4798, 7045, 7501, 7521, 8639, 1296, 7684, 6036, - 1810, 3941, 2660, 7466, 7025, 675, 3142, 6789, 8851, - 6431, 2897, 4149, 5159, 680, 7589, 1161, 4000, 1900, - 6580, 7108, 3052, 1112, 5611, 1198, 1887, 1274, 7907, - 3980, 3920, 7284, 8685, 2676, 8814, 2020, 2087, 7588, - 3473, 6720, 3346, 2385, 331, 4730, 8085, 823, 9752, - 6054, 4004, 3518, 6972, 6393, 1022, 1826, 136, 9104, - 644, 7087, 4535, 6495, 835, 3627, 1719, 5375, 3188, - 2721, 5370, 4060, 7632, 6334, 3349, 38, 8498, 3165, - 8955, 2406, 4105, 3127, 5298, 6106, 8762, 5107, 5923, - 5348, 1666, 7355, 5280, 5787, 8949, 6219, 1327, 3885, - 4849, 3598, 3254, 3228, 5330, 4493, 6213, 7289, 3249, - 8132, 1290, 6830, 5688, 301, 690, 2742, 8864, 5921, - 5003, 9181, 3079, 5148, 1551, 1694, 5480, 2170, 996, - 4985, 1075, 5138, 124, 940, 948, 790, 7486, 9030, - 1477, 9142, 6659, 1811, 9169, 4503, 6175, 4785, 257, - 6391, 3383, 3331, 6911, 1486, 8788, 912, 7174, 9477, - 2748, 6272, 6520, 2719, 8184, 7316, 355, 1823, 8556, - 1459, 3956, 2370, 4381, 4423, 3578, 3880, 2683, 6911, - 2226, 9500, 796, 7668, 4666, 352, 6328, 9286, 6457, - 6272, 7478, 7423, 9055, 4170, 4251, 4175, 8805, 3517, - 7463, 5717, 5192, 4123, 6402, 6096, 3259, 4246, 3250, - 9200, 9472, 913, 8843, 8722, 570, 5337, 2453, 5040, - 5362, 1911, 6859, 1882, 1285, 6680, 1244, 5808, 2588, - 6330, 9064, 794, 6569, 9800, 7433, 8463, 8846, 1304, - 7393, 935, 8025, 9550, 5525, 2976, 4949, 8851, 1570, - 3653, 6681, 4564, 728, 7281, 2542, 4708, 3948, 9376, - 6780, 9074, 2736, 9846, 1380, 937, 7606, 2545, 5198, - 5963, 9747, 3442, 3023, 85, 8214, 770, 1399, 3822, - 9430, 1851, 2996, 5209, 2392, 9546, 4627, 1175, 617, - 8182, 6735, 5491, 7308, 8708, 2805, 8080, 7395, 4480, - 7631, 718, 6417, 689, 5328, 6587, 329, 9209, 5321, - 1461, 6105, 9464, 1295, 7201, 6830, 6066, 6587, 4948, - 3548, 7991, 675, 5203, 1178, 3798, 316, 2004, 1726, - 4002, 1942, 3929, 5033, 5856, 6830, 2915, 4600, 8654, - 5488, 5734, 2078, 667, 6513, 5791, 3009, 2836, 6123, - 6550, 1123, 1084, 663, 7685, 49, 494, 2703, 2647, - 7554, 6681, 5448, 7756, 1560, 7102, 8537, 9152, 84, - 3412, 2379, 6668, 3950, 3737, 3834, 9623, 5269, 4317, - 8632, 3929, 7262, 8925, 8077, 1607, 7516, 928, 9628, - 3158, 8912, 7204, 843, 5299, 6571, 6817, 7581, 3627, - 5903, 1128, 6080, 6794, 135, 8922, 789, 4054, 7312, - 4581, 2854, 8527, 6140, 4291, 3168, 7090, 1112, 9120, - 9629, 458, 4871, 6869, 7933, 6321, 81, 9105, 20, - 4065, 9879, 6324, 5948, 4258, 6176, 8280, 5550, 3648, - 7828, 4940, 925, 9064, 4130, 997, 220, 3952, 3265, - 6751, 778, 1658, 2790, 9143, 4446, 5474, 5369, 9890, - 175, 5804, 2578, 9295, 68, 9554, 875, 4714, 6630, - 6903, 8256, 9485, 6873, 788, 4576, 257, 4058, 9901, - 1081, 6164, 1099, 
9686, 1784, 8594, 5780, 5851, 1800, - 5807, 2784, 1548, 5394, 1010, 5864, 5995, 2197, 3622, - 6360, 3236, 1985, 7074, 4983, 423, 3106, 5792, 3257, - 3522, 3832, 8509, 3852, 7499, 7269, 9610, 1857, 3828, - 1095, 6707, 489, 8713, 7327, 839, 4913, 6149, 7538, - 8425, 3515, 9652, 3014, 3558, 6510, 3484, 3152, 9750, - 2084, 2579, 5450, 9665, 6024, 8898, 7948, 302, 5315, - 4180, 7959, 8289, 7761, 8450, 7959, 3206, 7670, 9154, - 9147, 6208, 5499, 7616, 9052, 4253, 8243, 2955, 7028, - 8394, 3989, 1599, 2737, 7385, 5626, 8082, 6230, 7546, - 57, 2410, 2942, 700, 3122, 9243, 2814, 7397, 4819, - 1084, 7234, 7860, 4622, 6516, 8828, 7448, 8210, 8805, - 9395, 6187, 2031, 3767, 7179, 2583, 6085, 53, 4677, - 4729, 9923, 5613, 1409, 9440, 8359, 7777, 2897, 714, - 7282, 512, 8652, 6139, 3021, 3847, 1702, 2455, 7034, - 1994, 4895, 247, 8803, 6199, 492, 8970, 5329, 1576, - 3945, 8058, 9716, 5849, 5153, 3106, 3211, 3905, 6116, - 1826, 7235, 9824, 1073, 4092, 8941, 2805, 3916, 9074, - 3940, 6464, 5740, 9399, 1287, 3979, 6746, 3637, 4616, - 8686, 7537, 7730, 8467, 9287, 3649, 9249, 9531, 8797, - 8683, 9126, 8105, 4413, 4839, 9084, 7493, 1505, 4616, - 1315, 9867, 8108, 9105, 3235, 2976, 2185, 8767, 9318, - 1590, 5862, 8416, 8494, 8249, 1922, 2173, 387, 689, - 5537, 319, 2548, 8781, 3876, 2904, 3074, 5424, 2666, - 1126, 9321, 8588, 3445, 8617, 334, 9161, 2384, 5147, - 6895, 9089, 6921, 7186, 4121, 7073, 3155, 3080, 9515, - 7841, 4184, 6560, 3776, 3166, 1037, 8649, 3341, 5486, - 8702, 8561, 2034, 4122, 6170, 611, 3580, 3403, 2813, - 9678, 1682, 4710, 1831, 6141, 5603, 6459, 1617, 1704, - 3743, 8018, 4183, 2372, 4280, 5580, 5554, 685, 4906, - 6177, 8204, 7285, 3595, 6551, 4218, 1732, 9617, 2156, - 3300, 434, 1438, 2176, 7789, 6779, 4580, 226, 6725, - 8186, 2641, 2667, 1600, 4234, 2261, 5072, 8276, 7022, - 8494, 775, 6, 6308, 1899, 4053, 5784, 3072, 8025, - 9379, 1116, 5148, 7628, 5060, 7829, 5341, 3570, 8529, - 1061, 3503, 8581, 9989, 8482, 7388, 2356, 4100, 322, - 4769, 9793, 1301, 2512, 7579, 4298, 1600, 8953, 8920, - 1692, 4787, 9285, 898, 2698, 1475, 3001, 3611, 3866, - 9122, 3753, 1211, 3092, 2312, 1040, 1123, 4411, 7130, - 9430, 8051, 8329, 3156, 6985, 5781, 3991, 8161, 6751, - 1449, 9718, 369, 9063, 273, 371, 8003, 9858, 1365, - 1729, 6226, 4643, 9125, 2272, 8996, 3896, 1943, 4185, - 4415, 9955, 2412, 9510, 3232, 4806, 2200, 1990, 6139, - 6256, 358, 9382, 2583, 7365, 9846, 8260, 733, 2064, - 118, 7040, 8862, 4366, 6192, 8535, 9244, 9993, 8572, - 3490, 6291, 7873, 3279, 5895, 2996, 7172, 761, 8005, - 1773, 7176, 2507, 2938, 3630, 9271, 4295, 6683, 5600, - 2223, 796, 5209, 6697, 614, 4824, 298, 6536, 5113, - 5778, 5914, 2119, 5281, 1677, 5222, 2043, 1439, 5141, - 1903]), - values=tensor([8.8707e-01, 8.2479e-01, 8.1424e-01, 6.0134e-01, - 2.1717e-01, 8.7330e-01, 7.2034e-01, 8.8373e-01, - 6.7753e-01, 7.2863e-01, 2.3933e-01, 7.4252e-01, - 7.9896e-02, 1.7017e-01, 7.4504e-01, 4.4774e-01, - 7.3251e-01, 7.9618e-01, 1.2655e-01, 1.0721e-01, - 6.0589e-01, 3.9688e-01, 1.0220e-01, 4.9557e-01, - 7.9026e-02, 8.0845e-01, 2.4031e-01, 1.3799e-02, - 3.6592e-01, 9.1889e-01, 4.4397e-01, 9.5919e-02, - 5.2901e-01, 6.3995e-01, 7.8787e-01, 7.3005e-01, - 5.9923e-01, 1.5700e-01, 6.1980e-01, 8.2992e-02, - 8.1762e-01, 6.8552e-01, 5.1150e-01, 6.6341e-01, - 9.1871e-01, 4.9767e-02, 8.7331e-01, 3.8092e-01, - 3.0824e-01, 3.9823e-01, 4.3039e-01, 9.3772e-01, - 1.4926e-01, 7.9505e-01, 8.0083e-01, 6.1216e-01, - 2.0931e-01, 6.1824e-01, 6.4771e-01, 5.6077e-01, - 7.8449e-01, 7.6306e-01, 7.5572e-01, 9.7267e-01, - 3.5240e-01, 5.5727e-01, 1.8393e-02, 
9.6845e-01, - 6.0478e-02, 8.9628e-01, 4.4149e-01, 3.1202e-02, - 1.7476e-01, 9.2967e-01, 8.8825e-01, 9.2412e-01, - 9.8151e-01, 9.7789e-01, 3.3578e-01, 3.7791e-01, - 5.2634e-01, 5.3450e-01, 4.7320e-01, 3.5529e-02, - 8.2385e-01, 6.8358e-01, 9.8549e-01, 5.6940e-01, - 7.9720e-01, 2.6003e-01, 6.6890e-01, 1.7961e-01, - 6.2016e-01, 2.3928e-01, 1.8421e-02, 3.3260e-01, - 1.0002e-01, 8.5414e-02, 3.1758e-01, 3.3376e-01, - 6.7478e-01, 9.7524e-02, 4.6108e-01, 1.0332e-02, - 4.0865e-01, 3.7106e-02, 6.9802e-01, 8.7269e-01, - 8.2593e-01, 9.5227e-01, 3.3687e-01, 9.4058e-01, - 7.4500e-01, 3.6415e-01, 4.5557e-01, 9.7232e-01, - 5.1097e-01, 5.5568e-01, 2.7656e-01, 3.3562e-01, - 7.3346e-01, 7.7270e-01, 9.0672e-01, 2.6724e-01, - 2.5245e-01, 4.3405e-01, 6.8147e-01, 5.7156e-01, - 8.0443e-01, 2.9648e-01, 5.6969e-01, 8.6520e-01, - 4.0162e-01, 7.0064e-01, 9.6519e-01, 7.6895e-01, - 3.9290e-01, 8.0828e-01, 1.5562e-02, 5.5538e-01, - 9.3270e-01, 4.4461e-01, 7.5449e-01, 9.9752e-01, - 3.2640e-01, 5.5045e-01, 9.9590e-01, 5.7782e-01, - 3.0729e-01, 9.4262e-02, 9.6363e-01, 7.8404e-01, - 4.7225e-01, 2.8178e-01, 4.0082e-01, 4.3801e-02, - 9.9491e-01, 7.4034e-01, 6.3304e-01, 7.9732e-01, - 3.8854e-02, 3.3586e-01, 1.2839e-01, 6.2910e-01, - 1.3753e-01, 5.4593e-01, 1.8915e-01, 8.0045e-01, - 1.5614e-02, 5.9397e-01, 6.1183e-01, 5.9631e-02, - 2.4885e-01, 1.7688e-01, 3.3617e-01, 8.6754e-01, - 6.9855e-01, 9.5329e-01, 3.7958e-01, 4.4295e-01, - 3.9831e-01, 5.8230e-02, 5.4947e-01, 1.1816e-01, - 2.4831e-01, 2.1298e-01, 9.6579e-01, 8.9969e-01, - 3.8227e-01, 9.3422e-01, 1.6318e-01, 1.9859e-01, - 9.2078e-01, 9.3321e-02, 8.2575e-01, 7.2248e-01, - 8.1688e-01, 7.9359e-01, 5.1287e-01, 6.0873e-01, - 3.4040e-01, 4.0884e-01, 3.0602e-01, 7.1454e-01, - 9.3968e-01, 1.3258e-01, 3.7262e-01, 2.4053e-01, - 2.6331e-01, 9.9278e-01, 5.5712e-01, 1.1806e-01, - 3.2942e-01, 6.4174e-01, 6.8423e-01, 2.5605e-01, - 9.5326e-01, 6.1876e-01, 9.1393e-02, 9.7102e-01, - 6.2910e-01, 9.9254e-01, 6.9186e-01, 7.1631e-01, - 3.8637e-01, 4.1415e-01, 9.1739e-01, 9.1011e-01, - 7.0735e-01, 2.6752e-01, 6.7070e-02, 5.6268e-01, - 8.9992e-01, 7.8722e-02, 9.8479e-01, 8.4552e-02, - 4.0442e-01, 5.1450e-01, 8.6027e-01, 3.6511e-01, - 3.6834e-02, 9.6351e-01, 8.2693e-01, 8.1645e-01, - 7.5576e-01, 5.1891e-01, 3.3959e-01, 1.2834e-01, - 1.4061e-02, 2.5165e-01, 4.1608e-02, 4.2840e-01, - 9.2551e-01, 6.1411e-01, 6.3363e-01, 4.0275e-01, - 5.9319e-01, 2.9263e-01, 2.1565e-01, 4.4295e-01, - 5.1955e-01, 4.5938e-01, 8.5177e-01, 9.2589e-01, - 6.1009e-01, 7.1321e-01, 3.9839e-01, 6.6454e-01, - 4.9760e-01, 7.4274e-01, 6.1422e-01, 9.7794e-01, - 2.5439e-01, 7.2161e-01, 8.4072e-01, 9.1789e-01, - 2.0604e-01, 4.9255e-01, 3.0544e-01, 7.3972e-01, - 5.1823e-01, 2.7708e-02, 1.0035e-01, 3.6838e-01, - 6.9918e-01, 9.7863e-01, 4.6654e-01, 7.0863e-01, - 5.2875e-01, 1.3497e-01, 5.8760e-01, 1.5144e-01, - 8.5865e-01, 5.7095e-01, 4.1667e-01, 3.1601e-02, - 2.1484e-01, 8.1898e-02, 8.0481e-02, 3.2171e-01, - 4.1080e-01, 3.7800e-01, 2.5945e-01, 7.1999e-01, - 8.7656e-02, 6.2152e-01, 7.8688e-01, 5.0297e-01, - 7.9838e-01, 1.5162e-01, 6.3852e-01, 8.1854e-02, - 5.8775e-01, 2.1661e-01, 1.9911e-01, 6.6725e-01, - 6.6917e-01, 6.4379e-01, 5.7853e-01, 5.7889e-01, - 3.4456e-01, 3.7407e-01, 8.9088e-01, 9.6166e-01, - 3.6272e-01, 3.1812e-01, 5.3217e-02, 6.0500e-01, - 8.7168e-01, 9.1038e-01, 4.5582e-01, 6.3918e-02, - 6.9142e-01, 3.2649e-01, 9.8192e-01, 6.0464e-01, - 9.6815e-01, 1.0503e-01, 7.1600e-01, 4.3663e-01, - 8.7706e-01, 4.5971e-02, 6.6596e-01, 9.3079e-01, - 1.5823e-01, 3.2003e-01, 1.2870e-01, 6.3500e-01, - 5.3860e-01, 4.0744e-01, 2.8252e-01, 
9.0788e-01, - 9.4387e-01, 3.8651e-01, 4.5116e-01, 7.6362e-01, - 9.0434e-01, 3.7603e-01, 8.1415e-01, 3.1082e-01, - 2.3815e-01, 3.2440e-01, 2.3116e-01, 3.3219e-01, - 4.3396e-01, 2.5441e-01, 3.3591e-01, 3.7212e-01, - 9.9808e-01, 7.3589e-01, 5.1922e-01, 3.1994e-01, - 5.7040e-01, 9.8825e-01, 6.5887e-01, 9.4511e-01, - 1.4550e-01, 6.0308e-01, 6.0994e-01, 3.0875e-01, - 9.5170e-01, 7.2979e-01, 1.0265e-03, 4.4914e-01, - 4.0148e-01, 5.7954e-01, 3.6786e-01, 1.5804e-01, - 7.6112e-01, 2.7800e-01, 3.6618e-01, 3.8852e-01, - 6.2086e-01, 7.5561e-01, 3.3979e-01, 9.2805e-01, - 2.8089e-01, 2.6715e-01, 4.1424e-03, 5.2598e-01, - 6.1266e-01, 2.0902e-01, 5.0368e-01, 8.0383e-01, - 7.4025e-01, 4.4304e-01, 7.4164e-01, 1.8111e-01, - 9.0030e-01, 2.8654e-01, 5.1264e-02, 6.6938e-01, - 4.7722e-03, 4.2012e-01, 6.0558e-01, 6.9264e-01, - 1.6392e-01, 8.8908e-01, 5.7035e-01, 1.4618e-01, - 6.2172e-01, 4.4803e-01, 4.2737e-01, 2.5387e-01, - 7.3742e-01, 4.9420e-01, 5.4518e-01, 4.1535e-01, - 9.8074e-01, 2.9175e-01, 7.5267e-01, 5.0346e-01, - 3.7893e-01, 3.1575e-01, 1.8698e-01, 4.7304e-01, - 9.0176e-02, 6.7209e-01, 3.8501e-01, 3.2009e-01, - 6.0189e-02, 6.3261e-01, 5.9643e-01, 5.9368e-01, - 8.3045e-01, 7.8057e-01, 7.7127e-02, 6.9980e-01, - 7.4648e-01, 4.1912e-01, 6.6123e-01, 8.0898e-01, - 9.4798e-01, 6.1651e-01, 7.6225e-01, 4.9304e-02, - 3.8411e-01, 5.1162e-01, 4.2531e-01, 4.4692e-01, - 9.0702e-01, 9.2927e-01, 7.5881e-01, 6.8857e-01, - 7.8800e-01, 5.1040e-01, 1.1781e-02, 2.1146e-01, - 2.3732e-01, 8.1244e-01, 4.9067e-01, 8.1642e-01, - 1.0624e-01, 6.6891e-01, 8.1822e-01, 5.6763e-01, - 1.5228e-01, 7.8509e-02, 9.4350e-02, 9.0172e-01, - 3.1038e-01, 4.9468e-01, 6.6537e-01, 5.0303e-01, - 2.8804e-01, 8.0134e-01, 3.1829e-02, 8.0496e-01, - 4.3146e-01, 2.1680e-01, 8.5241e-01, 4.0167e-01, - 6.2134e-01, 5.0197e-01, 6.2379e-01, 4.6456e-01, - 8.5710e-01, 3.8621e-01, 9.3479e-02, 8.4115e-01, - 1.3886e-01, 4.5770e-01, 4.5754e-01, 8.0833e-01, - 4.9763e-01, 2.5510e-01, 9.6840e-01, 1.9707e-01, - 4.0702e-01, 7.7955e-01, 5.7241e-01, 2.2345e-01, - 4.6512e-02, 8.3993e-01, 5.3641e-01, 6.2885e-01, - 3.4946e-01, 3.5017e-01, 4.0024e-01, 1.4394e-01, - 5.9394e-01, 6.1531e-01, 3.5865e-01, 2.2163e-03, - 4.5091e-01, 8.5622e-01, 9.8299e-01, 2.6351e-01, - 1.5344e-01, 8.8509e-01, 9.4732e-01, 2.3528e-01, - 4.9871e-04, 9.2326e-01, 6.8197e-01, 7.7192e-01, - 3.7885e-01, 1.9122e-01, 4.2807e-01, 4.4857e-01, - 1.8060e-01, 5.1258e-01, 7.8550e-01, 1.0826e-02, - 4.0016e-01, 5.1045e-01, 9.8409e-03, 9.4114e-01, - 7.7132e-01, 5.9286e-01, 4.3541e-01, 7.4579e-01, - 9.4221e-02, 2.5992e-01, 8.5052e-01, 6.8925e-01, - 3.1627e-01, 2.0341e-01, 2.6395e-01, 9.5170e-01, - 2.6815e-02, 5.5151e-01, 9.4061e-01, 7.8800e-01, - 7.9643e-01, 1.9611e-01, 3.9293e-01, 8.1546e-01, - 6.0137e-01, 7.1575e-01, 6.3150e-01, 4.2849e-01, - 7.8772e-01, 3.2756e-01, 8.6487e-01, 2.6993e-01, - 8.7612e-01, 3.3995e-01, 1.0473e-01, 9.2012e-01, - 3.5880e-01, 3.0548e-01, 7.9299e-02, 7.5157e-01, - 4.4155e-02, 4.3668e-01, 6.7371e-01, 3.6075e-01, - 8.1871e-01, 4.1564e-01, 7.8281e-01, 7.8903e-01, - 1.2638e-01, 8.9728e-02, 9.2093e-01, 6.7936e-01, - 2.5980e-01, 1.6090e-01, 1.8550e-01, 1.4940e-01, - 7.0870e-01, 9.5589e-02, 5.0303e-01, 6.9889e-01, - 1.5938e-01, 2.2200e-01, 6.2784e-01, 1.5446e-01, - 1.2124e-02, 8.7384e-01, 6.0268e-02, 8.1065e-02, - 9.6171e-01, 5.4637e-01, 6.6976e-01, 6.1643e-01, - 1.7711e-01, 6.3124e-01, 7.1647e-01, 1.8549e-01, - 7.9711e-01, 8.6392e-01, 6.8736e-01, 7.6796e-01, - 3.6441e-01, 7.4881e-01, 7.4269e-01, 3.0689e-01, - 7.3449e-01, 5.6269e-01, 8.7710e-01, 8.8517e-01, - 6.5795e-01, 4.5779e-01, 9.4681e-01, 
5.7892e-01, - 5.7829e-01, 7.6546e-02, 8.1582e-01, 9.6362e-01, - 9.3306e-01, 5.3107e-01, 9.4367e-01, 9.9397e-01, - 2.1635e-02, 3.3178e-01, 6.0266e-01, 3.8334e-01, - 9.5467e-01, 6.0713e-01, 9.6563e-01, 1.5662e-01, - 4.2960e-02, 4.6865e-01, 4.7849e-01, 1.1095e-01, - 4.4027e-01, 7.9249e-01, 1.3336e-01, 1.8189e-01, - 6.7993e-01, 1.6154e-01, 3.7795e-01, 9.8899e-01, - 6.0252e-01, 8.8063e-01, 5.0927e-01, 8.2521e-01, - 6.4850e-01, 9.2767e-01, 1.6998e-01, 5.2612e-01, - 5.6923e-01, 1.4796e-01, 8.0528e-01, 4.2194e-01, - 6.5700e-01, 6.3868e-01, 1.8030e-01, 5.5921e-01, - 2.3978e-01, 2.7126e-01, 1.5009e-01, 4.5821e-01, - 5.9677e-01, 6.0861e-01, 2.3955e-01, 7.1788e-01, - 3.6544e-01, 3.7107e-01, 7.6629e-01, 1.4653e-01, - 9.6017e-01, 2.9226e-01, 9.6527e-01, 5.7781e-01, - 3.1803e-01, 3.4564e-01, 8.5270e-01, 8.0335e-01, - 7.2978e-01, 7.4313e-01, 2.9239e-01, 9.3231e-02, - 3.7290e-01, 9.6242e-01, 9.3227e-01, 5.2356e-02, - 4.5231e-01, 7.0243e-01, 2.0260e-01, 6.0563e-01, - 6.3537e-01, 7.0126e-03, 2.0549e-01, 6.4348e-01, - 1.1234e-01, 9.1736e-01, 9.5159e-01, 2.5712e-01, - 3.6692e-02, 2.6733e-01, 6.4886e-01, 7.0188e-01, - 7.3541e-01, 7.5349e-02, 1.9926e-01, 6.8626e-01, - 9.8980e-01, 5.8998e-01, 9.5392e-01, 2.7051e-02, - 6.8310e-01, 1.5713e-01, 5.7251e-01, 9.5125e-02, - 6.3484e-01, 8.0309e-01, 3.5265e-01, 3.2764e-01, - 4.8770e-01, 9.2443e-01, 3.0410e-01, 5.5153e-02, - 4.4606e-01, 1.5785e-01, 6.6155e-01, 1.6335e-01, - 1.6467e-01, 8.6871e-02, 9.3798e-01, 5.7792e-01, - 5.6320e-01, 7.8288e-01, 4.2018e-01, 9.8989e-01, - 1.5890e-02, 5.4217e-01, 7.9690e-01, 6.7578e-01, - 1.7863e-01, 2.4870e-01, 2.4165e-01, 9.3261e-01, - 4.3334e-01, 4.5406e-01, 1.2943e-01, 2.4438e-01, - 6.1877e-01, 1.5403e-01, 2.3696e-01, 6.0019e-01, - 7.6954e-02, 4.8879e-01, 3.9922e-01, 6.1722e-02, - 9.8407e-01, 1.3447e-01, 3.0239e-01, 9.1036e-01, - 9.3287e-01, 4.3368e-01, 2.4395e-01, 4.6772e-01, - 1.3810e-01, 4.4311e-01, 1.8397e-01, 1.7834e-01, - 5.5151e-01, 6.6086e-01, 8.9171e-01, 6.7263e-01, - 8.5315e-01, 7.7935e-01, 9.3442e-01, 4.3695e-01, - 1.5108e-01, 3.6121e-01, 3.5574e-01, 7.4972e-01, - 8.1697e-01, 8.9608e-01, 4.1783e-01, 5.6045e-02, - 2.1298e-01, 7.5669e-01, 2.7640e-01, 8.8664e-01, - 7.8179e-01, 2.7053e-02, 1.3976e-01, 6.5301e-01, - 2.0818e-01, 9.5708e-02, 7.3530e-01, 5.6532e-01, - 1.9201e-01, 7.5872e-01, 8.5304e-01, 7.6696e-01, - 2.3298e-01, 7.3064e-02, 4.5750e-01, 3.0491e-01, - 8.4562e-01, 5.0911e-01, 8.4650e-01, 1.7022e-01, - 1.6887e-01, 2.5773e-01, 4.1362e-01, 2.3742e-01, - 9.0183e-01, 9.8016e-01, 4.6076e-02, 3.1594e-01, - 6.9052e-01, 9.5460e-01, 9.5283e-01, 7.5063e-01, - 2.9345e-01, 3.4141e-01, 4.1893e-01, 5.2709e-01, - 7.8712e-01, 7.9375e-01, 9.2839e-01, 8.4541e-01, - 6.4293e-01, 9.0380e-01, 6.3515e-01, 8.8752e-01, - 4.0009e-03, 5.8117e-02, 3.0656e-02, 8.5350e-01, - 7.0642e-01, 1.4772e-02, 7.6152e-01, 9.4227e-01, - 1.6103e-01, 5.6090e-01, 8.9968e-02, 4.7046e-01, - 5.8490e-01, 8.4874e-01, 9.9450e-01, 7.0178e-01, - 8.8232e-01, 9.9210e-01, 3.7964e-01, 5.7953e-01, - 7.0927e-01, 5.9254e-01, 3.0037e-01, 1.9808e-01, - 1.3903e-01, 6.7066e-01, 4.3748e-01, 7.7025e-01, - 8.2460e-01, 5.1989e-01, 6.9893e-01, 3.1880e-02, - 6.5270e-01, 7.5305e-01, 2.6387e-01, 2.3470e-01, - 7.7775e-01, 5.4475e-02, 8.4215e-01, 7.9833e-01, - 6.8635e-01, 5.8313e-01, 6.2967e-01, 9.3181e-02, - 2.5176e-01, 2.8259e-01, 8.5278e-01, 2.0479e-01, - 3.2888e-01, 4.0002e-01, 4.2985e-01, 1.6745e-02, - 6.4703e-01, 3.8685e-01, 2.7396e-01, 4.0083e-02, - 6.9795e-01, 3.2896e-01, 9.4557e-01, 6.2325e-01, - 6.2040e-01, 7.2851e-01, 1.4586e-01, 2.2619e-01, - 8.3001e-01, 6.9165e-01, 9.3075e-01, 
8.4699e-02, - 9.1120e-01, 6.1131e-01, 6.7315e-01, 6.2863e-01, - 6.1502e-02, 6.6647e-01, 3.9426e-01, 3.4242e-01, - 2.4155e-01, 7.0349e-01, 6.4289e-01, 2.0874e-01, - 3.7193e-03, 1.9972e-01, 6.8698e-01, 1.8595e-01, - 5.9335e-01, 5.0666e-01, 6.8596e-01, 7.5661e-01, - 4.4919e-01, 9.2522e-01, 3.4907e-01, 4.9716e-01, - 5.3208e-01, 1.8718e-01, 8.8527e-01, 3.9827e-01, - 3.4328e-01, 2.1804e-01, 3.2701e-01, 7.1900e-01, - 8.8112e-01, 5.6475e-01, 2.6203e-01, 5.6843e-01, - 3.6244e-01, 7.4864e-01, 4.2898e-02, 1.0206e-01, - 5.8229e-01, 3.6364e-01, 9.0089e-01, 6.3906e-01, - 9.4625e-01, 1.5743e-01, 7.0745e-01, 9.7285e-01, - 3.2107e-01, 4.6844e-01, 5.0980e-01, 3.0922e-01, - 6.4165e-01, 5.8791e-01, 5.8697e-01, 5.0368e-01, - 1.3440e-01, 7.0304e-01, 9.5832e-01, 4.9678e-01, - 7.5464e-01, 5.7994e-01, 2.8987e-01, 9.1487e-03, - 6.1330e-01, 3.2294e-01, 3.1984e-01, 5.8267e-01, - 6.6203e-01, 7.6829e-01, 9.8125e-01, 6.4370e-01, - 6.1405e-01, 2.6304e-01, 8.6038e-01, 6.6028e-01, - 1.6081e-02, 9.8894e-01, 5.8987e-01, 3.5565e-01]), + col_indices=tensor([6415, 928, 8153, 8508, 1462, 7229, 749, 4739, 4477, + 2451, 209, 7360, 5162, 5212, 4780, 7780, 5428, 9366, + 8119, 3530, 5509, 5963, 7114, 7299, 838, 8061, 9426, + 4099, 8098, 651, 127, 1266, 6472, 8793, 271, 5158, + 1770, 6415, 9315, 8844, 3920, 3610, 4550, 4265, 5357, + 570, 7168, 9317, 1051, 1147, 3100, 8667, 4920, 5992, + 5054, 351, 1450, 5953, 842, 8017, 313, 707, 2262, + 4812, 5022, 9984, 2170, 6285, 6627, 7700, 3251, 7644, + 8798, 5250, 6342, 7030, 4612, 6883, 7032, 5699, 5639, + 2422, 4270, 8141, 4113, 647, 104, 6114, 6835, 3508, + 4397, 7231, 8465, 3401, 5341, 366, 7648, 6294, 8939, + 2397, 4088, 4439, 294, 9815, 189, 5124, 7855, 4710, + 4682, 1122, 6467, 1512, 3649, 791, 8013, 2598, 8185, + 8582, 9781, 5896, 7540, 678, 5940, 1152, 3073, 5423, + 6158, 280, 9019, 1773, 4886, 6208, 5839, 2896, 4823, + 6982, 2616, 2511, 211, 7152, 7832, 8239, 1741, 2496, + 742, 5813, 5557, 6121, 186, 9274, 3626, 2123, 8, + 7539, 8300, 6354, 3975, 5600, 8321, 8120, 3057, 8937, + 4796, 7180, 2549, 6947, 334, 3899, 7184, 2628, 7129, + 3248, 853, 9997, 6186, 6232, 6149, 7650, 599, 3796, + 2222, 9471, 3975, 8927, 3219, 2619, 6184, 5134, 8655, + 5147, 5564, 9576, 6419, 8589, 5599, 6344, 8612, 1185, + 879, 1815, 813, 3867, 1700, 604, 8121, 5711, 3570, + 2958, 2540, 8557, 9213, 6837, 8005, 466, 7194, 9583, + 3377, 5933, 5764, 8034, 4417, 7726, 3271, 8144, 9226, + 542, 5668, 1860, 5628, 7932, 623, 7018, 8898, 6240, + 9101, 2968, 7282, 3541, 3208, 4809, 5108, 2052, 3492, + 819, 5737, 694, 2278, 1788, 6332, 6748, 4461, 308, + 4392, 4164, 9903, 3459, 1950, 7963, 2376, 9584, 153, + 7781, 205, 8081, 3440, 1703, 5646, 403, 5717, 9706, + 9895, 5752, 6175, 195, 1463, 6055, 6033, 4384, 5356, + 2098, 7072, 1348, 3718, 774, 2306, 9966, 5619, 4970, + 5505, 187, 2008, 178, 7301, 4767, 7853, 6225, 9832, + 2859, 5476, 2308, 8884, 4776, 2784, 6321, 5832, 3813, + 8357, 7944, 4791, 4419, 193, 9124, 2777, 4835, 8180, + 716, 7353, 9609, 9600, 1834, 1180, 642, 3808, 8540, + 3207, 7194, 5547, 9840, 7465, 2291, 1660, 7373, 1837, + 5035, 4182, 4184, 5632, 9899, 7657, 1833, 5296, 504, + 533, 9246, 9692, 3803, 3103, 6133, 3095, 2116, 2787, + 5065, 9224, 8535, 3152, 2498, 4870, 9875, 6446, 8643, + 520, 6605, 6465, 8878, 476, 5800, 8684, 7411, 9905, + 1689, 330, 5932, 3570, 9425, 8816, 7974, 7447, 159, + 7271, 7459, 804, 9451, 9424, 1385, 2521, 5288, 9275, + 1189, 2347, 6310, 1496, 968, 1468, 7641, 135, 6585, + 2715, 138, 8166, 1607, 6008, 7278, 4577, 7098, 1923, + 9424, 7093, 2125, 4000, 8521, 5006, 9438, 8229, 3454, + 
7763, 2340, 2137, 1650, 8691, 4198, 3468, 1758, 9118, + 8241, 723, 3765, 1607, 3163, 6399, 9127, 3400, 3807, + 3641, 4318, 1758, 7567, 4746, 8127, 8233, 8947, 2926, + 6118, 5683, 7777, 4174, 2162, 4374, 443, 8240, 1734, + 8285, 9581, 5437, 7296, 3506, 7771, 6077, 4545, 7774, + 701, 9299, 1684, 7223, 8045, 460, 8857, 9555, 8680, + 2593, 3697, 6346, 2238, 1657, 3750, 5994, 2090, 6528, + 1319, 1084, 1140, 3, 9247, 280, 4079, 2119, 732, + 5059, 5518, 4898, 4141, 455, 2150, 2453, 3999, 9077, + 4873, 2710, 7487, 8670, 9023, 5546, 3557, 6159, 7633, + 3843, 4759, 4989, 8902, 9874, 770, 3985, 2005, 575, + 3757, 2344, 1207, 4405, 7377, 2986, 7913, 8647, 3999, + 2102, 4748, 3233, 8334, 3523, 8679, 4191, 1758, 5343, + 7265, 200, 2583, 5836, 6834, 8466, 7612, 8386, 1851, + 7252, 476, 4668, 5641, 6509, 1988, 2858, 2455, 1728, + 7084, 3892, 223, 1119, 3884, 9326, 5238, 127, 966, + 9055, 5318, 2816, 7199, 622, 6726, 2674, 8065, 2139, + 4199, 5260, 9491, 8574, 2878, 4517, 4611, 2225, 7698, + 6530, 758, 3681, 1200, 3723, 7611, 9756, 9801, 8943, + 47, 1763, 9636, 9912, 8664, 4151, 7713, 9463, 4795, + 9093, 3956, 7478, 7402, 2657, 9669, 8249, 9187, 439, + 481, 6412, 3165, 9683, 1934, 5165, 7897, 4310, 8766, + 8131, 2759, 3193, 9505, 1644, 2152, 7912, 7117, 9594, + 3678, 2800, 2209, 9530, 9408, 7689, 1185, 8075, 3478, + 3439, 8114, 3444, 2728, 1327, 6548, 7621, 4394, 7306, + 7063, 2821, 3129, 5422, 341, 4238, 5011, 3242, 5793, + 868, 8075, 1933, 1438, 5994, 8854, 3526, 4021, 2127, + 7639, 3027, 908, 5652, 4283, 2341, 6796, 4552, 2197, + 7973, 219, 7978, 1828, 2464, 4301, 4280, 4813, 3123, + 6725, 5347, 5006, 8182, 6307, 2435, 1641, 2108, 1773, + 7105, 2172, 7260, 642, 220, 1538, 9973, 9833, 9577, + 6379, 4201, 3630, 8575, 5285, 839, 8878, 7366, 4225, + 3472, 5600, 7708, 6516, 163, 3618, 3009, 2742, 5454, + 8920, 4309, 9634, 2994, 8934, 3323, 13, 9727, 3682, + 7965, 8418, 8096, 5964, 2986, 7521, 1093, 6986, 4896, + 4446, 3469, 1627, 8115, 9427, 4299, 6046, 2368, 5300, + 3383, 9084, 5457, 8764, 7176, 1922, 4624, 9329, 3233, + 4479, 4755, 8641, 9705, 6861, 5950, 2460, 404, 8018, + 2485, 2946, 4874, 4636, 3428, 842, 3921, 7735, 3952, + 9065, 2443, 2101, 1663, 3950, 9094, 154, 7822, 8172, + 4379, 392, 9699, 8735, 8117, 7529, 7932, 2856, 6417, + 2627, 6118, 6931, 1645, 9458, 7011, 7745, 6146, 5196, + 6867, 5314, 2101, 2353, 4987, 5412, 3344, 9166, 3196, + 2401, 8605, 5030, 385, 1120, 5834, 9930, 877, 6677, + 8178, 7918, 6932, 8184, 5983, 9424, 4229, 5589, 5941, + 446, 3031, 7419, 6029, 3818, 5887, 6280, 7547, 3226, + 6206, 2793, 133, 1722, 3021, 8747, 4880, 6401, 2757, + 5234, 3214, 4807, 6170, 9084, 6223, 4996, 311, 6775, + 9662, 8133, 1851, 2587, 5840, 5854, 4238, 1199, 8020, + 8396, 7287, 9939, 6927, 539, 5567, 7536, 5009, 9993, + 1978, 6171, 5547, 100, 7404, 728, 6276, 7796, 9188, + 901, 8704, 2139, 7316, 4003, 1817, 4165, 6830, 813, + 5939, 4121, 2236, 4761, 4358, 6832, 5233, 8955, 4909, + 3764, 4400, 6022, 3664, 9638, 2286, 5864, 509, 7044, + 9881, 6515, 1294, 6259, 8794, 4806, 3807, 7300, 1397, + 7766, 2665, 6943, 5207, 7490, 2582, 1425, 5493, 6466, + 597, 7381, 7750, 6611, 4214, 8771, 5482, 1896, 5445, + 3953, 9504, 5289, 191, 7335, 1934, 6089, 1261, 9956, + 8607, 9415, 6160, 389, 157, 7051, 6001, 2006, 2152, + 2184, 1876, 1281, 6937, 5392, 8482, 452, 2563, 5303, + 2718, 3521, 4930, 4278, 1408, 4909, 1878, 4305, 9870, + 5719, 7145, 6122, 6764, 2722, 2683, 7970, 3946, 220, + 8774, 538, 5809, 9481, 6738, 6725, 9875, 7949, 3088, + 3880, 179, 7227, 5758, 5822, 6473, 9665, 3342, 9419, + 2270, 6022, 9317, 5823, 
9654, 4523, 164, 2926, 8223, + 8057]), + values=tensor([4.8590e-01, 6.5665e-01, 9.5344e-02, 6.6304e-01, + 7.7228e-01, 7.6334e-01, 3.7894e-01, 3.5514e-01, + 1.6004e-01, 6.1673e-01, 3.8523e-01, 5.3872e-01, + 7.0944e-01, 4.4357e-01, 1.0704e-01, 9.4222e-01, + 7.7743e-01, 5.2593e-01, 1.1059e-01, 6.8057e-02, + 4.8641e-02, 3.2022e-01, 9.7788e-01, 6.2168e-01, + 9.0117e-01, 5.1752e-01, 8.7096e-01, 6.0685e-01, + 8.8329e-01, 2.3026e-01, 5.6753e-01, 9.2746e-01, + 3.5864e-01, 4.4602e-01, 1.1219e-01, 3.2988e-01, + 3.5991e-01, 6.8050e-01, 9.4314e-01, 5.0254e-01, + 6.6655e-01, 9.5697e-01, 5.7315e-01, 4.3741e-01, + 6.4333e-01, 4.8375e-01, 1.1974e-01, 3.5120e-01, + 3.7750e-01, 9.2368e-01, 8.2557e-01, 5.2412e-01, + 9.8851e-01, 3.7928e-01, 2.1040e-01, 5.4896e-01, + 4.9582e-01, 5.9509e-01, 6.1636e-01, 3.8323e-01, + 6.3070e-01, 9.7636e-01, 9.7297e-01, 2.4869e-01, + 9.9860e-01, 7.6734e-01, 8.2743e-02, 9.1773e-01, + 7.6699e-01, 4.7811e-01, 5.3248e-01, 4.2996e-01, + 5.2303e-01, 3.3377e-02, 9.4868e-01, 9.3634e-01, + 9.9180e-01, 4.3477e-01, 1.6663e-03, 5.9127e-02, + 1.3206e-01, 3.3711e-01, 8.7887e-01, 9.1463e-01, + 9.0189e-01, 3.2399e-01, 9.8427e-01, 7.9605e-01, + 8.7016e-01, 2.6479e-01, 3.3046e-01, 8.5608e-01, + 5.0330e-01, 3.5859e-01, 3.7095e-01, 7.9947e-01, + 2.6424e-01, 3.3618e-01, 4.4976e-01, 8.1172e-01, + 4.8894e-01, 4.6422e-01, 9.5039e-01, 9.6798e-01, + 2.4127e-01, 7.6211e-01, 7.2662e-01, 4.1168e-01, + 8.3000e-01, 4.8703e-01, 7.5198e-01, 4.7890e-01, + 1.5512e-01, 7.9225e-01, 7.9685e-01, 4.0731e-01, + 7.4448e-01, 9.9225e-01, 6.7984e-01, 4.0042e-01, + 7.3762e-01, 9.2067e-01, 5.4934e-01, 1.0152e-01, + 4.9403e-01, 2.5246e-01, 5.5601e-01, 9.4113e-01, + 4.1592e-01, 7.7156e-01, 1.8773e-01, 7.2005e-01, + 4.4225e-01, 9.9509e-01, 7.2526e-01, 3.5499e-01, + 7.2122e-01, 2.5306e-01, 3.0650e-01, 4.3409e-01, + 9.9816e-02, 1.9639e-01, 7.3558e-01, 2.7641e-01, + 3.1877e-01, 6.3029e-01, 4.9436e-02, 6.1666e-02, + 1.8330e-01, 9.5107e-01, 5.5439e-01, 6.6792e-01, + 8.2772e-01, 9.6656e-01, 7.8306e-01, 5.9062e-01, + 3.0826e-01, 5.0366e-01, 5.5761e-01, 7.9138e-01, + 2.2155e-01, 7.3410e-02, 4.8333e-02, 9.9263e-01, + 7.4308e-01, 4.5779e-01, 1.3558e-01, 8.5205e-01, + 5.2809e-01, 8.2809e-01, 1.6841e-01, 7.6734e-01, + 3.7537e-01, 5.0999e-02, 3.3261e-01, 5.7544e-01, + 1.6073e-01, 5.1166e-01, 8.5912e-01, 5.0852e-02, + 4.5223e-01, 5.5238e-01, 3.9554e-01, 9.8357e-01, + 3.0320e-01, 8.7744e-01, 1.4758e-01, 1.4655e-01, + 2.7316e-02, 9.5691e-01, 8.9691e-01, 8.6758e-01, + 5.7732e-01, 4.4704e-02, 3.4562e-01, 4.2914e-01, + 4.5108e-01, 4.4187e-01, 2.1185e-01, 9.2910e-01, + 4.6358e-01, 2.3072e-01, 3.7005e-01, 1.0026e-01, + 6.3892e-01, 9.6763e-01, 5.1773e-01, 6.6996e-01, + 2.4993e-01, 3.3250e-02, 6.3957e-02, 9.9907e-01, + 2.8018e-01, 6.7336e-01, 8.3360e-01, 4.4062e-01, + 1.2094e-01, 8.5862e-01, 2.2777e-01, 4.7701e-01, + 1.6576e-01, 4.1261e-01, 2.5234e-01, 7.5435e-01, + 1.7376e-01, 2.5681e-03, 1.8495e-01, 1.0000e+00, + 9.1607e-01, 5.7275e-01, 4.7565e-01, 9.3616e-01, + 6.6086e-01, 8.0503e-01, 7.3343e-01, 8.6196e-03, + 6.5366e-01, 7.8956e-01, 7.3516e-01, 9.6460e-01, + 4.0672e-01, 5.2738e-01, 2.5533e-01, 4.3053e-01, + 2.3248e-01, 7.6982e-01, 1.9515e-01, 4.8751e-01, + 4.8569e-01, 4.4632e-02, 6.9747e-01, 6.4328e-01, + 6.9961e-01, 9.5667e-01, 3.0583e-01, 9.0765e-01, + 2.3249e-02, 4.0540e-01, 7.4445e-01, 2.1055e-03, + 2.4658e-01, 1.3805e-01, 2.0760e-01, 1.1964e-03, + 9.9521e-01, 4.0956e-01, 6.7347e-01, 2.0308e-01, + 4.3028e-01, 1.1904e-01, 8.9652e-01, 3.4449e-01, + 4.7361e-01, 3.5879e-01, 8.1937e-01, 6.3050e-02, + 1.6508e-01, 1.7439e-01, 7.8374e-01, 3.9950e-01, 
+ 1.7227e-01, 8.7328e-01, 7.8658e-01, 4.9498e-01, + 3.3876e-02, 8.3172e-01, 9.1283e-01, 8.7917e-01, + 2.0169e-01, 6.5658e-02, 7.7190e-01, 6.6651e-01, + 7.9569e-01, 2.1801e-01, 7.7614e-01, 5.0401e-01, + 5.7857e-01, 2.8377e-01, 9.9870e-01, 6.0524e-01, + 7.4395e-01, 8.9735e-01, 8.8323e-01, 4.1359e-01, + 4.4244e-01, 4.9893e-01, 1.5681e-01, 5.2141e-01, + 2.4167e-01, 8.8374e-01, 5.1436e-01, 1.8407e-01, + 6.6165e-01, 1.7410e-01, 5.4492e-01, 5.6033e-01, + 6.9790e-01, 6.9770e-01, 6.8980e-01, 9.2096e-01, + 1.9386e-01, 5.1630e-01, 8.9049e-01, 3.9166e-01, + 6.6505e-01, 3.9973e-01, 1.9501e-01, 9.8197e-01, + 2.8066e-01, 3.8555e-01, 3.7152e-01, 7.5428e-01, + 3.9550e-01, 9.2786e-01, 7.2301e-02, 1.0858e-01, + 9.1461e-01, 6.1283e-01, 4.0075e-01, 1.5337e-01, + 3.0898e-01, 7.1566e-01, 3.1539e-01, 1.4693e-01, + 5.4193e-01, 8.0422e-01, 9.2951e-01, 3.5892e-01, + 5.5562e-01, 4.9334e-01, 9.4294e-01, 9.0476e-01, + 8.7139e-01, 6.3989e-01, 7.3484e-01, 1.7721e-01, + 7.1621e-02, 1.7601e-01, 9.0718e-01, 8.1852e-01, + 7.5315e-01, 3.7997e-02, 4.8028e-01, 7.0264e-01, + 6.9360e-01, 9.5192e-01, 3.6020e-03, 8.5375e-01, + 6.6345e-01, 4.0143e-01, 8.1646e-01, 6.6356e-01, + 8.1196e-01, 1.8913e-01, 3.6135e-01, 7.6445e-01, + 8.0113e-01, 3.8505e-01, 6.2087e-01, 4.0541e-01, + 9.0328e-01, 9.8960e-01, 3.9491e-01, 3.2861e-01, + 2.4077e-01, 8.5295e-02, 6.6063e-01, 7.5284e-01, + 8.6461e-01, 6.5027e-01, 2.6920e-01, 6.3180e-01, + 6.6640e-01, 4.2697e-01, 5.4910e-01, 7.7115e-01, + 9.1563e-01, 8.3503e-01, 4.3982e-02, 8.1717e-01, + 2.1455e-01, 9.0696e-01, 1.4532e-01, 2.5161e-01, + 8.5910e-01, 4.0841e-01, 1.0155e-01, 3.8966e-01, + 5.8026e-02, 2.4796e-01, 9.2143e-01, 9.6167e-01, + 2.3908e-01, 8.9428e-01, 5.9576e-01, 2.3688e-01, + 7.7864e-01, 2.9945e-01, 7.3154e-01, 2.9258e-01, + 9.3863e-01, 7.0769e-01, 1.1840e-01, 5.5584e-01, + 8.0917e-01, 7.3230e-01, 3.6595e-01, 2.3943e-01, + 2.5254e-01, 4.2600e-01, 8.0429e-01, 8.2102e-01, + 9.9357e-01, 7.0730e-01, 2.8622e-02, 7.6121e-01, + 9.1161e-01, 1.5261e-01, 8.1279e-01, 3.6005e-01, + 6.3957e-04, 6.8287e-01, 5.1550e-01, 3.4230e-01, + 4.7016e-01, 9.2087e-01, 7.2029e-02, 9.0169e-01, + 1.2168e-01, 5.4687e-01, 7.5098e-01, 7.1094e-01, + 1.2890e-01, 4.1783e-01, 8.7660e-01, 7.6871e-01, + 7.2846e-01, 8.9185e-01, 2.9842e-01, 7.1083e-02, + 2.1251e-01, 5.6444e-01, 8.8363e-01, 6.3027e-01, + 1.5886e-01, 7.5043e-02, 2.0065e-01, 4.5332e-02, + 8.5014e-01, 9.8359e-01, 1.9084e-01, 1.2278e-01, + 9.8313e-01, 5.6321e-01, 6.7610e-02, 6.7602e-01, + 3.5669e-01, 4.5722e-01, 7.2843e-01, 1.8102e-02, + 5.1385e-01, 3.9632e-01, 6.0896e-01, 8.7720e-01, + 4.9369e-01, 8.2367e-01, 2.7371e-02, 9.4003e-01, + 9.2922e-01, 5.5031e-01, 5.7247e-01, 5.0883e-01, + 5.6550e-01, 7.0519e-01, 6.5189e-01, 8.6799e-01, + 5.8797e-01, 4.2679e-01, 1.4672e-01, 2.9150e-01, + 6.7406e-01, 3.7419e-01, 3.4793e-01, 9.8250e-01, + 4.0231e-01, 3.7652e-01, 3.0567e-02, 6.0512e-01, + 5.5014e-01, 9.1482e-02, 5.5522e-01, 4.9000e-02, + 6.9051e-01, 8.7529e-01, 9.8904e-01, 4.2412e-01, + 6.0836e-01, 9.3958e-01, 5.7984e-01, 1.2874e-01, + 1.0658e-01, 2.0434e-01, 6.1495e-02, 6.8177e-02, + 2.9000e-01, 5.5890e-01, 3.6138e-01, 6.1168e-02, + 1.5298e-01, 4.3844e-01, 7.6368e-01, 7.0733e-01, + 6.7977e-02, 8.9536e-01, 3.5137e-01, 4.6033e-03, + 9.8618e-01, 4.0626e-01, 4.9003e-01, 2.0614e-01, + 6.7460e-01, 6.4832e-01, 6.7194e-01, 4.6664e-01, + 3.6714e-01, 5.7607e-02, 2.2149e-01, 8.4017e-01, + 8.5011e-01, 4.7437e-03, 5.9469e-02, 6.3446e-02, + 7.8122e-01, 1.9259e-01, 1.9575e-01, 6.2391e-01, + 7.8385e-01, 6.8522e-02, 1.9338e-01, 4.9934e-01, + 5.6845e-01, 2.6027e-01, 4.3072e-01, 3.2517e-01, + 
5.6396e-01, 6.3102e-01, 3.4160e-01, 5.4922e-01, + 8.7075e-01, 7.7916e-01, 4.1675e-01, 4.9554e-01, + 5.8587e-01, 6.4762e-01, 7.8119e-01, 4.3919e-03, + 6.5845e-01, 1.3019e-01, 7.9377e-01, 5.9151e-01, + 7.6120e-01, 5.4426e-01, 4.3287e-01, 4.8359e-01, + 5.4465e-01, 4.4114e-01, 5.0528e-01, 9.1374e-01, + 7.8911e-02, 8.8833e-01, 8.6521e-01, 3.9218e-01, + 8.2893e-01, 3.8719e-01, 2.3135e-01, 6.4209e-01, + 9.9701e-02, 4.4302e-02, 1.9806e-01, 6.8147e-01, + 8.9793e-01, 3.7123e-01, 2.5056e-03, 3.3164e-01, + 4.3052e-01, 1.5819e-01, 3.4733e-01, 6.7513e-01, + 6.8054e-01, 5.7332e-01, 8.8727e-01, 4.3738e-01, + 1.3038e-01, 8.4389e-01, 1.3206e-01, 1.2855e-01, + 3.5118e-01, 3.4285e-01, 2.5724e-01, 9.4073e-01, + 8.8163e-01, 6.5458e-01, 3.9933e-01, 9.9961e-01, + 6.4215e-01, 6.6192e-01, 8.5375e-01, 7.6771e-01, + 7.7652e-02, 9.1099e-01, 1.4887e-01, 1.6307e-01, + 8.0566e-01, 7.9000e-01, 2.7610e-01, 7.2547e-01, + 4.1175e-01, 1.4861e-01, 3.5427e-01, 9.0601e-01, + 1.3963e-01, 7.6889e-01, 2.5794e-01, 3.5241e-01, + 1.6184e-02, 7.5011e-02, 7.3078e-02, 5.1412e-01, + 2.3442e-01, 1.5875e-01, 5.4081e-01, 5.7278e-01, + 9.4080e-01, 4.9640e-01, 1.2958e-01, 8.8391e-01, + 1.3533e-01, 1.4652e-01, 5.7397e-01, 8.2022e-02, + 5.7562e-01, 1.4406e-02, 7.3305e-01, 4.5024e-01, + 7.8591e-01, 7.0032e-01, 3.6518e-01, 2.8109e-01, + 3.9979e-01, 3.9276e-01, 1.1768e-01, 7.5371e-03, + 5.4561e-01, 8.7778e-01, 2.5035e-01, 5.8518e-01, + 3.9610e-01, 5.8396e-01, 1.8499e-01, 1.8055e-01, + 7.4208e-01, 4.6330e-02, 7.0902e-01, 4.7633e-01, + 9.7388e-01, 4.3459e-01, 4.5693e-01, 9.6058e-01, + 3.2471e-02, 8.8009e-01, 5.3947e-02, 9.2192e-01, + 4.2470e-01, 1.6426e-01, 5.1955e-01, 2.8258e-02, + 9.5450e-01, 5.4952e-02, 7.6706e-01, 9.9013e-01, + 9.3347e-01, 6.5279e-01, 8.3907e-03, 7.4736e-01, + 4.8191e-01, 2.1136e-01, 2.1377e-01, 1.3309e-01, + 6.5307e-01, 8.5574e-01, 5.0506e-01, 5.9193e-01, + 6.8677e-01, 8.7248e-01, 8.1080e-01, 8.9582e-01, + 6.8958e-01, 2.1142e-01, 4.7573e-03, 9.6826e-01, + 1.7756e-01, 6.4615e-01, 3.5576e-01, 9.5981e-02, + 5.3339e-01, 4.7324e-01, 8.5182e-01, 5.4994e-01, + 1.5602e-01, 7.6608e-01, 3.7498e-02, 8.7590e-01, + 7.6750e-01, 2.4040e-01, 2.8479e-01, 1.4211e-01, + 4.0393e-01, 9.2875e-01, 3.4455e-01, 6.4688e-01, + 6.1014e-01, 4.8376e-01, 5.0182e-01, 2.0583e-01, + 3.3219e-01, 1.3503e-02, 1.8121e-01, 6.7349e-01, + 8.2809e-01, 8.1199e-01, 3.4731e-01, 5.8011e-01, + 7.3567e-01, 5.8088e-02, 4.2337e-01, 3.1059e-01, + 3.8965e-01, 6.0153e-01, 7.2484e-01, 9.4294e-01, + 8.0136e-01, 7.4927e-01, 8.1955e-01, 5.6761e-01, + 5.7307e-01, 1.5681e-01, 1.8125e-01, 3.3417e-01, + 6.7939e-01, 1.5777e-01, 4.1142e-01, 8.4487e-01, + 8.5166e-01, 7.0297e-02, 3.8760e-01, 8.0969e-01, + 4.3013e-01, 8.0027e-02, 3.0601e-01, 4.5496e-01, + 2.7219e-01, 1.8382e-01, 1.5202e-01, 2.9277e-01, + 4.7900e-01, 2.9491e-03, 6.8582e-03, 9.3870e-01, + 6.3725e-01, 9.1911e-01, 8.0806e-01, 9.5255e-01, + 4.8689e-01, 2.9544e-01, 6.0318e-01, 7.4514e-01, + 9.0766e-01, 5.6968e-01, 8.2934e-01, 4.0808e-01, + 4.1912e-01, 2.8695e-01, 6.9635e-01, 6.9616e-01, + 1.0305e-01, 3.1863e-01, 6.8020e-01, 3.9003e-01, + 8.5212e-02, 9.6813e-01, 9.9275e-01, 6.9106e-01, + 2.8397e-01, 9.8639e-01, 9.0359e-01, 1.3666e-01, + 4.2407e-01, 1.6893e-01, 9.4976e-02, 1.5622e-01, + 6.0583e-02, 7.0498e-01, 3.0456e-01, 2.8378e-01, + 7.3705e-01, 7.2083e-01, 8.5651e-01, 1.7320e-01, + 4.1473e-01, 5.5244e-01, 3.3491e-01, 1.4843e-02, + 1.2679e-01, 1.0535e-01, 3.4498e-01, 1.5715e-01, + 9.2407e-01, 1.5016e-01, 4.4311e-01, 8.4977e-01, + 2.9208e-01, 8.7145e-01, 5.5186e-01, 3.9970e-01, + 7.5886e-01, 8.8531e-01, 9.4403e-01, 2.3981e-01, + 
1.2418e-01, 9.5008e-01, 8.6218e-01, 6.4700e-01, + 4.8679e-01, 5.1759e-01, 7.9521e-01, 3.4292e-01, + 8.7986e-01, 9.5241e-01, 8.3451e-01, 3.1721e-01, + 4.7064e-01, 8.2071e-01, 2.3395e-01, 4.0132e-01, + 6.0260e-01, 3.5722e-01, 2.3449e-01, 1.4632e-01, + 4.4251e-01, 1.8677e-01, 7.8052e-01, 2.4572e-01, + 8.9974e-01, 8.9147e-01, 4.9607e-01, 6.3807e-01, + 9.2431e-01, 3.4173e-01, 6.7472e-01, 3.5796e-02, + 5.2657e-02, 1.6358e-01, 9.7416e-01, 4.1858e-01, + 5.4197e-01, 8.6592e-01, 5.3852e-02, 7.8010e-01, + 6.1395e-01, 5.2540e-01, 8.7942e-01, 1.3611e-02, + 4.5894e-01, 9.8083e-01, 2.9155e-01, 6.4002e-01, + 3.2940e-01, 3.1442e-01, 7.1375e-01, 4.7953e-01, + 2.0662e-02, 8.9532e-01, 7.6775e-01, 8.2081e-01, + 6.1977e-01, 3.2007e-01, 7.0391e-01, 4.5597e-01, + 4.7747e-01, 2.8398e-01, 8.1237e-02, 2.7245e-01, + 1.3464e-01, 2.3925e-01, 4.4407e-01, 7.0109e-01, + 4.8102e-01, 5.9986e-01, 1.3919e-01, 5.1251e-02, + 4.3007e-01, 9.2592e-01, 1.9823e-01, 1.3662e-02, + 8.3138e-01, 3.8364e-01, 6.3230e-02, 6.2167e-01, + 8.0504e-01, 9.8605e-01, 5.1738e-01, 9.6224e-01, + 4.4702e-01, 8.1295e-01, 7.6471e-01, 9.6230e-01, + 3.6418e-01, 9.4362e-01, 6.0845e-01, 1.2274e-01, + 7.8330e-01, 2.2238e-01, 2.2081e-01, 3.2742e-02, + 2.6091e-01, 3.2325e-02, 9.7243e-02, 1.4127e-01, + 9.4095e-01, 7.6524e-01, 4.9794e-02, 9.4281e-01, + 9.3715e-01, 9.8293e-01, 3.9644e-01, 1.0333e-01, + 5.8857e-01, 4.0045e-01, 4.9636e-01, 1.0200e-02, + 4.0024e-01, 3.6433e-01, 2.4720e-01, 5.1993e-01, + 3.2291e-01, 3.2678e-01, 2.5348e-01, 1.5660e-02, + 8.4176e-01, 4.0487e-02, 2.3948e-02, 3.8300e-01, + 9.0769e-03, 5.7714e-01, 2.3274e-01, 5.2108e-01, + 2.4439e-02, 7.4414e-01, 1.0144e-01, 1.0115e-01, + 6.6259e-01, 2.3982e-01, 1.6289e-01, 5.6218e-01, + 9.0632e-01, 7.1038e-01, 2.7735e-01, 5.4562e-01, + 5.2784e-01, 1.5750e-01, 5.8376e-01, 2.9590e-02, + 7.0418e-01, 8.0219e-01, 5.7950e-01, 7.2401e-03, + 4.4886e-01, 4.6777e-01, 7.9362e-01, 2.4707e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3198, 0.6605, 0.6944, ..., 0.6134, 0.5235, 0.6507]) +tensor([0.5683, 0.7872, 0.4946, ..., 0.0674, 0.9451, 0.0950]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -375,650 +375,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 0.015525579452514648 seconds +Time: 0.01529836654663086 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 67630 -ss 10000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.8812994956970215} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), - col_indices=tensor([8647, 4654, 5499, 4449, 7451, 3553, 6737, 3727, 9375, - 328, 3652, 7464, 1792, 3337, 3219, 9816, 8774, 722, - 7286, 8975, 5239, 5207, 7520, 2047, 9786, 1775, 7824, - 1686, 6523, 1807, 7651, 3617, 6569, 7853, 4440, 4649, - 2754, 2264, 1878, 5699, 1779, 6715, 4641, 6933, 4479, - 8379, 2007, 678, 8640, 712, 2714, 3092, 8404, 958, - 6892, 5229, 232, 1407, 5081, 5811, 8597, 2045, 959, - 9609, 9725, 137, 6909, 8887, 5531, 9907, 8259, 3598, - 3106, 4242, 1459, 975, 9373, 279, 5289, 239, 4982, - 7449, 2338, 3106, 7326, 5651, 7345, 5951, 2276, 9406, - 1555, 187, 5936, 3172, 5886, 8072, 5078, 8086, 5802, - 9928, 7066, 2942, 5468, 8880, 3550, 6036, 7923, 2059, - 5727, 8966, 3271, 5191, 8019, 143, 8926, 3410, 1927, - 5129, 6995, 4214, 8413, 2923, 5122, 289, 6094, 9084, - 3943, 7811, 5472, 4461, 9629, 2075, 1933, 9084, 666, - 1579, 5138, 7074, 4251, 2116, 8273, 7982, 7533, 4948, - 9847, 5061, 4404, 5976, 6566, 4338, 4776, 2382, 9615, - 4466, 7669, 7291, 5642, 7421, 9834, 953, 7266, 4045, - 4744, 4897, 9364, 1730, 4368, 9844, 1957, 9401, 1180, - 2799, 2434, 2079, 3790, 944, 1503, 5578, 6431, 8745, - 9367, 9630, 9634, 3838, 2042, 1290, 752, 5314, 9275, - 1425, 3780, 3456, 7397, 6659, 2673, 3050, 213, 3865, - 108, 374, 8126, 9474, 7399, 1622, 8557, 3681, 4079, - 3800, 9932, 1087, 1720, 2470, 9941, 7514, 803, 6483, - 6709, 7368, 2720, 2999, 6038, 6911, 1766, 360, 1533, - 1807, 5357, 5158, 2293, 7675, 8708, 6003, 955, 3712, - 7798, 1523, 5030, 4107, 8054, 1811, 47, 8376, 4369, - 3092, 3036, 7834, 3657, 7158, 1452, 2555, 7073, 7909, - 7008, 2655, 9224, 6660, 9253, 1818, 4261, 3550, 579, - 5989, 8739, 5752, 2907, 2571, 1252, 8072, 119, 3969, - 1752, 2664, 8669, 8282, 3748, 1807, 2214, 2082, 1936, - 5224, 9356, 8771, 9920, 2445, 9815, 3980, 583, 9973, - 8012, 4204, 3618, 3650, 5388, 358, 1532, 1955, 4875, - 158, 6910, 686, 8612, 6716, 2830, 2503, 2259, 8773, - 9299, 2216, 6023, 9677, 1449, 3531, 9724, 643, 8636, - 3334, 5334, 3817, 8516, 9990, 2373, 5938, 4650, 9304, - 4529, 4507, 9739, 8866, 5608, 724, 4488, 9831, 257, - 2286, 7193, 1443, 6012, 8866, 327, 2031, 3505, 4806, - 6614, 384, 3462, 494, 3168, 3506, 6739, 5696, 7354, - 9211, 6058, 9379, 1835, 4352, 2144, 4085, 70, 1475, - 9132, 9185, 6667, 9380, 4485, 7941, 1673, 3774, 5096, - 1961, 1035, 737, 664, 885, 9541, 1359, 1408, 8312, - 5862, 2042, 7184, 2999, 9384, 5279, 8734, 1236, 2120, - 7519, 5331, 9850, 3362, 7044, 5123, 9555, 7957, 1966, - 4504, 9159, 7008, 2116, 6625, 9822, 5379, 5010, 96, - 6277, 7752, 9818, 4290, 1742, 8951, 1186, 3862, 2517, - 9435, 8552, 3364, 3956, 4806, 2389, 9824, 8423, 1737, - 3271, 5071, 836, 5580, 9804, 5970, 4791, 6708, 506, - 5852, 8007, 4051, 4343, 4073, 9299, 5039, 3444, 1200, - 3576, 7481, 5426, 7249, 286, 6046, 6879, 1392, 3912, - 5483, 3682, 6743, 235, 4404, 9434, 5444, 7481, 6774, - 1351, 3311, 6546, 8886, 4270, 2502, 694, 1728, 6058, - 5335, 9753, 5031, 6419, 2685, 4780, 4575, 2462, 1331, - 6584, 7477, 346, 1448, 1524, 8493, 3416, 5801, 5122, - 9403, 20, 3995, 7626, 7649, 4382, 6018, 4365, 415, - 1738, 1419, 3860, 5817, 1889, 5409, 6227, 459, 3324, - 462, 8734, 6319, 6608, 2415, 1202, 8065, 3675, 3079, - 7072, 1983, 4067, 8193, 238, 8863, 1444, 1643, 9833, - 9735, 3625, 4690, 2588, 2956, 8114, 2900, 2471, 2097, - 9641, 705, 9577, 9145, 5785, 4597, 41, 1057, 2855, - 1577, 3417, 2854, 6268, 5170, 6633, 6168, 6675, 610, - 79, 3397, 4486, 8438, 1953, 8993, 2, 9913, 3737, - 8115, 
1773, 5791, 7003, 8850, 7715, 7068, 2545, 22, - 2997, 7784, 2193, 8115, 4276, 9097, 8603, 2402, 6620, - 4490, 3390, 8009, 9364, 1928, 6058, 1117, 8459, 7279, - 9802, 937, 4414, 269, 5996, 3388, 2213, 8303, 1471, - 6791, 5503, 786, 3105, 6092, 3993, 5432, 5514, 1482, - 284, 4826, 3661, 1299, 3045, 947, 2784, 6465, 53, - 3237, 3325, 8755, 4946, 5582, 9687, 4216, 8261, 8643, - 4128, 9567, 5015, 2626, 7520, 7301, 1665, 4611, 1178, - 4466, 8878, 105, 9091, 656, 38, 6417, 9503, 8593, - 85, 2340, 2943, 6566, 86, 5872, 5085, 7636, 6075, - 8279, 3927, 2818, 3066, 5835, 9800, 3701, 8166, 5053, - 2914, 5582, 6646, 4031, 152, 1816, 2513, 6513, 9514, - 7368, 2044, 9261, 8730, 2154, 9077, 900, 5045, 1209, - 7769, 5333, 2182, 9868, 7515, 6495, 8081, 9467, 485, - 4842, 7595, 346, 3896, 4589, 7304, 102, 9131, 1881, - 7363, 4667, 3124, 1494, 7640, 2085, 5969, 327, 8422, - 4445, 7075, 7495, 4090, 7290, 414, 7042, 2615, 5534, - 9631, 6285, 5895, 6803, 3363, 109, 1042, 5715, 699, - 6263, 3089, 8671, 1903, 1932, 1891, 6086, 2033, 4653, - 236, 8358, 3454, 2498, 1012, 1022, 7187, 8474, 6396, - 2842, 3209, 5149, 2014, 5465, 5988, 5452, 2136, 2496, - 5828, 8399, 834, 1934, 4582, 2435, 4518, 8149, 1215, - 2360, 572, 7934, 7805, 7177, 4755, 7951, 6277, 3279, - 3301, 4324, 7617, 3138, 4131, 8647, 4535, 537, 9550, - 7765, 7501, 7201, 79, 676, 5103, 21, 5905, 5104, - 8383, 1494, 9361, 748, 2928, 9945, 8214, 6916, 8639, - 9369, 2781, 3513, 6667, 4267, 1563, 9150, 329, 6830, - 2855, 9810, 1621, 8822, 1933, 9318, 1258, 8146, 8930, - 8826, 5989, 2187, 6638, 9013, 7520, 9266, 3394, 1048, - 8429, 6417, 8432, 9384, 9431, 9345, 7911, 313, 7347, - 7640, 9284, 302, 4482, 517, 9019, 2279, 7907, 7949, - 94, 3374, 6951, 3507, 2953, 7996, 9649, 4108, 9885, - 1601, 9214, 4791, 7898, 3000, 8842, 4620, 6241, 6616, - 2733, 147, 5293, 9372, 2648, 2261, 1230, 6095, 4677, - 2716, 6197, 8800, 7896, 4094, 6693, 1892, 1616, 5199, - 6890, 5117, 7860, 6533, 6255, 6453, 8306, 6190, 616, - 6010, 482, 4524, 4452, 5701, 2687, 6600, 6294, 7700, - 8616, 665, 690, 6448, 6296, 6304, 8118, 8907, 31, - 6487, 9750, 5850, 4854, 162, 383, 206, 7720, 7300, - 1965, 2943, 9050, 8357, 4610, 1564, 3450, 7501, 5942, - 805, 2704, 8681, 8214, 7783, 2722, 8026, 3060, 4714, - 1241, 6614, 1767, 9333, 6280, 2376, 4705, 5029, 507, - 2510, 3816, 3176, 4093, 4640, 1394, 4466, 1634, 5625, - 9616, 9856, 5608, 847, 5783, 8919, 4444, 2893, 8647, - 657, 2075, 373, 2227, 3120, 4500, 9463, 5337, 7565, - 9168, 5069, 8132, 5707, 2138, 1858, 3288, 6897, 4099, - 3082, 484, 3044, 2456, 7707, 5443, 5260, 805, 9070, - 2483, 8225, 9487, 1239, 1687, 3954, 6303, 8989, 5541, - 7977, 7424, 7522, 2231, 1796, 5265, 9895, 4183, 3338, - 1377]), - values=tensor([2.6711e-01, 1.5256e-01, 4.7895e-01, 9.7177e-01, - 3.3217e-01, 4.8662e-01, 9.6837e-02, 4.9364e-02, - 4.1631e-01, 7.9638e-01, 4.9818e-01, 1.0329e-01, - 8.5050e-01, 3.5207e-01, 8.8170e-01, 1.3600e-01, - 8.9499e-01, 8.9928e-01, 9.5856e-02, 2.0110e-01, - 6.7254e-01, 8.1814e-01, 1.4681e-01, 1.3722e-01, - 8.1086e-01, 1.9533e-01, 8.2412e-01, 8.9212e-01, - 9.7206e-01, 6.8201e-02, 3.7707e-01, 2.6646e-01, - 7.2796e-01, 8.8582e-01, 2.5316e-01, 6.6138e-01, - 7.9265e-01, 3.3510e-01, 6.5242e-02, 9.4971e-01, - 4.5732e-01, 4.8267e-02, 1.9899e-01, 8.6938e-01, - 2.0791e-01, 4.0034e-01, 7.8684e-01, 4.0125e-01, - 2.0749e-02, 7.8661e-01, 1.5651e-02, 2.8427e-01, - 1.1615e-01, 8.5568e-01, 1.5852e-01, 5.8611e-02, - 3.7139e-01, 9.1537e-01, 7.3291e-01, 6.1366e-02, - 9.4998e-01, 3.3764e-01, 5.4722e-01, 2.2605e-01, - 9.0416e-01, 2.5427e-01, 8.4136e-01, 5.3180e-01, - 
9.3963e-01, 8.3464e-01, 7.9459e-02, 2.1388e-01, - 7.0909e-01, 1.5803e-01, 4.7915e-01, 5.6413e-01, - 7.7630e-01, 9.0240e-01, 1.3854e-01, 7.6111e-01, - 5.9490e-01, 7.5820e-01, 4.9648e-01, 3.5839e-01, - 9.7634e-01, 6.3633e-01, 6.7076e-01, 1.2479e-01, - 6.1116e-01, 2.5072e-01, 3.9865e-01, 3.1353e-01, - 8.0245e-01, 8.9919e-01, 7.6141e-01, 5.2578e-02, - 6.3377e-01, 3.9030e-01, 5.8898e-01, 9.7807e-01, - 6.7515e-02, 9.7033e-01, 5.3258e-01, 2.4756e-01, - 6.1442e-01, 5.7467e-01, 9.1736e-01, 3.8045e-01, - 6.4755e-01, 8.4549e-01, 3.4941e-02, 2.2137e-01, - 2.6165e-01, 1.5812e-01, 4.4836e-01, 4.0372e-01, - 6.8160e-01, 3.4621e-01, 3.1729e-01, 7.9594e-01, - 8.5949e-03, 4.1101e-01, 2.9323e-01, 3.0476e-01, - 2.1348e-01, 1.9134e-01, 9.2995e-01, 5.6460e-01, - 1.6420e-01, 4.6461e-01, 1.6407e-01, 9.0001e-01, - 2.1794e-01, 4.7626e-01, 9.9720e-01, 1.9139e-01, - 7.5819e-01, 6.4924e-01, 2.8439e-01, 8.1343e-01, - 9.9954e-01, 2.0023e-01, 1.9576e-01, 2.2422e-01, - 3.9389e-01, 4.0590e-01, 9.4587e-01, 9.5859e-01, - 9.1196e-01, 3.3142e-01, 6.5074e-02, 8.1017e-01, - 2.6550e-01, 5.0111e-01, 4.9418e-01, 3.1742e-01, - 4.8848e-01, 7.9520e-01, 2.2326e-01, 9.0457e-01, - 2.3438e-01, 3.5241e-01, 8.5370e-01, 2.5089e-01, - 9.9178e-01, 5.9049e-01, 1.4631e-01, 3.6536e-02, - 5.5173e-01, 4.9073e-01, 4.1231e-01, 2.7039e-01, - 3.3169e-01, 2.5824e-01, 9.4158e-03, 2.0982e-01, - 3.0498e-01, 8.5326e-01, 4.7060e-01, 7.8361e-01, - 1.3420e-01, 2.0663e-01, 7.7739e-01, 5.1495e-01, - 1.6129e-01, 6.9651e-01, 1.0713e-02, 6.5069e-01, - 5.6758e-01, 4.3940e-01, 8.3930e-01, 6.0393e-01, - 3.0148e-01, 9.9091e-02, 5.5146e-02, 3.0113e-01, - 7.6004e-01, 5.5121e-01, 4.1376e-01, 3.0646e-01, - 8.9187e-01, 3.8609e-01, 4.1549e-01, 2.1979e-01, - 5.1501e-01, 2.2452e-01, 1.4563e-01, 1.4372e-02, - 4.8200e-02, 5.2964e-01, 1.1575e-03, 3.5902e-01, - 7.5079e-01, 3.0657e-01, 9.7819e-01, 6.8395e-01, - 5.8067e-01, 8.4327e-01, 5.8382e-01, 7.1746e-01, - 2.3417e-03, 4.3092e-01, 9.2003e-01, 9.2056e-01, - 3.9954e-01, 7.7219e-01, 1.6823e-01, 6.3088e-01, - 9.6458e-01, 8.5932e-02, 6.4564e-02, 8.0078e-01, - 2.8911e-01, 6.4202e-01, 9.9214e-01, 3.9409e-01, - 4.4199e-01, 5.1642e-01, 7.1285e-01, 4.9413e-01, - 7.5052e-01, 2.8058e-01, 2.2623e-01, 2.1988e-01, - 6.1212e-01, 6.0671e-01, 8.2609e-01, 9.1290e-01, - 7.1068e-01, 9.9832e-01, 2.6371e-01, 6.6080e-01, - 5.4266e-01, 4.3958e-01, 1.5237e-01, 3.0533e-01, - 9.5470e-01, 7.7750e-01, 7.7943e-01, 5.6432e-01, - 8.5319e-01, 3.9678e-02, 8.6073e-01, 5.0306e-01, - 8.2644e-01, 8.2269e-01, 3.4132e-01, 9.1906e-01, - 7.9166e-01, 3.2085e-01, 4.6701e-01, 9.9773e-01, - 6.9791e-01, 3.6137e-01, 1.1040e-01, 1.7501e-01, - 9.8849e-01, 4.4263e-01, 1.9965e-01, 3.4535e-01, - 5.6003e-01, 3.6558e-01, 9.4761e-01, 8.2034e-01, - 2.4737e-01, 4.2745e-01, 1.7134e-01, 5.8162e-01, - 1.6103e-01, 6.0025e-01, 2.7172e-01, 7.9051e-01, - 9.2993e-01, 3.7228e-02, 4.5206e-01, 6.1660e-01, - 3.6437e-01, 4.8195e-01, 9.5990e-01, 6.9781e-01, - 7.3194e-01, 9.0573e-01, 9.1063e-01, 8.5010e-01, - 8.7019e-01, 3.7969e-01, 1.0579e-01, 3.2359e-01, - 2.3793e-01, 3.0734e-01, 4.3956e-01, 7.3710e-01, - 3.4507e-01, 4.9102e-02, 8.9148e-01, 6.3684e-01, - 3.3010e-01, 4.9130e-01, 1.4864e-01, 8.4955e-01, - 7.1091e-01, 1.8830e-01, 7.9391e-01, 3.4619e-01, - 2.0274e-01, 7.6287e-01, 9.2535e-01, 8.5751e-01, - 2.0318e-01, 3.7269e-01, 8.1414e-01, 6.3746e-01, - 2.4272e-02, 9.0444e-01, 2.7878e-01, 6.2372e-01, - 8.5718e-01, 5.8344e-01, 7.8253e-01, 8.7141e-02, - 1.8131e-01, 7.4047e-01, 3.6256e-01, 4.1791e-01, - 9.5543e-01, 5.4189e-01, 8.9479e-01, 9.2614e-01, - 3.3140e-01, 3.7395e-02, 1.7005e-02, 6.2850e-01, - 
6.3872e-01, 7.6598e-02, 5.2965e-01, 5.2871e-02, - 3.4428e-01, 3.5255e-01, 3.5881e-01, 4.4021e-01, - 9.4895e-01, 5.3642e-02, 1.2858e-01, 2.7157e-01, - 4.5236e-01, 4.0391e-01, 4.7756e-01, 8.1354e-01, - 8.3760e-02, 5.6691e-01, 6.4345e-02, 7.4975e-01, - 4.5835e-02, 2.0528e-01, 2.5545e-01, 5.2956e-01, - 8.8487e-02, 3.1582e-03, 9.6494e-01, 6.3755e-01, - 9.1466e-01, 4.2722e-01, 3.3045e-01, 7.7722e-01, - 2.3250e-01, 9.4868e-01, 5.6161e-01, 3.1958e-01, - 8.5872e-01, 7.0911e-01, 3.9428e-01, 7.6624e-01, - 3.2459e-01, 4.3472e-01, 1.1225e-01, 5.3608e-01, - 3.8279e-01, 1.1010e-01, 5.9535e-01, 8.1914e-01, - 3.5874e-01, 2.9956e-01, 7.5475e-01, 4.1724e-01, - 6.6390e-01, 4.5093e-02, 1.4779e-01, 3.3404e-01, - 6.0949e-01, 3.4111e-01, 5.3452e-01, 4.6772e-01, - 6.5799e-01, 1.4563e-01, 8.9795e-02, 7.0677e-01, - 9.6720e-01, 3.5216e-01, 6.8202e-01, 6.4105e-01, - 5.2730e-02, 9.0455e-01, 5.0695e-01, 2.2618e-01, - 4.1787e-01, 4.9977e-01, 1.7190e-01, 8.1795e-01, - 2.5523e-01, 1.6846e-01, 3.0340e-01, 1.2116e-01, - 5.7507e-01, 3.2181e-02, 4.0972e-01, 5.3684e-01, - 7.7024e-02, 2.5776e-01, 3.8207e-02, 5.0350e-01, - 6.8774e-01, 3.7681e-01, 7.8401e-01, 3.5398e-01, - 1.7284e-01, 8.2055e-01, 8.0188e-01, 7.5652e-01, - 8.4424e-01, 7.5359e-02, 5.7660e-01, 6.7798e-01, - 5.1769e-01, 2.5610e-01, 6.8145e-01, 1.9421e-01, - 2.4913e-01, 3.1308e-01, 7.1412e-01, 5.3230e-01, - 3.8665e-01, 2.4735e-01, 3.3487e-01, 3.6920e-01, - 9.0259e-01, 4.9511e-01, 5.0551e-01, 4.5521e-01, - 2.8694e-01, 8.0323e-01, 7.3814e-01, 3.2435e-02, - 9.1875e-01, 1.7711e-01, 1.7888e-01, 4.0248e-01, - 7.6070e-01, 1.6275e-01, 2.8341e-01, 7.5435e-01, - 8.6267e-01, 4.2804e-01, 4.0729e-01, 4.6713e-01, - 6.8111e-01, 6.2087e-01, 5.1119e-01, 5.5762e-01, - 2.1222e-01, 3.0056e-01, 9.3891e-01, 7.6294e-01, - 9.2317e-01, 6.5980e-01, 6.9628e-01, 3.8871e-01, - 2.1686e-02, 8.4704e-01, 1.3727e-01, 2.5067e-01, - 3.1685e-01, 2.0661e-01, 4.0914e-01, 5.7463e-01, - 9.9025e-01, 9.5729e-01, 9.5966e-01, 7.1123e-01, - 4.3624e-01, 8.1709e-01, 6.3299e-01, 7.6594e-01, - 2.4838e-01, 3.6857e-01, 9.2991e-01, 1.9568e-01, - 8.4934e-01, 9.8161e-01, 4.7543e-01, 3.4680e-01, - 8.5980e-01, 5.5619e-01, 8.9898e-01, 1.1013e-01, - 5.1630e-01, 6.2091e-01, 8.6039e-01, 4.2882e-01, - 6.1202e-01, 8.7425e-01, 4.3832e-01, 5.2528e-01, - 9.2188e-01, 8.4098e-01, 6.1872e-01, 7.1332e-01, - 7.0086e-01, 9.5349e-01, 2.9772e-01, 1.4673e-01, - 4.6976e-01, 1.7743e-01, 7.4055e-01, 9.7266e-01, - 8.9465e-01, 8.7893e-01, 5.5357e-01, 9.5969e-01, - 9.4823e-01, 8.3593e-01, 7.5239e-01, 8.5419e-01, - 8.2583e-01, 6.3540e-02, 9.2498e-01, 9.5993e-01, - 7.7555e-01, 2.7650e-01, 4.8435e-01, 3.2763e-01, - 6.3364e-01, 3.2754e-01, 5.6219e-01, 2.8495e-01, - 5.3835e-01, 3.8651e-01, 1.3108e-02, 7.7623e-01, - 3.0955e-01, 6.0302e-01, 8.1158e-01, 2.7890e-01, - 2.2545e-01, 5.6962e-02, 4.3985e-01, 6.0311e-01, - 1.9924e-01, 1.1954e-01, 6.5392e-02, 7.3132e-01, - 4.1029e-01, 3.1499e-01, 4.5299e-01, 6.2903e-01, - 3.9752e-01, 3.2276e-01, 7.1679e-01, 2.7750e-01, - 1.6683e-01, 8.9769e-01, 2.6029e-02, 2.7055e-01, - 8.4552e-02, 3.8641e-01, 1.2100e-01, 5.4977e-01, - 5.6060e-01, 9.5285e-01, 7.9471e-01, 8.9880e-01, - 7.2027e-01, 6.6256e-01, 4.0963e-02, 7.9062e-01, - 1.1688e-01, 6.4308e-01, 7.9788e-01, 2.4992e-01, - 2.6054e-01, 2.4177e-01, 6.5576e-01, 5.0046e-02, - 2.6875e-01, 7.4788e-01, 9.9256e-01, 7.4616e-01, - 9.4601e-01, 8.6212e-01, 3.7690e-01, 6.9520e-01, - 5.7234e-01, 8.1622e-01, 5.1617e-01, 4.5916e-01, - 1.9811e-01, 8.1565e-01, 6.0003e-01, 8.9139e-01, - 5.3729e-01, 7.2110e-01, 8.6277e-01, 9.4231e-01, - 8.4618e-01, 4.9181e-01, 6.9875e-01, 2.7656e-01, - 
2.0255e-01, 5.9269e-02, 1.4714e-01, 6.9136e-01, - 3.8930e-01, 5.4488e-01, 5.5210e-01, 9.9922e-01, - 2.7776e-01, 6.7954e-01, 2.6406e-01, 9.1547e-01, - 4.4960e-01, 3.0461e-01, 2.7204e-02, 3.8050e-01, - 7.6526e-01, 9.9671e-01, 9.6274e-02, 4.6527e-01, - 6.3199e-01, 1.3465e-01, 1.2424e-01, 4.2475e-01, - 1.5085e-02, 5.3115e-01, 2.5265e-01, 5.4656e-01, - 8.3119e-01, 7.9993e-01, 1.3612e-01, 6.2849e-01, - 9.6348e-01, 8.4367e-01, 1.0647e-01, 3.5132e-01, - 5.4878e-02, 3.0414e-01, 3.8242e-01, 3.4146e-01, - 5.8380e-01, 4.3874e-01, 6.8347e-01, 3.3214e-01, - 9.5222e-01, 2.4075e-01, 5.4615e-01, 5.0775e-01, - 6.2950e-01, 6.5791e-01, 5.3492e-01, 4.6581e-01, - 2.9513e-01, 5.5712e-01, 9.6987e-01, 9.2809e-01, - 6.6533e-01, 1.5652e-01, 3.0325e-01, 8.3782e-01, - 3.8865e-02, 6.0800e-01, 5.9107e-03, 7.1721e-01, - 1.8278e-01, 2.9456e-01, 7.1928e-01, 4.3675e-01, - 4.2686e-04, 3.2199e-01, 3.1531e-01, 9.5563e-01, - 5.9256e-01, 6.6577e-01, 9.4022e-01, 5.4210e-01, - 2.2494e-01, 8.8292e-01, 5.0262e-01, 1.7811e-01, - 7.3303e-01, 3.7569e-01, 1.6461e-01, 4.7748e-01, - 6.5068e-01, 7.6588e-01, 6.9365e-01, 1.6987e-01, - 9.3394e-01, 5.6170e-01, 7.6574e-01, 4.8042e-01, - 5.2666e-01, 4.4468e-02, 7.1181e-01, 5.8269e-01, - 5.8374e-01, 3.1109e-01, 4.0255e-01, 4.9434e-01, - 6.6836e-01, 2.9763e-01, 7.7282e-01, 7.7640e-01, - 5.5950e-01, 8.2989e-01, 3.9255e-01, 3.2097e-01, - 3.5498e-01, 1.4379e-01, 6.9156e-01, 6.0956e-01, - 5.7389e-01, 2.1995e-01, 5.8540e-02, 1.3269e-01, - 2.2999e-01, 4.0583e-01, 7.6687e-01, 3.5515e-02, - 3.8287e-01, 5.1448e-01, 8.5014e-01, 2.6067e-01, - 6.8776e-01, 4.4364e-01, 5.2084e-01, 5.8748e-01, - 7.7529e-01, 5.3467e-03, 3.3276e-01, 2.7419e-01, - 2.1121e-01, 5.0536e-01, 6.9922e-01, 3.7431e-02, - 8.7829e-01, 5.9974e-01, 7.9058e-03, 3.0586e-01, - 3.7018e-01, 7.6306e-01, 7.8815e-01, 6.7061e-01, - 9.4150e-01, 5.2596e-01, 9.8341e-01, 7.4131e-01, - 4.6374e-01, 3.6025e-02, 8.4405e-01, 5.0067e-01, - 1.3499e-01, 1.0402e-01, 2.6472e-02, 9.9025e-01, - 4.3637e-01, 2.0747e-01, 8.8258e-01, 7.6534e-01, - 2.0794e-01, 6.9392e-01, 3.5335e-01, 3.5247e-02, - 2.4280e-01, 3.4927e-01, 3.3197e-01, 6.3293e-01, - 5.4021e-01, 3.0541e-01, 1.3719e-01, 4.9790e-01, - 6.1273e-01, 5.6672e-01, 1.8746e-01, 9.1112e-01, - 5.0549e-01, 2.7840e-01, 8.5873e-01, 3.6234e-01, - 7.5441e-01, 8.7309e-01, 2.3727e-01, 5.8749e-01, - 8.4630e-02, 7.2921e-01, 4.7617e-01, 2.0277e-01, - 7.1199e-01, 6.3057e-01, 7.4494e-01, 5.9390e-01, - 6.3551e-01, 7.0786e-01, 7.5953e-01, 9.4655e-01, - 6.3295e-01, 2.9744e-01, 5.7620e-02, 8.4973e-01, - 2.3404e-01, 9.0329e-01, 9.1647e-01, 8.5317e-01, - 3.9654e-03, 6.7276e-01, 7.7647e-01, 2.8644e-03, - 1.2519e-01, 2.2041e-01, 9.9379e-01, 7.0161e-01, - 9.8575e-01, 1.5756e-01, 5.1589e-01, 6.4543e-01, - 9.7651e-01, 6.5372e-01, 8.7253e-01, 3.7374e-01, - 2.1858e-01, 2.3323e-01, 3.9934e-02, 5.3707e-01, - 7.2272e-01, 3.0457e-01, 8.7516e-01, 9.0816e-01, - 6.7837e-01, 4.3251e-01, 3.3498e-01, 9.0997e-01, - 6.9389e-02, 7.7021e-01, 8.7092e-01, 3.8024e-01, - 5.4805e-01, 1.9497e-01, 5.6864e-01, 5.6939e-01, - 1.8313e-02, 1.8784e-01, 3.8429e-01, 5.5461e-02, - 1.5808e-01, 4.2510e-01, 4.4210e-01, 2.2740e-01, - 6.7933e-01, 4.3438e-01, 9.9588e-01, 8.9235e-01, - 4.4846e-01, 9.2256e-01, 4.5922e-01, 6.8803e-01, - 1.9931e-01, 2.3433e-01, 1.4563e-01, 8.9319e-01, - 8.7190e-01, 7.9103e-01, 2.3878e-01, 3.1898e-01, - 8.8999e-01, 6.2143e-01, 1.3975e-01, 3.9679e-01, - 3.7014e-01, 4.5515e-01, 2.9703e-01, 9.1947e-01, - 1.2286e-01, 4.4267e-01, 7.6303e-01, 5.3320e-01, - 5.6167e-01, 9.1226e-02, 2.2957e-01, 3.7713e-01, - 8.1719e-01, 2.0355e-02, 4.1507e-01, 6.6296e-01, - 
3.8091e-01, 6.7962e-01, 5.0347e-01, 4.7727e-01, - 7.4304e-01, 5.9352e-01, 6.8517e-01, 1.9044e-02, - 9.1414e-01, 1.4885e-01, 1.5007e-01, 3.7674e-02, - 6.0095e-01, 2.4963e-01, 1.8637e-01, 6.8931e-01, - 2.8481e-01, 6.1241e-01, 5.2515e-01, 9.8569e-01, - 8.9238e-01, 7.5556e-01, 4.9874e-01, 7.9759e-01, - 9.6169e-01, 7.9671e-01, 8.1893e-01, 4.5951e-01, - 7.5276e-01, 4.0520e-01, 4.0465e-02, 1.9412e-01, - 3.5283e-01, 6.9192e-01, 3.7641e-01, 4.3958e-02, - 8.6658e-01, 1.6543e-01, 8.0442e-01, 2.8446e-01, - 7.9453e-01, 5.9950e-01, 8.1424e-02, 7.0566e-01, - 8.2535e-01, 1.1568e-01, 2.6458e-01, 4.1053e-02, - 9.9795e-01, 1.0083e-01, 8.4276e-01, 9.7061e-01, - 6.5811e-01, 8.8023e-01, 9.4782e-01, 1.7553e-01, - 2.3410e-02, 7.5416e-01, 9.5642e-01, 5.3947e-01, - 1.7772e-01, 2.1459e-01, 8.0957e-01, 7.8863e-01, - 7.3928e-01, 5.6401e-01, 5.4479e-01, 1.4586e-01, - 5.2477e-01, 8.2925e-01, 7.6176e-01, 7.7261e-01, - 9.2809e-01, 7.2949e-01, 6.4136e-01, 4.5900e-01, - 9.7133e-01, 8.6138e-01, 7.7439e-01, 9.1759e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.0493, 0.7033, 0.8450, ..., 0.3708, 0.9702, 0.0106]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 4.8812994956970215 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 145476 -ss 10000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.560847282409668} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 68634 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.2000732421875} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5150, 3949, 6725, 9833, 6214, 5081, 1554, 3283, 4592, - 1367, 5501, 6306, 1908, 8847, 9684, 2365, 4516, 2030, - 6246, 1037, 3129, 6994, 6859, 6054, 2697, 3073, 4745, - 5149, 9683, 6168, 7171, 2986, 8282, 5962, 9044, 9859, - 4416, 2729, 3201, 9334, 70, 3704, 1992, 4539, 3854, - 9774, 4984, 5696, 4186, 7809, 2303, 2349, 6788, 8167, - 7467, 8573, 1950, 1383, 4872, 1688, 3467, 531, 2507, - 77, 4045, 9094, 8112, 2615, 9777, 5813, 1750, 6905, - 7986, 8272, 5559, 7416, 2650, 6301, 7023, 6146, 5728, - 154, 3305, 852, 1556, 1672, 8260, 3100, 4027, 2524, - 7136, 1881, 3609, 4077, 5415, 1342, 7463, 0, 7237, - 949, 4271, 7500, 4398, 202, 6370, 6523, 8149, 1606, - 644, 1179, 4251, 732, 4140, 9430, 7869, 4357, 3082, - 7706, 929, 1350, 2066, 3153, 7727, 3490, 4529, 2830, - 604, 9531, 3037, 674, 1118, 7439, 1800, 9605, 7647, - 7273, 8451, 1727, 5065, 8403, 4478, 8920, 7608, 949, - 4223, 6471, 1464, 4183, 9344, 7358, 8169, 5124, 7436, - 7379, 4302, 4579, 6954, 7352, 2475, 6524, 6023, 4419, - 4933, 4411, 5203, 2328, 5122, 1046, 4112, 415, 8434, - 5247, 5282, 7252, 5516, 6132, 9262, 1383, 4445, 3838, - 3480, 5499, 6857, 5666, 2223, 3948, 5622, 6586, 8728, - 1267, 1798, 4209, 2945, 7759, 2936, 2435, 508, 7969, - 658, 334, 4257, 5246, 6393, 357, 3505, 4514, 1464, - 8590, 9903, 4716, 9051, 4470, 9570, 4777, 3595, 8441, - 9110, 9670, 2187, 8548, 3616, 2595, 1352, 5502, 4886, - 8337, 7807, 4921, 643, 1759, 191, 6520, 5823, 2075, - 4740, 3014, 8077, 3584, 7499, 5306, 1223, 1385, 7517, - 8020, 7952, 2205, 3204, 4927, 5397, 2795, 9112, 9159, - 2689, 6163, 2719, 3134, 7968, 7593, 4653, 54, 597, - 3621, 6399, 2434, 6710, 967, 6700, 1213, 1026, 551, - 5577, 3471, 9061, 2300, 9239, 9461, 23, 3996, 596, - 2494, 5453, 3959, 6826, 5001, 7382, 416, 1281, 7317, - 7289, 1990, 4004, 9139, 5821, 8865, 9134, 9753, 7856, - 3678, 597, 9576, 3434, 7138, 8525, 4380, 4708, 1098, - 1864, 2641, 5053, 725, 7343, 1604, 5046, 9253, 1076, - 1733, 7452, 5764, 4569, 2060, 2335, 2788, 666, 560, - 4714, 8624, 8928, 6013, 1750, 3137, 5802, 2485, 8605, - 4692, 8176, 851, 670, 6436, 4116, 3254, 4703, 3344, - 9079, 2177, 3623, 7381, 508, 1878, 8203, 179, 3099, - 5174, 8679, 6987, 5356, 2553, 3507, 667, 8739, 4484, - 5254, 6753, 2060, 7479, 4766, 9854, 7766, 9804, 802, - 2515, 3329, 2210, 7753, 2069, 70, 3487, 1593, 4737, - 6304, 4273, 1474, 5728, 8160, 440, 2963, 5961, 9940, - 3963, 6482, 6596, 1104, 8908, 2324, 4677, 901, 5681, - 7684, 1987, 3941, 1028, 3945, 522, 155, 9582, 618, - 9191, 2285, 2363, 1005, 6492, 2446, 5028, 8066, 9102, - 7602, 8504, 3215, 2742, 946, 538, 5191, 4078, 3141, - 982, 1552, 5688, 9154, 3693, 2376, 5721, 4025, 5733, - 4065, 1715, 8170, 7847, 2860, 4001, 475, 7239, 5611, - 5117, 3773, 7630, 4037, 8971, 3180, 5691, 7586, 7012, - 1221, 6168, 6276, 2660, 6301, 4688, 6558, 7433, 4113, - 3119, 4374, 617, 7732, 5185, 9146, 7702, 4380, 1370, - 4149, 205, 3424, 954, 5456, 1934, 9488, 5247, 9946, - 2494, 4258, 7045, 5497, 588, 1288, 5965, 6814, 3539, - 1764, 4809, 3064, 9477, 5056, 3833, 5491, 8813, 623, - 9737, 8723, 3926, 2417, 6250, 1832, 4224, 3868, 2666, - 5475, 2503, 4400, 2951, 8789, 8901, 2576, 1586, 2358, - 8683, 701, 2218, 324, 2750, 4642, 6563, 7866, 2142, - 2049, 8029, 6801, 8630, 4530, 1913, 7492, 4784, 1756, - 2725, 811, 3186, 5499, 565, 5265, 4256, 9478, 1700, - 8194, 7843, 9531, 2519, 7331, 6421, 4938, 2047, 9115, - 5085, 6558, 2790, 5305, 7400, 6481, 8276, 6051, 376, - 2232, 
3828, 5087, 901, 859, 311, 5986, 646, 2534, - 3646, 6330, 6624, 44, 9526, 1720, 7671, 786, 2103, - 7291, 6456, 6706, 9840, 151, 5594, 5539, 8478, 8848, - 9399, 6447, 7659, 8019, 3178, 3373, 8082, 4366, 3291, - 1095, 8101, 138, 8637, 5651, 2986, 4186, 1262, 8903, - 7692, 5148, 5226, 6772, 7717, 1398, 9840, 6283, 4653, - 916, 1943, 7857, 14, 2435, 9045, 6355, 6437, 5876, - 2079, 6222, 9106, 7167, 8674, 3568, 5387, 6404, 4327, - 9378, 7023, 3428, 7208, 3991, 3503, 1112, 5994, 7543, - 8077, 3450, 7732, 95, 4124, 4136, 4748, 216, 5127, - 2129, 134, 2135, 8151, 9959, 5515, 3277, 4587, 1429, - 2979, 932, 7056, 783, 3334, 7562, 460, 5737, 5306, - 1839, 8429, 174, 7235, 4614, 34, 4009, 7389, 701, - 5758, 8329, 9563, 8743, 4338, 3477, 1180, 1540, 5860, - 9475, 5545, 4339, 9525, 5428, 2826, 7521, 4441, 8311, - 1115, 7387, 6593, 2389, 3900, 8596, 9443, 4909, 2179, - 2149, 5400, 6357, 6980, 7081, 5466, 6799, 5303, 6495, - 572, 6717, 3685, 9544, 9986, 5977, 7724, 1134, 4967, - 2145, 3442, 1984, 9728, 2275, 9320, 1257, 102, 4987, - 8007, 1513, 959, 4257, 799, 4709, 204, 6285, 7273, - 8278, 2651, 8788, 6857, 3198, 3750, 7764, 9530, 2525, - 5355, 2828, 6803, 3742, 9808, 2702, 3428, 5248, 7386, - 9259, 3003, 8487, 8936, 9681, 4024, 4691, 9077, 8624, - 670, 3644, 2024, 5179, 8050, 1290, 8269, 8524, 4786, - 2503, 35, 5675, 3549, 6350, 1714, 678, 5292, 9599, - 9848, 2823, 3918, 8633, 2596, 7468, 1783, 4984, 5190, - 2747, 7283, 6599, 5946, 9563, 8503, 9940, 9756, 3292, - 4953, 1009, 8896, 653, 1878, 8055, 8508, 2037, 2440, - 37, 3449, 5269, 3294, 7638, 3773, 5688, 5609, 7128, - 1885, 2653, 15, 148, 8228, 7481, 7965, 3146, 3889, - 6682, 8636, 1153, 8978, 9471, 4557, 8139, 4145, 2086, - 2623, 2951, 2381, 6771, 7405, 7405, 7866, 6651, 961, - 6157, 9117, 8248, 4454, 4081, 1801, 8411, 624, 6352, - 5499, 2637, 2893, 2441, 8243, 5608, 5218, 6178, 2460, - 7724, 4117, 2164, 8643, 7095, 2933, 8474, 6756, 7154, - 4556, 7730, 8140, 7610, 525, 1161, 9855, 6850, 3574, - 5534, 4716, 9318, 179, 5902, 7492, 3084, 3893, 1120, - 3911, 5634, 9144, 2088, 5803, 5300, 9679, 7556, 7644, - 1213, 9980, 8515, 3166, 262, 4231, 480, 4900, 7635, - 9607, 6653, 3153, 9282, 6882, 6866, 9053, 6078, 1605, - 2679, 9434, 1838, 5200, 8526, 145, 8529, 81, 7768, - 2792, 1368, 1146, 6845, 2082, 779, 3727, 3189, 5616, - 2883, 6077, 3380, 5032, 8816, 3861, 2679, 2096, 1826, - 4481, 3081, 2880, 9558, 48, 8852, 7596, 4096, 1993, - 3536, 4469, 1068, 8771, 110, 3938, 7946, 5524, 2146, - 2369, 7248, 6850, 2766, 3813, 4176, 2118, 2240, 2650, - 661, 3663, 7071, 4825, 3012, 5874, 7763, 2, 9014, - 386, 6880, 6261, 4740, 4948, 6569, 5412, 7485, 5084, - 2240, 7660, 5562, 8271, 9281, 7660, 7758, 8339, 2685, - 4281]), - values=tensor([0.7305, 0.2440, 0.3019, 0.7007, 0.9449, 0.8426, 0.6302, - 0.6840, 0.3760, 0.2330, 0.8640, 0.1425, 0.7949, 0.3165, - 0.9440, 0.0634, 0.1824, 0.8939, 0.0831, 0.8632, 0.5557, - 0.9222, 0.7152, 0.4959, 0.3642, 0.8742, 0.0533, 0.8773, - 0.3271, 0.1850, 0.8267, 0.0692, 0.8764, 0.6532, 0.0689, - 0.0059, 0.9796, 0.6870, 0.9154, 0.4784, 0.5447, 0.7909, - 0.5242, 0.2989, 0.3478, 0.3160, 0.9938, 0.4624, 0.7891, - 0.5624, 0.6799, 0.1505, 0.3167, 0.1174, 0.4734, 0.9357, - 0.4414, 0.2681, 0.3600, 0.9851, 0.0984, 0.9817, 0.6071, - 0.2437, 0.6382, 0.9904, 0.2276, 0.5007, 0.7924, 0.8288, - 0.4341, 0.5700, 0.0922, 0.1698, 0.6893, 0.1819, 0.6179, - 0.0688, 0.2326, 0.7694, 0.2736, 0.3807, 0.6497, 0.0934, - 0.2985, 0.3575, 0.2669, 0.2111, 0.0895, 0.1728, 0.5018, - 0.9851, 0.2093, 0.7627, 0.8317, 0.2321, 0.2733, 0.9575, - 0.1373, 0.4112, 0.4346, 0.2690, 
0.0832, 0.9308, 0.0856, - 0.4248, 0.1271, 0.2415, 0.1779, 0.9677, 0.0600, 0.9191, - 0.1112, 0.1565, 0.1580, 0.1619, 0.6745, 0.6379, 0.9366, - 0.4143, 0.1271, 0.0864, 0.7266, 0.3906, 0.9727, 0.7339, - 0.0407, 0.0059, 0.6237, 0.7622, 0.9907, 0.2749, 0.4392, - 0.2002, 0.6558, 0.9415, 0.5261, 0.2561, 0.7660, 0.3448, - 0.1122, 0.7076, 0.6037, 0.4897, 0.4670, 0.8339, 0.5757, - 0.4177, 0.7623, 0.5865, 0.8495, 0.6497, 0.9778, 0.6297, - 0.3389, 0.2747, 0.0702, 0.1398, 0.1005, 0.4201, 0.1831, - 0.2403, 0.3332, 0.4306, 0.5321, 0.7778, 0.0204, 0.4136, - 0.1536, 0.7541, 0.0838, 0.5081, 0.7545, 0.0833, 0.4845, - 0.3873, 0.2490, 0.4675, 0.2018, 0.9437, 0.0574, 0.7574, - 0.4217, 0.4481, 0.8897, 0.5683, 0.3881, 0.1927, 0.8680, - 0.3664, 0.8069, 0.2192, 0.4607, 0.7321, 0.3086, 0.6446, - 0.3613, 0.6584, 0.1132, 0.8934, 0.0915, 0.5183, 0.8634, - 0.8577, 0.6923, 0.8618, 0.6853, 0.8256, 0.8384, 0.2295, - 0.3689, 0.5760, 0.2086, 0.3564, 0.8483, 0.6783, 0.7902, - 0.6082, 0.2549, 0.6415, 0.4994, 0.2078, 0.5032, 0.3790, - 0.4926, 0.0907, 0.6911, 0.2975, 0.8561, 0.2283, 0.4530, - 0.2236, 0.5933, 0.7581, 0.3982, 0.1913, 0.0682, 0.2359, - 0.4488, 0.3220, 0.4875, 0.7597, 0.7930, 0.2062, 0.4708, - 0.1870, 0.0373, 0.0367, 0.8306, 0.1947, 0.2315, 0.0207, - 0.8163, 0.7626, 0.9301, 0.8099, 0.3633, 0.5641, 0.8120, - 0.6141, 0.5594, 0.4658, 0.2150, 0.1654, 0.3051, 0.0343, - 0.3400, 0.5978, 0.5354, 0.0263, 0.3151, 0.8046, 0.5327, - 0.3066, 0.2487, 0.3111, 0.0893, 0.1612, 0.5907, 0.3107, - 0.8584, 0.8229, 0.4034, 0.2252, 0.3659, 0.2053, 0.7999, - 0.7701, 0.7502, 0.7518, 0.5690, 0.1322, 0.1586, 0.1160, - 0.9796, 0.3007, 0.1296, 0.7414, 0.1813, 0.4708, 0.0361, - 0.8196, 0.1266, 0.8025, 0.2789, 0.7870, 0.4347, 0.9531, - 0.2674, 0.9698, 0.4764, 0.0519, 0.6509, 0.3234, 0.4769, - 0.3125, 0.1997, 0.6900, 0.7719, 0.1567, 0.8227, 0.8361, - 0.3658, 0.9962, 0.8049, 0.2839, 0.0239, 0.5230, 0.1679, - 0.0012, 0.1941, 0.1825, 0.6328, 0.8021, 0.7872, 0.5540, - 0.0997, 0.9139, 0.1493, 0.1197, 0.6714, 0.2685, 0.5289, - 0.4741, 0.4375, 0.4510, 0.6505, 0.0060, 0.0804, 0.3052, - 0.4065, 0.1432, 0.8715, 0.0973, 0.2004, 0.0400, 0.2303, - 0.6891, 0.4089, 0.8692, 0.1440, 0.3744, 0.8894, 0.4839, - 0.5594, 0.2863, 0.2636, 0.1806, 0.4900, 0.9233, 0.0287, - 0.1519, 0.0238, 0.9718, 0.0696, 0.4915, 0.4842, 0.1594, - 0.7936, 0.6860, 0.5880, 0.4352, 0.0092, 0.6365, 0.1016, - 0.1299, 0.9522, 0.9906, 0.4238, 0.2435, 0.5165, 0.1731, - 0.6409, 0.2362, 0.6101, 0.1717, 0.7603, 0.4938, 0.2723, - 0.7599, 0.9112, 0.0268, 0.9393, 0.8969, 0.3631, 0.2998, - 0.3500, 0.7422, 0.9345, 0.9975, 0.0517, 0.1544, 0.6201, - 0.5972, 0.3434, 0.9546, 0.0842, 0.2284, 0.4080, 0.3233, - 0.9813, 0.3166, 0.9934, 0.5541, 0.2131, 0.8230, 0.1841, - 0.3426, 0.7598, 0.6218, 0.4928, 0.7872, 0.4823, 0.5321, - 0.2430, 0.9717, 0.6158, 0.5835, 0.2198, 0.3750, 0.8817, - 0.8977, 0.6208, 0.4204, 0.5681, 0.7034, 0.0297, 0.1272, - 0.9109, 0.2850, 0.6202, 0.3491, 0.4077, 0.0265, 0.9469, - 0.5186, 0.0530, 0.5136, 0.4687, 0.4898, 0.2296, 0.6555, - 0.6814, 0.2474, 0.5224, 0.6744, 0.7214, 0.5305, 0.5181, - 0.3443, 0.7930, 0.3254, 0.8409, 0.0232, 0.8876, 0.4606, - 0.8758, 0.4816, 0.3285, 0.3194, 0.0473, 0.5240, 0.4558, - 0.2643, 0.3823, 0.3375, 0.5496, 0.8842, 0.2643, 0.8295, - 0.0774, 0.7973, 0.2342, 0.2846, 0.4365, 0.5561, 0.8911, - 0.9974, 0.3818, 0.8914, 0.6534, 0.3445, 0.0570, 0.5409, - 0.8934, 0.8748, 0.8762, 0.1700, 0.1286, 0.5886, 0.2604, - 0.5122, 0.3910, 0.6718, 0.8386, 0.7880, 0.7735, 0.8382, - 0.7683, 0.8908, 0.5226, 0.8048, 0.4884, 0.3660, 0.1809, - 0.2335, 0.6855, 0.8170, 0.5295, 0.7842, 0.7798, 
0.2910, - 0.3853, 0.8829, 0.7835, 0.1690, 0.4663, 0.2045, 0.1035, - 0.4997, 0.5297, 0.1289, 0.5179, 0.0606, 0.8583, 0.3048, - 0.7708, 0.1893, 0.6309, 0.7686, 0.9828, 0.9048, 0.3841, - 0.2185, 0.6435, 0.4478, 0.9468, 0.8737, 0.2111, 0.1349, - 0.9495, 0.4091, 0.3367, 0.4818, 0.6928, 0.9558, 0.4398, - 0.5439, 0.7708, 0.4337, 0.4611, 0.2583, 0.1955, 0.4439, - 0.4445, 0.7809, 0.8863, 0.3069, 0.4805, 0.8381, 0.8548, - 0.5700, 0.1066, 0.1066, 0.8937, 0.5723, 0.6920, 0.0327, - 0.2123, 0.4301, 0.6855, 0.9118, 0.1103, 0.8642, 0.3839, - 0.6499, 0.7405, 0.8262, 0.1273, 0.8596, 0.1214, 0.5538, - 0.2683, 0.7685, 0.0380, 0.9668, 0.3679, 0.4231, 0.3206, - 0.2939, 0.2000, 0.2628, 0.6644, 0.0046, 0.4634, 0.6009, - 0.9618, 0.0916, 0.1533, 0.8964, 0.0750, 0.6530, 0.7420, - 0.0171, 0.1412, 0.9378, 0.1278, 0.5952, 0.5957, 0.1602, - 0.4569, 0.6997, 0.3923, 0.9702, 0.9204, 0.6140, 0.5178, - 0.9051, 0.2480, 0.1154, 0.8068, 0.0751, 0.2951, 0.2063, - 0.5678, 0.8248, 0.3371, 0.6619, 0.2068, 0.7570, 0.0168, - 0.6561, 0.1487, 0.6018, 0.3884, 0.9519, 0.9836, 0.1373, - 0.8836, 0.8831, 0.2552, 0.0736, 0.2226, 0.3008, 0.4150, - 0.1279, 0.9067, 0.1991, 0.2146, 0.2746, 0.7514, 0.1652, - 0.6727, 0.7846, 0.1471, 0.6132, 0.2493, 0.3325, 0.1885, - 0.6417, 0.6590, 0.4757, 0.4061, 0.4359, 0.7236, 0.4441, - 0.8643, 0.8479, 0.1003, 0.0185, 0.7836, 0.3159, 0.9798, - 0.5805, 0.8284, 0.5046, 0.3086, 0.3844, 0.2360, 0.1980, - 0.4207, 0.9796, 0.1878, 0.5333, 0.3240, 0.4450, 0.8072, - 0.9168, 0.9993, 0.1170, 0.7410, 0.7954, 0.0307, 0.0989, - 0.1791, 0.2914, 0.1820, 0.6230, 0.6850, 0.2813, 0.1157, - 0.9867, 0.0737, 0.8029, 0.1015, 0.4682, 0.7620, 0.7696, - 0.1064, 0.5765, 0.9709, 0.1174, 0.5832, 0.0978, 0.3568, - 0.0730, 0.0563, 0.7057, 0.7628, 0.0959, 0.4747, 0.3801, - 0.4508, 0.6394, 0.2715, 0.6105, 0.9926, 0.0896, 0.9574, - 0.6793, 0.6180, 0.4648, 0.2921, 0.3386, 0.6820, 0.5212, - 0.2154, 0.4717, 0.5014, 0.2617, 0.2977, 0.8006, 0.3353, - 0.8470, 0.7611, 0.1607, 0.8669, 0.7715, 0.5232, 0.4841, - 0.9115, 0.0228, 0.4693, 0.3305, 0.8215, 0.0869, 0.1824, - 0.6579, 0.0833, 0.9613, 0.4715, 0.3285, 0.5471, 0.6905, - 0.3957, 0.1840, 0.6202, 0.6851, 0.4733, 0.5934, 0.1386, - 0.7851, 0.7342, 0.6370, 0.2348, 0.9459, 0.2391, 0.7316, - 0.0941, 0.4717, 0.1253, 0.8566, 0.1216, 0.7571, 0.4196, - 0.8179, 0.8132, 0.2885, 0.9707, 0.1275, 0.3342, 0.3134, - 0.7312, 0.8352, 0.1365, 0.9373, 0.3091, 0.3749, 0.0663, - 0.0702, 0.0409, 0.7695, 0.6795, 0.1004, 0.2734, 0.4594, - 0.7910, 0.2158, 0.2145, 0.3182, 0.4406, 0.5618, 0.1494, - 0.2911, 0.5915, 0.9878, 0.1365, 0.7549, 0.8342, 0.9059, - 0.4562, 0.0558, 0.1508, 0.1673, 0.9872, 0.7096, 0.7520, - 0.1691, 0.3662, 0.9410, 0.2625, 0.1475, 0.8645, 0.3039, - 0.2029, 0.3516, 0.6215, 0.5740, 0.4584, 0.2225, 0.4006, - 0.3128, 0.0380, 0.6050, 0.2938, 0.0878, 0.3289, 0.8456, - 0.2525, 0.9209, 0.0593, 0.6809, 0.3115, 0.4275, 0.2499, - 0.9295, 0.5425, 0.4391, 0.3831, 0.4593, 0.7149, 0.1921, - 0.1184, 0.2786, 0.9624, 0.1167, 0.0044, 0.4051, 0.0108, - 0.4520, 0.6056, 0.9254, 0.0610, 0.3916, 0.2002, 0.0181, - 0.2959, 0.8354, 0.8965, 0.7722, 0.6322, 0.5425, 0.0976, - 0.8386, 0.2720, 0.5773, 0.9188, 0.0410, 0.3565, 0.1415, - 0.0175, 0.2161, 0.8758, 0.7699, 0.2833, 0.0538, 0.3260, - 0.5205, 0.1162, 0.9185, 0.1645, 0.4861, 0.6898, 0.0238, - 0.7657, 0.0799, 0.4505, 0.6300, 0.6548, 0.1225, 0.0206, - 0.5211, 0.1829, 0.9455, 0.5775, 0.0847, 0.2663, 0.6607, - 0.9023, 0.5472, 0.7809, 0.3315, 0.1532, 0.1912, 0.3343, - 0.7726, 0.2157, 0.2423, 0.7378, 0.9800, 0.4469, 0.4539, - 0.9687, 0.0064, 0.0441, 0.0697, 0.5833, 0.6814, 0.6849, - 
0.0435, 0.1250, 0.2613, 0.0854, 0.3080, 0.5157, 0.4405, - 0.5866, 0.4456, 0.1962, 0.6798, 0.4460, 0.0218, 0.8899, - 0.8373, 0.1209, 0.8163, 0.3718, 0.6930, 0.1628, 0.4197, - 0.2782, 0.5692, 0.9005, 0.5938, 0.2539, 0.8654, 0.7168, - 0.9464, 0.8460, 0.4902, 0.5805, 0.0640, 0.5710, 0.7328, - 0.9874, 0.0901, 0.6221, 0.7762, 0.9765, 0.8525]), + col_indices=tensor([2643, 9156, 5239, 6705, 3391, 839, 9521, 9391, 2223, + 6628, 8403, 8917, 6732, 1536, 5664, 469, 9121, 8963, + 5239, 3251, 4615, 8397, 2614, 5830, 9713, 1088, 8108, + 4794, 9712, 8260, 3831, 8749, 6304, 3729, 5562, 7546, + 2581, 4191, 7958, 3187, 3754, 8464, 565, 8724, 2228, + 3973, 5213, 5759, 5811, 2760, 8509, 1973, 3080, 7249, + 4892, 5460, 6915, 3690, 4242, 5068, 7834, 2232, 8743, + 544, 9106, 8630, 6015, 8391, 7980, 7256, 2585, 1908, + 1433, 4507, 283, 5230, 7019, 8937, 3267, 8367, 236, + 6614, 5880, 5971, 6353, 207, 8362, 7090, 3803, 5077, + 8722, 2224, 5135, 4535, 2258, 4477, 7947, 5250, 3236, + 9325, 1955, 6579, 4773, 4019, 7273, 8155, 2892, 1319, + 4400, 6040, 8754, 4161, 5904, 8899, 2414, 1491, 1685, + 8971, 9308, 668, 3441, 9561, 8930, 9228, 1619, 5628, + 2781, 3944, 3148, 4475, 2262, 558, 1416, 1780, 79, + 2616, 7370, 2573, 136, 792, 7036, 7486, 844, 6456, + 9989, 6636, 314, 5078, 4011, 7015, 1787, 9026, 5458, + 6674, 3849, 2394, 8363, 5068, 9923, 1896, 5951, 7826, + 9831, 4672, 7630, 9605, 1468, 7492, 7343, 2120, 5384, + 4830, 9242, 6864, 7079, 2295, 4330, 8755, 2617, 5237, + 8212, 3380, 6150, 1197, 9598, 7425, 1175, 597, 7001, + 6162, 8394, 6453, 3374, 4865, 4206, 532, 3005, 6705, + 4796, 9997, 179, 3345, 4020, 4356, 3193, 2688, 7568, + 8501, 7366, 1850, 1728, 5732, 4671, 1791, 6455, 836, + 8772, 8593, 454, 1960, 4110, 5089, 585, 2401, 4550, + 9376, 5266, 3449, 7307, 9894, 2509, 1433, 3290, 590, + 7669, 155, 224, 9310, 21, 8658, 1922, 9748, 7731, + 9589, 5695, 3501, 2394, 6535, 1090, 3835, 8765, 9557, + 7471, 730, 7148, 4767, 4296, 2092, 9934, 6551, 28, + 8126, 7173, 3431, 1124, 5127, 5572, 538, 8887, 7254, + 3495, 5672, 7562, 3320, 4466, 6188, 7199, 7862, 2848, + 5850, 3919, 7278, 2539, 6799, 4907, 5152, 6065, 3299, + 4508, 3418, 7978, 6379, 2168, 877, 7412, 9236, 762, + 506, 3817, 2690, 3686, 1855, 6050, 9516, 2726, 2044, + 5420, 437, 7997, 8052, 8291, 7531, 1443, 8231, 7184, + 1151, 5685, 2226, 8746, 7614, 9927, 4734, 1138, 6606, + 2711, 2543, 8416, 4052, 2001, 5074, 4533, 7696, 3536, + 2514, 3406, 8422, 3259, 597, 7870, 7565, 2221, 9480, + 100, 9225, 1776, 2488, 727, 8511, 6764, 972, 2217, + 8066, 2185, 4117, 2192, 7083, 6385, 2231, 5040, 464, + 7017, 8113, 627, 1680, 2135, 7362, 9947, 3902, 9500, + 904, 1080, 4328, 6054, 9575, 1974, 2927, 8340, 3469, + 4365, 7040, 8148, 2311, 1084, 1849, 8748, 46, 8604, + 4568, 6666, 8392, 1208, 7705, 1577, 5825, 3715, 2279, + 4916, 4816, 298, 2832, 9067, 3431, 5538, 3270, 4028, + 2213, 1048, 1872, 6988, 4832, 7884, 9242, 9575, 7812, + 7767, 8233, 7925, 1413, 9594, 2329, 1197, 8519, 2531, + 4656, 7427, 5356, 3952, 8104, 6478, 344, 2922, 4510, + 5211, 9027, 3287, 6348, 580, 1649, 4314, 6808, 3638, + 6067, 9192, 6473, 2227, 2554, 8506, 545, 4419, 7131, + 688, 3113, 7030, 4111, 8062, 5326, 5696, 7470, 3434, + 9845, 951, 9766, 9931, 4112, 7412, 6551, 950, 4249, + 3837, 468, 5904, 7199, 7807, 2362, 8149, 8933, 1209, + 6058, 5993, 2616, 199, 3718, 1257, 3904, 9271, 2038, + 6950, 5643, 3943, 4957, 5353, 6683, 5076, 8642, 3712, + 4297, 1037, 8367, 889, 7165, 1955, 8390, 4942, 6640, + 8386, 5532, 7481, 6251, 4513, 6001, 5604, 8912, 3890, + 5657, 4658, 4883, 2938, 765, 2394, 1968, 4120, 3, + 
1536, 7183, 4411, 1368, 2965, 6576, 7244, 2711, 5973, + 1635, 6544, 277, 8023, 2264, 9537, 9282, 9283, 901, + 4915, 1958, 3005, 4739, 7066, 7365, 7015, 7, 1592, + 7273, 8408, 7622, 3084, 6208, 5461, 8339, 2005, 1277, + 558, 3792, 6656, 3447, 2369, 1019, 7974, 9882, 2753, + 5298, 8303, 1347, 9559, 9523, 8308, 7733, 3512, 5379, + 4589, 535, 1438, 2839, 8352, 9041, 7083, 8114, 6823, + 8710, 8028, 5591, 8625, 9840, 1076, 8324, 7345, 7393, + 3576, 1001, 1689, 3043, 1360, 6968, 8714, 4945, 7394, + 7715, 3670, 9709, 4384, 3483, 7554, 9671, 7282, 3799, + 7938, 2009, 3258, 5008, 1098, 4096, 9587, 646, 8961, + 1311, 1421, 6227, 8589, 4493, 717, 1413, 568, 2569, + 6661, 2013, 2241, 1077, 8894, 9184, 9964, 453, 972, + 2648, 7121, 7537, 3111, 4376, 9111, 5227, 4426, 2940, + 491, 8810, 9560, 4378, 819, 8801, 3067, 6630, 1929, + 6716, 2255, 8834, 1368, 344, 2237, 4263, 4928, 2809, + 6903, 9070, 7158, 3333, 7668, 1915, 8655, 6926, 4402, + 6985, 4453, 9354, 8139, 7456, 7992, 8554, 2845, 3941, + 2143, 6612, 8923, 9296, 9796, 2645, 8237, 3481, 3525, + 2630, 5099, 1176, 5377, 871, 983, 5110, 134, 8709, + 6304, 1501, 2070, 7958, 3691, 7598, 1381, 3329, 7770, + 5112, 8837, 9125, 1557, 3201, 6451, 628, 7820, 7528, + 822, 1480, 6285, 5175, 9148, 8607, 3543, 6984, 4840, + 1771, 2616, 9460, 8623, 6456, 6254, 842, 9810, 6174, + 4408, 6058, 2265, 5023, 2722, 9372, 6240, 9288, 3637, + 3367, 6049, 7155, 7723, 5513, 123, 9416, 3789, 3665, + 9812, 1276, 2912, 1212, 9786, 6090, 2259, 6467, 7353, + 6336, 1421, 9736, 3906, 7832, 2152, 266, 9896, 8740, + 9255, 8505, 1506, 8162, 9481, 1996, 4805, 8216, 8817, + 5654, 6388, 4998, 583, 2482, 2597, 540, 5138, 9683, + 5302, 1699, 1689, 9891, 2412, 7765, 1319, 9252, 7366, + 4617, 2776, 4095, 9669, 5890, 7178, 2932, 3327, 1405, + 2910, 3847, 6544, 4418, 2705, 5180, 4912, 3120, 5468, + 1072, 2113, 2417, 2539, 1503, 6565, 7170, 7241, 9711, + 6029, 9612, 6179, 9418, 7097, 171, 4471, 4989, 7223, + 9925, 6874, 2485, 813, 5325, 9467, 3727, 2359, 36, + 1274, 3671, 8113, 5982, 2368, 4118, 1345, 586, 3928, + 4772, 8240, 4801, 8080, 8898, 7002, 9688, 4608, 7129, + 8829, 5197, 3697, 3640, 2750, 4139, 8848, 5165, 8514, + 998, 6231, 9239, 3123, 1315, 2385, 6702, 8204, 8608, + 2907, 225, 9272, 9039, 7883, 3856, 9828, 3601, 7421, + 8036, 4227, 5967, 2737, 5600, 8461, 675, 5773, 8778, + 2276, 1824, 2231, 9563, 4378, 4278, 2854, 9026, 2633, + 8224, 864, 7509, 7784, 9069, 4470, 3927, 5928, 1670, + 914, 170, 8602, 3457, 7212, 6974, 1479, 9370, 7381, + 2658, 8489, 1270, 5298, 5099, 4398, 8171, 8584, 8883, + 8105, 1816, 1666, 2447, 6017, 7956, 6099, 7829, 681, + 5228, 5267, 6255, 8416, 8511, 763, 4121, 8578, 7506, + 3587, 7689, 2308, 4738, 5535, 9414, 4761, 5035, 2359, + 1265, 3016, 2175, 3688, 993, 1972, 3867, 1048, 837, + 6958, 200, 3699, 913, 9285, 6511, 9801, 5152, 1139, + 2389, 2481, 5440, 488, 9237, 8179, 9064, 2240, 4993, + 3756, 903, 5468, 1257, 5091, 435, 138, 2580, 1542, + 2751]), + values=tensor([4.4308e-01, 3.1197e-01, 1.8797e-01, 5.3445e-02, + 2.2937e-01, 9.7808e-01, 8.6859e-01, 5.3955e-01, + 2.1077e-01, 3.2643e-01, 7.1922e-01, 6.0351e-01, + 2.6532e-01, 5.6889e-01, 8.3896e-01, 1.2645e-01, + 9.7861e-02, 2.8205e-01, 7.4936e-01, 6.0377e-01, + 1.2261e-01, 3.7267e-01, 4.0961e-01, 8.2495e-01, + 9.6980e-01, 2.0190e-01, 3.0680e-02, 6.1510e-01, + 7.1294e-01, 4.1234e-01, 4.4412e-01, 6.1923e-01, + 5.3037e-01, 6.0855e-01, 5.9939e-01, 8.5625e-01, + 9.5107e-01, 3.0707e-02, 7.0614e-01, 6.1032e-01, + 2.5620e-01, 4.4318e-01, 6.5729e-01, 6.8602e-01, + 4.5617e-01, 1.3985e-01, 7.4612e-01, 4.7567e-01, + 
8.2686e-01, 3.4842e-01, 5.1703e-01, 4.3092e-01, + 3.9918e-01, 4.9985e-01, 5.5205e-01, 8.8465e-01, + 8.3533e-01, 6.2632e-01, 2.7394e-01, 7.9917e-02, + 4.8558e-02, 4.4009e-01, 4.2965e-01, 7.9919e-01, + 2.1837e-02, 2.3487e-01, 7.8166e-01, 8.4304e-01, + 2.7351e-01, 9.9755e-01, 5.7443e-01, 3.2118e-01, + 2.4803e-01, 6.6275e-01, 7.2858e-01, 8.9274e-01, + 4.0945e-01, 6.1216e-01, 7.9265e-01, 6.8616e-01, + 7.6406e-01, 7.4852e-01, 2.6320e-01, 6.5669e-01, + 8.0488e-01, 7.3041e-01, 5.3850e-01, 9.5393e-01, + 2.6172e-01, 1.7101e-01, 3.7747e-01, 3.0522e-02, + 8.3492e-01, 9.6472e-02, 9.4380e-01, 7.8516e-01, + 8.4441e-02, 9.2874e-01, 8.3642e-01, 9.3727e-01, + 8.5621e-01, 9.0620e-01, 1.2436e-01, 1.9523e-01, + 1.0191e-01, 3.8991e-01, 7.8448e-02, 1.0538e-01, + 8.5416e-01, 8.0478e-01, 4.2164e-01, 3.5669e-01, + 9.3736e-01, 6.6439e-01, 9.2715e-01, 2.5514e-01, + 5.4293e-01, 4.5494e-01, 2.1190e-01, 7.1212e-01, + 3.9122e-01, 7.5207e-01, 9.9190e-01, 3.0678e-01, + 5.8320e-01, 5.7906e-02, 8.7208e-01, 2.6309e-01, + 1.2189e-02, 4.1862e-01, 8.5081e-01, 7.9324e-01, + 4.1593e-01, 6.3148e-02, 4.4499e-02, 9.7484e-01, + 5.6103e-01, 4.3773e-01, 8.2553e-01, 5.8905e-02, + 1.0871e-01, 7.8207e-01, 1.0389e-02, 8.7864e-01, + 8.7822e-01, 4.5651e-02, 9.0901e-01, 7.9490e-01, + 2.9619e-01, 7.7587e-01, 7.4085e-02, 7.3010e-01, + 1.2423e-01, 9.4310e-01, 1.0742e-01, 9.0548e-01, + 8.2929e-01, 6.0406e-01, 1.7998e-01, 1.6626e-01, + 6.1376e-01, 2.5725e-01, 4.7882e-01, 1.6807e-01, + 6.0994e-01, 1.8796e-01, 9.6735e-01, 9.9662e-01, + 9.3664e-01, 4.3673e-01, 3.6363e-01, 7.7986e-01, + 5.3309e-01, 3.9472e-01, 1.4060e-01, 5.0255e-02, + 1.2567e-01, 9.6868e-01, 5.6954e-01, 4.2275e-01, + 6.3812e-01, 1.8943e-01, 9.9834e-01, 7.9944e-01, + 5.1828e-01, 4.0514e-02, 7.7939e-01, 6.9358e-01, + 6.7819e-01, 2.3179e-01, 9.1312e-01, 5.5724e-01, + 4.7010e-01, 9.5221e-01, 4.0189e-01, 4.2718e-01, + 7.5305e-01, 6.0030e-01, 7.9298e-01, 3.3161e-01, + 3.4465e-02, 2.4851e-01, 4.8949e-01, 4.2166e-01, + 3.8611e-01, 2.4291e-01, 6.7249e-02, 9.4941e-01, + 4.1943e-01, 2.5243e-01, 2.6618e-01, 2.2313e-01, + 5.1704e-01, 5.6617e-01, 5.7171e-01, 5.7648e-01, + 9.1784e-01, 1.3624e-01, 8.9469e-01, 3.0806e-01, + 6.4664e-01, 8.6902e-02, 6.6542e-01, 9.0732e-01, + 7.3210e-01, 9.7756e-01, 2.3843e-01, 3.3792e-01, + 7.8418e-01, 9.8922e-01, 4.4495e-01, 9.3954e-01, + 3.7566e-01, 9.2505e-01, 4.4295e-01, 7.2794e-01, + 7.9211e-02, 4.4404e-01, 4.6100e-02, 5.7765e-01, + 5.8187e-01, 9.6985e-01, 9.6213e-01, 7.4658e-01, + 7.0761e-01, 8.2159e-01, 6.1804e-01, 3.5292e-02, + 3.2185e-01, 7.7924e-01, 8.3511e-01, 8.7269e-01, + 4.0579e-01, 6.7668e-01, 9.1162e-01, 6.8107e-01, + 7.2504e-01, 5.6021e-01, 7.6876e-01, 1.9868e-01, + 8.8142e-01, 4.3284e-01, 9.2824e-01, 6.5991e-01, + 1.9411e-01, 4.3159e-01, 4.0906e-01, 4.1694e-01, + 6.1194e-01, 4.8754e-01, 9.0157e-01, 2.1611e-01, + 2.3560e-01, 3.2849e-01, 1.3202e-01, 9.7021e-01, + 7.1787e-01, 8.4372e-01, 4.8915e-01, 8.2151e-01, + 2.5876e-01, 2.9861e-01, 5.2132e-01, 3.1746e-01, + 6.9837e-01, 2.8788e-02, 7.2401e-01, 2.8029e-01, + 8.6162e-01, 8.1062e-01, 4.3280e-01, 2.6151e-02, + 7.5531e-01, 3.7269e-01, 1.9142e-01, 4.0646e-01, + 4.8904e-01, 2.4040e-01, 2.3500e-01, 2.0173e-01, + 8.1912e-01, 8.6856e-01, 8.0745e-01, 7.5404e-02, + 2.3064e-01, 1.9327e-01, 5.4272e-01, 5.2229e-01, + 3.6629e-01, 8.5411e-01, 3.7488e-01, 2.9113e-01, + 5.3255e-01, 9.4887e-02, 8.5627e-01, 6.6076e-01, + 9.3440e-01, 2.6320e-01, 3.8258e-01, 5.3860e-01, + 8.2461e-01, 7.2406e-01, 5.3972e-01, 1.5403e-02, + 1.3688e-01, 9.0014e-01, 6.1849e-01, 4.6596e-01, + 7.9783e-01, 1.3173e-01, 2.4939e-01, 4.4904e-02, + 
9.9006e-01, 5.0564e-01, 5.8197e-01, 5.0547e-01, + 8.5285e-01, 5.1257e-01, 2.0612e-01, 6.5477e-02, + 6.2484e-01, 7.7242e-01, 4.7953e-01, 1.9551e-01, + 3.3426e-01, 4.9184e-01, 5.0963e-01, 7.0138e-02, + 4.4230e-01, 5.4150e-02, 6.5081e-01, 7.1804e-01, + 6.8186e-01, 5.1620e-01, 9.2933e-01, 4.4101e-01, + 5.7504e-01, 7.7823e-01, 2.0146e-01, 6.9688e-01, + 1.6087e-01, 6.2297e-01, 9.6083e-01, 6.9426e-01, + 2.9018e-01, 3.0373e-02, 4.8914e-01, 8.0373e-01, + 7.0638e-02, 7.3174e-01, 3.4269e-01, 7.8326e-02, + 7.9921e-01, 8.7982e-01, 4.7077e-01, 4.5954e-01, + 9.9795e-01, 4.9164e-02, 4.1212e-01, 3.4281e-02, + 8.4880e-01, 6.6315e-01, 7.0144e-01, 7.2621e-01, + 5.1354e-01, 8.6921e-01, 6.6347e-01, 8.3727e-01, + 8.2096e-01, 6.1195e-01, 9.0285e-01, 9.3773e-01, + 7.1234e-01, 9.0891e-01, 1.6423e-02, 2.6716e-01, + 1.3582e-01, 7.1543e-03, 7.7423e-01, 1.8183e-01, + 9.0016e-01, 9.5084e-01, 3.0473e-01, 2.4591e-01, + 9.4028e-01, 5.9764e-01, 2.9427e-01, 4.6081e-01, + 4.3355e-01, 4.0985e-02, 3.3277e-01, 3.8733e-01, + 1.8604e-01, 3.3168e-01, 5.9840e-01, 7.6944e-01, + 9.3999e-01, 2.7987e-01, 1.4614e-01, 8.2056e-02, + 7.3622e-01, 6.1784e-01, 8.8956e-03, 9.6177e-01, + 1.8727e-01, 9.8084e-01, 7.5338e-01, 6.5027e-01, + 9.3160e-01, 7.2607e-01, 9.2861e-01, 8.2079e-01, + 5.8834e-01, 4.1508e-01, 4.8964e-01, 6.9344e-01, + 6.2267e-01, 3.1750e-01, 3.1374e-01, 7.1592e-01, + 7.9280e-01, 2.5358e-01, 4.6457e-01, 1.6452e-01, + 1.4029e-01, 5.7432e-01, 1.1376e-01, 5.3578e-01, + 6.1778e-01, 8.4333e-01, 2.8659e-02, 6.4221e-01, + 7.8001e-01, 3.2380e-01, 4.9484e-01, 7.9635e-02, + 6.1991e-01, 9.0335e-01, 6.6962e-01, 3.7802e-01, + 9.6051e-01, 5.8880e-01, 1.1893e-01, 2.6532e-01, + 9.3653e-01, 6.8492e-01, 2.3320e-01, 7.1576e-01, + 4.6454e-01, 2.4145e-02, 5.1605e-01, 7.0425e-01, + 4.8942e-01, 9.4910e-01, 4.8179e-01, 8.2509e-01, + 9.6174e-01, 7.6413e-01, 5.1746e-01, 2.2933e-01, + 9.1494e-01, 5.0612e-01, 9.0929e-01, 7.2939e-01, + 6.7376e-01, 4.7265e-02, 1.1090e-01, 7.0121e-01, + 8.8588e-01, 8.4663e-01, 6.6640e-01, 1.1304e-01, + 3.1106e-01, 5.1455e-01, 5.0340e-01, 2.4264e-01, + 8.6313e-01, 1.7859e-01, 9.9837e-01, 2.3745e-01, + 6.6207e-01, 8.7917e-01, 2.2590e-01, 9.7588e-01, + 5.9396e-02, 3.1123e-01, 3.8080e-01, 4.1327e-01, + 2.1431e-01, 4.1212e-01, 7.5213e-01, 5.7615e-01, + 1.8865e-01, 7.0497e-01, 3.1937e-01, 8.7796e-01, + 4.5445e-01, 7.6495e-01, 7.5492e-01, 8.6074e-01, + 2.2876e-01, 1.4310e-01, 8.1997e-03, 6.7831e-01, + 7.0371e-02, 4.6128e-01, 5.4054e-01, 4.6381e-02, + 9.1602e-01, 1.9521e-01, 8.3652e-01, 8.5711e-01, + 3.8294e-01, 4.4077e-01, 6.1819e-01, 3.5847e-01, + 5.3851e-01, 8.9008e-02, 9.0594e-01, 6.0964e-02, + 2.8076e-01, 2.6622e-01, 1.8305e-01, 1.6878e-01, + 2.3463e-01, 5.3373e-01, 5.8612e-01, 3.0940e-01, + 7.6451e-01, 2.4468e-01, 4.4342e-01, 4.3062e-01, + 5.2287e-01, 3.8341e-02, 9.5136e-02, 5.9392e-01, + 3.3357e-01, 4.8642e-01, 4.9403e-01, 4.7105e-01, + 1.7180e-01, 6.7583e-01, 2.6339e-02, 8.5978e-01, + 1.9540e-02, 9.8866e-01, 2.5108e-01, 9.3451e-01, + 6.4160e-01, 9.7454e-01, 7.4633e-01, 3.2774e-02, + 4.3418e-01, 5.7237e-01, 2.9944e-01, 1.9401e-03, + 7.3222e-01, 7.1967e-01, 2.9681e-01, 9.4555e-01, + 4.1127e-01, 7.4210e-01, 9.1907e-04, 1.4807e-01, + 9.4434e-01, 1.4080e-01, 7.1379e-01, 9.5724e-01, + 1.7743e-01, 3.8089e-01, 8.1445e-01, 9.7984e-01, + 6.7702e-01, 4.7894e-01, 7.4479e-01, 9.6224e-01, + 9.7178e-01, 1.6373e-01, 9.7581e-01, 1.2381e-01, + 8.9762e-01, 3.6616e-01, 3.4958e-01, 9.0339e-01, + 7.1716e-01, 8.3146e-01, 4.4127e-01, 2.9912e-01, + 2.8329e-01, 7.5943e-01, 6.7100e-01, 8.0922e-01, + 9.3158e-01, 5.2751e-01, 4.7764e-01, 4.9973e-01, + 
3.2727e-01, 6.6267e-01, 8.2950e-01, 5.7327e-01, + 2.0853e-01, 7.7014e-02, 8.0484e-01, 9.3265e-01, + 1.8783e-01, 3.9755e-01, 8.1811e-03, 5.7883e-01, + 3.1646e-01, 7.5932e-02, 9.2300e-01, 5.6335e-02, + 8.5733e-01, 5.9133e-01, 8.6764e-03, 9.7398e-02, + 2.7581e-01, 3.7587e-01, 6.4723e-01, 2.3720e-01, + 6.2694e-01, 9.7038e-01, 2.9279e-01, 4.3549e-01, + 6.7804e-01, 8.3553e-01, 1.7600e-01, 8.0817e-01, + 1.2729e-01, 8.7875e-01, 8.1679e-01, 7.7046e-01, + 9.8636e-01, 6.0119e-01, 4.6031e-02, 2.5393e-01, + 9.9894e-01, 9.7021e-01, 1.3820e-01, 2.3325e-03, + 7.2663e-01, 3.2775e-01, 3.1634e-01, 2.6114e-01, + 9.0123e-01, 7.4601e-01, 3.2515e-01, 3.8715e-01, + 7.2023e-01, 2.1980e-01, 7.1412e-01, 4.0252e-01, + 7.0211e-01, 4.6541e-01, 4.7331e-01, 4.3111e-01, + 4.0260e-01, 3.3605e-01, 4.0227e-01, 1.9313e-01, + 1.1364e-01, 3.5222e-01, 2.7377e-02, 1.9598e-01, + 4.2073e-01, 5.4640e-01, 2.9823e-02, 1.4818e-01, + 3.5992e-01, 8.1821e-01, 9.2584e-01, 9.7053e-01, + 9.9569e-01, 4.2409e-01, 7.1566e-01, 6.2511e-01, + 5.3642e-01, 2.8108e-01, 6.3471e-01, 2.1692e-01, + 9.1099e-01, 8.4695e-01, 7.2126e-01, 2.7157e-01, + 5.0612e-01, 1.7701e-01, 2.4788e-01, 7.9484e-01, + 8.2669e-01, 6.4434e-01, 5.3043e-01, 1.7046e-01, + 4.9023e-02, 7.9341e-01, 2.8008e-01, 8.0916e-01, + 4.4487e-01, 5.7368e-01, 3.6173e-01, 7.0322e-01, + 5.6399e-01, 3.8124e-01, 6.1293e-01, 9.3225e-01, + 5.0029e-01, 6.1324e-01, 5.7252e-01, 4.9592e-01, + 7.5016e-01, 2.3895e-01, 4.5346e-01, 1.7553e-01, + 4.9399e-01, 9.0784e-01, 3.4911e-01, 8.2497e-01, + 8.2017e-01, 4.9820e-01, 1.2991e-01, 1.3254e-01, + 8.6879e-01, 7.0386e-01, 2.9204e-01, 6.9002e-01, + 5.5192e-01, 5.8313e-01, 5.9638e-01, 4.5730e-01, + 8.6935e-02, 1.7300e-01, 2.8268e-01, 4.6859e-01, + 9.9142e-01, 8.7411e-01, 3.8983e-01, 3.0834e-01, + 5.8188e-01, 5.8482e-01, 8.3218e-01, 7.2262e-01, + 7.1773e-01, 8.9067e-01, 8.8226e-02, 5.4611e-01, + 2.6339e-01, 1.1401e-01, 1.4575e-01, 8.0093e-01, + 2.4324e-02, 2.5539e-01, 7.1831e-01, 8.3530e-01, + 4.9244e-01, 1.0876e-01, 9.7313e-01, 2.5486e-01, + 1.9986e-01, 5.3664e-01, 7.4498e-01, 9.5942e-01, + 9.1708e-02, 5.6893e-01, 8.8484e-01, 2.4138e-01, + 8.5057e-01, 3.2608e-02, 6.5487e-01, 1.5557e-01, + 5.7798e-02, 1.6615e-01, 8.8160e-01, 1.6253e-01, + 1.0406e-02, 8.5150e-01, 4.1359e-01, 6.0387e-01, + 4.5344e-01, 3.6798e-02, 3.0587e-02, 4.6506e-01, + 8.7986e-01, 9.9254e-01, 6.2568e-01, 5.2101e-01, + 2.1593e-01, 2.5865e-01, 2.7210e-01, 6.4908e-01, + 9.8695e-01, 8.4254e-02, 8.9464e-01, 1.4813e-01, + 6.4456e-01, 7.0081e-01, 4.9831e-01, 9.9326e-01, + 6.9768e-01, 2.6295e-01, 1.4056e-01, 6.6932e-01, + 3.5894e-01, 9.4878e-01, 2.5528e-01, 7.0174e-01, + 1.7039e-01, 3.7893e-01, 4.0578e-01, 2.7171e-01, + 3.0042e-01, 9.4481e-01, 4.6919e-01, 2.1658e-01, + 1.4714e-01, 8.3456e-01, 7.2751e-01, 6.5299e-02, + 7.5182e-01, 9.3894e-01, 7.0599e-01, 1.8945e-01, + 1.6018e-01, 9.1569e-01, 6.8546e-01, 1.3309e-01, + 2.4677e-01, 7.7508e-01, 5.3586e-01, 8.4616e-01, + 3.5252e-01, 2.2873e-02, 4.2090e-01, 4.6061e-01, + 6.8745e-01, 2.8012e-01, 7.9005e-01, 2.3404e-01, + 8.5578e-01, 4.6122e-01, 9.2978e-01, 3.9343e-01, + 2.1038e-01, 7.4110e-01, 3.1010e-01, 5.9250e-01, + 5.6887e-01, 3.5838e-01, 6.8762e-01, 9.3346e-01, + 4.8240e-01, 3.9596e-03, 1.2628e-01, 2.2419e-01, + 3.0224e-02, 6.4842e-01, 1.0593e-01, 6.8638e-01, + 9.0986e-01, 3.5948e-01, 9.7487e-01, 9.2991e-01, + 4.5758e-01, 1.3481e-01, 5.4167e-01, 8.2850e-02, + 6.8895e-01, 2.9804e-01, 2.2898e-01, 6.2436e-02, + 8.1796e-02, 1.4088e-01, 6.3335e-01, 8.5689e-02, + 2.3819e-01, 6.6270e-01, 3.6730e-01, 8.1432e-01, + 9.9546e-01, 2.8211e-01, 6.6515e-01, 7.7506e-01, + 
8.0189e-01, 3.6996e-01, 5.9008e-01, 4.3812e-01, + 4.2691e-01, 9.3955e-01, 2.7083e-01, 2.2072e-01, + 3.5474e-01, 7.2354e-01, 9.0015e-01, 5.7344e-01, + 3.2159e-01, 1.1370e-01, 8.9330e-02, 9.4579e-01, + 8.3997e-01, 2.1094e-01, 1.3783e-01, 9.9271e-01, + 6.9326e-01, 7.4426e-01, 2.6947e-01, 9.1784e-01, + 6.9434e-01, 1.2349e-01, 7.4276e-01, 8.6064e-01, + 3.8890e-01, 3.5853e-01, 1.0771e-01, 1.9632e-02, + 6.9343e-02, 5.0898e-01, 7.2526e-01, 2.2546e-02, + 7.2323e-01, 9.5991e-01, 2.9579e-01, 5.4421e-01, + 5.5662e-02, 7.8309e-01, 4.0353e-01, 4.5436e-01, + 4.2018e-03, 1.0133e-01, 1.5065e-01, 8.4231e-01, + 1.2021e-01, 1.8812e-01, 2.5846e-01, 9.7319e-01, + 8.6629e-01, 6.9570e-01, 4.1393e-02, 7.5289e-01, + 3.8621e-01, 2.9277e-01, 1.6119e-01, 5.0434e-01, + 6.2621e-01, 9.6651e-01, 7.5212e-01, 2.6830e-01, + 8.3525e-01, 6.8277e-01, 5.0696e-01, 1.9014e-01, + 6.1986e-02, 7.7024e-01, 2.1413e-01, 5.9126e-01, + 5.6955e-01, 9.5158e-01, 9.4663e-01, 3.2338e-02, + 3.6121e-01, 6.4154e-01, 1.4738e-03, 4.0990e-02, + 4.7973e-01, 3.2736e-01, 4.3832e-02, 8.6107e-01, + 2.0124e-01, 8.5864e-01, 5.2427e-01, 5.9686e-01, + 9.5699e-01, 8.5008e-01, 3.9900e-01, 4.1158e-01, + 1.5749e-01, 1.8221e-01, 2.2084e-01, 8.5518e-01, + 1.9073e-01, 8.3382e-01, 9.1158e-01, 5.7477e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4976, 0.0481, 0.1913, ..., 0.0301, 0.3766, 0.0826]) +tensor([0.2470, 0.1789, 0.3752, ..., 0.5099, 0.7520, 0.5628]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1026,268 +754,271 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.560847282409668 seconds +Time: 5.2000732421875 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 138585 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.35036301612854} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5150, 3949, 6725, 9833, 6214, 5081, 1554, 3283, 4592, - 1367, 5501, 6306, 1908, 8847, 9684, 2365, 4516, 2030, - 6246, 1037, 3129, 6994, 6859, 6054, 2697, 3073, 4745, - 5149, 9683, 6168, 7171, 2986, 8282, 5962, 9044, 9859, - 4416, 2729, 3201, 9334, 70, 3704, 1992, 4539, 3854, - 9774, 4984, 5696, 4186, 7809, 2303, 2349, 6788, 8167, - 7467, 8573, 1950, 1383, 4872, 1688, 3467, 531, 2507, - 77, 4045, 9094, 8112, 2615, 9777, 5813, 1750, 6905, - 7986, 8272, 5559, 7416, 2650, 6301, 7023, 6146, 5728, - 154, 3305, 852, 1556, 1672, 8260, 3100, 4027, 2524, - 7136, 1881, 3609, 4077, 5415, 1342, 7463, 0, 7237, - 949, 4271, 7500, 4398, 202, 6370, 6523, 8149, 1606, - 644, 1179, 4251, 732, 4140, 9430, 7869, 4357, 3082, - 7706, 929, 1350, 2066, 3153, 7727, 3490, 4529, 2830, - 604, 9531, 3037, 674, 1118, 7439, 1800, 9605, 7647, - 7273, 8451, 1727, 5065, 8403, 4478, 8920, 7608, 949, - 4223, 6471, 1464, 4183, 9344, 7358, 8169, 5124, 7436, - 7379, 4302, 4579, 6954, 7352, 2475, 6524, 6023, 4419, - 4933, 4411, 5203, 2328, 5122, 1046, 4112, 415, 8434, - 5247, 5282, 7252, 5516, 6132, 9262, 1383, 4445, 3838, - 3480, 5499, 6857, 5666, 2223, 3948, 5622, 6586, 8728, - 1267, 1798, 4209, 2945, 7759, 2936, 2435, 508, 7969, - 658, 334, 4257, 5246, 6393, 357, 3505, 4514, 1464, - 8590, 9903, 4716, 9051, 4470, 9570, 4777, 3595, 8441, - 9110, 9670, 2187, 8548, 3616, 2595, 1352, 5502, 4886, - 8337, 7807, 4921, 643, 1759, 191, 6520, 5823, 2075, - 4740, 3014, 8077, 3584, 7499, 5306, 1223, 1385, 7517, - 8020, 7952, 2205, 3204, 4927, 5397, 2795, 9112, 9159, - 2689, 6163, 2719, 3134, 7968, 7593, 4653, 54, 597, - 3621, 6399, 2434, 6710, 967, 6700, 1213, 1026, 551, - 5577, 3471, 9061, 2300, 9239, 9461, 23, 3996, 596, - 2494, 5453, 3959, 6826, 5001, 7382, 416, 1281, 7317, - 7289, 1990, 4004, 9139, 5821, 8865, 9134, 9753, 7856, - 3678, 597, 9576, 3434, 7138, 8525, 4380, 4708, 1098, - 1864, 2641, 5053, 725, 7343, 1604, 5046, 9253, 1076, - 1733, 7452, 5764, 4569, 2060, 2335, 2788, 666, 560, - 4714, 8624, 8928, 6013, 1750, 3137, 5802, 2485, 8605, - 4692, 8176, 851, 670, 6436, 4116, 3254, 4703, 3344, - 9079, 2177, 3623, 7381, 508, 1878, 8203, 179, 3099, - 5174, 8679, 6987, 5356, 2553, 3507, 667, 8739, 4484, - 5254, 6753, 2060, 7479, 4766, 9854, 7766, 9804, 802, - 2515, 3329, 2210, 7753, 2069, 70, 3487, 1593, 4737, - 6304, 4273, 1474, 5728, 8160, 440, 2963, 5961, 9940, - 3963, 6482, 6596, 1104, 8908, 2324, 4677, 901, 5681, - 7684, 1987, 3941, 1028, 3945, 522, 155, 9582, 618, - 9191, 2285, 2363, 1005, 6492, 2446, 5028, 8066, 9102, - 7602, 8504, 3215, 2742, 946, 538, 5191, 4078, 3141, - 982, 1552, 5688, 9154, 3693, 2376, 5721, 4025, 5733, - 4065, 1715, 8170, 7847, 2860, 4001, 475, 7239, 5611, - 5117, 3773, 7630, 4037, 8971, 3180, 5691, 7586, 7012, - 1221, 6168, 6276, 2660, 6301, 4688, 6558, 7433, 4113, - 3119, 4374, 617, 7732, 5185, 9146, 7702, 4380, 1370, - 4149, 205, 3424, 954, 5456, 1934, 9488, 5247, 9946, - 2494, 4258, 7045, 5497, 588, 1288, 5965, 6814, 3539, - 1764, 4809, 3064, 9477, 5056, 3833, 5491, 8813, 623, - 9737, 8723, 3926, 2417, 6250, 1832, 4224, 3868, 2666, - 5475, 2503, 4400, 2951, 8789, 8901, 2576, 1586, 2358, - 8683, 701, 2218, 324, 2750, 4642, 6563, 7866, 2142, - 2049, 8029, 6801, 8630, 4530, 1913, 7492, 4784, 1756, - 2725, 811, 3186, 5499, 565, 5265, 4256, 9478, 1700, - 8194, 7843, 9531, 2519, 7331, 6421, 4938, 2047, 9115, - 5085, 6558, 2790, 5305, 7400, 6481, 8276, 6051, 376, - 2232, 
3828, 5087, 901, 859, 311, 5986, 646, 2534, - 3646, 6330, 6624, 44, 9526, 1720, 7671, 786, 2103, - 7291, 6456, 6706, 9840, 151, 5594, 5539, 8478, 8848, - 9399, 6447, 7659, 8019, 3178, 3373, 8082, 4366, 3291, - 1095, 8101, 138, 8637, 5651, 2986, 4186, 1262, 8903, - 7692, 5148, 5226, 6772, 7717, 1398, 9840, 6283, 4653, - 916, 1943, 7857, 14, 2435, 9045, 6355, 6437, 5876, - 2079, 6222, 9106, 7167, 8674, 3568, 5387, 6404, 4327, - 9378, 7023, 3428, 7208, 3991, 3503, 1112, 5994, 7543, - 8077, 3450, 7732, 95, 4124, 4136, 4748, 216, 5127, - 2129, 134, 2135, 8151, 9959, 5515, 3277, 4587, 1429, - 2979, 932, 7056, 783, 3334, 7562, 460, 5737, 5306, - 1839, 8429, 174, 7235, 4614, 34, 4009, 7389, 701, - 5758, 8329, 9563, 8743, 4338, 3477, 1180, 1540, 5860, - 9475, 5545, 4339, 9525, 5428, 2826, 7521, 4441, 8311, - 1115, 7387, 6593, 2389, 3900, 8596, 9443, 4909, 2179, - 2149, 5400, 6357, 6980, 7081, 5466, 6799, 5303, 6495, - 572, 6717, 3685, 9544, 9986, 5977, 7724, 1134, 4967, - 2145, 3442, 1984, 9728, 2275, 9320, 1257, 102, 4987, - 8007, 1513, 959, 4257, 799, 4709, 204, 6285, 7273, - 8278, 2651, 8788, 6857, 3198, 3750, 7764, 9530, 2525, - 5355, 2828, 6803, 3742, 9808, 2702, 3428, 5248, 7386, - 9259, 3003, 8487, 8936, 9681, 4024, 4691, 9077, 8624, - 670, 3644, 2024, 5179, 8050, 1290, 8269, 8524, 4786, - 2503, 35, 5675, 3549, 6350, 1714, 678, 5292, 9599, - 9848, 2823, 3918, 8633, 2596, 7468, 1783, 4984, 5190, - 2747, 7283, 6599, 5946, 9563, 8503, 9940, 9756, 3292, - 4953, 1009, 8896, 653, 1878, 8055, 8508, 2037, 2440, - 37, 3449, 5269, 3294, 7638, 3773, 5688, 5609, 7128, - 1885, 2653, 15, 148, 8228, 7481, 7965, 3146, 3889, - 6682, 8636, 1153, 8978, 9471, 4557, 8139, 4145, 2086, - 2623, 2951, 2381, 6771, 7405, 7405, 7866, 6651, 961, - 6157, 9117, 8248, 4454, 4081, 1801, 8411, 624, 6352, - 5499, 2637, 2893, 2441, 8243, 5608, 5218, 6178, 2460, - 7724, 4117, 2164, 8643, 7095, 2933, 8474, 6756, 7154, - 4556, 7730, 8140, 7610, 525, 1161, 9855, 6850, 3574, - 5534, 4716, 9318, 179, 5902, 7492, 3084, 3893, 1120, - 3911, 5634, 9144, 2088, 5803, 5300, 9679, 7556, 7644, - 1213, 9980, 8515, 3166, 262, 4231, 480, 4900, 7635, - 9607, 6653, 3153, 9282, 6882, 6866, 9053, 6078, 1605, - 2679, 9434, 1838, 5200, 8526, 145, 8529, 81, 7768, - 2792, 1368, 1146, 6845, 2082, 779, 3727, 3189, 5616, - 2883, 6077, 3380, 5032, 8816, 3861, 2679, 2096, 1826, - 4481, 3081, 2880, 9558, 48, 8852, 7596, 4096, 1993, - 3536, 4469, 1068, 8771, 110, 3938, 7946, 5524, 2146, - 2369, 7248, 6850, 2766, 3813, 4176, 2118, 2240, 2650, - 661, 3663, 7071, 4825, 3012, 5874, 7763, 2, 9014, - 386, 6880, 6261, 4740, 4948, 6569, 5412, 7485, 5084, - 2240, 7660, 5562, 8271, 9281, 7660, 7758, 8339, 2685, - 4281]), - values=tensor([0.7305, 0.2440, 0.3019, 0.7007, 0.9449, 0.8426, 0.6302, - 0.6840, 0.3760, 0.2330, 0.8640, 0.1425, 0.7949, 0.3165, - 0.9440, 0.0634, 0.1824, 0.8939, 0.0831, 0.8632, 0.5557, - 0.9222, 0.7152, 0.4959, 0.3642, 0.8742, 0.0533, 0.8773, - 0.3271, 0.1850, 0.8267, 0.0692, 0.8764, 0.6532, 0.0689, - 0.0059, 0.9796, 0.6870, 0.9154, 0.4784, 0.5447, 0.7909, - 0.5242, 0.2989, 0.3478, 0.3160, 0.9938, 0.4624, 0.7891, - 0.5624, 0.6799, 0.1505, 0.3167, 0.1174, 0.4734, 0.9357, - 0.4414, 0.2681, 0.3600, 0.9851, 0.0984, 0.9817, 0.6071, - 0.2437, 0.6382, 0.9904, 0.2276, 0.5007, 0.7924, 0.8288, - 0.4341, 0.5700, 0.0922, 0.1698, 0.6893, 0.1819, 0.6179, - 0.0688, 0.2326, 0.7694, 0.2736, 0.3807, 0.6497, 0.0934, - 0.2985, 0.3575, 0.2669, 0.2111, 0.0895, 0.1728, 0.5018, - 0.9851, 0.2093, 0.7627, 0.8317, 0.2321, 0.2733, 0.9575, - 0.1373, 0.4112, 0.4346, 0.2690, 
0.0832, 0.9308, 0.0856, - 0.4248, 0.1271, 0.2415, 0.1779, 0.9677, 0.0600, 0.9191, - 0.1112, 0.1565, 0.1580, 0.1619, 0.6745, 0.6379, 0.9366, - 0.4143, 0.1271, 0.0864, 0.7266, 0.3906, 0.9727, 0.7339, - 0.0407, 0.0059, 0.6237, 0.7622, 0.9907, 0.2749, 0.4392, - 0.2002, 0.6558, 0.9415, 0.5261, 0.2561, 0.7660, 0.3448, - 0.1122, 0.7076, 0.6037, 0.4897, 0.4670, 0.8339, 0.5757, - 0.4177, 0.7623, 0.5865, 0.8495, 0.6497, 0.9778, 0.6297, - 0.3389, 0.2747, 0.0702, 0.1398, 0.1005, 0.4201, 0.1831, - 0.2403, 0.3332, 0.4306, 0.5321, 0.7778, 0.0204, 0.4136, - 0.1536, 0.7541, 0.0838, 0.5081, 0.7545, 0.0833, 0.4845, - 0.3873, 0.2490, 0.4675, 0.2018, 0.9437, 0.0574, 0.7574, - 0.4217, 0.4481, 0.8897, 0.5683, 0.3881, 0.1927, 0.8680, - 0.3664, 0.8069, 0.2192, 0.4607, 0.7321, 0.3086, 0.6446, - 0.3613, 0.6584, 0.1132, 0.8934, 0.0915, 0.5183, 0.8634, - 0.8577, 0.6923, 0.8618, 0.6853, 0.8256, 0.8384, 0.2295, - 0.3689, 0.5760, 0.2086, 0.3564, 0.8483, 0.6783, 0.7902, - 0.6082, 0.2549, 0.6415, 0.4994, 0.2078, 0.5032, 0.3790, - 0.4926, 0.0907, 0.6911, 0.2975, 0.8561, 0.2283, 0.4530, - 0.2236, 0.5933, 0.7581, 0.3982, 0.1913, 0.0682, 0.2359, - 0.4488, 0.3220, 0.4875, 0.7597, 0.7930, 0.2062, 0.4708, - 0.1870, 0.0373, 0.0367, 0.8306, 0.1947, 0.2315, 0.0207, - 0.8163, 0.7626, 0.9301, 0.8099, 0.3633, 0.5641, 0.8120, - 0.6141, 0.5594, 0.4658, 0.2150, 0.1654, 0.3051, 0.0343, - 0.3400, 0.5978, 0.5354, 0.0263, 0.3151, 0.8046, 0.5327, - 0.3066, 0.2487, 0.3111, 0.0893, 0.1612, 0.5907, 0.3107, - 0.8584, 0.8229, 0.4034, 0.2252, 0.3659, 0.2053, 0.7999, - 0.7701, 0.7502, 0.7518, 0.5690, 0.1322, 0.1586, 0.1160, - 0.9796, 0.3007, 0.1296, 0.7414, 0.1813, 0.4708, 0.0361, - 0.8196, 0.1266, 0.8025, 0.2789, 0.7870, 0.4347, 0.9531, - 0.2674, 0.9698, 0.4764, 0.0519, 0.6509, 0.3234, 0.4769, - 0.3125, 0.1997, 0.6900, 0.7719, 0.1567, 0.8227, 0.8361, - 0.3658, 0.9962, 0.8049, 0.2839, 0.0239, 0.5230, 0.1679, - 0.0012, 0.1941, 0.1825, 0.6328, 0.8021, 0.7872, 0.5540, - 0.0997, 0.9139, 0.1493, 0.1197, 0.6714, 0.2685, 0.5289, - 0.4741, 0.4375, 0.4510, 0.6505, 0.0060, 0.0804, 0.3052, - 0.4065, 0.1432, 0.8715, 0.0973, 0.2004, 0.0400, 0.2303, - 0.6891, 0.4089, 0.8692, 0.1440, 0.3744, 0.8894, 0.4839, - 0.5594, 0.2863, 0.2636, 0.1806, 0.4900, 0.9233, 0.0287, - 0.1519, 0.0238, 0.9718, 0.0696, 0.4915, 0.4842, 0.1594, - 0.7936, 0.6860, 0.5880, 0.4352, 0.0092, 0.6365, 0.1016, - 0.1299, 0.9522, 0.9906, 0.4238, 0.2435, 0.5165, 0.1731, - 0.6409, 0.2362, 0.6101, 0.1717, 0.7603, 0.4938, 0.2723, - 0.7599, 0.9112, 0.0268, 0.9393, 0.8969, 0.3631, 0.2998, - 0.3500, 0.7422, 0.9345, 0.9975, 0.0517, 0.1544, 0.6201, - 0.5972, 0.3434, 0.9546, 0.0842, 0.2284, 0.4080, 0.3233, - 0.9813, 0.3166, 0.9934, 0.5541, 0.2131, 0.8230, 0.1841, - 0.3426, 0.7598, 0.6218, 0.4928, 0.7872, 0.4823, 0.5321, - 0.2430, 0.9717, 0.6158, 0.5835, 0.2198, 0.3750, 0.8817, - 0.8977, 0.6208, 0.4204, 0.5681, 0.7034, 0.0297, 0.1272, - 0.9109, 0.2850, 0.6202, 0.3491, 0.4077, 0.0265, 0.9469, - 0.5186, 0.0530, 0.5136, 0.4687, 0.4898, 0.2296, 0.6555, - 0.6814, 0.2474, 0.5224, 0.6744, 0.7214, 0.5305, 0.5181, - 0.3443, 0.7930, 0.3254, 0.8409, 0.0232, 0.8876, 0.4606, - 0.8758, 0.4816, 0.3285, 0.3194, 0.0473, 0.5240, 0.4558, - 0.2643, 0.3823, 0.3375, 0.5496, 0.8842, 0.2643, 0.8295, - 0.0774, 0.7973, 0.2342, 0.2846, 0.4365, 0.5561, 0.8911, - 0.9974, 0.3818, 0.8914, 0.6534, 0.3445, 0.0570, 0.5409, - 0.8934, 0.8748, 0.8762, 0.1700, 0.1286, 0.5886, 0.2604, - 0.5122, 0.3910, 0.6718, 0.8386, 0.7880, 0.7735, 0.8382, - 0.7683, 0.8908, 0.5226, 0.8048, 0.4884, 0.3660, 0.1809, - 0.2335, 0.6855, 0.8170, 0.5295, 0.7842, 0.7798, 
0.2910, - 0.3853, 0.8829, 0.7835, 0.1690, 0.4663, 0.2045, 0.1035, - 0.4997, 0.5297, 0.1289, 0.5179, 0.0606, 0.8583, 0.3048, - 0.7708, 0.1893, 0.6309, 0.7686, 0.9828, 0.9048, 0.3841, - 0.2185, 0.6435, 0.4478, 0.9468, 0.8737, 0.2111, 0.1349, - 0.9495, 0.4091, 0.3367, 0.4818, 0.6928, 0.9558, 0.4398, - 0.5439, 0.7708, 0.4337, 0.4611, 0.2583, 0.1955, 0.4439, - 0.4445, 0.7809, 0.8863, 0.3069, 0.4805, 0.8381, 0.8548, - 0.5700, 0.1066, 0.1066, 0.8937, 0.5723, 0.6920, 0.0327, - 0.2123, 0.4301, 0.6855, 0.9118, 0.1103, 0.8642, 0.3839, - 0.6499, 0.7405, 0.8262, 0.1273, 0.8596, 0.1214, 0.5538, - 0.2683, 0.7685, 0.0380, 0.9668, 0.3679, 0.4231, 0.3206, - 0.2939, 0.2000, 0.2628, 0.6644, 0.0046, 0.4634, 0.6009, - 0.9618, 0.0916, 0.1533, 0.8964, 0.0750, 0.6530, 0.7420, - 0.0171, 0.1412, 0.9378, 0.1278, 0.5952, 0.5957, 0.1602, - 0.4569, 0.6997, 0.3923, 0.9702, 0.9204, 0.6140, 0.5178, - 0.9051, 0.2480, 0.1154, 0.8068, 0.0751, 0.2951, 0.2063, - 0.5678, 0.8248, 0.3371, 0.6619, 0.2068, 0.7570, 0.0168, - 0.6561, 0.1487, 0.6018, 0.3884, 0.9519, 0.9836, 0.1373, - 0.8836, 0.8831, 0.2552, 0.0736, 0.2226, 0.3008, 0.4150, - 0.1279, 0.9067, 0.1991, 0.2146, 0.2746, 0.7514, 0.1652, - 0.6727, 0.7846, 0.1471, 0.6132, 0.2493, 0.3325, 0.1885, - 0.6417, 0.6590, 0.4757, 0.4061, 0.4359, 0.7236, 0.4441, - 0.8643, 0.8479, 0.1003, 0.0185, 0.7836, 0.3159, 0.9798, - 0.5805, 0.8284, 0.5046, 0.3086, 0.3844, 0.2360, 0.1980, - 0.4207, 0.9796, 0.1878, 0.5333, 0.3240, 0.4450, 0.8072, - 0.9168, 0.9993, 0.1170, 0.7410, 0.7954, 0.0307, 0.0989, - 0.1791, 0.2914, 0.1820, 0.6230, 0.6850, 0.2813, 0.1157, - 0.9867, 0.0737, 0.8029, 0.1015, 0.4682, 0.7620, 0.7696, - 0.1064, 0.5765, 0.9709, 0.1174, 0.5832, 0.0978, 0.3568, - 0.0730, 0.0563, 0.7057, 0.7628, 0.0959, 0.4747, 0.3801, - 0.4508, 0.6394, 0.2715, 0.6105, 0.9926, 0.0896, 0.9574, - 0.6793, 0.6180, 0.4648, 0.2921, 0.3386, 0.6820, 0.5212, - 0.2154, 0.4717, 0.5014, 0.2617, 0.2977, 0.8006, 0.3353, - 0.8470, 0.7611, 0.1607, 0.8669, 0.7715, 0.5232, 0.4841, - 0.9115, 0.0228, 0.4693, 0.3305, 0.8215, 0.0869, 0.1824, - 0.6579, 0.0833, 0.9613, 0.4715, 0.3285, 0.5471, 0.6905, - 0.3957, 0.1840, 0.6202, 0.6851, 0.4733, 0.5934, 0.1386, - 0.7851, 0.7342, 0.6370, 0.2348, 0.9459, 0.2391, 0.7316, - 0.0941, 0.4717, 0.1253, 0.8566, 0.1216, 0.7571, 0.4196, - 0.8179, 0.8132, 0.2885, 0.9707, 0.1275, 0.3342, 0.3134, - 0.7312, 0.8352, 0.1365, 0.9373, 0.3091, 0.3749, 0.0663, - 0.0702, 0.0409, 0.7695, 0.6795, 0.1004, 0.2734, 0.4594, - 0.7910, 0.2158, 0.2145, 0.3182, 0.4406, 0.5618, 0.1494, - 0.2911, 0.5915, 0.9878, 0.1365, 0.7549, 0.8342, 0.9059, - 0.4562, 0.0558, 0.1508, 0.1673, 0.9872, 0.7096, 0.7520, - 0.1691, 0.3662, 0.9410, 0.2625, 0.1475, 0.8645, 0.3039, - 0.2029, 0.3516, 0.6215, 0.5740, 0.4584, 0.2225, 0.4006, - 0.3128, 0.0380, 0.6050, 0.2938, 0.0878, 0.3289, 0.8456, - 0.2525, 0.9209, 0.0593, 0.6809, 0.3115, 0.4275, 0.2499, - 0.9295, 0.5425, 0.4391, 0.3831, 0.4593, 0.7149, 0.1921, - 0.1184, 0.2786, 0.9624, 0.1167, 0.0044, 0.4051, 0.0108, - 0.4520, 0.6056, 0.9254, 0.0610, 0.3916, 0.2002, 0.0181, - 0.2959, 0.8354, 0.8965, 0.7722, 0.6322, 0.5425, 0.0976, - 0.8386, 0.2720, 0.5773, 0.9188, 0.0410, 0.3565, 0.1415, - 0.0175, 0.2161, 0.8758, 0.7699, 0.2833, 0.0538, 0.3260, - 0.5205, 0.1162, 0.9185, 0.1645, 0.4861, 0.6898, 0.0238, - 0.7657, 0.0799, 0.4505, 0.6300, 0.6548, 0.1225, 0.0206, - 0.5211, 0.1829, 0.9455, 0.5775, 0.0847, 0.2663, 0.6607, - 0.9023, 0.5472, 0.7809, 0.3315, 0.1532, 0.1912, 0.3343, - 0.7726, 0.2157, 0.2423, 0.7378, 0.9800, 0.4469, 0.4539, - 0.9687, 0.0064, 0.0441, 0.0697, 0.5833, 0.6814, 0.6849, - 
0.0435, 0.1250, 0.2613, 0.0854, 0.3080, 0.5157, 0.4405, - 0.5866, 0.4456, 0.1962, 0.6798, 0.4460, 0.0218, 0.8899, - 0.8373, 0.1209, 0.8163, 0.3718, 0.6930, 0.1628, 0.4197, - 0.2782, 0.5692, 0.9005, 0.5938, 0.2539, 0.8654, 0.7168, - 0.9464, 0.8460, 0.4902, 0.5805, 0.0640, 0.5710, 0.7328, - 0.9874, 0.0901, 0.6221, 0.7762, 0.9765, 0.8525]), + col_indices=tensor([4697, 7166, 5061, 5588, 16, 2135, 4356, 6963, 1471, + 6448, 7291, 4803, 9505, 5043, 277, 2464, 2086, 1290, + 3678, 4268, 7790, 7161, 9391, 6870, 1104, 4318, 4328, + 1307, 8762, 527, 1408, 291, 3379, 2223, 1854, 5746, + 7689, 3155, 2599, 5985, 562, 7105, 2363, 1336, 2354, + 9722, 6938, 4604, 544, 4480, 6924, 756, 8519, 2161, + 2844, 3795, 2249, 2838, 3737, 2502, 2121, 8082, 770, + 4029, 3355, 9855, 1073, 6942, 4266, 2582, 9197, 4023, + 3337, 9541, 584, 3520, 718, 2188, 91, 1677, 7180, + 592, 5441, 4364, 2632, 5143, 3577, 379, 912, 6391, + 6876, 2862, 4814, 5828, 8125, 8381, 5895, 5542, 654, + 1163, 551, 2472, 3859, 1488, 8338, 6429, 945, 8843, + 6221, 7264, 1584, 1135, 5029, 8739, 5361, 306, 3702, + 75, 8678, 7822, 7516, 3077, 8719, 1293, 8302, 407, + 6441, 7874, 8592, 726, 7436, 8339, 9066, 5519, 1742, + 3358, 7547, 6659, 1182, 3189, 5263, 1911, 3620, 557, + 7335, 5792, 2274, 5567, 8946, 6704, 9355, 8223, 1415, + 5086, 1888, 4346, 3256, 8462, 3487, 4165, 7526, 5764, + 4191, 3013, 5959, 5517, 5792, 6426, 3961, 8148, 8232, + 8407, 9509, 5897, 2897, 660, 586, 9313, 2281, 2249, + 4092, 7785, 3447, 7078, 7930, 4221, 8471, 7519, 806, + 5956, 8872, 8481, 2636, 4425, 4719, 791, 1030, 6174, + 9293, 1990, 1045, 151, 5089, 8392, 1036, 4003, 1497, + 5244, 39, 2275, 7530, 5261, 6843, 988, 2126, 982, + 9083, 8042, 1348, 6681, 3802, 2121, 7115, 1054, 2435, + 7205, 9552, 8690, 5114, 9476, 5717, 2408, 8184, 7824, + 9313, 3761, 2873, 9785, 7938, 2945, 6900, 5518, 4488, + 7101, 1388, 4667, 8374, 9455, 8857, 1793, 5987, 9401, + 9386, 1164, 9305, 3791, 9871, 3319, 6660, 1072, 7802, + 181, 5603, 275, 5311, 3636, 7068, 8257, 1049, 2594, + 5178, 5931, 8597, 4244, 1, 4929, 2430, 2753, 6854, + 19, 6445, 6179, 7023, 415, 455, 4640, 6197, 565, + 5244, 3724, 3740, 3737, 6799, 2854, 1528, 2703, 6907, + 1692, 7223, 5568, 8391, 903, 5562, 3692, 13, 9742, + 6760, 9116, 8219, 6225, 6872, 7491, 1948, 8894, 6951, + 6701, 3888, 177, 1145, 6019, 2062, 5666, 4204, 263, + 8520, 9857, 4, 3328, 5003, 3936, 9338, 1578, 4530, + 8595, 8915, 7184, 6826, 4275, 5111, 5759, 194, 4188, + 5918, 277, 7106, 8782, 8414, 7949, 7170, 1639, 62, + 6818, 7225, 5182, 7488, 4554, 6393, 9730, 3220, 5838, + 7300, 7246, 5851, 8938, 6887, 9953, 2555, 8583, 4094, + 4848, 6356, 2647, 4351, 2030, 9870, 213, 3914, 1007, + 1771, 3262, 7381, 1220, 9636, 4597, 4571, 6196, 3920, + 7343, 3690, 9563, 876, 8237, 3502, 2169, 2666, 3964, + 1868, 3662, 6173, 2861, 3009, 2617, 1520, 1969, 9945, + 2217, 216, 3405, 2727, 7331, 5516, 3313, 8867, 1398, + 9333, 7370, 890, 431, 1265, 5794, 8592, 2943, 4729, + 2449, 7705, 1592, 7102, 105, 7044, 8103, 6453, 8235, + 1866, 1739, 9559, 8080, 2393, 7927, 9771, 9848, 7439, + 606, 3851, 9329, 3911, 3040, 9978, 1385, 6935, 3193, + 923, 5265, 6459, 7864, 421, 7866, 9885, 8293, 5188, + 9844, 2097, 4729, 8319, 3571, 7741, 768, 7514, 9274, + 8040, 5362, 3316, 5984, 1691, 6189, 179, 1960, 2418, + 2380, 818, 4105, 5449, 3011, 6572, 8379, 4064, 2221, + 3990, 3608, 8595, 8812, 7798, 942, 5137, 3586, 3751, + 7609, 6584, 6940, 8755, 3038, 8349, 6614, 9352, 6535, + 5672, 7318, 1183, 2235, 6156, 2771, 3010, 6933, 6284, + 4329, 9589, 5099, 5335, 4662, 1484, 6974, 7662, 1907, + 4536, 4157, 
6097, 8339, 2664, 9437, 6400, 5198, 5295, + 3450, 1767, 5829, 4204, 5317, 990, 7113, 982, 1601, + 2203, 9050, 8231, 6364, 2072, 7833, 1626, 2611, 6349, + 5277, 6459, 3614, 8542, 5678, 2063, 8267, 8075, 6328, + 8771, 8386, 3488, 8238, 7698, 4839, 7903, 9478, 8118, + 5907, 2929, 543, 2445, 3729, 5852, 1085, 7448, 256, + 3524, 6094, 1707, 6414, 2192, 7583, 281, 9497, 5621, + 3582, 357, 875, 1889, 4366, 111, 4286, 5521, 4390, + 3595, 8641, 3019, 6735, 185, 9620, 3018, 7380, 6427, + 1431, 3365, 2286, 6767, 5235, 167, 5980, 5058, 9416, + 457, 7842, 4375, 7558, 8255, 3159, 7486, 8525, 7368, + 3900, 1742, 5818, 9119, 216, 3787, 1563, 4456, 3168, + 1714, 7483, 4349, 5644, 7353, 8821, 5300, 7350, 2124, + 5380, 7389, 3502, 2051, 2871, 403, 8987, 1586, 2339, + 4954, 8779, 5254, 4481, 2374, 9632, 2469, 7763, 7531, + 9749, 6006, 5879, 241, 7108, 5803, 1919, 7453, 3908, + 5973, 8400, 7594, 9580, 3496, 5265, 3448, 6072, 7019, + 1631, 5492, 6815, 7914, 7129, 4280, 3139, 7265, 9921, + 6177, 227, 9221, 9352, 1035, 3449, 5135, 5318, 7434, + 9956, 1960, 6678, 1670, 3095, 2436, 8385, 1344, 7536, + 3642, 9326, 4412, 945, 8602, 9960, 5698, 1538, 7613, + 5248, 2831, 769, 1902, 8574, 7714, 2715, 2006, 3548, + 5972, 1183, 1596, 7102, 1942, 3782, 2597, 7787, 8812, + 9823, 9943, 414, 3922, 9450, 2232, 7133, 5729, 7806, + 4545, 4241, 5058, 2578, 1957, 7221, 3913, 747, 591, + 2874, 1028, 5677, 8246, 6676, 2445, 6604, 3128, 5240, + 4373, 3918, 412, 4926, 7491, 3044, 9462, 1663, 9367, + 3323, 1540, 6270, 7647, 3406, 7353, 669, 2696, 630, + 2206, 3019, 9522, 4546, 4195, 9321, 4046, 7269, 552, + 9485, 436, 8075, 3796, 6400, 1002, 1708, 5611, 4150, + 8260, 9883, 5484, 4309, 5729, 5444, 9315, 9791, 2575, + 21, 5939, 7374, 7460, 9398, 7401, 2483, 6104, 7336, + 8951, 691, 4859, 9986, 7873, 784, 1614, 8961, 4257, + 3142, 491, 1414, 2894, 808, 5257, 2024, 1463, 2984, + 7475, 5506, 575, 3884, 1043, 668, 4232, 1125, 1743, + 3099, 140, 7426, 4914, 5008, 2730, 2533, 2149, 9150, + 8519, 5321, 6271, 1325, 7804, 5386, 8836, 2279, 1472, + 9600, 7373, 4852, 8986, 2120, 342, 2579, 7341, 7044, + 8551, 827, 1384, 9090, 7444, 9174, 9425, 3649, 7168, + 574, 4050, 3751, 4862, 3962, 5757, 6505, 9944, 2188, + 6405, 8366, 1403, 9207, 6574, 5873, 2922, 6905, 9658, + 3370, 6019, 3256, 9137, 4856, 8275, 8950, 703, 7183, + 3407, 876, 615, 9393, 4885, 9969, 8268, 8745, 347, + 965, 6224, 5082, 9201, 1428, 1111, 7082, 1555, 1826, + 4007, 6138, 232, 1467, 3782, 6875, 7892, 8057, 7561, + 6852, 2513, 9099, 5320, 6933, 9825, 1800, 5055, 79, + 1663, 5080, 8451, 7032, 589, 6114, 9378, 9981, 2461, + 5754, 2549, 4881, 8389, 336, 5068, 9716, 5623, 5220, + 780, 4178, 8944, 8996, 4259, 5611, 7223, 559, 4236, + 3072, 6971, 2748, 6288, 3209, 8683, 808, 7949, 3106, + 7493, 683, 8652, 520, 1346, 9307, 2626, 4801, 6474, + 1886, 5674, 4177, 7853, 5033, 2575, 8954, 2573, 2504, + 7279, 822, 9405, 6370, 4013, 515, 4766, 4745, 4190, + 2478]), + values=tensor([0.3702, 0.7345, 0.6826, 0.7317, 0.9145, 0.4627, 0.9182, + 0.3102, 0.9769, 0.3102, 0.7105, 0.5020, 0.2193, 0.6551, + 0.9847, 0.7108, 0.2795, 0.0577, 0.0636, 0.0958, 0.3394, + 0.7039, 0.6050, 0.4109, 0.6582, 0.9148, 0.5523, 0.8511, + 0.8664, 0.2784, 0.7959, 0.7882, 0.2125, 0.8703, 0.7766, + 0.6655, 0.0159, 0.0276, 0.8728, 0.5178, 0.1993, 0.7845, + 0.0622, 0.7651, 0.8345, 0.7501, 0.4233, 0.7647, 0.8375, + 0.9567, 0.6640, 0.0925, 0.5011, 0.4186, 0.2305, 0.1209, + 0.3675, 0.7229, 0.4434, 0.7536, 0.8675, 0.9273, 0.9000, + 0.5859, 0.7991, 0.6617, 0.7567, 0.6604, 0.3565, 0.8418, + 0.2492, 0.3778, 0.6739, 0.0841, 0.8716, 0.0141, 
0.4204, + 0.6631, 0.2720, 0.2107, 0.9940, 0.9248, 0.6300, 0.0939, + 0.3527, 0.0997, 0.7059, 0.7313, 0.5081, 0.9516, 0.3249, + 0.2754, 0.3468, 0.6433, 0.2963, 0.4800, 0.6656, 0.3843, + 0.8499, 0.9066, 0.6688, 0.3822, 0.2357, 0.9725, 0.0942, + 0.6006, 0.3470, 0.5653, 0.2485, 0.6372, 0.9438, 0.3322, + 0.5338, 0.5472, 0.6636, 0.8549, 0.6282, 0.8324, 0.4419, + 0.7712, 0.8225, 0.7963, 0.1885, 0.3427, 0.1840, 0.2347, + 0.3586, 0.2267, 0.3582, 0.0502, 0.2847, 0.0839, 0.4980, + 0.5720, 0.8244, 0.5548, 0.1585, 0.6117, 0.5615, 0.1633, + 0.4852, 0.7591, 0.6312, 0.0304, 0.0115, 0.4654, 0.7039, + 0.0731, 0.0469, 0.9080, 0.8957, 0.8995, 0.5979, 0.0627, + 0.3616, 0.6156, 0.4437, 0.6334, 0.5153, 0.3494, 0.3764, + 0.7916, 0.2639, 0.1979, 0.8023, 0.2074, 0.6444, 0.3220, + 0.2499, 0.5444, 0.5863, 0.7301, 0.7425, 0.8764, 0.7607, + 0.6959, 0.7554, 0.5870, 0.4559, 0.2950, 0.7498, 0.5176, + 0.5120, 0.6993, 0.4963, 0.1680, 0.5563, 0.5449, 0.3519, + 0.0066, 0.3301, 0.4540, 0.4376, 0.9639, 0.5641, 0.5537, + 0.4402, 0.8707, 0.3104, 0.0956, 0.0155, 0.3271, 0.3330, + 0.7348, 0.9656, 0.2688, 0.9458, 0.9582, 0.1176, 0.1584, + 0.7224, 0.7984, 0.3026, 0.3236, 0.2042, 0.4762, 0.3403, + 0.5459, 0.2082, 0.8081, 0.1694, 0.4823, 0.8493, 0.6452, + 0.7315, 0.2215, 0.0742, 0.7020, 0.7765, 0.4754, 0.6994, + 0.9915, 0.6092, 0.9429, 0.7011, 0.7557, 0.5936, 0.3090, + 0.9360, 0.4815, 0.8630, 0.0761, 0.2189, 0.7047, 0.3294, + 0.8754, 0.6163, 0.0488, 0.6185, 0.3304, 0.5333, 0.4623, + 0.9880, 0.5947, 0.4723, 0.6939, 0.1686, 0.4334, 0.7793, + 0.9469, 0.2092, 0.8309, 0.2025, 0.9665, 0.7773, 0.2745, + 0.3283, 0.9296, 0.4085, 0.1317, 0.7192, 0.9151, 0.7353, + 0.1923, 0.0083, 0.9296, 0.4358, 0.9813, 0.4807, 0.4300, + 0.4265, 0.2061, 0.0956, 0.3277, 0.4200, 0.0172, 0.9850, + 0.0293, 0.0540, 0.4821, 0.8662, 0.6567, 0.1402, 0.0878, + 0.7397, 0.0970, 0.5083, 0.8546, 0.5348, 0.5054, 0.9112, + 0.7615, 0.7712, 0.5950, 0.3213, 0.6891, 0.1245, 0.9648, + 0.7567, 0.8608, 0.0447, 0.4044, 0.0727, 0.2904, 0.5760, + 0.0299, 0.5736, 0.8328, 0.2022, 0.0552, 0.1688, 0.5362, + 0.2555, 0.9247, 0.5106, 0.9483, 0.3127, 0.9420, 0.9133, + 0.9511, 0.9206, 0.0691, 0.4223, 0.0884, 0.8678, 0.9136, + 0.6899, 0.4196, 0.8049, 0.3845, 0.7849, 0.7494, 0.3493, + 0.4705, 0.9510, 0.5380, 0.4472, 0.5142, 0.2078, 0.4678, + 0.6305, 0.3763, 0.9058, 0.4229, 0.2960, 0.8140, 0.9217, + 0.7544, 0.2774, 0.2173, 0.2528, 0.8161, 0.2436, 0.5060, + 0.1386, 0.0998, 0.0274, 0.5298, 0.7264, 0.7633, 0.2756, + 0.3740, 0.5586, 0.1052, 0.8699, 0.6931, 0.8838, 0.0909, + 0.3013, 0.4689, 0.7725, 0.9619, 0.6939, 0.5267, 0.4246, + 0.4545, 0.9840, 0.3185, 0.5327, 0.6481, 0.9211, 0.8926, + 0.9382, 0.0907, 0.6506, 0.8946, 0.6039, 0.4161, 0.1871, + 0.1562, 0.6607, 0.1406, 0.0681, 0.4142, 0.9674, 0.8903, + 0.0812, 0.4035, 0.3474, 0.1792, 0.3996, 0.5828, 0.7523, + 0.7441, 0.4798, 0.8895, 0.2256, 0.1005, 0.5124, 0.8363, + 0.5373, 0.0602, 0.9539, 0.6347, 0.7295, 0.5553, 0.7356, + 0.2765, 0.4046, 0.2894, 0.2378, 0.0906, 0.1991, 0.7791, + 0.2130, 0.3761, 0.0030, 0.1246, 0.3022, 0.6047, 0.3588, + 0.4770, 0.4184, 0.1301, 0.7836, 0.7623, 0.0285, 0.6739, + 0.1521, 0.6648, 0.7060, 0.0412, 0.5957, 0.5624, 0.7317, + 0.4113, 0.1090, 0.2314, 0.3418, 0.0085, 0.6108, 0.2004, + 0.5023, 0.8270, 0.0196, 0.8551, 0.6085, 0.5507, 0.8094, + 0.6719, 0.4063, 0.4349, 0.7709, 0.2693, 0.9615, 0.8472, + 0.7428, 0.1841, 0.8906, 0.0929, 0.7519, 0.5412, 0.5132, + 0.3513, 0.7644, 0.7524, 0.9962, 0.4799, 0.4440, 0.3911, + 0.2358, 0.1587, 0.2872, 0.6304, 0.2955, 0.7964, 0.5753, + 0.6496, 0.7159, 0.6989, 0.6410, 0.5193, 0.6679, 0.9569, + 
0.4824, 0.7854, 0.4632, 0.8506, 0.7043, 0.6992, 0.9478, + 0.3157, 0.9262, 0.1433, 0.3194, 0.5697, 0.4453, 0.2121, + 0.4918, 0.0088, 0.2532, 0.1680, 0.7915, 0.4725, 0.0541, + 0.5171, 0.7866, 0.5141, 0.9219, 0.3571, 0.4144, 0.4267, + 0.4197, 0.3179, 0.7666, 0.4105, 0.8278, 0.0931, 0.4260, + 0.1653, 0.7664, 0.4711, 0.4836, 0.7452, 0.2694, 0.4619, + 0.9855, 0.1263, 0.3199, 0.2967, 0.0623, 0.0352, 0.6075, + 0.3481, 0.3103, 0.2426, 0.5666, 0.6000, 0.8346, 0.2513, + 0.5969, 0.6719, 0.2889, 0.9800, 0.2658, 0.9419, 0.3988, + 0.9431, 0.2828, 0.1212, 0.9124, 0.5436, 0.9395, 0.5665, + 0.1457, 0.2993, 0.0088, 0.6849, 0.2646, 0.6046, 0.7091, + 0.5640, 0.2001, 0.1909, 0.4755, 0.8515, 0.8336, 0.4361, + 0.5450, 0.9247, 0.8869, 0.1052, 0.4689, 0.9392, 0.3462, + 0.6900, 0.9248, 0.0537, 0.3782, 0.1908, 0.5049, 0.4270, + 0.3395, 0.2842, 0.4865, 0.5856, 0.0222, 0.9926, 0.9730, + 0.0817, 0.0675, 0.4517, 0.1263, 0.8838, 0.8783, 0.4115, + 0.2525, 0.4711, 0.5641, 0.0227, 0.7361, 0.4045, 0.5903, + 0.2142, 0.7279, 0.3643, 0.9899, 0.9122, 0.0894, 0.3638, + 0.0428, 0.0807, 0.4574, 0.9185, 0.3067, 0.5898, 0.6322, + 0.7618, 0.2611, 0.0179, 0.6498, 0.5482, 0.5843, 0.9259, + 0.0342, 0.3278, 0.5209, 0.5626, 0.7217, 0.7630, 0.4717, + 0.1226, 0.7583, 0.4247, 0.3745, 0.3878, 0.5098, 0.9031, + 0.2063, 0.8497, 0.7373, 0.0304, 0.2596, 0.6167, 0.1262, + 0.2780, 0.2264, 0.0850, 0.2639, 0.0474, 0.6024, 0.5272, + 0.4037, 0.6045, 0.4000, 0.9516, 0.0312, 0.9304, 0.3896, + 0.8359, 0.4345, 0.2206, 0.2536, 0.0078, 0.1233, 0.2517, + 0.5320, 0.2096, 0.0542, 0.5649, 0.9250, 0.1789, 0.8673, + 0.8596, 0.8670, 0.0768, 0.6596, 0.6105, 0.4560, 0.7780, + 0.7446, 0.8923, 0.6887, 0.3979, 0.6499, 0.2418, 0.6771, + 0.1069, 0.0476, 0.2795, 0.7506, 0.8123, 0.3477, 0.9466, + 0.4612, 0.8765, 0.5407, 0.1132, 0.2169, 0.4562, 0.3486, + 0.2932, 0.4447, 0.9977, 0.4921, 0.6346, 0.2716, 0.3191, + 0.6753, 0.2344, 0.3287, 0.2551, 0.0432, 0.5714, 0.6748, + 0.2214, 0.0381, 0.0833, 0.8212, 0.5324, 0.1167, 0.1493, + 0.1853, 0.1389, 0.8453, 0.3733, 0.2821, 0.4216, 0.5246, + 0.1333, 0.6029, 0.6290, 0.7883, 0.9594, 0.8854, 0.7485, + 0.3870, 0.1067, 0.1966, 0.4271, 0.7628, 0.8373, 0.3200, + 0.1845, 0.8313, 0.0457, 0.2017, 0.6754, 0.2106, 0.0419, + 0.5727, 0.0987, 0.1401, 0.7601, 0.0706, 0.7073, 0.7539, + 0.5117, 0.2418, 0.0494, 0.1620, 0.5325, 0.5103, 0.4603, + 0.3475, 0.6883, 0.2514, 0.1698, 0.0632, 0.4282, 0.4644, + 0.4507, 0.9851, 0.0060, 0.1329, 0.3982, 0.1040, 0.4201, + 0.1874, 0.8353, 0.2741, 0.6136, 0.9434, 0.6324, 0.4589, + 0.2868, 0.4690, 0.3524, 0.0356, 0.5431, 0.4156, 0.1175, + 0.2385, 0.4442, 0.6899, 0.6014, 0.5447, 0.3774, 0.9261, + 0.8417, 0.7853, 0.0420, 0.9714, 0.3316, 0.7012, 0.3391, + 0.2711, 0.0862, 0.5467, 0.7750, 0.9744, 0.2605, 0.3449, + 0.9459, 0.1400, 0.6033, 0.0513, 0.0538, 0.7436, 0.3939, + 0.7581, 0.8160, 0.6601, 0.5367, 0.7078, 0.8727, 0.3365, + 0.1655, 0.3867, 0.7308, 0.0489, 0.0998, 0.1735, 0.9075, + 0.0635, 0.6241, 0.6404, 0.5116, 0.5405, 0.3342, 0.5746, + 0.1325, 0.7165, 0.7555, 0.3742, 0.3287, 0.4662, 0.9798, + 0.7566, 0.4785, 0.7101, 0.3347, 0.6637, 0.4961, 0.7390, + 0.0086, 0.0799, 0.1370, 0.7191, 0.6408, 0.9380, 0.9514, + 0.7180, 0.3022, 0.0085, 0.2363, 0.1710, 0.0619, 0.3727, + 0.9494, 0.0484, 0.2475, 0.5328, 0.3577, 0.7509, 0.7576, + 0.6142, 0.1244, 0.1672, 0.1609, 0.9942, 0.8837, 0.9627, + 0.4728, 0.4490, 0.3722, 0.5474, 0.9561, 0.0191, 0.0116, + 0.0409, 0.5484, 0.0722, 0.3519, 0.0360, 0.4568, 0.9589, + 0.1275, 0.8380, 0.5898, 0.7968, 0.6287, 0.3775, 0.3057, + 0.4428, 0.9063, 0.9362, 0.6310, 0.2828, 0.6381, 0.1642, + 0.1973, 0.3656, 
0.0240, 0.0025, 0.3395, 0.3695, 0.8744, + 0.9097, 0.7335, 0.2404, 0.0369, 0.6290, 0.1712, 0.3940, + 0.3991, 0.5322, 0.9830, 0.7075, 0.4980, 0.3804, 0.4277, + 0.4732, 0.3721, 0.2580, 0.7335, 0.2379, 0.0923, 0.4525, + 0.9507, 0.1057, 0.7727, 0.1758, 0.7368, 0.0289, 0.1710, + 0.2037, 0.6810, 0.9099, 0.7890, 0.0406, 0.0261, 0.9874, + 0.9915, 0.3904, 0.8019, 0.2435, 0.3811, 0.1826, 0.1831, + 0.8831, 0.1668, 0.0829, 0.3309, 0.0780, 0.1112, 0.8197, + 0.0271, 0.4877, 0.3209, 0.2643, 0.2855, 0.3654, 0.4651, + 0.9876, 0.5843, 0.6127, 0.0052, 0.3528, 0.5656]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4976, 0.0481, 0.1913, ..., 0.0301, 0.3766, 0.0826]) +tensor([0.8355, 0.8843, 0.0720, ..., 0.0094, 0.5931, 0.4853]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1295,13 +1026,282 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.560847282409668 seconds +Time: 10.35036301612854 seconds -[20.4, 20.16, 20.52, 20.6, 20.92, 20.92, 20.96, 20.76, 20.68, 20.44] -[20.4, 20.52, 20.92, 23.12, 24.52, 26.12, 26.6, 26.16, 24.72, 24.72, 23.36, 23.32, 23.4, 23.52] -14.230781316757202 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 145476, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.560847282409668, 'TIME_S_1KI': 0.07259511728676668, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.8679765701293, 'W': 22.125839021878733} -[20.4, 20.16, 20.52, 20.6, 20.92, 20.92, 20.96, 20.76, 20.68, 20.44, 20.2, 20.44, 20.48, 20.56, 20.52, 20.48, 20.4, 20.4, 20.4, 20.4] -369.9200000000001 -18.496000000000002 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 145476, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.560847282409668, 'TIME_S_1KI': 0.07259511728676668, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.8679765701293, 'W': 22.125839021878733, 'J_1KI': 2.1643980902013342, 'W_1KI': 0.15209270960075016, 'W_D': 3.6298390218787304, 'J_D': 51.65544533538807, 'W_D_1KI': 0.024951462934633413, 'J_D_1KI': 0.00017151600906426773} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([4697, 7166, 5061, 5588, 16, 2135, 4356, 6963, 1471, + 6448, 7291, 4803, 9505, 5043, 277, 2464, 2086, 1290, + 3678, 4268, 7790, 7161, 9391, 6870, 1104, 4318, 4328, + 1307, 8762, 527, 1408, 291, 3379, 2223, 1854, 5746, + 7689, 3155, 2599, 5985, 562, 7105, 2363, 1336, 2354, + 9722, 6938, 4604, 544, 4480, 6924, 756, 8519, 2161, + 2844, 3795, 2249, 2838, 3737, 2502, 2121, 8082, 770, + 4029, 3355, 9855, 1073, 6942, 4266, 2582, 9197, 4023, + 3337, 9541, 584, 3520, 718, 2188, 91, 1677, 7180, + 592, 5441, 4364, 2632, 5143, 3577, 379, 912, 6391, + 6876, 2862, 4814, 5828, 8125, 8381, 5895, 5542, 654, + 1163, 551, 2472, 3859, 1488, 8338, 6429, 945, 8843, + 6221, 7264, 1584, 1135, 5029, 8739, 5361, 306, 3702, + 75, 8678, 7822, 7516, 3077, 8719, 1293, 8302, 407, + 6441, 7874, 8592, 726, 7436, 8339, 9066, 5519, 1742, + 3358, 7547, 6659, 1182, 3189, 5263, 1911, 3620, 557, + 7335, 5792, 2274, 5567, 8946, 6704, 9355, 8223, 1415, + 5086, 1888, 4346, 3256, 8462, 3487, 4165, 7526, 5764, + 4191, 3013, 5959, 5517, 5792, 6426, 3961, 8148, 8232, + 8407, 9509, 5897, 2897, 660, 586, 9313, 2281, 2249, + 4092, 7785, 3447, 7078, 7930, 4221, 8471, 7519, 806, + 5956, 8872, 8481, 2636, 4425, 4719, 791, 1030, 6174, + 9293, 1990, 1045, 151, 5089, 8392, 1036, 4003, 1497, + 5244, 39, 2275, 7530, 5261, 6843, 988, 2126, 982, + 9083, 8042, 1348, 6681, 3802, 2121, 7115, 1054, 2435, + 7205, 9552, 8690, 5114, 9476, 5717, 2408, 8184, 7824, + 9313, 3761, 2873, 9785, 7938, 2945, 6900, 5518, 4488, + 7101, 1388, 4667, 8374, 9455, 8857, 1793, 5987, 9401, + 9386, 1164, 9305, 3791, 9871, 3319, 6660, 1072, 7802, + 181, 5603, 275, 5311, 3636, 7068, 8257, 1049, 2594, + 5178, 5931, 8597, 4244, 1, 4929, 2430, 2753, 6854, + 19, 6445, 6179, 7023, 415, 455, 4640, 6197, 565, + 5244, 3724, 3740, 3737, 6799, 2854, 1528, 2703, 6907, + 1692, 7223, 5568, 8391, 903, 5562, 3692, 13, 9742, + 6760, 9116, 8219, 6225, 6872, 7491, 1948, 8894, 6951, + 6701, 3888, 177, 1145, 6019, 2062, 5666, 4204, 263, + 8520, 9857, 4, 3328, 5003, 3936, 9338, 1578, 4530, + 8595, 8915, 7184, 6826, 4275, 5111, 5759, 194, 4188, + 5918, 277, 7106, 8782, 8414, 7949, 7170, 1639, 62, + 6818, 7225, 5182, 7488, 4554, 6393, 9730, 3220, 5838, + 7300, 7246, 5851, 8938, 6887, 9953, 2555, 8583, 4094, + 4848, 6356, 2647, 4351, 2030, 9870, 213, 3914, 1007, + 1771, 3262, 7381, 1220, 9636, 4597, 4571, 6196, 3920, + 7343, 3690, 9563, 876, 8237, 3502, 2169, 2666, 3964, + 1868, 3662, 6173, 2861, 3009, 2617, 1520, 1969, 9945, + 2217, 216, 3405, 2727, 7331, 5516, 3313, 8867, 1398, + 9333, 7370, 890, 431, 1265, 5794, 8592, 2943, 4729, + 2449, 7705, 1592, 7102, 105, 7044, 8103, 6453, 8235, + 1866, 1739, 9559, 8080, 2393, 7927, 9771, 9848, 7439, + 606, 3851, 9329, 3911, 3040, 9978, 1385, 6935, 3193, + 923, 5265, 6459, 7864, 421, 7866, 9885, 8293, 5188, + 9844, 2097, 4729, 8319, 3571, 7741, 768, 7514, 9274, + 8040, 5362, 3316, 5984, 1691, 6189, 179, 1960, 2418, + 2380, 818, 4105, 5449, 3011, 6572, 8379, 4064, 2221, + 3990, 3608, 8595, 8812, 7798, 942, 5137, 3586, 3751, + 7609, 6584, 6940, 8755, 3038, 8349, 6614, 9352, 6535, + 5672, 7318, 1183, 2235, 6156, 2771, 3010, 6933, 6284, + 4329, 9589, 5099, 5335, 4662, 1484, 6974, 7662, 1907, + 4536, 4157, 6097, 8339, 2664, 9437, 6400, 5198, 5295, + 3450, 1767, 5829, 4204, 5317, 990, 7113, 982, 1601, + 2203, 9050, 8231, 6364, 2072, 7833, 1626, 2611, 6349, + 5277, 6459, 3614, 8542, 5678, 2063, 8267, 8075, 6328, + 8771, 8386, 
3488, 8238, 7698, 4839, 7903, 9478, 8118, + 5907, 2929, 543, 2445, 3729, 5852, 1085, 7448, 256, + 3524, 6094, 1707, 6414, 2192, 7583, 281, 9497, 5621, + 3582, 357, 875, 1889, 4366, 111, 4286, 5521, 4390, + 3595, 8641, 3019, 6735, 185, 9620, 3018, 7380, 6427, + 1431, 3365, 2286, 6767, 5235, 167, 5980, 5058, 9416, + 457, 7842, 4375, 7558, 8255, 3159, 7486, 8525, 7368, + 3900, 1742, 5818, 9119, 216, 3787, 1563, 4456, 3168, + 1714, 7483, 4349, 5644, 7353, 8821, 5300, 7350, 2124, + 5380, 7389, 3502, 2051, 2871, 403, 8987, 1586, 2339, + 4954, 8779, 5254, 4481, 2374, 9632, 2469, 7763, 7531, + 9749, 6006, 5879, 241, 7108, 5803, 1919, 7453, 3908, + 5973, 8400, 7594, 9580, 3496, 5265, 3448, 6072, 7019, + 1631, 5492, 6815, 7914, 7129, 4280, 3139, 7265, 9921, + 6177, 227, 9221, 9352, 1035, 3449, 5135, 5318, 7434, + 9956, 1960, 6678, 1670, 3095, 2436, 8385, 1344, 7536, + 3642, 9326, 4412, 945, 8602, 9960, 5698, 1538, 7613, + 5248, 2831, 769, 1902, 8574, 7714, 2715, 2006, 3548, + 5972, 1183, 1596, 7102, 1942, 3782, 2597, 7787, 8812, + 9823, 9943, 414, 3922, 9450, 2232, 7133, 5729, 7806, + 4545, 4241, 5058, 2578, 1957, 7221, 3913, 747, 591, + 2874, 1028, 5677, 8246, 6676, 2445, 6604, 3128, 5240, + 4373, 3918, 412, 4926, 7491, 3044, 9462, 1663, 9367, + 3323, 1540, 6270, 7647, 3406, 7353, 669, 2696, 630, + 2206, 3019, 9522, 4546, 4195, 9321, 4046, 7269, 552, + 9485, 436, 8075, 3796, 6400, 1002, 1708, 5611, 4150, + 8260, 9883, 5484, 4309, 5729, 5444, 9315, 9791, 2575, + 21, 5939, 7374, 7460, 9398, 7401, 2483, 6104, 7336, + 8951, 691, 4859, 9986, 7873, 784, 1614, 8961, 4257, + 3142, 491, 1414, 2894, 808, 5257, 2024, 1463, 2984, + 7475, 5506, 575, 3884, 1043, 668, 4232, 1125, 1743, + 3099, 140, 7426, 4914, 5008, 2730, 2533, 2149, 9150, + 8519, 5321, 6271, 1325, 7804, 5386, 8836, 2279, 1472, + 9600, 7373, 4852, 8986, 2120, 342, 2579, 7341, 7044, + 8551, 827, 1384, 9090, 7444, 9174, 9425, 3649, 7168, + 574, 4050, 3751, 4862, 3962, 5757, 6505, 9944, 2188, + 6405, 8366, 1403, 9207, 6574, 5873, 2922, 6905, 9658, + 3370, 6019, 3256, 9137, 4856, 8275, 8950, 703, 7183, + 3407, 876, 615, 9393, 4885, 9969, 8268, 8745, 347, + 965, 6224, 5082, 9201, 1428, 1111, 7082, 1555, 1826, + 4007, 6138, 232, 1467, 3782, 6875, 7892, 8057, 7561, + 6852, 2513, 9099, 5320, 6933, 9825, 1800, 5055, 79, + 1663, 5080, 8451, 7032, 589, 6114, 9378, 9981, 2461, + 5754, 2549, 4881, 8389, 336, 5068, 9716, 5623, 5220, + 780, 4178, 8944, 8996, 4259, 5611, 7223, 559, 4236, + 3072, 6971, 2748, 6288, 3209, 8683, 808, 7949, 3106, + 7493, 683, 8652, 520, 1346, 9307, 2626, 4801, 6474, + 1886, 5674, 4177, 7853, 5033, 2575, 8954, 2573, 2504, + 7279, 822, 9405, 6370, 4013, 515, 4766, 4745, 4190, + 2478]), + values=tensor([0.3702, 0.7345, 0.6826, 0.7317, 0.9145, 0.4627, 0.9182, + 0.3102, 0.9769, 0.3102, 0.7105, 0.5020, 0.2193, 0.6551, + 0.9847, 0.7108, 0.2795, 0.0577, 0.0636, 0.0958, 0.3394, + 0.7039, 0.6050, 0.4109, 0.6582, 0.9148, 0.5523, 0.8511, + 0.8664, 0.2784, 0.7959, 0.7882, 0.2125, 0.8703, 0.7766, + 0.6655, 0.0159, 0.0276, 0.8728, 0.5178, 0.1993, 0.7845, + 0.0622, 0.7651, 0.8345, 0.7501, 0.4233, 0.7647, 0.8375, + 0.9567, 0.6640, 0.0925, 0.5011, 0.4186, 0.2305, 0.1209, + 0.3675, 0.7229, 0.4434, 0.7536, 0.8675, 0.9273, 0.9000, + 0.5859, 0.7991, 0.6617, 0.7567, 0.6604, 0.3565, 0.8418, + 0.2492, 0.3778, 0.6739, 0.0841, 0.8716, 0.0141, 0.4204, + 0.6631, 0.2720, 0.2107, 0.9940, 0.9248, 0.6300, 0.0939, + 0.3527, 0.0997, 0.7059, 0.7313, 0.5081, 0.9516, 0.3249, + 0.2754, 0.3468, 0.6433, 0.2963, 0.4800, 0.6656, 0.3843, + 0.8499, 0.9066, 0.6688, 0.3822, 
0.2357, 0.9725, 0.0942, + 0.6006, 0.3470, 0.5653, 0.2485, 0.6372, 0.9438, 0.3322, + 0.5338, 0.5472, 0.6636, 0.8549, 0.6282, 0.8324, 0.4419, + 0.7712, 0.8225, 0.7963, 0.1885, 0.3427, 0.1840, 0.2347, + 0.3586, 0.2267, 0.3582, 0.0502, 0.2847, 0.0839, 0.4980, + 0.5720, 0.8244, 0.5548, 0.1585, 0.6117, 0.5615, 0.1633, + 0.4852, 0.7591, 0.6312, 0.0304, 0.0115, 0.4654, 0.7039, + 0.0731, 0.0469, 0.9080, 0.8957, 0.8995, 0.5979, 0.0627, + 0.3616, 0.6156, 0.4437, 0.6334, 0.5153, 0.3494, 0.3764, + 0.7916, 0.2639, 0.1979, 0.8023, 0.2074, 0.6444, 0.3220, + 0.2499, 0.5444, 0.5863, 0.7301, 0.7425, 0.8764, 0.7607, + 0.6959, 0.7554, 0.5870, 0.4559, 0.2950, 0.7498, 0.5176, + 0.5120, 0.6993, 0.4963, 0.1680, 0.5563, 0.5449, 0.3519, + 0.0066, 0.3301, 0.4540, 0.4376, 0.9639, 0.5641, 0.5537, + 0.4402, 0.8707, 0.3104, 0.0956, 0.0155, 0.3271, 0.3330, + 0.7348, 0.9656, 0.2688, 0.9458, 0.9582, 0.1176, 0.1584, + 0.7224, 0.7984, 0.3026, 0.3236, 0.2042, 0.4762, 0.3403, + 0.5459, 0.2082, 0.8081, 0.1694, 0.4823, 0.8493, 0.6452, + 0.7315, 0.2215, 0.0742, 0.7020, 0.7765, 0.4754, 0.6994, + 0.9915, 0.6092, 0.9429, 0.7011, 0.7557, 0.5936, 0.3090, + 0.9360, 0.4815, 0.8630, 0.0761, 0.2189, 0.7047, 0.3294, + 0.8754, 0.6163, 0.0488, 0.6185, 0.3304, 0.5333, 0.4623, + 0.9880, 0.5947, 0.4723, 0.6939, 0.1686, 0.4334, 0.7793, + 0.9469, 0.2092, 0.8309, 0.2025, 0.9665, 0.7773, 0.2745, + 0.3283, 0.9296, 0.4085, 0.1317, 0.7192, 0.9151, 0.7353, + 0.1923, 0.0083, 0.9296, 0.4358, 0.9813, 0.4807, 0.4300, + 0.4265, 0.2061, 0.0956, 0.3277, 0.4200, 0.0172, 0.9850, + 0.0293, 0.0540, 0.4821, 0.8662, 0.6567, 0.1402, 0.0878, + 0.7397, 0.0970, 0.5083, 0.8546, 0.5348, 0.5054, 0.9112, + 0.7615, 0.7712, 0.5950, 0.3213, 0.6891, 0.1245, 0.9648, + 0.7567, 0.8608, 0.0447, 0.4044, 0.0727, 0.2904, 0.5760, + 0.0299, 0.5736, 0.8328, 0.2022, 0.0552, 0.1688, 0.5362, + 0.2555, 0.9247, 0.5106, 0.9483, 0.3127, 0.9420, 0.9133, + 0.9511, 0.9206, 0.0691, 0.4223, 0.0884, 0.8678, 0.9136, + 0.6899, 0.4196, 0.8049, 0.3845, 0.7849, 0.7494, 0.3493, + 0.4705, 0.9510, 0.5380, 0.4472, 0.5142, 0.2078, 0.4678, + 0.6305, 0.3763, 0.9058, 0.4229, 0.2960, 0.8140, 0.9217, + 0.7544, 0.2774, 0.2173, 0.2528, 0.8161, 0.2436, 0.5060, + 0.1386, 0.0998, 0.0274, 0.5298, 0.7264, 0.7633, 0.2756, + 0.3740, 0.5586, 0.1052, 0.8699, 0.6931, 0.8838, 0.0909, + 0.3013, 0.4689, 0.7725, 0.9619, 0.6939, 0.5267, 0.4246, + 0.4545, 0.9840, 0.3185, 0.5327, 0.6481, 0.9211, 0.8926, + 0.9382, 0.0907, 0.6506, 0.8946, 0.6039, 0.4161, 0.1871, + 0.1562, 0.6607, 0.1406, 0.0681, 0.4142, 0.9674, 0.8903, + 0.0812, 0.4035, 0.3474, 0.1792, 0.3996, 0.5828, 0.7523, + 0.7441, 0.4798, 0.8895, 0.2256, 0.1005, 0.5124, 0.8363, + 0.5373, 0.0602, 0.9539, 0.6347, 0.7295, 0.5553, 0.7356, + 0.2765, 0.4046, 0.2894, 0.2378, 0.0906, 0.1991, 0.7791, + 0.2130, 0.3761, 0.0030, 0.1246, 0.3022, 0.6047, 0.3588, + 0.4770, 0.4184, 0.1301, 0.7836, 0.7623, 0.0285, 0.6739, + 0.1521, 0.6648, 0.7060, 0.0412, 0.5957, 0.5624, 0.7317, + 0.4113, 0.1090, 0.2314, 0.3418, 0.0085, 0.6108, 0.2004, + 0.5023, 0.8270, 0.0196, 0.8551, 0.6085, 0.5507, 0.8094, + 0.6719, 0.4063, 0.4349, 0.7709, 0.2693, 0.9615, 0.8472, + 0.7428, 0.1841, 0.8906, 0.0929, 0.7519, 0.5412, 0.5132, + 0.3513, 0.7644, 0.7524, 0.9962, 0.4799, 0.4440, 0.3911, + 0.2358, 0.1587, 0.2872, 0.6304, 0.2955, 0.7964, 0.5753, + 0.6496, 0.7159, 0.6989, 0.6410, 0.5193, 0.6679, 0.9569, + 0.4824, 0.7854, 0.4632, 0.8506, 0.7043, 0.6992, 0.9478, + 0.3157, 0.9262, 0.1433, 0.3194, 0.5697, 0.4453, 0.2121, + 0.4918, 0.0088, 0.2532, 0.1680, 0.7915, 0.4725, 0.0541, + 0.5171, 0.7866, 0.5141, 0.9219, 0.3571, 0.4144, 
0.4267, + 0.4197, 0.3179, 0.7666, 0.4105, 0.8278, 0.0931, 0.4260, + 0.1653, 0.7664, 0.4711, 0.4836, 0.7452, 0.2694, 0.4619, + 0.9855, 0.1263, 0.3199, 0.2967, 0.0623, 0.0352, 0.6075, + 0.3481, 0.3103, 0.2426, 0.5666, 0.6000, 0.8346, 0.2513, + 0.5969, 0.6719, 0.2889, 0.9800, 0.2658, 0.9419, 0.3988, + 0.9431, 0.2828, 0.1212, 0.9124, 0.5436, 0.9395, 0.5665, + 0.1457, 0.2993, 0.0088, 0.6849, 0.2646, 0.6046, 0.7091, + 0.5640, 0.2001, 0.1909, 0.4755, 0.8515, 0.8336, 0.4361, + 0.5450, 0.9247, 0.8869, 0.1052, 0.4689, 0.9392, 0.3462, + 0.6900, 0.9248, 0.0537, 0.3782, 0.1908, 0.5049, 0.4270, + 0.3395, 0.2842, 0.4865, 0.5856, 0.0222, 0.9926, 0.9730, + 0.0817, 0.0675, 0.4517, 0.1263, 0.8838, 0.8783, 0.4115, + 0.2525, 0.4711, 0.5641, 0.0227, 0.7361, 0.4045, 0.5903, + 0.2142, 0.7279, 0.3643, 0.9899, 0.9122, 0.0894, 0.3638, + 0.0428, 0.0807, 0.4574, 0.9185, 0.3067, 0.5898, 0.6322, + 0.7618, 0.2611, 0.0179, 0.6498, 0.5482, 0.5843, 0.9259, + 0.0342, 0.3278, 0.5209, 0.5626, 0.7217, 0.7630, 0.4717, + 0.1226, 0.7583, 0.4247, 0.3745, 0.3878, 0.5098, 0.9031, + 0.2063, 0.8497, 0.7373, 0.0304, 0.2596, 0.6167, 0.1262, + 0.2780, 0.2264, 0.0850, 0.2639, 0.0474, 0.6024, 0.5272, + 0.4037, 0.6045, 0.4000, 0.9516, 0.0312, 0.9304, 0.3896, + 0.8359, 0.4345, 0.2206, 0.2536, 0.0078, 0.1233, 0.2517, + 0.5320, 0.2096, 0.0542, 0.5649, 0.9250, 0.1789, 0.8673, + 0.8596, 0.8670, 0.0768, 0.6596, 0.6105, 0.4560, 0.7780, + 0.7446, 0.8923, 0.6887, 0.3979, 0.6499, 0.2418, 0.6771, + 0.1069, 0.0476, 0.2795, 0.7506, 0.8123, 0.3477, 0.9466, + 0.4612, 0.8765, 0.5407, 0.1132, 0.2169, 0.4562, 0.3486, + 0.2932, 0.4447, 0.9977, 0.4921, 0.6346, 0.2716, 0.3191, + 0.6753, 0.2344, 0.3287, 0.2551, 0.0432, 0.5714, 0.6748, + 0.2214, 0.0381, 0.0833, 0.8212, 0.5324, 0.1167, 0.1493, + 0.1853, 0.1389, 0.8453, 0.3733, 0.2821, 0.4216, 0.5246, + 0.1333, 0.6029, 0.6290, 0.7883, 0.9594, 0.8854, 0.7485, + 0.3870, 0.1067, 0.1966, 0.4271, 0.7628, 0.8373, 0.3200, + 0.1845, 0.8313, 0.0457, 0.2017, 0.6754, 0.2106, 0.0419, + 0.5727, 0.0987, 0.1401, 0.7601, 0.0706, 0.7073, 0.7539, + 0.5117, 0.2418, 0.0494, 0.1620, 0.5325, 0.5103, 0.4603, + 0.3475, 0.6883, 0.2514, 0.1698, 0.0632, 0.4282, 0.4644, + 0.4507, 0.9851, 0.0060, 0.1329, 0.3982, 0.1040, 0.4201, + 0.1874, 0.8353, 0.2741, 0.6136, 0.9434, 0.6324, 0.4589, + 0.2868, 0.4690, 0.3524, 0.0356, 0.5431, 0.4156, 0.1175, + 0.2385, 0.4442, 0.6899, 0.6014, 0.5447, 0.3774, 0.9261, + 0.8417, 0.7853, 0.0420, 0.9714, 0.3316, 0.7012, 0.3391, + 0.2711, 0.0862, 0.5467, 0.7750, 0.9744, 0.2605, 0.3449, + 0.9459, 0.1400, 0.6033, 0.0513, 0.0538, 0.7436, 0.3939, + 0.7581, 0.8160, 0.6601, 0.5367, 0.7078, 0.8727, 0.3365, + 0.1655, 0.3867, 0.7308, 0.0489, 0.0998, 0.1735, 0.9075, + 0.0635, 0.6241, 0.6404, 0.5116, 0.5405, 0.3342, 0.5746, + 0.1325, 0.7165, 0.7555, 0.3742, 0.3287, 0.4662, 0.9798, + 0.7566, 0.4785, 0.7101, 0.3347, 0.6637, 0.4961, 0.7390, + 0.0086, 0.0799, 0.1370, 0.7191, 0.6408, 0.9380, 0.9514, + 0.7180, 0.3022, 0.0085, 0.2363, 0.1710, 0.0619, 0.3727, + 0.9494, 0.0484, 0.2475, 0.5328, 0.3577, 0.7509, 0.7576, + 0.6142, 0.1244, 0.1672, 0.1609, 0.9942, 0.8837, 0.9627, + 0.4728, 0.4490, 0.3722, 0.5474, 0.9561, 0.0191, 0.0116, + 0.0409, 0.5484, 0.0722, 0.3519, 0.0360, 0.4568, 0.9589, + 0.1275, 0.8380, 0.5898, 0.7968, 0.6287, 0.3775, 0.3057, + 0.4428, 0.9063, 0.9362, 0.6310, 0.2828, 0.6381, 0.1642, + 0.1973, 0.3656, 0.0240, 0.0025, 0.3395, 0.3695, 0.8744, + 0.9097, 0.7335, 0.2404, 0.0369, 0.6290, 0.1712, 0.3940, + 0.3991, 0.5322, 0.9830, 0.7075, 0.4980, 0.3804, 0.4277, + 0.4732, 0.3721, 0.2580, 0.7335, 0.2379, 0.0923, 0.4525, + 
0.9507, 0.1057, 0.7727, 0.1758, 0.7368, 0.0289, 0.1710, + 0.2037, 0.6810, 0.9099, 0.7890, 0.0406, 0.0261, 0.9874, + 0.9915, 0.3904, 0.8019, 0.2435, 0.3811, 0.1826, 0.1831, + 0.8831, 0.1668, 0.0829, 0.3309, 0.0780, 0.1112, 0.8197, + 0.0271, 0.4877, 0.3209, 0.2643, 0.2855, 0.3654, 0.4651, + 0.9876, 0.5843, 0.6127, 0.0052, 0.3528, 0.5656]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8355, 0.8843, 0.0720, ..., 0.0094, 0.5931, 0.4853]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.35036301612854 seconds + +[20.52, 20.56, 20.76, 20.76, 20.48, 20.44, 20.32, 20.56, 20.68, 20.96] +[20.96, 20.6, 21.04, 25.8, 27.56, 28.4, 28.4, 29.0, 26.08, 24.84, 23.8, 23.52, 23.6, 23.72] +14.237599849700928 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 138585, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.35036301612854, 'TIME_S_1KI': 0.07468602674263838, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 330.615868434906, 'W': 23.22132044200209} +[20.52, 20.56, 20.76, 20.76, 20.48, 20.44, 20.32, 20.56, 20.68, 20.96, 20.52, 20.56, 20.76, 20.52, 20.68, 20.6, 20.48, 20.48, 20.2, 20.08] +369.88 +18.494 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 138585, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.35036301612854, 'TIME_S_1KI': 0.07468602674263838, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 330.615868434906, 'W': 23.22132044200209, 'J_1KI': 2.385654063822968, 'W_1KI': 0.16756012874410714, 'W_D': 4.727320442002089, 'J_D': 67.30569681453706, 'W_D_1KI': 0.034111342800462456, 'J_D_1KI': 0.0002461402229711906} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_5e-05.json index 2bcb239..e6d8cff 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 52342, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.44128155708313, "TIME_S_1KI": 0.19948189899283808, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 334.7413140869141, "W": 23.567352524316387, "J_1KI": 6.395271752835469, "W_1KI": 0.4502570120422679, "W_D": 3.4093525243163825, "J_D": 48.42508902931206, "W_D_1KI": 0.06513607665577133, "J_D_1KI": 0.0012444323231013588} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 51464, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.445167779922485, "TIME_S_1KI": 0.20296066726104628, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 314.6867702293396, "W": 22.03824534762116, "J_1KI": 6.114697074252673, "W_1KI": 0.4282264368805604, "W_D": 3.5672453476211636, "J_D": 50.937127677440664, "W_D_1KI": 0.06931535340473269, "J_D_1KI": 0.0013468706941693746} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_5e-05.output 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_5e-05.output index 0a68cee..abbe8e8 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.027875900268554688} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.02733898162841797} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), - col_indices=tensor([4151, 7566, 61, ..., 1923, 6890, 8738]), - values=tensor([0.6199, 0.1524, 0.8589, ..., 0.4429, 0.5764, 0.1533]), + col_indices=tensor([9723, 8611, 8438, ..., 5624, 673, 6841]), + values=tensor([0.4875, 0.3962, 0.0358, ..., 0.4364, 0.5339, 0.1666]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.5335, 0.6247, 0.4039, ..., 0.6064, 0.4993, 0.4017]) +tensor([0.8431, 0.8974, 0.5076, ..., 0.9535, 0.3028, 0.2501]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.027875900268554688 seconds +Time: 0.02733898162841797 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 37666 -ss 10000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.555848598480225} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 38406 -ss 10000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.835801124572754} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 5000, 5000]), - col_indices=tensor([4832, 7617, 3198, ..., 2337, 8239, 2535]), - values=tensor([0.0012, 0.2497, 0.5477, ..., 0.0331, 0.3343, 0.4565]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 5000, 5000]), + col_indices=tensor([5082, 2494, 7957, ..., 5477, 1429, 2123]), + values=tensor([0.1927, 0.5682, 0.3872, ..., 0.1095, 0.5904, 0.1329]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.1414, 0.6293, 0.2915, ..., 0.6179, 0.0556, 0.9688]) +tensor([0.1556, 0.9461, 0.0409, ..., 0.3172, 0.5821, 0.0029]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 7.555848598480225 seconds +Time: 7.835801124572754 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 52342 -ss 10000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.44128155708313} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 51464 -ss 10000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.445167779922485} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 4999, 5000, 5000]), - col_indices=tensor([6929, 6481, 6208, ..., 5185, 5914, 4436]), - values=tensor([0.2292, 0.3731, 0.2148, ..., 0.4978, 0.6385, 0.1071]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([1626, 9635, 3828, ..., 5045, 7904, 9593]), + values=tensor([0.2200, 0.6966, 0.2889, ..., 0.9700, 0.2141, 0.3368]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.7171, 0.2412, 0.9457, ..., 0.4356, 0.0163, 0.8101]) +tensor([0.8545, 0.5671, 0.0230, ..., 0.8560, 0.7069, 0.3058]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.44128155708313 seconds +Time: 10.445167779922485 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 4999, 5000, 5000]), - col_indices=tensor([6929, 6481, 6208, ..., 5185, 5914, 4436]), - values=tensor([0.2292, 0.3731, 0.2148, ..., 0.4978, 0.6385, 0.1071]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([1626, 9635, 3828, ..., 5045, 7904, 9593]), + values=tensor([0.2200, 0.6966, 0.2889, ..., 0.9700, 0.2141, 0.3368]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.7171, 0.2412, 0.9457, ..., 0.4356, 0.0163, 0.8101]) +tensor([0.8545, 0.5671, 0.0230, ..., 0.8560, 0.7069, 0.3058]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.44128155708313 seconds +Time: 10.445167779922485 seconds -[25.48, 25.4, 25.4, 24.8, 24.68, 24.36, 24.4, 24.16, 24.24, 23.68] -[23.36, 22.52, 25.48, 26.0, 27.84, 27.84, 27.8, 28.32, 24.88, 24.76, 23.72, 23.52, 23.72, 23.68] -14.20360279083252 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 52342, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.44128155708313, 'TIME_S_1KI': 0.19948189899283808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 334.7413140869141, 'W': 23.567352524316387} -[25.48, 25.4, 25.4, 24.8, 24.68, 24.36, 24.4, 24.16, 24.24, 23.68, 20.0, 19.92, 19.76, 20.2, 20.12, 20.12, 20.36, 20.24, 20.24, 20.36] -403.1600000000001 -20.158000000000005 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 52342, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.44128155708313, 'TIME_S_1KI': 0.19948189899283808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 334.7413140869141, 'W': 23.567352524316387, 'J_1KI': 6.395271752835469, 'W_1KI': 0.4502570120422679, 'W_D': 3.4093525243163825, 'J_D': 48.42508902931206, 'W_D_1KI': 0.06513607665577133, 'J_D_1KI': 0.0012444323231013588} +[20.64, 20.72, 20.64, 20.8, 20.8, 20.76, 20.6, 20.64, 20.4, 20.16] +[20.28, 20.2, 20.6, 21.76, 24.52, 25.28, 26.2, 25.88, 25.88, 25.72, 23.64, 23.4, 23.36, 23.24] +14.279120922088623 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 51464, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.445167779922485, 'TIME_S_1KI': 0.20296066726104628, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.6867702293396, 'W': 22.03824534762116} +[20.64, 20.72, 20.64, 20.8, 20.8, 20.76, 20.6, 20.64, 20.4, 20.16, 20.56, 20.44, 20.32, 20.6, 20.48, 20.36, 20.44, 20.36, 20.36, 20.04] +369.41999999999996 +18.470999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 51464, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.445167779922485, 'TIME_S_1KI': 0.20296066726104628, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.6867702293396, 'W': 22.03824534762116, 'J_1KI': 6.114697074252673, 'W_1KI': 0.4282264368805604, 'W_D': 3.5672453476211636, 'J_D': 50.937127677440664, 'W_D_1KI': 0.06931535340473269, 'J_D_1KI': 0.0013468706941693746} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_0.0001.json index ece3918..05807f6 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_0.0001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 137, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.396055936813354, "TIME_S_1KI": 75.88361997673981, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 753.5857475948335, "W": 33.1694657622459, "J_1KI": 5500.625894852799, "W_1KI": 242.11288877551752, "W_D": 14.2684657622459, "J_D": 324.16899673771866, "W_D_1KI": 104.14938512588249, "J_D_1KI": 760.2144899699452} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 134, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.467389583587646, "TIME_S_1KI": 78.11484763871378, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 706.7122743606567, "W": 32.619576005812384, "J_1KI": 5273.972196721319, "W_1KI": 243.42967168516705, "W_D": 13.915576005812383, "J_D": 301.4848619232177, "W_D_1KI": 103.84758213292824, "J_D_1KI": 774.9819562158824} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_0.0001.output index 5419742..71de004 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.629654169082642} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.793238162994385} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 44, 92, ..., 24999905, - 24999955, 25000000]), - col_indices=tensor([ 2191, 6192, 41052, ..., 471066, 488040, - 493296]), - values=tensor([0.3986, 0.5227, 0.3241, ..., 0.9261, 0.7192, 0.3287]), +tensor(crow_indices=tensor([ 0, 60, 106, ..., 24999903, + 24999944, 25000000]), + col_indices=tensor([ 5013, 8672, 15970, ..., 458181, 460431, + 489743]), + values=tensor([0.5903, 0.4134, 0.2669, ..., 0.7684, 0.5815, 0.3065]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.0957, 0.3468, 0.1431, ..., 0.5849, 0.2942, 0.3782]) +tensor([0.8384, 0.7499, 0.3876, ..., 0.2128, 0.0590, 0.5085]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 7.629654169082642 seconds +Time: 7.793238162994385 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 137 -ss 500000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.396055936813354} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 134 -ss 500000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.467389583587646} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 53, 94, ..., 24999919, - 24999951, 25000000]), - col_indices=tensor([ 2485, 12624, 22152, ..., 462150, 467889, - 476331]), - values=tensor([0.9572, 0.0985, 0.5455, ..., 0.5648, 0.8530, 0.8208]), +tensor(crow_indices=tensor([ 0, 60, 123, ..., 24999898, + 24999943, 25000000]), + col_indices=tensor([ 6210, 15955, 17043, ..., 486318, 493056, + 494837]), + values=tensor([0.0668, 0.1833, 0.6551, ..., 0.5227, 0.0230, 0.1644]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3242, 0.9080, 0.9457, ..., 0.2147, 0.3332, 0.4113]) +tensor([0.4055, 0.4142, 0.8884, ..., 0.0877, 0.6540, 0.4930]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.396055936813354 seconds +Time: 10.467389583587646 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 53, 94, ..., 24999919, - 24999951, 25000000]), - col_indices=tensor([ 2485, 12624, 22152, ..., 462150, 467889, - 476331]), - values=tensor([0.9572, 0.0985, 0.5455, ..., 0.5648, 0.8530, 0.8208]), +tensor(crow_indices=tensor([ 0, 60, 123, ..., 24999898, + 24999943, 25000000]), + col_indices=tensor([ 6210, 15955, 17043, ..., 486318, 493056, + 494837]), + values=tensor([0.0668, 0.1833, 0.6551, ..., 0.5227, 0.0230, 0.1644]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3242, 0.9080, 0.9457, ..., 0.2147, 0.3332, 0.4113]) +tensor([0.4055, 0.4142, 0.8884, ..., 0.0877, 0.6540, 0.4930]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.396055936813354 seconds +Time: 10.467389583587646 seconds -[20.64, 20.72, 20.84, 20.96, 21.08, 21.36, 21.56, 21.56, 21.36, 21.36] -[21.36, 21.04, 20.8, 24.16, 25.16, 26.72, 28.8, 27.16, 30.52, 29.84, 29.76, 29.92, 29.92, 30.28, 32.88, 38.36, 44.52, 49.04, 53.12, 53.28, 53.2, 52.84] -22.719260931015015 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 137, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.396055936813354, 'TIME_S_1KI': 75.88361997673981, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.5857475948335, 'W': 33.1694657622459} -[20.64, 20.72, 20.84, 20.96, 21.08, 21.36, 21.56, 21.56, 21.36, 21.36, 20.8, 20.84, 20.88, 21.0, 21.04, 20.84, 20.96, 20.68, 20.68, 20.52] -378.02 -18.901 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 137, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.396055936813354, 'TIME_S_1KI': 75.88361997673981, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.5857475948335, 'W': 33.1694657622459, 'J_1KI': 5500.625894852799, 'W_1KI': 242.11288877551752, 'W_D': 14.2684657622459, 'J_D': 324.16899673771866, 'W_D_1KI': 104.14938512588249, 'J_D_1KI': 760.2144899699452} +[21.04, 21.08, 21.12, 20.72, 20.84, 20.76, 20.48, 20.6, 20.8, 21.12] +[21.12, 21.12, 21.36, 22.64, 24.76, 25.8, 28.0, 29.0, 30.12, 31.08, 31.04, 30.64, 30.88, 31.48, 37.76, 37.76, 43.28, 49.24, 53.92, 53.72, 53.68] +21.66528081893921 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 134, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.467389583587646, 'TIME_S_1KI': 78.11484763871378, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 706.7122743606567, 'W': 32.619576005812384} +[21.04, 21.08, 21.12, 20.72, 20.84, 20.76, 20.48, 20.6, 20.8, 21.12, 20.52, 20.72, 20.56, 20.52, 20.64, 20.88, 20.72, 20.84, 21.0, 20.92] +374.08 +18.704 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 134, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.467389583587646, 'TIME_S_1KI': 78.11484763871378, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 706.7122743606567, 'W': 32.619576005812384, 'J_1KI': 5273.972196721319, 'W_1KI': 243.42967168516705, 'W_D': 13.915576005812383, 
'J_D': 301.4848619232177, 'W_D_1KI': 103.84758213292824, 'J_D_1KI': 774.9819562158824} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.json index 28ed47d..2ff6880 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1548, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.061279058456421, "TIME_S_1KI": 7.145529107529987, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 576.3272948646545, "W": 36.84956710355697, "J_1KI": 372.3044540469344, "W_1KI": 23.80462991185851, "W_D": 18.03556710355697, "J_D": 282.0763014917373, "W_D_1KI": 11.65088314183267, "J_D_1KI": 7.526410298341518} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1566, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.265558242797852, "TIME_S_1KI": 7.1938430669207225, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 590.2575314998627, "W": 37.710029711893654, "J_1KI": 376.920518199146, "W_1KI": 24.08047874322711, "W_D": 19.015029711893654, "J_D": 297.63340376257895, "W_D_1KI": 12.142419994823534, "J_D_1KI": 7.75378032875066} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.output index ddb3af3..9ff0fff 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.6782102584838867} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.6704628467559814} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 13, ..., 2499990, - 2499997, 2500000]), - col_indices=tensor([ 40175, 122073, 147940, ..., 245767, 297950, - 495791]), - values=tensor([0.1248, 0.8645, 0.7112, ..., 0.2227, 0.8085, 0.2637]), +tensor(crow_indices=tensor([ 0, 6, 13, ..., 2499992, + 2499996, 2500000]), + col_indices=tensor([ 18465, 224310, 266853, ..., 261692, 275322, + 390795]), + values=tensor([0.7820, 0.2662, 0.3463, ..., 0.8983, 0.8249, 0.4987]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3055, 0.1588, 0.3916, ..., 0.3608, 0.8122, 0.4114]) +tensor([0.5044, 0.5686, 0.5423, ..., 0.2915, 0.5450, 0.7110]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 0.6782102584838867 seconds +Time: 0.6704628467559814 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1548 -ss 500000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.061279058456421} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1566 -ss 500000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.265558242797852} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 4, ..., 2499989, +tensor(crow_indices=tensor([ 0, 4, 12, ..., 2499989, 2499994, 2500000]), - col_indices=tensor([ 33871, 87157, 252512, ..., 380315, 410804, - 497208]), - values=tensor([0.0607, 0.8545, 0.0688, ..., 0.9965, 0.6178, 0.6113]), + col_indices=tensor([ 32811, 195297, 326453, ..., 287008, 298525, + 381158]), + values=tensor([0.7187, 0.3561, 0.0889, ..., 0.2717, 0.8525, 0.2350]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3094, 0.9384, 0.5289, ..., 0.5205, 0.7717, 0.7334]) +tensor([0.4888, 0.5473, 0.1593, ..., 0.6089, 0.0990, 0.9360]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 11.061279058456421 seconds +Time: 11.265558242797852 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 4, ..., 2499989, +tensor(crow_indices=tensor([ 0, 4, 12, ..., 2499989, 2499994, 2500000]), - col_indices=tensor([ 33871, 87157, 252512, ..., 380315, 410804, - 497208]), - values=tensor([0.0607, 0.8545, 0.0688, ..., 0.9965, 0.6178, 0.6113]), + col_indices=tensor([ 32811, 195297, 326453, ..., 287008, 298525, + 381158]), + values=tensor([0.7187, 0.3561, 0.0889, ..., 0.2717, 0.8525, 0.2350]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3094, 0.9384, 0.5289, ..., 0.5205, 0.7717, 0.7334]) +tensor([0.4888, 0.5473, 0.1593, ..., 0.6089, 0.0990, 0.9360]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 11.061279058456421 seconds +Time: 11.265558242797852 seconds -[20.72, 20.76, 21.12, 20.92, 20.92, 20.92, 20.88, 20.44, 20.48, 20.48] -[20.56, 20.56, 21.24, 22.4, 24.48, 28.6, 36.72, 41.88, 48.52, 48.52, 52.36, 52.92, 53.48, 53.4, 53.48] -15.640001773834229 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1548, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.061279058456421, 'TIME_S_1KI': 7.145529107529987, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 576.3272948646545, 'W': 36.84956710355697} -[20.72, 20.76, 21.12, 20.92, 20.92, 20.92, 20.88, 20.44, 20.48, 20.48, 20.6, 20.68, 20.88, 21.24, 21.48, 21.32, 21.48, 20.96, 20.64, 20.52] -376.28 -18.814 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1548, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.061279058456421, 'TIME_S_1KI': 7.145529107529987, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 576.3272948646545, 'W': 36.84956710355697, 'J_1KI': 372.3044540469344, 'W_1KI': 23.80462991185851, 'W_D': 18.03556710355697, 'J_D': 282.0763014917373, 'W_D_1KI': 11.65088314183267, 'J_D_1KI': 7.526410298341518} +[20.76, 20.8, 20.76, 20.88, 20.84, 20.68, 20.68, 21.0, 20.88, 20.92] +[21.0, 21.08, 21.12, 24.92, 26.6, 30.76, 37.4, 42.12, 47.32, 52.72, 53.68, 53.68, 53.36, 53.36, 53.48] +15.652534246444702 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.265558242797852, 'TIME_S_1KI': 7.1938430669207225, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 590.2575314998627, 'W': 37.710029711893654} +[20.76, 20.8, 20.76, 20.88, 20.84, 20.68, 20.68, 21.0, 20.88, 20.92, 20.52, 20.76, 21.0, 20.92, 20.88, 20.72, 20.56, 20.52, 20.6, 20.64] +373.9 +18.695 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.265558242797852, 'TIME_S_1KI': 7.1938430669207225, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 590.2575314998627, 'W': 37.710029711893654, 'J_1KI': 376.920518199146, 'W_1KI': 24.08047874322711, 'W_D': 19.015029711893654, 'J_D': 297.63340376257895, 'W_D_1KI': 12.142419994823534, 'J_D_1KI': 7.75378032875066} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_5e-05.json index 9620135..6f5ae4b 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 285, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.060697317123413, "TIME_S_1KI": 38.809464270608466, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 641.8192787170411, "W": 34.323302144261945, "J_1KI": 2251.9974691826005, "W_1KI": 120.43263910267349, "W_D": 15.818302144261942, "J_D": 295.7900504469872, "W_D_1KI": 55.50281454126997, "J_D_1KI": 194.74671768866656} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 282, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.728825569152832, "TIME_S_1KI": 38.04548074167671, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 652.2455996704102, "W": 34.861341364200754, "J_1KI": 2312.92765840571, "W_1KI": 123.62177788723672, "W_D": 15.989341364200758, "J_D": 299.15594576454185, "W_D_1KI": 56.699792071633894, "J_D_1KI": 201.06309245260246} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_5e-05.output index 0f40f5a..6fcb344 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.6834404468536377} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.716564655303955} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 36, 64, ..., 12499962, - 12499977, 12500000]), - col_indices=tensor([ 6540, 37225, 45963, ..., 476281, 491551, - 491729]), - values=tensor([0.4995, 0.3434, 0.2289, ..., 0.9980, 0.3953, 0.2839]), +tensor(crow_indices=tensor([ 0, 34, 56, ..., 12499948, + 12499973, 12500000]), + col_indices=tensor([ 6554, 12774, 29441, ..., 452817, 453786, + 473967]), + values=tensor([0.5936, 0.4042, 0.6702, ..., 0.6965, 0.8881, 0.9167]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.6494, 0.0196, 0.1697, ..., 0.1655, 0.3294, 0.8926]) +tensor([0.1409, 0.2128, 0.4133, ..., 0.3735, 0.0685, 0.1386]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 3.6834404468536377 seconds +Time: 3.716564655303955 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 285 -ss 500000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.060697317123413} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 282 -ss 500000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.728825569152832} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 51, ..., 12499947, - 12499979, 12500000]), - col_indices=tensor([ 32854, 40713, 51141, ..., 464012, 471829, - 496055]), - values=tensor([0.7704, 0.8573, 0.2864, ..., 0.9432, 0.9508, 0.7094]), +tensor(crow_indices=tensor([ 0, 29, 62, ..., 12499958, + 12499978, 12500000]), + col_indices=tensor([ 57103, 90320, 96727, ..., 418254, 450816, + 472842]), + values=tensor([0.1970, 0.6853, 0.2290, ..., 0.5665, 0.4164, 0.8779]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9627, 0.2273, 0.6691, ..., 0.1238, 0.9472, 0.0057]) +tensor([0.7571, 0.9474, 0.9034, ..., 0.2940, 0.2447, 0.3708]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 11.060697317123413 seconds +Time: 10.728825569152832 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 51, ..., 12499947, - 12499979, 12500000]), - col_indices=tensor([ 32854, 40713, 51141, ..., 464012, 471829, - 496055]), - values=tensor([0.7704, 0.8573, 0.2864, ..., 0.9432, 0.9508, 0.7094]), +tensor(crow_indices=tensor([ 0, 29, 62, ..., 12499958, + 12499978, 12500000]), + col_indices=tensor([ 57103, 90320, 96727, ..., 418254, 450816, + 472842]), + values=tensor([0.1970, 0.6853, 0.2290, ..., 0.5665, 0.4164, 0.8779]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9627, 0.2273, 0.6691, ..., 0.1238, 0.9472, 0.0057]) +tensor([0.7571, 0.9474, 0.9034, ..., 0.2940, 0.2447, 0.3708]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 11.060697317123413 seconds +Time: 10.728825569152832 seconds -[20.52, 20.84, 20.92, 20.88, 20.88, 20.72, 20.52, 20.32, 20.36, 20.2] -[20.44, 20.6, 21.48, 21.48, 23.84, 25.24, 27.32, 30.28, 29.76, 32.76, 37.84, 41.44, 46.92, 51.96, 52.12, 52.64, 52.6, 52.8] -18.699228763580322 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 285, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.060697317123413, 'TIME_S_1KI': 38.809464270608466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 641.8192787170411, 'W': 34.323302144261945} -[20.52, 20.84, 20.92, 20.88, 20.88, 20.72, 20.52, 20.32, 20.36, 20.2, 20.52, 20.44, 20.6, 20.44, 20.44, 20.48, 20.48, 20.36, 20.44, 20.72] -370.1 -18.505000000000003 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 285, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.060697317123413, 'TIME_S_1KI': 38.809464270608466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 641.8192787170411, 'W': 34.323302144261945, 'J_1KI': 2251.9974691826005, 'W_1KI': 120.43263910267349, 'W_D': 15.818302144261942, 'J_D': 295.7900504469872, 'W_D_1KI': 55.50281454126997, 'J_D_1KI': 194.74671768866656} +[21.0, 20.76, 20.92, 21.0, 21.12, 21.12, 21.16, 21.08, 20.88, 20.52] +[20.56, 20.52, 20.52, 21.4, 22.64, 24.72, 27.0, 30.12, 30.6, 35.4, 40.24, 43.88, 49.0, 53.2, 53.36, 53.16, 52.72, 52.48] +18.709710359573364 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 282, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.728825569152832, 'TIME_S_1KI': 38.04548074167671, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 652.2455996704102, 'W': 34.861341364200754} +[21.0, 20.76, 20.92, 21.0, 21.12, 21.12, 21.16, 21.08, 20.88, 20.52, 20.6, 20.68, 21.08, 21.08, 20.96, 21.2, 21.32, 20.88, 20.8, 20.68] +377.43999999999994 +18.871999999999996 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 282, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.728825569152832, 'TIME_S_1KI': 38.04548074167671, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 652.2455996704102, 'W': 34.861341364200754, 'J_1KI': 2312.92765840571, 'W_1KI': 123.62177788723672, 'W_D': 15.989341364200758, 'J_D': 
299.15594576454185, 'W_D_1KI': 56.699792071633894, 'J_D_1KI': 201.06309245260246} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.json index 29a7f3d..bba8bd2 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3424, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.731184720993042, "TIME_S_1KI": 3.1341076872059115, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 463.79336466789243, "W": 31.765422510706816, "J_1KI": 135.4536695875854, "W_1KI": 9.27728461177185, "W_D": 13.349422510706813, "J_D": 194.90921553230277, "W_D_1KI": 3.8987799388746534, "J_D_1KI": 1.1386623653255412} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3338, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.433396339416504, "TIME_S_1KI": 3.1256430016226795, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 439.5648366165161, "W": 32.3031062563951, "J_1KI": 131.68509185635594, "W_1KI": 9.67738353996258, "W_D": 13.753106256395096, "J_D": 187.14552887201313, "W_D_1KI": 4.120163647811593, "J_D_1KI": 1.2343210448806448} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.output index 192c47b..58f50a5 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.4611988067626953} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3145158290863037} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 9, ..., 249990, 249996, +tensor(crow_indices=tensor([ 0, 2, 8, ..., 249990, 249997, 250000]), - col_indices=tensor([ 1266, 4071, 18947, ..., 33754, 36171, 46993]), - values=tensor([0.2894, 0.3028, 0.5808, ..., 0.9499, 0.5530, 0.4490]), + col_indices=tensor([16316, 21389, 513, ..., 3438, 9548, 13900]), + values=tensor([0.7129, 0.1949, 0.4120, ..., 0.9486, 0.3922, 0.7881]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.9097, 0.0887, 0.0049, ..., 0.6179, 0.8641, 0.1772]) +tensor([0.6555, 0.2877, 0.8843, ..., 0.9355, 0.9146, 0.1438]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.4611988067626953 seconds +Time: 0.3145158290863037 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2276 -ss 50000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.978086233139038} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3338 -ss 50000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.433396339416504} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 9, ..., 249990, 249995, +tensor(crow_indices=tensor([ 0, 2, 5, ..., 249989, 249995, 250000]), - col_indices=tensor([ 2233, 6887, 19755, ..., 38632, 41476, 48223]), - values=tensor([0.3109, 0.9167, 0.4160, ..., 0.6671, 0.8506, 0.5777]), + col_indices=tensor([16110, 27292, 28034, ..., 24392, 29295, 38009]), + values=tensor([0.3064, 0.1653, 0.6261, ..., 0.3241, 0.1335, 0.8482]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7812, 0.4224, 0.5960, ..., 0.2514, 0.6292, 0.3012]) +tensor([0.1035, 0.4360, 0.8597, ..., 0.2911, 0.6198, 0.0473]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 6.978086233139038 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3424 -ss 50000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.731184720993042} +Time: 10.433396339416504 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 7, ..., 249990, 249996, +tensor(crow_indices=tensor([ 0, 2, 5, ..., 249989, 249995, 250000]), - col_indices=tensor([12104, 14436, 24112, ..., 12878, 32819, 38734]), - values=tensor([0.5759, 0.9600, 0.3696, ..., 0.0040, 0.7766, 0.9665]), + col_indices=tensor([16110, 27292, 28034, ..., 24392, 29295, 38009]), + values=tensor([0.3064, 0.1653, 0.6261, ..., 0.3241, 0.1335, 0.8482]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4011, 0.3434, 0.3941, ..., 0.7256, 0.6030, 0.5117]) +tensor([0.1035, 0.4360, 0.8597, ..., 0.2911, 0.6198, 0.0473]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,30 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.731184720993042 seconds +Time: 10.433396339416504 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 7, ..., 249990, 249996, - 250000]), - col_indices=tensor([12104, 14436, 24112, ..., 12878, 32819, 38734]), - values=tensor([0.5759, 0.9600, 0.3696, ..., 0.0040, 0.7766, 0.9665]), - size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4011, 0.3434, 0.3941, ..., 0.7256, 0.6030, 0.5117]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 250000 -Density: 0.0001 -Time: 10.731184720993042 seconds - -[20.72, 20.68, 20.48, 20.6, 20.64, 20.32, 20.32, 20.36, 20.32, 20.4] -[20.52, 20.76, 22.24, 22.24, 24.16, 27.64, 32.84, 38.08, 39.84, 44.4, 44.52, 43.96, 44.12, 44.28] -14.60057282447815 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3424, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.731184720993042, 'TIME_S_1KI': 3.1341076872059115, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 463.79336466789243, 'W': 31.765422510706816} -[20.72, 20.68, 20.48, 20.6, 20.64, 20.32, 20.32, 20.36, 20.32, 20.4, 20.68, 20.44, 20.48, 20.48, 20.2, 20.32, 20.44, 20.44, 20.6, 20.6] -368.32000000000005 -18.416000000000004 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3424, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.731184720993042, 'TIME_S_1KI': 3.1341076872059115, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 463.79336466789243, 'W': 31.765422510706816, 'J_1KI': 135.4536695875854, 'W_1KI': 9.27728461177185, 'W_D': 13.349422510706813, 'J_D': 194.90921553230277, 'W_D_1KI': 3.8987799388746534, 'J_D_1KI': 1.1386623653255412} +[20.96, 20.44, 20.52, 20.6, 20.6, 20.6, 20.72, 20.88, 20.68, 20.84] +[20.88, 20.88, 21.28, 25.44, 27.56, 32.8, 38.08, 39.28, 42.28, 44.04, 44.08, 44.32, 
44.32] +13.607509851455688 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3338, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.433396339416504, 'TIME_S_1KI': 3.1256430016226795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 439.5648366165161, 'W': 32.3031062563951} +[20.96, 20.44, 20.52, 20.6, 20.6, 20.6, 20.72, 20.88, 20.68, 20.84, 20.56, 20.56, 20.44, 20.36, 20.72, 20.68, 20.72, 20.6, 20.48, 20.44] +371.0 +18.55 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3338, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.433396339416504, 'TIME_S_1KI': 3.1256430016226795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 439.5648366165161, 'W': 32.3031062563951, 'J_1KI': 131.68509185635594, 'W_1KI': 9.67738353996258, 'W_D': 13.753106256395096, 'J_D': 187.14552887201313, 'W_D_1KI': 4.120163647811593, 'J_D_1KI': 1.2343210448806448} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.json index e699ae2..f0bd04f 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 385, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.064192295074463, "TIME_S_1KI": 28.738161805388216, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 468.0283072185517, "W": 32.037671646494225, "J_1KI": 1215.657940827407, "W_1KI": 83.21473154933565, "W_D": 13.341671646494227, "J_D": 194.90430094528205, "W_D_1KI": 34.65369258829669, "J_D_1KI": 90.00959113843297} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 381, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.208153009414673, "TIME_S_1KI": 29.417724434159247, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 471.2987487792969, "W": 32.20972724950075, "J_1KI": 1237.0045899719078, "W_1KI": 84.53996653412271, "W_D": 13.801727249500754, "J_D": 201.94945251464853, "W_D_1KI": 36.22500590420145, "J_D_1KI": 95.07875565407205} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.output index c964dd6..46637b5 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.9453940391540527} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 
50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.7509913444519043} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 37, 86, ..., 2499902, - 2499952, 2500000]), - col_indices=tensor([ 541, 1139, 1813, ..., 42919, 43072, 44933]), - values=tensor([0.0452, 0.1724, 0.8861, ..., 0.4157, 0.9772, 0.2120]), +tensor(crow_indices=tensor([ 0, 53, 99, ..., 2499896, + 2499945, 2500000]), + col_indices=tensor([ 151, 1532, 2515, ..., 45409, 45788, 48199]), + values=tensor([0.6730, 0.0619, 0.7951, ..., 0.3963, 0.0592, 0.9045]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1045, 0.1557, 0.1178, ..., 0.5894, 0.9079, 0.5773]) +tensor([0.5006, 0.5607, 0.2495, ..., 0.5708, 0.0051, 0.0518]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 2.9453940391540527 seconds +Time: 2.7509913444519043 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 356 -ss 50000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.708060026168823} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 381 -ss 50000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.208153009414673} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 46, 99, ..., 2499902, - 2499948, 2500000]), - col_indices=tensor([ 4031, 7226, 7309, ..., 44877, 48582, 49711]), - values=tensor([0.9329, 0.4420, 0.5313, ..., 0.9423, 0.2849, 0.2389]), +tensor(crow_indices=tensor([ 0, 55, 110, ..., 2499902, + 2499954, 2500000]), + col_indices=tensor([ 1027, 1222, 4121, ..., 48262, 49731, 49819]), + values=tensor([0.4693, 0.9048, 0.3075, ..., 0.6723, 0.2017, 0.4340]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0088, 0.8123, 0.3302, ..., 0.9483, 0.6171, 0.9552]) +tensor([0.7107, 0.1882, 0.3199, ..., 0.6310, 0.1253, 0.0320]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 9.708060026168823 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 385 -ss 50000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.064192295074463} +Time: 11.208153009414673 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 67, 125, ..., 2499902, - 2499956, 2500000]), - col_indices=tensor([ 1129, 2884, 2891, ..., 49010, 49022, 49816]), - values=tensor([0.8127, 0.7656, 0.2912, ..., 0.8978, 0.1718, 0.1428]), +tensor(crow_indices=tensor([ 0, 55, 110, ..., 2499902, + 2499954, 2500000]), + col_indices=tensor([ 1027, 1222, 4121, ..., 48262, 49731, 49819]), + values=tensor([0.4693, 0.9048, 0.3075, ..., 0.6723, 0.2017, 0.4340]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7569, 0.5985, 0.1427, ..., 0.6714, 0.1732, 0.3064]) +tensor([0.7107, 0.1882, 0.3199, ..., 0.6310, 0.1253, 0.0320]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,30 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 11.064192295074463 seconds +Time: 11.208153009414673 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 67, 125, ..., 2499902, - 2499956, 2500000]), - col_indices=tensor([ 1129, 2884, 2891, ..., 49010, 49022, 49816]), - values=tensor([0.8127, 0.7656, 0.2912, ..., 0.8978, 0.1718, 0.1428]), - size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7569, 0.5985, 0.1427, ..., 0.6714, 0.1732, 0.3064]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 2500000 -Density: 0.001 -Time: 11.064192295074463 seconds - -[20.72, 20.52, 20.56, 20.48, 20.48, 20.52, 20.56, 20.6, 20.36, 20.6] -[20.48, 20.52, 21.48, 23.4, 25.44, 29.64, 35.04, 38.48, 42.0, 43.12, 43.12, 43.84, 43.68, 43.32] -14.608686685562134 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 385, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.064192295074463, 'TIME_S_1KI': 28.738161805388216, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 468.0283072185517, 'W': 32.037671646494225} -[20.72, 20.52, 20.56, 20.48, 20.48, 20.52, 20.56, 20.6, 20.36, 20.6, 20.84, 20.8, 21.0, 21.24, 21.16, 21.08, 21.28, 20.92, 20.84, 20.88] -373.91999999999996 -18.695999999999998 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 385, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.064192295074463, 'TIME_S_1KI': 28.738161805388216, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 468.0283072185517, 'W': 32.037671646494225, 'J_1KI': 1215.657940827407, 'W_1KI': 83.21473154933565, 'W_D': 13.341671646494227, 'J_D': 194.90430094528205, 'W_D_1KI': 34.65369258829669, 'J_D_1KI': 90.00959113843297} +[20.52, 20.72, 20.76, 20.76, 20.84, 20.76, 20.44, 20.24, 20.6, 20.6] +[20.52, 20.56, 23.48, 24.32, 26.96, 31.16, 36.28, 37.6, 41.44, 41.44, 43.16, 42.84, 43.16, 42.96] +14.632186889648438 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 381, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.208153009414673, 'TIME_S_1KI': 29.417724434159247, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 471.2987487792969, 'W': 32.20972724950075} +[20.52, 20.72, 20.76, 20.76, 20.84, 20.76, 20.44, 20.24, 20.6, 20.6, 20.28, 20.2, 20.4, 20.32, 20.0, 20.04, 20.16, 20.32, 20.6, 20.6] +368.15999999999997 +18.407999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 381, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.208153009414673, 'TIME_S_1KI': 29.417724434159247, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 471.2987487792969, 'W': 32.20972724950075, 'J_1KI': 1237.0045899719078, 'W_1KI': 84.53996653412271, 'W_D': 13.801727249500754, 'J_D': 201.94945251464853, 'W_D_1KI': 36.22500590420145, 'J_D_1KI': 95.07875565407205} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.01.json index 3c1f90f..7eccccc 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.01.json +++ 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 32.80737018585205, "TIME_S_1KI": 328.0737018585205, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1398.267660713196, "W": 32.405416577679354, "J_1KI": 13982.676607131958, "W_1KI": 324.0541657767935, "W_D": 13.697416577679352, "J_D": 591.0325080986024, "W_D_1KI": 136.97416577679354, "J_D_1KI": 1369.7416577679353} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 31.461788177490234, "TIME_S_1KI": 314.61788177490234, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1422.216083526611, "W": 32.08903074827424, "J_1KI": 14222.160835266111, "W_1KI": 320.8903074827424, "W_D": 13.453030748274237, "J_D": 596.2510009250636, "W_D_1KI": 134.53030748274236, "J_D_1KI": 1345.3030748274234} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.01.output index 0d92e6b..3ea4230 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.01 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 32.80737018585205} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 31.461788177490234} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 489, 963, ..., 24999055, - 24999529, 25000000]), - col_indices=tensor([ 18, 157, 241, ..., 49747, 49771, 49960]), - values=tensor([0.7706, 0.7949, 0.9210, ..., 0.0962, 0.6322, 0.0053]), +tensor(crow_indices=tensor([ 0, 495, 1024, ..., 24998992, + 24999507, 25000000]), + col_indices=tensor([ 89, 121, 308, ..., 49151, 49365, 49592]), + values=tensor([0.2016, 0.7170, 0.9969, ..., 0.6090, 0.1562, 0.9700]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.2729, 0.2896, 0.6966, ..., 0.6831, 0.4086, 0.6520]) +tensor([0.5426, 0.2677, 0.5298, ..., 0.8739, 0.7597, 0.2370]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,16 +16,16 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 32.80737018585205 seconds +Time: 31.461788177490234 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 489, 963, ..., 24999055, - 24999529, 25000000]), - col_indices=tensor([ 18, 157, 241, ..., 49747, 49771, 49960]), - values=tensor([0.7706, 0.7949, 0.9210, ..., 0.0962, 0.6322, 0.0053]), +tensor(crow_indices=tensor([ 0, 495, 1024, ..., 24998992, + 24999507, 25000000]), + col_indices=tensor([ 89, 121, 308, ..., 49151, 49365, 49592]), + values=tensor([0.2016, 0.7170, 0.9969, ..., 0.6090, 0.1562, 0.9700]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.2729, 0.2896, 0.6966, ..., 0.6831, 0.4086, 0.6520]) +tensor([0.5426, 0.2677, 0.5298, ..., 0.8739, 0.7597, 0.2370]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -33,13 +33,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 32.80737018585205 seconds +Time: 31.461788177490234 seconds -[20.96, 20.68, 20.68, 20.68, 20.72, 20.64, 20.64, 20.76, 20.84, 20.92] -[20.92, 20.84, 21.32, 22.6, 25.12, 26.44, 26.44, 28.96, 28.92, 32.68, 31.76, 31.16, 31.4, 31.52, 32.12, 34.6, 36.28, 37.8, 37.28, 37.56, 37.96, 37.96, 38.28, 38.36, 37.96, 37.32, 36.48, 36.68, 36.84, 37.44, 37.44, 37.76, 38.0, 39.0, 37.68, 36.84, 37.4, 37.4, 36.8, 37.32, 37.6, 37.72] -43.14919567108154 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 32.80737018585205, 'TIME_S_1KI': 328.0737018585205, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1398.267660713196, 'W': 32.405416577679354} -[20.96, 20.68, 20.68, 20.68, 20.72, 20.64, 20.64, 20.76, 20.84, 20.92, 21.04, 20.84, 20.72, 20.68, 20.72, 20.6, 20.92, 21.2, 20.96, 20.84] -374.16 -18.708000000000002 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 32.80737018585205, 'TIME_S_1KI': 328.0737018585205, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1398.267660713196, 'W': 32.405416577679354, 'J_1KI': 13982.676607131958, 'W_1KI': 324.0541657767935, 'W_D': 
13.697416577679352, 'J_D': 591.0325080986024, 'W_D_1KI': 136.97416577679354, 'J_D_1KI': 1369.7416577679353} +[20.44, 20.56, 20.8, 20.92, 20.84, 20.88, 20.84, 20.76, 20.8, 20.8] +[20.72, 20.8, 20.96, 21.88, 23.96, 25.36, 27.04, 27.8, 28.0, 30.52, 30.92, 30.4, 31.04, 31.04, 29.84, 32.08, 33.8, 35.52, 36.96, 37.0, 37.28, 37.72, 38.12, 37.84, 37.64, 36.96, 36.08, 37.0, 37.08, 37.76, 37.16, 37.16, 37.32, 37.92, 38.36, 37.44, 38.12, 38.32, 37.68, 37.96, 37.68, 37.6, 37.44] +44.320942401885986 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 31.461788177490234, 'TIME_S_1KI': 314.61788177490234, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1422.216083526611, 'W': 32.08903074827424} +[20.44, 20.56, 20.8, 20.92, 20.84, 20.88, 20.84, 20.76, 20.8, 20.8, 20.72, 20.6, 20.4, 20.56, 20.56, 20.72, 20.76, 20.8, 20.6, 20.68] +372.72 +18.636000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 31.461788177490234, 'TIME_S_1KI': 314.61788177490234, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1422.216083526611, 'W': 32.08903074827424, 'J_1KI': 14222.160835266111, 'W_1KI': 320.8903074827424, 'W_D': 13.453030748274237, 'J_D': 596.2510009250636, 'W_D_1KI': 134.53030748274236, 'J_D_1KI': 1345.3030748274234} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.json index a4de095..0d86cc8 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 19951, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.462696075439453, "TIME_S_1KI": 0.5244196318700542, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 488.71538955688476, "W": 33.3802333465878, "J_1KI": 24.49578414900931, "W_1KI": 1.6731107887618566, "W_D": 15.011233346587801, "J_D": 219.77739569807053, "W_D_1KI": 0.752405059725718, "J_D_1KI": 0.03771264897627778} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 18570, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.544741868972778, "TIME_S_1KI": 0.5678374727502842, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 417.4055347442627, "W": 30.6890173810843, "J_1KI": 22.47741167174274, "W_1KI": 1.652612675341104, "W_D": 12.3270173810843, "J_D": 167.6614541893005, "W_D_1KI": 0.6638135369458428, "J_D_1KI": 0.03574655557058927} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.output index 06a37e6..1958e0c 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 
'--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05949115753173828} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.12336349487304688} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24999, 25000]), - col_indices=tensor([35821, 49411, 3789, ..., 32092, 27347, 39445]), - values=tensor([0.1439, 0.1701, 0.0383, ..., 0.6521, 0.3755, 0.5678]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24998, 24999, 25000]), + col_indices=tensor([10786, 46751, 16470, ..., 29231, 39472, 27659]), + values=tensor([0.1604, 0.9127, 0.3132, ..., 0.0234, 0.0202, 0.1700]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8709, 0.6173, 0.3475, ..., 0.7020, 0.1451, 0.7453]) +tensor([0.4453, 0.6856, 0.2424, ..., 0.5766, 0.9662, 0.5674]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.05949115753173828 seconds +Time: 0.12336349487304688 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17649 -ss 50000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.288093090057373} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 8511 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.812279939651489} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 24999, 25000, 25000]), - col_indices=tensor([10903, 22613, 1325, ..., 4616, 25772, 38217]), - values=tensor([0.1548, 0.5404, 0.0562, ..., 0.6796, 0.5534, 0.6437]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([16974, 41354, 19010, ..., 27208, 3324, 14724]), + values=tensor([0.8188, 0.7622, 0.0432, ..., 0.2727, 0.7144, 0.5814]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8066, 0.3465, 0.3699, ..., 0.5654, 0.2544, 0.1290]) +tensor([0.3360, 0.6018, 0.6109, ..., 0.5049, 0.3747, 0.6381]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 9.288093090057373 seconds +Time: 4.812279939651489 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 19951 -ss 50000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.462696075439453} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 18570 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.544741868972778} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 24997, 24998, 25000]), - col_indices=tensor([36526, 27522, 9271, ..., 28337, 20494, 41611]), - values=tensor([0.2838, 0.5711, 0.3512, ..., 0.1758, 0.7475, 0.3339]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), + col_indices=tensor([15172, 40157, 23776, ..., 14049, 32572, 38298]), + values=tensor([0.0933, 0.9209, 0.0305, ..., 0.9069, 0.4875, 0.3887]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9803, 0.0496, 0.4924, ..., 0.5397, 0.0486, 0.3592]) +tensor([0.5338, 0.8875, 0.4383, ..., 0.6019, 0.9284, 0.7233]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.462696075439453 seconds +Time: 10.544741868972778 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 24997, 24998, 25000]), - col_indices=tensor([36526, 27522, 9271, ..., 28337, 20494, 41611]), - values=tensor([0.2838, 0.5711, 0.3512, ..., 0.1758, 0.7475, 0.3339]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), + col_indices=tensor([15172, 40157, 23776, ..., 14049, 32572, 38298]), + values=tensor([0.0933, 0.9209, 0.0305, ..., 0.9069, 0.4875, 0.3887]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9803, 0.0496, 0.4924, ..., 0.5397, 0.0486, 0.3592]) +tensor([0.5338, 0.8875, 0.4383, ..., 0.6019, 0.9284, 0.7233]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.462696075439453 seconds +Time: 10.544741868972778 seconds -[20.44, 20.56, 20.6, 20.64, 20.64, 20.44, 20.44, 20.44, 20.4, 20.56] -[20.44, 20.64, 23.28, 24.2, 26.6, 32.52, 37.24, 40.44, 44.28, 44.96, 44.96, 44.92, 44.44, 43.84] -14.640861988067627 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 19951, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.462696075439453, 'TIME_S_1KI': 0.5244196318700542, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 488.71538955688476, 'W': 33.3802333465878} -[20.44, 20.56, 20.6, 20.64, 20.64, 20.44, 20.44, 20.44, 20.4, 20.56, 20.32, 20.32, 20.12, 20.24, 20.32, 20.56, 20.52, 20.28, 20.2, 20.0] -367.38 -18.369 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 19951, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.462696075439453, 'TIME_S_1KI': 0.5244196318700542, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 488.71538955688476, 'W': 33.3802333465878, 'J_1KI': 24.49578414900931, 'W_1KI': 1.6731107887618566, 'W_D': 15.011233346587801, 'J_D': 219.77739569807053, 'W_D_1KI': 0.752405059725718, 'J_D_1KI': 0.03771264897627778} +[20.52, 20.32, 20.16, 20.16, 20.24, 20.32, 20.44, 20.52, 20.52, 20.28] +[20.32, 20.36, 21.0, 23.24, 24.96, 30.08, 34.16, 37.96, 40.96, 40.96, 42.56, 43.04, 43.56] +13.601137161254883 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 18570, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.544741868972778, 'TIME_S_1KI': 0.5678374727502842, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 417.4055347442627, 'W': 30.6890173810843} +[20.52, 20.32, 20.16, 20.16, 20.24, 20.32, 20.44, 20.52, 20.52, 20.28, 20.4, 20.4, 20.36, 20.28, 20.56, 20.56, 20.64, 20.52, 20.4, 20.48] +367.24 +18.362000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 18570, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.544741868972778, 'TIME_S_1KI': 0.5678374727502842, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 417.4055347442627, 'W': 30.6890173810843, 'J_1KI': 22.47741167174274, 'W_1KI': 1.652612675341104, 'W_D': 12.3270173810843, 'J_D': 167.6614541893005, 'W_D_1KI': 0.6638135369458428, 'J_D_1KI': 0.03574655557058927} diff --git 
a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_5e-05.json index cff7542..d86e43a 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 6322, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.657772779464722, "TIME_S_1KI": 1.68582296416715, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 425.8721401214599, "W": 31.344652680689325, "J_1KI": 67.3635147297469, "W_1KI": 4.958027946961298, "W_D": 12.573652680689325, "J_D": 170.83514789009087, "W_D_1KI": 1.9888726163697126, "J_D_1KI": 0.31459547870447846} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 6116, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.511497974395752, "TIME_S_1KI": 1.7186883542177487, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 426.78555660247804, "W": 31.380359239851575, "J_1KI": 69.78181108608209, "W_1KI": 5.130863185064025, "W_D": 12.985359239851572, "J_D": 176.60612896442407, "W_D_1KI": 2.1231784237821407, "J_D_1KI": 0.3471514754385449} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_5e-05.output index 57f8461..0c1ead7 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.3423471450805664} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.17166757583618164} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 124998, 124999, +tensor(crow_indices=tensor([ 0, 1, 2, ..., 124999, 124999, 125000]), - col_indices=tensor([ 303, 26221, 28347, ..., 8622, 14261, 4291]), - values=tensor([0.9240, 0.5223, 0.0365, ..., 0.6044, 0.0072, 0.5479]), + col_indices=tensor([45961, 10128, 6506, ..., 18491, 35128, 14957]), + values=tensor([0.8466, 0.2605, 0.1046, ..., 0.4924, 0.9420, 0.7185]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.1523, 0.9417, 0.1754, ..., 0.6908, 0.2427, 0.5501]) +tensor([0.3816, 0.8434, 0.3348, ..., 0.6276, 0.4301, 0.2374]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.3423471450805664 seconds +Time: 0.17166757583618164 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3067 -ss 50000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 5.0935986042022705} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 6116 -ss 50000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.511497974395752} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 124997, 125000, +tensor(crow_indices=tensor([ 0, 4, 8, ..., 124988, 124994, 125000]), - col_indices=tensor([ 1194, 5034, 6320, ..., 11179, 21504, 33093]), - values=tensor([0.7209, 0.3055, 0.4482, ..., 0.3076, 0.8643, 0.0918]), + col_indices=tensor([ 1782, 22893, 30856, ..., 16042, 33293, 42752]), + values=tensor([0.7242, 0.8386, 0.5964, ..., 0.7254, 0.7751, 0.2503]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.9680, 0.6265, 0.9723, ..., 0.1304, 0.1284, 0.7215]) +tensor([0.0331, 0.9093, 0.3340, ..., 0.9159, 0.0637, 0.5700]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 5.0935986042022705 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 6322 -ss 50000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.657772779464722} +Time: 10.511497974395752 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 124992, 124996, +tensor(crow_indices=tensor([ 0, 4, 8, ..., 124988, 124994, 125000]), - col_indices=tensor([41720, 5446, 23409, ..., 23991, 37197, 42632]), - values=tensor([0.7857, 0.2010, 0.0929, ..., 0.8446, 0.3352, 0.3559]), + col_indices=tensor([ 1782, 22893, 30856, ..., 16042, 33293, 42752]), + values=tensor([0.7242, 0.8386, 0.5964, ..., 0.7254, 0.7751, 0.2503]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.5851, 0.1828, 0.1733, ..., 0.7326, 0.4663, 0.8685]) +tensor([0.0331, 0.9093, 0.3340, ..., 0.9159, 0.0637, 0.5700]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,30 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.657772779464722 seconds +Time: 10.511497974395752 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 124992, 124996, - 125000]), - col_indices=tensor([41720, 5446, 23409, ..., 23991, 37197, 42632]), - values=tensor([0.7857, 0.2010, 0.0929, ..., 0.8446, 0.3352, 0.3559]), - size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.5851, 0.1828, 0.1733, ..., 0.7326, 0.4663, 0.8685]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 125000 -Density: 5e-05 -Time: 10.657772779464722 seconds - -[20.44, 20.32, 20.44, 20.56, 20.76, 20.84, 20.88, 21.04, 21.08, 21.08] -[21.08, 20.88, 20.92, 24.6, 25.64, 31.32, 35.8, 37.92, 41.28, 43.2, 43.12, 43.4, 43.48] -13.586755752563477 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 6322, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.657772779464722, 'TIME_S_1KI': 1.68582296416715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 425.8721401214599, 'W': 31.344652680689325} -[20.44, 20.32, 20.44, 20.56, 20.76, 20.84, 20.88, 21.04, 21.08, 21.08, 20.84, 20.88, 20.84, 20.72, 20.96, 20.96, 21.08, 21.16, 21.2, 21.04] -375.42 -18.771 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 6322, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.657772779464722, 'TIME_S_1KI': 1.68582296416715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 425.8721401214599, 'W': 31.344652680689325, 'J_1KI': 67.3635147297469, 'W_1KI': 4.958027946961298, 'W_D': 12.573652680689325, 'J_D': 170.83514789009087, 'W_D_1KI': 1.9888726163697126, 'J_D_1KI': 0.31459547870447846} +[20.36, 20.36, 20.4, 20.12, 20.08, 20.2, 20.32, 20.4, 20.72, 20.6] +[20.6, 20.8, 20.68, 23.76, 24.96, 29.8, 35.08, 39.36, 41.64, 44.0, 44.2, 44.36, 43.48] +13.600403785705566 +{'CPU': 'Altra', 
'CORES': 16, 'ITERATIONS': 6116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.511497974395752, 'TIME_S_1KI': 1.7186883542177487, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 426.78555660247804, 'W': 31.380359239851575} +[20.36, 20.36, 20.4, 20.12, 20.08, 20.2, 20.32, 20.4, 20.72, 20.6, 20.68, 20.68, 20.56, 20.64, 20.6, 20.52, 20.52, 20.4, 20.44, 20.24] +367.90000000000003 +18.395000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 6116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.511497974395752, 'TIME_S_1KI': 1.7186883542177487, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 426.78555660247804, 'W': 31.380359239851575, 'J_1KI': 69.78181108608209, 'W_1KI': 5.130863185064025, 'W_D': 12.985359239851572, 'J_D': 176.60612896442407, 'W_D_1KI': 2.1231784237821407, 'J_D_1KI': 0.3471514754385449} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.json index 3609346..9b01442 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 98325, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.519834756851196, "TIME_S_1KI": 0.10699043739487614, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 316.21768825531, "W": 22.200046075478067, "J_1KI": 3.216045647142741, "W_1KI": 0.22578231452304162, "W_D": 3.72304607547807, "J_D": 53.03110719919203, "W_D_1KI": 0.03786469438574187, "J_D_1KI": 0.0003850973240350051} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 94617, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.157525777816772, "TIME_S_1KI": 0.10735413062997952, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 323.48465412139893, "W": 22.672925840081955, "J_1KI": 3.4188851276345575, "W_1KI": 0.23962845831174057, "W_D": 4.139925840081958, "J_D": 59.066151757955616, "W_D_1KI": 0.043754566727775744, "J_D_1KI": 0.000462438744916619} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.output index 555507d..924f8be 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.018892765045166016} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 
5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.01822948455810547} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), - col_indices=tensor([3456, 1605, 749, ..., 2516, 837, 4620]), - values=tensor([0.8429, 0.4221, 0.2092, ..., 0.3256, 0.3578, 0.9398]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([3941, 541, 1824, ..., 314, 1337, 2093]), + values=tensor([0.8128, 0.3162, 0.2437, ..., 0.1898, 0.9919, 0.9815]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.1595, 0.2560, 0.8545, ..., 0.4673, 0.4412, 0.6412]) +tensor([0.0259, 0.2275, 0.6989, ..., 0.5897, 0.4789, 0.5857]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,38 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.018892765045166016 seconds +Time: 0.01822948455810547 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 55576 -ss 5000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.934850692749023} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 57598 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.391798257827759} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 2499, 2500, 2500]), - col_indices=tensor([2304, 3497, 2599, ..., 3517, 2336, 3180]), - values=tensor([7.9793e-01, 1.3489e-04, 7.1193e-01, ..., - 7.4115e-01, 8.0632e-01, 9.8789e-03]), size=(5000, 5000), - nnz=2500, layout=torch.sparse_csr) -tensor([0.4232, 0.5545, 0.0889, ..., 0.2237, 0.6245, 0.5041]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 2500 -Density: 0.0001 -Time: 5.934850692749023 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 98325 -ss 5000 -sd 0.0001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.519834756851196} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), - col_indices=tensor([ 417, 1523, 4116, ..., 1599, 2107, 3220]), - values=tensor([0.7284, 0.4903, 0.1270, ..., 0.3684, 0.2323, 0.2388]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([2397, 3853, 4072, ..., 1513, 2142, 2593]), + values=tensor([0.3690, 0.1022, 0.2006, ..., 0.2544, 0.7570, 0.9378]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.8570, 0.2399, 0.2271, ..., 0.1785, 0.2270, 0.3588]) +tensor([0.3198, 0.5308, 0.2405, ..., 0.6744, 0.2635, 0.8401]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -54,15 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.519834756851196 seconds +Time: 6.391798257827759 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 94617 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.157525777816772} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), - col_indices=tensor([ 417, 1523, 4116, ..., 1599, 2107, 3220]), - values=tensor([0.7284, 0.4903, 0.1270, ..., 0.3684, 0.2323, 0.2388]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([ 413, 1888, 3337, ..., 2077, 3725, 374]), + values=tensor([0.2969, 0.4624, 0.7154, ..., 0.0630, 0.2523, 0.5001]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.8570, 0.2399, 0.2271, ..., 0.1785, 0.2270, 0.3588]) +tensor([0.4946, 0.1451, 0.8446, ..., 0.8105, 0.4300, 0.6554]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -70,13 +53,29 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.519834756851196 seconds +Time: 10.157525777816772 seconds -[20.24, 20.24, 20.16, 20.08, 20.32, 20.6, 20.6, 20.56, 20.52, 20.6] -[20.68, 20.8, 21.12, 22.88, 24.6, 25.72, 26.32, 26.12, 25.12, 25.12, 23.36, 23.52, 23.52, 23.76] -14.24401044845581 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 98325, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.519834756851196, 'TIME_S_1KI': 0.10699043739487614, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.21768825531, 'W': 22.200046075478067} -[20.24, 20.24, 20.16, 20.08, 20.32, 20.6, 20.6, 20.56, 20.52, 20.6, 20.68, 20.68, 20.72, 20.52, 20.44, 20.56, 20.8, 20.84, 20.76, 20.76] -369.53999999999996 -18.476999999999997 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 98325, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.519834756851196, 'TIME_S_1KI': 0.10699043739487614, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.21768825531, 'W': 22.200046075478067, 'J_1KI': 3.216045647142741, 'W_1KI': 0.22578231452304162, 'W_D': 3.72304607547807, 'J_D': 53.03110719919203, 'W_D_1KI': 0.03786469438574187, 'J_D_1KI': 0.0003850973240350051} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([ 413, 1888, 3337, ..., 2077, 3725, 374]), + values=tensor([0.2969, 0.4624, 0.7154, ..., 0.0630, 0.2523, 0.5001]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.4946, 0.1451, 0.8446, ..., 0.8105, 0.4300, 0.6554]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.157525777816772 seconds + +[20.24, 20.04, 20.12, 20.56, 20.56, 20.32, 20.44, 20.84, 20.8, 20.48] +[20.56, 20.44, 20.48, 24.24, 26.4, 28.08, 28.6, 25.84, 25.84, 25.64, 23.24, 23.28, 23.32, 23.2] +14.267441987991333 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 94617, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.157525777816772, 'TIME_S_1KI': 0.10735413062997952, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 323.48465412139893, 'W': 22.672925840081955} +[20.24, 20.04, 20.12, 20.56, 20.56, 20.32, 20.44, 20.84, 20.8, 20.48, 20.96, 20.96, 20.92, 20.56, 20.64, 20.64, 20.76, 20.68, 20.68, 20.6] +370.65999999999997 +18.532999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 94617, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.157525777816772, 'TIME_S_1KI': 0.10735413062997952, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 323.48465412139893, 'W': 22.672925840081955, 'J_1KI': 3.4188851276345575, 'W_1KI': 0.23962845831174057, 'W_D': 4.139925840081958, 'J_D': 59.066151757955616, 'W_D_1KI': 0.043754566727775744, 'J_D_1KI': 0.000462438744916619} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.json index 2fa1f21..3aa279b 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 17780, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.605318784713745, "TIME_S_1KI": 0.5964746223123591, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 313.57909806251524, "W": 22.019197605482734, "J_1KI": 17.636619688555413, "W_1KI": 1.2384250621756319, "W_D": 3.4661976054827335, "J_D": 49.36270332407948, "W_D_1KI": 0.19494924665257218, "J_D_1KI": 0.010964524558637355} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 17204, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.812280178070068, "TIME_S_1KI": 0.6284747836590368, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 298.97844259262087, "W": 22.60552041406754, "J_1KI": 17.378426098152804, "W_1KI": 1.313968868522875, "W_D": 4.147520414067543, "J_D": 54.85470678424838, "W_D_1KI": 0.24107884294742754, "J_D_1KI": 0.014012952972996253} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.output 
index 8d3b43b..66b8fcd 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06766819953918457} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.09629178047180176} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 15, ..., 24992, 24996, 25000]), - col_indices=tensor([ 734, 800, 1880, ..., 3125, 3280, 3794]), - values=tensor([0.0540, 0.4911, 0.3592, ..., 0.2590, 0.5736, 0.3057]), +tensor(crow_indices=tensor([ 0, 4, 13, ..., 24984, 24991, 25000]), + col_indices=tensor([ 906, 2302, 2949, ..., 4312, 4401, 4762]), + values=tensor([0.3111, 0.1904, 0.6965, ..., 0.8141, 0.8187, 0.4745]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9823, 0.9343, 0.9377, ..., 0.0786, 0.0908, 0.1511]) +tensor([0.5510, 0.4831, 0.5270, ..., 0.8910, 0.7950, 0.2815]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.06766819953918457 seconds +Time: 0.09629178047180176 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 15516 -ss 5000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.162637948989868} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10904 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 6.654679298400879} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 11, ..., 24988, 24995, 25000]), - col_indices=tensor([ 62, 227, 575, ..., 2337, 2631, 3700]), - values=tensor([0.5265, 0.4146, 0.5026, ..., 0.0706, 0.1241, 0.5991]), +tensor(crow_indices=tensor([ 0, 10, 18, ..., 24990, 24995, 25000]), + col_indices=tensor([ 952, 1438, 2146, ..., 2644, 3063, 3989]), + values=tensor([0.1223, 0.6009, 0.2905, ..., 0.8222, 0.7018, 0.0633]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6610, 0.4053, 0.0257, ..., 0.7779, 0.2973, 0.6422]) +tensor([0.1062, 0.0133, 0.7952, ..., 0.7261, 0.0237, 0.2753]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 9.162637948989868 seconds +Time: 6.654679298400879 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17780 -ss 5000 -sd 0.001 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.605318784713745} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17204 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.812280178070068} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 8, ..., 24994, 24997, 25000]), - col_indices=tensor([ 423, 1662, 2124, ..., 288, 1379, 2658]), - values=tensor([0.1096, 0.1453, 0.3978, ..., 0.4089, 0.5724, 0.6122]), +tensor(crow_indices=tensor([ 0, 5, 9, ..., 24988, 24995, 25000]), + col_indices=tensor([ 489, 693, 1351, ..., 1645, 2689, 3021]), + values=tensor([0.1026, 0.4246, 0.1164, ..., 0.2742, 0.6330, 0.5157]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.2174, 0.6127, 0.5782, ..., 0.6057, 0.7055, 0.7233]) +tensor([0.3786, 0.4087, 0.7676, ..., 0.4045, 0.0617, 0.3244]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.605318784713745 seconds +Time: 10.812280178070068 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 8, ..., 24994, 24997, 25000]), - col_indices=tensor([ 423, 1662, 2124, ..., 288, 1379, 2658]), - values=tensor([0.1096, 0.1453, 0.3978, ..., 0.4089, 0.5724, 0.6122]), +tensor(crow_indices=tensor([ 0, 5, 9, ..., 24988, 24995, 25000]), + col_indices=tensor([ 489, 693, 1351, ..., 1645, 2689, 3021]), + values=tensor([0.1026, 0.4246, 0.1164, ..., 0.2742, 0.6330, 0.5157]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.2174, 0.6127, 0.5782, ..., 0.6057, 0.7055, 0.7233]) +tensor([0.3786, 0.4087, 0.7676, ..., 0.4045, 0.0617, 0.3244]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.605318784713745 seconds +Time: 10.812280178070068 seconds -[20.64, 20.56, 20.28, 20.4, 20.28, 20.28, 20.36, 20.44, 20.4, 20.4] -[20.64, 20.56, 20.76, 22.16, 23.52, 25.36, 26.08, 26.2, 25.48, 23.84, 23.92, 23.84, 23.84, 23.88] -14.24116826057434 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 17780, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.605318784713745, 'TIME_S_1KI': 0.5964746223123591, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 313.57909806251524, 'W': 22.019197605482734} -[20.64, 20.56, 20.28, 20.4, 20.28, 20.28, 20.36, 20.44, 20.4, 20.4, 20.84, 20.68, 20.92, 20.96, 20.88, 20.88, 21.0, 20.8, 20.68, 20.64] -371.06 -18.553 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 17780, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.605318784713745, 'TIME_S_1KI': 0.5964746223123591, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 313.57909806251524, 'W': 22.019197605482734, 'J_1KI': 17.636619688555413, 'W_1KI': 1.2384250621756319, 'W_D': 3.4661976054827335, 'J_D': 49.36270332407948, 'W_D_1KI': 0.19494924665257218, 'J_D_1KI': 0.010964524558637355} +[20.44, 20.32, 20.44, 20.32, 20.32, 20.32, 20.44, 20.52, 20.44, 20.56] +[20.88, 20.92, 21.12, 24.88, 27.0, 27.52, 27.92, 25.04, 25.04, 24.88, 23.4, 23.6, 23.72] +13.225903987884521 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 17204, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.812280178070068, 'TIME_S_1KI': 0.6284747836590368, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.97844259262087, 'W': 22.60552041406754} +[20.44, 20.32, 20.44, 20.32, 20.32, 20.32, 20.44, 20.52, 20.44, 20.56, 20.24, 20.48, 20.56, 20.44, 20.64, 20.76, 20.72, 20.56, 20.84, 20.84] +369.15999999999997 +18.458 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 17204, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.812280178070068, 'TIME_S_1KI': 0.6284747836590368, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.97844259262087, 'W': 22.60552041406754, 'J_1KI': 17.378426098152804, 'W_1KI': 1.313968868522875, 'W_D': 4.147520414067543, 'J_D': 54.85470678424838, 'W_D_1KI': 0.24107884294742754, 'J_D_1KI': 0.014012952972996253} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.json 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.json index 84660f7..e540484 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1921, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.388477563858032, "TIME_S_1KI": 5.407848809920892, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 312.91259566307076, "W": 21.966357313024826, "J_1KI": 162.89047145396708, "W_1KI": 11.434855446655297, "W_D": 3.429357313024827, "J_D": 48.85148151707659, "W_D_1KI": 1.785193812089967, "J_D_1KI": 0.9293044310723411} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1947, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.529423713684082, "TIME_S_1KI": 5.408024506257875, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 315.91934822082516, "W": 22.150260863076344, "J_1KI": 162.2595522449025, "W_1KI": 11.376610612776755, "W_D": 3.727260863076342, "J_D": 53.1602688469886, "W_D_1KI": 1.9143609979847676, "J_D_1KI": 0.9832362598791822} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.output index 1a4afe9..7d819b0 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.01 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.593717098236084} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.5951905250549316} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 39, 88, ..., 249897, 249951, +tensor(crow_indices=tensor([ 0, 65, 114, ..., 249905, 249943, 250000]), - col_indices=tensor([ 1, 41, 120, ..., 4868, 4902, 4963]), - values=tensor([0.6487, 0.6379, 0.3189, ..., 0.3941, 0.1960, 0.9453]), + col_indices=tensor([ 243, 360, 569, ..., 4673, 4859, 4971]), + values=tensor([0.9327, 0.0572, 0.1694, ..., 0.7766, 0.8178, 0.1277]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.9493, 0.7713, 0.4212, ..., 0.5345, 0.1694, 0.1229]) +tensor([0.3631, 0.7053, 0.5115, ..., 0.9329, 0.3771, 0.9303]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.593717098236084 seconds +Time: 0.5951905250549316 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1768 -ss 5000 -sd 0.01 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.65909719467163} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1764 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.50925087928772} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 53, 105, ..., 249907, 249948, +tensor(crow_indices=tensor([ 0, 39, 87, ..., 249897, 249952, 250000]), - col_indices=tensor([ 103, 261, 471, ..., 4857, 4933, 4959]), - values=tensor([0.8889, 0.3073, 0.1638, ..., 0.6109, 0.3049, 0.0052]), + col_indices=tensor([ 15, 176, 253, ..., 4823, 4923, 4997]), + values=tensor([0.4394, 0.7426, 0.7011, ..., 0.9433, 0.0282, 0.2442]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5335, 0.0728, 0.9615, ..., 0.8926, 0.1348, 0.8188]) +tensor([0.3323, 0.0904, 0.5554, ..., 0.0808, 0.6293, 0.6246]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 9.65909719467163 seconds +Time: 9.50925087928772 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1921 -ss 5000 -sd 0.01 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.388477563858032} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1947 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.529423713684082} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 65, 98, ..., 249897, 249948, +tensor(crow_indices=tensor([ 0, 42, 84, ..., 249908, 249963, 250000]), - col_indices=tensor([ 141, 179, 219, ..., 4719, 4923, 4985]), - values=tensor([0.6589, 0.9882, 0.9555, ..., 0.3007, 0.0365, 0.3378]), + col_indices=tensor([ 42, 271, 489, ..., 4615, 4714, 4994]), + values=tensor([0.3823, 0.0445, 0.0088, ..., 0.5571, 0.6706, 0.4440]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.9859, 0.3282, 0.7924, ..., 0.6550, 0.5905, 0.4141]) +tensor([0.5611, 0.2970, 0.3578, ..., 0.8362, 0.6526, 0.5800]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.388477563858032 seconds +Time: 10.529423713684082 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 65, 98, ..., 249897, 249948, +tensor(crow_indices=tensor([ 0, 42, 84, ..., 249908, 249963, 250000]), - col_indices=tensor([ 141, 179, 219, ..., 4719, 4923, 4985]), - values=tensor([0.6589, 0.9882, 0.9555, ..., 0.3007, 0.0365, 0.3378]), + col_indices=tensor([ 42, 271, 489, ..., 4615, 4714, 4994]), + values=tensor([0.3823, 0.0445, 0.0088, ..., 0.5571, 0.6706, 0.4440]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.9859, 0.3282, 0.7924, ..., 0.6550, 0.5905, 0.4141]) +tensor([0.5611, 0.2970, 0.3578, ..., 0.8362, 0.6526, 0.5800]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.388477563858032 seconds +Time: 10.529423713684082 seconds -[21.12, 20.84, 20.6, 20.44, 20.44, 20.44, 20.52, 20.52, 20.6, 20.96] -[20.8, 20.8, 20.76, 21.68, 22.28, 24.84, 25.84, 26.2, 25.88, 24.96, 23.84, 23.92, 23.72, 23.88] -14.245083570480347 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1921, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.388477563858032, 'TIME_S_1KI': 5.407848809920892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.91259566307076, 'W': 21.966357313024826} -[21.12, 20.84, 20.6, 20.44, 20.44, 20.44, 20.52, 20.52, 20.6, 20.96, 20.44, 20.44, 20.44, 20.48, 20.56, 20.68, 20.68, 20.56, 20.84, 20.8] -370.74 -18.537 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1921, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.388477563858032, 'TIME_S_1KI': 5.407848809920892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.91259566307076, 'W': 21.966357313024826, 'J_1KI': 162.89047145396708, 'W_1KI': 11.434855446655297, 'W_D': 3.429357313024827, 'J_D': 48.85148151707659, 'W_D_1KI': 1.785193812089967, 'J_D_1KI': 0.9293044310723411} +[20.36, 20.36, 20.48, 20.6, 20.24, 20.4, 20.28, 20.28, 20.44, 20.48] +[20.68, 20.84, 20.84, 22.12, 23.2, 25.6, 26.2, 26.4, 25.76, 24.84, 23.88, 23.88, 23.84, 23.84] +14.262556552886963 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1947, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.529423713684082, 'TIME_S_1KI': 5.408024506257875, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.91934822082516, 'W': 22.150260863076344} +[20.36, 20.36, 20.48, 20.6, 20.24, 20.4, 20.28, 20.28, 20.44, 20.48, 20.52, 20.48, 20.48, 20.68, 20.72, 20.68, 20.72, 20.36, 20.44, 20.28] +368.46000000000004 +18.423000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1947, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.529423713684082, 'TIME_S_1KI': 5.408024506257875, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.91934822082516, 'W': 22.150260863076344, 'J_1KI': 162.2595522449025, 'W_1KI': 11.376610612776755, 'W_D': 3.727260863076342, 'J_D': 53.1602688469886, 'W_D_1KI': 1.9143609979847676, 'J_D_1KI': 0.9832362598791822} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.json 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.json index 50afdc0..21e4ae9 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 396, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.481398582458496, "TIME_S_1KI": 26.468178238531554, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 323.34643746376037, "W": 22.685982650996596, "J_1KI": 816.5314077367685, "W_1KI": 57.28783497726413, "W_D": 4.214982650996596, "J_D": 60.07672866272925, "W_D_1KI": 10.643895583324738, "J_D_1KI": 26.878524200314995} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 389, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.3775634765625, "TIME_S_1KI": 26.677541070854755, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 318.5190047359467, "W": 22.32677308088455, "J_1KI": 818.814922200377, "W_1KI": 57.39530354983175, "W_D": 3.7537730808845495, "J_D": 53.55221112322808, "W_D_1KI": 9.649802264484704, "J_D_1KI": 24.806689625924687} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.output index 07e41de..4a32e24 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.6507530212402344} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.6972105503082275} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 267, 531, ..., 1249531, - 1249748, 1250000]), - col_indices=tensor([ 12, 24, 45, ..., 4958, 4983, 4986]), - values=tensor([0.7384, 0.2434, 0.0755, ..., 0.4736, 0.1384, 0.4678]), +tensor(crow_indices=tensor([ 0, 225, 478, ..., 1249494, + 1249751, 1250000]), + col_indices=tensor([ 16, 28, 30, ..., 4980, 4993, 4996]), + values=tensor([0.5189, 0.5626, 0.5403, ..., 0.3379, 0.7660, 0.2903]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.2921, 0.5624, 0.4015, ..., 0.8005, 0.9400, 0.6114]) +tensor([0.5904, 0.1736, 0.4962, ..., 0.6657, 0.7668, 0.4097]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 2.6507530212402344 seconds +Time: 2.6972105503082275 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 396 -ss 5000 -sd 0.05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.481398582458496} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 389 -ss 5000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.3775634765625} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 247, 505, ..., 1249488, - 1249757, 1250000]), - col_indices=tensor([ 27, 35, 41, ..., 4930, 4938, 4952]), - values=tensor([0.8294, 0.9821, 0.6691, ..., 0.3905, 0.4873, 0.1672]), +tensor(crow_indices=tensor([ 0, 232, 472, ..., 1249492, + 1249758, 1250000]), + col_indices=tensor([ 16, 29, 37, ..., 4970, 4975, 4986]), + values=tensor([0.2042, 0.0894, 0.5998, ..., 0.8932, 0.3489, 0.1528]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.8352, 0.4457, 0.1150, ..., 0.9988, 0.2164, 0.9018]) +tensor([0.6726, 0.9286, 0.4083, ..., 0.5149, 0.9750, 0.2214]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.481398582458496 seconds +Time: 10.3775634765625 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 247, 505, ..., 1249488, - 1249757, 1250000]), - col_indices=tensor([ 27, 35, 41, ..., 4930, 4938, 4952]), - values=tensor([0.8294, 0.9821, 0.6691, ..., 0.3905, 0.4873, 0.1672]), +tensor(crow_indices=tensor([ 0, 232, 472, ..., 1249492, + 1249758, 1250000]), + col_indices=tensor([ 16, 29, 37, ..., 4970, 4975, 4986]), + values=tensor([0.2042, 0.0894, 0.5998, ..., 0.8932, 0.3489, 0.1528]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.8352, 0.4457, 0.1150, ..., 0.9988, 0.2164, 0.9018]) +tensor([0.6726, 0.9286, 0.4083, ..., 0.5149, 0.9750, 0.2214]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.481398582458496 seconds +Time: 10.3775634765625 seconds -[20.72, 20.84, 20.92, 20.68, 20.6, 20.44, 20.48, 20.24, 20.44, 20.24] -[20.24, 20.52, 20.72, 22.36, 24.28, 26.72, 27.0, 27.56, 26.28, 25.88, 24.68, 24.52, 24.36, 24.36] -14.253137826919556 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 396, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.481398582458496, 'TIME_S_1KI': 26.468178238531554, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 323.34643746376037, 'W': 22.685982650996596} -[20.72, 20.84, 20.92, 20.68, 20.6, 20.44, 20.48, 20.24, 20.44, 20.24, 20.24, 20.24, 20.36, 20.48, 20.48, 20.6, 20.68, 20.64, 20.4, 20.6] -369.42 -18.471 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 396, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.481398582458496, 'TIME_S_1KI': 26.468178238531554, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 323.34643746376037, 'W': 22.685982650996596, 'J_1KI': 816.5314077367685, 'W_1KI': 57.28783497726413, 'W_D': 4.214982650996596, 'J_D': 60.07672866272925, 'W_D_1KI': 10.643895583324738, 'J_D_1KI': 26.878524200314995} +[20.68, 20.56, 20.56, 20.68, 20.88, 20.88, 21.08, 21.08, 20.76, 20.64] +[20.64, 20.48, 20.48, 21.84, 22.92, 26.08, 26.72, 27.12, 26.56, 25.64, 24.0, 23.96, 24.04, 23.96] +14.266235589981079 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 389, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.3775634765625, 'TIME_S_1KI': 26.677541070854755, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 318.5190047359467, 'W': 22.32677308088455} +[20.68, 20.56, 20.56, 20.68, 20.88, 20.88, 21.08, 21.08, 20.76, 20.64, 20.12, 20.08, 20.2, 20.2, 20.28, 20.64, 20.8, 20.8, 20.88, 20.76] +371.46000000000004 +18.573 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 389, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.3775634765625, 'TIME_S_1KI': 26.677541070854755, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 318.5190047359467, 'W': 22.32677308088455, 'J_1KI': 818.814922200377, 'W_1KI': 57.39530354983175, 'W_D': 3.7537730808845495, 'J_D': 53.55221112322808, 'W_D_1KI': 9.649802264484704, 'J_D_1KI': 24.806689625924687} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.json 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.json index 721e6aa..6d6710b 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 199, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.477930784225464, "TIME_S_1KI": 52.6529185136958, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 366.7794834327698, "W": 23.985504753820422, "J_1KI": 1843.11298207422, "W_1KI": 120.53017464231368, "W_D": 5.225504753820424, "J_D": 79.90692520141607, "W_D_1KI": 26.25881785839409, "J_D_1KI": 131.95385858489493} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 197, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.158027410507202, "TIME_S_1KI": 56.639733048259906, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 389.69751098632815, "W": 25.31289781132327, "J_1KI": 1978.1599542453205, "W_1KI": 128.49186706255466, "W_D": 4.1918978113232725, "J_D": 64.53516920733456, "W_D_1KI": 21.278669093011537, "J_D_1KI": 108.01354869549004} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.output index a6a4a54..943df46 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.1 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.267488241195679} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.311717987060547} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 489, 972, ..., 2499002, - 2499515, 2500000]), - col_indices=tensor([ 0, 4, 21, ..., 4965, 4988, 4998]), - values=tensor([0.4985, 0.2439, 0.0801, ..., 0.3726, 0.6532, 0.2308]), +tensor(crow_indices=tensor([ 0, 531, 990, ..., 2498988, + 2499517, 2500000]), + col_indices=tensor([ 7, 8, 15, ..., 4991, 4995, 4997]), + values=tensor([0.8258, 0.3761, 0.6668, ..., 0.9013, 0.8515, 0.3086]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7620, 0.1310, 0.6898, ..., 0.4324, 0.6267, 0.4614]) +tensor([0.4193, 0.7332, 0.2050, ..., 0.9758, 0.2089, 0.8589]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 5.267488241195679 seconds +Time: 5.311717987060547 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 199 -ss 5000 -sd 0.1 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.477930784225464} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 197 -ss 5000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.158027410507202} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 479, 980, ..., 2498983, - 2499479, 2500000]), - col_indices=tensor([ 7, 13, 23, ..., 4987, 4988, 4998]), - values=tensor([0.4519, 0.3203, 0.6830, ..., 0.2361, 0.6866, 0.7928]), +tensor(crow_indices=tensor([ 0, 464, 986, ..., 2499032, + 2499496, 2500000]), + col_indices=tensor([ 8, 39, 48, ..., 4964, 4987, 4997]), + values=tensor([0.1348, 0.8243, 0.3544, ..., 0.8528, 0.5821, 0.2231]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4502, 0.7188, 0.8112, ..., 0.2797, 0.2285, 0.9848]) +tensor([0.5491, 0.2706, 0.0646, ..., 0.7967, 0.8767, 0.7030]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.477930784225464 seconds +Time: 11.158027410507202 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 479, 980, ..., 2498983, - 2499479, 2500000]), - col_indices=tensor([ 7, 13, 23, ..., 4987, 4988, 4998]), - values=tensor([0.4519, 0.3203, 0.6830, ..., 0.2361, 0.6866, 0.7928]), +tensor(crow_indices=tensor([ 0, 464, 986, ..., 2499032, + 2499496, 2500000]), + col_indices=tensor([ 8, 39, 48, ..., 4964, 4987, 4997]), + values=tensor([0.1348, 0.8243, 0.3544, ..., 0.8528, 0.5821, 0.2231]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4502, 0.7188, 0.8112, ..., 0.2797, 0.2285, 0.9848]) +tensor([0.5491, 0.2706, 0.0646, ..., 0.7967, 0.8767, 0.7030]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.477930784225464 seconds +Time: 11.158027410507202 seconds -[20.56, 20.84, 21.36, 21.88, 21.64, 21.6, 21.4, 20.76, 20.76, 20.72] -[21.16, 21.2, 21.4, 25.96, 27.32, 30.6, 31.24, 28.68, 27.4, 26.08, 24.2, 24.2, 24.4, 24.36, 24.2] -15.291714191436768 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 199, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.477930784225464, 'TIME_S_1KI': 52.6529185136958, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 366.7794834327698, 'W': 23.985504753820422} -[20.56, 20.84, 21.36, 21.88, 21.64, 21.6, 21.4, 20.76, 20.76, 20.72, 20.44, 20.32, 20.44, 20.64, 20.64, 20.44, 20.4, 20.36, 20.44, 20.84] -375.2 -18.759999999999998 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 199, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.477930784225464, 'TIME_S_1KI': 52.6529185136958, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 366.7794834327698, 'W': 23.985504753820422, 'J_1KI': 1843.11298207422, 'W_1KI': 120.53017464231368, 'W_D': 5.225504753820424, 'J_D': 79.90692520141607, 'W_D_1KI': 26.25881785839409, 'J_D_1KI': 131.95385858489493} +[20.6, 20.28, 20.4, 20.48, 20.32, 21.48, 22.08, 23.12, 23.12, 23.84] +[23.96, 23.84, 24.08, 24.8, 25.88, 28.92, 30.16, 31.32, 32.12, 30.04, 27.68, 26.28, 24.84, 24.84, 25.6] +15.395215272903442 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.158027410507202, 'TIME_S_1KI': 56.639733048259906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 389.69751098632815, 'W': 25.31289781132327} +[20.6, 20.28, 20.4, 20.48, 20.32, 21.48, 22.08, 23.12, 23.12, 23.84, 25.76, 25.88, 25.56, 26.0, 25.76, 25.48, 25.44, 25.04, 24.68, 24.4] +422.41999999999996 +21.121 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.158027410507202, 'TIME_S_1KI': 56.639733048259906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 389.69751098632815, 'W': 25.31289781132327, 'J_1KI': 1978.1599542453205, 'W_1KI': 128.49186706255466, 'W_D': 4.1918978113232725, 'J_D': 64.53516920733456, 'W_D_1KI': 21.278669093011537, 'J_D_1KI': 108.01354869549004} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.2.json 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.2.json index 328683d..fe8e28f 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.2.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.774195671081543, "TIME_S_1KI": 107.74195671081543, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 357.5530287361145, "W": 23.422029760012947, "J_1KI": 3575.5302873611454, "W_1KI": 234.22029760012947, "W_D": 5.2480297600129475, "J_D": 80.11470204830175, "W_D_1KI": 52.480297600129475, "J_D_1KI": 524.8029760012947} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.820615768432617, "TIME_S_1KI": 108.20615768432617, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 383.52341383934026, "W": 23.480020540098028, "J_1KI": 3835.2341383934026, "W_1KI": 234.80020540098027, "W_D": 4.937020540098029, "J_D": 80.64145295357712, "W_D_1KI": 49.37020540098029, "J_D_1KI": 493.7020540098029} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.2.output index 4419f1e..77a0a49 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.2 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.774195671081543} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.820615768432617} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1044, 1986, ..., 4998023, - 4998990, 5000000]), - col_indices=tensor([ 2, 11, 17, ..., 4984, 4985, 4991]), - values=tensor([0.4872, 0.8747, 0.2341, ..., 0.7866, 0.4499, 0.5164]), +tensor(crow_indices=tensor([ 0, 975, 1962, ..., 4998021, + 4999006, 5000000]), + col_indices=tensor([ 7, 9, 14, ..., 4985, 4989, 4996]), + values=tensor([0.5148, 0.0164, 0.4922, ..., 0.3749, 0.4144, 0.9350]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5529, 0.0016, 0.5040, ..., 0.3915, 0.6771, 0.4202]) +tensor([0.3739, 0.8269, 0.8989, ..., 0.5457, 0.9173, 0.8641]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.774195671081543 seconds +Time: 10.820615768432617 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1044, 1986, ..., 4998023, - 4998990, 5000000]), - col_indices=tensor([ 2, 11, 17, ..., 4984, 4985, 4991]), - values=tensor([0.4872, 0.8747, 0.2341, ..., 0.7866, 0.4499, 0.5164]), +tensor(crow_indices=tensor([ 0, 975, 1962, ..., 4998021, + 4999006, 5000000]), + col_indices=tensor([ 7, 9, 14, ..., 4985, 4989, 4996]), + values=tensor([0.5148, 0.0164, 0.4922, ..., 0.3749, 0.4144, 0.9350]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.5529, 0.0016, 0.5040, ..., 0.3915, 0.6771, 0.4202]) +tensor([0.3739, 0.8269, 0.8989, ..., 0.5457, 0.9173, 0.8641]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.774195671081543 seconds +Time: 10.820615768432617 seconds -[20.36, 20.12, 20.12, 20.04, 20.28, 20.12, 20.08, 20.4, 20.4, 20.72] -[20.8, 21.08, 22.4, 23.32, 25.48, 25.48, 27.96, 29.08, 28.16, 27.04, 25.64, 24.2, 24.2, 24.4, 24.44] -15.265672206878662 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.774195671081543, 'TIME_S_1KI': 107.74195671081543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.5530287361145, 'W': 23.422029760012947} -[20.36, 20.12, 20.12, 20.04, 20.28, 20.12, 20.08, 20.4, 20.4, 20.72, 20.32, 20.16, 20.04, 20.08, 20.08, 20.0, 20.16, 20.28, 20.28, 20.28] -363.48 -18.174 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.774195671081543, 'TIME_S_1KI': 107.74195671081543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.5530287361145, 'W': 23.422029760012947, 'J_1KI': 3575.5302873611454, 'W_1KI': 234.22029760012947, 'W_D': 5.2480297600129475, 'J_D': 80.11470204830175, 'W_D_1KI': 52.480297600129475, 'J_D_1KI': 524.8029760012947} +[20.4, 20.56, 20.56, 20.6, 20.44, 20.44, 20.44, 20.8, 21.0, 20.96] +[21.0, 20.84, 20.88, 24.32, 26.08, 27.96, 29.8, 27.12, 27.16, 26.32, 26.32, 24.16, 
24.08, 24.0, 24.0, 23.96] +16.3340322971344 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.820615768432617, 'TIME_S_1KI': 108.20615768432617, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 383.52341383934026, 'W': 23.480020540098028} +[20.4, 20.56, 20.56, 20.6, 20.44, 20.44, 20.44, 20.8, 21.0, 20.96, 20.72, 20.52, 20.48, 20.56, 20.52, 20.68, 20.88, 20.56, 20.56, 20.44] +370.86 +18.543 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.820615768432617, 'TIME_S_1KI': 108.20615768432617, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 383.52341383934026, 'W': 23.480020540098028, 'J_1KI': 3835.2341383934026, 'W_1KI': 234.80020540098027, 'W_D': 4.937020540098029, 'J_D': 80.64145295357712, 'W_D_1KI': 49.37020540098029, 'J_D_1KI': 493.7020540098029} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.3.json index 1787dbd..d8da17f 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.3.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.919427633285522, "TIME_S_1KI": 159.19427633285522, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 506.52752479553226, "W": 23.776826253573663, "J_1KI": 5065.275247955323, "W_1KI": 237.76826253573662, "W_D": 5.445826253573667, "J_D": 116.01468014574058, "W_D_1KI": 54.45826253573667, "J_D_1KI": 544.5826253573666} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 16.126448392868042, "TIME_S_1KI": 161.26448392868042, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 511.86830524444576, "W": 23.949620791408254, "J_1KI": 5118.683052444458, "W_1KI": 239.49620791408256, "W_D": 5.571620791408254, "J_D": 119.08063667488092, "W_D_1KI": 55.71620791408254, "J_D_1KI": 557.1620791408254} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.3.output index 2f8bfb8..199b724 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.3 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.919427633285522} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 
25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 16.126448392868042} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1464, 2929, ..., 7497018, - 7498512, 7500000]), - col_indices=tensor([ 1, 9, 13, ..., 4985, 4989, 4990]), - values=tensor([0.4014, 0.1905, 0.8906, ..., 0.4332, 0.9731, 0.1283]), +tensor(crow_indices=tensor([ 0, 1515, 2990, ..., 7496949, + 7498462, 7500000]), + col_indices=tensor([ 3, 5, 10, ..., 4991, 4994, 4996]), + values=tensor([0.2561, 0.8875, 0.1225, ..., 0.4305, 0.2716, 0.4832]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.5776, 0.8031, 0.5959, ..., 0.3626, 0.0858, 0.0842]) +tensor([0.7648, 0.0458, 0.2576, ..., 0.3703, 0.6649, 0.5067]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 15.919427633285522 seconds +Time: 16.126448392868042 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1464, 2929, ..., 7497018, - 7498512, 7500000]), - col_indices=tensor([ 1, 9, 13, ..., 4985, 4989, 4990]), - values=tensor([0.4014, 0.1905, 0.8906, ..., 0.4332, 0.9731, 0.1283]), +tensor(crow_indices=tensor([ 0, 1515, 2990, ..., 7496949, + 7498462, 7500000]), + col_indices=tensor([ 3, 5, 10, ..., 4991, 4994, 4996]), + values=tensor([0.2561, 0.8875, 0.1225, ..., 0.4305, 0.2716, 0.4832]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.5776, 0.8031, 0.5959, ..., 0.3626, 0.0858, 0.0842]) +tensor([0.7648, 0.0458, 0.2576, ..., 0.3703, 0.6649, 0.5067]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 15.919427633285522 seconds +Time: 16.126448392868042 seconds -[20.2, 20.32, 20.32, 20.32, 20.32, 20.24, 20.52, 20.56, 20.64, 20.76] -[20.76, 20.64, 21.8, 21.8, 22.88, 24.6, 28.28, 30.28, 29.56, 29.28, 26.08, 25.32, 24.68, 24.72, 24.64, 24.44, 24.44, 24.48, 24.24, 24.32, 24.48] -21.303411960601807 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.919427633285522, 'TIME_S_1KI': 159.19427633285522, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 506.52752479553226, 'W': 23.776826253573663} -[20.2, 20.32, 20.32, 20.32, 20.32, 20.24, 20.52, 20.56, 20.64, 20.76, 20.12, 20.12, 19.96, 20.2, 20.04, 20.4, 20.6, 20.56, 20.64, 20.64] -366.61999999999995 -18.330999999999996 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 
'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.919427633285522, 'TIME_S_1KI': 159.19427633285522, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 506.52752479553226, 'W': 23.776826253573663, 'J_1KI': 5065.275247955323, 'W_1KI': 237.76826253573662, 'W_D': 5.445826253573667, 'J_D': 116.01468014574058, 'W_D_1KI': 54.45826253573667, 'J_D_1KI': 544.5826253573666} +[20.12, 19.96, 20.16, 20.16, 20.28, 20.4, 20.56, 20.6, 20.4, 20.32] +[20.28, 20.24, 21.04, 22.32, 24.36, 26.76, 29.12, 29.12, 28.76, 28.44, 26.92, 25.68, 24.8, 24.64, 24.72, 24.76, 24.64, 24.56, 24.48, 24.76, 24.76] +21.37271022796631 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 16.126448392868042, 'TIME_S_1KI': 161.26448392868042, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 511.86830524444576, 'W': 23.949620791408254} +[20.12, 19.96, 20.16, 20.16, 20.28, 20.4, 20.56, 20.6, 20.4, 20.32, 20.44, 20.4, 20.36, 20.4, 20.44, 20.6, 20.72, 20.6, 20.72, 20.72] +367.56 +18.378 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 16.126448392868042, 'TIME_S_1KI': 161.26448392868042, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 511.86830524444576, 'W': 23.949620791408254, 'J_1KI': 5118.683052444458, 'W_1KI': 239.49620791408256, 'W_D': 5.571620791408254, 'J_D': 119.08063667488092, 'W_D_1KI': 55.71620791408254, 'J_D_1KI': 557.1620791408254} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.4.json index d18f4ce..75c1c11 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.4.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.475390195846558, "TIME_S_1KI": 214.75390195846558, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 659.3497357940673, "W": 24.06026504427972, "J_1KI": 6593.497357940673, "W_1KI": 240.60265044279723, "W_D": 5.657265044279722, "J_D": 155.03221620368953, "W_D_1KI": 56.57265044279722, "J_D_1KI": 565.7265044279723} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.22704839706421, "TIME_S_1KI": 212.2704839706421, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 664.4800143432619, "W": 24.218492520245295, "J_1KI": 6644.80014343262, "W_1KI": 242.18492520245294, "W_D": 5.684492520245296, "J_D": 155.96477230072048, "W_D_1KI": 56.84492520245296, "J_D_1KI": 568.4492520245296} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.4.output index 6dfec86..5f2127e 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.4.output +++ 
b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.4 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.475390195846558} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.22704839706421} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2112, 4108, ..., 9995977, - 9998003, 10000000]), - col_indices=tensor([ 0, 2, 5, ..., 4993, 4997, 4998]), - values=tensor([0.6521, 0.2294, 0.7060, ..., 0.9592, 0.5713, 0.6385]), +tensor(crow_indices=tensor([ 0, 2017, 3989, ..., 9996063, + 9998055, 10000000]), + col_indices=tensor([ 1, 2, 3, ..., 4988, 4992, 4997]), + values=tensor([0.9794, 0.6063, 0.1282, ..., 0.2870, 0.4423, 0.1444]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2067, 0.4320, 0.3905, ..., 0.7782, 0.8244, 0.2696]) +tensor([0.3967, 0.7127, 0.1145, ..., 0.5862, 0.3390, 0.6491]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 21.475390195846558 seconds +Time: 21.22704839706421 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2112, 4108, ..., 9995977, - 9998003, 10000000]), - col_indices=tensor([ 0, 2, 5, ..., 4993, 4997, 4998]), - values=tensor([0.6521, 0.2294, 0.7060, ..., 0.9592, 0.5713, 0.6385]), +tensor(crow_indices=tensor([ 0, 2017, 3989, ..., 9996063, + 9998055, 10000000]), + col_indices=tensor([ 1, 2, 3, ..., 4988, 4992, 4997]), + values=tensor([0.9794, 0.6063, 0.1282, ..., 0.2870, 0.4423, 0.1444]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2067, 0.4320, 0.3905, ..., 0.7782, 0.8244, 0.2696]) +tensor([0.3967, 0.7127, 0.1145, ..., 0.5862, 0.3390, 0.6491]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 21.475390195846558 seconds +Time: 21.22704839706421 seconds -[20.36, 20.32, 20.12, 20.16, 20.12, 20.28, 20.68, 20.96, 21.08, 21.04] -[20.56, 20.44, 20.44, 23.96, 25.32, 27.12, 30.0, 32.28, 29.04, 28.16, 26.48, 25.72, 24.44, 24.36, 24.24, 24.24, 24.2, 24.16, 24.28, 24.32, 24.36, 24.48, 24.04, 23.92, 23.8, 23.68, 23.88] -27.40409278869629 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.475390195846558, 'TIME_S_1KI': 214.75390195846558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 659.3497357940673, 'W': 24.06026504427972} -[20.36, 20.32, 20.12, 20.16, 20.12, 20.28, 20.68, 20.96, 21.08, 21.04, 20.4, 20.4, 20.36, 20.56, 20.6, 20.4, 20.24, 20.36, 20.28, 20.48] -368.06 -18.403 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.475390195846558, 'TIME_S_1KI': 214.75390195846558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 659.3497357940673, 'W': 24.06026504427972, 'J_1KI': 6593.497357940673, 'W_1KI': 240.60265044279723, 'W_D': 5.657265044279722, 'J_D': 155.03221620368953, 'W_D_1KI': 56.57265044279722, 'J_D_1KI': 565.7265044279723} +[20.24, 20.4, 20.32, 20.32, 20.4, 20.6, 20.72, 20.96, 20.92, 20.68] +[20.52, 20.32, 20.8, 22.56, 24.2, 27.68, 29.48, 29.48, 30.08, 29.84, 28.44, 25.92, 25.08, 24.28, 24.32, 24.52, 24.48, 24.64, 24.44, 24.44, 24.48, 24.56, 24.28, 24.4, 24.28, 24.36, 24.4] +27.436885833740234 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.22704839706421, 'TIME_S_1KI': 212.2704839706421, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 664.4800143432619, 'W': 24.218492520245295} +[20.24, 20.4, 20.32, 20.32, 20.4, 20.6, 20.72, 20.96, 20.92, 20.68, 20.32, 20.52, 20.76, 20.6, 20.48, 20.52, 20.52, 20.64, 20.96, 20.84] +370.68 +18.534 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.22704839706421, 'TIME_S_1KI': 212.2704839706421, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 664.4800143432619, 'W': 24.218492520245295, 'J_1KI': 6644.80014343262, 'W_1KI': 242.18492520245294, 'W_D': 5.684492520245296, 'J_D': 155.96477230072048, 
'W_D_1KI': 56.84492520245296, 'J_D_1KI': 568.4492520245296} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.5.json index dc66501..b69526b 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.5.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.60726523399353, "TIME_S_1KI": 266.0726523399353, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 821.7415859985354, "W": 24.511735271206813, "J_1KI": 8217.415859985353, "W_1KI": 245.11735271206814, "W_D": 6.057735271206813, "J_D": 203.08203129005446, "W_D_1KI": 60.57735271206813, "J_D_1KI": 605.7735271206813} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.64250874519348, "TIME_S_1KI": 266.4250874519348, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 813.7249093627931, "W": 24.241946717289224, "J_1KI": 8137.249093627931, "W_1KI": 242.41946717289224, "W_D": 5.653946717289223, "J_D": 189.78497617053995, "W_D_1KI": 56.53946717289223, "J_D_1KI": 565.3946717289223} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.5.output index ad4e507..434a706 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.5.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.5 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.60726523399353} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.64250874519348} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2547, 5140, ..., 12494977, - 12497506, 12500000]), - col_indices=tensor([ 3, 4, 6, ..., 4994, 4995, 4998]), - values=tensor([0.6176, 0.1216, 0.2065, ..., 0.5783, 0.0575, 0.3833]), +tensor(crow_indices=tensor([ 0, 2517, 4999, ..., 12495052, + 12497559, 12500000]), + col_indices=tensor([ 0, 1, 3, ..., 4991, 4992, 4999]), + values=tensor([0.6703, 0.4092, 0.6328, ..., 0.6598, 0.5299, 0.2341]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.3583, 0.1424, 0.2491, ..., 0.0607, 0.2583, 0.4693]) +tensor([0.4179, 0.3399, 0.2076, ..., 0.4619, 0.8936, 0.2466]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 26.60726523399353 seconds +Time: 26.64250874519348 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2547, 5140, ..., 12494977, - 12497506, 12500000]), - col_indices=tensor([ 3, 4, 6, ..., 4994, 4995, 4998]), - values=tensor([0.6176, 0.1216, 0.2065, ..., 0.5783, 0.0575, 0.3833]), +tensor(crow_indices=tensor([ 0, 2517, 4999, ..., 12495052, + 12497559, 12500000]), + col_indices=tensor([ 0, 1, 3, ..., 4991, 4992, 4999]), + values=tensor([0.6703, 0.4092, 0.6328, ..., 0.6598, 0.5299, 0.2341]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.3583, 0.1424, 0.2491, ..., 0.0607, 0.2583, 0.4693]) +tensor([0.4179, 0.3399, 0.2076, ..., 0.4619, 0.8936, 0.2466]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 26.60726523399353 seconds +Time: 26.64250874519348 seconds -[20.28, 20.52, 20.68, 20.56, 20.44, 20.52, 20.44, 20.44, 20.72, 21.0] -[20.92, 20.68, 20.68, 23.84, 26.2, 27.84, 31.04, 29.96, 30.76, 29.48, 26.6, 26.6, 26.28, 24.68, 24.6, 24.48, 24.4, 24.24, 24.32, 24.32, 24.48, 24.52, 24.44, 24.52, 24.52, 24.56, 24.4, 24.56, 24.72, 24.68, 24.84, 24.8, 24.64] -33.524415016174316 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.60726523399353, 'TIME_S_1KI': 266.0726523399353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 821.7415859985354, 'W': 24.511735271206813} -[20.28, 20.52, 20.68, 20.56, 20.44, 20.52, 20.44, 20.44, 20.72, 21.0, 20.24, 20.2, 20.4, 20.6, 20.6, 20.6, 20.44, 20.56, 20.52, 20.16] -369.08000000000004 -18.454 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.60726523399353, 'TIME_S_1KI': 266.0726523399353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 821.7415859985354, 'W': 24.511735271206813, 'J_1KI': 8217.415859985353, 'W_1KI': 245.11735271206814, 'W_D': 6.057735271206813, 'J_D': 203.08203129005446, 'W_D_1KI': 60.57735271206813, 'J_D_1KI': 605.7735271206813} +[20.52, 20.52, 
20.72, 20.76, 21.12, 21.04, 21.16, 21.04, 20.72, 20.32] +[20.28, 20.24, 21.2, 23.24, 25.08, 25.08, 28.36, 29.84, 31.04, 30.0, 28.96, 26.28, 25.44, 24.36, 24.24, 24.2, 24.48, 24.6, 24.6, 24.48, 24.6, 24.48, 24.48, 24.52, 24.12, 24.16, 24.0, 24.04, 24.28, 24.36, 24.44, 24.44, 24.32] +33.566813707351685 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.64250874519348, 'TIME_S_1KI': 266.4250874519348, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 813.7249093627931, 'W': 24.241946717289224} +[20.52, 20.52, 20.72, 20.76, 21.12, 21.04, 21.16, 21.04, 20.72, 20.32, 20.28, 20.2, 20.28, 20.76, 20.88, 20.8, 20.6, 20.24, 20.2, 20.32] +371.76 +18.588 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.64250874519348, 'TIME_S_1KI': 266.4250874519348, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 813.7249093627931, 'W': 24.241946717289224, 'J_1KI': 8137.249093627931, 'W_1KI': 242.41946717289224, 'W_D': 5.653946717289223, 'J_D': 189.78497617053995, 'W_D_1KI': 56.53946717289223, 'J_D_1KI': 565.3946717289223} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.json index b24de3c..4989bfc 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 284909, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.16942024230957, "TIME_S_1KI": 0.035693573184102885, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 316.9542731094361, "W": 22.304497332617817, "J_1KI": 1.1124754679895548, "W_1KI": 0.07828639085679223, "W_D": 3.6764973326178207, "J_D": 52.24424125194558, "W_D_1KI": 0.012904110900736098, "J_D_1KI": 4.529204377796454e-05} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 287687, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.360889196395874, "TIME_S_1KI": 0.036014450414498654, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 315.19113276481625, "W": 22.12664535948525, "J_1KI": 1.095604364343249, "W_1KI": 0.07691221834662411, "W_D": 3.1666453594852477, "J_D": 45.108443765640196, "W_D_1KI": 0.011007259137483611, "J_D_1KI": 3.8261232302758245e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.output index ece41ca..44a657c 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,75 +1,75 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.012842655181884766} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.012710094451904297} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1287, 2037, 612, 4005, 465, 4495, 4486, 1954, 4095, - 1514, 3786, 3287, 3358, 3432, 3673, 489, 2823, 505, - 4424, 1572, 4277, 474, 3301, 30, 2842, 4780, 2739, - 564, 2900, 4485, 4784, 2295, 755, 3717, 1261, 1856, - 2818, 3372, 3761, 1939, 4279, 1416, 4196, 1024, 159, - 3430, 1464, 630, 4128, 1057, 4758, 4930, 4819, 4211, - 3868, 1700, 2760, 4521, 1355, 4737, 4580, 1838, 4056, - 1953, 4561, 1726, 3125, 4174, 510, 4743, 502, 2822, - 338, 1706, 4412, 4712, 3417, 4607, 4478, 4287, 4365, - 4223, 3755, 467, 2870, 999, 1516, 3711, 3345, 4540, - 4303, 4477, 4047, 3188, 522, 451, 4048, 1301, 3760, - 3807, 142, 526, 3797, 3415, 942, 3041, 1022, 555, - 2433, 3440, 4291, 2481, 2516, 1226, 4664, 1242, 2239, - 3542, 3300, 3985, 1261, 628, 3797, 3571, 1648, 545, - 3417, 523, 297, 1814, 2339, 1387, 4149, 2499, 1698, - 4107, 3910, 907, 1601, 3072, 2976, 1955, 76, 3173, - 63, 633, 2089, 1360, 1226, 4574, 730, 2472, 4618, - 425, 3915, 1299, 1950, 4945, 1796, 628, 1797, 3210, - 2055, 2428, 876, 1161, 1529, 1660, 2886, 4614, 2062, - 2433, 3539, 1521, 33, 1294, 4198, 863, 2582, 1498, - 77, 507, 2697, 2034, 2514, 1935, 4132, 2925, 876, - 2808, 4770, 271, 3697, 1635, 2519, 4995, 3590, 3245, - 130, 480, 3111, 3121, 3132, 1937, 3910, 1943, 2562, - 426, 3962, 1910, 1189, 1897, 1056, 462, 1607, 1444, - 118, 191, 2005, 615, 1379, 633, 2360, 3526, 4732, - 2267, 3397, 1029, 3432, 2182, 2675, 4099, 3777, 2171, - 2640, 3913, 4300, 2946, 3758, 3305, 1103, 4800, 3668, - 4286, 3562, 281, 919, 4442, 2167, 2728]), - values=tensor([0.8347, 0.3655, 0.0811, 0.8356, 0.0205, 0.3330, 0.9286, - 0.0736, 0.7654, 0.8451, 0.0234, 0.4126, 0.2439, 0.1012, - 0.1525, 0.4404, 0.8423, 0.5434, 0.2968, 0.3607, 0.9939, - 0.0443, 0.6432, 0.5086, 0.6326, 0.2329, 0.7870, 0.7820, - 0.9646, 0.4656, 0.9109, 0.0130, 0.3562, 0.2378, 0.0761, - 0.1724, 0.0722, 0.8084, 0.1566, 0.8788, 0.9593, 0.2473, - 0.2746, 0.1767, 0.8469, 0.1106, 0.8653, 0.5297, 0.8543, - 0.5387, 0.4683, 0.0500, 0.6408, 0.2485, 0.5053, 0.9278, - 0.6730, 0.1223, 0.9361, 0.1415, 0.0908, 0.6368, 0.4532, - 0.7711, 0.1924, 0.7435, 0.0645, 0.3989, 0.7433, 0.7022, - 0.6974, 0.8264, 0.3293, 0.6363, 0.9947, 0.1723, 0.3099, - 0.5498, 0.6041, 0.9256, 0.6505, 0.2218, 0.5727, 0.8460, - 0.3386, 0.9152, 0.1985, 0.3213, 0.2437, 0.8619, 0.4265, - 0.8019, 0.3028, 0.4559, 0.9203, 0.9762, 0.2222, 0.3112, - 0.4047, 0.0709, 0.2379, 0.3209, 0.9982, 0.9963, 0.6946, - 0.0267, 0.7677, 0.2026, 0.6034, 0.5006, 0.8273, 0.2191, - 0.6497, 0.2706, 0.0892, 0.8677, 0.9857, 0.5541, 0.2974, - 0.1559, 0.1745, 0.4744, 0.1426, 0.1224, 0.3669, 0.1827, - 0.5044, 0.5810, 0.3220, 0.7231, 0.9240, 0.0412, 0.3152, - 0.9088, 0.3617, 0.9935, 0.3508, 0.0434, 0.0453, 
0.5299, - 0.2529, 0.0232, 0.7419, 0.0564, 0.5519, 0.6136, 0.5013, - 0.9801, 0.4708, 0.5636, 0.5144, 0.1368, 0.7207, 0.1775, - 0.9552, 0.2262, 0.7144, 0.1124, 0.8514, 0.1783, 0.8401, - 0.1256, 0.7454, 0.1258, 0.2191, 0.5753, 0.9252, 0.8693, - 0.6514, 0.3440, 0.7780, 0.4771, 0.0787, 0.5042, 0.0634, - 0.8013, 0.8286, 0.4280, 0.3433, 0.9749, 0.0712, 0.9286, - 0.0320, 0.8979, 0.5094, 0.4000, 0.4693, 0.8308, 0.6000, - 0.3933, 0.7591, 0.2335, 0.5450, 0.3018, 0.3121, 0.4779, - 0.9302, 0.5324, 0.1295, 0.6438, 0.5030, 0.3371, 0.9613, - 0.8059, 0.9687, 0.2898, 0.7067, 0.8974, 0.1763, 0.0222, - 0.0300, 0.9494, 0.3209, 0.6515, 0.7028, 0.8063, 0.2794, - 0.7392, 0.1814, 0.3171, 0.4591, 0.7578, 0.6336, 0.8392, - 0.6142, 0.8521, 0.4206, 0.9799, 0.4517, 0.1512, 0.3696, - 0.0957, 0.3165, 0.3328, 0.9242, 0.5247, 0.8176, 0.9760, - 0.3689, 0.9384, 0.3805, 0.7826, 0.4113, 0.3311, 0.7250, - 0.9146, 0.3319, 0.6199, 0.8288, 0.1278]), + col_indices=tensor([4504, 4925, 3483, 2236, 3654, 879, 2367, 1033, 361, + 3171, 2429, 1911, 1608, 2947, 1720, 541, 16, 4242, + 4561, 4883, 4549, 893, 3920, 3256, 4585, 388, 1746, + 2051, 2283, 738, 1665, 2175, 502, 563, 4907, 1855, + 14, 4899, 3725, 1677, 3353, 3795, 2937, 2969, 4166, + 1958, 385, 3893, 1515, 395, 4430, 4695, 3039, 4753, + 944, 263, 3749, 1098, 4314, 948, 4146, 4287, 1019, + 3967, 1990, 4586, 4167, 3845, 3896, 2089, 3578, 1869, + 201, 4920, 252, 1590, 501, 3328, 1980, 2664, 1476, + 3301, 4106, 3571, 3576, 2787, 534, 211, 4210, 1393, + 899, 932, 158, 3456, 1353, 4964, 772, 2618, 2255, + 4378, 15, 4151, 1131, 4649, 978, 2370, 1002, 3024, + 3174, 132, 2699, 4574, 436, 287, 3781, 2840, 585, + 253, 3714, 890, 1237, 2106, 453, 3253, 2687, 4038, + 2573, 3153, 1786, 753, 2141, 4976, 479, 1169, 2361, + 4685, 2106, 1532, 1247, 2436, 1977, 1409, 53, 4427, + 4105, 922, 426, 2747, 3554, 4743, 4705, 1574, 252, + 1782, 415, 2753, 4857, 4050, 3415, 3199, 3863, 581, + 1831, 2495, 4469, 1436, 1047, 4329, 3112, 3247, 1156, + 344, 50, 2457, 1549, 4534, 4902, 4188, 1412, 2307, + 3467, 2816, 1347, 1780, 3660, 4058, 4763, 117, 2240, + 294, 2891, 1018, 4652, 770, 2573, 3893, 1274, 327, + 4428, 3093, 2257, 467, 3013, 2619, 2172, 4510, 3859, + 4809, 4935, 3672, 4872, 3570, 3292, 3498, 1413, 277, + 2860, 1386, 2933, 3545, 4710, 4185, 3059, 3414, 2678, + 932, 546, 4210, 1481, 394, 1428, 3327, 2136, 1167, + 3758, 4024, 3878, 1495, 1873, 2275, 4837, 668, 3929, + 633, 1606, 4036, 1972, 2352, 646, 1920]), + values=tensor([0.3281, 0.7186, 0.0885, 0.8824, 0.0428, 0.7484, 0.2776, + 0.8498, 0.3241, 0.6346, 0.3061, 0.1212, 0.9883, 0.2794, + 0.4951, 0.1149, 0.0271, 0.6817, 0.9478, 0.2189, 0.6939, + 0.7668, 0.7775, 0.3606, 0.5102, 0.9068, 0.2902, 0.8239, + 0.1082, 0.5562, 0.6806, 0.8337, 0.9803, 0.7843, 0.1212, + 0.0360, 0.0043, 0.7720, 0.5739, 0.8531, 0.2097, 0.5490, + 0.3124, 0.4709, 0.2499, 0.3973, 0.7625, 0.0166, 0.3678, + 0.1384, 0.2763, 0.8464, 0.7425, 0.8214, 0.4504, 0.9205, + 0.2942, 0.5135, 0.6536, 0.9550, 0.5441, 0.7895, 0.7419, + 0.0915, 0.8117, 0.7446, 0.2412, 0.2200, 0.2174, 0.5999, + 0.3015, 0.4139, 0.4084, 0.2322, 0.3844, 0.1459, 0.6829, + 0.1365, 0.5820, 0.4914, 0.5954, 0.7769, 0.6741, 0.9810, + 0.5109, 0.1783, 0.4846, 0.4503, 0.9371, 0.6551, 0.0122, + 0.2070, 0.3323, 0.1199, 0.9065, 0.5230, 0.4155, 0.8497, + 0.1219, 0.7228, 0.8239, 0.3020, 0.4084, 0.1155, 0.6770, + 0.2751, 0.6906, 0.9403, 0.0971, 0.5530, 0.8845, 0.6782, + 0.0262, 0.2644, 0.3304, 0.1597, 0.1013, 0.5580, 0.4774, + 0.9154, 0.5900, 0.5700, 0.4159, 0.4515, 0.4173, 0.5242, + 0.8851, 0.9702, 0.1320, 0.3631, 0.7289, 0.2975, 
0.2736, + 0.8971, 0.9762, 0.8591, 0.3679, 0.3897, 0.2074, 0.1861, + 0.7464, 0.8693, 0.8028, 0.1067, 0.6002, 0.6802, 0.0825, + 0.7664, 0.3571, 0.6935, 0.8423, 0.6814, 0.2101, 0.5250, + 0.4015, 0.3539, 0.1875, 0.1172, 0.4740, 0.5695, 0.7667, + 0.4081, 0.6735, 0.5832, 0.0738, 0.9286, 0.4354, 0.5601, + 0.3007, 0.0074, 0.7440, 0.0669, 0.3316, 0.4734, 0.4784, + 0.6725, 0.6391, 0.4030, 0.2837, 0.8512, 0.6271, 0.6672, + 0.7887, 0.7822, 0.6316, 0.7780, 0.9879, 0.0348, 0.3663, + 0.8694, 0.4406, 0.4699, 0.7568, 0.7158, 0.9662, 0.2705, + 0.3921, 0.4878, 0.4502, 0.4039, 0.5669, 0.0711, 0.4373, + 0.2590, 0.8335, 0.4542, 0.8366, 0.6447, 0.6675, 0.9508, + 0.2588, 0.9326, 0.8729, 0.1386, 0.5099, 0.0532, 0.4713, + 0.7464, 0.3701, 0.1217, 0.5396, 0.6107, 0.7487, 0.7675, + 0.2977, 0.2849, 0.4368, 0.0073, 0.4129, 0.5166, 0.4763, + 0.0076, 0.8870, 0.0419, 0.8594, 0.6650, 0.1603, 0.5692, + 0.4273, 0.6343, 0.0207, 0.6781, 0.8372, 0.2087, 0.8662, + 0.5153, 0.5825, 0.1674, 0.5871, 0.7943]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.0363, 0.1704, 0.8959, ..., 0.1381, 0.6314, 0.8045]) +tensor([0.5576, 0.8627, 0.7863, ..., 0.7513, 0.9859, 0.4856]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -77,80 +77,107 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.012842655181884766 seconds +Time: 0.012710094451904297 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 81758 -ss 5000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.01309871673584} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 82611 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.0151278972625732} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([3901, 3663, 229, 3286, 2751, 3483, 2128, 1446, 4503, - 3960, 1594, 4088, 4861, 755, 2490, 2051, 2278, 4729, - 1103, 1042, 1554, 2825, 4031, 3744, 2602, 531, 4883, - 202, 3192, 4676, 1858, 1406, 3540, 1868, 4258, 1296, - 40, 2235, 3617, 4799, 680, 649, 3963, 1933, 1725, - 575, 4509, 1266, 2706, 2278, 2363, 503, 1376, 3736, - 4576, 1431, 3018, 4449, 379, 171, 699, 4269, 423, - 15, 756, 1388, 2467, 4283, 3498, 330, 1828, 2936, - 727, 2300, 2833, 2863, 109, 1560, 2849, 1474, 443, - 230, 4619, 4963, 1262, 1748, 3663, 2453, 3127, 315, - 2799, 4404, 3122, 4451, 3711, 4611, 77, 2376, 1019, - 1276, 1245, 139, 3620, 243, 636, 3728, 523, 3275, - 1319, 407, 4648, 783, 1431, 764, 2964, 886, 1325, - 4241, 4394, 4361, 2591, 57, 2666, 3482, 767, 690, - 1227, 174, 1857, 1992, 525, 1315, 1715, 350, 2942, - 4827, 2788, 3414, 2521, 2538, 2474, 540, 1078, 638, - 3541, 2163, 4818, 623, 1593, 2829, 4885, 1974, 2036, - 2239, 4847, 1633, 3181, 2785, 2293, 3813, 4130, 3145, - 586, 2131, 1377, 3635, 1416, 96, 1290, 4122, 4111, - 3371, 3627, 1730, 4198, 1682, 3780, 3696, 967, 3018, - 2372, 3866, 4983, 4085, 4264, 3039, 2899, 4144, 4646, - 2984, 172, 4147, 1120, 2707, 124, 2849, 2323, 1520, - 3250, 2548, 2660, 4708, 2235, 593, 199, 448, 1975, - 4340, 1665, 3502, 4596, 1936, 3151, 759, 4834, 1142, - 2548, 2586, 1859, 3767, 1222, 4845, 1226, 3446, 4199, - 4381, 2057, 3436, 396, 3566, 1282, 173, 1462, 3915, - 4165, 1821, 3059, 730, 1341, 2913, 4066, 2638, 3070, - 430, 826, 3316, 2761, 4646, 4961, 2657]), - values=tensor([0.2777, 0.7994, 0.6055, 0.8858, 0.5401, 0.6289, 0.2315, - 0.7108, 0.2919, 0.9440, 0.4352, 0.9935, 0.7547, 0.9487, - 0.1198, 0.7138, 0.2381, 0.9158, 0.3679, 0.3919, 0.7781, - 0.8318, 0.6054, 0.4010, 0.3469, 0.3425, 0.7408, 0.3350, - 0.1090, 0.6705, 0.8875, 0.7480, 0.9795, 0.0427, 0.1593, - 0.2017, 0.8682, 0.9704, 0.5078, 0.7021, 0.2351, 0.1990, - 0.1799, 0.1614, 0.9052, 0.7818, 0.9469, 0.9144, 0.6416, - 0.0978, 0.2154, 0.3794, 0.0722, 0.4288, 0.4423, 0.0392, - 0.2361, 0.1773, 0.5238, 0.6266, 0.9495, 0.4070, 0.9779, - 0.1080, 0.9756, 0.0212, 0.7575, 0.6901, 0.5589, 0.8829, - 0.5870, 0.3829, 0.0909, 0.7414, 0.5878, 0.3480, 0.1168, - 0.3972, 0.2804, 0.8860, 0.5903, 0.9778, 0.2522, 0.2229, - 0.0973, 0.3159, 0.6835, 0.0134, 0.3067, 0.7266, 0.6764, - 0.3082, 0.0327, 0.3921, 0.8622, 0.8074, 0.6252, 0.9606, - 0.3313, 0.3455, 0.4533, 0.6697, 0.2711, 0.3754, 0.8727, - 0.6651, 0.0380, 0.1210, 0.0259, 0.0087, 0.3017, 0.7186, - 0.9688, 0.5810, 0.6939, 0.8057, 0.2727, 0.5144, 0.0126, - 0.0636, 0.8543, 0.9756, 0.4583, 0.3014, 0.5014, 0.4285, - 0.3361, 0.3583, 0.8660, 0.8752, 0.5050, 0.1837, 0.7102, - 0.1957, 0.9064, 0.7982, 0.5015, 0.4099, 0.5809, 0.8801, - 0.0073, 0.5658, 0.8433, 0.7251, 0.8971, 0.9752, 0.6676, - 0.2814, 0.9394, 0.9811, 0.1778, 0.5627, 0.3569, 0.2951, - 0.4362, 0.7414, 0.7224, 0.6917, 0.2922, 0.7465, 0.6523, - 0.5621, 0.0779, 0.8744, 0.6553, 0.5271, 0.0990, 0.8629, - 0.6483, 0.0044, 0.2027, 0.6359, 0.0842, 0.9816, 0.4377, - 0.7291, 0.7757, 0.4150, 0.9512, 0.9053, 0.6628, 0.9162, - 0.6353, 0.3725, 0.8919, 0.1505, 0.1975, 0.7728, 0.1846, - 0.5340, 0.4217, 0.7643, 0.3438, 0.6005, 0.7795, 0.2067, - 0.6674, 0.9142, 0.4620, 0.8140, 0.1036, 0.3590, 0.3372, - 0.0756, 0.4219, 0.7019, 0.2017, 0.1876, 0.8857, 0.9443, - 0.7034, 0.3858, 0.6463, 0.0872, 0.7101, 0.2546, 0.8101, - 0.3637, 0.4495, 0.8137, 0.4469, 0.4204, 0.1055, 0.8379, - 0.1725, 0.3312, 0.1791, 0.6141, 0.0562, 0.4774, 
0.5212, - 0.7724, 0.9039, 0.5626, 0.1051, 0.2569, 0.5243, 0.3982, - 0.0444, 0.0991, 0.8125, 0.2081, 0.2559, 0.6572, 0.3238, - 0.3534, 0.8270, 0.9704, 0.5262, 0.1397]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.2610, 0.0051, 0.8611, ..., 0.6706, 0.7457, 0.2823]) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 250, 250, 250]), + col_indices=tensor([ 213, 4779, 3777, 4550, 4758, 725, 2100, 1582, 629, + 788, 2305, 3460, 2892, 1710, 4886, 3352, 2622, 4787, + 4791, 2193, 3363, 3475, 1828, 4939, 98, 2267, 3550, + 3189, 4933, 2327, 2592, 2612, 1351, 4503, 941, 873, + 4654, 105, 1777, 1700, 1733, 991, 2094, 3188, 661, + 3197, 2034, 428, 2491, 4698, 4272, 4288, 967, 1515, + 1856, 1234, 2114, 911, 824, 3638, 644, 73, 2345, + 2851, 4331, 3533, 2809, 3068, 270, 55, 1384, 692, + 3091, 2195, 905, 1188, 744, 198, 1050, 3256, 3857, + 1805, 209, 604, 3050, 2910, 4230, 2736, 4913, 2159, + 4177, 2457, 1542, 4527, 1440, 848, 180, 4687, 738, + 3246, 703, 4279, 3126, 1010, 490, 1429, 2695, 470, + 1188, 1848, 488, 173, 3455, 1877, 3226, 4432, 1230, + 930, 3766, 4842, 3588, 2421, 1432, 3209, 3481, 541, + 1788, 3984, 4857, 1375, 1070, 3537, 2842, 3989, 4269, + 3037, 3850, 4833, 1104, 4156, 4852, 2097, 3322, 3330, + 2831, 3249, 4960, 2980, 4398, 4363, 2624, 3551, 934, + 2458, 1879, 3903, 814, 3517, 4966, 1013, 2114, 2710, + 4601, 4698, 4771, 3603, 3029, 1785, 2389, 3046, 978, + 1689, 2154, 317, 4784, 1353, 669, 4859, 4849, 2440, + 3879, 3510, 1182, 4553, 772, 3185, 2930, 3381, 1855, + 4333, 208, 2573, 3947, 2438, 3380, 2430, 1515, 4565, + 934, 1556, 4488, 2745, 76, 977, 1941, 3187, 1442, + 108, 3679, 945, 4774, 3359, 122, 3169, 945, 4677, + 4289, 2291, 2389, 3962, 537, 4260, 3000, 4529, 2548, + 4748, 2479, 3591, 420, 2587, 1216, 4663, 2675, 4760, + 3290, 2440, 3917, 1363, 4386, 3961, 780, 3851, 3666, + 1416, 4031, 2287, 801, 4503, 3484, 4817]), + values=tensor([4.0354e-01, 1.2267e-01, 3.2350e-01, 7.9853e-01, + 3.7774e-01, 8.1115e-01, 3.9899e-01, 5.3011e-01, + 2.7057e-01, 8.4864e-01, 6.9685e-01, 2.2026e-01, + 9.5940e-01, 9.6521e-01, 4.9025e-01, 6.8787e-01, + 4.4547e-01, 6.2420e-01, 7.6380e-01, 8.0763e-01, + 7.0165e-01, 1.8104e-01, 1.5060e-01, 4.4401e-01, + 4.8802e-01, 4.4349e-02, 4.5519e-02, 5.8012e-01, + 9.6507e-01, 7.8555e-01, 5.1555e-01, 1.5159e-01, + 8.4773e-01, 6.7004e-01, 2.4404e-02, 6.4126e-01, + 2.9397e-01, 8.5036e-01, 9.6316e-01, 4.3294e-01, + 1.7380e-01, 1.1130e-01, 1.2317e-01, 1.8201e-01, + 3.6672e-01, 6.1148e-01, 6.3677e-02, 8.6464e-02, + 3.0547e-01, 3.7679e-01, 7.0998e-01, 9.4956e-02, + 2.4487e-01, 3.0186e-01, 3.6867e-01, 1.9885e-01, + 1.6360e-01, 2.4777e-01, 4.8316e-01, 9.8799e-01, + 6.6498e-01, 8.5026e-01, 4.3559e-01, 1.6466e-01, + 3.1401e-01, 8.7264e-01, 1.8130e-01, 6.5719e-01, + 1.0428e-01, 5.9032e-01, 8.4727e-01, 7.1024e-01, + 8.3932e-01, 7.8074e-01, 4.7444e-02, 2.8611e-01, + 7.3109e-01, 3.1598e-01, 5.4665e-01, 2.1210e-01, + 9.7144e-01, 7.9161e-02, 6.5838e-01, 4.0121e-01, + 2.8324e-01, 1.4067e-01, 9.7211e-01, 5.8205e-01, + 9.3807e-01, 2.3543e-01, 3.0940e-01, 3.4699e-01, + 9.7829e-01, 8.5961e-01, 2.8642e-02, 9.0642e-01, + 7.4376e-01, 4.8960e-01, 5.0701e-01, 6.4571e-01, + 1.7579e-01, 2.2052e-02, 9.4795e-01, 4.9614e-01, + 4.0009e-01, 9.4347e-01, 7.9080e-01, 1.9277e-01, + 6.3676e-02, 5.1107e-01, 8.6055e-01, 4.7560e-01, + 5.2366e-01, 5.8318e-01, 7.7280e-01, 6.3135e-01, + 7.0670e-02, 9.5237e-01, 4.7232e-02, 4.1667e-01, + 7.5966e-01, 8.1267e-01, 9.3749e-01, 6.0677e-04, + 3.5444e-02, 3.4694e-01, 2.0328e-01, 7.5943e-01, + 8.7233e-01, 6.4782e-02, 6.3981e-02, 2.6252e-01, + 5.1718e-01, 
2.8671e-01, 4.2027e-01, 3.9280e-01, + 1.0608e-01, 3.4985e-01, 3.5087e-01, 2.8564e-01, + 3.3414e-01, 3.1729e-01, 3.1801e-01, 2.4441e-01, + 2.1150e-01, 6.1645e-01, 8.1734e-01, 9.9737e-01, + 1.6126e-01, 2.6992e-01, 5.6396e-01, 2.6998e-02, + 7.7680e-01, 7.7545e-01, 5.1098e-01, 4.0545e-01, + 8.5354e-02, 9.1810e-01, 2.3721e-01, 4.9704e-02, + 2.2999e-01, 7.5881e-01, 9.6770e-01, 6.1132e-01, + 5.2798e-01, 5.8081e-01, 2.8793e-02, 6.8780e-01, + 3.5512e-01, 8.8789e-01, 3.6352e-01, 4.7635e-01, + 7.6082e-01, 2.4130e-01, 9.3603e-01, 6.5842e-03, + 3.1354e-01, 9.1445e-01, 4.6962e-01, 5.5393e-01, + 3.3911e-01, 6.9143e-01, 6.1472e-01, 2.4007e-01, + 4.1034e-01, 8.0676e-01, 2.9364e-01, 6.1775e-01, + 2.1668e-01, 3.6283e-01, 9.4232e-01, 7.2551e-01, + 8.4886e-02, 5.9005e-01, 6.7531e-01, 3.8537e-01, + 6.1740e-01, 8.8522e-01, 6.6458e-01, 5.7352e-02, + 9.9077e-01, 6.7682e-01, 5.7545e-01, 4.4455e-01, + 5.5219e-01, 1.2992e-01, 1.9966e-02, 7.1280e-01, + 5.1974e-01, 8.0110e-01, 9.8569e-01, 6.3972e-01, + 8.5091e-01, 2.5322e-03, 8.5596e-01, 7.8316e-01, + 7.2764e-01, 1.1004e-01, 4.4597e-01, 1.3273e-01, + 6.0224e-01, 1.0515e-01, 2.0270e-01, 8.8295e-01, + 2.2704e-01, 1.5559e-01, 3.1150e-01, 3.7542e-01, + 6.0848e-01, 7.3259e-01, 3.0180e-01, 8.5138e-01, + 1.2650e-02, 9.5390e-01, 6.7270e-01, 5.2163e-01, + 7.1039e-01, 6.3532e-02, 9.2610e-01, 6.1969e-01, + 3.4843e-01, 6.2312e-01, 7.2342e-01, 2.9651e-01, + 9.9131e-01, 9.5189e-03, 5.9330e-01, 8.7447e-01, + 9.5601e-01, 4.5420e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.5903, 0.2031, 0.9114, ..., 0.3396, 0.0516, 0.4557]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -158,80 +185,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 3.01309871673584 seconds +Time: 3.0151278972625732 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 284909 -ss 5000 -sd 1e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.16942024230957} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 287687 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.360889196395874} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1669, 2388, 3410, 214, 4888, 2047, 1859, 3824, 1130, - 3331, 4650, 808, 1845, 4600, 2980, 4756, 2639, 4242, - 120, 4542, 2175, 1322, 104, 704, 854, 2110, 1063, - 1256, 2794, 2665, 1239, 4623, 2397, 2905, 1669, 3634, - 691, 1001, 4550, 1274, 2606, 2628, 4848, 3423, 4205, - 4849, 3844, 4805, 2751, 822, 856, 3866, 2362, 4396, - 3513, 3731, 4108, 1129, 2401, 2429, 238, 3568, 2538, - 4839, 3438, 2131, 3982, 1035, 620, 3061, 2659, 870, - 31, 582, 3725, 2164, 3897, 4881, 3537, 2824, 936, - 4420, 341, 4499, 2690, 351, 3823, 4169, 4790, 4554, - 2495, 1376, 3626, 221, 4721, 2833, 4128, 83, 287, - 4091, 4135, 4551, 3973, 764, 392, 4740, 2858, 4378, - 2517, 4820, 3243, 3784, 1749, 2694, 3058, 661, 4273, - 2427, 4542, 135, 3704, 3578, 4193, 3743, 3465, 2179, - 4188, 2714, 3316, 1323, 3063, 3972, 3355, 1842, 1656, - 2481, 1669, 1106, 4204, 1040, 565, 3967, 2999, 776, - 1132, 4335, 252, 3480, 3592, 4417, 2743, 508, 1998, - 2250, 4747, 3247, 3778, 2520, 4340, 4333, 889, 3347, - 1306, 252, 3840, 4251, 3753, 922, 1530, 732, 4724, - 4652, 2305, 676, 3763, 2577, 479, 3149, 3237, 682, - 2204, 1170, 4037, 1115, 902, 2463, 2133, 49, 3338, - 846, 2596, 1254, 611, 336, 2556, 4596, 3162, 2347, - 1052, 1946, 3013, 1910, 3262, 793, 681, 3061, 4097, - 649, 4096, 3982, 4856, 2244, 770, 1157, 3683, 1150, - 4034, 4307, 4867, 947, 1680, 3888, 190, 677, 2841, - 816, 454, 4546, 1683, 1115, 4528, 4055, 324, 2442, - 1530, 1512, 2880, 1124, 741, 2337, 2820, 1096, 969, - 4662, 1861, 4067, 2109, 3996, 1635, 499]), - values=tensor([0.8132, 0.1702, 0.5583, 0.1261, 0.6291, 0.5508, 0.1330, - 0.9627, 0.2059, 0.3644, 0.3622, 0.4731, 0.3091, 0.8919, - 0.7060, 0.5289, 0.7945, 0.3422, 0.4040, 0.9747, 0.1778, - 0.1060, 0.3373, 0.1041, 0.0936, 0.4036, 0.4021, 0.5444, - 0.4938, 0.5992, 0.1894, 0.3036, 0.6677, 0.4744, 0.8443, - 0.2067, 0.1390, 0.7860, 0.2069, 0.5019, 0.5539, 0.4807, - 0.6194, 0.5176, 0.2767, 0.7631, 0.4453, 0.0999, 0.7181, - 0.2470, 0.2255, 0.5250, 0.2866, 0.8997, 0.0544, 0.1824, - 0.2628, 0.9339, 0.2590, 0.5943, 0.2439, 0.4256, 0.8224, - 0.2204, 0.5000, 0.2703, 0.2122, 0.2501, 0.5794, 0.6155, - 0.5183, 0.5021, 0.6112, 0.1537, 0.1024, 0.3154, 0.0744, - 0.5354, 0.3979, 0.6342, 0.7319, 0.0847, 0.3194, 0.5800, - 0.2467, 0.5775, 0.6339, 0.2050, 0.2286, 0.7874, 0.1733, - 0.7255, 0.0573, 0.6716, 0.4231, 0.6554, 0.3477, 0.4703, - 0.1981, 0.3923, 0.3520, 0.4289, 0.4033, 0.1353, 0.5197, - 0.9189, 0.6985, 0.9291, 0.8051, 0.5530, 0.0423, 0.9594, - 0.8487, 0.2554, 0.0395, 0.4103, 0.1345, 0.0607, 0.2812, - 0.7571, 0.9906, 0.2249, 0.3326, 0.1389, 0.8069, 0.2156, - 0.3462, 0.2324, 0.0457, 0.8244, 0.5205, 0.0833, 0.1781, - 0.3837, 0.9227, 0.2976, 0.9031, 0.2499, 0.3484, 0.3298, - 0.6568, 0.3816, 0.5687, 0.3523, 0.3593, 0.7242, 0.1034, - 0.3478, 0.4454, 0.7734, 0.2847, 0.4512, 0.5866, 0.1633, - 0.7139, 0.4511, 0.5642, 0.2230, 0.1384, 0.2467, 0.5114, - 0.5149, 0.4901, 0.7340, 0.5840, 0.0495, 0.1493, 0.4501, - 0.5299, 0.1752, 0.0737, 0.0887, 0.7004, 0.7171, 0.6451, - 0.1099, 0.6191, 0.3209, 0.2667, 0.2735, 0.3592, 0.7035, - 0.1766, 0.2292, 0.6138, 0.2492, 0.8422, 0.5205, 0.0949, - 0.6311, 0.1200, 0.6842, 0.3167, 0.3418, 0.7978, 0.1885, - 0.9433, 0.6390, 0.5217, 0.8313, 0.4066, 0.8623, 0.9330, - 0.7999, 0.0688, 0.3315, 0.2496, 0.2006, 0.0199, 0.1239, - 0.0030, 0.9251, 0.8374, 0.2492, 0.6001, 0.0171, 0.3645, - 0.9564, 0.7314, 0.8427, 0.8917, 0.1465, 0.2355, 0.6975, - 0.9025, 0.0358, 0.2860, 0.4051, 0.9734, 0.8626, 
0.4028, - 0.9642, 0.0743, 0.8714, 0.6919, 0.3640, 0.9239, 0.1573, - 0.9549, 0.3068, 0.2789, 0.0169, 0.6253, 0.7318, 0.1857, - 0.1394, 0.2220, 0.2355, 0.9726, 0.9750]), + col_indices=tensor([3835, 4100, 3288, 1111, 2615, 1842, 721, 2110, 3471, + 3298, 1062, 2204, 2583, 1594, 4310, 3017, 1522, 2893, + 2730, 967, 697, 2772, 296, 2537, 3401, 4896, 2185, + 1467, 4947, 4327, 3210, 2930, 361, 814, 2937, 1687, + 1245, 4425, 1355, 3995, 1835, 2585, 3294, 3369, 4131, + 2112, 4663, 2814, 2176, 3719, 2668, 994, 441, 3881, + 1372, 4680, 2519, 2791, 4549, 2729, 2953, 3872, 1491, + 3405, 3183, 1786, 828, 3121, 2433, 4207, 4911, 322, + 4332, 2322, 3, 1285, 153, 2814, 2246, 4247, 4666, + 1547, 3260, 3417, 4348, 1373, 3091, 1509, 4230, 2269, + 4339, 2096, 1695, 3122, 2088, 1784, 3655, 417, 4064, + 185, 3211, 1867, 853, 1958, 692, 3072, 1918, 1238, + 453, 2452, 3264, 4717, 598, 3296, 1291, 52, 4664, + 1619, 3701, 2531, 839, 1095, 1405, 1755, 4032, 1471, + 3742, 3286, 3684, 4379, 314, 1276, 2952, 1464, 3493, + 4632, 3867, 270, 1580, 3966, 1648, 4218, 4607, 1543, + 1273, 4431, 3554, 750, 2726, 3039, 4850, 204, 1982, + 1280, 1670, 435, 1029, 1265, 488, 313, 4283, 2463, + 3066, 3421, 3101, 1667, 3847, 502, 2450, 679, 4239, + 4387, 1269, 2481, 145, 1867, 117, 2524, 3731, 742, + 1231, 4496, 2560, 4909, 2316, 2336, 3858, 2912, 4745, + 4422, 458, 4406, 3773, 4481, 2073, 2667, 4275, 2595, + 2103, 4813, 1809, 3578, 3133, 3916, 1983, 2028, 1469, + 2266, 2381, 3024, 513, 3320, 179, 2026, 395, 3774, + 3246, 274, 738, 3581, 964, 3178, 1016, 1417, 4487, + 2137, 190, 1094, 41, 3224, 3890, 4493, 3175, 4452, + 2458, 2626, 1597, 2294, 1289, 4412, 4878, 2717, 611, + 3437, 165, 1044, 4973, 600, 1953, 234]), + values=tensor([0.9194, 0.9511, 0.1759, 0.0806, 0.5579, 0.5909, 0.5444, + 0.4929, 0.3283, 0.5813, 0.8577, 0.8573, 0.3370, 0.6634, + 0.3256, 0.0555, 0.4432, 0.9017, 0.3253, 0.5759, 0.1980, + 0.8117, 0.0490, 0.8318, 0.9103, 0.9173, 0.2933, 0.6711, + 0.2931, 0.9505, 0.4262, 0.7750, 0.5853, 0.8597, 0.3103, + 0.5115, 0.2132, 0.0255, 0.2347, 0.0616, 0.3497, 0.3151, + 0.3650, 0.2626, 0.4199, 0.9673, 0.5246, 0.1533, 0.1365, + 0.8928, 0.4291, 0.7577, 0.5244, 0.2399, 0.8984, 0.6947, + 0.4626, 0.8346, 0.5027, 0.3949, 0.3636, 0.5403, 0.5095, + 0.0164, 0.1264, 0.0974, 0.4505, 0.5292, 0.2234, 0.1960, + 0.5104, 0.0273, 0.8931, 0.1073, 0.8623, 0.2575, 0.7953, + 0.5701, 0.5194, 0.2709, 0.1261, 0.9933, 0.0287, 0.3824, + 0.4292, 0.1111, 0.0013, 0.8437, 0.4581, 0.4410, 0.3443, + 0.9120, 0.5789, 0.5934, 0.4413, 0.7093, 0.6011, 0.6718, + 0.1412, 0.9770, 0.5884, 0.7031, 0.5903, 0.9765, 0.1106, + 0.4818, 0.0405, 0.0041, 0.7017, 0.4360, 0.3522, 0.4112, + 0.9723, 0.0877, 0.0441, 0.1570, 0.8913, 0.1536, 0.1637, + 0.4981, 0.4859, 0.6376, 0.2129, 0.5287, 0.9896, 0.9621, + 0.1314, 0.7893, 0.3622, 0.7936, 0.0563, 0.8633, 0.0436, + 0.9765, 0.6100, 0.5316, 0.1536, 0.2153, 0.0996, 0.8797, + 0.6733, 0.6855, 0.6258, 0.3627, 0.0821, 0.4890, 0.6054, + 0.0612, 0.1414, 0.7279, 0.3036, 0.9598, 0.6434, 0.2948, + 0.2841, 0.4693, 0.6178, 0.6854, 0.2242, 0.5700, 0.9283, + 0.5926, 0.3085, 0.8456, 0.3258, 0.6163, 0.7395, 0.4116, + 0.7534, 0.4931, 0.6502, 0.7243, 0.0397, 0.2405, 0.0377, + 0.3907, 0.2557, 0.7719, 0.4558, 0.1846, 0.4629, 0.0481, + 0.8517, 0.3424, 0.8116, 0.9881, 0.9386, 0.9599, 0.5813, + 0.1414, 0.7283, 0.6897, 0.1106, 0.0774, 0.7348, 0.0658, + 0.0995, 0.0410, 0.8733, 0.4363, 0.9618, 0.9688, 0.2513, + 0.7746, 0.6495, 0.5569, 0.9848, 0.6514, 0.9270, 0.0876, + 0.9735, 0.2507, 0.3576, 0.5873, 0.9861, 0.2704, 0.2117, + 0.9499, 0.3531, 0.3033, 0.9542, 0.0598, 
0.0487, 0.2094, + 0.8599, 0.3949, 0.2128, 0.4097, 0.2778, 0.3314, 0.2249, + 0.7619, 0.9244, 0.6695, 0.7027, 0.3445, 0.1185, 0.1951, + 0.1406, 0.2410, 0.4900, 0.3068, 0.2304, 0.0755, 0.6690, + 0.6082, 0.5362, 0.8224, 0.7336, 0.9890]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.9060, 0.0911, 0.6185, ..., 0.7353, 0.0547, 0.2301]) +tensor([0.8124, 0.7369, 0.0249, ..., 0.2346, 0.3675, 0.9867]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -239,77 +266,77 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.16942024230957 seconds +Time: 10.360889196395874 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1669, 2388, 3410, 214, 4888, 2047, 1859, 3824, 1130, - 3331, 4650, 808, 1845, 4600, 2980, 4756, 2639, 4242, - 120, 4542, 2175, 1322, 104, 704, 854, 2110, 1063, - 1256, 2794, 2665, 1239, 4623, 2397, 2905, 1669, 3634, - 691, 1001, 4550, 1274, 2606, 2628, 4848, 3423, 4205, - 4849, 3844, 4805, 2751, 822, 856, 3866, 2362, 4396, - 3513, 3731, 4108, 1129, 2401, 2429, 238, 3568, 2538, - 4839, 3438, 2131, 3982, 1035, 620, 3061, 2659, 870, - 31, 582, 3725, 2164, 3897, 4881, 3537, 2824, 936, - 4420, 341, 4499, 2690, 351, 3823, 4169, 4790, 4554, - 2495, 1376, 3626, 221, 4721, 2833, 4128, 83, 287, - 4091, 4135, 4551, 3973, 764, 392, 4740, 2858, 4378, - 2517, 4820, 3243, 3784, 1749, 2694, 3058, 661, 4273, - 2427, 4542, 135, 3704, 3578, 4193, 3743, 3465, 2179, - 4188, 2714, 3316, 1323, 3063, 3972, 3355, 1842, 1656, - 2481, 1669, 1106, 4204, 1040, 565, 3967, 2999, 776, - 1132, 4335, 252, 3480, 3592, 4417, 2743, 508, 1998, - 2250, 4747, 3247, 3778, 2520, 4340, 4333, 889, 3347, - 1306, 252, 3840, 4251, 3753, 922, 1530, 732, 4724, - 4652, 2305, 676, 3763, 2577, 479, 3149, 3237, 682, - 2204, 1170, 4037, 1115, 902, 2463, 2133, 49, 3338, - 846, 2596, 1254, 611, 336, 2556, 4596, 3162, 2347, - 1052, 1946, 3013, 1910, 3262, 793, 681, 3061, 4097, - 649, 4096, 3982, 4856, 2244, 770, 1157, 3683, 1150, - 4034, 4307, 4867, 947, 1680, 3888, 190, 677, 2841, - 816, 454, 4546, 1683, 1115, 4528, 4055, 324, 2442, - 1530, 1512, 2880, 1124, 741, 2337, 2820, 1096, 969, - 4662, 1861, 4067, 2109, 3996, 1635, 499]), - values=tensor([0.8132, 0.1702, 0.5583, 0.1261, 0.6291, 0.5508, 0.1330, - 0.9627, 0.2059, 0.3644, 0.3622, 0.4731, 0.3091, 0.8919, - 0.7060, 0.5289, 0.7945, 0.3422, 0.4040, 0.9747, 0.1778, - 0.1060, 0.3373, 0.1041, 0.0936, 0.4036, 0.4021, 0.5444, - 0.4938, 0.5992, 0.1894, 0.3036, 0.6677, 0.4744, 0.8443, - 0.2067, 0.1390, 0.7860, 0.2069, 0.5019, 0.5539, 0.4807, - 0.6194, 0.5176, 0.2767, 0.7631, 0.4453, 0.0999, 0.7181, - 0.2470, 0.2255, 0.5250, 0.2866, 0.8997, 0.0544, 0.1824, - 0.2628, 0.9339, 0.2590, 0.5943, 0.2439, 0.4256, 0.8224, - 0.2204, 0.5000, 0.2703, 0.2122, 0.2501, 0.5794, 0.6155, - 0.5183, 0.5021, 0.6112, 0.1537, 0.1024, 0.3154, 0.0744, - 0.5354, 0.3979, 0.6342, 0.7319, 0.0847, 0.3194, 0.5800, - 0.2467, 0.5775, 0.6339, 0.2050, 0.2286, 0.7874, 0.1733, - 0.7255, 0.0573, 0.6716, 0.4231, 0.6554, 0.3477, 0.4703, - 0.1981, 0.3923, 0.3520, 0.4289, 0.4033, 0.1353, 0.5197, - 0.9189, 0.6985, 0.9291, 0.8051, 
0.5530, 0.0423, 0.9594, - 0.8487, 0.2554, 0.0395, 0.4103, 0.1345, 0.0607, 0.2812, - 0.7571, 0.9906, 0.2249, 0.3326, 0.1389, 0.8069, 0.2156, - 0.3462, 0.2324, 0.0457, 0.8244, 0.5205, 0.0833, 0.1781, - 0.3837, 0.9227, 0.2976, 0.9031, 0.2499, 0.3484, 0.3298, - 0.6568, 0.3816, 0.5687, 0.3523, 0.3593, 0.7242, 0.1034, - 0.3478, 0.4454, 0.7734, 0.2847, 0.4512, 0.5866, 0.1633, - 0.7139, 0.4511, 0.5642, 0.2230, 0.1384, 0.2467, 0.5114, - 0.5149, 0.4901, 0.7340, 0.5840, 0.0495, 0.1493, 0.4501, - 0.5299, 0.1752, 0.0737, 0.0887, 0.7004, 0.7171, 0.6451, - 0.1099, 0.6191, 0.3209, 0.2667, 0.2735, 0.3592, 0.7035, - 0.1766, 0.2292, 0.6138, 0.2492, 0.8422, 0.5205, 0.0949, - 0.6311, 0.1200, 0.6842, 0.3167, 0.3418, 0.7978, 0.1885, - 0.9433, 0.6390, 0.5217, 0.8313, 0.4066, 0.8623, 0.9330, - 0.7999, 0.0688, 0.3315, 0.2496, 0.2006, 0.0199, 0.1239, - 0.0030, 0.9251, 0.8374, 0.2492, 0.6001, 0.0171, 0.3645, - 0.9564, 0.7314, 0.8427, 0.8917, 0.1465, 0.2355, 0.6975, - 0.9025, 0.0358, 0.2860, 0.4051, 0.9734, 0.8626, 0.4028, - 0.9642, 0.0743, 0.8714, 0.6919, 0.3640, 0.9239, 0.1573, - 0.9549, 0.3068, 0.2789, 0.0169, 0.6253, 0.7318, 0.1857, - 0.1394, 0.2220, 0.2355, 0.9726, 0.9750]), + col_indices=tensor([3835, 4100, 3288, 1111, 2615, 1842, 721, 2110, 3471, + 3298, 1062, 2204, 2583, 1594, 4310, 3017, 1522, 2893, + 2730, 967, 697, 2772, 296, 2537, 3401, 4896, 2185, + 1467, 4947, 4327, 3210, 2930, 361, 814, 2937, 1687, + 1245, 4425, 1355, 3995, 1835, 2585, 3294, 3369, 4131, + 2112, 4663, 2814, 2176, 3719, 2668, 994, 441, 3881, + 1372, 4680, 2519, 2791, 4549, 2729, 2953, 3872, 1491, + 3405, 3183, 1786, 828, 3121, 2433, 4207, 4911, 322, + 4332, 2322, 3, 1285, 153, 2814, 2246, 4247, 4666, + 1547, 3260, 3417, 4348, 1373, 3091, 1509, 4230, 2269, + 4339, 2096, 1695, 3122, 2088, 1784, 3655, 417, 4064, + 185, 3211, 1867, 853, 1958, 692, 3072, 1918, 1238, + 453, 2452, 3264, 4717, 598, 3296, 1291, 52, 4664, + 1619, 3701, 2531, 839, 1095, 1405, 1755, 4032, 1471, + 3742, 3286, 3684, 4379, 314, 1276, 2952, 1464, 3493, + 4632, 3867, 270, 1580, 3966, 1648, 4218, 4607, 1543, + 1273, 4431, 3554, 750, 2726, 3039, 4850, 204, 1982, + 1280, 1670, 435, 1029, 1265, 488, 313, 4283, 2463, + 3066, 3421, 3101, 1667, 3847, 502, 2450, 679, 4239, + 4387, 1269, 2481, 145, 1867, 117, 2524, 3731, 742, + 1231, 4496, 2560, 4909, 2316, 2336, 3858, 2912, 4745, + 4422, 458, 4406, 3773, 4481, 2073, 2667, 4275, 2595, + 2103, 4813, 1809, 3578, 3133, 3916, 1983, 2028, 1469, + 2266, 2381, 3024, 513, 3320, 179, 2026, 395, 3774, + 3246, 274, 738, 3581, 964, 3178, 1016, 1417, 4487, + 2137, 190, 1094, 41, 3224, 3890, 4493, 3175, 4452, + 2458, 2626, 1597, 2294, 1289, 4412, 4878, 2717, 611, + 3437, 165, 1044, 4973, 600, 1953, 234]), + values=tensor([0.9194, 0.9511, 0.1759, 0.0806, 0.5579, 0.5909, 0.5444, + 0.4929, 0.3283, 0.5813, 0.8577, 0.8573, 0.3370, 0.6634, + 0.3256, 0.0555, 0.4432, 0.9017, 0.3253, 0.5759, 0.1980, + 0.8117, 0.0490, 0.8318, 0.9103, 0.9173, 0.2933, 0.6711, + 0.2931, 0.9505, 0.4262, 0.7750, 0.5853, 0.8597, 0.3103, + 0.5115, 0.2132, 0.0255, 0.2347, 0.0616, 0.3497, 0.3151, + 0.3650, 0.2626, 0.4199, 0.9673, 0.5246, 0.1533, 0.1365, + 0.8928, 0.4291, 0.7577, 0.5244, 0.2399, 0.8984, 0.6947, + 0.4626, 0.8346, 0.5027, 0.3949, 0.3636, 0.5403, 0.5095, + 0.0164, 0.1264, 0.0974, 0.4505, 0.5292, 0.2234, 0.1960, + 0.5104, 0.0273, 0.8931, 0.1073, 0.8623, 0.2575, 0.7953, + 0.5701, 0.5194, 0.2709, 0.1261, 0.9933, 0.0287, 0.3824, + 0.4292, 0.1111, 0.0013, 0.8437, 0.4581, 0.4410, 0.3443, + 0.9120, 0.5789, 0.5934, 0.4413, 0.7093, 0.6011, 0.6718, + 0.1412, 0.9770, 0.5884, 
0.7031, 0.5903, 0.9765, 0.1106, + 0.4818, 0.0405, 0.0041, 0.7017, 0.4360, 0.3522, 0.4112, + 0.9723, 0.0877, 0.0441, 0.1570, 0.8913, 0.1536, 0.1637, + 0.4981, 0.4859, 0.6376, 0.2129, 0.5287, 0.9896, 0.9621, + 0.1314, 0.7893, 0.3622, 0.7936, 0.0563, 0.8633, 0.0436, + 0.9765, 0.6100, 0.5316, 0.1536, 0.2153, 0.0996, 0.8797, + 0.6733, 0.6855, 0.6258, 0.3627, 0.0821, 0.4890, 0.6054, + 0.0612, 0.1414, 0.7279, 0.3036, 0.9598, 0.6434, 0.2948, + 0.2841, 0.4693, 0.6178, 0.6854, 0.2242, 0.5700, 0.9283, + 0.5926, 0.3085, 0.8456, 0.3258, 0.6163, 0.7395, 0.4116, + 0.7534, 0.4931, 0.6502, 0.7243, 0.0397, 0.2405, 0.0377, + 0.3907, 0.2557, 0.7719, 0.4558, 0.1846, 0.4629, 0.0481, + 0.8517, 0.3424, 0.8116, 0.9881, 0.9386, 0.9599, 0.5813, + 0.1414, 0.7283, 0.6897, 0.1106, 0.0774, 0.7348, 0.0658, + 0.0995, 0.0410, 0.8733, 0.4363, 0.9618, 0.9688, 0.2513, + 0.7746, 0.6495, 0.5569, 0.9848, 0.6514, 0.9270, 0.0876, + 0.9735, 0.2507, 0.3576, 0.5873, 0.9861, 0.2704, 0.2117, + 0.9499, 0.3531, 0.3033, 0.9542, 0.0598, 0.0487, 0.2094, + 0.8599, 0.3949, 0.2128, 0.4097, 0.2778, 0.3314, 0.2249, + 0.7619, 0.9244, 0.6695, 0.7027, 0.3445, 0.1185, 0.1951, + 0.1406, 0.2410, 0.4900, 0.3068, 0.2304, 0.0755, 0.6690, + 0.6082, 0.5362, 0.8224, 0.7336, 0.9890]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.9060, 0.0911, 0.6185, ..., 0.7353, 0.0547, 0.2301]) +tensor([0.8124, 0.7369, 0.0249, ..., 0.2346, 0.3675, 0.9867]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -317,13 +344,13 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.16942024230957 seconds +Time: 10.360889196395874 seconds -[20.28, 20.4, 20.4, 20.48, 20.76, 21.28, 21.16, 20.96, 20.76, 20.4] -[20.12, 20.44, 21.32, 22.44, 25.12, 25.12, 25.76, 26.28, 25.96, 25.44, 23.88, 24.04, 24.16, 23.84] -14.210330247879028 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 284909, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.16942024230957, 'TIME_S_1KI': 0.035693573184102885, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.9542731094361, 'W': 22.304497332617817} -[20.28, 20.4, 20.4, 20.48, 20.76, 21.28, 21.16, 20.96, 20.76, 20.4, 20.8, 20.88, 20.72, 20.76, 20.84, 20.84, 20.44, 20.52, 20.32, 20.6] -372.55999999999995 -18.627999999999997 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 284909, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.16942024230957, 'TIME_S_1KI': 0.035693573184102885, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.9542731094361, 'W': 22.304497332617817, 'J_1KI': 1.1124754679895548, 'W_1KI': 0.07828639085679223, 'W_D': 3.6764973326178207, 'J_D': 52.24424125194558, 'W_D_1KI': 0.012904110900736098, 'J_D_1KI': 4.529204377796454e-05} +[20.32, 20.48, 20.36, 20.2, 20.52, 22.12, 22.84, 23.64, 23.64, 24.56] +[23.48, 22.48, 21.68, 22.32, 23.76, 25.04, 25.96, 25.76, 25.2, 23.6, 23.52, 23.52, 23.36, 23.16] +14.24486756324768 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 287687, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.360889196395874, 'TIME_S_1KI': 0.036014450414498654, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.19113276481625, 'W': 22.12664535948525} +[20.32, 20.48, 20.36, 20.2, 
20.52, 22.12, 22.84, 23.64, 23.64, 24.56, 20.36, 20.36, 20.52, 20.44, 20.2, 20.16, 20.28, 20.28, 20.28, 20.52] +379.2 +18.96 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 287687, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.360889196395874, 'TIME_S_1KI': 0.036014450414498654, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.19113276481625, 'W': 22.12664535948525, 'J_1KI': 1.095604364343249, 'W_1KI': 0.07691221834662411, 'W_D': 3.1666453594852477, 'J_D': 45.108443765640196, 'W_D_1KI': 0.011007259137483611, 'J_D_1KI': 3.8261232302758245e-05} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_5e-05.json index 758834a..c7d5689 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 16, "ITERATIONS": 154432, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.621034145355225, "TIME_S_1KI": 0.06877482740206191, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 316.4244461250305, "W": 22.262246477486542, "J_1KI": 2.0489564735613763, "W_1KI": 0.144155657360434, "W_D": 3.7392464774865424, "J_D": 53.14778078484536, "W_D_1KI": 0.024212899382812774, "J_D_1KI": 0.00015678680184685024} +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 149526, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.39885950088501, "TIME_S_1KI": 0.07623329388123143, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 316.2356255340576, "W": 22.190935624507333, "J_1KI": 2.1149206528233058, "W_1KI": 0.14840854182220706, "W_D": 3.6929356245073315, "J_D": 52.62679442787168, "W_D_1KI": 0.024697615294379114, "J_D_1KI": 0.00016517271440671933} diff --git a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_5e-05.output index 8f01bb5..8039b7d 100644 --- a/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_16core/altra_16_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014879941940307617} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014568090438842773} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1248, 1250, 1250]), - col_indices=tensor([1397, 3608, 621, ..., 1983, 2722, 4972]), - values=tensor([0.7898, 0.8890, 0.9853, ..., 0.2806, 0.4332, 0.7785]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([4596, 2398, 1433, ..., 472, 1393, 6]), + values=tensor([0.6977, 0.8727, 0.5300, ..., 0.8044, 0.5079, 0.4951]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.8515, 0.1205, 0.1290, ..., 0.0596, 0.1294, 0.2178]) +tensor([0.1657, 0.3721, 0.6924, ..., 0.8050, 0.8578, 0.6368]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 0.014879941940307617 seconds +Time: 0.014568090438842773 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 70564 -ss 5000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.797720670700073} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 72075 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 5.061230897903442} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 1249, 1250, 1250]), - col_indices=tensor([4236, 1927, 389, ..., 3900, 4084, 4178]), - values=tensor([0.5819, 0.5926, 0.4032, ..., 0.1422, 0.8129, 0.9187]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1250, 1250]), + col_indices=tensor([2260, 2424, 889, ..., 317, 2175, 3750]), + values=tensor([0.1362, 0.3286, 0.9847, ..., 0.0267, 0.4563, 0.8248]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.4782, 0.7587, 0.6755, ..., 0.4641, 0.3230, 0.1517]) +tensor([0.2596, 0.1456, 0.1990, ..., 0.1892, 0.0338, 0.8817]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 4.797720670700073 seconds +Time: 5.061230897903442 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 154432 -ss 5000 -sd 5e-05 -c 16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.621034145355225} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 149526 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.39885950088501} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1250, 1250]), - col_indices=tensor([ 91, 2944, 3974, ..., 4430, 70, 3263]), - values=tensor([0.2553, 0.0855, 0.4739, ..., 0.3797, 0.6721, 0.4378]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1249, 1250]), + col_indices=tensor([2521, 379, 3911, ..., 2509, 1527, 2788]), + values=tensor([0.8338, 0.7895, 0.3495, ..., 0.2114, 0.5878, 0.1824]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.8009, 0.9874, 0.1682, ..., 0.8612, 0.3697, 0.0752]) +tensor([0.2208, 0.1989, 0.1754, ..., 0.7157, 0.0206, 0.1208]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.621034145355225 seconds +Time: 11.39885950088501 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1250, 1250]), - col_indices=tensor([ 91, 2944, 3974, ..., 4430, 70, 3263]), - values=tensor([0.2553, 0.0855, 0.4739, ..., 0.3797, 0.6721, 0.4378]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1249, 1250]), + col_indices=tensor([2521, 379, 3911, ..., 2509, 1527, 2788]), + values=tensor([0.8338, 0.7895, 0.3495, ..., 0.2114, 0.5878, 0.1824]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.8009, 0.9874, 0.1682, ..., 0.8612, 0.3697, 0.0752]) +tensor([0.2208, 0.1989, 0.1754, ..., 0.7157, 0.0206, 0.1208]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.621034145355225 seconds +Time: 11.39885950088501 seconds -[20.4, 20.68, 20.44, 20.16, 20.16, 20.48, 20.44, 20.68, 20.64, 20.88] -[20.92, 20.96, 21.52, 22.88, 25.2, 25.52, 26.2, 26.2, 25.6, 24.92, 23.32, 23.36, 23.56, 23.44] -14.213500261306763 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 154432, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.621034145355225, 'TIME_S_1KI': 0.06877482740206191, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.4244461250305, 'W': 22.262246477486542} -[20.4, 20.68, 20.44, 20.16, 20.16, 20.48, 20.44, 20.68, 20.64, 20.88, 20.72, 20.64, 20.68, 20.52, 20.64, 20.8, 20.76, 20.76, 20.68, 20.6] -370.46 -18.523 -{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 154432, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.621034145355225, 'TIME_S_1KI': 0.06877482740206191, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.4244461250305, 'W': 22.262246477486542, 'J_1KI': 2.0489564735613763, 'W_1KI': 0.144155657360434, 'W_D': 3.7392464774865424, 'J_D': 53.14778078484536, 'W_D_1KI': 0.024212899382812774, 'J_D_1KI': 0.00015678680184685024} +[20.36, 20.44, 20.36, 20.44, 20.48, 20.84, 20.68, 20.6, 20.4, 20.28] +[20.24, 20.24, 21.48, 23.68, 23.68, 25.92, 26.48, 27.04, 25.2, 24.72, 23.36, 23.36, 23.36, 23.44] +14.250666618347168 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 149526, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.39885950088501, 'TIME_S_1KI': 0.07623329388123143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.2356255340576, 'W': 22.190935624507333} +[20.36, 20.44, 20.36, 20.44, 20.48, 20.84, 20.68, 20.6, 20.4, 20.28, 20.56, 20.48, 20.6, 20.56, 20.56, 20.64, 20.72, 20.48, 20.76, 20.64] +369.96000000000004 +18.498 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 149526, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.39885950088501, 'TIME_S_1KI': 0.07623329388123143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.2356255340576, 'W': 22.190935624507333, 'J_1KI': 2.1149206528233058, 'W_1KI': 0.14840854182220706, 'W_D': 3.6929356245073315, 'J_D': 52.62679442787168, 'W_D_1KI': 0.024697615294379114, 'J_D_1KI': 0.00016517271440671933} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.0001.json 
b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..926894b --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 290, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.71990966796875, "TIME_S_1KI": 43.86175747575431, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 807.2845898437499, "W": 73.02, "J_1KI": 2783.739964978448, "W_1KI": 251.79310344827582, "W_D": 34.07825, "J_D": 376.75768383789057, "W_D_1KI": 117.51120689655171, "J_D_1KI": 405.21105826397144} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..c651080 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.157309055328369} + +tensor(indices=tensor([[34427, 69400, 51324, ..., 20097, 20987, 68178], + [56096, 96032, 39777, ..., 16265, 53922, 54655]]), + values=tensor([0.4808, 0.2302, 0.5302, ..., 0.7228, 0.2179, 0.6735]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.7793, 0.6225, 0.6202, ..., 0.6052, 0.8949, 0.1158]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 4.157309055328369 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '252', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.112457275390625} + +tensor(indices=tensor([[42842, 40092, 561, ..., 33545, 73388, 19157], + [20691, 11564, 78471, ..., 85890, 81317, 82592]]), + values=tensor([0.0680, 0.2482, 0.3556, ..., 0.0070, 0.0146, 0.7202]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.9534, 0.2150, 0.8602, ..., 0.8697, 0.8713, 0.1420]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 9.112457275390625 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '290', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.71990966796875} + +tensor(indices=tensor([[49524, 98915, 53189, ..., 69391, 8827, 7893], + [56866, 5768, 28546, ..., 23557, 
2810, 46095]]), + values=tensor([0.0385, 0.9490, 0.4642, ..., 0.3282, 0.6420, 0.7698]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.3167, 0.7029, 0.7814, ..., 0.4362, 0.3089, 0.2209]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 12.71990966796875 seconds + +tensor(indices=tensor([[49524, 98915, 53189, ..., 69391, 8827, 7893], + [56866, 5768, 28546, ..., 23557, 2810, 46095]]), + values=tensor([0.0385, 0.9490, 0.4642, ..., 0.3282, 0.6420, 0.7698]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.3167, 0.7029, 0.7814, ..., 0.4362, 0.3089, 0.2209]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 12.71990966796875 seconds + +[70.47, 65.46, 50.68, 39.76, 39.69, 39.69, 45.97, 39.73, 40.68, 40.45] +[73.02] +11.0556640625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 290, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.71990966796875, 'TIME_S_1KI': 43.86175747575431, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 807.2845898437499, 'W': 73.02} +[70.47, 65.46, 50.68, 39.76, 39.69, 39.69, 45.97, 39.73, 40.68, 40.45, 40.73, 39.56, 39.82, 39.12, 39.19, 44.87, 39.72, 39.26, 39.77, 40.08] +778.835 +38.94175 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 290, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.71990966796875, 'TIME_S_1KI': 43.86175747575431, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 807.2845898437499, 'W': 73.02, 'J_1KI': 2783.739964978448, 'W_1KI': 251.79310344827582, 'W_D': 34.07825, 'J_D': 376.75768383789057, 'W_D_1KI': 117.51120689655171, 'J_D_1KI': 405.21105826397144} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..847e4ec --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 28.91073727607727, "TIME_S_1KI": 289.1073727607727, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2321.0039581131937, "W": 68.79, "J_1KI": 23210.039581131936, "W_1KI": 687.9000000000001, "W_D": 33.30475000000001, "J_D": 1123.7164787610177, "W_D_1KI": 333.0475000000001, "J_D_1KI": 3330.4750000000013} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..94b706e --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.001', 
'-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 28.91073727607727} + +tensor(indices=tensor([[88015, 32293, 80054, ..., 45400, 29398, 26982], + [12366, 20264, 77671, ..., 77327, 11233, 183]]), + values=tensor([0.0777, 0.7453, 0.5277, ..., 0.9894, 0.3965, 0.0900]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.3536, 0.8249, 0.5645, ..., 0.5094, 0.3993, 0.6786]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 28.91073727607727 seconds + +tensor(indices=tensor([[88015, 32293, 80054, ..., 45400, 29398, 26982], + [12366, 20264, 77671, ..., 77327, 11233, 183]]), + values=tensor([0.0777, 0.7453, 0.5277, ..., 0.9894, 0.3965, 0.0900]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.3536, 0.8249, 0.5645, ..., 0.5094, 0.3993, 0.6786]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 28.91073727607727 seconds + +[39.79, 39.1, 39.81, 39.48, 40.2, 39.12, 39.62, 39.11, 39.26, 39.13] +[68.79] +33.740426778793335 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 28.91073727607727, 'TIME_S_1KI': 289.1073727607727, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2321.0039581131937, 'W': 68.79} +[39.79, 39.1, 39.81, 39.48, 40.2, 39.12, 39.62, 39.11, 39.26, 39.13, 40.26, 39.3, 39.67, 40.32, 39.02, 38.96, 39.24, 39.36, 39.04, 39.01] +709.7049999999999 +35.48524999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 28.91073727607727, 'TIME_S_1KI': 289.1073727607727, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2321.0039581131937, 'W': 68.79, 'J_1KI': 23210.039581131936, 'W_1KI': 687.9000000000001, 'W_D': 33.30475000000001, 'J_D': 1123.7164787610177, 'W_D_1KI': 333.0475000000001, 'J_D_1KI': 3330.4750000000013} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..134765b --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 289.2517776489258, "TIME_S_1KI": 2892.517776489258, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 20959.26904004574, "W": 66.54, "J_1KI": 209592.6904004574, "W_1KI": 665.4000000000001, "W_D": 30.630750000000006, "J_D": 9648.303729311408, "W_D_1KI": 306.30750000000006, "J_D_1KI": 3063.0750000000003} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.01.output 
b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..5c17e1b --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 289.2517776489258} + +tensor(indices=tensor([[49105, 71465, 52786, ..., 33256, 58756, 83042], + [69286, 41820, 96975, ..., 7793, 37010, 30288]]), + values=tensor([0.0109, 0.6981, 0.9936, ..., 0.3670, 0.9258, 0.4349]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.5046, 0.2444, 0.9485, ..., 0.4277, 0.1641, 0.0237]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 289.2517776489258 seconds + +tensor(indices=tensor([[49105, 71465, 52786, ..., 33256, 58756, 83042], + [69286, 41820, 96975, ..., 7793, 37010, 30288]]), + values=tensor([0.0109, 0.6981, 0.9936, ..., 0.3670, 0.9258, 0.4349]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.5046, 0.2444, 0.9485, ..., 0.4277, 0.1641, 0.0237]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 289.2517776489258 seconds + +[39.72, 39.1, 39.28, 39.13, 39.65, 39.95, 39.85, 39.18, 39.12, 39.66] +[66.54] +314.98751187324524 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 289.2517776489258, 'TIME_S_1KI': 2892.517776489258, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 20959.26904004574, 'W': 66.54} +[39.72, 39.1, 39.28, 39.13, 39.65, 39.95, 39.85, 39.18, 39.12, 39.66, 39.95, 39.36, 39.44, 39.79, 39.72, 40.36, 45.1, 39.98, 39.85, 39.32] +718.185 +35.90925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 289.2517776489258, 'TIME_S_1KI': 2892.517776489258, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 20959.26904004574, 'W': 66.54, 'J_1KI': 209592.6904004574, 'W_1KI': 665.4000000000001, 'W_D': 30.630750000000006, 'J_D': 9648.303729311408, 'W_D_1KI': 306.30750000000006, 'J_D_1KI': 3063.0750000000003} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..fbf1598 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3228, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.736116886138916, "TIME_S_1KI": 3.3259345991756244, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1111.3897745609283, "W": 83.72, "J_1KI": 344.29670835220827, "W_1KI": 25.93556381660471, "W_D": 48.1185, "J_D": 638.7769812136888, "W_D_1KI": 14.90659851301115, "J_D_1KI": 4.617905363386354} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..cb8a612 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.34845471382141113} + +tensor(indices=tensor([[64299, 76999, 75723, ..., 24258, 17072, 7308], + [96225, 24337, 62796, ..., 70747, 27844, 68996]]), + values=tensor([0.6835, 0.8753, 0.9592, ..., 0.9127, 0.0258, 0.1786]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.1388, 0.2643, 0.2040, ..., 0.9874, 0.9386, 0.4420]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.34845471382141113 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '3013', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.798452377319336} + +tensor(indices=tensor([[88368, 78455, 10545, ..., 11698, 81693, 1032], + [40291, 74672, 47226, ..., 38657, 1020, 61907]]), + values=tensor([0.2483, 0.8888, 0.0702, ..., 0.6709, 0.4815, 0.3893]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6377, 0.2845, 0.2211, ..., 0.5316, 0.9354, 0.1377]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 9.798452377319336 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '3228', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.736116886138916} + +tensor(indices=tensor([[23698, 31205, 19102, ..., 83875, 40046, 94266], + [45748, 10244, 98231, ..., 35977, 27039, 33074]]), + values=tensor([0.9220, 0.0205, 0.5938, ..., 0.7014, 0.0764, 0.5327]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6703, 0.8339, 0.2286, ..., 0.0474, 0.8438, 0.7306]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.736116886138916 seconds + +tensor(indices=tensor([[23698, 31205, 19102, ..., 83875, 40046, 94266], + [45748, 10244, 98231, ..., 35977, 27039, 33074]]), + values=tensor([0.9220, 0.0205, 0.5938, ..., 0.7014, 
0.0764, 0.5327]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6703, 0.8339, 0.2286, ..., 0.0474, 0.8438, 0.7306]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.736116886138916 seconds + +[40.06, 39.29, 39.78, 39.31, 39.4, 39.76, 39.45, 39.71, 39.97, 39.73] +[83.72] +13.275080919265747 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3228, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.736116886138916, 'TIME_S_1KI': 3.3259345991756244, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1111.3897745609283, 'W': 83.72} +[40.06, 39.29, 39.78, 39.31, 39.4, 39.76, 39.45, 39.71, 39.97, 39.73, 40.04, 39.29, 39.26, 39.3, 39.22, 39.59, 39.39, 39.76, 39.69, 39.89] +712.03 +35.6015 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3228, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.736116886138916, 'TIME_S_1KI': 3.3259345991756244, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1111.3897745609283, 'W': 83.72, 'J_1KI': 344.29670835220827, 'W_1KI': 25.93556381660471, 'W_D': 48.1185, 'J_D': 638.7769812136888, 'W_D_1KI': 14.90659851301115, 'J_D_1KI': 4.617905363386354} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..48ac41f --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 693, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.366589784622192, "TIME_S_1KI": 14.959004018213841, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 964.3220639467239, "W": 74.99, "J_1KI": 1391.5181297932525, "W_1KI": 108.2106782106782, "W_D": 39.06649999999999, "J_D": 502.36948808073987, "W_D_1KI": 56.37301587301586, "J_D_1KI": 81.34634325110514} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..77f4e2a --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.5141761302947998} + +tensor(indices=tensor([[ 8245, 20158, 5409, ..., 7348, 42186, 80626], + [24730, 51070, 38408, ..., 11303, 68788, 40122]]), + values=tensor([0.7419, 0.5833, 0.5192, ..., 0.3030, 0.6627, 0.3380]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.1164, 0.9234, 0.3614, ..., 0.6104, 0.3457, 0.6307]) +Matrix Type: synthetic 
+Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 1.5141761302947998 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '693', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.366589784622192} + +tensor(indices=tensor([[53993, 57064, 89526, ..., 22186, 16728, 78618], + [86410, 51251, 8908, ..., 90673, 61390, 44731]]), + values=tensor([0.7814, 0.0708, 0.1145, ..., 0.9136, 0.2601, 0.7654]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.1957, 0.7415, 0.3302, ..., 0.9205, 0.5974, 0.8171]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.366589784622192 seconds + +tensor(indices=tensor([[53993, 57064, 89526, ..., 22186, 16728, 78618], + [86410, 51251, 8908, ..., 90673, 61390, 44731]]), + values=tensor([0.7814, 0.0708, 0.1145, ..., 0.9136, 0.2601, 0.7654]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.1957, 0.7415, 0.3302, ..., 0.9205, 0.5974, 0.8171]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.366589784622192 seconds + +[40.29, 39.3, 39.12, 44.8, 39.6, 39.55, 39.51, 39.43, 39.53, 39.58] +[74.99] +12.859342098236084 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.366589784622192, 'TIME_S_1KI': 14.959004018213841, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 964.3220639467239, 'W': 74.99} +[40.29, 39.3, 39.12, 44.8, 39.6, 39.55, 39.51, 39.43, 39.53, 39.58, 45.79, 39.58, 40.53, 39.43, 39.01, 39.33, 39.05, 39.19, 39.02, 39.32] +718.47 +35.923500000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.366589784622192, 'TIME_S_1KI': 14.959004018213841, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 964.3220639467239, 'W': 74.99, 'J_1KI': 1391.5181297932525, 'W_1KI': 108.2106782106782, 'W_D': 39.06649999999999, 'J_D': 502.36948808073987, 'W_D_1KI': 56.37301587301586, 'J_D_1KI': 81.34634325110514} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..7c1547f --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 35665, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.350804805755615, "TIME_S_1KI": 0.2902230423596135, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 844.7131268239021, "W": 65.69, "J_1KI": 23.684652371341713, 
"W_1KI": 1.8418617692415533, "W_D": 30.030249999999995, "J_D": 386.1614610565304, "W_D_1KI": 0.8420089723818869, "J_D_1KI": 0.023608831414044212} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..410f729 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03710126876831055} + +tensor(indices=tensor([[3887, 4268, 7342, ..., 4102, 949, 4018], + [ 939, 4556, 9100, ..., 9381, 3959, 9285]]), + values=tensor([0.4248, 0.1907, 0.6808, ..., 0.4409, 0.9486, 0.2302]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.3548, 0.6609, 0.1796, ..., 0.6805, 0.0787, 0.8219]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.03710126876831055 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '28300', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.331687211990356} + +tensor(indices=tensor([[7400, 4161, 8129, ..., 5742, 8125, 8020], + [5108, 812, 5499, ..., 5678, 7448, 9315]]), + values=tensor([0.1380, 0.9016, 0.6625, ..., 0.7928, 0.8837, 0.6832]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.0140, 0.0068, 0.6359, ..., 0.9966, 0.4614, 0.3717]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 8.331687211990356 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '35665', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.350804805755615} + +tensor(indices=tensor([[2224, 7036, 9237, ..., 9920, 7092, 3925], + [5139, 5649, 1754, ..., 1299, 5394, 2424]]), + values=tensor([0.2448, 0.0821, 0.2762, ..., 0.6436, 0.9683, 0.9300]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.0399, 0.2979, 0.7592, ..., 0.0014, 0.6259, 0.5615]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.350804805755615 seconds + +tensor(indices=tensor([[2224, 7036, 9237, ..., 9920, 7092, 3925], + [5139, 5649, 1754, ..., 1299, 5394, 2424]]), + values=tensor([0.2448, 0.0821, 0.2762, ..., 0.6436, 0.9683, 0.9300]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.0399, 0.2979, 0.7592, ..., 0.0014, 0.6259, 0.5615]) +Matrix Type: synthetic +Matrix Format: 
coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.350804805755615 seconds + +[40.41, 38.88, 44.46, 39.75, 39.47, 39.4, 39.35, 39.18, 39.34, 40.79] +[65.69] +12.859082460403442 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 35665, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.350804805755615, 'TIME_S_1KI': 0.2902230423596135, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 844.7131268239021, 'W': 65.69} +[40.41, 38.88, 44.46, 39.75, 39.47, 39.4, 39.35, 39.18, 39.34, 40.79, 40.58, 39.13, 39.13, 38.83, 39.15, 39.01, 39.52, 39.28, 38.97, 38.91] +713.195 +35.65975 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 35665, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.350804805755615, 'TIME_S_1KI': 0.2902230423596135, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 844.7131268239021, 'W': 65.69, 'J_1KI': 23.684652371341713, 'W_1KI': 1.8418617692415533, 'W_D': 30.030249999999995, 'J_D': 386.1614610565304, 'W_D_1KI': 0.8420089723818869, 'J_D_1KI': 0.023608831414044212} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..5d4c0a8 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3594, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.165302753448486, "TIME_S_1KI": 2.828409224665689, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 829.6233745193481, "W": 65.52, "J_1KI": 230.8356634722727, "W_1KI": 18.230383973288813, "W_D": 30.2055, "J_D": 382.4662521221638, "W_D_1KI": 8.404424040066779, "J_D_1KI": 2.3384596661287644} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..33572a0 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.29212141036987305} + +tensor(indices=tensor([[ 283, 8951, 8400, ..., 4908, 3104, 1335], + [ 934, 10, 5202, ..., 7678, 1975, 2277]]), + values=tensor([0.6015, 0.8734, 0.1496, ..., 0.2591, 0.4166, 0.0179]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.4975, 0.8914, 0.2035, ..., 0.9030, 0.7522, 0.5863]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.29212141036987305 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 
'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '3594', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.165302753448486} + +tensor(indices=tensor([[4340, 928, 8963, ..., 3252, 898, 8195], + [1933, 6538, 1046, ..., 25, 4780, 1343]]), + values=tensor([0.2253, 0.8871, 0.1158, ..., 0.5609, 0.9374, 0.8537]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.0774, 0.4411, 0.5892, ..., 0.4225, 0.7563, 0.6864]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.165302753448486 seconds + +tensor(indices=tensor([[4340, 928, 8963, ..., 3252, 898, 8195], + [1933, 6538, 1046, ..., 25, 4780, 1343]]), + values=tensor([0.2253, 0.8871, 0.1158, ..., 0.5609, 0.9374, 0.8537]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.0774, 0.4411, 0.5892, ..., 0.4225, 0.7563, 0.6864]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.165302753448486 seconds + +[42.77, 38.99, 38.95, 38.89, 39.82, 39.23, 39.07, 38.88, 38.88, 40.25] +[65.52] +12.662139415740967 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3594, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.165302753448486, 'TIME_S_1KI': 2.828409224665689, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 829.6233745193481, 'W': 65.52} +[42.77, 38.99, 38.95, 38.89, 39.82, 39.23, 39.07, 38.88, 38.88, 40.25, 39.56, 38.84, 39.08, 38.97, 38.94, 38.83, 39.69, 38.95, 39.39, 39.2] +706.29 +35.314499999999995 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3594, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.165302753448486, 'TIME_S_1KI': 2.828409224665689, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 829.6233745193481, 'W': 65.52, 'J_1KI': 230.8356634722727, 'W_1KI': 18.230383973288813, 'W_D': 30.2055, 'J_D': 382.4662521221638, 'W_D_1KI': 8.404424040066779, 'J_D_1KI': 2.3384596661287644} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..a8267ac --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 372, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.509546279907227, "TIME_S_1KI": 28.251468494374265, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 868.6983420300484, "W": 65.81, "J_1KI": 2335.2105968549686, "W_1KI": 176.90860215053763, "W_D": 30.457000000000008, "J_D": 402.03533510422716, "W_D_1KI": 81.87365591397852, "J_D_1KI": 220.09047288703903} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.01.output 
b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..7334943 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.821150064468384} + +tensor(indices=tensor([[4233, 2890, 9495, ..., 5839, 7420, 3132], + [2735, 9661, 8705, ..., 5181, 2323, 4234]]), + values=tensor([0.7015, 0.6532, 0.8596, ..., 0.2021, 0.6323, 0.0041]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.0966, 0.1191, 0.3091, ..., 0.0659, 0.3650, 0.3795]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 2.821150064468384 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '372', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.509546279907227} + +tensor(indices=tensor([[5931, 4974, 9693, ..., 3644, 6663, 2246], + [3219, 2173, 9925, ..., 5148, 4669, 4271]]), + values=tensor([0.1885, 0.7906, 0.6990, ..., 0.9020, 0.8899, 0.7652]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.8229, 0.0397, 0.9356, ..., 0.4211, 0.3059, 0.5755]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.509546279907227 seconds + +tensor(indices=tensor([[5931, 4974, 9693, ..., 3644, 6663, 2246], + [3219, 2173, 9925, ..., 5148, 4669, 4271]]), + values=tensor([0.1885, 0.7906, 0.6990, ..., 0.9020, 0.8899, 0.7652]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.8229, 0.0397, 0.9356, ..., 0.4211, 0.3059, 0.5755]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.509546279907227 seconds + +[39.71, 38.8, 38.9, 39.31, 39.27, 39.43, 39.31, 39.06, 38.88, 38.81] +[65.81] +13.200096368789673 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.509546279907227, 'TIME_S_1KI': 28.251468494374265, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 868.6983420300484, 'W': 65.81} +[39.71, 38.8, 38.9, 39.31, 39.27, 39.43, 39.31, 39.06, 38.88, 38.81, 39.78, 38.94, 39.04, 38.78, 39.04, 39.19, 39.53, 39.2, 38.81, 44.84] +707.06 +35.352999999999994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.509546279907227, 'TIME_S_1KI': 28.251468494374265, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 868.6983420300484, 
'W': 65.81, 'J_1KI': 2335.2105968549686, 'W_1KI': 176.90860215053763, 'W_D': 30.457000000000008, 'J_D': 402.03533510422716, 'W_D_1KI': 81.87365591397852, 'J_D_1KI': 220.09047288703903} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..5e082a6 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 14.231861114501953, "TIME_S_1KI": 142.31861114501953, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1164.2800281524658, "W": 66.08, "J_1KI": 11642.800281524656, "W_1KI": 660.8, "W_D": 30.454749999999997, "J_D": 536.5898484772443, "W_D_1KI": 304.54749999999996, "J_D_1KI": 3045.475} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..b419fd7 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 14.231861114501953} + +tensor(indices=tensor([[9495, 4719, 4666, ..., 8615, 8013, 7313], + [8715, 9370, 605, ..., 9398, 4346, 8875]]), + values=tensor([0.0750, 0.0121, 0.7089, ..., 0.8792, 0.5322, 0.0992]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.1475, 0.0641, 0.2995, ..., 0.1186, 0.1304, 0.7388]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 14.231861114501953 seconds + +tensor(indices=tensor([[9495, 4719, 4666, ..., 8615, 8013, 7313], + [8715, 9370, 605, ..., 9398, 4346, 8875]]), + values=tensor([0.0750, 0.0121, 0.7089, ..., 0.8792, 0.5322, 0.0992]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.1475, 0.0641, 0.2995, ..., 0.1186, 0.1304, 0.7388]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 14.231861114501953 seconds + +[39.63, 44.48, 40.11, 39.0, 39.55, 38.82, 39.47, 38.9, 39.08, 38.83] +[66.08] +17.61924982070923 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 14.231861114501953, 'TIME_S_1KI': 142.31861114501953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1164.2800281524658, 'W': 66.08} +[39.63, 44.48, 40.11, 39.0, 39.55, 38.82, 39.47, 38.9, 39.08, 38.83, 42.0, 39.42, 38.85, 39.16, 39.03, 39.52, 39.07, 38.82, 38.88, 40.23] +712.505 +35.62525 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 
'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 14.231861114501953, 'TIME_S_1KI': 142.31861114501953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1164.2800281524658, 'W': 66.08, 'J_1KI': 11642.800281524656, 'W_1KI': 660.8, 'W_D': 30.454749999999997, 'J_D': 536.5898484772443, 'W_D_1KI': 304.54749999999996, 'J_D_1KI': 3045.475} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..2189ae8 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 28.15625023841858, "TIME_S_1KI": 281.5625023841858, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2156.524199552536, "W": 66.12, "J_1KI": 21565.24199552536, "W_1KI": 661.2, "W_D": 30.269250000000007, "J_D": 987.2409275152089, "W_D_1KI": 302.6925000000001, "J_D_1KI": 3026.925000000001} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..8fa87f8 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 28.15625023841858} + +tensor(indices=tensor([[2940, 6880, 7445, ..., 8417, 3373, 3567], + [6631, 4458, 8647, ..., 306, 2665, 6170]]), + values=tensor([0.7870, 0.6906, 0.4634, ..., 0.7200, 0.5540, 0.7358]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.3823, 0.7370, 0.1592, ..., 0.3654, 0.8735, 0.6393]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 28.15625023841858 seconds + +tensor(indices=tensor([[2940, 6880, 7445, ..., 8417, 3373, 3567], + [6631, 4458, 8647, ..., 306, 2665, 6170]]), + values=tensor([0.7870, 0.6906, 0.4634, ..., 0.7200, 0.5540, 0.7358]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.3823, 0.7370, 0.1592, ..., 0.3654, 0.8735, 0.6393]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 28.15625023841858 seconds + +[39.51, 44.41, 39.14, 39.57, 38.96, 38.88, 39.16, 39.5, 40.57, 39.3] +[66.12] +32.6153085231781 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 28.15625023841858, 'TIME_S_1KI': 281.5625023841858, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2156.524199552536, 'W': 66.12} +[39.51, 44.41, 39.14, 39.57, 38.96, 
38.88, 39.16, 39.5, 40.57, 39.3, 40.49, 39.77, 38.92, 38.97, 38.92, 38.94, 38.96, 38.84, 44.37, 38.97] +717.015 +35.85075 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 28.15625023841858, 'TIME_S_1KI': 281.5625023841858, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2156.524199552536, 'W': 66.12, 'J_1KI': 21565.24199552536, 'W_1KI': 661.2, 'W_D': 30.269250000000007, 'J_D': 987.2409275152089, 'W_D_1KI': 302.6925000000001, 'J_D_1KI': 3026.925000000001} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..e0396a6 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 56.9739773273468, "TIME_S_1KI": 569.739773273468, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4132.100797698497, "W": 65.61, "J_1KI": 41321.007976984976, "W_1KI": 656.1, "W_D": 30.199999999999996, "J_D": 1901.9881739139555, "W_D_1KI": 301.99999999999994, "J_D_1KI": 3019.9999999999995} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..0887321 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 56.9739773273468} + +tensor(indices=tensor([[4246, 2334, 6908, ..., 2524, 2127, 1590], + [6463, 5806, 8269, ..., 2679, 8904, 8115]]), + values=tensor([0.3198, 0.0649, 0.4364, ..., 0.3374, 0.8033, 0.9805]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.3147, 0.6619, 0.2526, ..., 0.6315, 0.7235, 0.8687]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 56.9739773273468 seconds + +tensor(indices=tensor([[4246, 2334, 6908, ..., 2524, 2127, 1590], + [6463, 5806, 8269, ..., 2679, 8904, 8115]]), + values=tensor([0.3198, 0.0649, 0.4364, ..., 0.3374, 0.8033, 0.9805]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.3147, 0.6619, 0.2526, ..., 0.6315, 0.7235, 0.8687]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 56.9739773273468 seconds + +[40.63, 39.37, 39.53, 39.38, 39.07, 38.95, 39.11, 39.48, 39.03, 39.09] +[65.61] +62.979740858078 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 
100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 56.9739773273468, 'TIME_S_1KI': 569.739773273468, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4132.100797698497, 'W': 65.61} +[40.63, 39.37, 39.53, 39.38, 39.07, 38.95, 39.11, 39.48, 39.03, 39.09, 39.53, 39.32, 39.51, 40.09, 39.12, 39.05, 38.93, 40.18, 38.93, 39.05] +708.2 +35.410000000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 56.9739773273468, 'TIME_S_1KI': 569.739773273468, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4132.100797698497, 'W': 65.61, 'J_1KI': 41321.007976984976, 'W_1KI': 656.1, 'W_D': 30.199999999999996, 'J_D': 1901.9881739139555, 'W_D_1KI': 301.99999999999994, 'J_D_1KI': 3019.9999999999995} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..8d1db2d --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 85.62028431892395, "TIME_S_1KI": 856.2028431892395, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6094.222667913436, "W": 65.72, "J_1KI": 60942.226679134365, "W_1KI": 657.2, "W_D": 29.25425, "J_D": 2712.74974867326, "W_D_1KI": 292.54249999999996, "J_D_1KI": 2925.4249999999997} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..a1ce70e --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 85.62028431892395} + +tensor(indices=tensor([[6502, 5630, 5347, ..., 3417, 2429, 7659], + [7675, 4404, 7043, ..., 6207, 4126, 7728]]), + values=tensor([0.5336, 0.5537, 0.2767, ..., 0.0561, 0.2345, 0.6897]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.8442, 0.8373, 0.2483, ..., 0.1588, 0.4821, 0.4040]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 85.62028431892395 seconds + +tensor(indices=tensor([[6502, 5630, 5347, ..., 3417, 2429, 7659], + [7675, 4404, 7043, ..., 6207, 4126, 7728]]), + values=tensor([0.5336, 0.5537, 0.2767, ..., 0.0561, 0.2345, 0.6897]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.8442, 0.8373, 0.2483, ..., 0.1588, 0.4821, 0.4040]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 85.62028431892395 seconds + +[39.62, 39.37, 39.12, 39.2, 39.61, 
39.42, 38.99, 41.31, 53.18, 39.52] +[65.72] +92.73010754585266 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 85.62028431892395, 'TIME_S_1KI': 856.2028431892395, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6094.222667913436, 'W': 65.72} +[39.62, 39.37, 39.12, 39.2, 39.61, 39.42, 38.99, 41.31, 53.18, 39.52, 40.01, 38.92, 39.02, 39.0, 39.52, 44.42, 40.33, 38.99, 39.87, 38.94] +729.315 +36.46575 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 85.62028431892395, 'TIME_S_1KI': 856.2028431892395, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6094.222667913436, 'W': 65.72, 'J_1KI': 60942.226679134365, 'W_1KI': 657.2, 'W_D': 29.25425, 'J_D': 2712.74974867326, 'W_D_1KI': 292.54249999999996, 'J_D_1KI': 2925.4249999999997} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..1afd086 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 112.56092715263367, "TIME_S_1KI": 1125.6092715263367, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 8167.820107634067, "W": 65.97, "J_1KI": 81678.20107634067, "W_1KI": 659.6999999999999, "W_D": 30.363749999999996, "J_D": 3759.3701348063346, "W_D_1KI": 303.63749999999993, "J_D_1KI": 3036.374999999999} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..272753b --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 112.56092715263367} + +tensor(indices=tensor([[6913, 6955, 3654, ..., 1712, 9364, 9759], + [4836, 5537, 1819, ..., 6045, 9297, 5941]]), + values=tensor([0.8997, 0.5854, 0.5245, ..., 0.7417, 0.4025, 0.5154]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.9740, 0.1025, 0.2346, ..., 0.9711, 0.8162, 0.4893]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 112.56092715263367 seconds + +tensor(indices=tensor([[6913, 6955, 3654, ..., 1712, 9364, 9759], + [4836, 5537, 1819, ..., 6045, 9297, 5941]]), + values=tensor([0.8997, 0.5854, 0.5245, ..., 0.7417, 0.4025, 0.5154]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.9740, 0.1025, 
0.2346, ..., 0.9711, 0.8162, 0.4893]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 112.56092715263367 seconds + +[40.38, 39.01, 39.12, 41.71, 39.36, 39.14, 39.55, 39.69, 39.52, 39.43] +[65.97] +123.81112790107727 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 112.56092715263367, 'TIME_S_1KI': 1125.6092715263367, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8167.820107634067, 'W': 65.97} +[40.38, 39.01, 39.12, 41.71, 39.36, 39.14, 39.55, 39.69, 39.52, 39.43, 40.0, 39.2, 39.19, 39.02, 40.89, 39.4, 39.6, 39.28, 39.04, 39.0] +712.125 +35.60625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 112.56092715263367, 'TIME_S_1KI': 1125.6092715263367, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8167.820107634067, 'W': 65.97, 'J_1KI': 81678.20107634067, 'W_1KI': 659.6999999999999, 'W_D': 30.363749999999996, 'J_D': 3759.3701348063346, 'W_D_1KI': 303.63749999999993, 'J_D_1KI': 3036.374999999999} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..2118303 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 142.2544720172882, "TIME_S_1KI": 1422.544720172882, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 10046.2299533844, "W": 65.76, "J_1KI": 100462.29953384401, "W_1KI": 657.6, "W_D": 29.882000000000005, "J_D": 4565.107108683587, "W_D_1KI": 298.82000000000005, "J_D_1KI": 2988.2000000000003} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..3004a6f --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 142.2544720172882} + +tensor(indices=tensor([[7831, 4139, 2224, ..., 6913, 8464, 2774], + [ 532, 4220, 6578, ..., 2935, 7463, 5071]]), + values=tensor([0.6508, 0.8753, 0.2783, ..., 0.5939, 0.9144, 0.3112]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.4758, 0.2878, 0.0543, ..., 0.7972, 0.4926, 0.5204]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 142.2544720172882 seconds + 
+tensor(indices=tensor([[7831, 4139, 2224, ..., 6913, 8464, 2774], + [ 532, 4220, 6578, ..., 2935, 7463, 5071]]), + values=tensor([0.6508, 0.8753, 0.2783, ..., 0.5939, 0.9144, 0.3112]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.4758, 0.2878, 0.0543, ..., 0.7972, 0.4926, 0.5204]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 142.2544720172882 seconds + +[40.2, 41.28, 39.58, 39.55, 39.07, 39.07, 39.14, 39.25, 39.33, 39.48] +[65.76] +152.77113676071167 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 142.2544720172882, 'TIME_S_1KI': 1422.544720172882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10046.2299533844, 'W': 65.76} +[40.2, 41.28, 39.58, 39.55, 39.07, 39.07, 39.14, 39.25, 39.33, 39.48, 39.92, 43.79, 39.71, 39.1, 39.7, 39.59, 39.17, 39.29, 41.51, 39.26] +717.56 +35.878 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 142.2544720172882, 'TIME_S_1KI': 1422.544720172882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10046.2299533844, 'W': 65.76, 'J_1KI': 100462.29953384401, 'W_1KI': 657.6, 'W_D': 29.882000000000005, 'J_D': 4565.107108683587, 'W_D_1KI': 298.82000000000005, 'J_D_1KI': 2988.2000000000003} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..8ba0450 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 314746, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.578866243362427, "TIME_S_1KI": 0.03361080440533772, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 850.5716023397445, "W": 65.38, "J_1KI": 2.7024063922646975, "W_1KI": 0.20772305287438125, "W_D": 29.65999999999999, "J_D": 385.8665299081801, "W_D_1KI": 0.09423471624738675, "J_D_1KI": 0.0002993992497041638} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..6cb35a6 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,962 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.011737346649169922} + +tensor(indices=tensor([[6288, 8508, 9706, ..., 5538, 2485, 2652], + [4988, 1076, 662, ..., 1571, 2835, 5524]]), + values=tensor([6.6144e-01, 7.5713e-01, 3.6843e-01, 3.4268e-01, + 9.2041e-01, 6.3550e-01, 
1.3596e-01, 3.9200e-01, + 3.1098e-01, 6.3590e-01, 9.3531e-01, 6.0733e-01, + 8.0780e-01, 4.0966e-02, 1.1233e-02, 9.1113e-01, + 9.6461e-01, 1.7613e-01, 2.8282e-01, 1.7851e-01, + 7.4013e-01, 5.3953e-01, 2.1577e-01, 4.8196e-01, + 8.0311e-01, 7.8248e-01, 9.1283e-01, 4.5086e-01, + 6.2587e-01, 5.8542e-01, 1.2986e-01, 4.9589e-01, + 4.4338e-01, 9.5179e-01, 8.1179e-02, 9.8078e-01, + 7.9802e-02, 7.2641e-01, 8.8751e-01, 4.7360e-01, + 9.8047e-01, 5.5996e-01, 3.8975e-02, 9.8115e-02, + 4.4715e-01, 4.7397e-01, 3.0154e-01, 8.8291e-01, + 1.9546e-01, 3.1869e-02, 5.5161e-01, 5.6631e-01, + 5.7172e-01, 4.0218e-01, 6.0762e-01, 2.9413e-01, + 8.7660e-01, 3.5967e-01, 9.9073e-01, 6.7123e-01, + 5.8378e-01, 8.1021e-01, 6.8443e-01, 4.1460e-01, + 2.5978e-01, 7.7688e-01, 7.1526e-01, 2.8646e-01, + 1.9656e-01, 4.2483e-01, 9.2612e-03, 8.1130e-01, + 3.0787e-01, 7.2412e-01, 1.0478e-01, 8.7209e-01, + 3.7300e-01, 8.8068e-01, 6.1198e-02, 7.3467e-01, + 5.9019e-01, 2.2476e-02, 4.6589e-01, 3.0155e-01, + 3.9176e-01, 7.7650e-01, 7.3373e-01, 9.0383e-01, + 4.1513e-01, 7.2203e-01, 1.4568e-01, 1.0853e-01, + 4.5242e-01, 9.1159e-01, 7.0691e-01, 2.1985e-01, + 2.2501e-02, 2.5299e-01, 9.5022e-01, 6.1020e-01, + 6.9575e-01, 8.6637e-01, 6.1692e-01, 8.5287e-01, + 8.0779e-01, 3.2109e-01, 5.1166e-01, 9.3930e-02, + 5.3702e-01, 9.1034e-01, 9.2166e-01, 5.8424e-01, + 9.4457e-01, 6.9795e-01, 4.0127e-01, 7.3820e-01, + 4.3087e-01, 8.9586e-01, 4.6684e-01, 2.6395e-01, + 8.6236e-01, 4.4743e-01, 8.0773e-02, 1.3565e-01, + 9.2059e-01, 9.0633e-01, 9.7467e-01, 8.1498e-01, + 9.1899e-01, 4.4293e-01, 7.5883e-01, 6.1520e-01, + 2.9433e-01, 6.9229e-01, 7.4568e-01, 1.3233e-02, + 8.7634e-01, 2.0693e-01, 1.8284e-01, 3.6653e-01, + 3.2318e-01, 6.9407e-02, 8.6829e-01, 5.8943e-01, + 4.7198e-01, 3.6951e-01, 9.8111e-01, 7.3151e-02, + 8.4639e-01, 4.8076e-01, 3.3710e-01, 2.9944e-01, + 5.4610e-01, 1.2911e-01, 2.8905e-01, 9.5439e-02, + 8.6752e-01, 9.6501e-01, 1.0500e-01, 9.8644e-01, + 5.6679e-01, 8.2366e-01, 1.0795e-01, 6.5719e-01, + 3.2040e-02, 8.5859e-01, 2.2284e-01, 6.9608e-01, + 2.0592e-01, 3.9939e-01, 5.9629e-01, 8.2282e-01, + 9.6401e-01, 9.1766e-01, 5.7423e-01, 9.2625e-02, + 4.2821e-01, 7.2057e-01, 3.8013e-01, 1.4111e-01, + 2.3097e-01, 4.2188e-01, 8.2867e-01, 8.6339e-01, + 9.1907e-01, 3.3945e-01, 9.0906e-01, 1.2431e-02, + 9.6247e-01, 7.5113e-01, 9.8999e-01, 8.7062e-01, + 9.4908e-01, 1.7461e-01, 4.9177e-01, 7.0601e-01, + 9.4012e-01, 2.7347e-01, 1.3649e-01, 4.1879e-01, + 9.4911e-01, 6.1975e-01, 1.3197e-01, 6.2506e-01, + 5.4957e-01, 2.3939e-02, 5.3370e-01, 2.6602e-01, + 2.5153e-01, 4.1733e-01, 3.1692e-01, 8.2990e-01, + 7.3331e-01, 1.5094e-01, 7.7106e-01, 1.9999e-01, + 3.9059e-01, 3.1866e-01, 7.1319e-01, 7.6790e-01, + 8.8534e-02, 9.5326e-01, 9.2344e-01, 7.3477e-01, + 2.3726e-02, 7.1632e-01, 7.3695e-01, 4.6891e-01, + 2.2833e-01, 7.7012e-01, 7.5952e-01, 3.9654e-01, + 3.6141e-01, 1.6469e-01, 1.7142e-01, 9.0893e-01, + 1.4685e-01, 1.0963e-01, 9.3387e-02, 4.9394e-01, + 2.9227e-01, 1.7888e-01, 1.6680e-01, 9.8284e-01, + 9.2578e-01, 9.6495e-01, 8.5995e-01, 5.4639e-01, + 8.0674e-01, 8.3439e-01, 4.1491e-01, 2.4473e-01, + 3.7860e-01, 3.6443e-01, 3.2836e-02, 3.0679e-02, + 7.7954e-01, 5.7546e-02, 9.9014e-01, 6.2607e-01, + 2.5694e-01, 3.9788e-01, 6.2693e-01, 4.5248e-01, + 3.7014e-01, 3.7496e-02, 7.6875e-04, 1.1602e-01, + 5.3383e-01, 5.2264e-01, 9.8419e-01, 3.3260e-01, + 6.8425e-01, 6.4236e-01, 8.5967e-01, 9.6324e-01, + 1.2680e-01, 3.8466e-01, 6.2514e-01, 9.9957e-01, + 3.1732e-01, 9.4798e-01, 5.2855e-01, 1.8520e-01, + 4.0352e-01, 8.9487e-01, 2.0534e-01, 9.6821e-01, + 4.9373e-01, 4.7216e-01, 
9.6386e-01, 3.0587e-01, + 9.6268e-01, 1.3116e-01, 2.5193e-01, 3.3544e-01, + 9.5609e-01, 8.0298e-01, 6.0726e-01, 5.9194e-03, + 5.6876e-02, 5.0171e-01, 1.9242e-01, 9.7493e-01, + 1.8854e-01, 3.2645e-01, 7.9043e-01, 7.6653e-01, + 2.2835e-01, 3.1193e-01, 1.3549e-01, 3.6454e-01, + 7.7969e-01, 9.7668e-01, 6.0707e-01, 6.6080e-01, + 2.8037e-01, 8.5250e-01, 7.5489e-01, 3.0318e-01, + 7.5988e-01, 1.8267e-01, 7.3888e-01, 8.6558e-01, + 4.8527e-01, 2.2884e-01, 6.3606e-01, 2.3878e-02, + 8.6212e-01, 7.5879e-01, 7.0608e-01, 8.4074e-01, + 8.7640e-01, 8.4296e-01, 5.3181e-01, 9.6684e-01, + 5.2972e-01, 2.4300e-01, 4.1671e-01, 1.5267e-01, + 4.6812e-01, 6.0509e-01, 1.2301e-01, 5.1805e-01, + 7.0738e-01, 2.5675e-01, 8.4818e-01, 7.4283e-02, + 4.0118e-01, 9.0089e-01, 4.4018e-01, 1.0511e-01, + 9.7246e-01, 5.3100e-01, 8.5635e-01, 1.9352e-01, + 7.9425e-01, 9.1647e-01, 9.7970e-01, 8.7996e-01, + 5.1897e-01, 3.4054e-01, 6.5228e-02, 8.0158e-01, + 4.0377e-01, 7.3378e-01, 6.1037e-01, 3.4445e-01, + 4.0854e-01, 1.2100e-01, 6.5587e-01, 4.6397e-01, + 4.8449e-01, 9.0364e-01, 7.0071e-01, 6.8203e-01, + 3.5391e-01, 1.3422e-02, 5.9979e-01, 9.5066e-01, + 3.5533e-01, 4.5090e-02, 1.9824e-01, 1.3829e-02, + 5.5236e-01, 7.3878e-01, 9.4269e-01, 2.1242e-01, + 4.7823e-02, 4.7465e-01, 6.5004e-01, 9.6628e-01, + 6.0279e-01, 9.1507e-02, 2.5984e-01, 6.2551e-01, + 7.8028e-02, 9.5914e-01, 8.9243e-01, 2.4736e-01, + 8.6195e-01, 7.3128e-01, 5.5683e-01, 7.3131e-01, + 2.6211e-01, 2.5479e-02, 8.1990e-01, 8.9483e-01, + 4.2515e-01, 3.0632e-01, 9.5226e-01, 6.6211e-01, + 8.7620e-01, 5.2839e-01, 8.7140e-01, 9.5986e-01, + 1.2848e-02, 7.8441e-02, 2.6252e-01, 7.7681e-01, + 3.4776e-02, 9.8983e-01, 3.5973e-01, 8.6408e-01, + 5.1983e-02, 3.6449e-02, 4.9301e-01, 3.5463e-01, + 5.6620e-01, 8.2326e-01, 9.8075e-06, 3.5005e-02, + 7.0451e-01, 4.4290e-01, 7.7787e-01, 7.2240e-01, + 4.3204e-01, 5.0815e-01, 1.9257e-01, 9.4299e-01, + 3.9483e-01, 2.2365e-01, 8.0652e-01, 1.3890e-01, + 2.4944e-01, 7.0562e-01, 1.7147e-01, 6.0613e-01, + 7.0574e-01, 5.6271e-01, 4.9013e-01, 3.8296e-01, + 3.6155e-01, 9.7447e-01, 5.1084e-01, 3.8777e-01, + 7.4291e-01, 6.4846e-01, 7.1005e-01, 4.7992e-01, + 7.8831e-01, 4.9193e-01, 1.3184e-01, 3.6779e-01, + 6.9916e-01, 3.0108e-01, 4.3480e-01, 1.1014e-01, + 3.7031e-01, 1.6545e-01, 7.0864e-01, 2.5023e-01, + 1.3216e-01, 5.2500e-01, 5.7295e-02, 1.0624e-01, + 1.7142e-01, 4.6438e-01, 1.7553e-01, 3.9602e-02, + 1.9869e-01, 2.9983e-01, 8.3027e-01, 3.8854e-01, + 8.1160e-01, 7.0569e-02, 4.6563e-01, 8.0543e-01, + 7.1774e-01, 8.2643e-01, 9.5839e-01, 1.5036e-01, + 4.3531e-01, 8.6121e-02, 5.4743e-01, 2.9353e-01, + 3.1225e-01, 2.1414e-01, 4.3887e-01, 6.9379e-01, + 2.5114e-01, 2.4262e-01, 6.7064e-01, 4.6571e-01, + 2.1324e-01, 1.5470e-01, 6.4164e-01, 7.8930e-01, + 8.5053e-01, 3.3890e-01, 7.0931e-01, 5.6698e-01, + 1.2894e-01, 9.3710e-01, 8.0306e-01, 2.5428e-01, + 1.4032e-01, 9.0784e-01, 2.1437e-01, 7.1000e-01, + 4.5672e-01, 8.7665e-01, 8.5194e-01, 3.7861e-01, + 5.1291e-01, 8.2538e-01, 3.5340e-01, 4.0250e-01, + 3.4634e-01, 8.5907e-01, 8.9104e-01, 6.4704e-01, + 6.4475e-01, 7.1924e-01, 2.7053e-01, 6.6837e-01, + 6.9016e-01, 2.0770e-01, 1.3777e-02, 9.5407e-01, + 2.0928e-01, 1.6619e-01, 1.3552e-01, 8.4611e-01, + 1.2250e-01, 9.4579e-01, 7.4968e-01, 4.0834e-01, + 3.8929e-01, 3.9912e-01, 6.3379e-02, 5.6828e-01, + 6.6326e-02, 2.7126e-01, 9.0881e-01, 5.6423e-01, + 3.9747e-02, 5.9121e-01, 7.6252e-01, 6.1655e-01, + 7.7113e-01, 8.4402e-01, 2.1426e-01, 3.2299e-01, + 8.8002e-01, 7.3967e-01, 1.2463e-02, 1.9507e-01, + 2.8012e-01, 8.1871e-01, 6.7229e-01, 3.8884e-01, + 5.0955e-01, 8.7967e-01, 
2.3153e-01, 5.2151e-01, + 1.1065e-01, 3.0461e-01, 6.0386e-01, 3.9428e-01, + 1.6252e-01, 7.3754e-01, 2.3963e-01, 6.0760e-02, + 4.0335e-01, 4.8930e-01, 5.1010e-01, 1.7638e-01, + 7.5762e-01, 8.8303e-01, 6.7335e-01, 2.8508e-01, + 7.9525e-01, 4.8735e-01, 6.7664e-01, 7.5828e-01, + 5.1667e-01, 4.2352e-01, 3.8715e-02, 1.6626e-01, + 6.0139e-01, 4.9088e-01, 3.5664e-01, 4.0391e-01, + 4.3973e-01, 9.0562e-01, 8.8981e-01, 9.6445e-01, + 2.2838e-02, 3.3289e-01, 6.6847e-01, 7.6298e-01, + 4.9484e-01, 5.5393e-01, 2.9954e-01, 4.2858e-01, + 1.6820e-01, 1.0117e-01, 6.3568e-01, 2.5279e-01, + 5.6215e-01, 3.5295e-01, 1.2702e-01, 8.3168e-02, + 2.4591e-01, 4.5632e-01, 5.7753e-01, 8.7992e-01, + 6.0142e-01, 6.1313e-01, 3.1233e-01, 8.5875e-01, + 5.1776e-01, 9.0468e-01, 7.8751e-01, 3.4146e-01, + 9.3689e-03, 9.9724e-01, 4.2802e-01, 4.4840e-03, + 4.9595e-01, 5.1520e-01, 1.6471e-01, 8.2734e-01, + 3.2858e-01, 4.4632e-01, 9.1366e-01, 7.8208e-01, + 6.2378e-01, 5.1728e-01, 2.2965e-01, 2.0915e-01, + 6.1450e-01, 4.2712e-01, 7.3549e-02, 8.0727e-01, + 7.9069e-01, 2.1527e-01, 8.8143e-01, 3.0178e-02, + 7.2367e-01, 1.4207e-01, 5.8985e-01, 1.6761e-01, + 4.2082e-01, 9.0417e-01, 1.7862e-02, 1.4394e-02, + 1.3363e-01, 6.2703e-01, 4.2681e-01, 6.9450e-01, + 9.7324e-01, 5.2785e-01, 6.2098e-01, 1.1771e-01, + 6.3372e-01, 7.3180e-01, 3.6872e-01, 4.6190e-01, + 7.1752e-01, 7.8837e-01, 8.7400e-02, 1.4849e-01, + 3.0670e-01, 2.5777e-01, 4.7672e-01, 4.3089e-01, + 6.5040e-01, 9.5114e-02, 9.7878e-01, 2.0803e-01, + 8.4959e-01, 8.9776e-01, 2.7861e-01, 2.8351e-01, + 9.0744e-01, 5.9793e-01, 6.9146e-01, 4.2246e-01, + 8.3571e-01, 3.9947e-01, 3.5301e-01, 2.5634e-01, + 4.3187e-01, 4.4765e-01, 1.3966e-01, 4.9502e-01, + 2.0341e-01, 2.5664e-01, 3.1122e-01, 5.9939e-01, + 4.1622e-01, 4.8503e-01, 2.3393e-01, 8.6882e-01, + 8.7269e-01, 1.5628e-01, 4.5462e-01, 9.3906e-01, + 7.2167e-01, 9.5293e-01, 4.0361e-01, 7.6635e-01, + 7.6020e-01, 5.8264e-01, 6.7833e-01, 3.1291e-03, + 8.2219e-01, 5.8166e-01, 5.3193e-01, 3.5920e-01, + 5.7393e-01, 7.7982e-02, 4.7260e-01, 3.3476e-01, + 8.3892e-01, 9.3333e-01, 1.2396e-02, 7.5121e-01, + 5.4720e-01, 3.0345e-01, 9.0629e-02, 7.0165e-01, + 2.6480e-01, 6.8553e-01, 1.4103e-01, 9.1040e-01, + 1.6815e-01, 3.3922e-01, 8.3914e-01, 7.4554e-01, + 9.6382e-01, 5.1437e-01, 5.9215e-01, 4.1178e-01, + 4.6255e-01, 4.2098e-01, 9.5458e-01, 7.1088e-01, + 2.3143e-01, 1.3543e-01, 5.3616e-01, 4.2981e-01, + 7.3072e-01, 5.9025e-01, 8.7059e-01, 1.7246e-01, + 7.2442e-01, 5.5177e-01, 8.8687e-01, 2.8289e-01, + 2.3751e-01, 8.4451e-01, 8.8561e-02, 9.3168e-01, + 6.6036e-01, 8.7134e-01, 3.0089e-01, 4.0994e-01, + 2.0474e-02, 1.8709e-01, 6.2646e-01, 7.9002e-01, + 6.6444e-02, 1.7078e-01, 3.5245e-02, 7.1065e-01, + 2.4635e-01, 1.2661e-02, 2.0978e-01, 6.6979e-01, + 5.1736e-01, 3.0498e-01, 9.4440e-01, 5.8807e-01, + 3.7392e-01, 1.4262e-01, 7.3319e-01, 8.0537e-01, + 9.5460e-01, 2.0885e-01, 3.1654e-01, 7.0763e-01, + 6.4097e-01, 2.3714e-01, 5.9085e-01, 3.3894e-01, + 1.2288e-01, 8.5127e-01, 4.4782e-02, 3.5253e-01, + 6.0104e-01, 3.5232e-01, 3.3094e-01, 6.2167e-01, + 5.8769e-01, 9.0035e-01, 8.5660e-01, 2.3677e-01, + 2.8256e-01, 2.1869e-01, 7.5623e-01, 2.3891e-01, + 7.3031e-02, 2.2827e-01, 3.3771e-01, 9.8424e-01, + 2.2376e-01, 1.8378e-01, 8.7768e-01, 2.5293e-01, + 9.8282e-01, 1.7544e-02, 5.4624e-01, 4.8714e-02, + 9.2262e-01, 2.4502e-01, 1.8993e-02, 9.4639e-01, + 9.4679e-01, 7.6868e-01, 5.5336e-01, 1.4802e-02, + 9.1702e-01, 6.5384e-01, 4.9409e-01, 7.8128e-02, + 3.6332e-01, 7.1484e-01, 9.9943e-01, 6.0779e-01, + 2.2323e-01, 8.6468e-01, 9.6439e-01, 6.2627e-01, + 8.1012e-01, 8.0643e-01, 
7.9877e-01, 1.1732e-01, + 7.5931e-01, 2.2468e-01, 3.8435e-01, 6.8390e-01, + 9.8415e-01, 4.9782e-01, 8.8636e-01, 1.3636e-01, + 5.7870e-02, 5.2955e-01, 9.6983e-01, 5.8822e-01, + 1.0882e-01, 1.8996e-01, 4.7705e-01, 3.3113e-01, + 2.5977e-01, 3.8839e-01, 8.8576e-01, 1.5107e-01, + 3.9553e-03, 1.4548e-01, 1.9114e-01, 8.9790e-01, + 1.8409e-02, 7.5021e-01, 5.7490e-01, 7.2799e-01, + 3.0791e-01, 2.7017e-01, 7.0796e-01, 9.1976e-01, + 9.1340e-01, 9.1480e-01, 1.4432e-01, 9.9118e-01, + 7.9501e-01, 2.0747e-01, 4.7538e-02, 6.2276e-01, + 5.8679e-01, 4.8245e-01, 1.8000e-01, 2.0205e-01, + 7.7821e-01, 8.0124e-01, 8.2483e-01, 4.6158e-01, + 9.5190e-01, 6.6902e-01, 8.7119e-01, 3.7165e-01, + 2.8058e-01, 6.8394e-01, 5.5673e-01, 7.2465e-01, + 7.3548e-01, 9.2252e-01, 1.3043e-01, 7.1312e-01, + 8.2492e-01, 9.9034e-01, 7.9757e-01, 7.5402e-01, + 2.7626e-01, 7.7314e-01, 6.0058e-01, 4.1667e-01, + 2.7121e-01, 8.7011e-01, 5.3151e-01, 9.6655e-01, + 9.7215e-01, 7.8885e-01, 9.4084e-01, 5.5968e-01, + 5.6771e-01, 9.6322e-02, 3.3723e-01, 8.0766e-01, + 6.9460e-01, 3.8505e-02, 6.4449e-02, 2.4193e-01, + 3.4443e-02, 7.7283e-01, 6.9512e-01, 8.3728e-01, + 5.6684e-02, 8.5270e-01, 4.3748e-01, 5.5053e-02, + 9.4415e-01, 5.6588e-01, 9.7908e-01, 7.9137e-01, + 1.4530e-01, 4.4167e-02, 4.7320e-01, 5.4371e-01, + 4.3534e-01, 2.8907e-01, 4.3348e-01, 7.4928e-01, + 9.5037e-01, 6.5824e-01, 8.8626e-02, 9.5423e-01, + 2.7485e-01, 7.3717e-01, 7.7454e-01, 1.2069e-01, + 4.3520e-01, 9.2023e-01, 5.5391e-01, 4.1726e-01, + 4.1619e-01, 4.7012e-01, 2.8393e-01, 4.6308e-01, + 6.2787e-01, 2.4785e-01, 1.9481e-01, 9.4870e-02, + 2.5884e-01, 5.4665e-01, 9.8822e-02, 6.9634e-01, + 5.8421e-01, 5.1606e-01, 7.0978e-01, 4.9129e-01, + 2.8524e-01, 9.4155e-01, 5.6400e-01, 7.5768e-01, + 3.3174e-01, 1.9663e-01, 5.2951e-01, 2.7864e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.9925, 0.8028, 0.2510, ..., 0.1060, 0.6951, 0.7325]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.011737346649169922 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '89458', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.9843311309814453} + +tensor(indices=tensor([[2175, 6056, 9142, ..., 5893, 8087, 5102], + [9420, 5340, 4535, ..., 9603, 5296, 3377]]), + values=tensor([0.4448, 0.6238, 0.1188, 0.0564, 0.4403, 0.6678, 0.4470, + 0.4323, 0.3859, 0.2380, 0.2276, 0.1122, 0.3181, 0.4603, + 0.1712, 0.8129, 0.7892, 0.7791, 0.8414, 0.0721, 0.5348, + 0.7810, 0.6938, 0.8766, 0.8600, 0.4453, 0.7552, 0.3933, + 0.8263, 0.8607, 0.6904, 0.6790, 0.6027, 0.4796, 0.1674, + 0.2763, 0.9655, 0.1030, 0.9304, 0.7285, 0.0554, 0.7721, + 0.8754, 0.3444, 0.1229, 0.9639, 0.5417, 0.8627, 0.8837, + 0.5767, 0.0113, 0.7533, 0.6370, 0.3308, 0.8929, 0.8174, + 0.3690, 0.5295, 0.9107, 0.1134, 0.1961, 0.3802, 0.0235, + 0.7125, 0.6785, 0.6255, 0.2724, 0.5192, 0.1775, 0.7535, + 0.7518, 0.2705, 0.3909, 0.9778, 0.3961, 0.1170, 0.9564, + 0.2284, 0.5883, 0.2593, 0.7814, 0.3941, 0.4616, 0.6434, + 0.0535, 0.6519, 0.0081, 0.2894, 0.7737, 0.7652, 0.5439, + 0.1931, 0.3589, 0.3472, 0.0606, 0.5052, 0.3767, 0.1586, + 0.9847, 0.5299, 0.5531, 0.6655, 0.0990, 0.6000, 0.6615, + 0.7691, 0.4364, 0.5748, 0.9615, 0.2756, 0.7840, 0.6293, + 0.5122, 
0.5054, 0.8629, 0.8162, 0.7203, 0.4835, 0.0628, + 0.1347, 0.1214, 0.1160, 0.9035, 0.0317, 0.0088, 0.7629, + 0.7831, 0.3217, 0.0086, 0.3718, 0.2400, 0.0708, 0.1087, + 0.1239, 0.8739, 0.0669, 0.3914, 0.7165, 0.5009, 0.3277, + 0.3071, 0.1087, 0.2290, 0.8643, 0.0458, 0.1165, 0.0325, + 0.1005, 0.1626, 0.4517, 0.0220, 0.8393, 0.2297, 0.0817, + 0.4824, 0.7648, 0.0336, 0.7642, 0.3905, 0.6186, 0.6678, + 0.4381, 0.7672, 0.9086, 0.9014, 0.9132, 0.2221, 0.2215, + 0.6055, 0.6665, 0.6225, 0.0848, 0.1326, 0.9350, 0.3497, + 0.7649, 0.4342, 0.1039, 0.4566, 0.1882, 0.6655, 0.7562, + 0.7446, 0.1947, 0.3120, 0.0513, 0.3434, 0.0612, 0.4848, + 0.6199, 0.6235, 0.8811, 0.4607, 0.8399, 0.7000, 0.9760, + 0.0270, 0.6286, 0.9555, 0.9068, 0.3490, 0.0345, 0.7264, + 0.9832, 0.9621, 0.7048, 0.3488, 0.5201, 0.5504, 0.8587, + 0.2045, 0.7956, 0.1599, 0.4879, 0.4134, 0.5564, 0.5239, + 0.4047, 0.7853, 0.2740, 0.5099, 0.6442, 0.6913, 0.4482, + 0.8011, 0.9134, 0.4775, 0.4044, 0.2604, 0.6207, 0.9517, + 0.3834, 0.0914, 0.8565, 0.0081, 0.0626, 0.2510, 0.8434, + 0.6974, 0.0140, 0.4166, 0.2637, 0.0339, 0.3535, 0.5283, + 0.2616, 0.5577, 0.4332, 0.0787, 0.2240, 0.3750, 0.2652, + 0.3087, 0.7393, 0.5416, 0.4648, 0.9812, 0.3401, 0.8672, + 0.8417, 0.9324, 0.6090, 0.4346, 0.5550, 0.6931, 0.0275, + 0.2421, 0.4170, 0.3610, 0.6508, 0.6781, 0.7832, 0.8390, + 0.8905, 0.0404, 0.0962, 0.0877, 0.6794, 0.6765, 0.7450, + 0.9198, 0.8580, 0.5758, 0.2993, 0.0050, 0.9590, 0.3609, + 0.2091, 0.6478, 0.8083, 0.0969, 0.2067, 0.2430, 0.2286, + 0.1600, 0.7897, 0.0898, 0.6133, 0.2405, 0.0212, 0.6385, + 0.6267, 0.7743, 0.1967, 0.2708, 0.9833, 0.3405, 0.8991, + 0.5078, 0.5549, 0.9575, 0.6737, 0.2498, 0.4149, 0.1493, + 0.3474, 0.8327, 0.7746, 0.0984, 0.4270, 0.6823, 0.5258, + 0.8942, 0.3403, 0.6705, 0.1128, 0.9331, 0.2225, 0.2164, + 0.9977, 0.5005, 0.0469, 0.5676, 0.2696, 0.2063, 0.8187, + 0.6690, 0.5055, 0.9828, 0.8567, 0.2010, 0.9152, 0.4725, + 0.0568, 0.1209, 0.6829, 0.7164, 0.8398, 0.8390, 0.9665, + 0.7765, 0.8011, 0.7887, 0.2646, 0.6762, 0.5973, 0.0854, + 0.6220, 0.4508, 0.2311, 0.1119, 0.8331, 0.3232, 0.1854, + 0.4354, 0.6613, 0.6911, 0.1389, 0.7790, 0.3452, 0.8824, + 0.1801, 0.3932, 0.2404, 0.8694, 0.2319, 0.1870, 0.8935, + 0.8545, 0.3899, 0.3252, 0.7525, 0.1450, 0.7482, 0.0014, + 0.4625, 0.2044, 0.6723, 0.3294, 0.6501, 0.5023, 0.3853, + 0.1255, 0.6810, 0.3492, 0.9292, 0.6755, 0.5125, 0.0014, + 0.2318, 0.6600, 0.4559, 0.7217, 0.3612, 0.5036, 0.9023, + 0.6542, 0.9817, 0.5504, 0.7913, 0.7256, 0.8842, 0.1151, + 0.0252, 0.4359, 0.1812, 0.3182, 0.6361, 0.8127, 0.2107, + 0.7672, 0.9242, 0.7934, 0.3451, 0.3109, 0.2327, 0.3725, + 0.5407, 0.7465, 0.1552, 0.5846, 0.6951, 0.2953, 0.4245, + 0.8064, 0.5000, 0.5291, 0.1250, 0.2239, 0.7111, 0.0694, + 0.9640, 0.3972, 0.8100, 0.3219, 0.7483, 0.5065, 0.2670, + 0.8001, 0.6490, 0.6803, 0.2169, 0.0129, 0.9953, 0.8798, + 0.0076, 0.4078, 0.0560, 0.6061, 0.2388, 0.4648, 0.9547, + 0.1344, 0.6210, 0.2928, 0.9633, 0.2084, 0.9296, 0.3896, + 0.3758, 0.5508, 0.7806, 0.6569, 0.5329, 0.9111, 0.9551, + 0.5340, 0.2458, 0.3313, 0.3645, 0.6024, 0.8967, 0.4619, + 0.5829, 0.1030, 0.5504, 0.3302, 0.7865, 0.8882, 0.9363, + 0.6488, 0.9930, 0.7062, 0.5007, 0.7143, 0.7860, 0.0469, + 0.8155, 0.2066, 0.5290, 0.1994, 0.0589, 0.8226, 0.1195, + 0.2253, 0.2149, 0.0739, 0.3680, 0.8951, 0.9513, 0.7215, + 0.6867, 0.3735, 0.0407, 0.8352, 0.0776, 0.0714, 0.4694, + 0.3100, 0.1896, 0.7243, 0.3123, 0.3077, 0.7038, 0.5364, + 0.9871, 0.0729, 0.1293, 0.9430, 0.5834, 0.3465, 0.8274, + 0.8367, 0.4019, 0.5168, 0.0826, 0.3876, 0.6547, 0.5213, + 0.4221, 0.1821, 0.1771, 
0.2458, 0.5598, 0.5560, 0.9985, + 0.9791, 0.2319, 0.5898, 0.3161, 0.5339, 0.4036, 0.8950, + 0.6191, 0.6314, 0.0537, 0.7515, 0.3450, 0.0896, 0.5381, + 0.8668, 0.8577, 0.5617, 0.6969, 0.9856, 0.8362, 0.6696, + 0.7805, 0.8931, 0.5144, 0.0554, 0.8350, 0.2587, 0.0997, + 0.3821, 0.4713, 0.8622, 0.1391, 0.6174, 0.9970, 0.5048, + 0.1536, 0.7675, 0.1606, 0.0069, 0.5540, 0.8053, 0.7782, + 0.2442, 0.2813, 0.9123, 0.5171, 0.8731, 0.5000, 0.5118, + 0.0464, 0.2697, 0.3953, 0.7011, 0.3185, 0.2665, 0.0681, + 0.2917, 0.7485, 0.6250, 0.6407, 0.2157, 0.0537, 0.5697, + 0.8359, 0.9145, 0.1941, 0.2661, 0.7293, 0.1877, 0.3065, + 0.0216, 0.5844, 0.1058, 0.1336, 0.1392, 0.5176, 0.5914, + 0.0560, 0.0331, 0.6934, 0.3075, 0.6819, 0.8495, 0.0958, + 0.5329, 0.9196, 0.9695, 0.5226, 0.1211, 0.6215, 0.2257, + 0.6816, 0.9084, 0.5044, 0.9745, 0.8419, 0.9436, 0.3989, + 0.7334, 0.6027, 0.4541, 0.6768, 0.3879, 0.6751, 0.2023, + 0.1460, 0.9864, 0.1145, 0.6268, 0.5757, 0.5242, 0.4755, + 0.2493, 0.7184, 0.3461, 0.0517, 0.4170, 0.8669, 0.5027, + 0.9563, 0.6861, 0.3740, 0.0465, 0.5306, 0.3739, 0.7401, + 0.2825, 0.8664, 0.1490, 0.7870, 0.3967, 0.0375, 0.5700, + 0.9915, 0.0179, 0.4550, 0.8486, 0.4716, 0.5180, 0.7021, + 0.0665, 0.7265, 0.2351, 0.3647, 0.4478, 0.7440, 0.6592, + 0.8580, 0.2200, 0.9352, 0.3103, 0.7426, 0.0941, 0.4589, + 0.0686, 0.2255, 0.5142, 0.1558, 0.6673, 0.5593, 0.7554, + 0.8351, 0.8829, 0.3389, 0.7703, 0.1885, 0.4002, 0.2861, + 0.8480, 0.5545, 0.2375, 0.2123, 0.6365, 0.3919, 0.6500, + 0.8666, 0.1630, 0.1797, 0.3087, 0.8273, 0.4607, 0.5353, + 0.7168, 0.8103, 0.1927, 0.9427, 0.3481, 0.3075, 0.9298, + 0.2543, 0.7198, 0.4746, 0.1133, 0.8428, 0.5496, 0.6413, + 0.6835, 0.3745, 0.8980, 0.0814, 0.9675, 0.5676, 0.7518, + 0.0761, 0.0613, 0.8867, 0.4684, 0.3610, 0.2827, 0.6818, + 0.0479, 0.7923, 0.0132, 0.8357, 0.6856, 0.9674, 0.8231, + 0.6512, 0.0967, 0.3779, 0.8801, 0.0783, 0.1078, 0.7698, + 0.5694, 0.3343, 0.3057, 0.4386, 0.5573, 0.5801, 0.6376, + 0.8006, 0.1158, 0.4068, 0.5822, 0.6631, 0.1551, 0.9705, + 0.7223, 0.2889, 0.6010, 0.8044, 0.2842, 0.9558, 0.6582, + 0.9418, 0.3433, 0.9543, 0.6844, 0.5491, 0.6148, 0.1983, + 0.0734, 0.5311, 0.5177, 0.0871, 0.3846, 0.6356, 0.9853, + 0.5066, 0.7465, 0.7688, 0.0026, 0.5630, 0.9444, 0.6349, + 0.1026, 0.1185, 0.3080, 0.2842, 0.2881, 0.3697, 0.2541, + 0.6601, 0.1403, 0.4414, 0.2838, 0.8918, 0.7264, 0.7373, + 0.0155, 0.3697, 0.3607, 0.3404, 0.5768, 0.2007, 0.7324, + 0.7312, 0.6788, 0.6176, 0.1912, 0.2727, 0.8107, 0.8546, + 0.4602, 0.0801, 0.2113, 0.1139, 0.0941, 0.6131, 0.8345, + 0.8714, 0.7634, 0.8556, 0.3543, 0.9912, 0.9931, 0.7873, + 0.0211, 0.0084, 0.9832, 0.1716, 0.5429, 0.7833, 0.7800, + 0.4068, 0.3471, 0.8032, 0.3514, 0.9085, 0.7551, 0.7725, + 0.6506, 0.9861, 0.8680, 0.5032, 0.1156, 0.9898, 0.8616, + 0.0404, 0.2348, 0.3544, 0.2838, 0.8257, 0.0410, 0.1278, + 0.6629, 0.5377, 0.2297, 0.9140, 0.9184, 0.4060, 0.4904, + 0.6312, 0.1611, 0.1420, 0.8959, 0.9411, 0.6654, 0.7235, + 0.0537, 0.4756, 0.8528, 0.1023, 0.5885, 0.1779, 0.0687, + 0.6335, 0.8948, 0.2440, 0.7134, 0.1735, 0.9917, 0.7751, + 0.1379, 0.4267, 0.0269, 0.6124, 0.7876, 0.3508, 0.3935, + 0.4762, 0.5460, 0.1362, 0.7948, 0.9234, 0.2935, 0.1493, + 0.1667, 0.8075, 0.8666, 0.2781, 0.9985, 0.5561, 0.5272, + 0.5029, 0.4838, 0.2404, 0.4060, 0.5596, 0.9672, 0.5922, + 0.5327, 0.7185, 0.6544, 0.9001, 0.9429, 0.7905, 0.1523, + 0.5117, 0.6254, 0.8938, 0.6450, 0.3529, 0.2568, 0.2600, + 0.1101, 0.7886, 0.8808, 0.7423, 0.2189, 0.8195, 0.2780, + 0.0055, 0.3953, 0.7954, 0.1000, 0.4397, 0.9694, 0.5429, + 0.0285, 0.8611, 0.6293, 0.9778, 0.8545, 
0.8742, 0.0377, + 0.0757, 0.9178, 0.5420, 0.8377, 0.5141, 0.3199, 0.6075, + 0.7541, 0.0364, 0.2807, 0.9364, 0.1621, 0.1425, 0.5166, + 0.1925, 0.7532, 0.3376, 0.6665, 0.6515, 0.2705, 0.0892, + 0.5778, 0.0086, 0.9900, 0.8838, 0.6223, 0.9426]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.3060, 0.4928, 0.5679, ..., 0.7753, 0.2916, 0.1555]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 2.9843311309814453 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '314746', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.578866243362427} + +tensor(indices=tensor([[4246, 828, 6796, ..., 4742, 8553, 3734], + [5746, 4774, 6584, ..., 2181, 8081, 6926]]), + values=tensor([7.6317e-01, 7.2767e-01, 7.0489e-01, 2.6890e-01, + 3.9057e-01, 2.6801e-01, 2.4869e-01, 7.1857e-01, + 5.4834e-01, 5.2795e-01, 8.4898e-01, 2.4180e-01, + 1.2362e-01, 7.3643e-01, 2.8189e-02, 9.7317e-01, + 3.8476e-01, 5.8510e-01, 4.2816e-01, 4.9641e-01, + 8.2082e-01, 4.4516e-01, 8.5046e-02, 9.8940e-01, + 5.4664e-01, 9.8564e-01, 1.2339e-01, 1.5177e-01, + 5.3124e-01, 2.7738e-01, 7.4362e-01, 7.5148e-01, + 8.6017e-01, 4.0854e-01, 2.4627e-02, 1.3314e-01, + 2.3427e-02, 8.5864e-01, 1.8208e-01, 6.7735e-02, + 9.2495e-01, 9.2212e-01, 5.1220e-01, 9.6471e-01, + 9.1531e-02, 7.4747e-01, 6.9787e-01, 8.2768e-01, + 1.3388e-02, 5.4730e-01, 3.7081e-01, 2.5352e-01, + 5.3432e-01, 9.2836e-01, 1.1016e-01, 5.5056e-03, + 7.2657e-01, 7.2464e-01, 2.5092e-01, 4.2760e-01, + 4.9202e-01, 3.0188e-01, 8.6749e-01, 6.3363e-01, + 5.8702e-01, 8.6528e-01, 4.3827e-01, 3.4959e-01, + 8.4648e-01, 2.9646e-02, 1.4311e-02, 2.8062e-01, + 1.9843e-01, 9.4654e-01, 9.4481e-01, 4.5993e-01, + 1.3691e-01, 5.1593e-01, 1.1119e-01, 5.2223e-01, + 8.3580e-01, 7.5462e-01, 1.7671e-01, 5.1804e-01, + 7.2887e-01, 4.8535e-01, 9.0237e-01, 6.9243e-01, + 6.0646e-01, 4.4214e-01, 8.8697e-01, 5.6813e-01, + 2.7563e-01, 3.6438e-01, 1.0517e-01, 8.7090e-01, + 9.8935e-01, 3.9389e-02, 6.9703e-01, 5.2097e-01, + 6.1178e-01, 8.1961e-01, 5.3775e-01, 2.3459e-01, + 6.5452e-01, 1.4901e-01, 6.5469e-01, 7.5469e-01, + 3.7176e-01, 5.4871e-01, 1.7761e-01, 5.7433e-02, + 6.2052e-01, 7.1757e-01, 9.7114e-01, 5.3990e-01, + 3.9815e-01, 4.3280e-01, 4.6556e-02, 6.7627e-01, + 7.8818e-01, 3.5725e-01, 7.0800e-01, 1.5540e-01, + 8.3014e-01, 3.4991e-01, 9.4389e-01, 1.4629e-02, + 1.6705e-01, 2.0648e-01, 5.9753e-01, 7.5985e-01, + 1.2578e-01, 4.5045e-01, 4.2623e-01, 9.4862e-01, + 4.9694e-01, 3.7690e-01, 2.4311e-01, 5.3373e-01, + 7.0215e-01, 1.9448e-01, 8.1633e-01, 9.6848e-01, + 6.4731e-03, 9.2624e-01, 3.1565e-01, 6.9259e-01, + 8.7162e-01, 2.4799e-01, 6.4515e-01, 5.4527e-01, + 4.9953e-01, 8.9049e-01, 2.5199e-01, 3.9259e-01, + 5.5523e-01, 5.5116e-01, 1.5176e-01, 6.0512e-01, + 1.6632e-02, 2.6505e-01, 1.4387e-01, 2.3200e-02, + 5.4833e-01, 5.5632e-01, 8.2105e-01, 5.1207e-01, + 4.4656e-01, 1.9516e-02, 7.9797e-01, 5.5322e-01, + 7.5405e-01, 9.0551e-01, 7.7285e-01, 6.1544e-02, + 4.8395e-01, 5.3491e-02, 3.3792e-03, 2.6831e-01, + 6.6141e-01, 3.3372e-01, 9.3039e-01, 1.2444e-02, + 2.4067e-01, 1.7608e-01, 2.1034e-01, 9.2239e-01, + 6.0205e-01, 7.7360e-01, 8.3385e-01, 5.5413e-01, + 7.8481e-01, 7.5436e-01, 1.3800e-01, 5.6546e-01, + 2.3024e-01, 8.7919e-01, 
3.8220e-01, 5.7133e-01, + 9.1493e-01, 6.2179e-01, 2.5320e-01, 3.6290e-02, + 3.7262e-01, 1.4459e-01, 9.4664e-02, 2.6972e-01, + 5.6071e-02, 4.2054e-01, 9.5734e-02, 7.0301e-01, + 4.6692e-01, 8.9541e-01, 4.7113e-02, 6.7280e-01, + 1.5374e-01, 9.6536e-01, 6.8558e-01, 6.4519e-01, + 7.6022e-01, 5.6498e-01, 5.6183e-01, 9.6204e-01, + 8.8218e-01, 4.7196e-01, 9.6082e-01, 4.8644e-01, + 3.8090e-01, 4.4770e-01, 6.2881e-01, 6.7218e-01, + 7.6325e-01, 1.9125e-01, 4.1280e-01, 2.7241e-01, + 1.1509e-01, 1.2115e-01, 5.0372e-01, 7.1849e-01, + 9.5326e-01, 4.6411e-01, 8.1804e-01, 9.8995e-01, + 1.1536e-02, 2.7816e-01, 2.2951e-01, 5.8366e-01, + 1.5211e-02, 5.0460e-01, 4.6953e-01, 3.2003e-01, + 8.2131e-01, 5.7426e-01, 3.0122e-01, 3.9881e-01, + 5.1244e-01, 5.4030e-01, 6.5465e-01, 3.9288e-01, + 8.4498e-01, 5.9642e-01, 3.5942e-01, 1.8020e-01, + 3.1603e-01, 8.8412e-01, 4.9632e-01, 6.0537e-01, + 4.1016e-01, 7.9285e-01, 7.5350e-01, 2.7154e-01, + 6.5150e-01, 3.2758e-01, 7.0310e-01, 2.1100e-01, + 6.0711e-01, 5.8783e-01, 3.6056e-02, 7.7143e-01, + 1.1157e-01, 7.3015e-01, 3.5258e-01, 7.2487e-01, + 3.0272e-01, 1.8887e-01, 3.2936e-01, 1.8218e-01, + 6.7771e-01, 9.0224e-01, 7.9411e-01, 8.1901e-01, + 4.3130e-01, 8.9649e-01, 7.7967e-01, 5.0556e-01, + 4.0467e-01, 1.6909e-01, 3.6937e-01, 8.2248e-01, + 3.5811e-01, 2.0425e-01, 6.9417e-02, 3.2180e-01, + 7.3419e-01, 8.8602e-01, 1.0693e-02, 6.2963e-01, + 1.7986e-01, 6.1087e-01, 6.6710e-01, 7.7565e-01, + 1.7385e-01, 2.2038e-01, 5.8739e-01, 3.2853e-01, + 9.9557e-01, 6.7187e-02, 1.3481e-02, 3.1252e-01, + 6.2567e-01, 1.3358e-01, 8.3355e-01, 1.9399e-01, + 3.7693e-01, 3.8767e-01, 7.1638e-01, 4.5458e-01, + 3.2616e-01, 2.5213e-01, 7.1715e-01, 7.2804e-01, + 4.5712e-01, 3.0212e-01, 1.7868e-01, 2.5042e-01, + 8.9757e-01, 3.9641e-02, 6.7241e-01, 4.4978e-01, + 8.0660e-01, 8.3928e-01, 6.8263e-02, 5.9132e-01, + 1.6689e-02, 8.9592e-01, 1.5253e-01, 4.9913e-01, + 1.5179e-01, 1.3274e-01, 7.5960e-01, 3.4925e-01, + 3.3597e-02, 9.4536e-01, 3.0685e-01, 8.8238e-01, + 9.4083e-01, 1.1632e-01, 2.1149e-01, 9.6362e-01, + 4.7751e-01, 4.9917e-01, 5.9908e-01, 4.2384e-01, + 6.5233e-01, 7.6974e-01, 3.5463e-01, 6.2846e-01, + 2.0299e-01, 3.0257e-01, 8.2505e-01, 1.6577e-01, + 1.6428e-02, 8.0079e-02, 2.2837e-01, 1.7593e-01, + 1.7764e-01, 2.0808e-01, 9.0262e-01, 6.0774e-01, + 4.9060e-01, 3.5255e-01, 2.2397e-01, 1.0169e-02, + 9.2751e-01, 3.9353e-01, 6.8672e-02, 1.4738e-01, + 8.4245e-01, 6.0769e-01, 3.9110e-01, 2.3036e-01, + 5.4047e-01, 8.9627e-01, 7.8522e-01, 1.4717e-01, + 2.4128e-01, 6.9165e-01, 1.4781e-01, 8.4415e-01, + 4.7755e-01, 6.3526e-01, 8.3292e-01, 1.9761e-01, + 9.4956e-01, 7.9774e-01, 4.6258e-01, 1.5635e-01, + 9.7515e-01, 7.0306e-01, 8.0434e-01, 9.7266e-01, + 4.7663e-01, 1.5978e-01, 6.8326e-01, 8.4217e-01, + 7.7728e-01, 9.1796e-01, 3.0438e-01, 9.2749e-01, + 2.8367e-01, 8.1878e-01, 7.6135e-01, 7.7261e-01, + 7.5970e-01, 7.8470e-01, 3.3835e-01, 4.8638e-01, + 7.2534e-01, 2.7356e-01, 8.3959e-01, 2.7702e-01, + 4.1857e-01, 8.7387e-01, 7.5287e-01, 2.1303e-01, + 4.9032e-02, 6.9665e-01, 7.8687e-02, 4.5750e-01, + 1.3165e-01, 3.6889e-01, 2.8332e-01, 2.5119e-01, + 8.3086e-01, 2.3296e-01, 6.6921e-02, 9.1899e-01, + 5.7560e-01, 8.3217e-01, 2.0170e-01, 6.9164e-01, + 3.1197e-01, 8.8916e-01, 1.9368e-01, 1.4674e-01, + 9.9338e-01, 6.9286e-01, 9.0107e-01, 7.8180e-01, + 9.2550e-01, 2.4810e-01, 1.0395e-01, 7.4188e-04, + 1.9033e-01, 5.3025e-01, 2.1869e-02, 5.0117e-01, + 7.5394e-01, 5.7839e-01, 6.7504e-01, 7.7216e-01, + 6.0966e-01, 2.2873e-01, 9.0268e-01, 6.5407e-02, + 1.0237e-01, 5.0782e-01, 8.7219e-01, 8.4671e-01, + 6.5158e-01, 6.2590e-01, 
7.3631e-01, 1.1639e-01, + 7.1997e-01, 1.3423e-01, 8.6535e-01, 7.2593e-01, + 2.2684e-01, 5.8317e-01, 4.1604e-01, 5.6418e-01, + 5.6815e-01, 9.7210e-01, 6.8529e-01, 4.7989e-01, + 7.4636e-01, 8.9323e-01, 4.2195e-01, 6.3636e-01, + 6.7032e-01, 3.1070e-02, 7.8747e-01, 7.1484e-01, + 8.9274e-01, 8.2122e-01, 8.5102e-02, 3.1008e-01, + 4.4688e-02, 1.2181e-01, 9.6114e-02, 2.7461e-01, + 6.0811e-01, 6.6398e-01, 4.0452e-01, 6.2526e-01, + 8.8551e-01, 5.3897e-01, 8.3429e-01, 6.6303e-01, + 9.8069e-01, 6.1840e-01, 8.7062e-01, 1.9220e-01, + 5.0471e-01, 3.0661e-01, 8.1479e-01, 4.5432e-01, + 2.9823e-01, 2.1839e-01, 9.8128e-01, 1.4816e-01, + 6.0268e-01, 1.4367e-01, 3.0145e-01, 6.9045e-01, + 5.6585e-01, 8.4757e-01, 9.3971e-01, 7.5829e-01, + 5.6056e-01, 7.3688e-01, 9.3553e-01, 6.1958e-01, + 5.1696e-01, 3.8149e-01, 7.3508e-01, 3.2311e-02, + 2.7734e-03, 7.8224e-02, 7.3794e-01, 4.3793e-01, + 4.7151e-01, 7.8128e-01, 5.9373e-01, 3.2878e-01, + 1.7135e-02, 7.5842e-01, 9.2867e-01, 6.2308e-01, + 9.0204e-01, 4.9857e-01, 5.4618e-01, 8.8027e-01, + 1.6257e-01, 1.1765e-01, 7.8830e-01, 7.7877e-01, + 3.8699e-01, 1.0533e-01, 5.5842e-01, 4.4720e-02, + 3.5234e-01, 2.2685e-01, 4.5898e-01, 7.4494e-01, + 8.7380e-01, 5.6491e-01, 6.3924e-01, 8.7823e-01, + 6.4475e-01, 7.0297e-01, 8.4363e-01, 3.7105e-01, + 2.0254e-01, 4.2419e-01, 6.0952e-01, 2.3586e-01, + 4.1613e-01, 1.9222e-01, 3.3374e-01, 8.8359e-02, + 7.0188e-01, 3.9086e-01, 8.8120e-01, 6.0316e-01, + 6.3860e-01, 9.4349e-01, 3.9290e-01, 9.4535e-01, + 9.3768e-01, 5.1397e-01, 3.1906e-01, 5.7066e-01, + 7.1099e-02, 2.7597e-01, 9.5028e-01, 2.9121e-01, + 5.9692e-01, 7.5555e-01, 3.9576e-01, 3.5754e-01, + 6.2485e-01, 5.7784e-01, 4.8578e-01, 1.7949e-01, + 8.4206e-01, 2.8342e-02, 5.6727e-01, 8.4287e-01, + 2.6310e-01, 6.0492e-01, 1.7094e-01, 6.0507e-01, + 7.0905e-01, 8.6817e-01, 9.5359e-02, 1.2825e-01, + 3.6898e-01, 2.9599e-01, 4.0677e-01, 8.1013e-01, + 2.0977e-01, 4.1703e-01, 1.9741e-01, 4.5336e-01, + 3.6911e-01, 3.4309e-01, 5.7504e-03, 2.8389e-01, + 6.6961e-01, 8.6725e-01, 7.6056e-01, 3.2372e-01, + 5.6849e-01, 6.7265e-01, 6.0928e-01, 5.6112e-01, + 4.6767e-01, 8.5339e-01, 6.8512e-02, 6.1788e-01, + 8.4590e-01, 7.9104e-01, 9.8576e-01, 7.6102e-01, + 3.1519e-03, 6.9741e-01, 7.7298e-01, 4.4526e-01, + 3.9790e-01, 3.0543e-01, 5.7871e-01, 4.5401e-01, + 7.6201e-01, 9.8924e-01, 7.7249e-01, 7.3557e-01, + 3.7639e-01, 4.2112e-01, 7.7437e-01, 6.4890e-01, + 2.3068e-01, 1.1875e-01, 3.8456e-01, 8.0965e-01, + 5.6904e-01, 4.6701e-01, 6.3700e-01, 1.0875e-01, + 8.5120e-01, 8.0211e-01, 9.7281e-01, 9.2819e-01, + 4.3294e-01, 3.4613e-01, 4.1320e-01, 2.0580e-01, + 2.8019e-01, 9.5393e-01, 6.4576e-02, 6.6932e-01, + 5.7208e-01, 1.4672e-01, 9.6601e-01, 1.3611e-01, + 4.8127e-01, 3.0625e-01, 4.7631e-01, 5.1243e-01, + 8.4902e-01, 2.7727e-01, 8.2307e-01, 5.7157e-01, + 5.3494e-01, 5.1268e-01, 9.3897e-01, 9.9544e-01, + 2.3270e-01, 8.4944e-01, 8.7995e-01, 1.8829e-01, + 6.2731e-01, 3.7190e-01, 1.8995e-01, 5.7150e-01, + 3.2054e-01, 1.5020e-01, 5.5212e-01, 9.8764e-01, + 4.2566e-02, 4.1564e-01, 2.8141e-01, 8.0248e-01, + 5.6141e-01, 4.6075e-01, 3.1413e-01, 8.4389e-01, + 8.7137e-01, 5.7158e-01, 2.9976e-01, 6.6587e-01, + 6.8440e-01, 4.5266e-01, 7.0291e-01, 7.2381e-01, + 4.1454e-01, 6.0130e-01, 2.6337e-01, 7.2339e-01, + 4.2044e-02, 8.8248e-01, 2.0266e-01, 3.5895e-01, + 8.3300e-01, 2.7577e-02, 8.1170e-02, 7.0987e-01, + 3.0191e-01, 6.1008e-01, 1.9132e-01, 6.5967e-02, + 1.5848e-02, 7.9785e-01, 2.7235e-01, 2.1549e-01, + 8.9836e-01, 7.6109e-01, 5.6691e-01, 9.1028e-01, + 5.4818e-01, 2.0284e-01, 4.2127e-01, 5.7256e-01, + 7.6239e-01, 5.2704e-01, 
4.6254e-01, 1.9982e-01, + 8.3682e-01, 3.1131e-01, 6.0706e-01, 2.5334e-01, + 4.3526e-01, 1.9860e-01, 7.0067e-01, 5.2148e-01, + 1.6047e-02, 4.5806e-01, 7.4446e-01, 4.8171e-01, + 2.2165e-01, 8.2590e-01, 1.9060e-01, 4.9110e-02, + 7.3824e-01, 4.6990e-01, 1.2168e-01, 6.6203e-01, + 3.9960e-01, 6.2133e-01, 7.4437e-01, 6.6842e-01, + 9.6998e-01, 7.2441e-01, 5.6511e-01, 5.9038e-01, + 9.6725e-01, 7.3648e-01, 9.4000e-01, 8.4421e-01, + 4.1885e-01, 5.5897e-02, 3.1633e-01, 5.2225e-01, + 5.0884e-01, 2.4095e-01, 8.3392e-01, 5.9762e-01, + 5.7596e-01, 2.9829e-01, 4.8102e-01, 8.7230e-01, + 1.5353e-02, 9.3259e-01, 9.8003e-01, 1.7380e-01, + 8.2578e-01, 9.1805e-01, 3.6001e-02, 1.5917e-01, + 8.8243e-01, 5.7209e-01, 9.4876e-01, 5.4736e-01, + 2.9993e-01, 8.5133e-01, 3.9799e-01, 7.4641e-01, + 5.4279e-01, 9.4924e-01, 2.8072e-01, 6.6907e-01, + 7.1397e-01, 8.3378e-01, 7.2347e-01, 9.9922e-01, + 8.4884e-01, 1.7498e-02, 3.4863e-01, 4.1552e-01, + 7.0062e-01, 5.9942e-01, 9.3146e-01, 2.9884e-01, + 4.7785e-02, 6.7533e-01, 2.8111e-01, 2.2539e-01, + 4.3616e-01, 2.3853e-01, 9.7691e-01, 5.8120e-02, + 2.6696e-01, 3.0982e-01, 4.5703e-01, 3.8012e-01, + 1.4753e-01, 9.4378e-01, 8.5352e-01, 5.9096e-01, + 7.1149e-01, 1.2901e-01, 7.6712e-01, 8.3730e-01, + 2.9426e-01, 3.8276e-01, 3.4140e-01, 2.5044e-01, + 3.4563e-02, 7.7672e-01, 6.9620e-01, 7.4740e-01, + 2.7887e-01, 3.8712e-02, 9.4512e-01, 5.7464e-01, + 7.6470e-01, 7.0060e-02, 3.9849e-01, 1.4673e-01, + 4.5199e-01, 3.2291e-01, 5.7625e-01, 5.9872e-01, + 8.7781e-01, 2.3917e-01, 9.2858e-01, 5.0023e-01, + 6.4278e-02, 9.7291e-01, 1.8991e-01, 3.0883e-01, + 3.0552e-01, 7.4860e-01, 8.0060e-01, 5.4010e-03, + 8.7583e-01, 6.9233e-01, 3.3706e-01, 6.1186e-01, + 7.7744e-01, 8.6021e-01, 9.2422e-01, 5.9864e-01, + 5.0169e-01, 9.1794e-01, 7.8865e-01, 9.0777e-01, + 2.6436e-01, 2.5408e-01, 4.8055e-01, 7.6543e-01, + 7.4677e-01, 9.4253e-01, 5.3767e-01, 8.3906e-01, + 7.1883e-01, 2.0677e-03, 4.8462e-01, 1.3447e-01, + 1.0753e-01, 3.7768e-01, 3.3209e-01, 2.8573e-02, + 6.0197e-01, 3.4548e-01, 8.3316e-01, 7.6340e-01, + 7.9831e-01, 6.7151e-01, 7.0882e-01, 2.8907e-01, + 2.9354e-01, 7.6885e-01, 2.9248e-01, 6.5911e-01, + 7.6925e-02, 3.8351e-01, 9.8691e-01, 9.8770e-01, + 5.8982e-01, 4.6598e-01, 2.8363e-01, 5.2102e-01, + 8.0299e-02, 9.0101e-01, 4.1815e-01, 6.8484e-03, + 8.2597e-02, 9.8679e-01, 6.6304e-01, 9.3506e-01, + 4.3221e-01, 1.3987e-01, 7.1820e-01, 6.4392e-01, + 1.4668e-01, 7.4939e-01, 6.4096e-02, 9.9512e-01, + 9.3073e-02, 1.1289e-01, 6.4339e-01, 1.2726e-01, + 9.4416e-03, 6.7990e-01, 5.9670e-01, 4.3096e-01, + 7.0200e-01, 7.7336e-01, 2.1063e-01, 4.7584e-01, + 2.8800e-01, 8.3821e-01, 8.2854e-01, 8.2368e-01, + 7.3390e-01, 2.5626e-01, 2.0078e-01, 7.3877e-01, + 1.5321e-01, 7.4443e-01, 4.9281e-01, 2.8142e-01, + 5.5761e-02, 5.4628e-01, 2.0463e-03, 2.0793e-01, + 1.2056e-01, 1.8236e-01, 1.8000e-01, 6.9393e-01, + 5.1275e-01, 7.4623e-01, 1.4329e-01, 8.3984e-01, + 3.8373e-01, 5.0363e-01, 4.2102e-01, 4.2685e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.0623, 0.5203, 0.0171, ..., 0.4246, 0.3090, 0.4539]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.578866243362427 seconds + +tensor(indices=tensor([[4246, 828, 6796, ..., 4742, 8553, 3734], + [5746, 4774, 6584, ..., 2181, 8081, 6926]]), + values=tensor([7.6317e-01, 7.2767e-01, 7.0489e-01, 2.6890e-01, + 3.9057e-01, 2.6801e-01, 2.4869e-01, 7.1857e-01, + 5.4834e-01, 5.2795e-01, 8.4898e-01, 2.4180e-01, + 1.2362e-01, 7.3643e-01, 2.8189e-02, 9.7317e-01, + 
3.8476e-01, 5.8510e-01, 4.2816e-01, 4.9641e-01, + 8.2082e-01, 4.4516e-01, 8.5046e-02, 9.8940e-01, + 5.4664e-01, 9.8564e-01, 1.2339e-01, 1.5177e-01, + 5.3124e-01, 2.7738e-01, 7.4362e-01, 7.5148e-01, + 8.6017e-01, 4.0854e-01, 2.4627e-02, 1.3314e-01, + 2.3427e-02, 8.5864e-01, 1.8208e-01, 6.7735e-02, + 9.2495e-01, 9.2212e-01, 5.1220e-01, 9.6471e-01, + 9.1531e-02, 7.4747e-01, 6.9787e-01, 8.2768e-01, + 1.3388e-02, 5.4730e-01, 3.7081e-01, 2.5352e-01, + 5.3432e-01, 9.2836e-01, 1.1016e-01, 5.5056e-03, + 7.2657e-01, 7.2464e-01, 2.5092e-01, 4.2760e-01, + 4.9202e-01, 3.0188e-01, 8.6749e-01, 6.3363e-01, + 5.8702e-01, 8.6528e-01, 4.3827e-01, 3.4959e-01, + 8.4648e-01, 2.9646e-02, 1.4311e-02, 2.8062e-01, + 1.9843e-01, 9.4654e-01, 9.4481e-01, 4.5993e-01, + 1.3691e-01, 5.1593e-01, 1.1119e-01, 5.2223e-01, + 8.3580e-01, 7.5462e-01, 1.7671e-01, 5.1804e-01, + 7.2887e-01, 4.8535e-01, 9.0237e-01, 6.9243e-01, + 6.0646e-01, 4.4214e-01, 8.8697e-01, 5.6813e-01, + 2.7563e-01, 3.6438e-01, 1.0517e-01, 8.7090e-01, + 9.8935e-01, 3.9389e-02, 6.9703e-01, 5.2097e-01, + 6.1178e-01, 8.1961e-01, 5.3775e-01, 2.3459e-01, + 6.5452e-01, 1.4901e-01, 6.5469e-01, 7.5469e-01, + 3.7176e-01, 5.4871e-01, 1.7761e-01, 5.7433e-02, + 6.2052e-01, 7.1757e-01, 9.7114e-01, 5.3990e-01, + 3.9815e-01, 4.3280e-01, 4.6556e-02, 6.7627e-01, + 7.8818e-01, 3.5725e-01, 7.0800e-01, 1.5540e-01, + 8.3014e-01, 3.4991e-01, 9.4389e-01, 1.4629e-02, + 1.6705e-01, 2.0648e-01, 5.9753e-01, 7.5985e-01, + 1.2578e-01, 4.5045e-01, 4.2623e-01, 9.4862e-01, + 4.9694e-01, 3.7690e-01, 2.4311e-01, 5.3373e-01, + 7.0215e-01, 1.9448e-01, 8.1633e-01, 9.6848e-01, + 6.4731e-03, 9.2624e-01, 3.1565e-01, 6.9259e-01, + 8.7162e-01, 2.4799e-01, 6.4515e-01, 5.4527e-01, + 4.9953e-01, 8.9049e-01, 2.5199e-01, 3.9259e-01, + 5.5523e-01, 5.5116e-01, 1.5176e-01, 6.0512e-01, + 1.6632e-02, 2.6505e-01, 1.4387e-01, 2.3200e-02, + 5.4833e-01, 5.5632e-01, 8.2105e-01, 5.1207e-01, + 4.4656e-01, 1.9516e-02, 7.9797e-01, 5.5322e-01, + 7.5405e-01, 9.0551e-01, 7.7285e-01, 6.1544e-02, + 4.8395e-01, 5.3491e-02, 3.3792e-03, 2.6831e-01, + 6.6141e-01, 3.3372e-01, 9.3039e-01, 1.2444e-02, + 2.4067e-01, 1.7608e-01, 2.1034e-01, 9.2239e-01, + 6.0205e-01, 7.7360e-01, 8.3385e-01, 5.5413e-01, + 7.8481e-01, 7.5436e-01, 1.3800e-01, 5.6546e-01, + 2.3024e-01, 8.7919e-01, 3.8220e-01, 5.7133e-01, + 9.1493e-01, 6.2179e-01, 2.5320e-01, 3.6290e-02, + 3.7262e-01, 1.4459e-01, 9.4664e-02, 2.6972e-01, + 5.6071e-02, 4.2054e-01, 9.5734e-02, 7.0301e-01, + 4.6692e-01, 8.9541e-01, 4.7113e-02, 6.7280e-01, + 1.5374e-01, 9.6536e-01, 6.8558e-01, 6.4519e-01, + 7.6022e-01, 5.6498e-01, 5.6183e-01, 9.6204e-01, + 8.8218e-01, 4.7196e-01, 9.6082e-01, 4.8644e-01, + 3.8090e-01, 4.4770e-01, 6.2881e-01, 6.7218e-01, + 7.6325e-01, 1.9125e-01, 4.1280e-01, 2.7241e-01, + 1.1509e-01, 1.2115e-01, 5.0372e-01, 7.1849e-01, + 9.5326e-01, 4.6411e-01, 8.1804e-01, 9.8995e-01, + 1.1536e-02, 2.7816e-01, 2.2951e-01, 5.8366e-01, + 1.5211e-02, 5.0460e-01, 4.6953e-01, 3.2003e-01, + 8.2131e-01, 5.7426e-01, 3.0122e-01, 3.9881e-01, + 5.1244e-01, 5.4030e-01, 6.5465e-01, 3.9288e-01, + 8.4498e-01, 5.9642e-01, 3.5942e-01, 1.8020e-01, + 3.1603e-01, 8.8412e-01, 4.9632e-01, 6.0537e-01, + 4.1016e-01, 7.9285e-01, 7.5350e-01, 2.7154e-01, + 6.5150e-01, 3.2758e-01, 7.0310e-01, 2.1100e-01, + 6.0711e-01, 5.8783e-01, 3.6056e-02, 7.7143e-01, + 1.1157e-01, 7.3015e-01, 3.5258e-01, 7.2487e-01, + 3.0272e-01, 1.8887e-01, 3.2936e-01, 1.8218e-01, + 6.7771e-01, 9.0224e-01, 7.9411e-01, 8.1901e-01, + 4.3130e-01, 8.9649e-01, 7.7967e-01, 5.0556e-01, + 4.0467e-01, 1.6909e-01, 3.6937e-01, 8.2248e-01, + 
3.5811e-01, 2.0425e-01, 6.9417e-02, 3.2180e-01, + 7.3419e-01, 8.8602e-01, 1.0693e-02, 6.2963e-01, + 1.7986e-01, 6.1087e-01, 6.6710e-01, 7.7565e-01, + 1.7385e-01, 2.2038e-01, 5.8739e-01, 3.2853e-01, + 9.9557e-01, 6.7187e-02, 1.3481e-02, 3.1252e-01, + 6.2567e-01, 1.3358e-01, 8.3355e-01, 1.9399e-01, + 3.7693e-01, 3.8767e-01, 7.1638e-01, 4.5458e-01, + 3.2616e-01, 2.5213e-01, 7.1715e-01, 7.2804e-01, + 4.5712e-01, 3.0212e-01, 1.7868e-01, 2.5042e-01, + 8.9757e-01, 3.9641e-02, 6.7241e-01, 4.4978e-01, + 8.0660e-01, 8.3928e-01, 6.8263e-02, 5.9132e-01, + 1.6689e-02, 8.9592e-01, 1.5253e-01, 4.9913e-01, + 1.5179e-01, 1.3274e-01, 7.5960e-01, 3.4925e-01, + 3.3597e-02, 9.4536e-01, 3.0685e-01, 8.8238e-01, + 9.4083e-01, 1.1632e-01, 2.1149e-01, 9.6362e-01, + 4.7751e-01, 4.9917e-01, 5.9908e-01, 4.2384e-01, + 6.5233e-01, 7.6974e-01, 3.5463e-01, 6.2846e-01, + 2.0299e-01, 3.0257e-01, 8.2505e-01, 1.6577e-01, + 1.6428e-02, 8.0079e-02, 2.2837e-01, 1.7593e-01, + 1.7764e-01, 2.0808e-01, 9.0262e-01, 6.0774e-01, + 4.9060e-01, 3.5255e-01, 2.2397e-01, 1.0169e-02, + 9.2751e-01, 3.9353e-01, 6.8672e-02, 1.4738e-01, + 8.4245e-01, 6.0769e-01, 3.9110e-01, 2.3036e-01, + 5.4047e-01, 8.9627e-01, 7.8522e-01, 1.4717e-01, + 2.4128e-01, 6.9165e-01, 1.4781e-01, 8.4415e-01, + 4.7755e-01, 6.3526e-01, 8.3292e-01, 1.9761e-01, + 9.4956e-01, 7.9774e-01, 4.6258e-01, 1.5635e-01, + 9.7515e-01, 7.0306e-01, 8.0434e-01, 9.7266e-01, + 4.7663e-01, 1.5978e-01, 6.8326e-01, 8.4217e-01, + 7.7728e-01, 9.1796e-01, 3.0438e-01, 9.2749e-01, + 2.8367e-01, 8.1878e-01, 7.6135e-01, 7.7261e-01, + 7.5970e-01, 7.8470e-01, 3.3835e-01, 4.8638e-01, + 7.2534e-01, 2.7356e-01, 8.3959e-01, 2.7702e-01, + 4.1857e-01, 8.7387e-01, 7.5287e-01, 2.1303e-01, + 4.9032e-02, 6.9665e-01, 7.8687e-02, 4.5750e-01, + 1.3165e-01, 3.6889e-01, 2.8332e-01, 2.5119e-01, + 8.3086e-01, 2.3296e-01, 6.6921e-02, 9.1899e-01, + 5.7560e-01, 8.3217e-01, 2.0170e-01, 6.9164e-01, + 3.1197e-01, 8.8916e-01, 1.9368e-01, 1.4674e-01, + 9.9338e-01, 6.9286e-01, 9.0107e-01, 7.8180e-01, + 9.2550e-01, 2.4810e-01, 1.0395e-01, 7.4188e-04, + 1.9033e-01, 5.3025e-01, 2.1869e-02, 5.0117e-01, + 7.5394e-01, 5.7839e-01, 6.7504e-01, 7.7216e-01, + 6.0966e-01, 2.2873e-01, 9.0268e-01, 6.5407e-02, + 1.0237e-01, 5.0782e-01, 8.7219e-01, 8.4671e-01, + 6.5158e-01, 6.2590e-01, 7.3631e-01, 1.1639e-01, + 7.1997e-01, 1.3423e-01, 8.6535e-01, 7.2593e-01, + 2.2684e-01, 5.8317e-01, 4.1604e-01, 5.6418e-01, + 5.6815e-01, 9.7210e-01, 6.8529e-01, 4.7989e-01, + 7.4636e-01, 8.9323e-01, 4.2195e-01, 6.3636e-01, + 6.7032e-01, 3.1070e-02, 7.8747e-01, 7.1484e-01, + 8.9274e-01, 8.2122e-01, 8.5102e-02, 3.1008e-01, + 4.4688e-02, 1.2181e-01, 9.6114e-02, 2.7461e-01, + 6.0811e-01, 6.6398e-01, 4.0452e-01, 6.2526e-01, + 8.8551e-01, 5.3897e-01, 8.3429e-01, 6.6303e-01, + 9.8069e-01, 6.1840e-01, 8.7062e-01, 1.9220e-01, + 5.0471e-01, 3.0661e-01, 8.1479e-01, 4.5432e-01, + 2.9823e-01, 2.1839e-01, 9.8128e-01, 1.4816e-01, + 6.0268e-01, 1.4367e-01, 3.0145e-01, 6.9045e-01, + 5.6585e-01, 8.4757e-01, 9.3971e-01, 7.5829e-01, + 5.6056e-01, 7.3688e-01, 9.3553e-01, 6.1958e-01, + 5.1696e-01, 3.8149e-01, 7.3508e-01, 3.2311e-02, + 2.7734e-03, 7.8224e-02, 7.3794e-01, 4.3793e-01, + 4.7151e-01, 7.8128e-01, 5.9373e-01, 3.2878e-01, + 1.7135e-02, 7.5842e-01, 9.2867e-01, 6.2308e-01, + 9.0204e-01, 4.9857e-01, 5.4618e-01, 8.8027e-01, + 1.6257e-01, 1.1765e-01, 7.8830e-01, 7.7877e-01, + 3.8699e-01, 1.0533e-01, 5.5842e-01, 4.4720e-02, + 3.5234e-01, 2.2685e-01, 4.5898e-01, 7.4494e-01, + 8.7380e-01, 5.6491e-01, 6.3924e-01, 8.7823e-01, + 6.4475e-01, 7.0297e-01, 8.4363e-01, 3.7105e-01, + 
2.0254e-01, 4.2419e-01, 6.0952e-01, 2.3586e-01, + 4.1613e-01, 1.9222e-01, 3.3374e-01, 8.8359e-02, + 7.0188e-01, 3.9086e-01, 8.8120e-01, 6.0316e-01, + 6.3860e-01, 9.4349e-01, 3.9290e-01, 9.4535e-01, + 9.3768e-01, 5.1397e-01, 3.1906e-01, 5.7066e-01, + 7.1099e-02, 2.7597e-01, 9.5028e-01, 2.9121e-01, + 5.9692e-01, 7.5555e-01, 3.9576e-01, 3.5754e-01, + 6.2485e-01, 5.7784e-01, 4.8578e-01, 1.7949e-01, + 8.4206e-01, 2.8342e-02, 5.6727e-01, 8.4287e-01, + 2.6310e-01, 6.0492e-01, 1.7094e-01, 6.0507e-01, + 7.0905e-01, 8.6817e-01, 9.5359e-02, 1.2825e-01, + 3.6898e-01, 2.9599e-01, 4.0677e-01, 8.1013e-01, + 2.0977e-01, 4.1703e-01, 1.9741e-01, 4.5336e-01, + 3.6911e-01, 3.4309e-01, 5.7504e-03, 2.8389e-01, + 6.6961e-01, 8.6725e-01, 7.6056e-01, 3.2372e-01, + 5.6849e-01, 6.7265e-01, 6.0928e-01, 5.6112e-01, + 4.6767e-01, 8.5339e-01, 6.8512e-02, 6.1788e-01, + 8.4590e-01, 7.9104e-01, 9.8576e-01, 7.6102e-01, + 3.1519e-03, 6.9741e-01, 7.7298e-01, 4.4526e-01, + 3.9790e-01, 3.0543e-01, 5.7871e-01, 4.5401e-01, + 7.6201e-01, 9.8924e-01, 7.7249e-01, 7.3557e-01, + 3.7639e-01, 4.2112e-01, 7.7437e-01, 6.4890e-01, + 2.3068e-01, 1.1875e-01, 3.8456e-01, 8.0965e-01, + 5.6904e-01, 4.6701e-01, 6.3700e-01, 1.0875e-01, + 8.5120e-01, 8.0211e-01, 9.7281e-01, 9.2819e-01, + 4.3294e-01, 3.4613e-01, 4.1320e-01, 2.0580e-01, + 2.8019e-01, 9.5393e-01, 6.4576e-02, 6.6932e-01, + 5.7208e-01, 1.4672e-01, 9.6601e-01, 1.3611e-01, + 4.8127e-01, 3.0625e-01, 4.7631e-01, 5.1243e-01, + 8.4902e-01, 2.7727e-01, 8.2307e-01, 5.7157e-01, + 5.3494e-01, 5.1268e-01, 9.3897e-01, 9.9544e-01, + 2.3270e-01, 8.4944e-01, 8.7995e-01, 1.8829e-01, + 6.2731e-01, 3.7190e-01, 1.8995e-01, 5.7150e-01, + 3.2054e-01, 1.5020e-01, 5.5212e-01, 9.8764e-01, + 4.2566e-02, 4.1564e-01, 2.8141e-01, 8.0248e-01, + 5.6141e-01, 4.6075e-01, 3.1413e-01, 8.4389e-01, + 8.7137e-01, 5.7158e-01, 2.9976e-01, 6.6587e-01, + 6.8440e-01, 4.5266e-01, 7.0291e-01, 7.2381e-01, + 4.1454e-01, 6.0130e-01, 2.6337e-01, 7.2339e-01, + 4.2044e-02, 8.8248e-01, 2.0266e-01, 3.5895e-01, + 8.3300e-01, 2.7577e-02, 8.1170e-02, 7.0987e-01, + 3.0191e-01, 6.1008e-01, 1.9132e-01, 6.5967e-02, + 1.5848e-02, 7.9785e-01, 2.7235e-01, 2.1549e-01, + 8.9836e-01, 7.6109e-01, 5.6691e-01, 9.1028e-01, + 5.4818e-01, 2.0284e-01, 4.2127e-01, 5.7256e-01, + 7.6239e-01, 5.2704e-01, 4.6254e-01, 1.9982e-01, + 8.3682e-01, 3.1131e-01, 6.0706e-01, 2.5334e-01, + 4.3526e-01, 1.9860e-01, 7.0067e-01, 5.2148e-01, + 1.6047e-02, 4.5806e-01, 7.4446e-01, 4.8171e-01, + 2.2165e-01, 8.2590e-01, 1.9060e-01, 4.9110e-02, + 7.3824e-01, 4.6990e-01, 1.2168e-01, 6.6203e-01, + 3.9960e-01, 6.2133e-01, 7.4437e-01, 6.6842e-01, + 9.6998e-01, 7.2441e-01, 5.6511e-01, 5.9038e-01, + 9.6725e-01, 7.3648e-01, 9.4000e-01, 8.4421e-01, + 4.1885e-01, 5.5897e-02, 3.1633e-01, 5.2225e-01, + 5.0884e-01, 2.4095e-01, 8.3392e-01, 5.9762e-01, + 5.7596e-01, 2.9829e-01, 4.8102e-01, 8.7230e-01, + 1.5353e-02, 9.3259e-01, 9.8003e-01, 1.7380e-01, + 8.2578e-01, 9.1805e-01, 3.6001e-02, 1.5917e-01, + 8.8243e-01, 5.7209e-01, 9.4876e-01, 5.4736e-01, + 2.9993e-01, 8.5133e-01, 3.9799e-01, 7.4641e-01, + 5.4279e-01, 9.4924e-01, 2.8072e-01, 6.6907e-01, + 7.1397e-01, 8.3378e-01, 7.2347e-01, 9.9922e-01, + 8.4884e-01, 1.7498e-02, 3.4863e-01, 4.1552e-01, + 7.0062e-01, 5.9942e-01, 9.3146e-01, 2.9884e-01, + 4.7785e-02, 6.7533e-01, 2.8111e-01, 2.2539e-01, + 4.3616e-01, 2.3853e-01, 9.7691e-01, 5.8120e-02, + 2.6696e-01, 3.0982e-01, 4.5703e-01, 3.8012e-01, + 1.4753e-01, 9.4378e-01, 8.5352e-01, 5.9096e-01, + 7.1149e-01, 1.2901e-01, 7.6712e-01, 8.3730e-01, + 2.9426e-01, 3.8276e-01, 3.4140e-01, 2.5044e-01, + 
3.4563e-02, 7.7672e-01, 6.9620e-01, 7.4740e-01, + 2.7887e-01, 3.8712e-02, 9.4512e-01, 5.7464e-01, + 7.6470e-01, 7.0060e-02, 3.9849e-01, 1.4673e-01, + 4.5199e-01, 3.2291e-01, 5.7625e-01, 5.9872e-01, + 8.7781e-01, 2.3917e-01, 9.2858e-01, 5.0023e-01, + 6.4278e-02, 9.7291e-01, 1.8991e-01, 3.0883e-01, + 3.0552e-01, 7.4860e-01, 8.0060e-01, 5.4010e-03, + 8.7583e-01, 6.9233e-01, 3.3706e-01, 6.1186e-01, + 7.7744e-01, 8.6021e-01, 9.2422e-01, 5.9864e-01, + 5.0169e-01, 9.1794e-01, 7.8865e-01, 9.0777e-01, + 2.6436e-01, 2.5408e-01, 4.8055e-01, 7.6543e-01, + 7.4677e-01, 9.4253e-01, 5.3767e-01, 8.3906e-01, + 7.1883e-01, 2.0677e-03, 4.8462e-01, 1.3447e-01, + 1.0753e-01, 3.7768e-01, 3.3209e-01, 2.8573e-02, + 6.0197e-01, 3.4548e-01, 8.3316e-01, 7.6340e-01, + 7.9831e-01, 6.7151e-01, 7.0882e-01, 2.8907e-01, + 2.9354e-01, 7.6885e-01, 2.9248e-01, 6.5911e-01, + 7.6925e-02, 3.8351e-01, 9.8691e-01, 9.8770e-01, + 5.8982e-01, 4.6598e-01, 2.8363e-01, 5.2102e-01, + 8.0299e-02, 9.0101e-01, 4.1815e-01, 6.8484e-03, + 8.2597e-02, 9.8679e-01, 6.6304e-01, 9.3506e-01, + 4.3221e-01, 1.3987e-01, 7.1820e-01, 6.4392e-01, + 1.4668e-01, 7.4939e-01, 6.4096e-02, 9.9512e-01, + 9.3073e-02, 1.1289e-01, 6.4339e-01, 1.2726e-01, + 9.4416e-03, 6.7990e-01, 5.9670e-01, 4.3096e-01, + 7.0200e-01, 7.7336e-01, 2.1063e-01, 4.7584e-01, + 2.8800e-01, 8.3821e-01, 8.2854e-01, 8.2368e-01, + 7.3390e-01, 2.5626e-01, 2.0078e-01, 7.3877e-01, + 1.5321e-01, 7.4443e-01, 4.9281e-01, 2.8142e-01, + 5.5761e-02, 5.4628e-01, 2.0463e-03, 2.0793e-01, + 1.2056e-01, 1.8236e-01, 1.8000e-01, 6.9393e-01, + 5.1275e-01, 7.4623e-01, 1.4329e-01, 8.3984e-01, + 3.8373e-01, 5.0363e-01, 4.2102e-01, 4.2685e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.0623, 0.5203, 0.0171, ..., 0.4246, 0.3090, 0.4539]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.578866243362427 seconds + +[39.67, 39.07, 39.05, 39.03, 39.33, 39.42, 39.44, 39.45, 39.52, 45.94] +[65.38] +13.009660482406616 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 314746, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.578866243362427, 'TIME_S_1KI': 0.03361080440533772, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 850.5716023397445, 'W': 65.38} +[39.67, 39.07, 39.05, 39.03, 39.33, 39.42, 39.44, 39.45, 39.52, 45.94, 39.54, 39.07, 39.03, 39.28, 38.96, 38.86, 44.68, 39.1, 39.1, 38.87] +714.4000000000001 +35.720000000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 314746, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.578866243362427, 'TIME_S_1KI': 0.03361080440533772, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 850.5716023397445, 'W': 65.38, 'J_1KI': 2.7024063922646975, 'W_1KI': 0.20772305287438125, 'W_D': 29.65999999999999, 'J_D': 385.8665299081801, 'W_D_1KI': 0.09423471624738675, 'J_D_1KI': 0.0002993992497041638} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..001875f --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, 
"ITERATIONS": 71859, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.571467638015747, "TIME_S_1KI": 0.14711403774079443, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 868.2272564220427, "W": 65.82, "J_1KI": 12.082373208951457, "W_1KI": 0.9159604224940507, "W_D": 30.39725, "J_D": 400.96810954535005, "W_D_1KI": 0.4230124271142098, "J_D_1KI": 0.005886700721053867} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..3633c50 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.02417278289794922} + +tensor(indices=tensor([[2222, 7417, 3847, ..., 5107, 9970, 425], + [2119, 2007, 3618, ..., 6002, 615, 9580]]), + values=tensor([0.8892, 0.2402, 0.0787, ..., 0.5151, 0.9607, 0.7954]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.4220, 0.1348, 0.6393, ..., 0.8496, 0.6370, 0.8549]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.02417278289794922 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '43437', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.346956014633179} + +tensor(indices=tensor([[4884, 8876, 914, ..., 193, 6560, 3034], + [5023, 2720, 9609, ..., 9886, 681, 7990]]), + values=tensor([0.6710, 0.4059, 0.1821, ..., 0.6609, 0.3514, 0.0510]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.0379, 0.3551, 0.4712, ..., 0.5355, 0.5793, 0.5981]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 6.346956014633179 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '71859', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.571467638015747} + +tensor(indices=tensor([[5340, 2828, 3460, ..., 9248, 2140, 1074], + [2717, 8341, 1718, ..., 1526, 3840, 3493]]), + values=tensor([0.5855, 0.9699, 0.4922, ..., 0.6707, 0.7839, 0.8465]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.3623, 0.0793, 0.4147, ..., 0.0505, 0.6720, 0.0290]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.571467638015747 seconds + 
+tensor(indices=tensor([[5340, 2828, 3460, ..., 9248, 2140, 1074], + [2717, 8341, 1718, ..., 1526, 3840, 3493]]), + values=tensor([0.5855, 0.9699, 0.4922, ..., 0.6707, 0.7839, 0.8465]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.3623, 0.0793, 0.4147, ..., 0.0505, 0.6720, 0.0290]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.571467638015747 seconds + +[39.98, 39.34, 39.44, 39.4, 39.36, 39.23, 39.07, 39.88, 39.15, 38.91] +[65.82] +13.19093370437622 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 71859, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.571467638015747, 'TIME_S_1KI': 0.14711403774079443, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 868.2272564220427, 'W': 65.82} +[39.98, 39.34, 39.44, 39.4, 39.36, 39.23, 39.07, 39.88, 39.15, 38.91, 40.12, 40.05, 39.42, 39.42, 39.85, 38.93, 39.15, 38.88, 38.93, 38.9] +708.4549999999999 +35.42274999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 71859, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.571467638015747, 'TIME_S_1KI': 0.14711403774079443, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 868.2272564220427, 'W': 65.82, 'J_1KI': 12.082373208951457, 'W_1KI': 0.9159604224940507, 'W_D': 30.39725, 'J_D': 400.96810954535005, 'W_D_1KI': 0.4230124271142098, 'J_D_1KI': 0.005886700721053867} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..ab3505a --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 73.31841468811035, "TIME_S_1KI": 733.1841468811035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5511.393198342324, "W": 67.78, "J_1KI": 55113.93198342324, "W_1KI": 677.8, "W_D": 31.87075, "J_D": 2591.5053817655444, "W_D_1KI": 318.70750000000004, "J_D_1KI": 3187.0750000000007} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..0a71393 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 73.31841468811035} + +tensor(indices=tensor([[288044, 307973, 284985, ..., 482656, 467150, 491110], + [ 1307, 129087, 450748, ..., 7141, 469470, 103890]]), + values=tensor([0.7313, 0.0184, 0.7794, ..., 
0.5939, 0.3648, 0.9357]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6035, 0.1059, 0.5639, ..., 0.0233, 0.1199, 0.4322]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 73.31841468811035 seconds + +tensor(indices=tensor([[288044, 307973, 284985, ..., 482656, 467150, 491110], + [ 1307, 129087, 450748, ..., 7141, 469470, 103890]]), + values=tensor([0.7313, 0.0184, 0.7794, ..., 0.5939, 0.3648, 0.9357]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6035, 0.1059, 0.5639, ..., 0.0233, 0.1199, 0.4322]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 73.31841468811035 seconds + +[40.3, 39.71, 39.23, 39.07, 39.58, 39.14, 39.91, 39.12, 39.23, 39.41] +[67.78] +81.31297135353088 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 73.31841468811035, 'TIME_S_1KI': 733.1841468811035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5511.393198342324, 'W': 67.78} +[40.3, 39.71, 39.23, 39.07, 39.58, 39.14, 39.91, 39.12, 39.23, 39.41, 40.63, 39.32, 39.12, 39.26, 44.8, 40.02, 41.18, 39.67, 39.64, 40.03] +718.185 +35.90925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 73.31841468811035, 'TIME_S_1KI': 733.1841468811035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5511.393198342324, 'W': 67.78, 'J_1KI': 55113.93198342324, 'W_1KI': 677.8, 'W_D': 31.87075, 'J_D': 2591.5053817655444, 'W_D_1KI': 318.70750000000004, 'J_D_1KI': 3187.0750000000007} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..13025a2 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.5661301612854, "TIME_S_1KI": 115.661301612854, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 782.0749787807465, "W": 75.56, "J_1KI": 7820.749787807465, "W_1KI": 755.6, "W_D": 39.378, "J_D": 407.5774022555351, "W_D_1KI": 393.78000000000003, "J_D_1KI": 3937.8} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..8870149 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 
500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.5661301612854} + +tensor(indices=tensor([[443415, 38931, 75715, ..., 467512, 304990, 256925], + [ 55289, 313927, 438286, ..., 153505, 342499, 322192]]), + values=tensor([0.8382, 0.5454, 0.9704, ..., 0.1198, 0.3753, 0.1198]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8952, 0.0374, 0.5165, ..., 0.1200, 0.5876, 0.9808]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.5661301612854 seconds + +tensor(indices=tensor([[443415, 38931, 75715, ..., 467512, 304990, 256925], + [ 55289, 313927, 438286, ..., 153505, 342499, 322192]]), + values=tensor([0.8382, 0.5454, 0.9704, ..., 0.1198, 0.3753, 0.1198]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8952, 0.0374, 0.5165, ..., 0.1200, 0.5876, 0.9808]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.5661301612854 seconds + +[39.92, 39.26, 44.82, 40.85, 39.41, 39.91, 40.1, 39.99, 39.48, 39.19] +[75.56] +10.350383520126343 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.5661301612854, 'TIME_S_1KI': 115.661301612854, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 782.0749787807465, 'W': 75.56} +[39.92, 39.26, 44.82, 40.85, 39.41, 39.91, 40.1, 39.99, 39.48, 39.19, 40.27, 44.63, 39.39, 39.09, 39.28, 39.03, 39.57, 39.14, 40.48, 39.04] +723.6400000000001 +36.182 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.5661301612854, 'TIME_S_1KI': 115.661301612854, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 782.0749787807465, 'W': 75.56, 'J_1KI': 7820.749787807465, 'W_1KI': 755.6, 'W_D': 39.378, 'J_D': 407.5774022555351, 'W_D_1KI': 393.78000000000003, 'J_D_1KI': 3937.8} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..3be8cad --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 36.8995361328125, "TIME_S_1KI": 368.995361328125, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2908.482990465164, "W": 69.73, "J_1KI": 29084.82990465164, "W_1KI": 697.3000000000001, "W_D": 34.0565, "J_D": 1420.5184420590401, "W_D_1KI": 340.565, "J_D_1KI": 3405.65} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..cf1307f --- /dev/null +++ 
b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 36.8995361328125} + +tensor(indices=tensor([[300867, 282005, 281056, ..., 301846, 184185, 252986], + [237438, 253595, 293944, ..., 453870, 271293, 471562]]), + values=tensor([0.6829, 0.8456, 0.5106, ..., 0.8724, 0.4014, 0.7329]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.7739, 0.4505, 0.1750, ..., 0.1802, 0.8995, 0.4457]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 36.8995361328125 seconds + +tensor(indices=tensor([[300867, 282005, 281056, ..., 301846, 184185, 252986], + [237438, 253595, 293944, ..., 453870, 271293, 471562]]), + values=tensor([0.6829, 0.8456, 0.5106, ..., 0.8724, 0.4014, 0.7329]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.7739, 0.4505, 0.1750, ..., 0.1802, 0.8995, 0.4457]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 36.8995361328125 seconds + +[40.24, 39.14, 39.26, 39.07, 39.41, 40.78, 39.35, 39.42, 39.97, 39.78] +[69.73] +41.7106409072876 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 36.8995361328125, 'TIME_S_1KI': 368.995361328125, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2908.482990465164, 'W': 69.73} +[40.24, 39.14, 39.26, 39.07, 39.41, 40.78, 39.35, 39.42, 39.97, 39.78, 40.01, 39.09, 39.57, 39.35, 41.2, 39.49, 39.58, 39.53, 39.51, 39.47] +713.47 +35.673500000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 36.8995361328125, 'TIME_S_1KI': 368.995361328125, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2908.482990465164, 'W': 69.73, 'J_1KI': 29084.82990465164, 'W_1KI': 697.3000000000001, 'W_D': 34.0565, 'J_D': 1420.5184420590401, 'W_D_1KI': 340.565, 'J_D_1KI': 3405.65} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..987d012 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1366, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.399914264678955, "TIME_S_1KI": 7.613407221580494, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1045.2067499232294, "W": 80.29, "J_1KI": 765.1586749071957, "W_1KI": 58.7774524158126, "W_D": 
44.77550000000001, "J_D": 582.8827354737522, "W_D_1KI": 32.7785505124451, "J_D_1KI": 23.996010624044732} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..1da8c74 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.7683374881744385} + +tensor(indices=tensor([[41013, 1705, 48824, ..., 9410, 11669, 17120], + [45299, 1995, 36243, ..., 25728, 49364, 36140]]), + values=tensor([0.0471, 0.4851, 0.5433, ..., 0.3568, 0.4790, 0.8538]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.0576, 0.7950, 0.6162, ..., 0.3400, 0.8445, 0.5198]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.7683374881744385 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '1366', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.399914264678955} + +tensor(indices=tensor([[37873, 40200, 46440, ..., 37534, 24939, 36369], + [16048, 21002, 7268, ..., 19017, 47060, 18139]]), + values=tensor([0.5381, 0.5256, 0.4577, ..., 0.6173, 0.2312, 0.5078]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.4660, 0.8708, 0.8137, ..., 0.6005, 0.6974, 0.8793]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.399914264678955 seconds + +tensor(indices=tensor([[37873, 40200, 46440, ..., 37534, 24939, 36369], + [16048, 21002, 7268, ..., 19017, 47060, 18139]]), + values=tensor([0.5381, 0.5256, 0.4577, ..., 0.6173, 0.2312, 0.5078]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.4660, 0.8708, 0.8137, ..., 0.6005, 0.6974, 0.8793]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.399914264678955 seconds + +[40.29, 39.67, 39.18, 39.05, 39.22, 39.44, 39.24, 39.07, 40.74, 39.43] +[80.29] +13.017894506454468 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1366, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.399914264678955, 'TIME_S_1KI': 7.613407221580494, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1045.2067499232294, 'W': 80.29} +[40.29, 39.67, 39.18, 39.05, 39.22, 39.44, 39.24, 39.07, 40.74, 39.43, 39.8, 39.14, 39.56, 39.69, 39.57, 39.77, 39.15, 39.01, 39.38, 39.3] +710.29 +35.5145 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1366, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 
'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.399914264678955, 'TIME_S_1KI': 7.613407221580494, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1045.2067499232294, 'W': 80.29, 'J_1KI': 765.1586749071957, 'W_1KI': 58.7774524158126, 'W_D': 44.77550000000001, 'J_D': 582.8827354737522, 'W_D_1KI': 32.7785505124451, 'J_D_1KI': 23.996010624044732} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..707fa07 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 145, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.38773488998413, "TIME_S_1KI": 71.6395509654078, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 917.5728541564941, "W": 68.48, "J_1KI": 6328.088649355132, "W_1KI": 472.2758620689655, "W_D": 33.05525, "J_D": 442.9118003410697, "W_D_1KI": 227.96724137931034, "J_D_1KI": 1572.1878715814505} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..584288d --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.237296104431152} + +tensor(indices=tensor([[34023, 23261, 14422, ..., 34534, 23930, 35074], + [39991, 8209, 26679, ..., 8758, 3775, 21718]]), + values=tensor([0.9407, 0.4330, 0.1074, ..., 0.2027, 0.4271, 0.6688]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8451, 0.8648, 0.7389, ..., 0.4568, 0.9951, 0.0205]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 7.237296104431152 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '145', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.38773488998413} + +tensor(indices=tensor([[23678, 46274, 12453, ..., 38423, 48279, 41653], + [16343, 36797, 42166, ..., 33783, 43342, 41775]]), + values=tensor([0.1885, 0.4732, 0.0122, ..., 0.4327, 0.4775, 0.1269]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4770, 0.5781, 0.4283, ..., 0.5602, 0.9513, 0.0075]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.38773488998413 seconds + +tensor(indices=tensor([[23678, 
46274, 12453, ..., 38423, 48279, 41653], + [16343, 36797, 42166, ..., 33783, 43342, 41775]]), + values=tensor([0.1885, 0.4732, 0.0122, ..., 0.4327, 0.4775, 0.1269]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4770, 0.5781, 0.4283, ..., 0.5602, 0.9513, 0.0075]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.38773488998413 seconds + +[39.79, 39.7, 39.44, 39.01, 39.46, 39.43, 39.41, 39.06, 39.12, 39.66] +[68.48] +13.399136304855347 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 145, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.38773488998413, 'TIME_S_1KI': 71.6395509654078, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 917.5728541564941, 'W': 68.48} +[39.79, 39.7, 39.44, 39.01, 39.46, 39.43, 39.41, 39.06, 39.12, 39.66, 39.84, 41.06, 39.01, 38.9, 38.94, 38.87, 39.09, 39.52, 38.86, 39.94] +708.495 +35.42475 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 145, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.38773488998413, 'TIME_S_1KI': 71.6395509654078, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 917.5728541564941, 'W': 68.48, 'J_1KI': 6328.088649355132, 'W_1KI': 472.2758620689655, 'W_D': 33.05525, 'J_D': 442.9118003410697, 'W_D_1KI': 227.96724137931034, 'J_D_1KI': 1572.1878715814505} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..85cf2a4 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 71.45479941368103, "TIME_S_1KI": 714.5479941368103, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5351.296514730453, "W": 66.46, "J_1KI": 53512.96514730453, "W_1KI": 664.6, "W_D": 30.856999999999992, "J_D": 2484.57653558588, "W_D_1KI": 308.5699999999999, "J_D_1KI": 3085.699999999999} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..fed1459 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 71.45479941368103} + +tensor(indices=tensor([[17054, 47413, 26536, ..., 28774, 7222, 38166], + [13244, 16629, 27686, ..., 42447, 33521, 32496]]), + values=tensor([0.7223, 0.5150, 0.8761, ..., 0.3322, 0.2435, 0.7993]), + size=(50000, 50000), nnz=25000000, 
layout=torch.sparse_coo) +tensor([0.3362, 0.9946, 0.4521, ..., 0.4736, 0.0397, 0.6596]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 71.45479941368103 seconds + +tensor(indices=tensor([[17054, 47413, 26536, ..., 28774, 7222, 38166], + [13244, 16629, 27686, ..., 42447, 33521, 32496]]), + values=tensor([0.7223, 0.5150, 0.8761, ..., 0.3322, 0.2435, 0.7993]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3362, 0.9946, 0.4521, ..., 0.4736, 0.0397, 0.6596]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 71.45479941368103 seconds + +[39.69, 38.9, 39.04, 38.98, 39.3, 38.93, 39.26, 39.33, 39.05, 39.14] +[66.46] +80.51905679702759 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 71.45479941368103, 'TIME_S_1KI': 714.5479941368103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5351.296514730453, 'W': 66.46} +[39.69, 38.9, 39.04, 38.98, 39.3, 38.93, 39.26, 39.33, 39.05, 39.14, 39.85, 39.32, 39.15, 44.53, 40.38, 39.09, 39.3, 39.32, 39.19, 39.3] +712.0600000000001 +35.603 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 71.45479941368103, 'TIME_S_1KI': 714.5479941368103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5351.296514730453, 'W': 66.46, 'J_1KI': 53512.96514730453, 'W_1KI': 664.6, 'W_D': 30.856999999999992, 'J_D': 2484.57653558588, 'W_D_1KI': 308.5699999999999, 'J_D_1KI': 3085.699999999999} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..f0a419f --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 357.26713609695435, "TIME_S_1KI": 3572.6713609695435, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 25325.282702183726, "W": 65.9, "J_1KI": 253252.82702183723, "W_1KI": 659.0, "W_D": 30.077250000000006, "J_D": 11558.64733162755, "W_D_1KI": 300.77250000000004, "J_D_1KI": 3007.7250000000004} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..8cd9724 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, 
"MATRIX_DENSITY": 0.05, "TIME_S": 357.26713609695435} + +tensor(indices=tensor([[31912, 9588, 13548, ..., 48039, 32513, 12504], + [39783, 44039, 4596, ..., 44634, 47873, 13551]]), + values=tensor([0.1056, 0.5699, 0.4362, ..., 0.4045, 0.8470, 0.5368]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.5070, 0.3158, 0.5421, ..., 0.5519, 0.5871, 0.2866]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 357.26713609695435 seconds + +tensor(indices=tensor([[31912, 9588, 13548, ..., 48039, 32513, 12504], + [39783, 44039, 4596, ..., 44634, 47873, 13551]]), + values=tensor([0.1056, 0.5699, 0.4362, ..., 0.4045, 0.8470, 0.5368]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.5070, 0.3158, 0.5421, ..., 0.5519, 0.5871, 0.2866]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 357.26713609695435 seconds + +[39.87, 39.99, 40.23, 39.17, 39.57, 39.59, 39.13, 39.16, 39.11, 39.2] +[65.9] +384.2986752986908 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 357.26713609695435, 'TIME_S_1KI': 3572.6713609695435, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25325.282702183726, 'W': 65.9} +[39.87, 39.99, 40.23, 39.17, 39.57, 39.59, 39.13, 39.16, 39.11, 39.2, 39.91, 39.17, 44.81, 39.34, 39.42, 39.54, 39.24, 39.67, 39.71, 40.23] +716.455 +35.82275 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 357.26713609695435, 'TIME_S_1KI': 3572.6713609695435, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25325.282702183726, 'W': 65.9, 'J_1KI': 253252.82702183723, 'W_1KI': 659.0, 'W_D': 30.077250000000006, 'J_D': 11558.64733162755, 'W_D_1KI': 300.77250000000004, 'J_D_1KI': 3007.7250000000004} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..24a8710 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 11907, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.034393548965454, "TIME_S_1KI": 0.9267148357239821, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1071.6094908857347, "W": 83.79, "J_1KI": 89.99827755822076, "W_1KI": 7.037037037037038, "W_D": 48.31775000000001, "J_D": 617.9467654641868, "W_D_1KI": 4.057928109515412, "J_D_1KI": 0.34080189044389114} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..61fa314 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 
'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1046445369720459} + +tensor(indices=tensor([[19023, 26802, 5582, ..., 948, 15065, 16900], + [ 286, 15505, 13231, ..., 21787, 40702, 41267]]), + values=tensor([0.6266, 0.5519, 0.9673, ..., 0.5598, 0.7076, 0.9348]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6663, 0.5186, 0.8343, ..., 0.9945, 0.4311, 0.7208]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.1046445369720459 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '10033', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.846850156784058} + +tensor(indices=tensor([[ 5223, 22799, 9638, ..., 23983, 27535, 1376], + [33527, 45664, 40332, ..., 6320, 4256, 24765]]), + values=tensor([0.1803, 0.4224, 0.8670, ..., 0.3648, 0.8224, 0.6332]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6582, 0.9945, 0.5757, ..., 0.4519, 0.2714, 0.2358]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 8.846850156784058 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '11907', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.034393548965454} + +tensor(indices=tensor([[14812, 2792, 37306, ..., 40170, 32970, 38022], + [30713, 1193, 10510, ..., 33406, 12058, 19026]]), + values=tensor([0.3026, 0.4078, 0.1032, ..., 0.6991, 0.7415, 0.6751]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5457, 0.5828, 0.3390, ..., 0.4886, 0.9969, 0.3198]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 11.034393548965454 seconds + +tensor(indices=tensor([[14812, 2792, 37306, ..., 40170, 32970, 38022], + [30713, 1193, 10510, ..., 33406, 12058, 19026]]), + values=tensor([0.3026, 0.4078, 0.1032, ..., 0.6991, 0.7415, 0.6751]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5457, 0.5828, 0.3390, ..., 0.4886, 0.9969, 0.3198]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 11.034393548965454 seconds + +[41.74, 39.22, 39.25, 39.8, 39.54, 39.54, 39.55, 39.77, 39.21, 39.06] +[83.79] +12.789228916168213 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 11907, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.034393548965454, 
'TIME_S_1KI': 0.9267148357239821, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1071.6094908857347, 'W': 83.79} +[41.74, 39.22, 39.25, 39.8, 39.54, 39.54, 39.55, 39.77, 39.21, 39.06, 39.72, 39.2, 39.2, 38.98, 39.24, 39.03, 39.8, 38.97, 39.01, 39.75] +709.4449999999999 +35.472249999999995 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 11907, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.034393548965454, 'TIME_S_1KI': 0.9267148357239821, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1071.6094908857347, 'W': 83.79, 'J_1KI': 89.99827755822076, 'W_1KI': 7.037037037037038, 'W_D': 48.31775000000001, 'J_D': 617.9467654641868, 'W_D_1KI': 4.057928109515412, 'J_D_1KI': 0.34080189044389114} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..8aa34f7 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2838, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.517476558685303, "TIME_S_1KI": 3.7059466380145536, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1092.982902522087, "W": 82.94, "J_1KI": 385.1243490211723, "W_1KI": 29.224806201550386, "W_D": 47.390249999999995, "J_D": 624.5084759615063, "W_D_1KI": 16.69846723044397, "J_D_1KI": 5.883885563933746} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..ef421cc --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.4157376289367676} + +tensor(indices=tensor([[18915, 33179, 39233, ..., 27245, 40005, 1305], + [48188, 40772, 6358, ..., 47990, 36694, 3591]]), + values=tensor([0.8022, 0.3079, 0.7844, ..., 0.9830, 0.8821, 0.2923]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.1916, 0.3655, 0.2062, ..., 0.1342, 0.1284, 0.1546]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.4157376289367676 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '2525', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.339745998382568} + +tensor(indices=tensor([[20205, 25330, 47258, ..., 22227, 10821, 12408], + [ 7196, 20006, 12982, ..., 45287, 35150, 
32512]]), + values=tensor([0.8826, 0.9154, 0.0893, ..., 0.6585, 0.8780, 0.2434]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.0132, 0.3953, 0.6422, ..., 0.9381, 0.3138, 0.2458]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 9.339745998382568 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '2838', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.517476558685303} + +tensor(indices=tensor([[13321, 49743, 26499, ..., 11145, 4761, 15417], + [41579, 1224, 21842, ..., 6440, 37123, 45371]]), + values=tensor([0.3344, 0.4776, 0.8495, ..., 0.2677, 0.9379, 0.1656]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.5284, 0.4015, 0.3220, ..., 0.9081, 0.4661, 0.3148]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.517476558685303 seconds + +tensor(indices=tensor([[13321, 49743, 26499, ..., 11145, 4761, 15417], + [41579, 1224, 21842, ..., 6440, 37123, 45371]]), + values=tensor([0.3344, 0.4776, 0.8495, ..., 0.2677, 0.9379, 0.1656]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.5284, 0.4015, 0.3220, ..., 0.9081, 0.4661, 0.3148]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.517476558685303 seconds + +[41.39, 39.09, 39.61, 39.06, 39.16, 39.02, 39.47, 39.02, 39.92, 40.72] +[82.94] +13.177994966506958 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2838, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.517476558685303, 'TIME_S_1KI': 3.7059466380145536, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1092.982902522087, 'W': 82.94} +[41.39, 39.09, 39.61, 39.06, 39.16, 39.02, 39.47, 39.02, 39.92, 40.72, 40.89, 39.82, 39.38, 39.24, 39.15, 39.33, 40.15, 39.1, 39.16, 39.63] +710.995 +35.54975 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2838, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.517476558685303, 'TIME_S_1KI': 3.7059466380145536, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1092.982902522087, 'W': 82.94, 'J_1KI': 385.1243490211723, 'W_1KI': 29.224806201550386, 'W_D': 47.390249999999995, 'J_D': 624.5084759615063, 'W_D_1KI': 16.69846723044397, 'J_D_1KI': 5.883885563933746} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..4c51eae --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 137697, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 
0.0001, "TIME_S": 10.42162561416626, "TIME_S_1KI": 0.07568520457356559, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 844.7216835689544, "W": 65.63, "J_1KI": 6.134641158260197, "W_1KI": 0.47662621553120255, "W_D": 30.299749999999996, "J_D": 389.9871374633312, "W_D_1KI": 0.22004655148623423, "J_D_1KI": 0.0015980489878954097} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..4a010d8 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.017053842544555664} + +tensor(indices=tensor([[4386, 2616, 4252, ..., 3293, 1999, 3924], + [1341, 1252, 805, ..., 3978, 2855, 1559]]), + values=tensor([0.0706, 0.6181, 0.3901, ..., 0.1064, 0.5305, 0.3100]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.0399, 0.8850, 0.7072, ..., 0.7530, 0.2239, 0.3143]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.017053842544555664 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '61569', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.694897651672363} + +tensor(indices=tensor([[3531, 1170, 2352, ..., 1098, 2952, 3376], + [2100, 1359, 10, ..., 4047, 3703, 3691]]), + values=tensor([0.7179, 0.7680, 0.5464, ..., 0.7494, 0.6800, 0.3149]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.0349, 0.0363, 0.5293, ..., 0.7453, 0.8285, 0.8904]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 4.694897651672363 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '137697', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.42162561416626} + +tensor(indices=tensor([[3285, 488, 671, ..., 3230, 3694, 169], + [1214, 1498, 3608, ..., 1388, 14, 1768]]), + values=tensor([0.6992, 0.6685, 0.8506, ..., 0.9753, 0.9449, 0.5034]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8671, 0.3776, 0.2959, ..., 0.7793, 0.0384, 0.7107]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.42162561416626 seconds + +tensor(indices=tensor([[3285, 488, 671, ..., 3230, 3694, 169], + [1214, 1498, 3608, ..., 1388, 14, 1768]]), + values=tensor([0.6992, 0.6685, 0.8506, ..., 0.9753, 0.9449, 0.5034]), + size=(5000, 5000), nnz=2500, 
layout=torch.sparse_coo) +tensor([0.8671, 0.3776, 0.2959, ..., 0.7793, 0.0384, 0.7107]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.42162561416626 seconds + +[40.42, 39.49, 39.41, 38.84, 38.94, 38.82, 39.26, 39.62, 38.91, 38.79] +[65.63] +12.87096881866455 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 137697, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.42162561416626, 'TIME_S_1KI': 0.07568520457356559, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 844.7216835689544, 'W': 65.63} +[40.42, 39.49, 39.41, 38.84, 38.94, 38.82, 39.26, 39.62, 38.91, 38.79, 39.53, 39.01, 39.63, 39.14, 39.47, 39.15, 39.23, 39.6, 39.16, 39.11] +706.605 +35.33025 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 137697, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.42162561416626, 'TIME_S_1KI': 0.07568520457356559, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 844.7216835689544, 'W': 65.63, 'J_1KI': 6.134641158260197, 'W_1KI': 0.47662621553120255, 'W_D': 30.299749999999996, 'J_D': 389.9871374633312, 'W_D_1KI': 0.22004655148623423, 'J_D_1KI': 0.0015980489878954097} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..8fffcbc --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 14742, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.538252115249634, "TIME_S_1KI": 0.7148454833299168, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 865.9526512718202, "W": 66.26, "J_1KI": 58.74051358511872, "W_1KI": 4.494641161307828, "W_D": 30.418000000000006, "J_D": 397.53316852378856, "W_D_1KI": 2.06335639668973, "J_D_1KI": 0.13996448220660224} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..a4fe4ad --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.0796968936920166} + +tensor(indices=tensor([[3493, 4477, 893, ..., 1162, 4008, 3225], + [4883, 3840, 4475, ..., 982, 3016, 809]]), + values=tensor([0.4204, 0.5538, 0.7974, ..., 0.9366, 0.8006, 0.2427]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.2355, 0.0098, 0.5832, ..., 0.3378, 0.4297, 0.5889]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 
+Time: 0.0796968936920166 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '13174', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.38279390335083} + +tensor(indices=tensor([[4044, 2050, 1104, ..., 2670, 1181, 2407], + [3716, 2956, 1657, ..., 4, 4687, 4189]]), + values=tensor([0.8303, 0.9195, 0.1156, ..., 0.4883, 0.1844, 0.1460]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9410, 0.2778, 0.8966, ..., 0.0214, 0.6026, 0.9376]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.38279390335083 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '14742', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.538252115249634} + +tensor(indices=tensor([[4378, 457, 832, ..., 2899, 4138, 942], + [4482, 457, 1233, ..., 1546, 2638, 4926]]), + values=tensor([0.2483, 0.1521, 0.4865, ..., 0.8674, 0.5724, 0.9846]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9965, 0.2779, 0.3571, ..., 0.7557, 0.2721, 0.3621]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.538252115249634 seconds + +tensor(indices=tensor([[4378, 457, 832, ..., 2899, 4138, 942], + [4482, 457, 1233, ..., 1546, 2638, 4926]]), + values=tensor([0.2483, 0.1521, 0.4865, ..., 0.8674, 0.5724, 0.9846]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9965, 0.2779, 0.3571, ..., 0.7557, 0.2721, 0.3621]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.538252115249634 seconds + +[39.64, 39.35, 39.2, 38.86, 44.08, 39.37, 38.87, 39.4, 39.37, 39.52] +[66.26] +13.069010734558105 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 14742, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.538252115249634, 'TIME_S_1KI': 0.7148454833299168, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 865.9526512718202, 'W': 66.26} +[39.64, 39.35, 39.2, 38.86, 44.08, 39.37, 38.87, 39.4, 39.37, 39.52, 39.58, 45.29, 39.55, 39.28, 39.22, 39.0, 39.03, 39.25, 38.92, 38.86] +716.8399999999999 +35.842 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 14742, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.538252115249634, 'TIME_S_1KI': 0.7148454833299168, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 865.9526512718202, 'W': 66.26, 'J_1KI': 58.74051358511872, 'W_1KI': 4.494641161307828, 'W_D': 30.418000000000006, 'J_D': 397.53316852378856, 'W_D_1KI': 2.06335639668973, 'J_D_1KI': 0.13996448220660224} diff --git 
a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..72313e9 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1461, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.331062078475952, "TIME_S_1KI": 7.071226610866497, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 856.4392486953736, "W": 66.12, "J_1KI": 586.2007177928635, "W_1KI": 45.256673511293634, "W_D": 24.464500000000008, "J_D": 316.88381729745873, "W_D_1KI": 16.745037645448328, "J_D_1KI": 11.461353624536843} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..d2c454f --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.7183005809783936} + +tensor(indices=tensor([[ 464, 964, 1316, ..., 3543, 1400, 3653], + [4282, 4503, 4641, ..., 3670, 959, 282]]), + values=tensor([0.5041, 0.6168, 0.4548, ..., 0.4811, 0.4281, 0.8493]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.1137, 0.6079, 0.4428, ..., 0.7657, 0.0676, 0.7210]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.7183005809783936 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '1461', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.331062078475952} + +tensor(indices=tensor([[4986, 1014, 215, ..., 689, 3311, 824], + [ 688, 2286, 1333, ..., 273, 2663, 2967]]), + values=tensor([0.0548, 0.6403, 0.1800, ..., 0.9141, 0.1317, 0.2749]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.3477, 0.0655, 0.6030, ..., 0.3816, 0.0718, 0.3260]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.331062078475952 seconds + +tensor(indices=tensor([[4986, 1014, 215, ..., 689, 3311, 824], + [ 688, 2286, 1333, ..., 273, 2663, 2967]]), + values=tensor([0.0548, 0.6403, 0.1800, ..., 0.9141, 0.1317, 0.2749]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.3477, 0.0655, 0.6030, ..., 0.3816, 0.0718, 0.3260]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.331062078475952 seconds + +[39.84, 39.13, 40.9, 38.96, 39.81, 39.21, 39.26, 39.21, 39.06, 38.85] +[66.12] 
+12.952801704406738 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1461, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.331062078475952, 'TIME_S_1KI': 7.071226610866497, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.4392486953736, 'W': 66.12} +[39.84, 39.13, 40.9, 38.96, 39.81, 39.21, 39.26, 39.21, 39.06, 38.85, 39.69, 38.82, 38.96, 39.29, 58.76, 63.98, 63.72, 67.17, 56.98, 61.4] +833.1099999999999 +41.655499999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1461, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.331062078475952, 'TIME_S_1KI': 7.071226610866497, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.4392486953736, 'W': 66.12, 'J_1KI': 586.2007177928635, 'W_1KI': 45.256673511293634, 'W_D': 24.464500000000008, 'J_D': 316.88381729745873, 'W_D_1KI': 16.745037645448328, 'J_D_1KI': 11.461353624536843} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..5c5b659 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 289, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.300323009490967, "TIME_S_1KI": 35.64125608820404, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 968.6774788570405, "W": 73.06, "J_1KI": 3351.8251863565415, "W_1KI": 252.80276816608998, "W_D": 10.63375, "J_D": 140.98924364626407, "W_D_1KI": 36.79498269896193, "J_D_1KI": 127.3182792351624} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..eaf08d6 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 3.632103681564331} + +tensor(indices=tensor([[ 254, 4075, 3830, ..., 71, 2221, 4689], + [2309, 2270, 2207, ..., 4204, 3588, 4880]]), + values=tensor([0.2301, 0.9654, 0.0393, ..., 0.7143, 0.0997, 0.6774]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.8050, 0.9707, 0.6176, ..., 0.0576, 0.9586, 0.3299]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 3.632103681564331 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '289', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, 
"MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.300323009490967} + +tensor(indices=tensor([[2907, 1800, 4997, ..., 2111, 3251, 4109], + [1630, 1773, 2015, ..., 2458, 4297, 2981]]), + values=tensor([0.2517, 0.4901, 0.6951, ..., 0.4798, 0.1568, 0.7428]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.7668, 0.8717, 0.7341, ..., 0.1439, 0.9675, 0.6765]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.300323009490967 seconds + +tensor(indices=tensor([[2907, 1800, 4997, ..., 2111, 3251, 4109], + [1630, 1773, 2015, ..., 2458, 4297, 2981]]), + values=tensor([0.2517, 0.4901, 0.6951, ..., 0.4798, 0.1568, 0.7428]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.7668, 0.8717, 0.7341, ..., 0.1439, 0.9675, 0.6765]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.300323009490967 seconds + +[66.77, 70.67, 68.05, 66.19, 69.09, 70.9, 66.51, 63.94, 67.41, 62.98] +[73.06] +13.258656978607178 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 289, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.300323009490967, 'TIME_S_1KI': 35.64125608820404, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 968.6774788570405, 'W': 73.06} +[66.77, 70.67, 68.05, 66.19, 69.09, 70.9, 66.51, 63.94, 67.41, 62.98, 74.04, 70.23, 70.4, 69.25, 71.25, 68.39, 69.91, 72.37, 71.33, 81.48] +1248.525 +62.42625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 289, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.300323009490967, 'TIME_S_1KI': 35.64125608820404, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 968.6774788570405, 'W': 73.06, 'J_1KI': 3351.8251863565415, 'W_1KI': 252.80276816608998, 'W_D': 10.63375, 'J_D': 140.98924364626407, 'W_D_1KI': 36.79498269896193, 'J_D_1KI': 127.3182792351624} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..5cdba8d --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 148, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.988237857818604, "TIME_S_1KI": 74.24485039066623, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 885.5371377134323, "W": 66.17, "J_1KI": 5983.359038604272, "W_1KI": 447.0945945945946, "W_D": 30.241, "J_D": 404.70800334882733, "W_D_1KI": 204.3310810810811, "J_D_1KI": 1380.6154127100074} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..cfad9d8 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 
'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 7.055576324462891} + +tensor(indices=tensor([[2184, 931, 3693, ..., 3563, 2976, 3249], + [1798, 794, 4876, ..., 358, 3492, 2648]]), + values=tensor([0.4288, 0.5214, 0.5611, ..., 0.9357, 0.6565, 0.7841]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5326, 0.8493, 0.0264, ..., 0.7903, 0.5077, 0.6442]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 7.055576324462891 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '148', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.988237857818604} + +tensor(indices=tensor([[4112, 2386, 4419, ..., 3201, 1134, 2400], + [3605, 2479, 679, ..., 654, 847, 2522]]), + values=tensor([0.3838, 0.3952, 0.7729, ..., 0.6893, 0.2957, 0.9203]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8407, 0.7658, 0.5022, ..., 0.1346, 0.0449, 0.6384]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.988237857818604 seconds + +tensor(indices=tensor([[4112, 2386, 4419, ..., 3201, 1134, 2400], + [3605, 2479, 679, ..., 654, 847, 2522]]), + values=tensor([0.3838, 0.3952, 0.7729, ..., 0.6893, 0.2957, 0.9203]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8407, 0.7658, 0.5022, ..., 0.1346, 0.0449, 0.6384]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.988237857818604 seconds + +[42.55, 40.57, 39.74, 39.68, 39.85, 40.12, 39.43, 40.59, 39.35, 39.45] +[66.17] +13.382758617401123 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 148, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.988237857818604, 'TIME_S_1KI': 74.24485039066623, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 885.5371377134323, 'W': 66.17} +[42.55, 40.57, 39.74, 39.68, 39.85, 40.12, 39.43, 40.59, 39.35, 39.45, 39.73, 39.08, 39.14, 40.27, 40.04, 39.46, 39.69, 41.19, 39.81, 39.41] +718.58 +35.929 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 148, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.988237857818604, 'TIME_S_1KI': 74.24485039066623, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 885.5371377134323, 'W': 66.17, 'J_1KI': 5983.359038604272, 'W_1KI': 447.0945945945946, 'W_D': 30.241, 'J_D': 404.70800334882733, 'W_D_1KI': 204.3310810810811, 'J_D_1KI': 1380.6154127100074} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..9cad5a3 --- 
/dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 14.240001916885376, "TIME_S_1KI": 142.40001916885376, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1168.7469943428039, "W": 66.78, "J_1KI": 11687.469943428037, "W_1KI": 667.8000000000001, "W_D": 31.2395, "J_D": 546.7366236863136, "W_D_1KI": 312.395, "J_D_1KI": 3123.95} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..23a8845 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 14.240001916885376} + +tensor(indices=tensor([[3275, 2916, 2743, ..., 4943, 368, 3894], + [2594, 1427, 3310, ..., 1954, 1768, 4952]]), + values=tensor([0.0369, 0.0298, 0.4306, ..., 0.8695, 0.7596, 0.8425]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.2963, 0.5511, 0.8309, ..., 0.4292, 0.5907, 0.1419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 14.240001916885376 seconds + +tensor(indices=tensor([[3275, 2916, 2743, ..., 4943, 368, 3894], + [2594, 1427, 3310, ..., 1954, 1768, 4952]]), + values=tensor([0.0369, 0.0298, 0.4306, ..., 0.8695, 0.7596, 0.8425]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.2963, 0.5511, 0.8309, ..., 0.4292, 0.5907, 0.1419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 14.240001916885376 seconds + +[39.72, 39.01, 38.97, 39.4, 39.22, 39.38, 39.56, 38.95, 38.96, 39.26] +[66.78] +17.501452445983887 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 14.240001916885376, 'TIME_S_1KI': 142.40001916885376, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1168.7469943428039, 'W': 66.78} +[39.72, 39.01, 38.97, 39.4, 39.22, 39.38, 39.56, 38.95, 38.96, 39.26, 39.43, 39.34, 39.06, 38.82, 38.85, 38.88, 45.71, 38.86, 39.13, 39.01] +710.8100000000001 +35.5405 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 14.240001916885376, 'TIME_S_1KI': 142.40001916885376, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1168.7469943428039, 'W': 66.78, 'J_1KI': 11687.469943428037, 'W_1KI': 667.8000000000001, 'W_D': 31.2395, 'J_D': 546.7366236863136, 'W_D_1KI': 312.395, 'J_D_1KI': 3123.95} diff --git 
a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..df39c24 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 21.132622480392456, "TIME_S_1KI": 211.32622480392456, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1649.5520226097105, "W": 65.38, "J_1KI": 16495.520226097105, "W_1KI": 653.8, "W_D": 29.799249999999994, "J_D": 751.8417422721384, "W_D_1KI": 297.99249999999995, "J_D_1KI": 2979.9249999999997} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..95caaf3 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 21.132622480392456} + +tensor(indices=tensor([[1464, 2967, 181, ..., 2275, 4709, 1505], + [ 458, 3332, 816, ..., 1214, 4608, 381]]), + values=tensor([0.5271, 0.3308, 0.6151, ..., 0.7630, 0.6539, 0.3043]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.7383, 0.7560, 0.3216, ..., 0.4446, 0.9350, 0.9498]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 21.132622480392456 seconds + +tensor(indices=tensor([[1464, 2967, 181, ..., 2275, 4709, 1505], + [ 458, 3332, 816, ..., 1214, 4608, 381]]), + values=tensor([0.5271, 0.3308, 0.6151, ..., 0.7630, 0.6539, 0.3043]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.7383, 0.7560, 0.3216, ..., 0.4446, 0.9350, 0.9498]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 21.132622480392456 seconds + +[39.52, 38.87, 39.23, 39.28, 39.43, 39.2, 39.6, 39.59, 39.07, 38.76] +[65.38] +25.230223655700684 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 21.132622480392456, 'TIME_S_1KI': 211.32622480392456, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1649.5520226097105, 'W': 65.38} +[39.52, 38.87, 39.23, 39.28, 39.43, 39.2, 39.6, 39.59, 39.07, 38.76, 40.29, 38.93, 39.23, 39.48, 39.21, 38.96, 44.7, 38.94, 39.23, 38.76] +711.615 +35.58075 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 21.132622480392456, 'TIME_S_1KI': 211.32622480392456, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
1649.5520226097105, 'W': 65.38, 'J_1KI': 16495.520226097105, 'W_1KI': 653.8, 'W_D': 29.799249999999994, 'J_D': 751.8417422721384, 'W_D_1KI': 297.99249999999995, 'J_D_1KI': 2979.9249999999997} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..83c867e --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 28.155537128448486, "TIME_S_1KI": 281.55537128448486, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2155.759934134483, "W": 65.82, "J_1KI": 21557.59934134483, "W_1KI": 658.1999999999999, "W_D": 30.531999999999996, "J_D": 999.9948694772719, "W_D_1KI": 305.32, "J_D_1KI": 3053.2} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..2ddc036 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 28.155537128448486} + +tensor(indices=tensor([[3645, 866, 4464, ..., 3775, 4840, 1467], + [ 325, 2018, 887, ..., 1168, 4360, 1619]]), + values=tensor([0.5801, 0.1662, 0.1735, ..., 0.9877, 0.5371, 0.4024]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9744, 0.6419, 0.2598, ..., 0.9655, 0.4765, 0.6331]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 28.155537128448486 seconds + +tensor(indices=tensor([[3645, 866, 4464, ..., 3775, 4840, 1467], + [ 325, 2018, 887, ..., 1168, 4360, 1619]]), + values=tensor([0.5801, 0.1662, 0.1735, ..., 0.9877, 0.5371, 0.4024]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9744, 0.6419, 0.2598, ..., 0.9655, 0.4765, 0.6331]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 28.155537128448486 seconds + +[40.17, 39.41, 39.27, 38.89, 39.02, 39.07, 39.26, 38.9, 39.45, 39.02] +[65.82] +32.75235390663147 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 28.155537128448486, 'TIME_S_1KI': 281.55537128448486, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2155.759934134483, 'W': 65.82} +[40.17, 39.41, 39.27, 38.89, 39.02, 39.07, 39.26, 38.9, 39.45, 39.02, 39.64, 39.04, 39.05, 39.11, 39.16, 39.41, 40.01, 38.89, 38.97, 38.87] +705.76 +35.288 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 
'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 28.155537128448486, 'TIME_S_1KI': 281.55537128448486, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2155.759934134483, 'W': 65.82, 'J_1KI': 21557.59934134483, 'W_1KI': 658.1999999999999, 'W_D': 30.531999999999996, 'J_D': 999.9948694772719, 'W_D_1KI': 305.32, 'J_D_1KI': 3053.2} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..2498f33 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 35.20994186401367, "TIME_S_1KI": 352.0994186401367, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2644.1709757876392, "W": 65.49, "J_1KI": 26441.709757876393, "W_1KI": 654.9, "W_D": 29.974999999999994, "J_D": 1210.2462207853791, "W_D_1KI": 299.74999999999994, "J_D_1KI": 2997.4999999999995} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..eb2720d --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 35.20994186401367} + +tensor(indices=tensor([[3943, 2004, 3051, ..., 3465, 198, 2587], + [4386, 4164, 493, ..., 2470, 2586, 4314]]), + values=tensor([0.5814, 0.2631, 0.1674, ..., 0.8278, 0.5822, 0.5172]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.4259, 0.1334, 0.4015, ..., 0.2681, 0.4743, 0.6607]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 35.20994186401367 seconds + +tensor(indices=tensor([[3943, 2004, 3051, ..., 3465, 198, 2587], + [4386, 4164, 493, ..., 2470, 2586, 4314]]), + values=tensor([0.5814, 0.2631, 0.1674, ..., 0.8278, 0.5822, 0.5172]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.4259, 0.1334, 0.4015, ..., 0.2681, 0.4743, 0.6607]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 35.20994186401367 seconds + +[39.7, 39.04, 39.15, 39.74, 39.44, 39.55, 40.05, 39.58, 39.14, 38.95] +[65.49] +40.37518668174744 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 35.20994186401367, 'TIME_S_1KI': 352.0994186401367, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2644.1709757876392, 'W': 65.49} +[39.7, 39.04, 39.15, 39.74, 39.44, 39.55, 40.05, 39.58, 39.14, 38.95, 39.67, 39.43, 39.6, 39.57, 
39.46, 40.37, 39.57, 38.91, 38.97, 39.14] +710.3000000000001 +35.515 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 35.20994186401367, 'TIME_S_1KI': 352.0994186401367, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2644.1709757876392, 'W': 65.49, 'J_1KI': 26441.709757876393, 'W_1KI': 654.9, 'W_D': 29.974999999999994, 'J_D': 1210.2462207853791, 'W_D_1KI': 299.74999999999994, 'J_D_1KI': 2997.4999999999995} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..c611346 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 902550, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.34853196144104, "TIME_S_1KI": 0.011465882179869304, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 847.5583872699738, "W": 66.12, "J_1KI": 0.9390708406957773, "W_1KI": 0.07325909921887984, "W_D": 30.845499999999994, "J_D": 395.3926532748937, "W_D_1KI": 0.034175945930973346, "J_D_1KI": 3.786598629546657e-05} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..4df0ceb --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,510 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.009576559066772461} + +tensor(indices=tensor([[ 418, 1162, 2359, 2601, 1813, 1813, 791, 1483, 1468, + 4223, 4160, 1019, 3621, 2080, 2346, 786, 3640, 913, + 4036, 2855, 661, 2756, 4290, 2540, 2123, 920, 4400, + 2943, 1545, 4999, 166, 938, 1056, 139, 524, 4715, + 1458, 4621, 3763, 2348, 2764, 3646, 3382, 2526, 1058, + 478, 2037, 3793, 3528, 323, 1912, 1378, 3087, 3125, + 2963, 4435, 2754, 1297, 2061, 1335, 1004, 1182, 2842, + 4883, 830, 2149, 1826, 2031, 3435, 4130, 4223, 2227, + 2651, 3328, 2681, 332, 4628, 1023, 2150, 3764, 2806, + 3739, 1672, 2382, 112, 2673, 4619, 4823, 4800, 953, + 401, 1271, 772, 4228, 1483, 2305, 2097, 4928, 3083, + 1111, 773, 3595, 428, 2967, 4466, 3298, 1717, 375, + 463, 412, 4650, 2627, 4994, 1619, 1923, 164, 2274, + 957, 3015, 4790, 2055, 1321, 2166, 3867, 3965, 4664, + 2753, 764, 4736, 1654, 144, 2988, 1892, 2330, 3468, + 3996, 784, 1111, 2879, 1224, 2082, 491, 604, 1382, + 4376, 312, 3653, 1070, 4901, 2318, 47, 2327, 719, + 788, 1659, 4174, 2500, 4374, 2409, 4357, 4129, 839, + 2953, 1388, 3226, 2238, 3242, 3880, 551, 3397, 1602, + 1436, 3162, 4591, 782, 4412, 1532, 1707, 1004, 864, + 1100, 1651, 3565, 2559, 2684, 217, 1837, 2250, 1252, + 3665, 2597, 1981, 3958, 3590, 2398, 720, 3024, 929, + 2255, 3839, 455, 3784, 1241, 1398, 4314, 2668, 2095, + 3108, 
4498, 4826, 404, 1335, 3699, 2423, 4435, 2207, + 4232, 1464, 4648, 2484, 4158, 3121, 1348, 1596, 4765, + 2342, 1506, 350, 2676, 3504, 4032, 1944, 4722, 1493, + 2160, 201, 2925, 1950, 2791, 590, 4079, 148, 1035, + 842, 437, 1003, 442, 3839, 2868, 1346], + [1378, 4438, 246, 3641, 1179, 408, 2180, 3191, 2130, + 4167, 1266, 1649, 188, 2711, 1942, 4976, 3808, 3877, + 2481, 3078, 2364, 4622, 2869, 1501, 3525, 1686, 2064, + 3901, 389, 147, 1230, 3244, 3536, 913, 2033, 1407, + 4324, 4702, 3958, 1556, 263, 2245, 172, 197, 2320, + 2731, 2855, 4034, 1698, 2794, 1063, 3044, 2192, 972, + 40, 3354, 2302, 2492, 4946, 3286, 3916, 535, 3175, + 3544, 2129, 1102, 1733, 2623, 706, 1707, 447, 1925, + 4128, 2690, 1814, 430, 2586, 1598, 3641, 2838, 4752, + 177, 4176, 1504, 4488, 3088, 4814, 2349, 1038, 1432, + 4472, 4205, 2116, 1626, 580, 4474, 5, 44, 417, + 268, 4705, 854, 1913, 4573, 2121, 2068, 454, 4813, + 3734, 1207, 368, 3832, 1707, 4389, 223, 3056, 937, + 1016, 320, 4526, 4145, 4364, 2361, 36, 1814, 888, + 3507, 1320, 2980, 1110, 2014, 4627, 1701, 3819, 14, + 1240, 2585, 2028, 633, 1285, 2319, 360, 1840, 185, + 1006, 3832, 3435, 4876, 1580, 4458, 2446, 1321, 1287, + 1949, 1941, 4987, 1169, 571, 4572, 3918, 565, 4858, + 2042, 3925, 1256, 2053, 1923, 2637, 2813, 1329, 1914, + 4829, 4333, 4527, 3619, 2568, 152, 989, 2375, 878, + 3248, 3303, 1409, 4851, 4052, 3567, 4170, 1833, 4463, + 466, 3894, 4620, 3257, 1144, 4180, 3303, 1161, 582, + 732, 1628, 90, 2562, 3556, 2283, 4937, 1, 581, + 2214, 1614, 3724, 1585, 560, 4588, 2906, 522, 4602, + 1059, 3546, 4904, 963, 2445, 3032, 1449, 1258, 1792, + 3205, 1779, 4698, 4617, 4679, 3549, 526, 3656, 2188, + 2886, 3531, 2301, 995, 2576, 4401, 3881, 4704, 1184, + 2297, 4829, 3869, 3196, 1355, 2316, 4076]]), + values=tensor([0.9352, 0.0437, 0.9994, 0.0995, 0.5329, 0.3617, 0.9993, + 0.7981, 0.9436, 0.3157, 0.0822, 0.6699, 0.6975, 0.6556, + 0.3895, 0.6913, 0.9268, 0.4772, 0.5624, 0.8425, 0.5464, + 0.4903, 0.7584, 0.8770, 0.8406, 0.6819, 0.6797, 0.6093, + 0.6712, 0.0553, 0.2899, 0.6849, 0.7443, 0.9740, 0.0908, + 0.9469, 0.1069, 0.7675, 0.1705, 0.2410, 0.9005, 0.0315, + 0.6337, 0.7062, 0.1707, 0.7706, 0.8960, 0.4628, 0.4589, + 0.9681, 0.0605, 0.0061, 0.8557, 0.6310, 0.2477, 0.0540, + 0.3141, 0.0870, 0.3466, 0.7128, 0.0101, 0.9338, 0.5551, + 0.3969, 0.2840, 0.2360, 0.5860, 0.2145, 0.1199, 0.7155, + 0.4094, 0.5045, 0.8322, 0.2074, 0.9382, 0.6930, 0.6146, + 0.2724, 0.6235, 0.3790, 0.3492, 0.8556, 0.0087, 0.5117, + 0.1080, 0.6333, 0.9266, 0.1584, 0.6835, 0.6539, 0.9436, + 0.2705, 0.0688, 0.8272, 0.6894, 0.5123, 0.0454, 0.4270, + 0.9862, 0.4902, 0.8677, 0.7444, 0.5288, 0.6545, 0.8383, + 0.3530, 0.7729, 0.9581, 0.3600, 0.6907, 0.6444, 0.1023, + 0.3625, 0.3298, 0.4150, 0.7732, 0.9269, 0.9577, 0.0855, + 0.8310, 0.7494, 0.2169, 0.0042, 0.5551, 0.7341, 0.3303, + 0.9187, 0.8432, 0.4774, 0.3086, 0.5701, 0.6327, 0.7272, + 0.8355, 0.7561, 0.4454, 0.6163, 0.7916, 0.6582, 0.5194, + 0.8253, 0.2183, 0.7798, 0.0066, 0.6794, 0.3608, 0.2119, + 0.0755, 0.4612, 0.9928, 0.4613, 0.9666, 0.3813, 0.5349, + 0.8863, 0.4788, 0.2940, 0.5191, 0.5346, 0.7816, 0.8786, + 0.7804, 0.3957, 0.2060, 0.9546, 0.0519, 0.3106, 0.8635, + 0.3171, 0.7097, 0.7760, 0.5015, 0.4481, 0.9506, 0.7207, + 0.5922, 0.9201, 0.6481, 0.9442, 0.6667, 0.2737, 0.5447, + 0.6421, 0.0304, 0.8637, 0.7936, 0.7537, 0.5853, 0.8180, + 0.7957, 0.9005, 0.0996, 0.8031, 0.5175, 0.3175, 0.5673, + 0.6515, 0.9234, 0.1166, 0.7563, 0.8088, 0.0152, 0.4627, + 0.0057, 0.2710, 0.6652, 0.3647, 0.8513, 0.4901, 0.3717, + 0.2591, 0.5842, 0.4992, 0.3024, 0.1369, 
0.7559, 0.3801, + 0.1185, 0.0956, 0.5270, 0.0725, 0.6279, 0.0854, 0.4070, + 0.0183, 0.5668, 0.8248, 0.4906, 0.2435, 0.1101, 0.9901, + 0.7481, 0.7403, 0.2835, 0.7377, 0.5362, 0.4786, 0.6534, + 0.0082, 0.7563, 0.3299, 0.8591, 0.3018, 0.2367, 0.2207, + 0.4544, 0.8430, 0.8384, 0.9546, 0.8552]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.5879, 0.6996, 0.0883, ..., 0.7806, 0.0252, 0.8592]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.009576559066772461 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '109642', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.2755417823791504} + +tensor(indices=tensor([[2771, 2463, 1524, 2584, 4199, 3993, 2102, 765, 3424, + 427, 4624, 1827, 1682, 2491, 2222, 2698, 2875, 1422, + 4005, 4312, 1349, 4638, 4193, 2351, 2034, 762, 3884, + 3323, 1639, 2080, 296, 1978, 4913, 3293, 4501, 3456, + 148, 2522, 1061, 1630, 781, 164, 4222, 3599, 178, + 1303, 4771, 4095, 893, 1113, 190, 4285, 4444, 3016, + 784, 2307, 2766, 65, 4090, 4782, 998, 3763, 1137, + 3102, 1190, 547, 991, 3413, 4911, 1257, 3126, 757, + 3614, 2611, 4203, 3993, 2984, 30, 360, 2613, 2184, + 435, 3448, 3250, 2896, 1704, 2935, 1712, 683, 3453, + 2036, 2962, 1842, 3782, 3229, 1585, 1467, 325, 3755, + 219, 4803, 4471, 4345, 4167, 2846, 3130, 551, 3323, + 4756, 1191, 579, 2785, 1172, 1775, 4268, 2795, 344, + 4530, 3585, 374, 4493, 3592, 4978, 4143, 3579, 1665, + 893, 105, 1237, 4364, 1579, 1126, 736, 4780, 2456, + 609, 2611, 971, 4263, 1263, 2544, 1350, 4811, 4710, + 3049, 1985, 134, 4707, 920, 1785, 4907, 4494, 854, + 1051, 4154, 477, 755, 1860, 1741, 3953, 324, 1016, + 4536, 2443, 104, 2985, 2713, 4806, 1232, 1226, 831, + 1035, 2794, 3914, 1207, 2507, 1407, 1271, 351, 1911, + 2482, 3709, 1728, 3405, 2516, 4230, 995, 1690, 3652, + 2620, 4759, 3805, 4989, 4503, 913, 3943, 264, 321, + 2780, 2940, 2169, 2653, 4696, 3261, 3929, 3241, 1895, + 141, 1003, 3494, 2590, 4458, 3462, 740, 308, 19, + 938, 31, 571, 361, 4325, 2355, 848, 3889, 4775, + 2354, 3448, 3694, 4809, 483, 580, 4816, 2768, 2727, + 4676, 4486, 2282, 2638, 1747, 4860, 3931, 4087, 3087, + 1474, 383, 525, 315, 916, 3600, 1553], + [4777, 421, 2826, 1805, 3722, 712, 3305, 3852, 3845, + 3902, 3409, 2203, 1043, 4770, 1491, 4731, 1449, 1295, + 4382, 2451, 2148, 1230, 4442, 2801, 1086, 3330, 2617, + 4368, 1090, 1611, 1863, 1347, 2796, 1051, 1733, 2003, + 2320, 1421, 1187, 1284, 3701, 902, 889, 2955, 3819, + 2710, 3796, 3981, 4309, 4758, 4864, 4407, 2026, 499, + 2818, 1767, 3939, 3754, 4777, 820, 1872, 3028, 2019, + 3257, 1417, 2356, 3824, 3881, 66, 4190, 1741, 682, + 2959, 477, 3628, 1785, 2017, 4668, 4918, 982, 4550, + 2726, 924, 2825, 50, 967, 502, 1511, 2346, 1676, + 3474, 4791, 134, 4105, 283, 1373, 1162, 4621, 4567, + 3667, 2666, 4125, 837, 619, 3775, 3592, 994, 2165, + 3766, 175, 4769, 3508, 1920, 4957, 792, 1845, 4680, + 317, 1926, 2433, 1585, 798, 791, 3229, 953, 2060, + 3968, 2464, 2066, 2557, 4122, 4167, 3514, 4218, 4129, + 1826, 460, 348, 1835, 1818, 3447, 2002, 3111, 4778, + 3923, 1761, 621, 3469, 4725, 3404, 3129, 1515, 1838, + 3807, 1820, 1716, 4006, 2587, 4456, 1093, 931, 2456, + 3000, 4807, 1412, 4856, 1078, 1562, 4016, 1830, 2196, + 314, 3475, 
2844, 2100, 152, 1403, 4135, 904, 3160, + 4981, 3340, 854, 4243, 3823, 4127, 1388, 2662, 4617, + 2249, 2173, 529, 3770, 1608, 315, 1836, 2057, 2373, + 268, 1820, 4412, 2515, 4014, 448, 3294, 1307, 3411, + 4755, 400, 3273, 2941, 2862, 1934, 1217, 708, 4254, + 3500, 3268, 3553, 3038, 4541, 4884, 2225, 1457, 4688, + 1536, 738, 1420, 4621, 2999, 4186, 362, 1290, 2763, + 1455, 216, 1000, 1698, 2515, 2164, 938, 883, 4961, + 2475, 712, 1795, 4772, 3326, 2459, 4653]]), + values=tensor([2.1372e-01, 9.0755e-01, 7.2700e-02, 8.8764e-01, + 3.8572e-01, 5.9833e-01, 6.9243e-01, 2.7260e-01, + 8.9714e-01, 1.4138e-01, 8.9130e-02, 6.5834e-01, + 8.7112e-01, 9.0933e-01, 9.0372e-01, 9.7558e-01, + 1.1379e-01, 2.8514e-01, 1.5759e-01, 3.6366e-01, + 8.6862e-01, 8.4160e-01, 6.0091e-01, 4.1866e-01, + 5.2715e-01, 3.1149e-01, 5.8793e-01, 6.0171e-01, + 7.9107e-01, 4.7852e-01, 7.1376e-01, 6.1304e-01, + 6.9397e-03, 9.9617e-01, 4.8142e-01, 4.7029e-02, + 4.9170e-01, 8.4825e-01, 7.6293e-02, 9.3935e-01, + 3.8689e-01, 3.4100e-02, 5.6340e-02, 3.6969e-01, + 7.2751e-02, 4.0314e-01, 4.3080e-01, 7.9986e-02, + 2.3793e-01, 5.5069e-01, 2.4102e-01, 5.0579e-01, + 8.5295e-01, 8.5144e-02, 8.2390e-01, 5.5736e-01, + 5.0944e-02, 5.9255e-01, 1.8967e-01, 7.5560e-01, + 7.1157e-01, 9.3837e-01, 5.8539e-02, 7.1933e-01, + 7.0337e-01, 9.5005e-01, 3.1080e-01, 7.3607e-01, + 4.4071e-02, 8.5407e-01, 1.6511e-01, 4.2693e-01, + 7.7688e-01, 1.8903e-01, 9.2954e-01, 5.6032e-04, + 4.0511e-01, 2.9470e-01, 4.1723e-01, 7.8482e-01, + 7.0775e-02, 2.3705e-01, 4.5789e-01, 8.5520e-01, + 5.4666e-02, 7.1095e-01, 9.0722e-01, 2.0950e-01, + 3.0023e-01, 2.6386e-02, 8.3028e-02, 8.4287e-01, + 1.4028e-01, 8.0877e-01, 3.3759e-01, 5.0557e-01, + 6.8215e-01, 2.4444e-01, 5.5512e-01, 3.6207e-01, + 3.4283e-01, 8.9251e-01, 8.7378e-01, 4.4457e-01, + 5.8663e-01, 2.4177e-01, 1.0030e-01, 6.1962e-02, + 5.6720e-01, 7.3179e-01, 1.2491e-01, 4.3403e-01, + 6.5021e-01, 3.2597e-02, 9.8014e-01, 5.5806e-01, + 4.1539e-01, 2.8371e-01, 7.4446e-01, 6.7835e-01, + 1.0862e-01, 2.6972e-01, 7.6401e-01, 8.2875e-01, + 8.9141e-01, 1.6565e-01, 6.7638e-03, 4.5970e-01, + 3.2321e-01, 9.3953e-01, 7.5674e-01, 8.8040e-01, + 3.2279e-01, 1.1500e-01, 9.3941e-02, 2.0111e-01, + 2.5466e-01, 2.4058e-01, 9.1816e-01, 5.7130e-01, + 9.9392e-01, 6.2353e-01, 2.8480e-01, 5.7722e-01, + 2.4293e-01, 7.6359e-01, 5.2775e-03, 1.1386e-01, + 2.9204e-01, 6.1642e-02, 7.5416e-01, 1.2335e-01, + 2.7984e-01, 4.2800e-01, 7.1757e-01, 1.4936e-01, + 8.8247e-01, 1.8334e-01, 3.1354e-01, 9.2904e-03, + 2.9991e-01, 4.1631e-01, 2.4596e-01, 8.1734e-01, + 3.1240e-01, 1.8540e-02, 3.0496e-01, 1.9830e-01, + 2.2610e-01, 6.6767e-01, 2.6102e-01, 9.7133e-01, + 5.8787e-02, 4.8214e-01, 8.6921e-01, 6.4564e-01, + 1.8226e-01, 3.7187e-02, 6.2079e-01, 2.4138e-02, + 5.9806e-01, 8.8357e-01, 1.9777e-01, 3.7196e-01, + 7.2278e-01, 9.4728e-01, 2.7519e-01, 7.1086e-01, + 4.9566e-01, 4.7897e-01, 6.8266e-01, 7.7770e-01, + 9.0472e-01, 3.9425e-01, 1.5475e-01, 5.0695e-01, + 1.4869e-01, 1.7896e-02, 9.9033e-01, 5.2035e-01, + 1.1451e-01, 9.5150e-01, 6.6662e-01, 1.4237e-04, + 2.7819e-01, 9.0215e-02, 5.3211e-01, 4.6277e-01, + 7.5314e-01, 1.1906e-01, 5.0509e-01, 4.9929e-01, + 8.6112e-02, 7.1571e-01, 1.0421e-01, 4.1471e-01, + 9.4236e-01, 1.5935e-01, 9.8902e-01, 9.6206e-03, + 8.4932e-01, 2.4450e-01, 3.3489e-01, 1.6199e-02, + 4.3995e-01, 5.3283e-02, 5.9930e-01, 7.7285e-01, + 3.5164e-01, 1.3672e-01, 5.0499e-01, 7.4237e-01, + 7.2329e-01, 9.8163e-01, 7.0178e-01, 7.3562e-01, + 7.5516e-01, 5.9631e-01, 9.3437e-01, 1.1083e-01, + 1.2579e-03, 1.6805e-01, 9.9098e-01, 8.7904e-01, + 7.0813e-01, 
9.7652e-01, 2.0325e-01, 8.9776e-01, + 8.4596e-01, 8.1570e-01]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.9682, 0.7083, 0.9084, ..., 0.3706, 0.1441, 0.8707]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.2755417823791504 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '902550', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.34853196144104} + +tensor(indices=tensor([[ 306, 2613, 3875, 3610, 4802, 3108, 679, 4515, 3236, + 3535, 2174, 1828, 3315, 4451, 933, 661, 4429, 3191, + 454, 4316, 2214, 4814, 48, 2929, 88, 2585, 1848, + 1291, 2475, 3162, 3177, 3064, 3576, 4563, 4392, 1512, + 631, 4599, 2823, 4247, 621, 2000, 3260, 1189, 899, + 838, 211, 2999, 1230, 4078, 687, 3886, 2320, 2446, + 1512, 314, 1225, 470, 1431, 3308, 1061, 4285, 2548, + 1098, 3248, 4986, 3350, 3008, 3107, 60, 226, 362, + 2853, 4165, 3909, 2143, 897, 3903, 4172, 3106, 3057, + 3260, 3644, 654, 2290, 3852, 89, 67, 1892, 1327, + 2351, 368, 4972, 4134, 1558, 3747, 2440, 3479, 2420, + 1773, 1362, 3489, 1342, 159, 1673, 2879, 393, 1416, + 4827, 1336, 3, 2544, 1410, 263, 4991, 1466, 1863, + 4133, 123, 1086, 337, 3665, 3762, 3375, 462, 3822, + 2401, 3455, 396, 4130, 2956, 2703, 3290, 472, 4157, + 4078, 4580, 3148, 2953, 4751, 4355, 4498, 4324, 3350, + 1236, 3102, 1146, 3653, 4949, 1461, 3913, 1378, 4950, + 317, 1219, 2427, 860, 3246, 3636, 1545, 3986, 3850, + 2547, 807, 1356, 366, 3380, 2318, 48, 1482, 451, + 3723, 4512, 1120, 709, 548, 141, 4394, 225, 864, + 2225, 4396, 276, 129, 1252, 2103, 1403, 4317, 2228, + 1930, 59, 734, 2669, 4535, 4022, 1175, 2333, 2756, + 3571, 298, 3935, 3318, 901, 2326, 832, 4913, 4956, + 1121, 1295, 3690, 4289, 4036, 984, 4072, 2346, 2619, + 4057, 1777, 3717, 1750, 1479, 1131, 3780, 3342, 1042, + 2164, 2489, 2706, 3844, 4091, 91, 2822, 3795, 2397, + 4082, 2613, 4057, 3111, 1009, 4636, 1315, 4978, 1563, + 1761, 3275, 628, 2050, 1606, 2773, 3352], + [3216, 1608, 241, 4457, 3059, 4752, 25, 3894, 4019, + 2943, 2983, 1613, 1584, 3296, 499, 2341, 1914, 1445, + 2303, 3153, 844, 290, 4411, 2873, 3114, 1547, 820, + 3002, 1891, 1811, 1468, 3494, 1235, 568, 569, 3092, + 583, 4546, 1230, 4259, 3225, 3059, 4669, 3744, 672, + 732, 4063, 2653, 4537, 4042, 3797, 2428, 1625, 2519, + 4549, 157, 3174, 4182, 833, 4091, 523, 4888, 2657, + 1313, 2636, 3131, 428, 1899, 2250, 1001, 2012, 3690, + 2242, 388, 3651, 2556, 19, 2905, 3493, 1988, 3410, + 3390, 255, 1377, 931, 3117, 4200, 45, 3312, 1087, + 1479, 2478, 1669, 2052, 2631, 4330, 1018, 1300, 1439, + 3474, 2541, 2949, 981, 4076, 281, 234, 1287, 1971, + 3957, 4243, 3502, 1312, 193, 3408, 3104, 1244, 2876, + 517, 2845, 72, 4435, 3172, 4658, 3745, 1398, 4643, + 66, 807, 3691, 3927, 1869, 2348, 819, 100, 1268, + 668, 1920, 858, 1028, 3522, 4424, 3515, 85, 2589, + 4511, 645, 1502, 443, 4086, 3561, 672, 502, 511, + 881, 83, 1037, 1820, 4111, 1276, 2708, 3205, 4575, + 2066, 392, 2646, 4643, 707, 20, 156, 436, 94, + 2102, 3165, 4716, 1237, 3831, 1423, 2671, 3396, 4578, + 1979, 867, 279, 4613, 2117, 4746, 4483, 3251, 2186, + 3550, 2991, 4162, 2234, 2851, 1864, 3902, 1317, 1056, + 2692, 85, 1498, 1195, 3483, 3386, 669, 4109, 1654, + 3546, 621, 2603, 3617, 2978, 284, 3404, 4539, 
2219, + 1560, 4479, 2054, 1356, 2565, 1342, 4964, 4263, 2652, + 3813, 4075, 3358, 4777, 1640, 3025, 3169, 1554, 974, + 3534, 1909, 4670, 793, 1762, 4383, 3637, 519, 857, + 4490, 219, 4697, 887, 771, 2758, 3772]]), + values=tensor([9.2062e-01, 3.9701e-02, 3.6965e-01, 5.7675e-01, + 6.4319e-01, 4.6791e-01, 7.8892e-01, 1.5433e-01, + 9.0486e-02, 3.3103e-01, 3.6038e-01, 2.5012e-01, + 9.9839e-01, 3.7065e-01, 7.9388e-01, 7.4908e-01, + 5.9847e-01, 4.6273e-01, 8.4695e-01, 2.3829e-01, + 4.0326e-01, 3.7050e-01, 4.1837e-01, 5.7153e-01, + 9.4207e-01, 5.4265e-01, 3.0184e-01, 7.4221e-01, + 1.2275e-01, 5.7861e-01, 6.9903e-01, 4.6735e-01, + 7.6452e-01, 5.4519e-01, 4.5348e-01, 6.0431e-01, + 1.5789e-01, 3.2781e-03, 6.4889e-01, 1.0464e-01, + 5.0995e-01, 6.2281e-01, 1.5779e-01, 6.3989e-01, + 4.6426e-01, 4.0541e-01, 5.3563e-01, 4.6301e-01, + 1.9044e-01, 3.1216e-01, 2.7079e-01, 2.9108e-01, + 5.4173e-01, 4.5645e-01, 3.0261e-01, 5.9907e-01, + 4.9237e-02, 5.6570e-01, 7.0277e-01, 2.9601e-01, + 1.4712e-01, 6.2370e-01, 8.2546e-01, 7.8002e-01, + 8.2635e-01, 1.1076e-01, 5.4241e-02, 1.1735e-01, + 4.2309e-01, 5.9747e-02, 9.3844e-01, 5.9598e-01, + 6.6091e-01, 8.8695e-01, 5.3397e-01, 1.4632e-01, + 6.3034e-01, 2.8510e-01, 6.2160e-01, 1.8690e-02, + 7.3228e-01, 5.5485e-01, 4.1142e-01, 4.1379e-01, + 1.7401e-01, 5.8853e-01, 8.6117e-01, 8.7734e-01, + 2.3568e-01, 9.3668e-01, 3.9184e-01, 4.8996e-01, + 1.2349e-01, 4.3263e-01, 5.4776e-01, 6.0004e-01, + 2.2715e-01, 7.4688e-01, 7.3781e-01, 5.8731e-01, + 8.8322e-01, 4.1334e-01, 8.3901e-02, 2.7478e-02, + 8.6491e-01, 8.2487e-01, 3.3455e-01, 2.1982e-01, + 9.4380e-01, 7.0682e-02, 1.7546e-01, 3.7645e-01, + 4.4185e-01, 6.1556e-01, 3.4127e-01, 3.7162e-01, + 9.2115e-01, 7.9456e-01, 6.2735e-02, 6.1302e-01, + 2.4051e-01, 5.7595e-01, 3.9996e-01, 2.5947e-02, + 1.6728e-01, 8.9707e-01, 9.7375e-01, 6.4334e-01, + 4.6448e-02, 6.5447e-01, 4.4558e-01, 2.6167e-01, + 4.9067e-01, 1.6287e-01, 6.2353e-02, 8.3374e-01, + 1.0030e-02, 7.5462e-01, 1.9327e-01, 2.6345e-01, + 5.4946e-01, 2.3292e-01, 3.1699e-01, 5.5393e-01, + 9.8630e-01, 9.1209e-02, 7.6698e-01, 6.9951e-01, + 7.2779e-01, 3.9999e-01, 2.9197e-01, 6.6399e-01, + 7.9303e-02, 1.5227e-01, 3.6460e-01, 7.1880e-01, + 9.7164e-01, 7.2743e-01, 1.5051e-01, 1.5918e-01, + 7.3321e-02, 9.2513e-01, 8.7543e-01, 4.3056e-01, + 2.0387e-01, 3.0620e-01, 1.6288e-01, 4.5819e-01, + 2.2828e-01, 5.0489e-01, 9.0777e-01, 4.2606e-01, + 4.6666e-01, 4.3093e-01, 1.4174e-01, 3.3851e-01, + 3.9002e-02, 3.4278e-01, 2.9888e-01, 8.7555e-01, + 7.2429e-01, 9.7080e-01, 1.5119e-01, 3.8583e-01, + 9.6628e-01, 8.9424e-01, 9.3835e-01, 1.3075e-01, + 8.1473e-01, 5.4337e-03, 5.3563e-01, 4.7457e-01, + 8.5956e-01, 9.4455e-02, 1.9129e-01, 8.5819e-01, + 2.9763e-01, 7.9352e-01, 1.8035e-01, 4.6759e-01, + 5.3902e-01, 7.4220e-01, 4.6249e-01, 7.1035e-01, + 1.8051e-01, 7.0845e-01, 9.9192e-01, 2.4013e-01, + 3.8797e-01, 4.0880e-01, 3.8605e-01, 5.6473e-01, + 5.6851e-01, 7.3082e-01, 4.0350e-01, 6.8686e-01, + 6.1003e-01, 7.2511e-01, 7.6318e-01, 6.0505e-01, + 6.7644e-01, 3.7277e-01, 2.0771e-02, 1.7390e-01, + 4.0508e-01, 8.5314e-01, 6.3482e-01, 2.0897e-01, + 6.7366e-01, 9.3616e-01, 4.6320e-01, 2.8972e-01, + 6.4920e-01, 2.0384e-01, 5.9353e-01, 1.2870e-01, + 6.3852e-04, 2.4399e-01, 1.7368e-01, 5.8490e-02, + 6.6522e-01, 7.2997e-01, 7.5063e-01, 2.1039e-02, + 6.3538e-01, 1.0710e-01, 3.3675e-01, 7.9830e-01, + 6.4726e-01, 3.1141e-01]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.4524, 0.0586, 0.3134, ..., 0.6494, 0.7756, 0.1103]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) 
+Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.34853196144104 seconds + +tensor(indices=tensor([[ 306, 2613, 3875, 3610, 4802, 3108, 679, 4515, 3236, + 3535, 2174, 1828, 3315, 4451, 933, 661, 4429, 3191, + 454, 4316, 2214, 4814, 48, 2929, 88, 2585, 1848, + 1291, 2475, 3162, 3177, 3064, 3576, 4563, 4392, 1512, + 631, 4599, 2823, 4247, 621, 2000, 3260, 1189, 899, + 838, 211, 2999, 1230, 4078, 687, 3886, 2320, 2446, + 1512, 314, 1225, 470, 1431, 3308, 1061, 4285, 2548, + 1098, 3248, 4986, 3350, 3008, 3107, 60, 226, 362, + 2853, 4165, 3909, 2143, 897, 3903, 4172, 3106, 3057, + 3260, 3644, 654, 2290, 3852, 89, 67, 1892, 1327, + 2351, 368, 4972, 4134, 1558, 3747, 2440, 3479, 2420, + 1773, 1362, 3489, 1342, 159, 1673, 2879, 393, 1416, + 4827, 1336, 3, 2544, 1410, 263, 4991, 1466, 1863, + 4133, 123, 1086, 337, 3665, 3762, 3375, 462, 3822, + 2401, 3455, 396, 4130, 2956, 2703, 3290, 472, 4157, + 4078, 4580, 3148, 2953, 4751, 4355, 4498, 4324, 3350, + 1236, 3102, 1146, 3653, 4949, 1461, 3913, 1378, 4950, + 317, 1219, 2427, 860, 3246, 3636, 1545, 3986, 3850, + 2547, 807, 1356, 366, 3380, 2318, 48, 1482, 451, + 3723, 4512, 1120, 709, 548, 141, 4394, 225, 864, + 2225, 4396, 276, 129, 1252, 2103, 1403, 4317, 2228, + 1930, 59, 734, 2669, 4535, 4022, 1175, 2333, 2756, + 3571, 298, 3935, 3318, 901, 2326, 832, 4913, 4956, + 1121, 1295, 3690, 4289, 4036, 984, 4072, 2346, 2619, + 4057, 1777, 3717, 1750, 1479, 1131, 3780, 3342, 1042, + 2164, 2489, 2706, 3844, 4091, 91, 2822, 3795, 2397, + 4082, 2613, 4057, 3111, 1009, 4636, 1315, 4978, 1563, + 1761, 3275, 628, 2050, 1606, 2773, 3352], + [3216, 1608, 241, 4457, 3059, 4752, 25, 3894, 4019, + 2943, 2983, 1613, 1584, 3296, 499, 2341, 1914, 1445, + 2303, 3153, 844, 290, 4411, 2873, 3114, 1547, 820, + 3002, 1891, 1811, 1468, 3494, 1235, 568, 569, 3092, + 583, 4546, 1230, 4259, 3225, 3059, 4669, 3744, 672, + 732, 4063, 2653, 4537, 4042, 3797, 2428, 1625, 2519, + 4549, 157, 3174, 4182, 833, 4091, 523, 4888, 2657, + 1313, 2636, 3131, 428, 1899, 2250, 1001, 2012, 3690, + 2242, 388, 3651, 2556, 19, 2905, 3493, 1988, 3410, + 3390, 255, 1377, 931, 3117, 4200, 45, 3312, 1087, + 1479, 2478, 1669, 2052, 2631, 4330, 1018, 1300, 1439, + 3474, 2541, 2949, 981, 4076, 281, 234, 1287, 1971, + 3957, 4243, 3502, 1312, 193, 3408, 3104, 1244, 2876, + 517, 2845, 72, 4435, 3172, 4658, 3745, 1398, 4643, + 66, 807, 3691, 3927, 1869, 2348, 819, 100, 1268, + 668, 1920, 858, 1028, 3522, 4424, 3515, 85, 2589, + 4511, 645, 1502, 443, 4086, 3561, 672, 502, 511, + 881, 83, 1037, 1820, 4111, 1276, 2708, 3205, 4575, + 2066, 392, 2646, 4643, 707, 20, 156, 436, 94, + 2102, 3165, 4716, 1237, 3831, 1423, 2671, 3396, 4578, + 1979, 867, 279, 4613, 2117, 4746, 4483, 3251, 2186, + 3550, 2991, 4162, 2234, 2851, 1864, 3902, 1317, 1056, + 2692, 85, 1498, 1195, 3483, 3386, 669, 4109, 1654, + 3546, 621, 2603, 3617, 2978, 284, 3404, 4539, 2219, + 1560, 4479, 2054, 1356, 2565, 1342, 4964, 4263, 2652, + 3813, 4075, 3358, 4777, 1640, 3025, 3169, 1554, 974, + 3534, 1909, 4670, 793, 1762, 4383, 3637, 519, 857, + 4490, 219, 4697, 887, 771, 2758, 3772]]), + values=tensor([9.2062e-01, 3.9701e-02, 3.6965e-01, 5.7675e-01, + 6.4319e-01, 4.6791e-01, 7.8892e-01, 1.5433e-01, + 9.0486e-02, 3.3103e-01, 3.6038e-01, 2.5012e-01, + 9.9839e-01, 3.7065e-01, 7.9388e-01, 7.4908e-01, + 5.9847e-01, 4.6273e-01, 8.4695e-01, 2.3829e-01, + 4.0326e-01, 3.7050e-01, 4.1837e-01, 5.7153e-01, + 9.4207e-01, 5.4265e-01, 3.0184e-01, 7.4221e-01, + 1.2275e-01, 5.7861e-01, 6.9903e-01, 4.6735e-01, + 7.6452e-01, 5.4519e-01, 4.5348e-01, 
6.0431e-01, + 1.5789e-01, 3.2781e-03, 6.4889e-01, 1.0464e-01, + 5.0995e-01, 6.2281e-01, 1.5779e-01, 6.3989e-01, + 4.6426e-01, 4.0541e-01, 5.3563e-01, 4.6301e-01, + 1.9044e-01, 3.1216e-01, 2.7079e-01, 2.9108e-01, + 5.4173e-01, 4.5645e-01, 3.0261e-01, 5.9907e-01, + 4.9237e-02, 5.6570e-01, 7.0277e-01, 2.9601e-01, + 1.4712e-01, 6.2370e-01, 8.2546e-01, 7.8002e-01, + 8.2635e-01, 1.1076e-01, 5.4241e-02, 1.1735e-01, + 4.2309e-01, 5.9747e-02, 9.3844e-01, 5.9598e-01, + 6.6091e-01, 8.8695e-01, 5.3397e-01, 1.4632e-01, + 6.3034e-01, 2.8510e-01, 6.2160e-01, 1.8690e-02, + 7.3228e-01, 5.5485e-01, 4.1142e-01, 4.1379e-01, + 1.7401e-01, 5.8853e-01, 8.6117e-01, 8.7734e-01, + 2.3568e-01, 9.3668e-01, 3.9184e-01, 4.8996e-01, + 1.2349e-01, 4.3263e-01, 5.4776e-01, 6.0004e-01, + 2.2715e-01, 7.4688e-01, 7.3781e-01, 5.8731e-01, + 8.8322e-01, 4.1334e-01, 8.3901e-02, 2.7478e-02, + 8.6491e-01, 8.2487e-01, 3.3455e-01, 2.1982e-01, + 9.4380e-01, 7.0682e-02, 1.7546e-01, 3.7645e-01, + 4.4185e-01, 6.1556e-01, 3.4127e-01, 3.7162e-01, + 9.2115e-01, 7.9456e-01, 6.2735e-02, 6.1302e-01, + 2.4051e-01, 5.7595e-01, 3.9996e-01, 2.5947e-02, + 1.6728e-01, 8.9707e-01, 9.7375e-01, 6.4334e-01, + 4.6448e-02, 6.5447e-01, 4.4558e-01, 2.6167e-01, + 4.9067e-01, 1.6287e-01, 6.2353e-02, 8.3374e-01, + 1.0030e-02, 7.5462e-01, 1.9327e-01, 2.6345e-01, + 5.4946e-01, 2.3292e-01, 3.1699e-01, 5.5393e-01, + 9.8630e-01, 9.1209e-02, 7.6698e-01, 6.9951e-01, + 7.2779e-01, 3.9999e-01, 2.9197e-01, 6.6399e-01, + 7.9303e-02, 1.5227e-01, 3.6460e-01, 7.1880e-01, + 9.7164e-01, 7.2743e-01, 1.5051e-01, 1.5918e-01, + 7.3321e-02, 9.2513e-01, 8.7543e-01, 4.3056e-01, + 2.0387e-01, 3.0620e-01, 1.6288e-01, 4.5819e-01, + 2.2828e-01, 5.0489e-01, 9.0777e-01, 4.2606e-01, + 4.6666e-01, 4.3093e-01, 1.4174e-01, 3.3851e-01, + 3.9002e-02, 3.4278e-01, 2.9888e-01, 8.7555e-01, + 7.2429e-01, 9.7080e-01, 1.5119e-01, 3.8583e-01, + 9.6628e-01, 8.9424e-01, 9.3835e-01, 1.3075e-01, + 8.1473e-01, 5.4337e-03, 5.3563e-01, 4.7457e-01, + 8.5956e-01, 9.4455e-02, 1.9129e-01, 8.5819e-01, + 2.9763e-01, 7.9352e-01, 1.8035e-01, 4.6759e-01, + 5.3902e-01, 7.4220e-01, 4.6249e-01, 7.1035e-01, + 1.8051e-01, 7.0845e-01, 9.9192e-01, 2.4013e-01, + 3.8797e-01, 4.0880e-01, 3.8605e-01, 5.6473e-01, + 5.6851e-01, 7.3082e-01, 4.0350e-01, 6.8686e-01, + 6.1003e-01, 7.2511e-01, 7.6318e-01, 6.0505e-01, + 6.7644e-01, 3.7277e-01, 2.0771e-02, 1.7390e-01, + 4.0508e-01, 8.5314e-01, 6.3482e-01, 2.0897e-01, + 6.7366e-01, 9.3616e-01, 4.6320e-01, 2.8972e-01, + 6.4920e-01, 2.0384e-01, 5.9353e-01, 1.2870e-01, + 6.3852e-04, 2.4399e-01, 1.7368e-01, 5.8490e-02, + 6.6522e-01, 7.2997e-01, 7.5063e-01, 2.1039e-02, + 6.3538e-01, 1.0710e-01, 3.3675e-01, 7.9830e-01, + 6.4726e-01, 3.1141e-01]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.4524, 0.0586, 0.3134, ..., 0.6494, 0.7756, 0.1103]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.34853196144104 seconds + +[39.81, 40.27, 38.96, 39.22, 39.03, 38.89, 39.13, 39.07, 38.9, 39.57] +[66.12] +12.818487405776978 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 902550, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.34853196144104, 'TIME_S_1KI': 0.011465882179869304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 847.5583872699738, 'W': 66.12} +[39.81, 40.27, 38.96, 39.22, 39.03, 38.89, 39.13, 39.07, 38.9, 39.57, 39.68, 39.49, 39.0, 38.83, 
39.38, 38.83, 39.13, 39.36, 38.85, 39.24] +705.4900000000002 +35.27450000000001 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 902550, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.34853196144104, 'TIME_S_1KI': 0.011465882179869304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 847.5583872699738, 'W': 66.12, 'J_1KI': 0.9390708406957773, 'W_1KI': 0.07325909921887984, 'W_D': 30.845499999999994, 'J_D': 395.3926532748937, 'W_D_1KI': 0.034175945930973346, 'J_D_1KI': 3.786598629546657e-05} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..3d5f606 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 258935, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.24148416519165, "TIME_S_1KI": 0.03955233616618707, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 840.220828986168, "W": 66.18, "J_1KI": 3.244910224520316, "W_1KI": 0.25558537856991137, "W_D": 30.746000000000002, "J_D": 390.3510064673424, "W_D_1KI": 0.11874022438063608, "J_D_1KI": 0.00045857155031431084} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..b403cfa --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.012699604034423828} + +tensor(indices=tensor([[1535, 1540, 3289, ..., 518, 4582, 424], + [1789, 4912, 3642, ..., 3176, 3940, 610]]), + values=tensor([0.9554, 0.3420, 0.8995, ..., 0.3639, 0.3050, 0.2873]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.6672, 0.6134, 0.3158, ..., 0.8387, 0.6834, 0.8093]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.012699604034423828 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '82679', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.3526804447174072} + +tensor(indices=tensor([[3241, 3119, 845, ..., 4027, 403, 718], + [1623, 3637, 3747, ..., 4878, 1315, 4958]]), + values=tensor([0.1631, 0.9487, 0.6293, ..., 0.9557, 0.9637, 0.3364]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.4113, 0.1819, 0.8804, ..., 0.5658, 0.4005, 0.8604]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 3.3526804447174072 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '258935', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.24148416519165} + +tensor(indices=tensor([[4271, 1026, 1697, ..., 1708, 1234, 3092], + [4474, 4097, 3082, ..., 3772, 923, 3613]]), + values=tensor([0.1472, 0.5851, 0.7428, ..., 0.4348, 0.9587, 0.6151]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.0897, 0.8625, 0.0519, ..., 0.9021, 0.8765, 0.6189]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.24148416519165 seconds + +tensor(indices=tensor([[4271, 1026, 1697, ..., 1708, 1234, 3092], + [4474, 4097, 3082, ..., 3772, 923, 3613]]), + values=tensor([0.1472, 0.5851, 0.7428, ..., 0.4348, 0.9587, 0.6151]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.0897, 0.8625, 0.0519, ..., 0.9021, 0.8765, 0.6189]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.24148416519165 seconds + +[39.45, 38.84, 38.86, 38.9, 39.22, 39.4, 39.32, 38.86, 39.25, 39.18] +[66.18] +12.695993185043335 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 258935, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.24148416519165, 'TIME_S_1KI': 0.03955233616618707, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 840.220828986168, 'W': 66.18} +[39.45, 38.84, 38.86, 38.9, 39.22, 39.4, 39.32, 38.86, 39.25, 39.18, 39.7, 38.74, 39.18, 38.93, 39.07, 39.04, 39.39, 43.94, 39.02, 39.11] +708.6800000000001 +35.434000000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 258935, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.24148416519165, 'TIME_S_1KI': 0.03955233616618707, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 840.220828986168, 'W': 66.18, 'J_1KI': 3.244910224520316, 'W_1KI': 0.25558537856991137, 'W_D': 30.746000000000002, 'J_D': 390.3510064673424, 'W_D_1KI': 0.11874022438063608, 'J_D_1KI': 0.00045857155031431084} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json index a438e21..e67142b 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 65446, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.748796463012695, "TIME_S_1KI": 0.16423916607604278, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1866.1015530323982, "W": 143.13, "J_1KI": 28.51360744785622, "W_1KI": 2.1869938575314, "W_D": 
106.99974999999999, "J_D": 1395.0422668139338, "W_D_1KI": 1.634931852214039, "J_D_1KI": 0.024981386978792274} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 66714, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.461231231689453, "TIME_S_1KI": 0.15680713540920127, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1791.6115726852415, "W": 132.88, "J_1KI": 26.855106464688696, "W_1KI": 1.9917858320592376, "W_D": 96.93974999999999, "J_D": 1307.0317425738572, "W_D_1KI": 1.4530645741523518, "J_D_1KI": 0.02178050445412285} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output index 7f0e014..3d6e8e6 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,54 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.060246944427490234} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.053905487060546875} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 16, 25, ..., 999980, - 999989, 1000000]), - col_indices=tensor([ 4573, 4595, 4948, ..., 71788, 92544, 99741]), - values=tensor([0.3512, 0.1040, 0.2729, ..., 0.2513, 0.9554, 0.9408]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1257, 0.5794, 0.5612, ..., 0.8235, 0.1474, 0.3975]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 0.060246944427490234 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '17428', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.7960927486419678} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 17, ..., 999980, - 999989, 1000000]), - col_indices=tensor([11836, 34889, 39226, ..., 79566, 86668, 94364]), - values=tensor([0.7886, 0.3777, 0.4340, ..., 0.5250, 0.8836, 0.4934]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9435, 0.7532, 0.3829, ..., 0.0561, 0.6547, 0.0145]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 2.7960927486419678 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65446', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.748796463012695} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 17, ..., 999978, +tensor(crow_indices=tensor([ 0, 8, 13, ..., 999979, 999990, 1000000]), - col_indices=tensor([ 6624, 6694, 37331, ..., 71444, 97628, 99166]), - values=tensor([0.8094, 0.0427, 0.0622, ..., 0.4502, 0.4633, 0.1157]), + col_indices=tensor([ 5399, 28611, 38632, ..., 74258, 77167, 84233]), + values=tensor([0.8812, 0.0922, 0.5645, ..., 0.4488, 0.4543, 0.9991]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2357, 0.1643, 0.3206, ..., 0.7759, 0.8620, 0.1771]) +tensor([0.4452, 0.7873, 0.7879, ..., 0.7551, 0.2377, 0.9069]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.748796463012695 seconds +Time: 0.053905487060546875 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19478', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 3.0656049251556396} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 17, ..., 999978, - 999990, 1000000]), - col_indices=tensor([ 6624, 6694, 37331, ..., 71444, 97628, 99166]), - values=tensor([0.8094, 0.0427, 0.0622, ..., 0.4502, 0.4633, 0.1157]), +tensor(crow_indices=tensor([ 0, 9, 19, ..., 999985, + 999991, 1000000]), + col_indices=tensor([ 7446, 17202, 36833, ..., 70909, 76716, 77433]), + values=tensor([0.5834, 0.0757, 0.4016, ..., 0.9038, 0.5261, 0.7581]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2357, 0.1643, 0.3206, ..., 0.7759, 0.8620, 0.1771]) +tensor([0.0103, 0.1480, 0.1219, ..., 0.8946, 0.0388, 0.8580]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +36,50 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.748796463012695 seconds +Time: 3.0656049251556396 seconds -[40.51, 40.31, 40.08, 39.71, 39.8, 39.59, 39.58, 41.48, 39.56, 40.06] -[143.13] -13.037808656692505 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 65446, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.748796463012695, 'TIME_S_1KI': 0.16423916607604278, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1866.1015530323982, 'W': 143.13} -[40.51, 40.31, 40.08, 39.71, 39.8, 39.59, 39.58, 41.48, 39.56, 40.06, 41.48, 39.67, 40.17, 39.98, 41.44, 39.98, 40.5, 40.23, 39.59, 39.82] -722.605 -36.130250000000004 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 65446, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.748796463012695, 'TIME_S_1KI': 0.16423916607604278, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1866.1015530323982, 'W': 143.13, 'J_1KI': 28.51360744785622, 'W_1KI': 2.1869938575314, 'W_D': 106.99974999999999, 'J_D': 1395.0422668139338, 'W_D_1KI': 1.634931852214039, 'J_D_1KI': 0.024981386978792274} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '66714', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.461231231689453} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 30, ..., 999988, + 999991, 1000000]), + col_indices=tensor([27709, 30156, 33688, ..., 70455, 73760, 83220]), + values=tensor([0.5285, 0.4310, 0.0900, ..., 0.7077, 0.0499, 0.7494]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8412, 0.9269, 0.8422, ..., 0.3451, 0.3063, 0.1702]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.461231231689453 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 30, ..., 999988, + 999991, 1000000]), + col_indices=tensor([27709, 30156, 33688, ..., 70455, 73760, 83220]), + values=tensor([0.5285, 0.4310, 0.0900, ..., 0.7077, 0.0499, 0.7494]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8412, 0.9269, 0.8422, ..., 0.3451, 0.3063, 0.1702]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.461231231689453 seconds + +[40.5, 39.66, 40.11, 39.57, 39.63, 39.63, 39.55, 39.48, 39.56, 40.05] +[132.88] +13.48292875289917 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 66714, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.461231231689453, 'TIME_S_1KI': 0.15680713540920127, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1791.6115726852415, 'W': 132.88} +[40.5, 39.66, 40.11, 39.57, 39.63, 39.63, 39.55, 39.48, 39.56, 40.05, 40.81, 42.06, 40.15, 39.68, 39.89, 39.6, 39.82, 39.72, 39.56, 40.91] +718.8050000000001 +35.940250000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 66714, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.461231231689453, 'TIME_S_1KI': 0.15680713540920127, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1791.6115726852415, 'W': 132.88, 'J_1KI': 26.855106464688696, 'W_1KI': 1.9917858320592376, 'W_D': 96.93974999999999, 'J_D': 1307.0317425738572, 'W_D_1KI': 1.4530645741523518, 'J_D_1KI': 0.02178050445412285} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json index d0be3f6..694db45 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4693, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 12.009309530258179, "TIME_S_1KI": 2.558983492490556, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2204.305100774765, "W": 128.95, "J_1KI": 469.7006394150362, "W_1KI": 27.477093543575535, "W_D": 92.7465, "J_D": 1585.4329820008277, "W_D_1KI": 19.76273172810569, "J_D_1KI": 4.211108401471487} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3901, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.46561336517334, "TIME_S_1KI": 2.682802708324363, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1865.2596230363845, "W": 124.74, "J_1KI": 478.14909588218006, "W_1KI": 31.976416303511918, "W_D": 88.61325, "J_D": 1325.0498419995904, "W_D_1KI": 22.715521661112536, "J_D_1KI": 5.8229996567835265} diff --git 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output index 561971c..e401268 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.27472805976867676} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.28267550468444824} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 103, 224, ..., 9999788, - 9999890, 10000000]), - col_indices=tensor([ 311, 3365, 5161, ..., 98602, 99530, 99576]), - values=tensor([0.9917, 0.0583, 0.3712, ..., 0.9136, 0.4986, 0.7909]), +tensor(crow_indices=tensor([ 0, 103, 201, ..., 9999817, + 9999916, 10000000]), + col_indices=tensor([ 1103, 1843, 2136, ..., 95563, 97729, 98839]), + values=tensor([0.5689, 0.1057, 0.5632, ..., 0.4681, 0.0483, 0.5356]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4323, 0.4083, 0.9080, ..., 0.7530, 0.1922, 0.7136]) +tensor([0.5591, 0.4203, 0.6956, ..., 0.1866, 0.2601, 0.3966]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 0.27472805976867676 seconds +Time: 0.28267550468444824 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3821', '-ss', '100000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.548935651779175} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3714', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.995946168899536} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 86, 193, ..., 9999790, - 9999889, 10000000]), - col_indices=tensor([ 598, 3163, 6325, ..., 93333, 94869, 95502]), - values=tensor([0.3479, 0.2007, 0.7107, ..., 0.5121, 0.1193, 0.0296]), +tensor(crow_indices=tensor([ 0, 95, 175, ..., 9999784, + 9999900, 10000000]), + col_indices=tensor([ 1431, 2503, 3033, ..., 95829, 98067, 99882]), + values=tensor([0.4976, 0.5701, 0.5342, ..., 0.4648, 0.3219, 0.3156]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.9967, 0.6546, 0.0107, ..., 0.1473, 0.4856, 0.1261]) +tensor([0.4285, 0.0770, 0.0067, ..., 0.8194, 0.8320, 0.3047]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 8.548935651779175 seconds +Time: 9.995946168899536 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4693', '-ss', '100000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 12.009309530258179} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3901', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.46561336517334} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 80, 177, ..., 9999782, - 9999890, 10000000]), - col_indices=tensor([ 1894, 3295, 3747, ..., 98404, 98823, 99018]), - values=tensor([0.1540, 0.7163, 0.3077, ..., 0.3211, 0.5255, 0.5012]), +tensor(crow_indices=tensor([ 0, 122, 223, ..., 9999792, + 9999895, 10000000]), + col_indices=tensor([ 1544, 1798, 2744, ..., 97459, 97993, 98974]), + values=tensor([0.0071, 0.6500, 0.6831, ..., 0.9318, 0.2280, 0.2243]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8104, 0.7178, 0.6885, ..., 0.8661, 0.7147, 0.1559]) +tensor([0.4502, 0.2455, 0.1160, ..., 0.7937, 0.8261, 0.8290]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 12.009309530258179 seconds +Time: 10.46561336517334 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 80, 177, ..., 9999782, - 9999890, 10000000]), - col_indices=tensor([ 1894, 3295, 3747, ..., 98404, 98823, 99018]), - values=tensor([0.1540, 0.7163, 0.3077, ..., 0.3211, 0.5255, 0.5012]), +tensor(crow_indices=tensor([ 0, 122, 223, ..., 9999792, + 9999895, 10000000]), + col_indices=tensor([ 1544, 1798, 2744, ..., 97459, 97993, 98974]), + values=tensor([0.0071, 0.6500, 0.6831, ..., 0.9318, 0.2280, 0.2243]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8104, 0.7178, 0.6885, ..., 0.8661, 0.7147, 0.1559]) +tensor([0.4502, 0.2455, 0.1160, ..., 0.7937, 0.8261, 0.8290]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 12.009309530258179 seconds +Time: 10.46561336517334 seconds -[41.32, 39.94, 39.97, 39.81, 39.83, 40.24, 40.5, 40.22, 40.21, 41.33] -[128.95] -17.09426212310791 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 12.009309530258179, 'TIME_S_1KI': 2.558983492490556, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2204.305100774765, 'W': 128.95} -[41.32, 39.94, 39.97, 39.81, 39.83, 40.24, 40.5, 40.22, 40.21, 41.33, 40.63, 40.93, 40.28, 39.66, 39.87, 41.7, 39.67, 40.03, 39.68, 39.78] -724.0699999999999 -36.2035 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 12.009309530258179, 'TIME_S_1KI': 2.558983492490556, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2204.305100774765, 'W': 128.95, 'J_1KI': 469.7006394150362, 'W_1KI': 27.477093543575535, 'W_D': 92.7465, 'J_D': 1585.4329820008277, 'W_D_1KI': 19.76273172810569, 'J_D_1KI': 4.211108401471487} +[40.65, 40.02, 40.14, 40.39, 40.44, 40.48, 42.08, 40.13, 39.85, 39.7] +[124.74] +14.953179597854614 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3901, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.46561336517334, 'TIME_S_1KI': 2.682802708324363, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1865.2596230363845, 'W': 124.74} +[40.65, 40.02, 40.14, 40.39, 40.44, 40.48, 42.08, 40.13, 39.85, 39.7, 40.82, 39.8, 39.83, 39.68, 39.82, 39.74, 40.19, 39.72, 39.68, 39.92] +722.5350000000001 +36.12675 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3901, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.46561336517334, 'TIME_S_1KI': 2.682802708324363, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1865.2596230363845, 'W': 124.74, 'J_1KI': 478.14909588218006, 'W_1KI': 31.976416303511918, 'W_D': 88.61325, 'J_D': 1325.0498419995904, 'W_D_1KI': 22.715521661112536, 'J_D_1KI': 5.8229996567835265} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..10e69e4 
--- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 494, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.353509664535522, "TIME_S_1KI": 20.958521588128587, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3731.4962283182144, "W": 99.42, "J_1KI": 7553.636089712984, "W_1KI": 201.25506072874495, "W_D": 63.24625, "J_D": 2373.799470230043, "W_D_1KI": 128.02884615384616, "J_D_1KI": 259.1677047648708} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..17582d7 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.1213040351867676} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1022, 2009, ..., + 99997908, 99998970, 100000000]), + col_indices=tensor([ 163, 253, 363, ..., 99772, 99825, 99860]), + values=tensor([0.0148, 0.7993, 0.6700, ..., 0.9519, 0.4393, 0.7559]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.5071, 0.9341, 0.1013, ..., 0.3446, 0.4492, 0.8404]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 2.1213040351867676 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '494', '-ss', '100000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.353509664535522} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 964, 1956, ..., + 99998009, 99999004, 100000000]), + col_indices=tensor([ 38, 40, 408, ..., 99675, 99715, 99925]), + values=tensor([0.6358, 0.6526, 0.7974, ..., 0.8113, 0.8396, 0.0716]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.6561, 0.8272, 0.0568, ..., 0.8206, 0.9827, 0.1445]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.353509664535522 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 964, 1956, ..., + 99998009, 99999004, 100000000]), + col_indices=tensor([ 38, 40, 408, ..., 99675, 99715, 99925]), + values=tensor([0.6358, 0.6526, 0.7974, ..., 0.8113, 0.8396, 0.0716]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.6561, 0.8272, 0.0568, ..., 0.8206, 0.9827, 0.1445]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.353509664535522 seconds + +[41.04, 40.08, 40.58, 39.71, 40.86, 39.62, 39.7, 39.56, 39.69, 39.62] +[99.42] +37.53265166282654 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 494, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.353509664535522, 'TIME_S_1KI': 20.958521588128587, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3731.4962283182144, 'W': 99.42} +[41.04, 40.08, 40.58, 39.71, 40.86, 39.62, 39.7, 39.56, 39.69, 39.62, 46.01, 39.86, 40.26, 39.93, 40.17, 40.1, 39.63, 41.02, 39.56, 39.62] +723.475 +36.17375 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 494, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.353509664535522, 'TIME_S_1KI': 20.958521588128587, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3731.4962283182144, 'W': 99.42, 'J_1KI': 7553.636089712984, 'W_1KI': 201.25506072874495, 'W_D': 63.24625, 'J_D': 2373.799470230043, 'W_D_1KI': 128.02884615384616, 'J_D_1KI': 259.1677047648708} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json index 5f6b7ac..ef46be1 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 99857, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.432250738143921, "TIME_S_1KI": 0.10447190220158749, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1536.9044136524199, "W": 115.09999999999998, "J_1KI": 15.391053342804408, "W_1KI": 1.152648287050482, 
"W_D": 79.15799999999997, "J_D": 1056.9789711198803, "W_D_1KI": 0.7927135804200004, "J_D_1KI": 0.007938487841813797} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 99236, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.395461559295654, "TIME_S_1KI": 0.10475494336022868, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1532.423426902294, "W": 114.49, "J_1KI": 15.442212774621044, "W_1KI": 1.1537143778467491, "W_D": 78.631, "J_D": 1052.4586119377614, "W_D_1KI": 0.7923636583497925, "J_D_1KI": 0.007984639227193684} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output index 5aa8a41..a845885 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.043670654296875} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.09090495109558105} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99998, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99999, 100000, 100000]), - col_indices=tensor([ 6609, 19255, 81333, ..., 81128, 51531, 76130]), - values=tensor([0.9876, 0.0139, 0.8085, ..., 0.3685, 0.4758, 0.0266]), + col_indices=tensor([59722, 26553, 65309, ..., 96537, 72323, 51178]), + values=tensor([0.8619, 0.8559, 0.4334, ..., 0.5838, 0.3974, 0.6330]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1735, 0.8240, 0.8190, ..., 0.4288, 0.7745, 0.1715]) +tensor([0.4984, 0.6054, 0.7345, ..., 0.7298, 0.5548, 0.3418]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.043670654296875 seconds +Time: 0.09090495109558105 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '24043', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.5281074047088623} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '11550', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.2220795154571533} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 99997, 99999, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 100000, 100000, 100000]), - col_indices=tensor([69039, 75318, 84133, ..., 16483, 23976, 47642]), - values=tensor([0.3961, 0.2517, 0.3876, ..., 0.3761, 0.7912, 0.1675]), + col_indices=tensor([97989, 94542, 6731, ..., 193, 6668, 85074]), + values=tensor([0.2761, 0.7667, 0.4159, ..., 0.3284, 0.0255, 0.9429]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.9918, 0.3750, 0.7737, ..., 0.5214, 0.0832, 0.2225]) +tensor([0.3205, 0.1159, 0.9286, ..., 0.1316, 0.0577, 0.6513]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 2.5281074047088623 seconds +Time: 1.2220795154571533 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '99857', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.432250738143921} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '99236', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.395461559295654} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 100000, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99999, 100000, 100000]), - col_indices=tensor([18969, 38131, 43029, ..., 81495, 1519, 27704]), - values=tensor([0.3850, 0.3770, 0.8820, ..., 0.3865, 0.0804, 0.8829]), + col_indices=tensor([81931, 10489, 91860, ..., 16255, 66492, 51947]), + values=tensor([0.9027, 0.7980, 0.1818, ..., 0.8534, 0.1736, 0.7236]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.4374, 0.1348, 0.8967, ..., 0.5157, 0.0353, 0.0014]) +tensor([0.8190, 0.0065, 0.8917, ..., 0.2823, 0.5210, 0.8907]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.432250738143921 seconds +Time: 10.395461559295654 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 100000, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99999, 100000, 100000]), - col_indices=tensor([18969, 38131, 43029, ..., 81495, 1519, 27704]), - values=tensor([0.3850, 0.3770, 0.8820, ..., 0.3865, 0.0804, 0.8829]), + col_indices=tensor([81931, 10489, 91860, ..., 16255, 66492, 51947]), + values=tensor([0.9027, 0.7980, 0.1818, ..., 0.8534, 0.1736, 0.7236]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.4374, 0.1348, 0.8967, ..., 0.5157, 0.0353, 0.0014]) +tensor([0.8190, 0.0065, 0.8917, ..., 0.2823, 0.5210, 0.8907]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.432250738143921 seconds +Time: 10.395461559295654 seconds -[40.36, 39.67, 39.74, 39.69, 39.75, 39.62, 40.16, 41.41, 40.17, 40.09] -[115.1] -13.35277509689331 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 99857, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.432250738143921, 'TIME_S_1KI': 0.10447190220158749, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.9044136524199, 'W': 115.09999999999998} -[40.36, 39.67, 39.74, 39.69, 39.75, 39.62, 40.16, 41.41, 40.17, 40.09, 40.33, 39.61, 39.56, 41.58, 39.51, 39.51, 39.84, 39.34, 39.38, 39.82] -718.8400000000001 -35.94200000000001 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 99857, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.432250738143921, 'TIME_S_1KI': 0.10447190220158749, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.9044136524199, 'W': 115.09999999999998, 'J_1KI': 15.391053342804408, 'W_1KI': 1.152648287050482, 'W_D': 79.15799999999997, 'J_D': 1056.9789711198803, 'W_D_1KI': 0.7927135804200004, 'J_D_1KI': 0.007938487841813797} +[40.35, 39.53, 39.89, 39.5, 41.28, 39.51, 39.64, 39.86, 39.48, 39.41] +[114.49] +13.384779691696167 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 99236, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.395461559295654, 'TIME_S_1KI': 0.10475494336022868, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1532.423426902294, 'W': 114.49} +[40.35, 39.53, 39.89, 39.5, 41.28, 39.51, 39.64, 39.86, 39.48, 39.41, 40.47, 40.01, 39.54, 39.38, 39.48, 39.86, 39.66, 39.85, 39.97, 41.25] +717.1800000000001 +35.859 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 99236, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.395461559295654, 'TIME_S_1KI': 0.10475494336022868, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1532.423426902294, 'W': 114.49, 'J_1KI': 15.442212774621044, 'W_1KI': 1.1537143778467491, 'W_D': 78.631, 'J_D': 1052.4586119377614, 'W_D_1KI': 0.7923636583497925, 'J_D_1KI': 0.007984639227193684} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.json index 
80ab5da..7a818be 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 81276, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.078009605407715, "TIME_S_1KI": 0.12399736214144047, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1722.4750410318375, "W": 132.75, "J_1KI": 21.192911081153568, "W_1KI": 1.6333234903292484, "W_D": 96.411, "J_D": 1250.9645286698342, "W_D_1KI": 1.1862173335301935, "J_D_1KI": 0.014594927574317062} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 83410, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.996037721633911, "TIME_S_1KI": 0.13183116798506067, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1795.560396194458, "W": 132.0, "J_1KI": 21.526919987944584, "W_1KI": 1.582544059465292, "W_D": 95.85775, "J_D": 1303.9271179417372, "W_D_1KI": 1.149235703153099, "J_D_1KI": 0.013778152537502687} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.output index 0f8dc83..a884f81 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.04085874557495117} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.053716182708740234} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 9, ..., 499987, 499993, +tensor(crow_indices=tensor([ 0, 5, 8, ..., 499985, 499992, 500000]), - col_indices=tensor([ 4658, 51132, 55767, ..., 77897, 84680, 91168]), - values=tensor([0.8716, 0.7460, 0.9968, ..., 0.7762, 0.8585, 0.9878]), + col_indices=tensor([ 3808, 29545, 30689, ..., 61411, 72054, 99531]), + values=tensor([0.0219, 0.7195, 0.4237, ..., 0.5900, 0.7914, 0.0313]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.7678, 0.5187, 0.4774, ..., 0.8664, 0.3724, 0.0254]) +tensor([0.7421, 0.2952, 0.1401, ..., 0.8551, 0.8773, 0.7735]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 0.04085874557495117 seconds +Time: 0.053716182708740234 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25698', '-ss', '100000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.3198819160461426} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19547', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.460651159286499} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 4, ..., 499992, 499998, +tensor(crow_indices=tensor([ 0, 2, 6, ..., 499989, 499996, 500000]), - col_indices=tensor([33478, 35089, 63624, ..., 93258, 3464, 77760]), - values=tensor([0.8303, 0.5286, 0.9064, ..., 0.8655, 0.5788, 0.5903]), + col_indices=tensor([55237, 65503, 31375, ..., 76224, 77752, 79533]), + values=tensor([0.6791, 0.3647, 0.0311, ..., 0.1415, 0.5862, 0.6146]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.0892, 0.6340, 0.1475, ..., 0.5230, 0.0009, 0.8265]) +tensor([0.0236, 0.3685, 0.8621, ..., 0.9429, 0.5386, 0.6335]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 3.3198819160461426 seconds +Time: 2.460651159286499 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '81276', '-ss', '100000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.078009605407715} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '83410', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.996037721633911} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 9, ..., 499997, 500000, +tensor(crow_indices=tensor([ 0, 4, 8, ..., 499990, 499995, 500000]), - col_indices=tensor([38450, 44227, 69625, ..., 8507, 39094, 82179]), - values=tensor([0.2677, 0.9845, 0.1042, ..., 0.9974, 0.0756, 0.3422]), + col_indices=tensor([17836, 30465, 31708, ..., 69225, 71763, 99122]), + values=tensor([0.9507, 0.2665, 0.6401, ..., 0.4112, 0.5507, 0.3231]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.8400, 0.1962, 0.3075, ..., 0.6034, 0.5737, 0.0994]) +tensor([0.5095, 0.9305, 0.2807, ..., 0.2466, 0.1767, 0.8179]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.078009605407715 seconds +Time: 10.996037721633911 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 9, ..., 499997, 500000, +tensor(crow_indices=tensor([ 0, 4, 8, ..., 499990, 499995, 500000]), - col_indices=tensor([38450, 44227, 69625, ..., 8507, 39094, 82179]), - values=tensor([0.2677, 0.9845, 0.1042, ..., 0.9974, 0.0756, 0.3422]), + col_indices=tensor([17836, 30465, 31708, ..., 69225, 71763, 99122]), + values=tensor([0.9507, 0.2665, 0.6401, ..., 0.4112, 0.5507, 0.3231]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.8400, 0.1962, 0.3075, ..., 0.6034, 0.5737, 0.0994]) +tensor([0.5095, 0.9305, 0.2807, ..., 0.2466, 0.1767, 0.8179]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.078009605407715 seconds +Time: 10.996037721633911 seconds -[41.19, 39.74, 39.62, 40.23, 39.69, 39.5, 44.76, 39.57, 39.56, 39.98] -[132.75] -12.975329875946045 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 81276, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.078009605407715, 'TIME_S_1KI': 0.12399736214144047, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1722.4750410318375, 'W': 132.75} -[41.19, 39.74, 39.62, 40.23, 39.69, 39.5, 44.76, 39.57, 39.56, 39.98, 40.29, 39.57, 39.62, 45.06, 39.69, 39.48, 40.04, 39.95, 40.05, 39.84] -726.78 -36.339 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 81276, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.078009605407715, 'TIME_S_1KI': 0.12399736214144047, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1722.4750410318375, 'W': 132.75, 'J_1KI': 21.192911081153568, 'W_1KI': 1.6333234903292484, 'W_D': 96.411, 'J_D': 1250.9645286698342, 'W_D_1KI': 1.1862173335301935, 'J_D_1KI': 0.014594927574317062} +[40.63, 39.65, 39.67, 39.68, 40.38, 40.41, 40.18, 39.64, 39.74, 41.83] +[132.0] +13.60273027420044 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 83410, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.996037721633911, 'TIME_S_1KI': 0.13183116798506067, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1795.560396194458, 'W': 132.0} +[40.63, 39.65, 39.67, 39.68, 40.38, 40.41, 40.18, 39.64, 39.74, 41.83, 41.59, 40.04, 39.76, 39.63, 40.43, 39.58, 40.65, 39.49, 39.56, 44.66] +722.845 +36.142250000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 83410, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.996037721633911, 'TIME_S_1KI': 0.13183116798506067, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1795.560396194458, 'W': 132.0, 'J_1KI': 21.526919987944584, 'W_1KI': 1.582544059465292, 'W_D': 95.85775, 'J_D': 1303.9271179417372, 'W_D_1KI': 1.149235703153099, 'J_D_1KI': 0.013778152537502687} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json index 0d37837..da82dae 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 280711, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.449347734451294, "TIME_S_1KI": 0.03722457521953644, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1289.5745413303375, "W": 98.5, "J_1KI": 4.593957989998032, "W_1KI": 0.35089469240606885, "W_D": 62.83475, "J_D": 822.6405473183394, "W_D_1KI": 0.22384142409809377, "J_D_1KI": 0.0007974088086968226} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 219306, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 35.10541844367981, "TIME_S_1KI": 0.16007504784948795, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1057.4668346881867, "W": 97.9, "J_1KI": 4.821878264562696, "W_1KI": 0.4464082150055174, "W_D": 61.93775000000001, "J_D": 669.0205969377757, "W_D_1KI": 0.2824261534112154, "J_D_1KI": 0.001287817722320481} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output index d302c45..12fb597 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.019724130630493164} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6370301246643066} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9999, 10000]), - col_indices=tensor([ 730, 4220, 7544, ..., 4458, 7562, 5619]), - values=tensor([0.0181, 0.7832, 0.5914, ..., 0.2469, 0.2734, 0.2796]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 9997, 9998, 10000]), + col_indices=tensor([1761, 2053, 4137, ..., 881, 5922, 6796]), + values=tensor([0.5342, 0.2362, 0.4713, ..., 0.3700, 0.5232, 0.3591]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.6994, 0.7339, 0.7582, ..., 0.9456, 0.1186, 0.3856]) +tensor([0.7431, 0.7442, 0.3927, ..., 0.0109, 0.6031, 0.1086]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.019724130630493164 seconds +Time: 0.6370301246643066 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '53234', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.991217851638794} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1648', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0789031982421875} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 5, ..., 9999, 9999, 10000]), - col_indices=tensor([2031, 5960, 7493, ..., 3747, 8534, 6060]), - values=tensor([0.1847, 0.1000, 0.1920, ..., 0.9911, 0.4392, 0.2330]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 9992, 9994, 10000]), + col_indices=tensor([5434, 5706, 2706, ..., 6154, 6878, 9484]), + values=tensor([0.7658, 0.7554, 0.9636, ..., 0.3967, 0.1504, 0.1698]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.7239, 0.0636, 0.4781, ..., 0.2276, 0.2279, 0.8613]) +tensor([0.1722, 0.0043, 0.4306, ..., 0.6854, 0.6622, 0.9446]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 1.991217851638794 seconds +Time: 0.0789031982421875 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '280711', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.449347734451294} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '219306', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 35.10541844367981} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9997, 9999, 10000]), - col_indices=tensor([8732, 42, 2512, ..., 1373, 9550, 9690]), - values=tensor([0.4706, 0.1126, 0.6045, ..., 0.0102, 0.1178, 0.6557]), +tensor(crow_indices=tensor([ 0, 3, 4, ..., 9998, 10000, 10000]), + col_indices=tensor([3315, 5391, 5786, ..., 9646, 2823, 7853]), + values=tensor([0.3181, 0.3008, 0.6377, ..., 0.9012, 0.7876, 0.7122]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.4976, 0.6299, 0.3127, ..., 0.9623, 0.9434, 0.7070]) +tensor([0.3394, 0.6194, 0.9177, ..., 0.2625, 0.9091, 0.8518]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.449347734451294 seconds +Time: 35.10541844367981 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9997, 9999, 10000]), - col_indices=tensor([8732, 42, 2512, ..., 1373, 9550, 9690]), - values=tensor([0.4706, 0.1126, 0.6045, ..., 0.0102, 0.1178, 0.6557]), +tensor(crow_indices=tensor([ 0, 3, 4, ..., 9998, 10000, 10000]), + col_indices=tensor([3315, 5391, 5786, ..., 9646, 2823, 7853]), + values=tensor([0.3181, 0.3008, 0.6377, ..., 0.9012, 0.7876, 0.7122]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.4976, 0.6299, 0.3127, ..., 0.9623, 0.9434, 0.7070]) +tensor([0.3394, 0.6194, 0.9177, ..., 0.2625, 0.9091, 0.8518]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.449347734451294 seconds +Time: 35.10541844367981 seconds -[40.09, 39.2, 39.41, 39.39, 39.38, 42.02, 40.51, 39.25, 39.25, 39.51] -[98.5] -13.092127323150635 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 280711, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.449347734451294, 'TIME_S_1KI': 0.03722457521953644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.5745413303375, 'W': 98.5} -[40.09, 39.2, 39.41, 39.39, 39.38, 42.02, 40.51, 39.25, 39.25, 39.51, 40.09, 39.17, 39.95, 40.04, 39.24, 39.23, 39.51, 39.16, 39.2, 39.1] -713.3050000000001 -35.66525 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 280711, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.449347734451294, 'TIME_S_1KI': 0.03722457521953644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.5745413303375, 'W': 98.5, 'J_1KI': 4.593957989998032, 'W_1KI': 0.35089469240606885, 'W_D': 62.83475, 'J_D': 822.6405473183394, 'W_D_1KI': 0.22384142409809377, 'J_D_1KI': 0.0007974088086968226} +[40.99, 41.44, 39.95, 40.1, 39.6, 39.61, 39.97, 41.1, 39.92, 39.69] +[97.9] +10.801499843597412 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 219306, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 35.10541844367981, 'TIME_S_1KI': 0.16007504784948795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1057.4668346881867, 'W': 97.9} +[40.99, 41.44, 39.95, 40.1, 39.6, 39.61, 39.97, 41.1, 39.92, 39.69, 40.29, 40.15, 39.6, 39.59, 39.38, 39.37, 39.69, 39.8, 39.74, 39.5] +719.2449999999999 +35.96225 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 219306, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 35.10541844367981, 'TIME_S_1KI': 0.16007504784948795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1057.4668346881867, 'W': 97.9, 'J_1KI': 4.821878264562696, 'W_1KI': 0.4464082150055174, 'W_D': 61.93775000000001, 'J_D': 669.0205969377757, 'W_D_1KI': 0.2824261534112154, 'J_D_1KI': 0.001287817722320481} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json index 9665b3b..31e8c17 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 193546, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.657205820083618, "TIME_S_1KI": 0.05506290917964524, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1424.268603067398, "W": 108.67, "J_1KI": 7.358811874528009, "W_1KI": 0.5614685914459612, "W_D": 72.91375, "J_D": 955.6341663467883, "W_D_1KI": 0.3767256879501513, "J_D_1KI": 0.001946440060503195} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 190785, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.442018032073975, "TIME_S_1KI": 0.054731860639326856, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1412.9676432013512, "W": 108.65, "J_1KI": 7.406073030905738, "W_1KI": 0.5694892156091936, "W_D": 72.7955, "J_D": 946.6883209449053, "W_D_1KI": 0.38155777445815975, "J_D_1KI": 0.0019999359197953705} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output index 77b5a62..eb0c584 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.025844097137451172} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.023200273513793945} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 28, ..., 99969, 99983, +tensor(crow_indices=tensor([ 0, 6, 16, ..., 99984, 99994, 100000]), - col_indices=tensor([1079, 2122, 3254, ..., 9373, 9823, 9958]), - values=tensor([0.1589, 0.8596, 0.7837, ..., 0.1493, 0.1272, 0.2084]), + col_indices=tensor([ 877, 1361, 5328, ..., 2956, 4479, 8265]), + values=tensor([0.7502, 0.1905, 0.1800, ..., 0.1428, 0.2404, 0.5567]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0719, 0.4122, 0.7875, ..., 0.0407, 0.8322, 0.6511]) +tensor([0.3837, 0.7818, 0.3585, ..., 0.1122, 0.3287, 0.9396]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.025844097137451172 seconds +Time: 0.023200273513793945 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '40628', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.4707634449005127} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '45258', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.860283851623535} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 18, ..., 99974, 99987, +tensor(crow_indices=tensor([ 0, 9, 24, ..., 99972, 99984, 100000]), - col_indices=tensor([ 792, 1032, 1238, ..., 8561, 8731, 9370]), - values=tensor([0.4488, 0.9659, 0.1268, ..., 0.7863, 0.6709, 0.3638]), + col_indices=tensor([ 507, 571, 1974, ..., 8031, 8511, 8620]), + values=tensor([0.4538, 0.2010, 0.5381, ..., 0.8680, 0.9127, 0.3188]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8213, 0.7389, 0.9585, ..., 0.8858, 0.0787, 0.3979]) +tensor([0.4786, 0.3866, 0.1367, ..., 0.0637, 0.2113, 0.9673]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 2.4707634449005127 seconds +Time: 2.860283851623535 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '172656', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.366683721542358} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '166140', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.143608093261719} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 24, ..., 99973, 99986, +tensor(crow_indices=tensor([ 0, 8, 20, ..., 99973, 99987, 100000]), - col_indices=tensor([ 684, 3301, 3344, ..., 8499, 8709, 9229]), - values=tensor([0.0104, 0.6771, 0.5927, ..., 0.6883, 0.2524, 0.4550]), + col_indices=tensor([ 842, 2941, 3621, ..., 6856, 7349, 9824]), + values=tensor([0.3470, 0.6068, 0.6754, ..., 0.9311, 0.7786, 0.1425]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.4786, 0.6837, 0.1379, ..., 0.3005, 0.2266, 0.1673]) +tensor([0.7247, 0.2486, 0.0596, ..., 0.5982, 0.5318, 0.1045]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,19 +56,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 9.366683721542358 seconds +Time: 9.143608093261719 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '193546', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.657205820083618} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '190785', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.442018032073975} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 24, ..., 99982, 99990, +tensor(crow_indices=tensor([ 0, 13, 23, ..., 99982, 99991, 100000]), - col_indices=tensor([ 667, 823, 2535, ..., 7218, 8112, 8309]), - values=tensor([0.9044, 0.9079, 0.6825, ..., 0.1587, 0.6143, 0.0618]), + col_indices=tensor([1528, 2535, 3076, ..., 7834, 8468, 8792]), + values=tensor([0.1851, 0.9405, 0.0866, ..., 0.2777, 0.6232, 0.3496]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5914, 0.6686, 0.5823, ..., 0.5362, 0.3609, 0.2297]) +tensor([0.1487, 0.9958, 0.2037, ..., 0.3870, 0.3266, 0.1189]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -76,16 +76,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.657205820083618 seconds +Time: 10.442018032073975 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 24, ..., 99982, 99990, +tensor(crow_indices=tensor([ 0, 13, 23, ..., 99982, 99991, 100000]), - col_indices=tensor([ 667, 823, 2535, ..., 7218, 8112, 8309]), - values=tensor([0.9044, 0.9079, 0.6825, ..., 0.1587, 0.6143, 0.0618]), + col_indices=tensor([1528, 2535, 3076, ..., 7834, 8468, 8792]), + values=tensor([0.1851, 0.9405, 0.0866, ..., 0.2777, 0.6232, 0.3496]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5914, 0.6686, 0.5823, ..., 0.5362, 0.3609, 0.2297]) +tensor([0.1487, 0.9958, 0.2037, ..., 0.3870, 0.3266, 0.1189]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -93,13 +93,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.657205820083618 seconds +Time: 10.442018032073975 seconds -[40.9, 39.63, 39.53, 39.38, 39.44, 39.45, 39.92, 39.64, 39.85, 39.99] -[108.67] -13.106364250183105 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 193546, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.657205820083618, 'TIME_S_1KI': 0.05506290917964524, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1424.268603067398, 'W': 108.67} -[40.9, 39.63, 39.53, 39.38, 39.44, 39.45, 39.92, 39.64, 39.85, 39.99, 39.96, 39.48, 39.45, 39.41, 39.63, 39.38, 40.94, 39.79, 39.91, 39.74] -715.125 -35.75625 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 193546, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.657205820083618, 'TIME_S_1KI': 0.05506290917964524, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1424.268603067398, 'W': 108.67, 'J_1KI': 7.358811874528009, 'W_1KI': 0.5614685914459612, 'W_D': 72.91375, 'J_D': 955.6341663467883, 'W_D_1KI': 0.3767256879501513, 'J_D_1KI': 0.001946440060503195} +[41.13, 39.88, 39.62, 39.91, 39.95, 40.5, 39.94, 39.62, 40.31, 39.46] +[108.65] +13.004764318466187 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 190785, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.442018032073975, 'TIME_S_1KI': 0.054731860639326856, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1412.9676432013512, 'W': 108.65} +[41.13, 39.88, 39.62, 39.91, 39.95, 40.5, 39.94, 39.62, 40.31, 39.46, 40.18, 39.45, 39.6, 39.53, 40.04, 39.99, 39.86, 39.34, 39.48, 39.37] +717.0899999999999 +35.854499999999994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 190785, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.442018032073975, 'TIME_S_1KI': 0.054731860639326856, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1412.9676432013512, 'W': 108.65, 'J_1KI': 7.406073030905738, 'W_1KI': 0.5694892156091936, 'W_D': 72.7955, 'J_D': 946.6883209449053, 'W_D_1KI': 0.38155777445815975, 'J_D_1KI': 0.0019999359197953705} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json index 4c6ec73..88ae591 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 102691, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.217256307601929, "TIME_S_1KI": 0.09949514862648069, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1713.9247414016722, "W": 132.92, "J_1KI": 16.690116382172462, "W_1KI": 1.294368542520766, "W_D": 96.75025, "J_D": 1247.5372194688318, "W_D_1KI": 0.9421492633239523, "J_D_1KI": 0.009174604038561825} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 103627, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.609182357788086, "TIME_S_1KI": 0.10237855344445063, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1720.314517021179, "W": 134.76, "J_1KI": 16.6010259586901, "W_1KI": 1.3004332847616933, "W_D": 98.40899999999999, "J_D": 1256.266186594963, "W_D_1KI": 0.9496463276945197, "J_D_1KI": 0.009164082022007004} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output index 7818ee2..9074ecb 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.027860403060913086} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.0329434871673584} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 86, 184, ..., 999787, - 999901, 1000000]), - col_indices=tensor([ 81, 93, 211, ..., 9891, 9936, 9983]), - values=tensor([0.0273, 0.9948, 0.2764, ..., 0.0318, 0.5538, 0.8532]), +tensor(crow_indices=tensor([ 0, 121, 243, ..., 999828, + 999916, 1000000]), + col_indices=tensor([ 22, 79, 111, ..., 9424, 9628, 9890]), + values=tensor([0.5175, 0.7264, 0.7156, ..., 0.6361, 0.8828, 0.5482]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8459, 0.7440, 0.9932, ..., 0.5464, 0.7654, 0.2266]) +tensor([0.5630, 0.9387, 0.8377, ..., 0.1993, 0.7981, 0.3313]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 0.027860403060913086 seconds +Time: 0.0329434871673584 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '37687', '-ss', '10000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.853411912918091} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '31872', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.229402542114258} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 90, 178, ..., 999807, - 999899, 1000000]), - col_indices=tensor([ 9, 87, 435, ..., 9776, 9821, 9947]), - values=tensor([0.6051, 0.3509, 0.6551, ..., 0.3060, 0.1178, 0.2325]), +tensor(crow_indices=tensor([ 0, 102, 190, ..., 999801, + 999892, 1000000]), + col_indices=tensor([ 5, 53, 116, ..., 9752, 9921, 9934]), + values=tensor([0.1910, 0.3732, 0.9811, ..., 0.5962, 0.0722, 0.9092]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6802, 0.0969, 0.8232, ..., 0.8757, 0.6573, 0.4893]) +tensor([0.7610, 0.7524, 0.2994, ..., 0.6189, 0.3748, 0.4768]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 3.853411912918091 seconds +Time: 3.229402542114258 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '102691', '-ss', '10000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.217256307601929} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '103627', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.609182357788086} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 98, 191, ..., 999800, - 999904, 1000000]), - col_indices=tensor([ 18, 19, 89, ..., 9675, 9719, 9959]), - values=tensor([0.5811, 0.2000, 0.4195, ..., 0.8918, 0.7545, 0.5786]), +tensor(crow_indices=tensor([ 0, 111, 198, ..., 999814, + 999913, 1000000]), + col_indices=tensor([ 10, 117, 189, ..., 9629, 9777, 9974]), + values=tensor([0.2221, 0.1933, 0.3520, ..., 0.0590, 0.4848, 0.5886]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.3032, 0.6522, 0.8844, ..., 0.7793, 0.6874, 0.5546]) +tensor([0.5413, 0.7704, 0.2091, ..., 0.3119, 0.8409, 0.8046]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.217256307601929 seconds +Time: 10.609182357788086 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 98, 191, ..., 999800, - 999904, 1000000]), - col_indices=tensor([ 18, 19, 89, ..., 9675, 9719, 9959]), - values=tensor([0.5811, 0.2000, 0.4195, ..., 0.8918, 0.7545, 0.5786]), +tensor(crow_indices=tensor([ 0, 111, 198, ..., 999814, + 999913, 1000000]), + col_indices=tensor([ 10, 117, 189, ..., 9629, 9777, 9974]), + values=tensor([0.2221, 0.1933, 0.3520, ..., 0.0590, 0.4848, 0.5886]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.3032, 0.6522, 0.8844, ..., 0.7793, 0.6874, 0.5546]) +tensor([0.5413, 0.7704, 0.2091, ..., 0.3119, 0.8409, 0.8046]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.217256307601929 seconds +Time: 10.609182357788086 seconds -[40.93, 39.91, 40.43, 39.4, 39.51, 40.07, 39.51, 39.35, 39.39, 44.7] -[132.92] -12.894408226013184 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102691, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.217256307601929, 'TIME_S_1KI': 0.09949514862648069, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1713.9247414016722, 'W': 132.92} -[40.93, 39.91, 40.43, 39.4, 39.51, 40.07, 39.51, 39.35, 39.39, 44.7, 40.04, 39.9, 39.44, 39.42, 39.81, 39.87, 45.14, 39.82, 39.78, 39.62] -723.395 -36.16975 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102691, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.217256307601929, 'TIME_S_1KI': 0.09949514862648069, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1713.9247414016722, 'W': 132.92, 'J_1KI': 16.690116382172462, 'W_1KI': 1.294368542520766, 'W_D': 96.75025, 'J_D': 1247.5372194688318, 'W_D_1KI': 0.9421492633239523, 'J_D_1KI': 0.009174604038561825} +[41.07, 40.24, 40.08, 40.72, 40.0, 39.5, 39.52, 44.93, 40.06, 39.45] +[134.76] +12.765765190124512 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 103627, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.609182357788086, 'TIME_S_1KI': 0.10237855344445063, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1720.314517021179, 'W': 134.76} +[41.07, 40.24, 40.08, 40.72, 40.0, 39.5, 39.52, 44.93, 40.06, 39.45, 40.32, 39.39, 39.52, 40.09, 39.8, 44.62, 39.58, 39.55, 39.35, 39.3] +727.02 +36.351 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 103627, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.609182357788086, 'TIME_S_1KI': 0.10237855344445063, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1720.314517021179, 'W': 134.76, 'J_1KI': 16.6010259586901, 'W_1KI': 1.3004332847616933, 'W_D': 98.40899999999999, 'J_D': 1256.266186594963, 'W_D_1KI': 0.9496463276945197, 'J_D_1KI': 0.009164082022007004} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json index 52cad91..515b811 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 27775, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.346900463104248, "TIME_S_1KI": 0.3725256692386768, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2138.1740951538086, "W": 151.25, "J_1KI": 76.98196562209931, "W_1KI": 5.445544554455445, "W_D": 115.164, "J_D": 1628.037563598633, "W_D_1KI": 4.146318631863187, "J_D_1KI": 0.1492823989869734} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 27766, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.416308164596558, "TIME_S_1KI": 0.37514615589557576, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2173.3968332290647, "W": 149.04, "J_1KI": 78.27547479756049, "W_1KI": 5.367715911546496, "W_D": 113.1685, "J_D": 1650.2956221234797, "W_D_1KI": 4.0757941367139665, "J_D_1KI": 0.14679082823287354} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output index 19caf4c..37becdc 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.07912850379943848} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.07421493530273438} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 485, 973, ..., 4998984, - 4999512, 5000000]), - col_indices=tensor([ 23, 33, 35, ..., 9878, 9920, 9946]), - values=tensor([0.8956, 0.5440, 0.5650, ..., 0.6571, 0.0981, 0.4530]), +tensor(crow_indices=tensor([ 0, 508, 1029, ..., 4998978, + 4999492, 5000000]), + col_indices=tensor([ 4, 25, 37, ..., 9913, 9926, 9946]), + values=tensor([0.1924, 0.7374, 0.0518, ..., 0.5778, 0.6928, 0.0428]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.3320, 0.0557, 0.6993, ..., 0.8374, 0.3528, 0.6849]) +tensor([0.0706, 0.1319, 0.8441, ..., 0.8315, 0.3967, 0.9583]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 0.07912850379943848 seconds +Time: 0.07421493530273438 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '13269', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 5.016182899475098} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14148', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 5.350146532058716} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 490, 975, ..., 4999017, - 4999511, 5000000]), - col_indices=tensor([ 5, 7, 17, ..., 9925, 9927, 9956]), - values=tensor([0.3061, 0.0982, 0.7519, ..., 0.4711, 0.1343, 0.2753]), +tensor(crow_indices=tensor([ 0, 491, 1021, ..., 4998988, + 4999483, 5000000]), + col_indices=tensor([ 2, 82, 84, ..., 9944, 9947, 9986]), + values=tensor([0.2655, 0.6510, 0.4554, ..., 0.8118, 0.7172, 0.4313]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.4300, 0.5593, 0.7816, ..., 0.7590, 0.1985, 0.5681]) +tensor([0.3392, 0.3098, 0.7947, ..., 0.5067, 0.5991, 0.2806]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 5.016182899475098 seconds +Time: 5.350146532058716 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27775', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.346900463104248} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27766', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.416308164596558} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 516, 985, ..., 4998986, - 4999503, 5000000]), - col_indices=tensor([ 0, 38, 62, ..., 9969, 9984, 9993]), - values=tensor([0.4538, 0.1922, 0.3497, ..., 0.8541, 0.7038, 0.0561]), +tensor(crow_indices=tensor([ 0, 489, 982, ..., 4999022, + 4999480, 5000000]), + col_indices=tensor([ 17, 23, 33, ..., 9974, 9981, 9997]), + values=tensor([0.3994, 0.6610, 0.6708, ..., 0.9264, 0.3736, 0.5616]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.3516, 0.9610, 0.6827, ..., 0.5287, 0.4040, 0.0575]) +tensor([0.2036, 0.5321, 0.8533, ..., 0.2446, 0.1152, 0.9621]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.346900463104248 seconds +Time: 10.416308164596558 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 516, 985, ..., 4998986, - 4999503, 5000000]), - col_indices=tensor([ 0, 38, 62, ..., 9969, 9984, 9993]), - values=tensor([0.4538, 0.1922, 0.3497, ..., 0.8541, 0.7038, 0.0561]), +tensor(crow_indices=tensor([ 0, 489, 982, ..., 4999022, + 4999480, 5000000]), + col_indices=tensor([ 17, 23, 33, ..., 9974, 9981, 9997]), + values=tensor([0.3994, 0.6610, 0.6708, ..., 0.9264, 0.3736, 0.5616]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.3516, 0.9610, 0.6827, ..., 0.5287, 0.4040, 0.0575]) +tensor([0.2036, 0.5321, 0.8533, ..., 0.2446, 0.1152, 0.9621]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.346900463104248 seconds +Time: 10.416308164596558 seconds -[41.25, 39.91, 39.68, 39.83, 39.58, 41.43, 40.59, 39.78, 40.26, 39.69] -[151.25] -14.136688232421875 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.346900463104248, 'TIME_S_1KI': 0.3725256692386768, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2138.1740951538086, 'W': 151.25} -[41.25, 39.91, 39.68, 39.83, 39.58, 41.43, 40.59, 39.78, 40.26, 39.69, 40.82, 40.72, 40.11, 39.71, 39.6, 39.85, 39.65, 39.99, 39.77, 40.76] -721.72 -36.086 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.346900463104248, 'TIME_S_1KI': 0.3725256692386768, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2138.1740951538086, 'W': 151.25, 'J_1KI': 76.98196562209931, 'W_1KI': 5.445544554455445, 'W_D': 115.164, 'J_D': 1628.037563598633, 'W_D_1KI': 4.146318631863187, 'J_D_1KI': 0.1492823989869734} +[40.88, 39.74, 39.62, 39.95, 41.08, 39.43, 39.57, 39.93, 39.45, 39.37] +[149.04] +14.582641124725342 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27766, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.416308164596558, 'TIME_S_1KI': 0.37514615589557576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2173.3968332290647, 'W': 149.04} +[40.88, 39.74, 39.62, 39.95, 41.08, 39.43, 39.57, 39.93, 39.45, 39.37, 40.17, 39.68, 39.59, 39.61, 39.55, 39.52, 40.72, 39.82, 39.91, 40.1] +717.4300000000001 +35.871500000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27766, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.416308164596558, 'TIME_S_1KI': 0.37514615589557576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2173.3968332290647, 'W': 149.04, 'J_1KI': 78.27547479756049, 'W_1KI': 5.367715911546496, 'W_D': 113.1685, 'J_D': 1650.2956221234797, 'W_D_1KI': 4.0757941367139665, 'J_D_1KI': 0.14679082823287354} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json index 6336d4d..4016a02 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4427, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.78234076499939, "TIME_S_1KI": 2.4355863485428935, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1855.6078462219239, "W": 121.94, "J_1KI": 419.15695645401485, "W_1KI": 27.544612604472555, "W_D": 85.48649999999999, "J_D": 1300.8809262428283, "W_D_1KI": 19.310255251863563, "J_D_1KI": 4.361927999065633} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4516, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.659436702728271, "TIME_S_1KI": 2.360371280497846, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1934.6844965219498, "W": 122.57, "J_1KI": 428.4066644202723, "W_1KI": 27.141275465013283, "W_D": 86.43999999999998, "J_D": 1364.3968987464903, "W_D_1KI": 19.140832595217002, "J_D_1KI": 4.238448316035651} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output index 68c803b..e9b85bc 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.23713254928588867} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.25728321075439453} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 967, 1927, ..., 9997983, - 9998974, 10000000]), - col_indices=tensor([ 2, 7, 17, ..., 9977, 9981, 9986]), - values=tensor([0.0113, 0.4578, 0.3712, ..., 0.8300, 0.4518, 0.5288]), +tensor(crow_indices=tensor([ 0, 1018, 2040, ..., 9998067, + 9999061, 10000000]), + col_indices=tensor([ 3, 36, 63, ..., 9974, 9977, 9997]), + values=tensor([0.8836, 0.0615, 0.3071, ..., 0.0562, 0.8753, 0.9316]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6464, 0.5946, 0.9135, ..., 0.7384, 0.8851, 0.3138]) +tensor([0.4941, 0.9379, 0.9279, ..., 0.6862, 0.5933, 0.8774]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 0.23713254928588867 seconds +Time: 0.25728321075439453 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4427', '-ss', '10000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.78234076499939} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4081', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.48789381980896} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1000, 1968, ..., 9997976, - 9998957, 10000000]), - col_indices=tensor([ 18, 35, 37, ..., 9972, 9974, 9993]), - values=tensor([0.5495, 0.5155, 0.6909, ..., 0.5748, 0.2988, 0.6189]), +tensor(crow_indices=tensor([ 0, 1009, 2010, ..., 9997989, + 9998993, 10000000]), + col_indices=tensor([ 21, 25, 26, ..., 9986, 9990, 9997]), + values=tensor([0.5236, 0.7266, 0.2242, ..., 0.1473, 0.8152, 0.7468]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2327, 0.3005, 0.5005, ..., 0.5867, 0.2890, 0.0524]) +tensor([0.7137, 0.8450, 0.7472, ..., 0.9518, 0.9096, 0.7253]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.78234076499939 seconds +Time: 9.48789381980896 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4516', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.659436702728271} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1000, 1968, ..., 9997976, - 9998957, 10000000]), - col_indices=tensor([ 18, 35, 37, ..., 9972, 9974, 9993]), - values=tensor([0.5495, 0.5155, 0.6909, ..., 0.5748, 0.2988, 0.6189]), +tensor(crow_indices=tensor([ 0, 991, 1952, ..., 9997980, + 9999013, 10000000]), + col_indices=tensor([ 15, 17, 23, ..., 9988, 9992, 9997]), + values=tensor([0.8039, 0.3382, 0.4278, ..., 0.1060, 0.4366, 0.4507]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2327, 0.3005, 0.5005, ..., 0.5867, 0.2890, 0.0524]) +tensor([0.1772, 0.5080, 0.9123, ..., 0.8797, 0.3828, 0.1213]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +56,30 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.78234076499939 seconds +Time: 10.659436702728271 seconds -[40.15, 39.73, 39.55, 39.93, 39.94, 39.96, 44.96, 39.82, 40.15, 39.43] -[121.94] -15.217384338378906 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4427, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.78234076499939, 'TIME_S_1KI': 2.4355863485428935, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1855.6078462219239, 'W': 121.94} -[40.15, 39.73, 39.55, 39.93, 39.94, 39.96, 44.96, 39.82, 40.15, 39.43, 40.99, 45.07, 39.68, 40.13, 40.01, 39.56, 39.72, 40.75, 40.01, 39.63] -729.07 -36.453500000000005 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4427, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.78234076499939, 'TIME_S_1KI': 2.4355863485428935, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1855.6078462219239, 'W': 121.94, 'J_1KI': 419.15695645401485, 'W_1KI': 27.544612604472555, 'W_D': 85.48649999999999, 'J_D': 1300.8809262428283, 'W_D_1KI': 19.310255251863563, 'J_D_1KI': 4.361927999065633} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 991, 1952, ..., 9997980, + 9999013, 10000000]), + col_indices=tensor([ 15, 17, 23, ..., 9988, 9992, 9997]), + values=tensor([0.8039, 0.3382, 0.4278, ..., 0.1060, 0.4366, 0.4507]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1772, 0.5080, 0.9123, ..., 0.8797, 0.3828, 0.1213]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.659436702728271 seconds + +[41.13, 40.13, 39.96, 40.14, 39.81, 40.27, 39.7, 39.78, 39.58, 39.57] +[122.57] +15.78432321548462 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.659436702728271, 'TIME_S_1KI': 2.360371280497846, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1934.6844965219498, 'W': 122.57} +[41.13, 40.13, 39.96, 40.14, 39.81, 40.27, 39.7, 39.78, 39.58, 39.57, 41.84, 40.09, 40.2, 39.67, 39.68, 42.84, 40.03, 39.84, 39.63, 39.96] +722.6000000000001 +36.13000000000001 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.659436702728271, 'TIME_S_1KI': 2.360371280497846, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1934.6844965219498, 'W': 122.57, 'J_1KI': 428.4066644202723, 'W_1KI': 27.141275465013283, 'W_D': 86.43999999999998, 'J_D': 1364.3968987464903, 'W_D_1KI': 19.140832595217002, 'J_D_1KI': 4.238448316035651} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.json index 3673122..278787d 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2210, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.475984573364258, "TIME_S_1KI": 4.740264512834506, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2065.6901198005676, "W": 119.44, "J_1KI": 934.701411674465, "W_1KI": 54.04524886877828, "W_D": 83.048, "J_D": 1436.2979995746614, "W_D_1KI": 37.57828054298643, "J_D_1KI": 17.00374685203006} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2205, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.094979763031006, "TIME_S_1KI": 4.578222114753291, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2048.2452651548383, "W": 120.01999999999998, "J_1KI": 928.9094173037815, "W_1KI": 54.43083900226756, "W_D": 83.94674999999998, "J_D": 1432.6240061042902, "W_D_1KI": 38.07108843537414, "J_D_1KI": 17.26579974393385} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.output index 5e9953a..20a5467 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.5103092193603516} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.4761507511138916} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2001, 3993, ..., 19996027, - 19997998, 20000000]), - col_indices=tensor([ 4, 8, 12, ..., 9988, 9991, 9998]), - values=tensor([0.1397, 0.5991, 0.8904, ..., 0.1163, 0.3047, 0.7503]), +tensor(crow_indices=tensor([ 0, 2041, 4060, ..., 19995939, + 19998009, 20000000]), + col_indices=tensor([ 5, 6, 7, ..., 9974, 9989, 9994]), + values=tensor([0.7672, 0.1156, 0.6832, ..., 0.8423, 0.9155, 0.0866]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.7325, 0.0863, 0.4494, ..., 0.5445, 0.3494, 0.7015]) +tensor([0.0377, 0.2575, 0.6177, ..., 0.7817, 0.7902, 0.6305]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 0.5103092193603516 seconds +Time: 0.4761507511138916 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2057', '-ss', '10000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 9.769307136535645} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2205', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.094979763031006} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1965, 3996, ..., 19995929, - 19997992, 20000000]), - col_indices=tensor([ 4, 9, 15, ..., 9975, 9986, 9992]), - values=tensor([0.0708, 0.7889, 0.9973, ..., 0.4384, 0.2830, 0.3299]), +tensor(crow_indices=tensor([ 0, 2010, 4012, ..., 19996040, + 19998053, 20000000]), + col_indices=tensor([ 0, 4, 5, ..., 9985, 9988, 9991]), + values=tensor([0.1815, 0.5931, 0.3008, ..., 0.6351, 0.8074, 0.0454]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.8359, 0.1884, 0.2769, ..., 0.8252, 0.8191, 0.5472]) +tensor([0.7445, 0.8255, 0.9105, ..., 0.5241, 0.5119, 0.2642]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 9.769307136535645 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2210', '-ss', '10000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.475984573364258} +Time: 10.094979763031006 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2066, 4070, ..., 19995990, - 19998002, 20000000]), - col_indices=tensor([ 1, 2, 8, ..., 9986, 9990, 9993]), - values=tensor([0.6258, 0.8376, 0.0180, ..., 0.7990, 0.4511, 0.0511]), +tensor(crow_indices=tensor([ 0, 2010, 4012, ..., 19996040, + 19998053, 20000000]), + col_indices=tensor([ 0, 4, 5, ..., 9985, 9988, 9991]), + values=tensor([0.1815, 0.5931, 0.3008, ..., 0.6351, 0.8074, 0.0454]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.7373, 0.4078, 0.5568, ..., 0.6016, 0.2858, 0.4434]) +tensor([0.7445, 0.8255, 0.9105, ..., 0.5241, 0.5119, 0.2642]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,30 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.475984573364258 seconds +Time: 10.094979763031006 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2066, 4070, ..., 19995990, - 19998002, 20000000]), - col_indices=tensor([ 1, 2, 8, ..., 9986, 9990, 9993]), - values=tensor([0.6258, 0.8376, 0.0180, ..., 0.7990, 0.4511, 0.0511]), - size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.7373, 0.4078, 0.5568, ..., 0.6016, 0.2858, 0.4434]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 20000000 -Density: 0.2 -Time: 10.475984573364258 seconds - -[40.41, 45.21, 40.37, 40.41, 40.26, 39.65, 40.47, 41.87, 39.92, 39.65] -[119.44] -17.294793367385864 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2210, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.475984573364258, 'TIME_S_1KI': 4.740264512834506, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2065.6901198005676, 'W': 119.44} -[40.41, 45.21, 40.37, 40.41, 40.26, 39.65, 40.47, 41.87, 39.92, 39.65, 40.39, 39.85, 39.69, 40.06, 40.15, 39.58, 40.59, 39.58, 39.95, 40.01] -727.8399999999999 -36.391999999999996 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2210, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.475984573364258, 'TIME_S_1KI': 4.740264512834506, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2065.6901198005676, 'W': 119.44, 'J_1KI': 934.701411674465, 'W_1KI': 54.04524886877828, 'W_D': 83.048, 'J_D': 1436.2979995746614, 'W_D_1KI': 37.57828054298643, 'J_D_1KI': 17.00374685203006} +[46.59, 39.84, 39.85, 39.77, 40.13, 40.49, 39.59, 39.67, 39.54, 39.65] +[120.02] +17.065866231918335 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.094979763031006, 'TIME_S_1KI': 4.578222114753291, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2048.2452651548383, 'W': 120.01999999999998} +[46.59, 39.84, 39.85, 39.77, 40.13, 40.49, 39.59, 39.67, 39.54, 39.65, 40.65, 39.88, 39.99, 39.88, 39.9, 39.95, 39.9, 39.56, 40.32, 39.52] +721.465 +36.07325 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.094979763031006, 'TIME_S_1KI': 4.578222114753291, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2048.2452651548383, 'W': 120.01999999999998, 'J_1KI': 928.9094173037815, 'W_1KI': 54.43083900226756, 'W_D': 83.94674999999998, 'J_D': 1432.6240061042902, 'W_D_1KI': 38.07108843537414, 'J_D_1KI': 17.26579974393385} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.json index 21b0219..49ab440 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1434, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 
10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.430570602416992, "TIME_S_1KI": 7.273759136971403, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2171.006755914688, "W": 115.95999999999998, "J_1KI": 1513.9517126322787, "W_1KI": 80.8647140864714, "W_D": 79.27299999999998, "J_D": 1484.1515915973182, "W_D_1KI": 55.281032078103195, "J_D_1KI": 38.55023157468842} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1481, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.553231000900269, "TIME_S_1KI": 7.125746793315509, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2207.1686755657197, "W": 116.60000000000001, "J_1KI": 1490.3232110504523, "W_1KI": 78.73058744091831, "W_D": 80.35075, "J_D": 1520.9919250275493, "W_D_1KI": 54.254388926401084, "J_D_1KI": 36.6336184513174} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.output index 29bb118..88bfec9 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.7320935726165771} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.7448434829711914} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2976, 5887, ..., 29993981, - 29996974, 30000000]), - col_indices=tensor([ 2, 12, 13, ..., 9995, 9997, 9999]), - values=tensor([0.2872, 0.6919, 0.0045, ..., 0.7234, 0.8152, 0.1470]), +tensor(crow_indices=tensor([ 0, 3028, 6029, ..., 29993945, + 29997046, 30000000]), + col_indices=tensor([ 3, 6, 9, ..., 9989, 9994, 9995]), + values=tensor([0.2334, 0.2063, 0.5218, ..., 0.4045, 0.0852, 0.7956]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3759, 0.5048, 0.7452, ..., 0.9323, 0.0206, 0.6020]) +tensor([0.9496, 0.6426, 0.7887, ..., 0.2739, 0.8783, 0.2377]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 0.7320935726165771 seconds +Time: 0.7448434829711914 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1434', '-ss', '10000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.430570602416992} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1409', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.988589763641357} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2946, 5856, ..., 29993956, - 29997054, 30000000]), - col_indices=tensor([ 1, 3, 10, ..., 9992, 9994, 9995]), - values=tensor([0.6658, 0.8893, 0.2640, ..., 0.2436, 0.9944, 0.7745]), +tensor(crow_indices=tensor([ 0, 3038, 6082, ..., 29994099, + 29997000, 30000000]), + col_indices=tensor([ 1, 11, 12, ..., 9989, 9991, 9992]), + values=tensor([0.0184, 0.7499, 0.9505, ..., 0.0295, 0.2513, 0.0037]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.7478, 0.4417, 0.0487, ..., 0.7713, 0.8445, 0.5646]) +tensor([0.7073, 0.1266, 0.0657, ..., 0.5297, 0.8116, 0.3350]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 10.430570602416992 seconds +Time: 9.988589763641357 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1481', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.553231000900269} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2946, 5856, ..., 29993956, - 29997054, 30000000]), - col_indices=tensor([ 1, 3, 10, ..., 9992, 9994, 9995]), - values=tensor([0.6658, 0.8893, 0.2640, ..., 0.2436, 0.9944, 0.7745]), +tensor(crow_indices=tensor([ 0, 3022, 5996, ..., 29993950, + 29996975, 30000000]), + col_indices=tensor([ 0, 1, 4, ..., 9978, 9992, 9996]), + values=tensor([0.2043, 0.8510, 0.1478, ..., 0.0867, 0.6823, 0.3008]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.7478, 0.4417, 0.0487, ..., 0.7713, 0.8445, 0.5646]) +tensor([0.1816, 0.4570, 0.5794, ..., 0.4377, 0.8669, 0.6802]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +56,30 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 10.430570602416992 seconds +Time: 10.553231000900269 seconds -[40.39, 40.27, 40.31, 40.01, 39.78, 40.12, 45.36, 39.94, 40.02, 52.21] -[115.96] -18.722031354904175 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1434, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.430570602416992, 'TIME_S_1KI': 7.273759136971403, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2171.006755914688, 'W': 115.95999999999998} -[40.39, 40.27, 40.31, 40.01, 39.78, 40.12, 45.36, 39.94, 40.02, 52.21, 41.63, 40.16, 40.0, 40.35, 41.93, 39.55, 39.78, 39.55, 39.63, 39.73] -733.74 -36.687 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1434, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.430570602416992, 'TIME_S_1KI': 7.273759136971403, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2171.006755914688, 'W': 115.95999999999998, 'J_1KI': 1513.9517126322787, 'W_1KI': 80.8647140864714, 'W_D': 79.27299999999998, 'J_D': 1484.1515915973182, 'W_D_1KI': 55.281032078103195, 'J_D_1KI': 38.55023157468842} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3022, 5996, ..., 29993950, + 29996975, 30000000]), + col_indices=tensor([ 0, 1, 4, ..., 9978, 9992, 9996]), + values=tensor([0.2043, 0.8510, 0.1478, ..., 0.0867, 0.6823, 0.3008]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.1816, 0.4570, 0.5794, ..., 0.4377, 0.8669, 0.6802]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.553231000900269 seconds + +[40.7, 39.95, 41.13, 39.75, 39.96, 39.81, 39.8, 40.3, 40.1, 40.79] +[116.6] +18.929405450820923 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1481, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.553231000900269, 'TIME_S_1KI': 7.125746793315509, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2207.1686755657197, 'W': 116.60000000000001} +[40.7, 39.95, 41.13, 39.75, 39.96, 39.81, 39.8, 40.3, 40.1, 40.79, 40.83, 39.9, 39.87, 39.53, 39.71, 39.99, 39.53, 44.76, 40.0, 39.47] +724.985 +36.24925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1481, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.553231000900269, 'TIME_S_1KI': 7.125746793315509, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2207.1686755657197, 'W': 116.60000000000001, 'J_1KI': 1490.3232110504523, 'W_1KI': 78.73058744091831, 'W_D': 80.35075, 'J_D': 1520.9919250275493, 'W_D_1KI': 54.254388926401084, 'J_D_1KI': 36.6336184513174} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..251a245 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1051, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.514868974685669, "TIME_S_1KI": 10.004632706646689, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2300.9219420003888, "W": 111.81999999999998, "J_1KI": 2189.269212179247, "W_1KI": 106.39391056137009, "W_D": 75.59924999999998, "J_D": 1555.6069855461712, "W_D_1KI": 71.93078020932444, "J_D_1KI": 68.44032370059413} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..eda5681 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.9982414245605469} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4012, 7981, ..., 39991879, + 39995963, 40000000]), + col_indices=tensor([ 10, 11, 14, ..., 9991, 9992, 9999]), + values=tensor([0.1419, 0.4895, 0.7407, ..., 0.3538, 0.7528, 0.8593]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.6249, 0.2212, 0.6359, ..., 0.9566, 0.8492, 0.7458]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 0.9982414245605469 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1051', '-ss', '10000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.514868974685669} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3996, 7991, ..., 39991996, + 39996043, 40000000]), + col_indices=tensor([ 0, 1, 8, ..., 9995, 9997, 9998]), + values=tensor([0.0226, 0.5124, 0.4820, ..., 0.6985, 0.7901, 0.5930]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.2895, 0.6282, 0.6522, ..., 0.0069, 0.2490, 0.3252]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.514868974685669 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3996, 7991, ..., 39991996, + 39996043, 40000000]), + col_indices=tensor([ 0, 1, 8, ..., 9995, 9997, 9998]), + values=tensor([0.0226, 0.5124, 0.4820, ..., 0.6985, 0.7901, 0.5930]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.2895, 0.6282, 0.6522, ..., 0.0069, 0.2490, 0.3252]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.514868974685669 seconds + +[40.61, 41.42, 39.83, 39.75, 39.61, 39.54, 39.65, 39.44, 40.5, 39.44] +[111.82] +20.5770161151886 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1051, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.514868974685669, 'TIME_S_1KI': 10.004632706646689, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2300.9219420003888, 'W': 111.81999999999998} +[40.61, 41.42, 39.83, 39.75, 39.61, 39.54, 39.65, 39.44, 40.5, 39.44, 41.27, 40.95, 39.61, 39.66, 40.18, 44.62, 39.52, 39.76, 39.99, 39.45] +724.415 +36.220749999999995 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1051, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.514868974685669, 'TIME_S_1KI': 10.004632706646689, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2300.9219420003888, 'W': 111.81999999999998, 'J_1KI': 2189.269212179247, 'W_1KI': 106.39391056137009, 'W_D': 75.59924999999998, 'J_D': 1555.6069855461712, 'W_D_1KI': 71.93078020932444, 'J_D_1KI': 68.44032370059413} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..c052723 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 895, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.780815124511719, "TIME_S_1KI": 12.045603491074546, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2479.408223028183, "W": 110.17, "J_1KI": 2770.288517349925, "W_1KI": 123.09497206703911, "W_D": 73.96549999999999, "J_D": 1664.6153119759558, "W_D_1KI": 82.64301675977653, "J_D_1KI": 92.33856621204082} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..04c06a7 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 1.1728613376617432} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is 
in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5004, 10037, ..., 49990101, + 49995102, 50000000]), + col_indices=tensor([ 1, 2, 4, ..., 9993, 9994, 9999]), + values=tensor([0.6341, 0.8760, 0.7654, ..., 0.7207, 0.8990, 0.3578]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.6162, 0.5934, 0.0411, ..., 0.5420, 0.7601, 0.3985]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 1.1728613376617432 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '895', '-ss', '10000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.780815124511719} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4874, 9936, ..., 49989936, + 49995021, 50000000]), + col_indices=tensor([ 2, 4, 5, ..., 9994, 9996, 9998]), + values=tensor([0.2487, 0.9013, 0.3272, ..., 0.3285, 0.4935, 0.2106]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.1115, 0.0772, 0.3726, ..., 0.1024, 0.4966, 0.6352]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.780815124511719 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4874, 9936, ..., 49989936, + 49995021, 50000000]), + col_indices=tensor([ 2, 4, 5, ..., 9994, 9996, 9998]), + values=tensor([0.2487, 0.9013, 0.3272, ..., 0.3285, 0.4935, 0.2106]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.1115, 0.0772, 0.3726, ..., 0.1024, 0.4966, 0.6352]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.780815124511719 seconds + +[41.06, 40.51, 39.91, 39.52, 39.91, 41.08, 40.18, 39.56, 39.99, 45.52] +[110.17] +22.50529384613037 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 895, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.780815124511719, 'TIME_S_1KI': 12.045603491074546, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2479.408223028183, 'W': 110.17} +[41.06, 40.51, 39.91, 39.52, 39.91, 41.08, 40.18, 39.56, 39.99, 45.52, 40.27, 39.95, 40.09, 41.56, 39.52, 40.05, 39.98, 39.48, 39.55, 39.65] +724.09 +36.2045 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 895, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.780815124511719, 'TIME_S_1KI': 12.045603491074546, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2479.408223028183, 'W': 110.17, 'J_1KI': 2770.288517349925, 'W_1KI': 123.09497206703911, 'W_D': 73.96549999999999, 'J_D': 1664.6153119759558, 'W_D_1KI': 82.64301675977653, 'J_D_1KI': 92.33856621204082} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json index 7eea71b..a50e2bb 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 349456, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.173964023590088, "TIME_S_1KI": 0.03197531026392475, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1252.9686733055114, "W": 97.63, "J_1KI": 3.585483360724988, "W_1KI": 0.2793770889611282, "W_D": 61.798, "J_D": 793.1061976127625, "W_D_1KI": 0.17684057506524428, "J_D_1KI": 0.0005060453249200021} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 360318, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.527426958084106, "TIME_S_1KI": 0.029217044272237598, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1308.6020423936845, "W": 97.79, "J_1KI": 3.631797585448644, "W_1KI": 0.2713991529704317, "W_D": 61.57600000000001, "J_D": 823.9950850028993, "W_D_1KI": 0.17089348852957667, "J_D_1KI": 0.00047428518289282433} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output index 518e71c..fe1a991 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,1024 +1,373 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.018783092498779297} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7710, 770, 4407, 1870, 7174, 5318, 1450, 1184, 2850, - 4403, 2123, 255, 810, 7400, 565, 3550, 9613, 7496, - 6015, 975, 9271, 7248, 4063, 4805, 7196, 7869, 4589, - 3665, 14, 7810, 5136, 5919, 2466, 4173, 9881, 3733, - 1870, 1230, 6453, 5008, 3988, 9434, 1211, 7162, 7902, - 9745, 4411, 5286, 1509, 4833, 7496, 1798, 4, 3586, - 7060, 2811, 8112, 4764, 9435, 3836, 5882, 3125, 9850, - 2118, 4178, 9802, 8785, 5451, 5614, 8254, 6937, 7688, - 7331, 3332, 6913, 3221, 6350, 4032, 1334, 7216, 129, - 9945, 7932, 9294, 1550, 7754, 4771, 5013, 294, 9524, - 9420, 8530, 7837, 2039, 9838, 1792, 5287, 5887, 573, - 7513, 5802, 9931, 5757, 4848, 2498, 9852, 172, 7356, - 7917, 1181, 539, 4514, 1534, 8369, 4234, 9382, 7442, - 6827, 9408, 9891, 2006, 5444, 9700, 4515, 5848, 7202, - 8465, 8202, 7227, 5428, 6226, 7163, 1915, 9424, 7937, - 3286, 7876, 401, 9372, 5269, 534, 8486, 5070, 2186, - 2780, 8211, 5164, 9491, 8801, 2669, 4834, 6041, 906, - 6304, 6419, 5278, 8990, 5734, 1221, 935, 3464, 2356, - 7735, 7209, 5212, 1318, 8268, 9084, 3729, 231, 1156, - 1335, 6818, 8619, 6611, 8933, 2141, 101, 2488, 1604, - 1138, 1432, 2355, 1314, 1098, 1619, 4109, 4763, 1605, - 2774, 7361, 843, 6381, 1534, 9790, 4775, 7164, 7105, - 6239, 3980, 382, 411, 7505, 7659, 3367, 1766, 3195, - 9470, 2528, 9322, 6959, 2216, 491, 8725, 2371, 4382, - 5718, 5217, 7058, 4457, 5435, 1249, 9086, 4957, 5280, - 7131, 4958, 8168, 4939, 4037, 3075, 3367, 7266, 6066, - 5464, 2058, 4013, 2658, 7499, 9138, 8214, 6222, 4674, - 6822, 9675, 5801, 8254, 8831, 8871, 6911, 8523, 885, - 5318, 2866, 8999, 8361, 5772, 5641, 171, 8207, 6823, - 1799, 2385, 3933, 8099, 9437, 4173, 4273, 4399, 7947, - 538, 8128, 6522, 1001, 6458, 9133, 842, 3215, 3937, - 8256, 4826, 8778, 6085, 7131, 1758, 1134, 7002, 8167, - 700, 4064, 7323, 8384, 4717, 7001, 6775, 3204, 701, - 2756, 9559, 5146, 8344, 1565, 2384, 9287, 6270, 6517, - 9569, 3605, 1309, 1171, 4748, 2704, 3906, 6066, 1315, - 4722, 9012, 2854, 9405, 9514, 7715, 2699, 2312, 9518, - 1435, 2191, 85, 1881, 1234, 2331, 4237, 5757, 7932, - 9712, 3332, 7705, 1404, 8567, 6763, 9879, 1736, 3894, - 5687, 8039, 1164, 621, 2677, 2713, 7254, 2093, 2442, - 8214, 7080, 8260, 2130, 5394, 902, 5406, 5871, 8131, - 8425, 1840, 8258, 10, 3515, 7795, 4351, 23, 2317, - 4596, 1004, 626, 9911, 3655, 1067, 5724, 9150, 3004, - 1248, 7595, 4785, 5244, 8336, 4341, 9387, 1506, 8501, - 2147, 1566, 6495, 4554, 7326, 6889, 8027, 651, 2541, - 5072, 7045, 8106, 4631, 6832, 1063, 4955, 3837, 5979, - 
5449, 2272, 9439, 522, 2753, 9349, 9909, 7912, 2140, - 3823, 565, 5366, 4263, 9476, 8456, 9778, 4716, 8861, - 3068, 7529, 8311, 2167, 8097, 9726, 5963, 8961, 8951, - 8141, 2407, 5529, 1677, 2350, 6893, 7197, 3753, 5887, - 8876, 4491, 4643, 8990, 596, 8161, 7846, 2289, 9180, - 7037, 5854, 6062, 8432, 824, 4812, 3129, 8178, 4110, - 9628, 3692, 997, 3675, 9501, 8580, 8343, 4389, 3215, - 2282, 3336, 4379, 5429, 3859, 5531, 1391, 1842, 9054, - 3059, 3924, 4109, 4585, 1402, 5590, 2428, 5822, 4347, - 9652, 4937, 8518, 2936, 4541, 4664, 7316, 3628, 1576, - 100, 6717, 6869, 5782, 4769, 6381, 420, 7325, 9706, - 5457, 6444, 6589, 843, 7648, 9948, 200, 1616, 3739, - 1287, 4261, 1655, 8236, 7002, 480, 4835, 8836, 2589, - 7842, 2577, 3714, 9973, 9407, 6133, 4479, 8904, 166, - 7918, 5703, 6259, 4920, 9996, 9898, 9260, 9581, 5155, - 2417, 2267, 2847, 3259, 4611, 7016, 3338, 8196, 6441, - 9820, 9477, 466, 5776, 9899, 731, 9644, 9431, 476, - 9327, 5566, 7989, 3039, 9320, 1202, 381, 1915, 2369, - 3818, 8016, 6121, 6692, 1702, 1859, 3194, 8553, 3794, - 1815, 8135, 7504, 9443, 5902, 8225, 1819, 3757, 9067, - 7582, 7642, 3549, 7259, 1959, 3004, 6866, 1165, 2517, - 9690, 7905, 3279, 6036, 7950, 6116, 3751, 5294, 4615, - 9314, 6307, 3673, 1490, 4111, 1240, 8263, 4110, 4571, - 2905, 2774, 1985, 3126, 6084, 5700, 1229, 5879, 8154, - 2148, 4874, 2092, 9170, 3482, 664, 5554, 1113, 9484, - 3978, 7225, 3749, 8894, 8534, 5256, 5887, 8419, 5698, - 8427, 8697, 7604, 2812, 2806, 1845, 6003, 3130, 760, - 2497, 2541, 9127, 1210, 1651, 530, 1994, 1040, 8645, - 8666, 9304, 4335, 8289, 1101, 6332, 1007, 8169, 9969, - 943, 4525, 8690, 4159, 1817, 6073, 4957, 596, 6283, - 3051, 4485, 7766, 8516, 526, 7053, 8582, 7056, 943, - 6579, 9174, 2254, 8501, 1773, 956, 877, 7705, 4613, - 5648, 4661, 1809, 2688, 8514, 8802, 4722, 545, 4747, - 6633, 3814, 8935, 7588, 1678, 8488, 5607, 673, 2681, - 8017, 6281, 3685, 3530, 8546, 4879, 7593, 4273, 2876, - 3735, 2848, 836, 940, 4597, 1074, 1122, 8693, 963, - 3887, 7554, 930, 4424, 9656, 8086, 135, 4899, 5218, - 8994, 418, 969, 6615, 8605, 1505, 6523, 1169, 6709, - 5427, 5511, 3558, 5322, 8458, 850, 2892, 4597, 5142, - 8980, 9053, 5609, 1077, 5309, 1757, 306, 460, 2458, - 4048, 8475, 4050, 6980, 4499, 6977, 1259, 1464, 3611, - 8751, 6728, 7071, 9544, 9297, 512, 4042, 6462, 2944, - 5752, 9601, 679, 9648, 7279, 2893, 973, 4996, 1919, - 5484, 6026, 3293, 7948, 7029, 8673, 3172, 2794, 8323, - 821, 1937, 6356, 4283, 5725, 2893, 1377, 9981, 7003, - 5977, 8646, 3216, 6885, 4725, 8141, 3585, 6384, 9261, - 9676, 1311, 3308, 6435, 8132, 8525, 2917, 7023, 2344, - 7600, 4383, 4886, 6233, 7280, 8202, 1726, 1918, 3206, - 8062, 434, 361, 124, 6241, 4326, 7134, 295, 2286, - 4397, 3630, 485, 8219, 5832, 5393, 8505, 8369, 461, - 8188, 4054, 8534, 5850, 8902, 4999, 6423, 3476, 6192, - 6572, 6196, 5040, 4242, 7067, 7952, 3299, 5653, 1326, - 858, 1204, 3430, 462, 9954, 4911, 2910, 3563, 4861, - 4090, 3934, 3405, 8602, 1698, 9993, 6680, 3885, 6072, - 4399, 7396, 8831, 6459, 8768, 6290, 6186, 7949, 3125, - 1261, 9770, 5246, 3605, 357, 7933, 8926, 5531, 3614, - 3729, 5340, 883, 1173, 7534, 8589, 6681, 4108, 8234, - 2703, 7749, 2006, 4313, 509, 3164, 3554, 5906, 4437, - 7829, 1278, 3055, 4732, 6488, 1268, 6449, 6632, 7614, - 45, 1360, 9281, 7504, 2281, 4706, 4960, 4576, 206, - 6864, 5459, 1834, 4347, 1803, 929, 7593, 5081, 9952, - 4949, 323, 8662, 2381, 5486, 2263, 306, 3850, 8532, - 8650, 6620, 5629, 3893, 7949, 275, 3546, 8768, 8253, - 9465, 120, 747, 2625, 603, 4903, 2906, 4444, 8192, - 5259, 2313, 293, 2063, 856, 
8668, 8628, 7481, 5244, - 9114, 8455, 3952, 4502, 7495, 3074, 8209, 6343, 4118, - 8429]), - values=tensor([1.0187e-01, 9.3060e-02, 7.1844e-01, 6.0591e-01, - 1.7420e-01, 4.7341e-01, 4.6521e-01, 2.2759e-01, - 8.5069e-01, 3.6280e-01, 9.4888e-01, 6.2180e-01, - 8.8663e-01, 8.0888e-01, 1.3511e-01, 8.9212e-01, - 4.9632e-01, 7.1062e-02, 8.8913e-01, 3.6555e-01, - 5.7802e-01, 9.5902e-01, 6.7906e-01, 5.7533e-03, - 9.6566e-01, 6.2483e-01, 5.0879e-01, 6.7668e-01, - 9.1177e-01, 4.3629e-01, 2.3313e-01, 8.4588e-01, - 4.5185e-01, 2.4600e-01, 8.2141e-01, 2.8976e-01, - 9.1767e-01, 3.5967e-01, 1.4506e-01, 7.2241e-01, - 6.2974e-01, 6.3780e-01, 8.8090e-01, 2.9092e-01, - 1.6965e-01, 3.6359e-01, 7.3928e-01, 8.5548e-01, - 5.7369e-01, 3.7085e-01, 5.3463e-01, 1.7885e-02, - 8.9381e-01, 5.2509e-01, 7.4178e-01, 4.7149e-01, - 8.9602e-01, 8.5585e-01, 4.7822e-01, 9.9631e-01, - 1.3915e-01, 5.7821e-01, 7.0603e-02, 7.6275e-01, - 3.7581e-01, 4.5089e-01, 1.0040e-01, 2.8821e-01, - 4.0836e-02, 8.7711e-01, 2.5635e-01, 4.7419e-01, - 3.1472e-01, 4.7404e-01, 6.0210e-01, 1.7740e-01, - 3.7222e-01, 1.8530e-02, 6.6757e-01, 7.5811e-02, - 1.3391e-01, 9.8196e-01, 3.7396e-01, 5.2924e-02, - 5.7431e-01, 4.5457e-03, 6.3555e-01, 6.9843e-01, - 8.9610e-01, 8.6400e-01, 8.8516e-01, 3.9290e-01, - 7.0320e-01, 6.1583e-01, 8.4624e-01, 7.7953e-01, - 9.0684e-01, 4.3665e-01, 8.3774e-01, 1.8001e-01, - 4.1709e-01, 3.5144e-01, 3.1082e-02, 5.4723e-01, - 8.2229e-01, 4.0371e-01, 4.8804e-01, 8.0640e-01, - 7.9319e-01, 3.0761e-01, 3.5246e-01, 6.6259e-01, - 7.5795e-01, 3.3689e-03, 2.9805e-01, 4.4254e-01, - 7.3029e-01, 1.1907e-01, 4.1236e-01, 3.9221e-01, - 2.1364e-01, 6.5107e-01, 1.3201e-01, 6.9985e-01, - 2.8995e-01, 6.0845e-01, 7.6109e-01, 5.2480e-01, - 4.7204e-01, 1.9708e-01, 7.3808e-02, 8.5301e-01, - 5.7930e-02, 5.6419e-03, 6.5604e-01, 1.1150e-01, - 2.7579e-01, 5.0248e-01, 8.6125e-01, 5.2749e-01, - 4.8346e-01, 5.3141e-01, 9.3706e-01, 4.5452e-01, - 9.6538e-02, 5.6358e-01, 1.0907e-01, 2.9151e-02, - 2.9892e-01, 8.9018e-01, 6.1924e-02, 4.5372e-02, - 4.1819e-01, 4.4428e-01, 3.8712e-01, 2.9124e-01, - 6.8060e-02, 4.4495e-01, 8.6639e-01, 1.2161e-01, - 1.9770e-01, 8.9914e-01, 7.8519e-01, 7.4160e-01, - 6.3062e-01, 9.6251e-01, 6.3046e-01, 5.4702e-01, - 3.9281e-02, 6.5735e-01, 1.2776e-01, 4.1871e-01, - 9.6472e-01, 8.5551e-01, 3.7850e-01, 2.9965e-01, - 8.3225e-01, 2.3553e-02, 8.0975e-01, 9.3845e-02, - 9.4994e-01, 4.7182e-01, 6.9838e-01, 2.9612e-01, - 5.5496e-01, 6.5603e-01, 4.6633e-01, 4.0017e-01, - 5.0322e-01, 7.8427e-01, 4.3211e-01, 2.4713e-01, - 6.8107e-01, 7.3829e-01, 7.4132e-01, 8.6413e-01, - 9.8284e-01, 4.1509e-01, 4.1459e-01, 5.9436e-01, - 7.1965e-01, 3.9605e-01, 7.9076e-01, 1.5670e-01, - 7.7054e-01, 8.0907e-01, 5.7976e-01, 8.4957e-01, - 7.5938e-01, 2.6316e-01, 3.6742e-01, 6.7139e-01, - 6.2663e-01, 2.8545e-01, 8.4638e-01, 3.3650e-01, - 1.9393e-01, 8.2539e-01, 8.7496e-01, 4.5566e-01, - 3.5352e-01, 6.8968e-01, 9.7918e-01, 8.4556e-01, - 9.0911e-01, 5.3111e-01, 4.9952e-01, 3.4829e-01, - 9.9674e-01, 9.1414e-01, 9.0159e-01, 7.1385e-01, - 4.0399e-01, 1.9201e-02, 6.1073e-02, 9.3500e-01, - 7.7952e-01, 6.8003e-01, 7.1526e-03, 2.5124e-01, - 8.2126e-02, 2.2497e-01, 8.0977e-01, 1.9978e-01, - 9.0065e-01, 6.1026e-01, 1.6002e-01, 9.5304e-01, - 6.4606e-01, 8.9434e-01, 6.6189e-01, 8.8742e-01, - 7.8087e-01, 9.0584e-01, 7.2847e-01, 5.8028e-01, - 6.0780e-01, 4.9432e-01, 9.2651e-01, 9.0691e-01, - 4.3390e-01, 1.2290e-01, 8.1558e-02, 4.9520e-01, - 2.3303e-01, 6.8030e-01, 8.5078e-02, 5.9635e-01, - 4.9854e-01, 3.5607e-01, 2.3071e-02, 2.0342e-01, - 3.3403e-01, 4.7020e-01, 1.1812e-01, 1.9543e-01, 
- 2.3376e-01, 3.8392e-01, 8.7559e-02, 3.5012e-01, - 3.1852e-01, 4.5980e-01, 3.7863e-01, 4.7870e-01, - 8.9047e-01, 9.6861e-01, 1.7266e-01, 8.3507e-01, - 4.3536e-02, 5.5165e-01, 3.4361e-01, 8.9113e-01, - 2.0902e-01, 8.3518e-01, 6.6187e-01, 3.8314e-01, - 8.0300e-01, 6.5042e-01, 6.9120e-01, 6.0188e-01, - 4.8010e-01, 2.7651e-01, 9.0978e-01, 7.1078e-01, - 5.9103e-01, 6.3327e-01, 5.3087e-01, 2.9399e-01, - 7.3804e-01, 4.2389e-01, 1.6557e-01, 8.5186e-01, - 2.3146e-01, 6.6881e-01, 2.2386e-01, 1.5332e-01, - 2.7304e-01, 1.0240e-01, 4.3476e-01, 7.8105e-01, - 5.8972e-02, 7.5568e-01, 7.1605e-01, 8.5101e-01, - 8.7931e-01, 4.0044e-01, 2.4097e-01, 8.2776e-01, - 6.5361e-01, 4.0509e-01, 5.9794e-01, 7.8265e-01, - 3.4318e-01, 5.8254e-01, 2.8341e-01, 6.9728e-01, - 6.3303e-01, 7.0794e-01, 6.7661e-01, 8.4756e-01, - 9.3971e-01, 2.7594e-01, 3.4066e-01, 8.1946e-01, - 5.6985e-01, 2.8557e-01, 3.6155e-01, 9.1018e-01, - 4.4262e-01, 9.1349e-01, 6.9133e-01, 1.1543e-01, - 8.8510e-01, 2.8478e-01, 5.3501e-01, 9.5937e-01, - 3.1066e-01, 2.5479e-01, 6.5123e-01, 2.6004e-01, - 3.6510e-01, 3.3774e-01, 1.0742e-01, 1.6244e-01, - 9.6209e-01, 7.8917e-01, 5.9193e-01, 1.4050e-01, - 4.6784e-01, 1.3862e-01, 6.8783e-01, 3.8758e-01, - 3.8488e-01, 4.5045e-01, 9.8813e-01, 3.1351e-02, - 9.9879e-01, 3.6004e-01, 2.5033e-01, 4.8943e-01, - 9.7100e-01, 8.4453e-01, 6.6768e-01, 8.8569e-02, - 7.1994e-01, 9.2108e-01, 3.8162e-01, 2.4625e-01, - 3.0837e-01, 1.7831e-01, 1.5312e-01, 6.7478e-01, - 3.7105e-01, 2.7460e-01, 5.6371e-01, 7.5959e-01, - 3.7224e-01, 2.1087e-01, 6.5640e-01, 1.1956e-01, - 1.8089e-01, 3.5428e-01, 1.1136e-02, 7.2999e-01, - 4.7290e-02, 3.2416e-01, 9.4583e-01, 2.1888e-01, - 6.7653e-01, 6.1755e-01, 7.2680e-01, 9.9049e-01, - 2.7798e-02, 1.3499e-01, 2.4043e-01, 8.1927e-01, - 6.0770e-01, 9.3564e-01, 4.0376e-01, 6.0613e-01, - 4.2311e-01, 2.4050e-01, 4.2569e-01, 3.2700e-01, - 6.6681e-01, 9.1598e-01, 2.4610e-02, 3.1564e-01, - 7.7390e-01, 5.8683e-01, 2.3132e-01, 2.3585e-01, - 4.6167e-01, 2.3949e-01, 2.4135e-01, 3.5397e-01, - 9.1516e-01, 4.1043e-01, 7.7809e-01, 7.3179e-02, - 5.8349e-01, 2.8828e-01, 6.1494e-01, 2.4378e-01, - 8.2912e-01, 7.3234e-01, 6.8131e-01, 3.3134e-02, - 7.6679e-02, 5.6862e-01, 5.4287e-01, 7.6513e-01, - 6.2055e-01, 5.7089e-01, 7.2783e-01, 2.7541e-01, - 9.3864e-01, 5.8064e-01, 9.2294e-01, 5.6055e-01, - 7.2024e-01, 2.5839e-01, 8.3608e-02, 7.3547e-01, - 7.6127e-02, 3.2870e-01, 7.7548e-02, 6.2544e-01, - 3.8702e-01, 7.9350e-01, 4.0659e-01, 2.4674e-01, - 2.7607e-01, 5.0202e-01, 9.5886e-01, 9.5818e-01, - 3.3974e-02, 9.7595e-01, 7.9134e-01, 9.5878e-01, - 1.0231e-01, 7.7522e-01, 7.3580e-01, 9.2291e-01, - 9.1663e-01, 4.4586e-01, 4.6182e-01, 7.2430e-01, - 9.2178e-04, 8.5347e-01, 5.4921e-01, 4.5417e-01, - 2.2747e-02, 2.5915e-01, 4.5402e-01, 3.1192e-01, - 8.2311e-01, 9.7335e-01, 2.7230e-01, 5.4272e-01, - 8.7519e-01, 9.1728e-01, 2.9016e-01, 1.6495e-01, - 4.2395e-01, 8.5072e-01, 1.9714e-03, 2.7308e-01, - 9.7770e-01, 9.1682e-01, 9.9182e-01, 3.7958e-01, - 7.9282e-01, 1.7761e-01, 6.2555e-01, 6.8101e-01, - 6.7394e-01, 8.8343e-01, 3.4308e-01, 8.5129e-01, - 4.1297e-01, 7.1079e-01, 5.1222e-02, 5.7159e-01, - 6.0001e-01, 5.4284e-01, 5.7793e-01, 3.1507e-01, - 9.8278e-01, 8.2539e-01, 4.6261e-01, 7.3225e-02, - 8.8612e-01, 6.5413e-01, 3.0173e-01, 2.1194e-02, - 3.2441e-01, 3.9085e-01, 8.6803e-01, 7.0461e-01, - 7.6602e-01, 1.9908e-01, 2.9866e-01, 9.3469e-01, - 9.5369e-02, 2.5631e-01, 9.0341e-01, 5.0032e-01, - 6.7851e-01, 9.1421e-01, 5.2777e-01, 2.0974e-01, - 6.0356e-01, 7.4990e-01, 2.5120e-01, 7.3056e-01, - 5.7654e-01, 8.6192e-01, 4.5282e-01, 4.8927e-01, - 
1.0209e-01, 1.4365e-01, 1.0649e-01, 1.5559e-01, - 3.7894e-02, 6.8080e-01, 3.9419e-01, 1.0516e-02, - 2.1907e-01, 7.3503e-01, 4.9516e-01, 9.0746e-01, - 4.9746e-01, 9.1123e-01, 4.1414e-01, 8.3851e-02, - 1.0745e-01, 9.4031e-01, 6.3535e-01, 8.9329e-02, - 7.6318e-01, 5.4906e-01, 3.5425e-01, 2.6120e-01, - 8.3019e-01, 8.3127e-01, 2.5971e-02, 7.0373e-01, - 4.5245e-01, 1.0757e-02, 1.2942e-01, 8.2025e-01, - 7.7421e-01, 1.9773e-01, 2.3885e-01, 9.0482e-01, - 2.8218e-01, 7.5291e-01, 7.6466e-01, 5.2751e-01, - 3.9001e-02, 3.3145e-01, 7.2533e-01, 3.6741e-01, - 9.0607e-01, 7.0040e-01, 4.2489e-01, 4.9042e-01, - 2.3157e-01, 2.5353e-01, 4.2850e-01, 1.3589e-01, - 2.8043e-01, 5.4906e-01, 9.4282e-01, 8.9276e-02, - 1.1162e-01, 8.5177e-01, 7.7036e-01, 7.0436e-02, - 5.3515e-01, 9.3477e-01, 5.8106e-01, 9.6481e-01, - 9.4024e-01, 7.2166e-01, 2.4153e-01, 4.2523e-01, - 3.5659e-01, 6.0512e-01, 2.9269e-01, 8.0189e-01, - 4.4130e-01, 3.9543e-01, 6.0493e-01, 6.3652e-01, - 6.5235e-02, 2.5942e-01, 6.6071e-01, 2.2841e-01, - 3.8493e-02, 3.4606e-01, 6.8945e-01, 3.6160e-01, - 4.8673e-01, 8.7753e-01, 6.6910e-01, 8.8066e-01, - 7.5074e-01, 7.3308e-01, 2.4525e-01, 1.5114e-01, - 2.3688e-01, 6.3304e-01, 1.9778e-01, 6.5651e-01, - 2.9999e-01, 6.9139e-01, 9.5397e-01, 9.8093e-01, - 3.1673e-01, 1.1094e-02, 8.6636e-01, 7.6271e-01, - 4.9755e-02, 3.7028e-01, 7.5132e-01, 1.0659e-01, - 7.1971e-02, 4.3799e-01, 3.8666e-01, 8.3342e-01, - 6.1201e-01, 8.8591e-01, 9.8481e-01, 6.1100e-01, - 2.3665e-01, 8.8731e-01, 3.7244e-01, 9.9093e-01, - 1.0523e-01, 4.3914e-01, 6.7271e-01, 7.1795e-02, - 4.5039e-01, 8.5481e-01, 6.8785e-01, 1.2909e-01, - 5.7161e-01, 4.2390e-01, 7.6292e-01, 7.7430e-01, - 3.4998e-01, 8.0385e-01, 2.4192e-01, 1.8999e-01, - 3.6927e-02, 6.7924e-01, 3.3984e-01, 4.8839e-01, - 5.0435e-01, 9.1106e-01, 8.7183e-01, 9.3404e-01, - 7.4605e-01, 1.5313e-01, 9.5520e-01, 2.0581e-01, - 8.7214e-01, 1.4978e-01, 9.4661e-02, 7.5151e-01, - 5.6273e-01, 4.2754e-01, 2.4315e-03, 1.1371e-02, - 8.0237e-01, 5.4754e-01, 2.6562e-01, 6.4585e-01, - 2.4939e-01, 5.2355e-01, 7.6659e-01, 1.1318e-01, - 6.1209e-01, 3.7856e-01, 7.3262e-01, 5.2861e-01, - 6.9979e-01, 3.6198e-01, 8.5596e-01, 9.1891e-01, - 7.8492e-01, 6.1844e-01, 8.1427e-01, 3.6322e-01, - 7.6290e-02, 3.5808e-01, 5.2445e-02, 9.3998e-01, - 8.7392e-01, 3.5312e-01, 5.3949e-01, 6.0424e-01, - 1.2667e-01, 5.7768e-01, 8.0920e-02, 5.9784e-01, - 1.6667e-01, 5.3329e-01, 7.7051e-01, 2.4444e-01, - 5.4399e-01, 1.7330e-01, 9.2604e-01, 7.7836e-01, - 7.0428e-01, 9.4967e-01, 6.7980e-01, 6.7804e-01, - 3.5864e-02, 2.5274e-01, 9.2254e-01, 7.9618e-01, - 4.4765e-01, 7.0517e-01, 9.5345e-01, 8.4152e-01, - 1.7730e-01, 8.2868e-01, 9.0518e-01, 6.7487e-02, - 9.0151e-01, 1.0078e-02, 1.8471e-01, 7.8913e-01, - 4.7998e-01, 7.7939e-01, 1.8432e-01, 6.7453e-01, - 4.6409e-01, 3.5097e-01, 5.1731e-01, 6.5674e-01, - 5.7423e-01, 9.0521e-01, 7.9832e-01, 5.1312e-01, - 7.4221e-01, 6.1487e-01, 7.2648e-01, 5.1655e-01, - 8.6038e-01, 4.0014e-01, 7.3141e-01, 1.9754e-01, - 6.2998e-02, 1.4990e-01, 6.3575e-01, 4.4178e-01, - 9.7002e-01, 4.0844e-01, 8.9150e-01, 9.0679e-01, - 1.1578e-01, 6.5042e-02, 6.7489e-01, 6.4330e-01, - 4.7278e-01, 4.3936e-02, 1.5943e-01, 1.6540e-01, - 6.8697e-01, 8.3274e-01, 6.2265e-01, 6.5555e-01, - 7.5682e-04, 9.0246e-01, 7.4646e-02, 1.0355e-01, - 4.0761e-01, 7.4609e-01, 2.0902e-01, 3.1042e-01, - 1.7561e-01, 6.6688e-02, 6.0252e-01, 1.8240e-01, - 2.0381e-02, 6.2593e-02, 6.0495e-01, 1.1569e-01, - 7.6802e-02, 7.6070e-01, 8.1046e-01, 6.4437e-01, - 3.0459e-01, 8.7910e-01, 3.7994e-01, 2.7927e-01, - 6.6902e-02, 4.4550e-01, 8.7868e-01, 5.1148e-02, - 
3.7950e-01, 7.1735e-01, 7.3888e-01, 9.8846e-01, - 9.4200e-01, 3.9824e-01, 2.8120e-01, 6.0547e-01, - 7.3369e-01, 3.7128e-01, 2.6315e-01, 2.1216e-01, - 5.2662e-01, 1.2747e-02, 6.0734e-02, 9.7093e-01, - 5.7339e-01, 8.1099e-01, 2.1891e-01, 9.8452e-01, - 8.1244e-01, 5.1478e-01, 8.6700e-01, 4.3527e-01, - 1.5690e-01, 5.7105e-01, 2.3991e-01, 2.5589e-01, - 1.2496e-01, 2.7035e-01, 9.8064e-02, 4.6583e-01, - 6.9609e-01, 6.3492e-01, 6.7879e-01, 2.9228e-01, - 3.7713e-01, 8.8354e-01, 7.9828e-01, 2.7429e-01, - 3.2010e-01, 3.4870e-01, 7.3549e-01, 9.9698e-01, - 8.0784e-01, 2.8592e-01, 4.7241e-01, 7.1215e-01, - 9.8854e-01, 9.2548e-01, 3.7415e-01, 6.5025e-01, - 5.6132e-01, 5.7550e-01, 1.9049e-01, 9.3661e-02, - 7.0370e-01, 2.4399e-01, 8.8730e-01, 3.4681e-01, - 8.1782e-01, 7.0824e-01, 4.1793e-01, 4.9273e-01, - 1.5299e-01, 9.9841e-01, 4.9900e-01, 4.2334e-01, - 7.8859e-01, 3.3689e-01, 1.3827e-01, 5.2274e-01, - 8.2524e-01, 4.7324e-01, 1.6809e-01, 8.3103e-01, - 2.3078e-01, 2.4739e-02, 3.6660e-01, 2.9660e-01, - 8.3582e-01, 6.6770e-01, 5.1626e-01, 1.7800e-02, - 4.9796e-01, 5.8092e-01, 3.3015e-01, 7.0134e-01, - 3.7040e-01, 3.8461e-01, 6.1818e-01, 3.8031e-01, - 7.7129e-01, 1.1370e-02, 4.1709e-01, 3.1538e-01, - 4.6468e-01, 2.7931e-01, 7.6062e-01, 9.2389e-01, - 1.6115e-01, 5.9450e-01, 7.3925e-01, 5.6612e-01, - 8.8551e-01, 3.5357e-01, 5.2400e-02, 2.6849e-01, - 9.4801e-01, 5.3116e-01, 3.0980e-01, 4.0228e-01, - 6.9226e-01, 4.7104e-01, 1.8029e-01, 6.9745e-01, - 4.2109e-01, 5.1660e-01, 8.5430e-01, 5.8448e-01, - 2.6279e-01, 3.2066e-01, 1.2135e-01, 1.3921e-01, - 8.6237e-01, 7.7077e-01, 5.2734e-01, 6.8075e-01, - 4.5462e-01, 9.2154e-02, 4.7049e-01, 6.0273e-01, - 8.9532e-01, 7.8371e-01, 1.1158e-01, 4.5607e-01, - 4.3482e-01, 2.9216e-01, 5.6974e-01, 5.3652e-01, - 4.7961e-01, 5.8789e-01, 7.9824e-02, 8.6520e-01, - 5.4582e-01, 4.2012e-01, 6.6184e-01, 5.0529e-01, - 4.4942e-01, 7.7487e-01, 1.9653e-01, 1.0956e-01, - 5.8909e-01, 6.1073e-01, 5.5245e-01, 6.0942e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4323, 0.3291, 0.3785, ..., 0.2185, 0.3372, 0.4003]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 0.018783092498779297 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '55901', '-ss', '10000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.679638147354126} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7474, 402, 4271, 3233, 8082, 7229, 4634, 7787, 4139, - 3184, 8224, 1171, 3944, 602, 9794, 2437, 4751, 9537, - 8646, 3618, 186, 1642, 8812, 1242, 6520, 4714, 2375, - 2593, 5999, 3487, 2542, 2298, 7454, 1919, 5592, 9881, - 6340, 6606, 1382, 7859, 8424, 8722, 1593, 4453, 3038, - 9025, 3357, 7978, 9937, 4294, 2449, 9575, 6564, 6276, - 883, 9180, 2863, 9885, 8632, 9035, 8425, 5850, 610, - 1914, 1347, 7858, 6976, 4743, 6956, 1699, 1661, 6170, - 336, 195, 1588, 176, 1410, 3810, 8376, 7753, 8420, - 5111, 9143, 2252, 3445, 968, 2649, 4802, 4386, 4390, - 3856, 5946, 9251, 5561, 7673, 4626, 6108, 802, 2218, - 8978, 5078, 9764, 5448, 4147, 233, 4734, 7018, 8424, - 5226, 2598, 3429, 28, 4519, 7944, 9855, 3672, 3809, - 5753, 8466, 4146, 6093, 810, 8061, 4052, 9601, 4604, - 818, 6420, 4798, 7209, 3129, 9918, 6820, 1159, 4560, - 3413, 3996, 900, 8498, 4167, 8888, 5552, 1503, 8624, - 8190, 8742, 253, 6403, 2361, 9538, 7803, 9212, 3306, - 561, 4066, 1545, 8624, 9070, 817, 9505, 5304, 9902, - 9588, 4190, 4564, 1941, 5789, 4881, 7808, 2742, 9896, - 9814, 8308, 9966, 306, 3204, 483, 7727, 7743, 781, - 1084, 1996, 9435, 2002, 1910, 9365, 9283, 1276, 9833, - 2109, 8937, 8736, 1298, 9466, 2942, 4000, 6727, 6715, - 5783, 3492, 4474, 2726, 7300, 8062, 8665, 182, 4401, - 2159, 8816, 1208, 122, 1779, 9734, 1941, 8593, 4497, - 8591, 2043, 7547, 200, 3582, 5291, 5588, 8296, 7784, - 5573, 53, 5585, 6863, 7705, 8912, 113, 6393, 7106, - 1899, 4638, 1767, 509, 5809, 3099, 2012, 4241, 3965, - 6050, 4378, 8158, 2543, 8935, 2526, 8040, 7916, 1251, - 8490, 1561, 7903, 7690, 3879, 5358, 4429, 5514, 1218, - 3181, 2273, 5271, 1374, 6798, 3010, 1069, 7396, 4595, - 4108, 3810, 5708, 259, 495, 452, 9294, 2754, 9327, - 5207, 2007, 7593, 8569, 4568, 4691, 8679, 4288, 2957, - 7944, 5890, 6915, 2899, 6454, 4798, 8719, 7914, 697, - 5029, 6407, 1105, 1480, 7252, 9894, 4850, 778, 1134, - 7817, 1564, 3689, 9871, 7592, 7381, 1205, 2768, 1398, - 2224, 3669, 4966, 2550, 6171, 4076, 569, 837, 1810, - 1565, 4292, 9588, 8580, 4720, 6023, 3245, 902, 3324, - 2882, 6953, 3218, 1304, 2699, 534, 9799, 9291, 5836, - 2609, 4360, 3972, 550, 3300, 9664, 5239, 7619, 5234, - 3913, 9667, 3284, 8888, 6535, 1525, 8741, 9815, 3428, - 6435, 7905, 5354, 8689, 2951, 4126, 6289, 8090, 8264, - 7102, 591, 6806, 9220, 9527, 6407, 2999, 4388, 5788, - 7840, 8653, 7644, 5096, 2704, 5155, 7094, 8131, 964, - 2988, 6101, 3237, 6695, 2278, 2679, 1832, 9266, 822, - 413, 20, 1159, 2063, 1047, 13, 4951, 4358, 2145, - 3079, 4853, 5128, 3539, 379, 4698, 535, 2243, 9366, - 2957, 8548, 2180, 957, 8760, 3114, 6528, 8576, 740, - 2977, 9372, 7207, 5125, 7766, 9898, 1447, 7912, 2688, - 8526, 7536, 9880, 7461, 314, 3118, 4813, 9877, 2664, - 6841, 1277, 4285, 3049, 3052, 6605, 7912, 1719, 1547, - 1830, 754, 4077, 8603, 2931, 1461, 7307, 7420, 5528, - 7680, 3465, 9981, 3600, 2391, 5835, 5280, 2395, 7655, - 3823, 4572, 4401, 9732, 8071, 8770, 9859, 6200, 7651, - 7986, 4568, 3522, 7352, 4261, 14, 2749, 8122, 4595, - 4871, 4534, 8033, 6163, 9675, 4652, 9505, 5908, 2354, - 4770, 696, 8663, 201, 9616, 2877, 726, 9392, 4651, - 1284, 7500, 1237, 7136, 9476, 1417, 4564, 5733, 1714, - 2018, 4023, 8645, 1744, 8472, 4682, 3125, 1339, 5985, - 8156, 7481, 3542, 4239, 5035, 4828, 295, 9246, 919, - 1946, 7126, 8957, 1335, 1854, 4461, 2038, 4656, 9164, - 6688, 4854, 2201, 2533, 6427, 5659, 6924, 9837, 9889, - 896, 1352, 2294, 2913, 9291, 8315, 7856, 6541, 1652, - 
5978, 6806, 4867, 7270, 6309, 878, 6658, 3440, 7761, - 8300, 363, 3538, 3015, 6228, 677, 6171, 2029, 7473, - 8711, 6599, 5750, 7300, 1942, 5038, 4232, 8403, 9898, - 2781, 8752, 6564, 9145, 7463, 7228, 6378, 7276, 760, - 6688, 4993, 7962, 1680, 6992, 5243, 8266, 7625, 3992, - 1426, 3102, 8691, 6168, 4229, 3244, 1791, 752, 2816, - 7848, 8662, 1165, 8574, 678, 5737, 5869, 4196, 8060, - 2226, 3670, 2019, 1405, 2650, 1175, 5521, 886, 8428, - 8384, 9876, 6357, 7281, 7347, 2948, 3477, 9159, 4891, - 9619, 7188, 1356, 4593, 9718, 829, 6537, 9136, 1319, - 9881, 1493, 3880, 4611, 904, 1159, 6772, 947, 9108, - 3713, 6279, 5568, 6957, 9320, 8603, 3315, 7261, 1083, - 7955, 6619, 1533, 4035, 1644, 6380, 3159, 7664, 6635, - 7010, 4484, 7555, 4741, 2029, 9507, 9463, 7724, 3939, - 6630, 3148, 5513, 6818, 5349, 9426, 2527, 9088, 5175, - 3398, 3562, 3698, 5546, 9677, 9645, 1089, 4987, 1588, - 5400, 997, 8204, 7080, 1581, 6450, 5721, 2570, 8985, - 9655, 7207, 384, 487, 9982, 884, 3779, 2760, 5096, - 1349, 1888, 4225, 3395, 3970, 4646, 7583, 9619, 3024, - 226, 9037, 2697, 4047, 9011, 9620, 6892, 9658, 8405, - 9974, 4082, 2185, 8401, 6453, 4955, 9910, 2090, 7872, - 2973, 4384, 4551, 5019, 2072, 3985, 5566, 5818, 2739, - 7315, 3918, 7030, 2407, 1085, 759, 7595, 1724, 8345, - 4101, 915, 233, 8369, 8491, 6816, 6359, 2000, 6105, - 5328, 7003, 1174, 3746, 9600, 4454, 8912, 6274, 812, - 5016, 7930, 3968, 4977, 2233, 9247, 632, 534, 6275, - 4526, 9112, 1607, 6533, 7979, 5590, 6773, 2615, 6613, - 2449, 8635, 4735, 3037, 3616, 5594, 7556, 815, 9178, - 233, 966, 8555, 5895, 3211, 3489, 4949, 4399, 2015, - 4444, 9117, 1201, 1746, 5340, 7041, 9002, 4285, 8899, - 8799, 4926, 9759, 4966, 2564, 7672, 3383, 5379, 1321, - 488, 8135, 4402, 2898, 8060, 6845, 910, 9794, 2420, - 2773, 5897, 4123, 2035, 1978, 7074, 626, 2443, 5219, - 4190, 1817, 5925, 2603, 27, 5888, 5141, 5613, 7485, - 3221, 2555, 2781, 2440, 1444, 2932, 9889, 3263, 8142, - 1522, 8477, 736, 6138, 5110, 1155, 1226, 4897, 782, - 1576, 5524, 7332, 4653, 896, 1859, 1494, 8755, 1068, - 2329, 8628, 6988, 8155, 2169, 8370, 9504, 6113, 7726, - 1798, 1231, 4601, 1532, 8271, 5057, 5133, 1550, 8115, - 4603, 9258, 4758, 4981, 6773, 6250, 2595, 6963, 7362, - 8398, 4742, 9289, 5866, 7042, 2952, 9132, 5059, 5200, - 1236, 5265, 6298, 8382, 9486, 7607, 2, 1189, 6527, - 9288, 783, 3711, 4547, 589, 3935, 7821, 5344, 7939, - 2620, 4710, 7997, 3225, 3819, 3494, 762, 1601, 6882, - 1087, 4501, 3360, 7246, 4102, 4591, 331, 2833, 4244, - 7334, 7150, 2705, 1786, 1120, 7881, 1701, 7739, 6367, - 3566, 8207, 3982, 7649, 4077, 4547, 3041, 7716, 2492, - 1787, 4571, 2117, 777, 4970, 7293, 9061, 999, 9637, - 6539, 4905, 3402, 2942, 3746, 2946, 453, 9138, 9033, - 52]), - values=tensor([6.5468e-01, 1.4127e-01, 9.2428e-02, 5.9432e-01, - 6.5125e-01, 1.7186e-02, 9.5240e-01, 7.4522e-01, - 6.1665e-01, 9.7634e-01, 8.0519e-01, 2.5245e-01, - 5.9150e-01, 2.5213e-02, 1.2295e-01, 5.0980e-01, - 9.3599e-01, 9.5879e-02, 6.2288e-01, 7.3794e-01, - 4.6483e-01, 2.7344e-01, 7.8522e-01, 4.5768e-01, - 5.0021e-01, 2.1527e-01, 4.8710e-01, 7.8890e-01, - 9.6876e-01, 2.1589e-01, 6.7212e-01, 5.0782e-02, - 6.5297e-01, 9.2223e-01, 6.4293e-01, 7.3252e-01, - 5.7216e-01, 8.1965e-01, 6.4715e-01, 5.7331e-01, - 3.4892e-01, 7.1783e-01, 5.9625e-01, 5.2807e-01, - 3.2411e-01, 4.1204e-03, 4.2174e-01, 2.6368e-01, - 9.3583e-01, 3.2853e-01, 7.7884e-01, 9.3648e-01, - 2.4406e-01, 8.3021e-01, 8.9553e-01, 9.5575e-01, - 8.2181e-01, 8.3678e-01, 6.6541e-01, 7.9095e-01, - 2.7910e-01, 1.3023e-02, 8.3080e-01, 1.8069e-01, - 8.4296e-02, 9.0477e-01, 
9.2088e-01, 7.8766e-01, - 6.7970e-01, 6.3044e-01, 6.4393e-01, 2.5343e-02, - 9.4176e-01, 7.7297e-03, 2.9408e-03, 1.1966e-02, - 4.5537e-01, 8.5939e-01, 2.1291e-01, 7.3170e-01, - 1.3142e-01, 2.3822e-01, 3.6079e-01, 2.1476e-01, - 2.2288e-01, 5.3629e-01, 1.9466e-01, 2.2744e-01, - 4.9497e-01, 4.2304e-01, 2.2479e-01, 2.5827e-01, - 6.0850e-01, 7.9026e-01, 9.2549e-01, 4.4427e-01, - 7.3695e-01, 1.4928e-01, 8.5298e-01, 6.8729e-01, - 8.7128e-01, 1.9788e-01, 1.6850e-01, 2.3224e-01, - 7.3516e-01, 7.9172e-01, 5.9172e-01, 4.2299e-01, - 1.9668e-01, 2.9149e-01, 9.2247e-01, 3.6969e-01, - 3.5744e-01, 8.0301e-01, 4.5058e-01, 4.3867e-01, - 7.2633e-01, 2.0703e-01, 8.1141e-01, 5.6927e-01, - 4.2970e-01, 8.9740e-01, 4.4980e-03, 1.4946e-01, - 6.6063e-01, 9.5086e-01, 5.4202e-01, 4.5991e-01, - 6.1559e-01, 1.1459e-01, 1.1966e-01, 1.5409e-01, - 9.1550e-02, 8.1800e-01, 6.5368e-01, 6.6150e-01, - 8.3864e-01, 8.0492e-01, 3.1258e-01, 7.2768e-01, - 2.5795e-01, 9.0517e-01, 3.9278e-02, 1.2073e-01, - 7.4348e-01, 2.4786e-01, 7.9648e-01, 9.1521e-01, - 9.3120e-01, 6.7637e-01, 8.2965e-01, 5.5535e-01, - 4.5810e-01, 8.7278e-01, 8.9360e-01, 7.2430e-01, - 8.7103e-01, 8.9370e-01, 4.6244e-01, 6.2316e-01, - 1.7015e-02, 7.8933e-01, 8.3526e-01, 2.4711e-01, - 4.4930e-02, 1.8375e-01, 1.0750e-02, 5.6333e-01, - 2.6142e-01, 7.2245e-01, 7.2771e-01, 5.8421e-01, - 8.8772e-01, 9.9297e-01, 5.0362e-01, 5.6718e-01, - 3.7669e-01, 7.6483e-01, 1.1379e-03, 5.6667e-02, - 3.8279e-01, 8.1423e-01, 7.2754e-01, 7.7699e-01, - 8.6811e-01, 5.1382e-01, 6.3474e-01, 4.6463e-01, - 2.1111e-01, 4.6935e-01, 3.3339e-01, 5.6516e-01, - 5.4652e-01, 7.4076e-01, 6.7244e-01, 7.7735e-01, - 3.9087e-01, 4.5901e-01, 5.5385e-01, 6.8343e-01, - 3.5511e-01, 9.7234e-01, 2.6306e-01, 3.0582e-01, - 4.3505e-01, 7.9083e-01, 1.3138e-01, 9.8500e-01, - 6.1727e-01, 5.7070e-01, 2.6766e-01, 9.8175e-01, - 7.4367e-01, 6.4888e-01, 7.9271e-01, 4.5871e-01, - 6.5195e-01, 5.1494e-01, 2.4723e-01, 2.2316e-01, - 1.6363e-02, 3.2662e-01, 8.4102e-01, 7.5499e-01, - 5.1587e-01, 2.0677e-01, 8.1494e-02, 3.6295e-01, - 9.9312e-01, 7.7645e-02, 4.9895e-01, 7.0330e-01, - 2.0760e-02, 2.9267e-01, 7.2543e-01, 4.0653e-01, - 9.5142e-01, 4.4437e-01, 6.7606e-01, 2.5093e-01, - 8.6923e-01, 4.3498e-01, 6.2468e-01, 8.8420e-01, - 8.7101e-01, 2.9757e-01, 8.0408e-01, 3.1026e-01, - 9.8217e-01, 2.5237e-01, 9.8783e-02, 1.7549e-02, - 3.4025e-01, 5.4986e-01, 9.8266e-02, 9.7562e-01, - 8.5991e-01, 3.8755e-01, 6.8829e-01, 3.8024e-01, - 6.3418e-02, 4.0307e-01, 8.2416e-01, 6.7926e-01, - 1.4744e-01, 3.7698e-01, 1.5581e-01, 1.0774e-02, - 1.7693e-01, 8.6081e-01, 7.4972e-01, 3.3748e-01, - 8.1871e-01, 8.4121e-01, 8.1049e-02, 9.2583e-02, - 7.8722e-01, 6.8745e-01, 2.7982e-01, 9.7242e-01, - 7.2433e-01, 8.4648e-01, 3.6121e-01, 5.7503e-01, - 6.4006e-01, 3.0470e-01, 6.4213e-01, 6.6883e-01, - 4.9966e-01, 8.0032e-01, 2.8840e-01, 4.9434e-01, - 9.0461e-01, 4.8427e-01, 9.6448e-01, 8.4693e-02, - 5.4306e-01, 4.0319e-01, 7.6967e-02, 6.6348e-01, - 6.9692e-01, 3.7373e-01, 2.0746e-01, 5.5117e-01, - 5.0787e-01, 9.7123e-03, 1.9341e-01, 7.6638e-01, - 9.0915e-01, 4.8971e-01, 1.5754e-01, 4.0329e-01, - 7.0863e-01, 5.7428e-01, 3.2445e-01, 3.7045e-02, - 7.6333e-01, 3.2380e-01, 4.3436e-01, 4.3863e-01, - 6.6006e-01, 8.8965e-01, 6.4759e-01, 6.8022e-01, - 5.0229e-01, 5.7813e-01, 1.6286e-02, 6.7272e-01, - 7.8948e-01, 7.6149e-01, 3.7736e-01, 6.5737e-01, - 2.7235e-01, 5.4383e-01, 3.2413e-01, 1.7815e-02, - 8.4094e-01, 7.8613e-01, 7.6281e-01, 8.9462e-01, - 8.3942e-01, 4.8247e-01, 6.2688e-01, 6.4393e-01, - 4.4888e-01, 3.6597e-01, 7.3200e-01, 5.3547e-01, - 1.1008e-01, 3.6434e-01, 
2.5069e-01, 7.4258e-01, - 4.9019e-01, 8.4201e-01, 8.0305e-01, 7.7847e-01, - 7.8059e-01, 3.7616e-01, 8.1493e-01, 5.1010e-01, - 4.5501e-01, 5.5581e-01, 4.4741e-02, 9.9913e-02, - 3.7803e-01, 3.9358e-01, 1.5395e-01, 3.4878e-01, - 9.0430e-01, 6.0733e-02, 5.7344e-01, 2.5730e-01, - 7.7168e-01, 3.6176e-02, 9.4183e-01, 9.1086e-01, - 6.4821e-01, 4.9507e-01, 7.2036e-01, 1.4739e-01, - 6.3503e-01, 1.9288e-01, 4.2898e-02, 5.6821e-01, - 8.8470e-01, 4.5399e-01, 8.8041e-03, 1.5915e-01, - 7.0359e-01, 5.7698e-01, 8.8443e-01, 3.4394e-01, - 3.0485e-01, 4.4242e-01, 3.4913e-01, 7.9540e-01, - 5.5193e-02, 4.3887e-02, 6.1585e-01, 7.0753e-01, - 5.1805e-01, 6.1761e-01, 6.8989e-01, 2.1920e-02, - 3.8471e-01, 4.0989e-01, 5.5155e-01, 6.9767e-01, - 5.2420e-01, 3.7373e-01, 3.2347e-01, 9.1508e-01, - 3.6032e-01, 7.9389e-01, 6.6820e-01, 8.6269e-01, - 6.7740e-01, 6.3416e-01, 4.8768e-01, 4.0602e-01, - 4.9812e-01, 8.4111e-01, 8.4279e-01, 5.5415e-02, - 8.1731e-01, 1.4413e-01, 6.5361e-01, 1.7266e-01, - 4.1468e-01, 2.0062e-01, 7.2803e-01, 6.2329e-01, - 1.0537e-01, 5.5486e-01, 3.2931e-01, 7.7181e-01, - 3.4008e-01, 7.8573e-02, 8.1483e-02, 3.2319e-01, - 5.5933e-01, 4.0264e-01, 2.3848e-01, 7.8816e-01, - 9.0911e-01, 6.3020e-01, 3.6047e-01, 3.6079e-01, - 2.2380e-01, 4.8150e-01, 4.9177e-01, 8.1853e-01, - 4.0528e-01, 2.9267e-01, 8.7137e-01, 4.4712e-01, - 7.8846e-01, 4.5490e-01, 3.4711e-01, 4.8177e-01, - 3.4496e-01, 6.1416e-01, 7.6675e-01, 6.6943e-01, - 2.9519e-01, 6.5316e-01, 8.7975e-01, 7.8729e-01, - 9.1207e-01, 8.4710e-01, 2.7400e-01, 3.1100e-01, - 8.2089e-02, 9.4351e-02, 2.4426e-01, 5.9016e-02, - 4.9262e-01, 6.9916e-02, 2.2964e-02, 5.0292e-01, - 9.1043e-01, 9.4623e-01, 4.6797e-01, 3.9579e-01, - 8.6736e-01, 7.6480e-01, 7.9036e-01, 3.2983e-01, - 4.2598e-01, 5.3954e-01, 9.9338e-01, 1.5591e-03, - 5.1260e-01, 7.1729e-01, 7.0432e-01, 6.4879e-01, - 2.0475e-01, 1.3935e-01, 4.2309e-01, 3.2121e-01, - 5.5416e-01, 5.8721e-01, 2.1480e-01, 6.1175e-01, - 3.9361e-01, 3.6750e-01, 8.0177e-01, 4.1339e-01, - 6.9380e-01, 7.5490e-01, 8.6979e-01, 9.3450e-01, - 4.8427e-01, 6.5258e-01, 2.2409e-01, 2.6382e-01, - 7.1099e-01, 2.0483e-01, 5.7935e-01, 2.2667e-01, - 4.0693e-01, 3.4867e-01, 6.5637e-01, 6.6115e-01, - 9.7780e-01, 1.8873e-01, 2.5250e-01, 9.0736e-01, - 8.1793e-01, 5.3495e-01, 8.8298e-01, 3.0053e-02, - 7.0905e-01, 3.5784e-01, 4.0520e-01, 8.0837e-01, - 7.7093e-01, 7.3130e-01, 5.7167e-01, 2.3679e-01, - 9.2100e-01, 3.3596e-01, 9.3138e-02, 4.5639e-01, - 1.1073e-01, 7.1548e-01, 5.4077e-01, 4.3521e-01, - 6.9792e-01, 7.3280e-02, 9.4501e-01, 5.6171e-01, - 9.2932e-01, 6.2774e-01, 8.4495e-01, 6.6811e-01, - 3.4443e-01, 2.0575e-01, 1.2763e-01, 2.8694e-01, - 4.9355e-01, 4.7717e-01, 2.1047e-01, 6.0261e-01, - 3.7883e-01, 7.5454e-01, 8.2122e-01, 5.7723e-01, - 7.7142e-01, 5.5063e-02, 6.0631e-01, 8.2983e-01, - 7.4182e-02, 3.3580e-01, 8.5823e-01, 5.3831e-01, - 4.5795e-01, 8.3477e-01, 5.1350e-01, 9.3417e-01, - 5.4356e-01, 2.5336e-01, 6.1465e-01, 3.4845e-01, - 1.8660e-01, 6.6369e-01, 1.3363e-01, 2.2671e-02, - 5.3456e-02, 1.7256e-01, 6.1136e-01, 2.5285e-01, - 7.5915e-01, 9.3478e-01, 4.1491e-01, 2.6218e-02, - 3.5023e-01, 2.1390e-01, 6.4510e-01, 9.6921e-01, - 5.8877e-01, 4.9975e-01, 2.1645e-01, 3.8917e-01, - 7.1814e-01, 4.7198e-01, 7.4306e-01, 4.1652e-01, - 8.0271e-01, 4.2433e-01, 8.8599e-01, 7.2558e-01, - 3.0764e-01, 9.3710e-01, 5.6792e-01, 6.9030e-01, - 9.3938e-01, 8.8332e-01, 4.5213e-01, 6.4834e-01, - 1.9065e-01, 2.9216e-01, 7.9948e-01, 9.8870e-01, - 2.6744e-01, 8.7429e-02, 1.6926e-01, 9.8162e-01, - 3.1604e-01, 2.8905e-01, 6.9247e-01, 7.0212e-01, - 6.2858e-01, 2.5725e-01, 
4.1328e-01, 3.9903e-01, - 1.7692e-01, 6.0405e-01, 9.0258e-01, 1.6863e-01, - 2.1314e-01, 7.8599e-01, 7.8953e-01, 9.3110e-01, - 3.4957e-01, 8.0046e-01, 1.6543e-01, 3.2844e-01, - 5.9632e-03, 6.5679e-01, 9.2040e-02, 3.3725e-01, - 6.7726e-01, 7.1442e-01, 1.8081e-01, 4.4483e-01, - 7.0590e-01, 1.0540e-01, 8.2332e-01, 1.9922e-01, - 1.3106e-01, 1.4727e-01, 1.7056e-01, 2.2487e-01, - 8.1177e-01, 4.5516e-01, 3.6043e-01, 8.3065e-01, - 8.9321e-02, 6.4483e-01, 9.4118e-01, 7.2658e-01, - 6.0324e-01, 3.2634e-02, 4.9464e-01, 2.9326e-01, - 1.4061e-01, 5.4972e-01, 3.8987e-01, 1.5617e-02, - 7.2013e-01, 2.5322e-01, 6.2362e-01, 8.6245e-01, - 7.2103e-01, 7.2221e-02, 8.1671e-01, 5.6625e-01, - 6.9137e-01, 9.5519e-01, 7.0531e-01, 7.9257e-02, - 4.0152e-02, 6.3328e-01, 8.7228e-01, 4.2235e-02, - 5.5240e-01, 1.8645e-01, 4.4119e-01, 5.1872e-01, - 3.0241e-01, 4.6970e-02, 3.3567e-01, 7.3336e-01, - 4.3809e-01, 6.0532e-01, 9.1692e-01, 2.7482e-01, - 3.3255e-01, 5.6474e-01, 5.1644e-01, 1.5826e-01, - 1.7806e-01, 4.3779e-01, 8.8205e-01, 4.6870e-01, - 9.3173e-01, 7.9373e-01, 4.0371e-01, 7.6367e-01, - 7.7558e-01, 8.4337e-01, 7.3356e-01, 2.5059e-01, - 4.3406e-01, 7.9969e-01, 3.2800e-01, 8.2351e-01, - 9.8393e-01, 4.2720e-01, 6.4308e-01, 4.1650e-01, - 4.5876e-01, 6.0922e-01, 6.6100e-01, 2.6225e-01, - 7.5511e-01, 7.4482e-01, 8.5879e-01, 1.5262e-01, - 5.5190e-01, 9.3653e-02, 2.4859e-01, 1.2244e-01, - 7.2058e-01, 6.8568e-01, 2.9328e-01, 3.0763e-02, - 7.3235e-01, 1.0132e-01, 1.3963e-02, 1.3632e-01, - 3.2207e-01, 2.5190e-01, 3.2095e-01, 9.5220e-01, - 2.2414e-01, 9.5574e-01, 6.6512e-01, 2.1393e-01, - 4.3569e-01, 6.5079e-01, 6.1620e-01, 4.8482e-01, - 2.7463e-01, 4.2786e-01, 3.6777e-01, 8.0895e-01, - 5.0708e-01, 5.4724e-02, 8.9217e-01, 5.0493e-01, - 7.4006e-01, 4.3982e-01, 1.3634e-01, 4.0648e-01, - 3.1583e-01, 2.9091e-01, 6.8608e-01, 8.8614e-01, - 5.0014e-01, 1.9714e-01, 9.0919e-01, 1.0143e-01, - 3.3742e-01, 7.9946e-01, 1.6835e-01, 9.3829e-01, - 3.9281e-01, 1.0649e-01, 2.5096e-01, 9.3147e-01, - 9.6402e-01, 6.3148e-01, 7.3458e-01, 1.6974e-01, - 4.7950e-01, 5.1505e-01, 8.8022e-01, 8.7978e-01, - 1.9932e-01, 5.8288e-01, 2.8453e-01, 7.0267e-01, - 1.3926e-01, 5.5163e-01, 9.0355e-01, 1.9504e-01, - 6.1798e-01, 1.4056e-01, 5.6601e-03, 9.6328e-01, - 9.0382e-01, 5.4381e-01, 9.7316e-01, 1.1029e-01, - 4.5424e-01, 5.9510e-01, 6.7831e-01, 7.2744e-01, - 8.0666e-01, 8.5186e-01, 3.0594e-01, 1.2956e-01, - 5.1455e-01, 5.0766e-01, 4.8751e-01, 9.1844e-01, - 4.6636e-01, 5.6813e-02, 3.7275e-01, 9.0117e-01, - 3.4902e-01, 2.3409e-01, 2.4325e-02, 6.8071e-01, - 6.3166e-01, 7.4586e-01, 3.5234e-01, 4.3537e-03, - 7.3571e-01, 3.9115e-01, 9.1061e-01, 5.0277e-02, - 6.2925e-01, 4.1784e-01, 7.3323e-02, 7.3164e-01, - 4.8462e-01, 7.6654e-01, 4.4826e-01, 3.9398e-02, - 6.9417e-01, 7.4395e-01, 6.1366e-01, 4.9977e-01, - 4.1028e-01, 6.6273e-01, 8.3655e-01, 6.0125e-01, - 4.6044e-01, 9.2920e-01, 2.1166e-01, 7.5961e-01, - 9.6397e-01, 6.7286e-02, 2.6344e-01, 2.0692e-01, - 1.1673e-01, 4.3703e-02, 5.2607e-01, 1.0118e-01, - 2.2023e-01, 9.8295e-01, 7.7590e-01, 6.2933e-01, - 7.0037e-01, 1.6284e-01, 3.1877e-01, 7.7901e-01, - 2.1463e-01, 8.4394e-01, 7.3672e-02, 5.4399e-01, - 6.9698e-01, 2.5617e-01, 4.5407e-01, 5.6064e-01, - 8.1694e-01, 3.4660e-01, 7.0410e-01, 6.7050e-01, - 2.3489e-01, 1.8168e-01, 7.9661e-01, 8.3635e-01, - 5.8756e-01, 8.8388e-01, 4.7640e-01, 9.5453e-01, - 5.4838e-01, 1.0417e-01, 3.4849e-01, 1.8089e-01, - 7.5269e-01, 6.1014e-01, 4.0905e-01, 6.2377e-01, - 4.3600e-01, 5.8630e-01, 7.4917e-01, 4.4090e-01, - 8.8014e-01, 8.0938e-01, 7.9802e-01, 2.1206e-01, - 2.7673e-01, 1.0645e-01, 
6.0725e-01, 3.0038e-01, - 6.4655e-01, 8.6462e-01, 9.6500e-01, 1.8784e-01, - 1.7909e-01, 5.6496e-01, 5.1067e-01, 3.3174e-01, - 1.8409e-01, 3.6191e-02, 8.8537e-01, 3.4596e-02, - 5.4171e-01, 8.8436e-01, 3.4009e-02, 7.4543e-02, - 7.6922e-01, 6.0467e-02, 5.3161e-02, 5.1383e-02, - 1.3021e-01, 6.6623e-01, 3.3683e-01, 8.7086e-01, - 3.4739e-01, 9.4711e-03, 4.9679e-01, 9.8853e-01, - 5.0873e-01, 2.8727e-01, 8.4395e-01, 5.7766e-02, - 4.5070e-01, 4.5636e-01, 8.9634e-01, 3.9843e-01, - 7.8137e-01, 9.3941e-01, 9.5578e-01, 9.2556e-02, - 8.1074e-02, 1.1447e-01, 6.3234e-01, 9.5197e-01, - 4.1311e-01, 1.8941e-01, 8.9359e-01, 6.9563e-01, - 8.1728e-01, 1.2234e-02, 2.1731e-01, 7.6004e-01, - 6.1252e-01, 5.0718e-01, 1.6782e-01, 4.0585e-01, - 7.8919e-01, 5.1338e-01, 8.9386e-01, 7.3014e-02, - 3.4906e-01, 4.2267e-02, 9.2989e-01, 9.1272e-04, - 7.8198e-01, 3.0679e-01, 4.8281e-02, 2.7044e-01, - 1.2871e-01, 4.4472e-02, 4.7767e-01, 6.8016e-01, - 8.0017e-01, 2.1361e-02, 3.1301e-01, 4.8972e-01, - 4.0420e-01, 7.3745e-01, 2.3749e-01, 2.0239e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3431, 0.4493, 0.0337, ..., 0.1419, 0.4440, 0.1516]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 1.679638147354126 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '349456', '-ss', '10000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.173964023590088} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.02082538604736328} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([1074, 7022, 4087, 4354, 1115, 9252, 2574, 3979, 915, - 3835, 8518, 2769, 112, 3623, 1291, 615, 191, 66, - 1214, 5633, 6476, 7907, 7161, 7265, 5563, 2980, 7771, - 8906, 5431, 4607, 4770, 626, 9524, 3730, 3866, 8666, - 6423, 5296, 979, 1727, 6975, 9285, 7075, 7134, 6667, - 7325, 5220, 9414, 9490, 8118, 9426, 4227, 7390, 7605, - 9168, 8384, 6401, 6035, 7830, 9972, 5870, 7420, 9935, - 3091, 834, 8862, 5740, 3062, 5112, 2908, 639, 6091, - 436, 2916, 4734, 7108, 513, 2302, 7905, 4519, 3497, - 5454, 9753, 2744, 5180, 8794, 3393, 8207, 4827, 280, - 5568, 3468, 6511, 9773, 2254, 47, 1155, 342, 995, - 9068, 3240, 1324, 1365, 6342, 2285, 1870, 8660, 4106, - 1186, 2521, 7988, 2968, 4045, 8137, 1538, 2198, 760, - 50, 2598, 195, 7444, 4957, 6809, 4374, 6456, 7197, - 9552, 1863, 5023, 3277, 4323, 9903, 7260, 7125, 6873, - 513, 9772, 5373, 7137, 4122, 7749, 5169, 1971, 6272, - 9111, 6015, 736, 3176, 6128, 5202, 6847, 7163, 5150, - 4610, 4581, 9646, 6385, 4193, 8418, 6616, 2414, 1755, - 8625, 575, 1748, 6885, 2435, 2666, 6742, 5614, 244, - 879, 6562, 1359, 3194, 5727, 3551, 7083, 1842, 9514, - 4863, 8479, 941, 2601, 7999, 6494, 5141, 4403, 7993, - 1185, 1573, 5845, 2439, 1922, 1733, 8393, 7228, 6830, - 5908, 6228, 9466, 2785, 4515, 6730, 3151, 3419, 3419, - 776, 3574, 2419, 4836, 3293, 8124, 1875, 7572, 2958, - 4651, 9262, 660, 8285, 6455, 653, 672, 0, 5228, - 9450, 3945, 8974, 8121, 9650, 2145, 4773, 7437, 4493, - 2675, 1190, 9285, 6100, 4192, 7922, 232, 671, 1102, - 7639, 9274, 9775, 7093, 5205, 6899, 4017, 9239, 5474, - 4012, 2403, 7759, 1283, 4106, 9062, 6550, 1753, 6450, - 8863, 2663, 1355, 6905, 1092, 2895, 2727, 2867, 1269, - 6117, 7865, 2347, 1781, 2367, 3514, 5957, 7481, 9656, - 4374, 661, 4235, 5018, 7284, 6968, 9032, 5003, 1196, - 3640, 4689, 1986, 9599, 2163, 9857, 2986, 6217, 1124, - 3869, 6932, 9440, 839, 1387, 3226, 4043, 7157, 7379, - 8428, 8483, 4369, 6851, 8809, 7897, 703, 2976, 3699, - 3438, 2355, 6736, 7238, 176, 7739, 889, 2831, 6969, - 8789, 5800, 3110, 6498, 4705, 6469, 9448, 7282, 2134, - 3678, 5467, 1622, 3274, 5066, 5127, 8447, 100, 2983, - 4463, 3099, 8883, 2393, 5221, 5531, 6706, 7577, 5800, - 8450, 3440, 6855, 944, 6894, 3303, 1082, 5849, 737, - 1687, 3315, 15, 7406, 3990, 2259, 7989, 8348, 7160, - 9351, 3002, 3148, 9126, 6367, 8090, 2998, 8051, 9049, - 2364, 8211, 5174, 5618, 94, 3866, 7923, 9652, 3819, - 3703, 4529, 1291, 2502, 976, 2602, 9342, 2433, 3989, - 7743, 4041, 7702, 1609, 486, 6429, 3042, 7286, 1452, - 35, 1305, 4754, 7796, 3723, 7653, 6695, 5085, 8985, - 3829, 2905, 9438, 2209, 6382, 4736, 9664, 3410, 9780, - 7415, 5568, 4279, 7830, 7106, 4004, 4722, 8650, 6975, - 2172, 2395, 8553, 3340, 3045, 7146, 5817, 3873, 8869, - 6247, 1175, 2122, 3744, 5725, 6888, 1097, 33, 3388, - 835, 6001, 1599, 2732, 8805, 8439, 7209, 2690, 5523, - 678, 764, 9297, 4921, 6303, 5169, 995, 8534, 8061, - 778, 3083, 4497, 1526, 7166, 9518, 9740, 5623, 1874, - 4112, 6650, 4724, 3864, 1864, 8807, 4615, 8323, 2087, - 3311, 7066, 3549, 9226, 6333, 9414, 1630, 6645, 4844, - 115, 7748, 2922, 9690, 9811, 2934, 356, 3565, 7527, - 5670, 3614, 2851, 2629, 3790, 7609, 2460, 7042, 9397, - 5659, 2806, 8290, 2345, 2448, 7257, 2879, 1633, 6514, - 6562, 6696, 7140, 2633, 5425, 7796, 6662, 1393, 2046, - 8374, 7261, 3390, 3981, 7552, 254, 5767, 2429, 5613, - 4296, 4489, 6937, 299, 9296, 2149, 2092, 8555, 8603, - 3237, 1398, 9762, 2121, 3314, 6752, 4047, 3531, 2282, - 9126, 
155, 6181, 9517, 318, 9471, 241, 3861, 4693, - 7187, 8904, 9238, 3275, 8438, 9069, 1864, 9231, 9667, - 4009, 9288, 4307, 8620, 7353, 7065, 3457, 9361, 7063, - 4553, 4913, 406, 2735, 5848, 6549, 3893, 4278, 1320, - 8124, 6472, 1508, 6416, 9674, 982, 6780, 7269, 2117, - 4160, 2842, 7527, 8109, 2175, 6435, 756, 2829, 4551, - 1101, 5613, 2799, 6159, 3030, 9674, 3371, 4668, 2685, - 9264, 9477, 5536, 5293, 9200, 7997, 9530, 5953, 890, - 5904, 2629, 8659, 5653, 9317, 4267, 9976, 2511, 6842, - 90, 512, 2068, 6073, 7386, 8238, 8424, 3409, 3193, - 1239, 1431, 3902, 4600, 1601, 3567, 9765, 5150, 8916, - 2840, 3297, 5983, 970, 249, 369, 3172, 7109, 7253, - 6112, 1285, 9881, 3414, 9391, 3724, 571, 8346, 8358, - 4162, 7136, 6172, 8809, 6689, 815, 5048, 3642, 959, - 6179, 9525, 7319, 8094, 4231, 3522, 6081, 9527, 8667, - 4007, 6492, 5206, 2671, 131, 2593, 293, 1464, 8045, - 2173, 608, 8313, 546, 7335, 3665, 2688, 8988, 3584, - 6303, 4955, 3386, 1723, 7105, 4572, 2844, 5600, 6702, - 5410, 4316, 2976, 4686, 3464, 2186, 1556, 2021, 8745, - 13, 3322, 1670, 708, 3608, 2776, 8220, 2918, 692, - 5296, 5873, 4095, 1083, 1521, 7223, 7591, 5664, 4154, - 6765, 3312, 7008, 4212, 7168, 7463, 2544, 4892, 8746, - 7430, 8621, 2945, 8225, 1113, 3741, 6687, 6930, 3774, - 7211, 1927, 7164, 9820, 2083, 2916, 9934, 6793, 5105, - 4835, 3112, 3094, 1326, 7804, 7578, 4182, 739, 1123, - 1767, 8724, 9857, 2143, 6971, 6801, 376, 4107, 954, - 5018, 8170, 7753, 3368, 2333, 1, 2240, 2793, 1283, - 7038, 8646, 9694, 359, 2258, 5039, 1080, 5544, 9098, - 4490, 5886, 352, 1011, 1449, 531, 9355, 9342, 2004, - 9274, 7734, 8205, 2292, 9551, 5966, 9837, 1814, 4079, - 8382, 4410, 2600, 7625, 9363, 1842, 2879, 1253, 809, - 7014, 5812, 209, 5239, 2093, 1770, 314, 8424, 2510, - 8571, 9426, 7724, 9614, 5503, 3753, 2866, 1758, 9962, - 6, 7364, 8977, 2787, 766, 2450, 2844, 7089, 3871, - 3747, 9673, 87, 5211, 3728, 3843, 1425, 9293, 1646, - 8032, 5484, 3610, 2890, 6677, 6468, 1558, 3598, 3915, - 1993, 1609, 8274, 966, 8311, 8117, 5411, 9792, 5362, - 4456, 6051, 3532, 1282, 4790, 4302, 1110, 1051, 5344, - 197, 7166, 3635, 796, 2322, 8834, 7592, 9685, 6175, - 1580, 7579, 7995, 5470, 7573, 5255, 5134, 8199, 4635, - 6559, 7004, 535, 234, 482, 8638, 109, 9961, 4999, - 7801, 2361, 4476, 9026, 8063, 7010, 7559, 3651, 6179, - 1451, 928, 8960, 630, 76, 1209, 5158, 5630, 9966, - 5033, 8228, 9559, 9213, 1023, 7495, 1852, 4508, 8270, - 1627, 1030, 4994, 541, 3130, 827, 3601, 8830, 6755, - 4725, 6005, 7586, 3820, 5409, 4677, 5963, 549, 7869, - 8701, 7230, 9014, 1899, 4054, 7192, 5440, 4348, 1719, - 2600, 6431, 2356, 4202, 427, 9630, 7862, 4930, 8419, - 7485, 2384, 6304, 9889, 7541, 9723, 7193, 9094, 6906, - 3017]), - values=tensor([0.9304, 0.0325, 0.3106, 0.4781, 0.2864, 0.4721, 0.3382, - 0.8821, 0.2738, 0.6632, 0.0397, 0.5410, 0.8129, 0.5302, - 0.8584, 0.6728, 0.7839, 0.2842, 0.6220, 0.2198, 0.9853, - 0.4195, 0.8925, 0.9780, 0.4936, 0.9314, 0.4002, 0.0589, - 0.0362, 0.6841, 0.4069, 0.9644, 0.1471, 0.1097, 0.7122, - 0.6469, 0.7726, 0.7037, 0.8236, 0.5724, 0.6757, 0.0210, - 0.7908, 0.1342, 0.5146, 0.6874, 0.0836, 0.6105, 0.6489, - 0.7550, 0.2489, 0.2644, 0.8196, 0.5567, 0.9361, 0.0192, - 0.7166, 0.4988, 0.6757, 0.6719, 0.0246, 0.9112, 0.3677, - 0.3643, 0.2678, 0.8468, 0.3772, 0.5807, 0.7059, 0.0144, - 0.4017, 0.2251, 0.1445, 0.5897, 0.5839, 0.8811, 0.5419, - 0.1899, 0.6807, 0.8358, 0.3278, 0.1587, 0.4733, 0.8342, - 0.8985, 0.6485, 0.7474, 0.6612, 0.0844, 0.3828, 0.0157, - 0.7714, 0.6608, 0.1525, 0.9463, 0.3394, 0.3867, 0.0432, - 0.8090, 0.3213, 0.9897, 0.9084, 
0.9424, 0.2045, 0.4579, - 0.7852, 0.4647, 0.4513, 0.6407, 0.5200, 0.9790, 0.1694, - 0.8201, 0.7817, 0.9650, 0.1348, 0.2654, 0.0726, 0.6476, - 0.9696, 0.1144, 0.2269, 0.5974, 0.7825, 0.6712, 0.1593, - 0.9034, 0.9204, 0.3969, 0.4522, 0.3790, 0.7055, 0.0019, - 0.1878, 0.4210, 0.9245, 0.3068, 0.7871, 0.0815, 0.1037, - 0.9467, 0.2547, 0.9280, 0.3139, 0.5680, 0.6516, 0.6068, - 0.5981, 0.3913, 0.0407, 0.0947, 0.2105, 0.0303, 0.5718, - 0.2321, 0.5689, 0.9476, 0.1321, 0.0247, 0.4729, 0.7661, - 0.8935, 0.9971, 0.2980, 0.9100, 0.1945, 0.7887, 0.3662, - 0.6700, 0.4029, 0.0956, 0.0108, 0.3242, 0.1912, 0.0388, - 0.8159, 0.1239, 0.6482, 0.0548, 0.7241, 0.4628, 0.1188, - 0.3048, 0.7968, 0.6538, 0.0308, 0.2637, 0.2733, 0.6327, - 0.6480, 0.2813, 0.5175, 0.6726, 0.0450, 0.6176, 0.2589, - 0.7109, 0.0980, 0.7310, 0.5738, 0.9989, 0.1733, 0.0559, - 0.0624, 0.6747, 0.0930, 0.2298, 0.6306, 0.1193, 0.8276, - 0.2679, 0.3372, 0.0565, 0.5821, 0.6562, 0.4518, 0.8406, - 0.7838, 0.3267, 0.0377, 0.0535, 0.4407, 0.8150, 0.3303, - 0.6133, 0.4116, 0.9870, 0.8925, 0.8368, 0.9237, 0.9331, - 0.5461, 0.4353, 0.9658, 0.0052, 0.5013, 0.8249, 0.8077, - 0.8369, 0.9914, 0.7366, 0.6685, 0.5210, 0.7940, 0.8071, - 0.1086, 0.3441, 0.1479, 0.3668, 0.5370, 0.5706, 0.8571, - 0.3522, 0.2921, 0.1107, 0.4358, 0.9740, 0.6370, 0.7121, - 0.3797, 0.6431, 0.6432, 0.8570, 0.3341, 0.8876, 0.4912, - 0.3012, 0.2244, 0.8304, 0.8708, 0.6993, 0.6415, 0.1354, - 0.4973, 0.6766, 0.5583, 0.8158, 0.1846, 0.7576, 0.3301, - 0.2284, 0.7380, 0.2497, 0.8663, 0.4244, 0.8319, 0.8766, - 0.3127, 0.9083, 0.6717, 0.2607, 0.9060, 0.3022, 0.6929, - 0.7216, 0.1864, 0.9103, 0.0908, 0.8280, 0.6772, 0.9170, - 0.1666, 0.0432, 0.9895, 0.8882, 0.4445, 0.2577, 0.6991, - 0.0134, 0.7908, 0.1595, 0.2586, 0.1384, 0.8815, 0.9964, - 0.6719, 0.3849, 0.3745, 0.5578, 0.6641, 0.8020, 0.8256, - 0.8774, 0.1735, 0.0160, 0.3321, 0.9095, 0.6863, 0.8819, - 0.2760, 0.0176, 0.9191, 0.2224, 0.5883, 0.6735, 0.2168, - 0.8084, 0.2051, 0.7731, 0.7349, 0.4183, 0.4549, 0.7852, - 0.1645, 0.2619, 0.7500, 0.6211, 0.9320, 0.1410, 0.8013, - 0.3936, 0.8135, 0.1711, 0.7508, 0.1565, 0.5072, 0.4150, - 0.0222, 0.7654, 0.2057, 0.7224, 0.5103, 0.0219, 0.2565, - 0.1947, 0.2598, 0.2876, 0.9465, 0.1945, 0.3773, 0.5036, - 0.9181, 0.1480, 0.8127, 0.9489, 0.5086, 0.2695, 0.5627, - 0.6161, 0.4583, 0.0870, 0.7396, 0.9559, 0.7672, 0.7594, - 0.2165, 0.4330, 0.5886, 0.0477, 0.3072, 0.0691, 0.3499, - 0.5368, 0.0098, 0.1604, 0.5787, 0.5961, 0.6748, 0.5240, - 0.6174, 0.6377, 0.0557, 0.8169, 0.1661, 0.8698, 0.1999, - 0.5072, 0.0521, 0.7366, 0.9190, 0.1330, 0.7979, 0.2571, - 0.8104, 0.6892, 0.6507, 0.8704, 0.6904, 0.5395, 0.5915, - 0.1731, 0.6768, 0.9334, 0.5447, 0.3147, 0.7565, 0.2840, - 0.8455, 0.7829, 0.1389, 0.9161, 0.9734, 0.2521, 0.5519, - 0.3612, 0.4687, 0.1449, 0.2398, 0.6605, 0.8039, 0.8419, - 0.6954, 0.8245, 0.4611, 0.7124, 0.8750, 0.0816, 0.7331, - 0.2216, 0.8005, 0.9334, 0.8519, 0.5866, 0.8732, 0.9519, - 0.4971, 0.6068, 0.1175, 0.8603, 0.6101, 0.3972, 0.0193, - 0.1596, 0.3517, 0.7516, 0.7870, 0.6434, 0.0362, 0.2214, - 0.6042, 0.3964, 0.6328, 0.2889, 0.2855, 0.8849, 0.5080, - 0.7797, 0.4322, 0.0814, 0.3647, 0.2061, 0.6314, 0.3193, - 0.1895, 0.3658, 0.6642, 0.1466, 0.1805, 0.6190, 0.9850, - 0.3325, 0.8191, 0.3665, 0.4316, 0.4284, 0.3112, 0.9226, - 0.5944, 0.0376, 0.1741, 0.8903, 0.0662, 0.9770, 0.9188, - 0.9310, 0.5606, 0.4272, 0.1108, 0.4718, 0.4460, 0.6248, - 0.5358, 0.2156, 0.0885, 0.3174, 0.8396, 0.1886, 0.2096, - 0.3417, 0.8097, 0.5694, 0.1045, 0.8763, 0.1113, 0.1353, - 0.0123, 0.9512, 0.5017, 0.3234, 0.1403, 0.0730, 
0.8981, - 0.2740, 0.4134, 0.2135, 0.9805, 0.0445, 0.9458, 0.7869, - 0.3360, 0.7234, 0.2980, 0.1314, 0.3499, 0.6698, 0.4526, - 0.6499, 0.4686, 0.7291, 0.1916, 0.4110, 0.7064, 0.0622, - 0.1843, 0.1217, 0.1311, 0.8602, 0.9506, 0.1258, 0.9113, - 0.9310, 0.4848, 0.0104, 0.0267, 0.5186, 0.0305, 0.5081, - 0.5501, 0.3248, 0.8113, 0.3173, 0.7019, 0.0515, 0.0562, - 0.1638, 0.4617, 0.6547, 0.3705, 0.4788, 0.0628, 0.4462, - 0.3249, 0.8781, 0.7038, 0.4954, 0.3617, 0.8045, 0.1896, - 0.8468, 0.7628, 0.4651, 0.3750, 0.9370, 0.8226, 0.5039, - 0.7669, 0.5888, 0.7467, 0.1323, 0.9814, 0.9275, 0.4832, - 0.2850, 0.4635, 0.1488, 0.7094, 0.2071, 0.4950, 0.1863, - 0.2851, 0.7798, 0.3730, 0.3994, 0.2529, 0.5052, 0.4832, - 0.3839, 0.7730, 0.1994, 0.8801, 0.9634, 0.1279, 0.5202, - 0.4480, 0.2752, 0.8425, 0.2605, 0.6678, 0.0019, 0.1146, - 0.5118, 0.8404, 0.4252, 0.8911, 0.6844, 0.6707, 0.4919, - 0.4044, 0.9689, 0.9549, 0.5260, 0.2040, 0.3758, 0.8436, - 0.0719, 0.9238, 0.4595, 0.7501, 0.7038, 0.0842, 0.8019, - 0.5135, 0.4312, 0.0042, 0.3774, 0.0300, 0.3045, 0.1310, - 0.9498, 0.2047, 0.0065, 0.4750, 0.7205, 0.8268, 0.2414, - 0.5067, 0.3174, 0.7835, 0.1750, 0.2497, 0.8182, 0.4410, - 0.4937, 0.0668, 0.7211, 0.3732, 0.1165, 0.5206, 0.1429, - 0.5890, 0.1326, 0.1841, 0.2299, 0.6975, 0.0815, 0.8063, - 0.7601, 0.5190, 0.4973, 0.7820, 0.2718, 0.3645, 0.2388, - 0.6349, 0.4919, 0.5469, 0.5835, 0.1693, 0.2555, 0.6997, - 0.0080, 0.8024, 0.8773, 0.4755, 0.9793, 0.3227, 0.7826, - 0.4776, 0.5918, 0.9313, 0.3366, 0.3665, 0.6137, 0.6442, - 0.7228, 0.1110, 0.6812, 0.4938, 0.1352, 0.2780, 0.6843, - 0.3593, 0.6866, 0.5996, 0.6977, 0.2843, 0.3178, 0.7528, - 0.8342, 0.5386, 0.6190, 0.0664, 0.9083, 0.4500, 0.1762, - 0.6839, 0.7444, 0.6654, 0.5698, 0.6462, 0.6689, 0.6885, - 0.6494, 0.1899, 0.7462, 0.4931, 0.7785, 0.4078, 0.0875, - 0.6143, 0.8570, 0.9888, 0.3955, 0.6640, 0.6587, 0.7544, - 0.9891, 0.6031, 0.0927, 0.6392, 0.0282, 0.2897, 0.0799, - 0.3039, 0.7537, 0.0064, 0.0830, 0.2575, 0.1555, 0.1809, - 0.6300, 0.6627, 0.5442, 0.5686, 0.4545, 0.6247, 0.0230, - 0.1211, 0.9727, 0.2738, 0.3745, 0.1564, 0.8676, 0.3949, - 0.4342, 0.6528, 0.7121, 0.9790, 0.6180, 0.8497, 0.6934, - 0.8114, 0.4502, 0.2751, 0.7561, 0.6667, 0.0282, 0.9822, - 0.8863, 0.6138, 0.1747, 0.4926, 0.0927, 0.1322, 0.4515, - 0.6778, 0.3334, 0.7750, 0.2319, 0.6137, 0.2868, 0.8848, - 0.0267, 0.5097, 0.9541, 0.4569, 0.9397, 0.3945, 0.3763, - 0.9350, 0.7996, 0.5528, 0.9459, 0.6150, 0.5344, 0.9224, - 0.4343, 0.2583, 0.5743, 0.1810, 0.4732, 0.1012, 0.2292, - 0.5431, 0.6259, 0.8319, 0.9927, 0.5847, 0.8053, 0.0391, - 0.3854, 0.8999, 0.7842, 0.4838, 0.5113, 0.9715, 0.1757, - 0.7568, 0.4008, 0.5341, 0.1516, 0.0878, 0.5977, 0.5872, - 0.7439, 0.8081, 0.9535, 0.6902, 0.8931, 0.9586, 0.7881, - 0.0686, 0.4547, 0.0160, 0.3146, 0.6264, 0.2480, 0.7559, - 0.5560, 0.9085, 0.3908, 0.0424, 0.3481, 0.0393, 0.1234, - 0.5520, 0.5796, 0.8048, 0.0202, 0.8271, 0.9243, 0.6015, - 0.0508, 0.8893, 0.9673, 0.3880, 0.3853, 0.1352, 0.4800, - 0.1939, 0.7035, 0.2054, 0.1146, 0.2407, 0.4234, 0.9640, - 0.4558, 0.8502, 0.7625, 0.3075, 0.9902, 0.1845, 0.0707, - 0.8518, 0.7267, 0.6494, 0.4761, 0.1632, 0.1248, 0.8200, - 0.2043, 0.6022, 0.7800, 0.0537, 0.1505, 0.0646, 0.9228, - 0.0966, 0.8036, 0.2431, 0.5992, 0.1858, 0.5672, 0.8294, - 0.0135, 0.2238, 0.0068, 0.8473, 0.0323, 0.2138, 0.3134, - 0.2674, 0.4860, 0.8071, 0.7400, 0.9168, 0.0701, 0.7855, - 0.7080, 0.5714, 0.5288, 0.1187, 0.1954, 0.0067, 0.7680, - 0.5930, 0.8250, 0.6028, 0.2144, 0.0255, 0.2917, 0.4790, - 0.3892, 0.3563, 0.0423, 0.3253, 0.0092, 0.8956, 0.2515, - 
0.1414, 0.9761, 0.8159, 0.7089, 0.4956, 0.0026, 0.1488, - 0.2902, 0.9089, 0.4432, 0.7989, 0.9160, 0.2680, 0.3317, - 0.6128, 0.6111, 0.3647, 0.4016, 0.8650, 0.7226, 0.2642, - 0.4868, 0.9208, 0.7252, 0.5230, 0.1652, 0.0793, 0.9874, - 0.5129, 0.3412, 0.3833, 0.8354, 0.9507, 0.1921, 0.2168, - 0.6983, 0.4500, 0.7444, 0.9235, 0.5009, 0.2575]), + col_indices=tensor([ 813, 3555, 5601, 2348, 6751, 3509, 8898, 6891, 6764, + 5696, 6541, 6774, 2761, 7603, 3910, 1146, 6601, 1434, + 8170, 9040, 1901, 783, 8536, 2520, 9733, 8200, 6188, + 4362, 3870, 7625, 4486, 110, 6322, 3876, 291, 4958, + 9579, 1085, 3805, 472, 9657, 720, 8087, 9783, 7571, + 9065, 5745, 9662, 2310, 4215, 5071, 1348, 1029, 743, + 5214, 463, 7547, 5088, 5989, 417, 3916, 3929, 8541, + 6280, 7493, 5100, 9253, 7371, 4836, 7845, 4239, 9204, + 2923, 1154, 9876, 5286, 5596, 8607, 5033, 3536, 5053, + 6328, 7634, 1405, 6980, 4904, 3404, 1086, 7231, 2337, + 3255, 7204, 7985, 4151, 7601, 1810, 3086, 6652, 4402, + 6503, 4960, 9076, 5183, 9064, 4615, 3311, 5060, 1114, + 6735, 3710, 2170, 1654, 5456, 6593, 9320, 7894, 9506, + 790, 6158, 816, 9358, 5096, 8267, 4536, 7570, 371, + 9159, 9814, 1158, 9035, 7510, 6889, 7686, 534, 6481, + 9178, 5, 6909, 8029, 8368, 4612, 3446, 8374, 422, + 2417, 5087, 1253, 1851, 4856, 2516, 263, 2852, 7027, + 65, 4754, 6331, 4147, 2983, 9809, 3581, 4874, 5547, + 7267, 6542, 4049, 987, 3681, 392, 662, 6371, 2330, + 344, 7425, 9016, 8897, 4097, 5999, 7054, 6509, 7474, + 5001, 3406, 8898, 7007, 8678, 959, 2801, 6382, 5882, + 577, 3382, 6393, 2341, 3537, 7010, 6752, 8821, 6679, + 9797, 4911, 1595, 9144, 1561, 1448, 9642, 6603, 3153, + 1734, 2346, 3739, 1514, 5583, 7190, 9741, 8267, 7010, + 3732, 3264, 2223, 1296, 6863, 841, 3550, 2499, 4135, + 401, 5487, 2802, 5306, 9121, 8020, 7789, 611, 9844, + 8945, 4342, 9275, 7486, 934, 3382, 7915, 656, 8638, + 9369, 8807, 631, 7317, 164, 4778, 2206, 5226, 9494, + 1976, 3145, 5299, 9242, 9921, 4254, 1436, 7372, 1236, + 2362, 6016, 1402, 5228, 1341, 144, 6728, 981, 1940, + 7572, 6285, 7527, 4486, 721, 7315, 7448, 8414, 7212, + 9090, 5181, 5486, 2009, 9361, 278, 4551, 9246, 1415, + 7758, 6968, 4058, 9262, 7007, 9277, 6293, 6094, 7073, + 4745, 8378, 7857, 9808, 3485, 3196, 8484, 3019, 1781, + 4214, 4923, 4116, 8212, 4662, 9041, 3339, 5640, 5439, + 66, 4070, 4818, 9843, 1133, 4311, 1669, 7820, 6734, + 9288, 7931, 4209, 3461, 3495, 1140, 3338, 5532, 36, + 1683, 3563, 88, 8595, 8968, 3734, 1245, 2902, 4550, + 8556, 4982, 2280, 3346, 4418, 2128, 2045, 216, 3792, + 1493, 7607, 6390, 9522, 4970, 3552, 6522, 4670, 8229, + 5179, 9283, 8877, 3294, 8391, 2526, 1115, 9405, 6429, + 4133, 6848, 96, 5914, 1817, 8318, 8917, 725, 5309, + 1877, 3931, 1802, 3182, 4611, 3632, 742, 4119, 2756, + 9905, 3374, 7991, 7388, 2034, 4513, 8580, 9451, 441, + 1254, 4152, 6933, 3679, 9552, 2347, 2672, 5834, 4391, + 1094, 4819, 1694, 496, 4483, 431, 1180, 1500, 4452, + 2319, 381, 9021, 7640, 3161, 1436, 7570, 2387, 40, + 3494, 4349, 6559, 7205, 2493, 2886, 7507, 6542, 897, + 3251, 8480, 9942, 9274, 2164, 6932, 6920, 8997, 2896, + 8707, 3076, 3418, 2685, 4990, 4485, 3016, 6367, 3557, + 1197, 2587, 5851, 6422, 7118, 9887, 1978, 2043, 5570, + 8601, 2617, 2788, 8786, 7955, 9278, 5474, 5203, 9884, + 6480, 8785, 339, 7489, 2303, 6648, 7103, 4433, 9814, + 9820, 4531, 5252, 4655, 1315, 2884, 1625, 5107, 7108, + 4322, 5850, 6601, 115, 3162, 1276, 4095, 5697, 8428, + 2598, 6411, 905, 7857, 337, 9527, 2855, 2469, 5198, + 2750, 3197, 7238, 3383, 2212, 7504, 7445, 7201, 8899, + 1535, 902, 277, 7044, 3555, 9764, 2620, 5312, 1500, + 
4360, 9854, 157, 7036, 9055, 6989, 6836, 7277, 8982, + 9040, 8111, 7216, 5948, 7699, 9008, 2934, 869, 4012, + 8466, 524, 9470, 8379, 6710, 1385, 9803, 8271, 2917, + 4364, 6712, 4109, 3275, 4994, 2546, 6251, 2106, 4095, + 891, 1231, 3453, 7091, 6439, 320, 6742, 3882, 9141, + 8755, 121, 8780, 8959, 1055, 8627, 2460, 8367, 232, + 3023, 6872, 9502, 6733, 4800, 3441, 7025, 9105, 5775, + 9115, 8218, 725, 1095, 8238, 6587, 9792, 2556, 1505, + 8143, 7312, 8400, 3654, 5516, 491, 8881, 1263, 7506, + 9766, 9661, 6376, 1097, 7626, 451, 6319, 3277, 4033, + 685, 2713, 8034, 4258, 2042, 5272, 4011, 63, 5624, + 3877, 1575, 9410, 6388, 390, 7694, 7116, 8042, 6803, + 3708, 8956, 2463, 2844, 6577, 9971, 7142, 4669, 553, + 6022, 3179, 6970, 3283, 4990, 7346, 603, 2211, 2131, + 5801, 7903, 3068, 2030, 5941, 961, 4099, 642, 2159, + 2973, 1246, 1112, 2138, 2355, 3899, 2656, 5274, 9769, + 5896, 4952, 6996, 281, 1645, 1311, 8786, 8160, 8411, + 7134, 797, 9654, 5299, 1190, 9957, 1443, 7140, 4342, + 8772, 8508, 4419, 8634, 4948, 623, 8720, 8005, 4860, + 5512, 8470, 5262, 435, 3305, 3548, 848, 8631, 9158, + 9394, 4628, 7003, 5483, 7847, 1578, 5837, 82, 2475, + 8931, 7632, 3999, 4784, 6100, 6615, 5319, 7378, 1399, + 6357, 8572, 959, 9013, 8257, 7134, 5028, 148, 5897, + 5449, 7286, 8353, 7105, 6701, 1189, 9533, 973, 7926, + 6631, 3141, 6232, 1158, 3168, 3374, 7690, 1359, 4743, + 345, 3340, 2176, 4855, 1619, 7960, 5601, 1244, 536, + 3313, 4159, 9944, 3833, 2859, 3490, 7764, 6444, 1928, + 185, 4751, 1828, 4990, 2180, 5337, 7033, 2344, 5409, + 6007, 6275, 6365, 921, 5272, 8032, 3837, 3037, 7299, + 2627, 9875, 8052, 2699, 3802, 5534, 3268, 7264, 8506, + 5653, 9886, 3827, 1919, 42, 9511, 432, 929, 9245, + 5203, 2249, 7106, 3701, 6503, 8032, 6097, 1520, 3841, + 889, 8077, 3240, 5297, 8911, 7780, 8046, 3170, 3146, + 4227, 8501, 5873, 6519, 5210, 7832, 1216, 675, 2061, + 5267, 8286, 4845, 7303, 8647, 4002, 1374, 9399, 8142, + 4587, 4176, 6423, 235, 6570, 5985, 7585, 4413, 8081, + 9831, 8554, 5423, 8356, 3458, 3472, 3245, 4394, 5760, + 4890, 7249, 57, 2640, 2357, 1828, 6517, 7118, 6175, + 8900, 7588, 4696, 9116, 2166, 6879, 7800, 4822, 5540, + 3213, 4018, 5864, 7283, 6200, 5411, 8208, 141, 8434, + 5029, 6201, 4915, 8572, 5022, 5224, 8631, 4877, 8770, + 4280, 662, 8139, 1255, 2874, 4843, 5169, 9393, 5170, + 5093, 6964, 2127, 9397, 3560, 2901, 8762, 9455, 5913, + 9053, 9678, 3965, 2513, 7545, 8199, 7340, 4334, 6412, + 267, 8294, 8573, 6264, 5724, 6907, 6745, 7963, 137, + 221, 1483, 7667, 324, 4043, 2690, 480, 1832, 1470, + 1358, 500, 408, 4583, 7995, 2209, 2836, 3244, 3458, + 6751, 275, 5234, 4718, 6642, 7823, 5339, 3648, 5042, + 918, 1032, 6681, 1900, 7030, 7663, 2279, 5984, 3565, + 4882, 2905, 9063, 9270, 8771, 5715, 5757, 5233, 5639, + 6550, 4592, 9060, 2668, 3220, 2927, 7412, 4584, 9350, + 9582, 4903, 1867, 5407, 5509, 7014, 1325, 260, 3008, + 7844, 6098, 3817, 1436, 8377, 1362, 9029, 8090, 3180, + 1252]), + values=tensor([2.0168e-01, 1.2140e-01, 5.0960e-01, 9.6448e-01, + 8.6295e-01, 6.3562e-01, 5.2035e-01, 4.6173e-01, + 3.5476e-01, 5.1713e-01, 1.0050e-01, 8.3824e-01, + 3.2532e-02, 9.4186e-01, 8.2939e-01, 7.7668e-01, + 8.4041e-01, 9.1558e-01, 2.3027e-01, 2.0557e-01, + 8.5858e-01, 8.0504e-01, 1.9974e-01, 7.7226e-01, + 1.7858e-01, 9.5446e-01, 8.3561e-01, 2.1225e-01, + 4.9029e-01, 2.0300e-01, 8.9603e-01, 4.8134e-01, + 7.8138e-01, 5.8272e-01, 2.3755e-01, 9.9237e-01, + 3.3020e-01, 1.3179e-01, 9.3117e-01, 2.2103e-01, + 3.3461e-01, 9.1409e-01, 7.0179e-02, 8.4768e-01, + 8.3083e-01, 5.1755e-01, 3.0216e-01, 2.9965e-02, + 6.8091e-01, 
8.7331e-01, 9.9333e-01, 3.3984e-01, + 4.4267e-01, 5.0840e-01, 5.2241e-01, 9.4787e-01, + 7.2046e-01, 7.6206e-02, 8.3097e-01, 9.2695e-01, + 7.4191e-01, 8.6065e-01, 9.2375e-01, 5.6996e-01, + 7.6208e-01, 8.6025e-01, 7.1852e-02, 9.2091e-01, + 4.4480e-01, 6.7627e-01, 7.8731e-01, 7.3778e-01, + 5.8506e-01, 2.9066e-01, 7.3545e-01, 7.2686e-01, + 5.9338e-01, 6.7793e-01, 3.1309e-01, 8.6727e-01, + 8.4004e-02, 1.7873e-01, 1.9926e-01, 5.4044e-01, + 9.0451e-01, 7.9830e-01, 5.4665e-01, 9.4017e-02, + 5.3506e-01, 4.1130e-01, 4.8792e-01, 1.6034e-01, + 7.4180e-01, 3.0353e-01, 2.2200e-01, 4.8129e-01, + 1.3307e-01, 3.0214e-01, 8.8482e-01, 7.8676e-01, + 7.5812e-01, 1.8872e-01, 3.5814e-01, 8.5258e-01, + 7.8096e-02, 2.1230e-01, 3.9813e-01, 2.5994e-01, + 6.9629e-02, 7.4526e-01, 8.7716e-01, 6.2964e-01, + 3.1198e-01, 8.3764e-01, 6.6254e-01, 1.7225e-01, + 6.3255e-01, 3.6667e-01, 6.8200e-03, 6.0239e-01, + 3.1961e-01, 1.6300e-01, 2.2253e-01, 6.8969e-01, + 4.6999e-01, 7.1555e-01, 3.6711e-01, 8.8306e-01, + 7.6598e-01, 8.2220e-01, 2.6772e-01, 3.4665e-01, + 6.5859e-01, 6.6520e-01, 7.7785e-01, 7.3284e-01, + 2.4239e-01, 5.4189e-01, 6.5043e-01, 5.5327e-01, + 4.1857e-01, 8.2022e-01, 7.9680e-01, 8.9623e-01, + 2.2680e-01, 1.9899e-01, 1.0455e-01, 5.6407e-01, + 5.9409e-01, 3.5160e-02, 3.2148e-01, 4.9411e-01, + 5.7219e-01, 7.7570e-01, 4.6275e-02, 2.4249e-01, + 6.3982e-01, 4.4430e-01, 3.3127e-01, 1.6301e-01, + 4.7117e-01, 6.6820e-01, 2.8054e-01, 1.6461e-01, + 8.0215e-01, 2.0496e-01, 6.3700e-01, 2.0095e-01, + 7.8357e-01, 5.5162e-01, 9.5040e-01, 8.6044e-01, + 7.3848e-02, 6.5052e-02, 3.7047e-01, 1.6937e-01, + 6.8451e-01, 7.9149e-01, 4.1051e-01, 7.4685e-01, + 6.9969e-01, 5.8710e-01, 3.6306e-01, 3.5459e-01, + 9.5698e-01, 6.0647e-01, 4.1667e-01, 9.2310e-01, + 5.3303e-02, 3.5939e-01, 8.1438e-01, 2.9938e-01, + 9.6505e-01, 2.5057e-02, 5.0417e-01, 5.2763e-01, + 7.2449e-01, 8.8056e-01, 6.2937e-01, 1.2211e-01, + 9.2414e-01, 1.3236e-02, 5.0426e-01, 1.7438e-01, + 1.2193e-01, 5.8518e-01, 6.4863e-01, 9.3548e-01, + 4.9147e-01, 9.8486e-01, 6.3010e-01, 9.7888e-01, + 4.9719e-01, 3.4464e-01, 6.4133e-01, 1.0294e-01, + 7.4896e-01, 7.7473e-01, 6.4964e-01, 8.2067e-01, + 8.5363e-01, 2.9785e-01, 9.7463e-02, 1.1944e-01, + 5.9206e-01, 9.6536e-01, 5.8305e-02, 9.5568e-01, + 1.5151e-01, 4.5195e-02, 6.6548e-01, 4.4771e-01, + 8.6998e-01, 9.3204e-01, 8.2489e-01, 4.8361e-01, + 3.4566e-02, 8.5340e-01, 4.8811e-01, 1.6767e-02, + 3.3576e-01, 9.5577e-01, 3.5776e-01, 7.4859e-01, + 1.0858e-01, 6.5785e-01, 3.3716e-03, 7.5364e-01, + 2.9489e-01, 9.4578e-01, 8.1882e-01, 4.4566e-01, + 3.5629e-01, 7.1985e-01, 1.1546e-01, 8.5560e-01, + 9.2977e-01, 8.6684e-01, 9.9593e-02, 4.2349e-01, + 1.5755e-01, 3.2913e-01, 3.9274e-01, 2.4889e-01, + 6.7351e-01, 8.1585e-01, 6.4212e-02, 7.6844e-01, + 6.5965e-01, 8.8900e-01, 4.4547e-01, 3.5234e-01, + 5.0209e-01, 3.5679e-01, 5.2734e-01, 9.5229e-01, + 2.4374e-01, 7.9390e-01, 9.7137e-01, 3.0650e-02, + 3.2402e-01, 7.9347e-01, 7.0211e-01, 9.0628e-01, + 6.3157e-02, 6.9842e-01, 3.0079e-01, 8.2320e-01, + 2.2085e-02, 1.0760e-01, 2.9088e-01, 6.6437e-01, + 8.2786e-01, 9.5402e-01, 7.4723e-01, 7.4083e-01, + 9.3318e-01, 6.0157e-03, 9.6056e-01, 4.3251e-01, + 9.5002e-01, 6.3586e-01, 4.4240e-02, 4.4923e-01, + 2.0841e-01, 7.4434e-01, 7.1395e-01, 5.5027e-01, + 4.6227e-01, 1.1166e-01, 8.5573e-01, 1.6180e-01, + 2.4032e-01, 2.3931e-01, 8.3028e-01, 6.1341e-01, + 2.6788e-01, 8.0691e-01, 1.4561e-01, 4.9309e-01, + 4.1449e-01, 9.0847e-01, 3.9895e-01, 4.8030e-01, + 1.0981e-01, 2.3355e-01, 4.1112e-02, 1.2865e-02, + 5.1324e-01, 3.4171e-01, 7.4743e-01, 3.2950e-01, + 2.2296e-02, 
2.8363e-03, 7.1159e-01, 2.8934e-01, + 8.8645e-01, 2.6853e-01, 6.4701e-01, 8.1935e-01, + 2.1827e-01, 1.9581e-01, 3.8112e-01, 1.5816e-01, + 9.9896e-01, 2.2611e-01, 8.8302e-01, 2.3234e-02, + 3.9298e-01, 2.1668e-01, 9.0124e-01, 1.6860e-02, + 4.2946e-01, 2.0302e-01, 3.8172e-01, 2.5265e-01, + 1.3931e-01, 2.7569e-01, 9.8244e-01, 2.9712e-01, + 4.8984e-02, 3.3183e-01, 7.6106e-01, 4.2434e-01, + 9.0253e-02, 6.9706e-01, 9.8453e-01, 7.6594e-01, + 8.3283e-01, 6.2202e-01, 3.4127e-01, 4.3436e-01, + 7.2563e-01, 9.5914e-01, 3.6111e-01, 6.5374e-01, + 3.6105e-01, 7.7889e-01, 4.2160e-01, 1.4598e-01, + 2.4479e-01, 6.5836e-01, 5.0046e-01, 3.6631e-01, + 9.1485e-04, 2.3470e-01, 5.0950e-01, 9.5273e-01, + 4.5408e-01, 7.9593e-01, 6.0422e-01, 7.1647e-01, + 9.3593e-01, 2.3595e-01, 2.7304e-02, 9.4325e-01, + 4.8512e-01, 7.6924e-01, 1.9354e-01, 3.9815e-01, + 7.8779e-01, 8.1959e-01, 3.8596e-01, 2.7309e-01, + 9.9093e-01, 9.7121e-01, 2.3517e-01, 7.0347e-01, + 3.3435e-01, 6.0069e-01, 7.6260e-01, 1.6623e-01, + 5.3131e-01, 1.8031e-01, 5.9741e-01, 2.7057e-01, + 5.4321e-01, 9.1425e-01, 1.1677e-01, 9.6779e-01, + 8.2042e-01, 9.8641e-01, 4.3791e-01, 2.9000e-01, + 7.1536e-01, 4.7604e-01, 8.2022e-02, 5.7202e-01, + 6.5967e-01, 7.2261e-01, 6.6184e-01, 9.3529e-01, + 6.2343e-01, 3.8226e-02, 8.0734e-01, 7.6148e-01, + 9.0256e-01, 8.6921e-01, 8.6367e-01, 6.6208e-01, + 8.4168e-01, 5.5106e-01, 2.4523e-01, 2.2916e-01, + 6.1551e-01, 9.8622e-01, 1.1858e-01, 5.9894e-01, + 1.8447e-02, 8.0723e-01, 9.8567e-02, 8.5396e-01, + 3.1936e-01, 6.9508e-01, 3.3799e-02, 7.9581e-02, + 8.9289e-01, 1.8876e-01, 2.7400e-01, 4.8001e-01, + 4.6605e-01, 1.4703e-01, 9.3793e-01, 9.1956e-01, + 3.8718e-01, 6.6345e-01, 2.0475e-01, 3.1392e-01, + 4.5872e-01, 8.6656e-01, 7.5890e-01, 4.7953e-01, + 8.9882e-01, 4.2607e-01, 3.2254e-01, 5.1586e-01, + 6.5610e-02, 4.5037e-01, 7.5950e-01, 6.6424e-01, + 3.6377e-03, 8.0630e-01, 6.5011e-01, 4.8020e-01, + 7.1264e-01, 1.5613e-03, 6.9591e-01, 3.5020e-01, + 6.5447e-01, 3.4656e-01, 2.3239e-01, 1.6053e-01, + 1.7956e-02, 6.4804e-01, 7.5848e-01, 7.3696e-01, + 9.1302e-01, 1.6144e-02, 2.1144e-02, 9.7844e-01, + 4.9360e-01, 5.4399e-01, 6.6588e-01, 4.3358e-01, + 2.9374e-01, 7.3177e-01, 8.2318e-01, 6.6575e-01, + 7.9798e-01, 3.0376e-01, 6.6247e-01, 1.1562e-01, + 2.8079e-01, 8.1109e-01, 5.5641e-02, 1.5485e-01, + 4.6251e-01, 3.4910e-01, 1.7611e-01, 7.0897e-01, + 3.7575e-01, 2.3170e-01, 7.7257e-01, 9.7591e-02, + 3.8272e-01, 8.4603e-01, 3.7098e-01, 2.5186e-01, + 8.8233e-01, 5.2091e-01, 9.3126e-01, 7.9369e-01, + 5.6783e-01, 8.2118e-01, 5.4528e-01, 8.5778e-01, + 1.8679e-01, 6.0231e-01, 6.6085e-01, 8.0616e-01, + 9.6219e-01, 6.4123e-01, 8.4781e-01, 5.8183e-01, + 7.3107e-01, 4.4392e-01, 9.4165e-01, 3.2753e-01, + 1.9438e-01, 1.1520e-01, 7.8106e-02, 3.2243e-01, + 9.0972e-02, 5.1190e-01, 4.6979e-01, 7.2306e-02, + 9.1044e-01, 9.0083e-01, 8.1890e-01, 9.3319e-01, + 2.6205e-01, 5.4010e-01, 8.3323e-01, 4.8994e-01, + 5.5675e-01, 4.1259e-01, 1.2074e-01, 9.0364e-01, + 6.4901e-01, 1.7426e-01, 1.1971e-01, 7.3514e-01, + 3.0741e-01, 2.8048e-01, 2.7273e-01, 5.4624e-01, + 7.2918e-01, 9.3357e-01, 5.5853e-01, 9.4738e-02, + 1.6750e-01, 8.2048e-01, 2.0105e-02, 1.3700e-01, + 7.6481e-01, 8.9388e-01, 8.8445e-01, 7.7138e-02, + 6.4948e-01, 7.9172e-02, 2.2845e-01, 6.0226e-02, + 9.6704e-01, 1.9694e-02, 5.6619e-01, 8.5330e-01, + 9.0732e-01, 7.9406e-01, 8.1097e-01, 4.4696e-01, + 4.2978e-01, 9.9185e-01, 5.5064e-01, 2.8806e-01, + 7.3586e-01, 6.4286e-01, 6.2535e-01, 8.9069e-01, + 1.9965e-01, 8.3625e-01, 5.5803e-01, 4.8866e-01, + 5.9030e-01, 2.6694e-01, 9.8528e-01, 6.9992e-01, + 3.6785e-01, 
8.8530e-01, 2.9410e-01, 7.9645e-01, + 1.3131e-01, 8.0033e-02, 5.1810e-01, 3.9315e-01, + 5.4661e-01, 1.8726e-01, 8.7709e-01, 6.1621e-02, + 6.7285e-01, 9.1442e-01, 1.8765e-01, 3.8825e-01, + 8.3771e-01, 1.7540e-01, 7.9589e-01, 4.5798e-01, + 1.3253e-01, 7.0988e-01, 4.7417e-01, 5.1118e-01, + 9.2486e-01, 9.4572e-01, 3.0077e-01, 5.9193e-01, + 5.1667e-01, 8.0613e-01, 9.0121e-01, 6.4047e-01, + 8.7799e-01, 2.6040e-01, 5.8987e-02, 4.0194e-01, + 4.8165e-01, 2.6593e-01, 9.1186e-01, 5.7387e-02, + 7.8801e-01, 4.0508e-01, 7.9766e-01, 9.3763e-01, + 8.6369e-01, 3.9941e-01, 7.9396e-01, 9.4886e-01, + 7.3904e-01, 6.6017e-01, 2.0276e-01, 5.9140e-02, + 1.7071e-01, 6.1488e-01, 9.3398e-01, 2.3972e-01, + 8.1637e-01, 5.9041e-01, 2.3534e-01, 8.9411e-01, + 6.9687e-01, 6.6548e-01, 9.0171e-01, 9.5816e-01, + 4.7727e-01, 5.8060e-01, 2.8094e-01, 9.7343e-01, + 3.9873e-01, 2.3065e-01, 3.8061e-01, 1.5275e-02, + 8.2720e-01, 2.5299e-01, 7.6057e-01, 8.2794e-01, + 4.4189e-01, 3.7723e-01, 9.2253e-01, 1.8516e-02, + 1.3589e-01, 7.7955e-01, 6.0263e-01, 3.6902e-01, + 8.1875e-01, 7.9078e-01, 3.3531e-01, 8.2074e-01, + 6.2023e-01, 5.5543e-01, 6.1303e-01, 8.0047e-01, + 1.5819e-01, 7.3330e-01, 5.4253e-01, 4.7690e-01, + 8.4634e-01, 6.0393e-01, 6.4694e-01, 6.5021e-01, + 8.9254e-01, 5.3664e-01, 2.4242e-01, 3.2012e-01, + 6.1479e-01, 2.5664e-01, 1.5481e-02, 9.8927e-01, + 7.6709e-01, 4.5919e-01, 8.1010e-01, 3.1257e-01, + 3.6097e-02, 8.7068e-01, 5.9498e-01, 5.6365e-01, + 5.2813e-01, 9.8967e-01, 5.7994e-01, 2.1316e-01, + 4.7522e-01, 9.6766e-01, 2.7211e-01, 5.0273e-01, + 3.5597e-02, 1.8671e-01, 6.5253e-01, 2.2581e-01, + 4.7961e-01, 4.2469e-01, 5.9623e-01, 2.1458e-01, + 5.5358e-01, 6.2381e-01, 1.1454e-01, 7.6868e-01, + 7.7229e-01, 2.0820e-01, 8.1646e-01, 3.8201e-01, + 6.7840e-01, 2.6142e-01, 2.3761e-01, 4.3843e-02, + 5.9473e-01, 5.4741e-01, 9.0342e-01, 6.6320e-01, + 5.2649e-01, 1.0730e-01, 3.8445e-01, 1.3308e-01, + 3.5149e-01, 5.1842e-01, 9.9723e-01, 4.0092e-01, + 4.6982e-01, 6.8334e-01, 7.9338e-01, 3.5292e-01, + 5.5741e-01, 4.4414e-01, 4.1370e-01, 9.2697e-01, + 4.0179e-01, 1.7830e-01, 4.6244e-02, 3.8517e-01, + 3.9281e-01, 8.2781e-01, 3.8412e-01, 4.5869e-01, + 9.0373e-01, 3.4088e-01, 4.1020e-01, 4.5640e-01, + 9.8115e-01, 6.6237e-01, 5.9441e-01, 8.1020e-01, + 3.2213e-01, 5.5563e-01, 9.5901e-01, 6.0565e-01, + 5.5377e-01, 9.8541e-02, 9.2742e-01, 1.0612e-01, + 6.4928e-01, 8.7697e-01, 5.2892e-01, 3.9208e-01, + 7.4062e-01, 7.6663e-01, 7.2999e-01, 1.8732e-01, + 1.7938e-01, 9.6027e-01, 2.7430e-01, 3.3237e-01, + 5.2032e-01, 1.2714e-01, 2.7276e-01, 1.3603e-01, + 5.4768e-01, 9.4909e-01, 3.0523e-01, 3.6889e-01, + 1.6466e-01, 5.7130e-01, 1.0658e-01, 5.5163e-01, + 5.6656e-01, 6.9756e-01, 5.6090e-01, 1.9476e-01, + 2.3180e-02, 2.9697e-01, 6.7185e-01, 4.1718e-02, + 2.6692e-01, 5.5692e-01, 5.3916e-01, 1.7386e-01, + 5.6647e-01, 1.4696e-01, 3.9978e-01, 9.2819e-01, + 1.0570e-01, 9.5575e-01, 5.8847e-01, 4.2353e-01, + 1.2888e-01, 8.3630e-01, 5.0450e-01, 4.7961e-02, + 7.1906e-02, 3.7907e-01, 2.7187e-01, 2.4273e-01, + 8.6734e-01, 8.9647e-01, 4.8106e-01, 6.1337e-02, + 7.1226e-01, 5.7793e-01, 4.3539e-02, 4.2502e-01, + 7.1089e-01, 8.5125e-02, 2.4260e-01, 6.8884e-01, + 5.5539e-01, 4.0129e-01, 9.1977e-01, 4.6523e-02, + 6.2506e-01, 9.1543e-01, 6.9383e-01, 9.2070e-01, + 3.0667e-01, 4.7424e-01, 3.1540e-01, 6.1782e-01, + 6.0718e-01, 7.9771e-01, 8.6723e-01, 6.3895e-01, + 3.9551e-01, 7.7269e-01, 4.6791e-01, 5.7870e-01, + 1.4800e-01, 1.7498e-01, 4.6313e-03, 2.7767e-01, + 7.3040e-01, 3.8696e-01, 5.7798e-01, 3.0407e-01, + 3.9069e-01, 9.2083e-01, 8.8217e-01, 6.7625e-01, + 3.7996e-01, 
5.4605e-01, 3.3830e-01, 1.7492e-01, + 2.3472e-01, 5.6540e-02, 9.5993e-01, 5.5770e-01, + 9.9132e-01, 1.2244e-01, 1.5699e-01, 1.6615e-01, + 1.2011e-01, 7.8216e-01, 7.1345e-01, 1.4447e-01, + 5.6962e-01, 9.2663e-01, 9.2819e-01, 6.3480e-01, + 9.5093e-01, 9.7704e-01, 1.5964e-01, 1.9221e-01, + 3.8753e-02, 5.9679e-01, 3.7099e-01, 2.5940e-01, + 3.7521e-01, 4.5516e-01, 8.5663e-01, 7.2485e-01, + 2.5475e-01, 6.1547e-01, 9.8406e-01, 6.1584e-01, + 8.1973e-01, 7.0296e-01, 8.1837e-01, 3.3715e-01, + 5.6230e-01, 6.9749e-01, 9.1797e-01, 1.7311e-02, + 5.1825e-01, 6.2676e-02, 7.3056e-01, 2.9493e-01, + 7.1981e-01, 3.5029e-01, 3.6041e-01, 2.0449e-01, + 5.9370e-01, 7.3638e-01, 3.5419e-01, 6.1436e-01, + 7.1113e-01, 8.1002e-01, 5.0918e-01, 1.1820e-01, + 8.4235e-01, 8.9597e-01, 3.8792e-01, 9.6675e-01, + 2.4864e-02, 6.5138e-01, 9.2083e-01, 1.7413e-01, + 1.3460e-01, 3.7871e-03, 4.0474e-01, 5.2195e-01, + 8.6458e-01, 6.5792e-01, 1.7325e-01, 6.1083e-01, + 8.1487e-01, 7.3026e-01, 2.3331e-01, 7.6255e-01, + 5.6146e-01, 7.8958e-01, 8.4394e-01, 8.6436e-01, + 6.1874e-01, 8.2650e-01, 1.2794e-01, 8.2823e-01, + 9.7619e-01, 2.8685e-01, 6.7237e-01, 5.5918e-01, + 8.3540e-01, 1.5060e-01, 8.7090e-02, 9.2178e-01, + 1.9882e-01, 1.1897e-01, 8.5784e-01, 6.6522e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.5348, 0.7986, 0.2200, ..., 0.0453, 0.2085, 0.0080]) +tensor([0.2548, 0.7292, 0.0167, ..., 0.4496, 0.0224, 0.9290]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1026,268 +375,271 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 11.173964023590088 seconds +Time: 0.02082538604736328 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '50419', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.4692554473876953} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([1074, 7022, 4087, 4354, 1115, 9252, 2574, 3979, 915, - 3835, 8518, 2769, 112, 3623, 1291, 615, 191, 66, - 1214, 5633, 6476, 7907, 7161, 7265, 5563, 2980, 7771, - 8906, 5431, 4607, 4770, 626, 9524, 3730, 3866, 8666, - 6423, 5296, 979, 1727, 6975, 9285, 7075, 7134, 6667, - 7325, 5220, 9414, 9490, 8118, 9426, 4227, 7390, 7605, - 9168, 8384, 6401, 6035, 7830, 9972, 5870, 7420, 9935, - 3091, 834, 8862, 5740, 3062, 5112, 2908, 639, 6091, - 436, 2916, 4734, 7108, 513, 2302, 7905, 4519, 3497, - 5454, 9753, 2744, 5180, 8794, 3393, 8207, 4827, 280, - 5568, 3468, 6511, 9773, 2254, 47, 1155, 342, 995, - 9068, 3240, 1324, 1365, 6342, 2285, 1870, 8660, 4106, - 1186, 2521, 7988, 2968, 4045, 8137, 1538, 2198, 760, - 50, 2598, 195, 7444, 4957, 6809, 4374, 6456, 7197, - 9552, 1863, 5023, 3277, 4323, 9903, 7260, 7125, 6873, - 513, 9772, 5373, 7137, 4122, 7749, 5169, 1971, 6272, - 9111, 6015, 736, 3176, 6128, 5202, 6847, 7163, 5150, - 4610, 4581, 9646, 6385, 4193, 8418, 6616, 2414, 1755, - 8625, 575, 1748, 6885, 2435, 2666, 6742, 5614, 244, - 879, 6562, 1359, 3194, 5727, 3551, 7083, 1842, 9514, - 4863, 8479, 941, 2601, 7999, 6494, 5141, 4403, 7993, - 1185, 1573, 5845, 2439, 1922, 1733, 8393, 7228, 6830, - 5908, 6228, 9466, 2785, 4515, 6730, 3151, 3419, 3419, - 776, 3574, 2419, 4836, 3293, 8124, 1875, 7572, 2958, - 4651, 9262, 660, 8285, 6455, 653, 672, 0, 5228, - 9450, 3945, 8974, 8121, 9650, 2145, 4773, 7437, 4493, - 2675, 1190, 9285, 6100, 4192, 7922, 232, 671, 1102, - 7639, 9274, 9775, 7093, 5205, 6899, 4017, 9239, 5474, - 4012, 2403, 7759, 1283, 4106, 9062, 6550, 1753, 6450, - 8863, 2663, 1355, 6905, 1092, 2895, 2727, 2867, 1269, - 6117, 7865, 2347, 1781, 2367, 3514, 5957, 7481, 9656, - 4374, 661, 4235, 5018, 7284, 6968, 9032, 5003, 1196, - 3640, 4689, 1986, 9599, 2163, 9857, 2986, 6217, 1124, - 3869, 6932, 9440, 839, 1387, 3226, 4043, 7157, 7379, - 8428, 8483, 4369, 6851, 8809, 7897, 703, 2976, 3699, - 3438, 2355, 6736, 7238, 176, 7739, 889, 2831, 6969, - 8789, 5800, 3110, 6498, 4705, 6469, 9448, 7282, 2134, - 3678, 5467, 1622, 3274, 5066, 5127, 8447, 100, 2983, - 4463, 3099, 8883, 2393, 5221, 5531, 6706, 7577, 5800, - 8450, 3440, 6855, 944, 6894, 3303, 1082, 5849, 737, - 1687, 3315, 15, 7406, 3990, 2259, 7989, 8348, 7160, - 9351, 3002, 3148, 9126, 6367, 8090, 2998, 8051, 9049, - 2364, 8211, 5174, 5618, 94, 3866, 7923, 9652, 3819, - 3703, 4529, 1291, 2502, 976, 2602, 9342, 2433, 3989, - 7743, 4041, 7702, 1609, 486, 6429, 3042, 7286, 1452, - 35, 1305, 4754, 7796, 3723, 7653, 6695, 5085, 8985, - 3829, 2905, 9438, 2209, 6382, 4736, 9664, 3410, 9780, - 7415, 5568, 4279, 7830, 7106, 4004, 4722, 8650, 6975, - 2172, 2395, 8553, 3340, 3045, 7146, 5817, 3873, 8869, - 6247, 1175, 2122, 3744, 5725, 6888, 1097, 33, 3388, - 835, 6001, 1599, 2732, 8805, 8439, 7209, 2690, 5523, - 678, 764, 9297, 4921, 6303, 5169, 995, 8534, 8061, - 778, 3083, 4497, 1526, 7166, 9518, 9740, 5623, 1874, - 4112, 6650, 4724, 3864, 1864, 8807, 4615, 8323, 2087, - 3311, 7066, 3549, 9226, 6333, 9414, 1630, 6645, 4844, - 115, 7748, 2922, 9690, 9811, 2934, 356, 3565, 7527, - 5670, 3614, 2851, 2629, 3790, 7609, 2460, 7042, 9397, - 5659, 2806, 8290, 2345, 2448, 7257, 2879, 1633, 6514, - 6562, 6696, 7140, 2633, 5425, 7796, 6662, 1393, 2046, - 8374, 7261, 3390, 3981, 7552, 254, 5767, 2429, 5613, - 4296, 4489, 6937, 299, 9296, 2149, 2092, 8555, 8603, - 3237, 1398, 9762, 2121, 3314, 6752, 4047, 3531, 2282, - 9126, 
155, 6181, 9517, 318, 9471, 241, 3861, 4693, - 7187, 8904, 9238, 3275, 8438, 9069, 1864, 9231, 9667, - 4009, 9288, 4307, 8620, 7353, 7065, 3457, 9361, 7063, - 4553, 4913, 406, 2735, 5848, 6549, 3893, 4278, 1320, - 8124, 6472, 1508, 6416, 9674, 982, 6780, 7269, 2117, - 4160, 2842, 7527, 8109, 2175, 6435, 756, 2829, 4551, - 1101, 5613, 2799, 6159, 3030, 9674, 3371, 4668, 2685, - 9264, 9477, 5536, 5293, 9200, 7997, 9530, 5953, 890, - 5904, 2629, 8659, 5653, 9317, 4267, 9976, 2511, 6842, - 90, 512, 2068, 6073, 7386, 8238, 8424, 3409, 3193, - 1239, 1431, 3902, 4600, 1601, 3567, 9765, 5150, 8916, - 2840, 3297, 5983, 970, 249, 369, 3172, 7109, 7253, - 6112, 1285, 9881, 3414, 9391, 3724, 571, 8346, 8358, - 4162, 7136, 6172, 8809, 6689, 815, 5048, 3642, 959, - 6179, 9525, 7319, 8094, 4231, 3522, 6081, 9527, 8667, - 4007, 6492, 5206, 2671, 131, 2593, 293, 1464, 8045, - 2173, 608, 8313, 546, 7335, 3665, 2688, 8988, 3584, - 6303, 4955, 3386, 1723, 7105, 4572, 2844, 5600, 6702, - 5410, 4316, 2976, 4686, 3464, 2186, 1556, 2021, 8745, - 13, 3322, 1670, 708, 3608, 2776, 8220, 2918, 692, - 5296, 5873, 4095, 1083, 1521, 7223, 7591, 5664, 4154, - 6765, 3312, 7008, 4212, 7168, 7463, 2544, 4892, 8746, - 7430, 8621, 2945, 8225, 1113, 3741, 6687, 6930, 3774, - 7211, 1927, 7164, 9820, 2083, 2916, 9934, 6793, 5105, - 4835, 3112, 3094, 1326, 7804, 7578, 4182, 739, 1123, - 1767, 8724, 9857, 2143, 6971, 6801, 376, 4107, 954, - 5018, 8170, 7753, 3368, 2333, 1, 2240, 2793, 1283, - 7038, 8646, 9694, 359, 2258, 5039, 1080, 5544, 9098, - 4490, 5886, 352, 1011, 1449, 531, 9355, 9342, 2004, - 9274, 7734, 8205, 2292, 9551, 5966, 9837, 1814, 4079, - 8382, 4410, 2600, 7625, 9363, 1842, 2879, 1253, 809, - 7014, 5812, 209, 5239, 2093, 1770, 314, 8424, 2510, - 8571, 9426, 7724, 9614, 5503, 3753, 2866, 1758, 9962, - 6, 7364, 8977, 2787, 766, 2450, 2844, 7089, 3871, - 3747, 9673, 87, 5211, 3728, 3843, 1425, 9293, 1646, - 8032, 5484, 3610, 2890, 6677, 6468, 1558, 3598, 3915, - 1993, 1609, 8274, 966, 8311, 8117, 5411, 9792, 5362, - 4456, 6051, 3532, 1282, 4790, 4302, 1110, 1051, 5344, - 197, 7166, 3635, 796, 2322, 8834, 7592, 9685, 6175, - 1580, 7579, 7995, 5470, 7573, 5255, 5134, 8199, 4635, - 6559, 7004, 535, 234, 482, 8638, 109, 9961, 4999, - 7801, 2361, 4476, 9026, 8063, 7010, 7559, 3651, 6179, - 1451, 928, 8960, 630, 76, 1209, 5158, 5630, 9966, - 5033, 8228, 9559, 9213, 1023, 7495, 1852, 4508, 8270, - 1627, 1030, 4994, 541, 3130, 827, 3601, 8830, 6755, - 4725, 6005, 7586, 3820, 5409, 4677, 5963, 549, 7869, - 8701, 7230, 9014, 1899, 4054, 7192, 5440, 4348, 1719, - 2600, 6431, 2356, 4202, 427, 9630, 7862, 4930, 8419, - 7485, 2384, 6304, 9889, 7541, 9723, 7193, 9094, 6906, - 3017]), - values=tensor([0.9304, 0.0325, 0.3106, 0.4781, 0.2864, 0.4721, 0.3382, - 0.8821, 0.2738, 0.6632, 0.0397, 0.5410, 0.8129, 0.5302, - 0.8584, 0.6728, 0.7839, 0.2842, 0.6220, 0.2198, 0.9853, - 0.4195, 0.8925, 0.9780, 0.4936, 0.9314, 0.4002, 0.0589, - 0.0362, 0.6841, 0.4069, 0.9644, 0.1471, 0.1097, 0.7122, - 0.6469, 0.7726, 0.7037, 0.8236, 0.5724, 0.6757, 0.0210, - 0.7908, 0.1342, 0.5146, 0.6874, 0.0836, 0.6105, 0.6489, - 0.7550, 0.2489, 0.2644, 0.8196, 0.5567, 0.9361, 0.0192, - 0.7166, 0.4988, 0.6757, 0.6719, 0.0246, 0.9112, 0.3677, - 0.3643, 0.2678, 0.8468, 0.3772, 0.5807, 0.7059, 0.0144, - 0.4017, 0.2251, 0.1445, 0.5897, 0.5839, 0.8811, 0.5419, - 0.1899, 0.6807, 0.8358, 0.3278, 0.1587, 0.4733, 0.8342, - 0.8985, 0.6485, 0.7474, 0.6612, 0.0844, 0.3828, 0.0157, - 0.7714, 0.6608, 0.1525, 0.9463, 0.3394, 0.3867, 0.0432, - 0.8090, 0.3213, 0.9897, 0.9084, 
0.9424, 0.2045, 0.4579, - 0.7852, 0.4647, 0.4513, 0.6407, 0.5200, 0.9790, 0.1694, - 0.8201, 0.7817, 0.9650, 0.1348, 0.2654, 0.0726, 0.6476, - 0.9696, 0.1144, 0.2269, 0.5974, 0.7825, 0.6712, 0.1593, - 0.9034, 0.9204, 0.3969, 0.4522, 0.3790, 0.7055, 0.0019, - 0.1878, 0.4210, 0.9245, 0.3068, 0.7871, 0.0815, 0.1037, - 0.9467, 0.2547, 0.9280, 0.3139, 0.5680, 0.6516, 0.6068, - 0.5981, 0.3913, 0.0407, 0.0947, 0.2105, 0.0303, 0.5718, - 0.2321, 0.5689, 0.9476, 0.1321, 0.0247, 0.4729, 0.7661, - 0.8935, 0.9971, 0.2980, 0.9100, 0.1945, 0.7887, 0.3662, - 0.6700, 0.4029, 0.0956, 0.0108, 0.3242, 0.1912, 0.0388, - 0.8159, 0.1239, 0.6482, 0.0548, 0.7241, 0.4628, 0.1188, - 0.3048, 0.7968, 0.6538, 0.0308, 0.2637, 0.2733, 0.6327, - 0.6480, 0.2813, 0.5175, 0.6726, 0.0450, 0.6176, 0.2589, - 0.7109, 0.0980, 0.7310, 0.5738, 0.9989, 0.1733, 0.0559, - 0.0624, 0.6747, 0.0930, 0.2298, 0.6306, 0.1193, 0.8276, - 0.2679, 0.3372, 0.0565, 0.5821, 0.6562, 0.4518, 0.8406, - 0.7838, 0.3267, 0.0377, 0.0535, 0.4407, 0.8150, 0.3303, - 0.6133, 0.4116, 0.9870, 0.8925, 0.8368, 0.9237, 0.9331, - 0.5461, 0.4353, 0.9658, 0.0052, 0.5013, 0.8249, 0.8077, - 0.8369, 0.9914, 0.7366, 0.6685, 0.5210, 0.7940, 0.8071, - 0.1086, 0.3441, 0.1479, 0.3668, 0.5370, 0.5706, 0.8571, - 0.3522, 0.2921, 0.1107, 0.4358, 0.9740, 0.6370, 0.7121, - 0.3797, 0.6431, 0.6432, 0.8570, 0.3341, 0.8876, 0.4912, - 0.3012, 0.2244, 0.8304, 0.8708, 0.6993, 0.6415, 0.1354, - 0.4973, 0.6766, 0.5583, 0.8158, 0.1846, 0.7576, 0.3301, - 0.2284, 0.7380, 0.2497, 0.8663, 0.4244, 0.8319, 0.8766, - 0.3127, 0.9083, 0.6717, 0.2607, 0.9060, 0.3022, 0.6929, - 0.7216, 0.1864, 0.9103, 0.0908, 0.8280, 0.6772, 0.9170, - 0.1666, 0.0432, 0.9895, 0.8882, 0.4445, 0.2577, 0.6991, - 0.0134, 0.7908, 0.1595, 0.2586, 0.1384, 0.8815, 0.9964, - 0.6719, 0.3849, 0.3745, 0.5578, 0.6641, 0.8020, 0.8256, - 0.8774, 0.1735, 0.0160, 0.3321, 0.9095, 0.6863, 0.8819, - 0.2760, 0.0176, 0.9191, 0.2224, 0.5883, 0.6735, 0.2168, - 0.8084, 0.2051, 0.7731, 0.7349, 0.4183, 0.4549, 0.7852, - 0.1645, 0.2619, 0.7500, 0.6211, 0.9320, 0.1410, 0.8013, - 0.3936, 0.8135, 0.1711, 0.7508, 0.1565, 0.5072, 0.4150, - 0.0222, 0.7654, 0.2057, 0.7224, 0.5103, 0.0219, 0.2565, - 0.1947, 0.2598, 0.2876, 0.9465, 0.1945, 0.3773, 0.5036, - 0.9181, 0.1480, 0.8127, 0.9489, 0.5086, 0.2695, 0.5627, - 0.6161, 0.4583, 0.0870, 0.7396, 0.9559, 0.7672, 0.7594, - 0.2165, 0.4330, 0.5886, 0.0477, 0.3072, 0.0691, 0.3499, - 0.5368, 0.0098, 0.1604, 0.5787, 0.5961, 0.6748, 0.5240, - 0.6174, 0.6377, 0.0557, 0.8169, 0.1661, 0.8698, 0.1999, - 0.5072, 0.0521, 0.7366, 0.9190, 0.1330, 0.7979, 0.2571, - 0.8104, 0.6892, 0.6507, 0.8704, 0.6904, 0.5395, 0.5915, - 0.1731, 0.6768, 0.9334, 0.5447, 0.3147, 0.7565, 0.2840, - 0.8455, 0.7829, 0.1389, 0.9161, 0.9734, 0.2521, 0.5519, - 0.3612, 0.4687, 0.1449, 0.2398, 0.6605, 0.8039, 0.8419, - 0.6954, 0.8245, 0.4611, 0.7124, 0.8750, 0.0816, 0.7331, - 0.2216, 0.8005, 0.9334, 0.8519, 0.5866, 0.8732, 0.9519, - 0.4971, 0.6068, 0.1175, 0.8603, 0.6101, 0.3972, 0.0193, - 0.1596, 0.3517, 0.7516, 0.7870, 0.6434, 0.0362, 0.2214, - 0.6042, 0.3964, 0.6328, 0.2889, 0.2855, 0.8849, 0.5080, - 0.7797, 0.4322, 0.0814, 0.3647, 0.2061, 0.6314, 0.3193, - 0.1895, 0.3658, 0.6642, 0.1466, 0.1805, 0.6190, 0.9850, - 0.3325, 0.8191, 0.3665, 0.4316, 0.4284, 0.3112, 0.9226, - 0.5944, 0.0376, 0.1741, 0.8903, 0.0662, 0.9770, 0.9188, - 0.9310, 0.5606, 0.4272, 0.1108, 0.4718, 0.4460, 0.6248, - 0.5358, 0.2156, 0.0885, 0.3174, 0.8396, 0.1886, 0.2096, - 0.3417, 0.8097, 0.5694, 0.1045, 0.8763, 0.1113, 0.1353, - 0.0123, 0.9512, 0.5017, 0.3234, 0.1403, 0.0730, 
0.8981, - 0.2740, 0.4134, 0.2135, 0.9805, 0.0445, 0.9458, 0.7869, - 0.3360, 0.7234, 0.2980, 0.1314, 0.3499, 0.6698, 0.4526, - 0.6499, 0.4686, 0.7291, 0.1916, 0.4110, 0.7064, 0.0622, - 0.1843, 0.1217, 0.1311, 0.8602, 0.9506, 0.1258, 0.9113, - 0.9310, 0.4848, 0.0104, 0.0267, 0.5186, 0.0305, 0.5081, - 0.5501, 0.3248, 0.8113, 0.3173, 0.7019, 0.0515, 0.0562, - 0.1638, 0.4617, 0.6547, 0.3705, 0.4788, 0.0628, 0.4462, - 0.3249, 0.8781, 0.7038, 0.4954, 0.3617, 0.8045, 0.1896, - 0.8468, 0.7628, 0.4651, 0.3750, 0.9370, 0.8226, 0.5039, - 0.7669, 0.5888, 0.7467, 0.1323, 0.9814, 0.9275, 0.4832, - 0.2850, 0.4635, 0.1488, 0.7094, 0.2071, 0.4950, 0.1863, - 0.2851, 0.7798, 0.3730, 0.3994, 0.2529, 0.5052, 0.4832, - 0.3839, 0.7730, 0.1994, 0.8801, 0.9634, 0.1279, 0.5202, - 0.4480, 0.2752, 0.8425, 0.2605, 0.6678, 0.0019, 0.1146, - 0.5118, 0.8404, 0.4252, 0.8911, 0.6844, 0.6707, 0.4919, - 0.4044, 0.9689, 0.9549, 0.5260, 0.2040, 0.3758, 0.8436, - 0.0719, 0.9238, 0.4595, 0.7501, 0.7038, 0.0842, 0.8019, - 0.5135, 0.4312, 0.0042, 0.3774, 0.0300, 0.3045, 0.1310, - 0.9498, 0.2047, 0.0065, 0.4750, 0.7205, 0.8268, 0.2414, - 0.5067, 0.3174, 0.7835, 0.1750, 0.2497, 0.8182, 0.4410, - 0.4937, 0.0668, 0.7211, 0.3732, 0.1165, 0.5206, 0.1429, - 0.5890, 0.1326, 0.1841, 0.2299, 0.6975, 0.0815, 0.8063, - 0.7601, 0.5190, 0.4973, 0.7820, 0.2718, 0.3645, 0.2388, - 0.6349, 0.4919, 0.5469, 0.5835, 0.1693, 0.2555, 0.6997, - 0.0080, 0.8024, 0.8773, 0.4755, 0.9793, 0.3227, 0.7826, - 0.4776, 0.5918, 0.9313, 0.3366, 0.3665, 0.6137, 0.6442, - 0.7228, 0.1110, 0.6812, 0.4938, 0.1352, 0.2780, 0.6843, - 0.3593, 0.6866, 0.5996, 0.6977, 0.2843, 0.3178, 0.7528, - 0.8342, 0.5386, 0.6190, 0.0664, 0.9083, 0.4500, 0.1762, - 0.6839, 0.7444, 0.6654, 0.5698, 0.6462, 0.6689, 0.6885, - 0.6494, 0.1899, 0.7462, 0.4931, 0.7785, 0.4078, 0.0875, - 0.6143, 0.8570, 0.9888, 0.3955, 0.6640, 0.6587, 0.7544, - 0.9891, 0.6031, 0.0927, 0.6392, 0.0282, 0.2897, 0.0799, - 0.3039, 0.7537, 0.0064, 0.0830, 0.2575, 0.1555, 0.1809, - 0.6300, 0.6627, 0.5442, 0.5686, 0.4545, 0.6247, 0.0230, - 0.1211, 0.9727, 0.2738, 0.3745, 0.1564, 0.8676, 0.3949, - 0.4342, 0.6528, 0.7121, 0.9790, 0.6180, 0.8497, 0.6934, - 0.8114, 0.4502, 0.2751, 0.7561, 0.6667, 0.0282, 0.9822, - 0.8863, 0.6138, 0.1747, 0.4926, 0.0927, 0.1322, 0.4515, - 0.6778, 0.3334, 0.7750, 0.2319, 0.6137, 0.2868, 0.8848, - 0.0267, 0.5097, 0.9541, 0.4569, 0.9397, 0.3945, 0.3763, - 0.9350, 0.7996, 0.5528, 0.9459, 0.6150, 0.5344, 0.9224, - 0.4343, 0.2583, 0.5743, 0.1810, 0.4732, 0.1012, 0.2292, - 0.5431, 0.6259, 0.8319, 0.9927, 0.5847, 0.8053, 0.0391, - 0.3854, 0.8999, 0.7842, 0.4838, 0.5113, 0.9715, 0.1757, - 0.7568, 0.4008, 0.5341, 0.1516, 0.0878, 0.5977, 0.5872, - 0.7439, 0.8081, 0.9535, 0.6902, 0.8931, 0.9586, 0.7881, - 0.0686, 0.4547, 0.0160, 0.3146, 0.6264, 0.2480, 0.7559, - 0.5560, 0.9085, 0.3908, 0.0424, 0.3481, 0.0393, 0.1234, - 0.5520, 0.5796, 0.8048, 0.0202, 0.8271, 0.9243, 0.6015, - 0.0508, 0.8893, 0.9673, 0.3880, 0.3853, 0.1352, 0.4800, - 0.1939, 0.7035, 0.2054, 0.1146, 0.2407, 0.4234, 0.9640, - 0.4558, 0.8502, 0.7625, 0.3075, 0.9902, 0.1845, 0.0707, - 0.8518, 0.7267, 0.6494, 0.4761, 0.1632, 0.1248, 0.8200, - 0.2043, 0.6022, 0.7800, 0.0537, 0.1505, 0.0646, 0.9228, - 0.0966, 0.8036, 0.2431, 0.5992, 0.1858, 0.5672, 0.8294, - 0.0135, 0.2238, 0.0068, 0.8473, 0.0323, 0.2138, 0.3134, - 0.2674, 0.4860, 0.8071, 0.7400, 0.9168, 0.0701, 0.7855, - 0.7080, 0.5714, 0.5288, 0.1187, 0.1954, 0.0067, 0.7680, - 0.5930, 0.8250, 0.6028, 0.2144, 0.0255, 0.2917, 0.4790, - 0.3892, 0.3563, 0.0423, 0.3253, 0.0092, 0.8956, 0.2515, - 
0.1414, 0.9761, 0.8159, 0.7089, 0.4956, 0.0026, 0.1488, - 0.2902, 0.9089, 0.4432, 0.7989, 0.9160, 0.2680, 0.3317, - 0.6128, 0.6111, 0.3647, 0.4016, 0.8650, 0.7226, 0.2642, - 0.4868, 0.9208, 0.7252, 0.5230, 0.1652, 0.0793, 0.9874, - 0.5129, 0.3412, 0.3833, 0.8354, 0.9507, 0.1921, 0.2168, - 0.6983, 0.4500, 0.7444, 0.9235, 0.5009, 0.2575]), + col_indices=tensor([9040, 5122, 5464, 9665, 3805, 7324, 1101, 4202, 3618, + 548, 8991, 3805, 4000, 9051, 2104, 4199, 2659, 7669, + 5563, 100, 1652, 273, 6742, 2361, 2451, 827, 2210, + 4949, 4449, 215, 1756, 1146, 2459, 2647, 7745, 5364, + 1320, 65, 2667, 5440, 6880, 7751, 7310, 6102, 4866, + 4110, 5625, 8442, 1988, 4774, 6528, 9907, 7887, 1197, + 4402, 1468, 1188, 6555, 1733, 7550, 2262, 8238, 820, + 9123, 7059, 7826, 4514, 9178, 8092, 1783, 2131, 2104, + 4910, 4986, 3776, 1109, 9403, 7124, 5076, 4233, 9126, + 1708, 9288, 7682, 9171, 4902, 9460, 6679, 1401, 9808, + 466, 9819, 2111, 2700, 5442, 2266, 322, 8594, 9508, + 7353, 1203, 8663, 3088, 5378, 5302, 3605, 4390, 9794, + 2382, 7611, 4466, 242, 4793, 5634, 360, 4553, 2571, + 1675, 4788, 9381, 4818, 653, 2859, 3193, 162, 5417, + 4905, 1437, 1699, 7147, 814, 4476, 4283, 6843, 8944, + 2842, 2538, 8267, 1997, 356, 6879, 7104, 5178, 9490, + 1132, 1728, 3959, 3389, 1996, 5900, 2118, 5958, 5704, + 1439, 6320, 231, 7775, 3128, 8190, 7632, 5596, 438, + 159, 6050, 4154, 7593, 1065, 3516, 895, 7767, 1834, + 4275, 5079, 8435, 2124, 8451, 9380, 8095, 590, 7658, + 5370, 4750, 7110, 2865, 258, 3150, 5917, 8127, 4441, + 9140, 7763, 489, 8921, 4923, 9526, 7257, 2964, 7444, + 8435, 8160, 194, 6649, 5503, 5862, 6977, 152, 9854, + 9192, 2781, 9485, 2025, 3868, 6056, 2968, 4004, 691, + 7965, 1958, 2728, 2423, 8565, 6986, 6813, 1226, 9230, + 8480, 6961, 2287, 2314, 8873, 5591, 8105, 2276, 8255, + 291, 2371, 5343, 9567, 8954, 2135, 3799, 2744, 8320, + 3787, 8615, 4482, 4541, 4244, 3312, 783, 9291, 7438, + 9226, 7854, 301, 1101, 6808, 42, 3655, 3820, 3635, + 5279, 8470, 2202, 2739, 9131, 7201, 1296, 4443, 6142, + 8871, 4757, 902, 5193, 3587, 6019, 7685, 953, 8179, + 9495, 9063, 6519, 1335, 9205, 9741, 121, 3874, 5311, + 5680, 8901, 6459, 4917, 7675, 8440, 7502, 2408, 3072, + 5488, 9439, 7107, 2364, 1073, 1676, 2576, 7855, 5822, + 824, 8266, 2666, 989, 6612, 416, 2185, 9384, 3032, + 8751, 638, 8364, 876, 4886, 2311, 4228, 8153, 6928, + 6708, 5917, 5672, 5005, 8188, 2813, 1971, 9505, 4490, + 5199, 8647, 5228, 8753, 1287, 4562, 7457, 7765, 9109, + 1769, 6147, 8221, 167, 8131, 5204, 6806, 2708, 6231, + 9297, 1470, 8072, 1589, 2721, 7367, 6084, 4979, 8558, + 2773, 3513, 6150, 5854, 1083, 1478, 9566, 8397, 7235, + 3195, 6260, 8154, 2412, 6246, 9831, 8903, 2007, 2846, + 9587, 2092, 7644, 8207, 2241, 5877, 9734, 1978, 355, + 9034, 1856, 5170, 827, 6262, 1698, 7657, 8912, 7168, + 9506, 9168, 40, 3217, 4495, 7116, 7053, 2792, 8948, + 2237, 8320, 5355, 9833, 676, 3356, 4541, 6684, 6916, + 4027, 7413, 3237, 3018, 9023, 535, 3661, 1118, 3821, + 9856, 5333, 8294, 2279, 8253, 5682, 5017, 9524, 3761, + 2827, 953, 2140, 6709, 3589, 9913, 8125, 153, 8937, + 1201, 4288, 2780, 1083, 6984, 2382, 2788, 2654, 672, + 8640, 169, 7995, 4267, 1013, 4901, 2694, 9178, 9406, + 9090, 9218, 9450, 376, 818, 2903, 8081, 6480, 2147, + 7095, 2796, 3997, 1282, 9917, 8423, 7887, 6967, 1108, + 8879, 2809, 1517, 1772, 7465, 6367, 7000, 9415, 3406, + 6295, 858, 5572, 1817, 3200, 4765, 6889, 5625, 2000, + 9237, 9460, 1075, 4805, 2540, 3411, 9404, 17, 4672, + 8686, 827, 2382, 874, 4177, 745, 8897, 1796, 4225, + 42, 9758, 2416, 3642, 189, 3883, 7021, 5957, 8256, + 4047, 
7976, 4897, 1493, 7805, 9801, 1585, 8262, 5762, + 1484, 2473, 4448, 9596, 2874, 6632, 1793, 9562, 4266, + 4689, 7771, 3367, 6169, 2468, 3153, 5826, 6538, 1523, + 2251, 6320, 5321, 367, 3355, 4887, 1346, 9105, 1331, + 7460, 5286, 5505, 2342, 4100, 9650, 4005, 2567, 8701, + 5229, 3117, 2665, 1073, 8019, 2704, 2988, 959, 7465, + 8571, 4316, 264, 8169, 2124, 5670, 4896, 1391, 1478, + 9164, 8468, 3506, 636, 3131, 5518, 4605, 8384, 8743, + 6044, 2773, 4978, 3058, 7992, 4471, 1499, 1046, 635, + 5031, 6854, 8407, 6897, 790, 2496, 9914, 5269, 7280, + 2701, 6526, 6869, 2312, 6436, 8839, 8744, 8334, 9613, + 2170, 8284, 1024, 964, 2885, 5019, 5363, 6780, 2979, + 3128, 5872, 5584, 6878, 311, 8352, 2391, 2244, 2109, + 2904, 2992, 6884, 452, 4123, 7424, 7249, 1455, 8283, + 3341, 4981, 4141, 6247, 3799, 780, 8334, 1888, 2452, + 226, 7200, 8384, 4152, 4424, 7842, 5022, 5954, 2739, + 3404, 5395, 5924, 3030, 7104, 3274, 4516, 4930, 8440, + 7617, 3043, 6251, 6255, 1212, 2814, 7872, 1381, 4402, + 8474, 4300, 9166, 9827, 3317, 4780, 5537, 4823, 9195, + 1818, 3287, 4841, 4077, 6542, 9509, 7722, 8874, 9933, + 574, 9737, 284, 7742, 6015, 1975, 391, 3599, 4901, + 2276, 2265, 5370, 8189, 2026, 3228, 3853, 5026, 2807, + 9342, 1246, 8352, 8105, 8434, 4421, 9358, 3721, 3149, + 1162, 6017, 9998, 8439, 8750, 4812, 5022, 145, 8551, + 3165, 4940, 430, 9996, 9448, 6774, 3062, 2840, 558, + 1341, 2418, 8099, 3195, 9477, 982, 3033, 5334, 5905, + 6257, 1252, 5641, 2782, 7892, 9688, 7221, 2958, 1970, + 3177, 387, 7452, 1571, 1459, 7229, 6026, 5322, 9500, + 8013, 9966, 95, 4416, 5582, 7307, 5623, 9228, 2381, + 1900, 7680, 1782, 9693, 7277, 4005, 9471, 8609, 3041, + 9688, 4604, 8286, 4468, 9192, 1626, 7388, 1278, 4470, + 5474, 403, 9150, 205, 9277, 5426, 7168, 9258, 5896, + 345, 1337, 567, 9808, 2808, 8183, 7145, 7871, 219, + 7582, 8307, 4878, 7721, 6662, 8074, 4384, 4177, 6751, + 4064, 3852, 985, 6824, 8130, 8563, 1154, 1991, 5790, + 1721, 4071, 4684, 7730, 7955, 4668, 735, 9028, 728, + 7029, 2914, 938, 5705, 8627, 2388, 1106, 4899, 6765, + 4456, 4885, 7443, 7732, 8499, 5689, 4751, 578, 4468, + 8994, 9335, 38, 3516, 9736, 8851, 7346, 5014, 7348, + 7321, 8426, 8574, 721, 8105, 4299, 9058, 3294, 2411, + 5205, 5338, 3010, 6919, 105, 9225, 5867, 3016, 3624, + 9359, 8723, 7460, 9762, 2815, 7186, 7260, 6982, 9338, + 2376, 1300, 2386, 351, 3845, 1976, 9708, 9531, 4256, + 817, 1007, 1479, 3568, 8927, 8671, 5124, 1028, 1717, + 3339, 6864, 9462, 9945, 447, 569, 1139, 914, 4597, + 6697, 4309, 2367, 3981, 2509, 3461, 4040, 7806, 630, + 87, 6562, 6093, 187, 105, 117, 1451, 8479, 7240, + 351, 7175, 8821, 8518, 2800, 3083, 2048, 870, 3140, + 1897, 5214, 2370, 8206, 9765, 9134, 524, 8484, 5411, + 8642, 437, 8461, 7669, 8915, 2502, 1796, 835, 9375, + 2531, 583, 7733, 1003, 2155, 987, 6630, 8808, 8770, + 9726, 4450, 6483, 9277, 7364, 1951, 4987, 405, 7268, + 7026, 2565, 6059, 2118, 5884, 1838, 4096, 171, 7318, + 5854]), + values=tensor([0.1631, 0.8894, 0.0096, 0.4110, 0.3494, 0.1643, 0.7499, + 0.5978, 0.9455, 0.1671, 0.8961, 0.7461, 0.7006, 0.0945, + 0.1719, 0.6989, 0.0616, 0.2681, 0.5854, 0.5252, 0.5961, + 0.8441, 0.7249, 0.8473, 0.1511, 0.1204, 0.9651, 0.0672, + 0.6786, 0.0429, 0.1103, 0.4555, 0.7548, 0.3597, 0.8733, + 0.4866, 0.1972, 0.4093, 0.7090, 0.4899, 0.4260, 0.2233, + 0.8535, 0.5622, 0.8588, 0.6774, 0.7796, 0.9570, 0.2810, + 0.0687, 0.5447, 0.0374, 0.1927, 0.4881, 0.7663, 0.1635, + 0.4787, 0.5810, 0.8486, 0.1754, 0.7138, 0.8411, 0.6174, + 0.2657, 0.6475, 0.7442, 0.8339, 0.0888, 0.3325, 0.0591, + 0.0913, 0.7785, 0.4246, 0.3225, 0.8001, 
0.0463, 0.1853, + 0.5832, 0.5475, 0.7850, 0.0403, 0.8138, 0.2773, 0.9343, + 0.2873, 0.8164, 0.3979, 0.9870, 0.9519, 0.3676, 0.1250, + 0.1491, 0.6255, 0.5977, 0.5822, 0.5678, 0.9402, 0.2323, + 0.2212, 0.1032, 0.8803, 0.3110, 0.1695, 0.4521, 0.8724, + 0.2019, 0.4133, 0.3417, 0.0986, 0.4390, 0.8022, 0.8839, + 0.6181, 0.1178, 0.6029, 0.1557, 0.9100, 0.7201, 0.7842, + 0.7241, 0.9498, 0.7953, 0.0897, 0.6845, 0.8710, 0.5091, + 0.9873, 0.7293, 0.8904, 0.3358, 0.7744, 0.5191, 0.1667, + 0.3565, 0.2615, 0.2565, 0.5379, 0.8773, 0.1323, 0.6555, + 0.3582, 0.1608, 0.7166, 0.4360, 0.3916, 0.8947, 0.2525, + 0.2634, 0.2973, 0.4543, 0.1023, 0.3410, 0.4143, 0.9116, + 0.0312, 0.1559, 0.9357, 0.7890, 0.1647, 0.6969, 0.7615, + 0.0057, 0.9544, 0.9041, 0.9795, 0.6236, 0.8323, 0.3455, + 0.8900, 0.4963, 0.6340, 0.6257, 0.2148, 0.0022, 0.9164, + 0.9285, 0.7883, 0.6729, 0.3769, 0.6958, 0.6005, 0.2697, + 0.8674, 0.0992, 0.6459, 0.6046, 0.5854, 0.9809, 0.0304, + 0.6260, 0.7378, 0.6836, 0.1410, 0.8686, 0.8525, 0.7508, + 0.1556, 0.3802, 0.7925, 0.5165, 0.2581, 0.1414, 0.7040, + 0.3888, 0.9464, 0.3004, 0.2916, 0.3967, 0.1039, 0.9065, + 0.8347, 0.3336, 0.7087, 0.6067, 0.9572, 0.1600, 0.8060, + 0.2109, 0.8948, 0.5183, 0.7575, 0.8692, 0.4483, 0.5755, + 0.4449, 0.0177, 0.7302, 0.9562, 0.1642, 0.1753, 0.0774, + 0.1479, 0.8973, 0.1379, 0.3834, 0.4071, 0.1145, 0.6409, + 0.2502, 0.4046, 0.6576, 0.7386, 0.7872, 0.5015, 0.1940, + 0.7904, 0.7292, 0.6573, 0.9934, 0.6037, 0.8756, 0.9944, + 0.8350, 0.7906, 0.7256, 0.9732, 0.3718, 0.7132, 0.0238, + 0.7062, 0.1655, 0.5788, 0.3841, 0.4191, 0.0041, 0.6620, + 0.4693, 0.7912, 0.4461, 0.5280, 0.3983, 0.7847, 0.8014, + 0.7017, 0.8233, 0.3860, 0.5716, 0.2772, 0.6580, 0.7371, + 0.9645, 0.8337, 0.3466, 0.5222, 0.2580, 0.7169, 0.2145, + 0.8527, 0.2043, 0.3554, 0.0228, 0.0774, 0.7591, 0.4610, + 0.7792, 0.1180, 0.2219, 0.2849, 0.8745, 0.3587, 0.4147, + 0.7938, 0.5143, 0.0099, 0.3290, 0.8310, 0.4640, 0.2939, + 0.8996, 0.9448, 0.2782, 0.9194, 0.4979, 0.3061, 0.5600, + 0.9559, 0.9730, 0.9194, 0.6091, 0.7429, 0.7016, 0.8796, + 0.8789, 0.2287, 0.6748, 0.0561, 0.5949, 0.8939, 0.9024, + 0.5123, 0.4554, 0.6667, 0.8562, 0.0146, 0.2090, 0.9528, + 0.7350, 0.4491, 0.2127, 0.5166, 0.6902, 0.3695, 0.4233, + 0.6943, 0.4046, 0.0044, 0.8105, 0.3380, 0.9158, 0.0798, + 0.1702, 0.5559, 0.5461, 0.6980, 0.4085, 0.7820, 0.1853, + 0.1856, 0.0690, 0.2480, 0.7545, 0.2783, 0.8091, 0.5671, + 0.7910, 0.8452, 0.7722, 0.6532, 0.6848, 0.4856, 0.5250, + 0.4002, 0.6006, 0.6879, 0.4909, 0.4564, 0.3005, 0.2667, + 0.7924, 0.8030, 0.9840, 0.4878, 0.3714, 0.1412, 0.4534, + 0.7304, 0.7493, 0.6954, 0.8599, 0.8552, 0.2300, 0.8180, + 0.9873, 0.8495, 0.8609, 0.1945, 0.2438, 0.4306, 0.8172, + 0.2409, 0.1025, 0.5383, 0.5810, 0.3842, 0.6957, 0.3435, + 0.2926, 0.3042, 0.7420, 0.6237, 0.2952, 0.8309, 0.7177, + 0.1590, 0.1934, 0.6120, 0.5030, 0.0204, 0.1078, 0.0097, + 0.0135, 0.3834, 0.0202, 0.1322, 0.6632, 0.1937, 0.0307, + 0.1332, 0.0603, 0.2069, 0.5426, 0.7311, 0.2646, 0.1054, + 0.2366, 0.0151, 0.7103, 0.7689, 0.6429, 0.5792, 0.5061, + 0.2310, 0.9529, 0.7922, 0.4945, 0.6393, 0.3253, 0.7458, + 0.0460, 0.0681, 0.4941, 0.1864, 0.8046, 0.1182, 0.0896, + 0.2081, 0.4531, 0.0463, 0.0407, 0.5995, 0.5312, 0.3880, + 0.8311, 0.2207, 0.1846, 0.4510, 0.1915, 0.1883, 0.8431, + 0.4604, 0.3665, 0.8844, 0.7589, 0.7788, 0.4406, 0.6004, + 0.8282, 0.2245, 0.9361, 0.3940, 0.0810, 0.6473, 0.4515, + 0.1074, 0.5187, 0.0484, 0.1033, 0.1630, 0.6082, 0.4244, + 0.3451, 0.4847, 0.5401, 0.6981, 0.9077, 0.3187, 0.0204, + 0.1500, 0.9561, 0.3307, 0.8541, 0.4664, 0.0679, 0.6008, 
+ 0.3551, 0.1602, 0.1356, 0.7739, 0.9922, 0.1115, 0.4221, + 0.7342, 0.3701, 0.5516, 0.7290, 0.1823, 0.8391, 0.3144, + 0.2842, 0.4287, 0.9846, 0.1931, 0.8186, 0.4273, 0.1672, + 0.7529, 0.5201, 0.8910, 0.3887, 0.0088, 0.6169, 0.6593, + 0.7385, 0.8881, 0.0425, 0.1650, 0.5108, 0.1111, 0.7378, + 0.2538, 0.8382, 0.8390, 0.0324, 0.3769, 0.2078, 0.3192, + 0.3708, 0.6287, 0.1001, 0.9337, 0.4211, 0.5995, 0.8693, + 0.3094, 0.3899, 0.0074, 0.7492, 0.9247, 0.5356, 0.0615, + 0.4187, 0.4511, 0.8228, 0.9573, 0.6267, 0.9966, 0.6610, + 0.1486, 0.2982, 0.8276, 0.5406, 0.6792, 0.7083, 0.2795, + 0.4601, 0.1948, 0.7605, 0.0703, 0.5373, 0.1498, 0.5550, + 0.8890, 0.1968, 0.1402, 0.4658, 0.9248, 0.9920, 0.9675, + 0.8792, 0.8261, 0.8542, 0.6318, 0.3856, 0.8031, 0.6640, + 0.4809, 0.7477, 0.0880, 0.4756, 0.5062, 0.3891, 0.0419, + 0.6061, 0.5966, 0.9425, 0.9925, 0.2942, 0.5823, 0.3644, + 0.7992, 0.6200, 0.9739, 0.6564, 0.9608, 0.4431, 0.8049, + 0.0261, 0.8261, 0.0569, 0.0773, 0.3239, 0.8467, 0.9510, + 0.5867, 0.3719, 0.7573, 0.9140, 0.2131, 0.9985, 0.3629, + 0.7478, 0.4739, 0.2988, 0.8909, 0.2061, 0.6149, 0.4109, + 0.8872, 0.0729, 0.4503, 0.5052, 0.2616, 0.2537, 0.4288, + 0.3026, 0.1166, 0.8285, 0.6443, 0.9405, 0.1953, 0.8049, + 0.6424, 0.6070, 0.2630, 0.8745, 0.4341, 0.8408, 0.4939, + 0.5749, 0.3702, 0.1510, 0.6433, 0.8389, 0.8675, 0.7267, + 0.3359, 0.6067, 0.3091, 0.3531, 0.8159, 0.1911, 0.2045, + 0.4270, 0.2235, 0.7773, 0.9192, 0.2554, 0.1911, 0.6234, + 0.6872, 0.2590, 0.7714, 0.2995, 0.0364, 0.0604, 0.3942, + 0.3067, 0.8713, 0.1744, 0.9769, 0.6966, 0.7875, 0.5039, + 0.8877, 0.5834, 0.8390, 0.2182, 0.7345, 0.1361, 0.7280, + 0.1712, 0.1210, 0.1302, 0.2556, 0.0231, 0.0590, 0.9899, + 0.7543, 0.4957, 0.3975, 0.5063, 0.8250, 0.7945, 0.6271, + 0.4730, 0.2552, 0.7163, 0.2119, 0.6677, 0.7119, 0.5786, + 0.8142, 0.9839, 0.5980, 0.4963, 0.5903, 0.4274, 0.5568, + 0.6179, 0.0355, 0.3895, 0.4834, 0.8036, 0.1842, 0.9675, + 0.3580, 0.3098, 0.8756, 0.0144, 0.9480, 0.9859, 0.8073, + 0.7403, 0.3881, 0.7293, 0.6198, 0.2605, 0.2585, 0.4811, + 0.6619, 0.2322, 0.1379, 0.0945, 0.5638, 0.6420, 0.7869, + 0.9598, 0.1587, 0.4906, 0.9286, 0.7092, 0.0321, 0.6453, + 0.5691, 0.8260, 0.7424, 0.3870, 0.0368, 0.1228, 0.0535, + 0.7224, 0.7164, 0.1630, 0.0085, 0.6827, 0.0758, 0.8906, + 0.4015, 0.3826, 0.3400, 0.5792, 0.9513, 0.0686, 0.3178, + 0.4990, 0.0991, 0.1219, 0.2115, 0.8141, 0.3597, 0.3945, + 0.9179, 0.7319, 0.4944, 0.2500, 0.9913, 0.8114, 0.7975, + 0.3436, 0.9109, 0.8007, 0.4988, 0.3442, 0.3462, 0.8481, + 0.9295, 0.9331, 0.6901, 0.2219, 0.0720, 0.7241, 0.2444, + 0.5265, 0.4410, 0.6343, 0.4417, 0.3540, 0.0340, 0.0955, + 0.2848, 0.2900, 0.2775, 0.5273, 0.3970, 0.9068, 0.3147, + 0.3788, 0.2923, 0.7122, 0.0976, 0.6726, 0.9263, 0.8296, + 0.6670, 0.5959, 0.2637, 0.4644, 0.6565, 0.3214, 0.3965, + 0.7429, 0.9013, 0.6067, 0.1423, 0.1004, 0.2761, 0.8907, + 0.8446, 0.5993, 0.2319, 0.0820, 0.0772, 0.7393, 0.4599, + 0.3353, 0.3535, 0.2938, 0.2741, 0.3530, 0.2932, 0.1869, + 0.2035, 0.9574, 0.2805, 0.5650, 0.9268, 0.0476, 0.0202, + 0.7141, 0.7697, 0.5112, 0.8726, 0.0623, 0.0022, 0.2799, + 0.4643, 0.2345, 0.0668, 0.5760, 0.5095, 0.3629, 0.1234, + 0.3023, 0.5972, 0.1842, 0.3496, 0.6820, 0.6209, 0.3469, + 0.9843, 0.4363, 0.1818, 0.2092, 0.5560, 0.5562, 0.2474, + 0.4956, 0.2390, 0.9192, 0.7562, 0.6895, 0.7175, 0.8805, + 0.1489, 0.9286, 0.4439, 0.9506, 0.2816, 0.4374, 0.4558, + 0.8701, 0.9653, 0.1760, 0.3643, 0.4340, 0.3964, 0.6224, + 0.4880, 0.9824, 0.1759, 0.9561, 0.6413, 0.7429, 0.8542, + 0.8454, 0.5271, 0.7311, 0.2477, 0.2064, 0.7319, 0.9307, + 0.7469, 
0.0050, 0.0455, 0.1567, 0.1868, 0.2406, 0.9384, + 0.3766, 0.9221, 0.0626, 0.6697, 0.7479, 0.2498, 0.4700, + 0.5883, 0.9985, 0.0279, 0.4965, 0.7133, 0.9930, 0.3775, + 0.8616, 0.7460, 0.2314, 0.2420, 0.1500, 0.0134, 0.2393, + 0.2658, 0.8369, 0.0505, 0.3261, 0.4490, 0.3279, 0.1204, + 0.2416, 0.3865, 0.1735, 0.9687, 0.4705, 0.5012, 0.7498, + 0.1842, 0.9457, 0.2389, 0.2729, 0.0209, 0.5914, 0.4534, + 0.2782, 0.0655, 0.6488, 0.9723, 0.4052, 0.1188, 0.3484, + 0.5745, 0.4265, 0.9198, 0.6865, 0.4844, 0.4980, 0.6529, + 0.7642, 0.5329, 0.5549, 0.3109, 0.3843, 0.2091]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.5348, 0.7986, 0.2200, ..., 0.0453, 0.2085, 0.0080]) +tensor([0.6698, 0.4290, 0.4685, ..., 0.2920, 0.9011, 0.2731]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1295,13 +647,554 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 11.173964023590088 seconds +Time: 1.4692554473876953 seconds -[40.48, 41.2, 39.7, 39.62, 39.96, 39.55, 39.84, 39.41, 40.04, 39.52] -[97.63] -12.83384895324707 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 349456, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.173964023590088, 'TIME_S_1KI': 0.03197531026392475, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1252.9686733055114, 'W': 97.63} -[40.48, 41.2, 39.7, 39.62, 39.96, 39.55, 39.84, 39.41, 40.04, 39.52, 40.24, 39.6, 39.87, 39.86, 39.87, 39.92, 39.37, 39.31, 39.74, 39.32] -716.6399999999999 -35.831999999999994 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 349456, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.173964023590088, 'TIME_S_1KI': 0.03197531026392475, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1252.9686733055114, 'W': 97.63, 'J_1KI': 3.585483360724988, 'W_1KI': 0.2793770889611282, 'W_D': 61.798, 'J_D': 793.1061976127625, 'W_D_1KI': 0.17684057506524428, 'J_D_1KI': 0.0005060453249200021} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '360318', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.527426958084106} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
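A note on the summary lines in these runs: the two Python dicts printed at the end of each run can be re-derived from the raw values logged just before them (idle-power samples, load power, and the wall-clock length of the measured window). The sketch below reproduces the headline fields of the 360318-iteration run using only numbers visible in this output; the variable names are illustrative and are not taken from spmv.py or batch.py, and how the idle-power aggregate itself is accumulated is not shown in this diff.

    # Minimal sketch, assuming the arithmetic implied by the printed values.
    iterations = 360318                 # ITERATIONS
    time_s     = 10.527426958084106     # TIME_S: time spent in the SpMV loop
    load_w     = 97.79                  # power reading under load, watts
    elapsed_s  = 13.381757259368896     # wall-clock seconds of the measured window
    idle_w     = 724.28 / 20            # 36.214: printed idle-power aggregate / 20

    j   = load_w * elapsed_s            # 'J'   -> ~1308.602 joules
    w_d = load_w - idle_w               # 'W_D' -> ~61.576 W above idle
    j_d = w_d * elapsed_s               # 'J_D' -> ~823.995 joules above idle

    def per_1k_iters(x):
        # The '*_1KI' fields divide by iterations/1000.
        return x / (iterations / 1000)
    # per_1k_iters(time_s) ~ 0.02922, per_1k_iters(load_w) ~ 0.27140,
    # per_1k_iters(j) ~ 3.6318, per_1k_iters(w_d) ~ 0.17089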
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([5766, 7973, 2780, 3211, 9594, 8202, 4276, 3468, 2763, + 8172, 125, 6130, 209, 4027, 1805, 4863, 7827, 7923, + 3604, 7708, 857, 2121, 5611, 3524, 1803, 5126, 6664, + 5422, 9198, 8270, 9855, 8562, 4744, 5249, 7489, 3063, + 6337, 838, 6681, 3140, 8354, 4214, 3457, 9437, 9530, + 7940, 1764, 8298, 8134, 3741, 2409, 877, 1958, 5798, + 4406, 8279, 5232, 156, 5823, 3467, 4772, 6553, 3983, + 1528, 7910, 4804, 3175, 9452, 8577, 1586, 8580, 2673, + 1982, 1239, 8183, 2424, 7278, 9218, 1945, 4169, 4883, + 8116, 3091, 1525, 9572, 6102, 7072, 6015, 2321, 4696, + 4138, 870, 5212, 8282, 6668, 2421, 8337, 2293, 6466, + 1478, 3627, 937, 2553, 5768, 1496, 2794, 8455, 9783, + 4843, 2503, 4723, 1482, 8959, 3694, 866, 8594, 5963, + 1988, 7810, 7838, 2751, 5201, 8677, 4864, 8139, 632, + 6644, 3097, 1394, 6059, 8155, 2846, 1777, 6456, 8176, + 7371, 3358, 6829, 157, 1846, 7927, 3646, 7977, 3389, + 8996, 5123, 7577, 810, 5309, 6645, 5201, 6517, 9349, + 1972, 397, 2696, 6505, 3505, 39, 1681, 7539, 7649, + 4541, 2592, 4090, 372, 8070, 8656, 550, 3575, 2398, + 6124, 3969, 4102, 973, 6050, 5302, 7479, 298, 1762, + 3079, 4897, 4669, 5001, 2187, 4099, 251, 8622, 440, + 1353, 2831, 2162, 1680, 7185, 1025, 5353, 3100, 7889, + 1938, 8777, 7175, 1776, 9811, 5332, 6540, 2814, 694, + 8330, 5599, 3505, 5819, 8034, 3497, 5576, 2073, 6030, + 138, 4627, 3752, 4267, 3405, 2313, 8469, 9762, 2892, + 4848, 2525, 691, 3619, 7610, 8942, 3854, 8504, 1471, + 3760, 4764, 1573, 7874, 7032, 7439, 9628, 5646, 1475, + 6274, 2342, 6803, 3382, 4207, 7742, 4043, 375, 1643, + 8059, 5392, 2503, 6652, 8805, 7787, 8553, 5137, 5953, + 6638, 7014, 333, 6667, 410, 6995, 3350, 5519, 9203, + 2250, 5521, 9060, 3468, 9685, 6676, 1913, 6068, 5291, + 8086, 4244, 6935, 8677, 3927, 9877, 1712, 4499, 7550, + 2208, 7466, 8899, 1163, 3567, 9330, 7614, 837, 7386, + 1207, 6186, 3769, 4615, 5503, 5696, 812, 9654, 7215, + 1250, 7888, 7297, 3472, 5343, 7643, 8090, 2799, 2243, + 5708, 3321, 551, 1353, 2112, 6927, 8579, 5133, 6802, + 7964, 3082, 527, 2094, 6787, 3945, 3280, 5614, 1362, + 6669, 1618, 3865, 1564, 7508, 4551, 2912, 9003, 3151, + 4468, 2159, 1953, 3730, 383, 4555, 2147, 9572, 9533, + 8992, 9380, 977, 1777, 2066, 7805, 2087, 4545, 800, + 1911, 4772, 1351, 4646, 9159, 8853, 6520, 9296, 8554, + 8993, 270, 9662, 5551, 5841, 6357, 8019, 5684, 9061, + 8044, 5258, 6707, 9853, 8476, 8954, 5654, 1915, 3233, + 175, 2245, 7482, 2372, 6187, 3278, 2076, 692, 1683, + 6682, 2987, 7871, 5412, 2871, 896, 5481, 5299, 7647, + 9950, 4599, 3290, 7484, 3256, 6591, 149, 8225, 5726, + 7784, 393, 5157, 1112, 7343, 4079, 7528, 9702, 6988, + 4663, 7760, 4903, 2015, 7938, 905, 2607, 1033, 9160, + 514, 1784, 6839, 6591, 6451, 6516, 1438, 5978, 6160, + 9174, 4819, 8898, 2916, 5567, 4279, 340, 8595, 947, + 521, 5373, 1413, 6042, 1592, 5285, 695, 4327, 77, + 997, 854, 1558, 7313, 4062, 5510, 6943, 5376, 7868, + 8283, 4925, 1167, 4142, 5954, 4909, 5283, 8990, 4942, + 8381, 9851, 1222, 4712, 5720, 480, 5291, 6214, 2824, + 7553, 7284, 4984, 44, 6027, 6142, 8184, 2560, 2911, + 4092, 4130, 7559, 4036, 2656, 7028, 9951, 2315, 5328, + 2994, 882, 8401, 7911, 1158, 7357, 34, 6289, 4948, + 7136, 9135, 7311, 9058, 4734, 5930, 4968, 572, 6415, + 2254, 4762, 6785, 2544, 6717, 3467, 7226, 5846, 4206, + 5084, 6907, 3958, 1671, 4980, 7592, 9231, 9713, 5456, + 7720, 2621, 6631, 6005, 1750, 3047, 928, 5415, 8335, + 5248, 2008, 3093, 3399, 6956, 9536, 4775, 2141, 3347, + 
2976, 4134, 6091, 6340, 7590, 6947, 8762, 8119, 901, + 5735, 9315, 9430, 4881, 5215, 7281, 9725, 2507, 4021, + 7807, 4231, 1772, 6609, 1408, 690, 8765, 6284, 2689, + 3768, 9517, 458, 1785, 5761, 2479, 9693, 59, 6684, + 9533, 3733, 6053, 4729, 1965, 4472, 8449, 3802, 4913, + 6197, 4917, 18, 4715, 3687, 1155, 4309, 303, 1629, + 0, 2952, 3872, 5574, 6212, 793, 4453, 8523, 519, + 1219, 5489, 8441, 8122, 8721, 7792, 332, 5298, 5893, + 481, 4870, 3944, 5109, 2865, 8524, 2512, 4975, 1275, + 4071, 8515, 696, 6081, 6543, 1541, 7327, 3413, 2145, + 2993, 7736, 9160, 3908, 3567, 7205, 9450, 1976, 985, + 1693, 4916, 5845, 8346, 4185, 5244, 6056, 3484, 4239, + 1549, 8660, 9160, 520, 7363, 7220, 1099, 2796, 738, + 3718, 7599, 8991, 1567, 4930, 9106, 4062, 4469, 1844, + 8012, 7785, 2836, 5066, 4422, 8475, 4181, 8001, 4746, + 4668, 5691, 7248, 6395, 332, 370, 8249, 3840, 4839, + 7860, 7458, 2064, 2321, 9156, 800, 5902, 9560, 2242, + 4945, 1905, 9861, 3851, 6720, 1160, 8577, 8167, 1481, + 9998, 2191, 3479, 9049, 3427, 5432, 604, 6459, 6684, + 7201, 5475, 4558, 6035, 9400, 1225, 8724, 7372, 2582, + 5844, 6264, 7388, 3557, 1080, 7232, 2970, 1724, 8444, + 5896, 7380, 7158, 3667, 2368, 2231, 752, 4210, 1229, + 1906, 4325, 9109, 2316, 4312, 6237, 9376, 2388, 5153, + 3608, 7996, 2247, 7622, 5112, 1190, 9688, 1046, 8634, + 5815, 3913, 7631, 682, 7706, 9675, 1212, 4143, 1979, + 8444, 8027, 466, 2232, 534, 3309, 6778, 8942, 8420, + 8137, 1049, 2527, 8576, 1275, 1282, 731, 1269, 9470, + 3681, 1173, 6631, 7480, 9196, 2302, 1949, 8787, 1129, + 1125, 4297, 4943, 4447, 1664, 6711, 8112, 1408, 7950, + 7599, 2171, 4712, 2838, 8879, 1125, 4047, 4182, 8087, + 1569, 5203, 1187, 2617, 8221, 1044, 8367, 2455, 9256, + 8542, 2734, 4154, 564, 862, 2938, 8181, 9553, 6213, + 4745, 4737, 1459, 7909, 3161, 1590, 3897, 6011, 6214, + 2569, 549, 3805, 446, 5247, 3137, 4796, 1746, 1189, + 3620, 3084, 3566, 8386, 5910, 6207, 6634, 5459, 6679, + 5492, 6169, 124, 5792, 2812, 9746, 2003, 1033, 4312, + 295, 1228, 2246, 1734, 5746, 506, 5980, 3733, 2354, + 387, 556, 1008, 8019, 8686, 6438, 7916, 7645, 2553, + 9979, 2435, 675, 7835, 4651, 751, 7124, 697, 7035, + 9518, 4087, 9620, 6975, 1731, 5914, 3484, 1352, 6257, + 6718, 908, 8601, 3268, 5350, 4527, 6830, 1658, 5034, + 5775, 558, 5664, 6090, 4634, 8462, 7963, 8837, 5514, + 4446, 4641, 8679, 1259, 6362, 624, 4563, 3516, 6882, + 5871, 3970, 1698, 7248, 2012, 4600, 7752, 7422, 3099, + 8312, 7899, 5221, 3299, 4879, 9154, 8020, 8920, 1984, + 9823, 3973, 7049, 3087, 1749, 9956, 1222, 1950, 4061, + 5333, 9764, 1607, 7203, 1789, 6608, 3583, 5351, 3495, + 6607, 3337, 1557, 8046, 6830, 6440, 9293, 2777, 4730, + 3417, 3138, 4900, 2214, 2925, 3126, 1266, 4692, 4795, + 8310]), + values=tensor([0.3748, 0.7251, 0.6085, 0.0086, 0.0647, 0.8448, 0.3069, + 0.9220, 0.8517, 0.8690, 0.3399, 0.1551, 0.9748, 0.3433, + 0.3810, 0.4962, 0.9344, 0.8982, 0.2178, 0.6185, 0.3368, + 0.0322, 0.4531, 0.8663, 0.4863, 0.9404, 0.6500, 0.4692, + 0.0552, 0.1812, 0.8243, 0.9327, 0.6762, 0.7790, 0.5390, + 0.0307, 0.0611, 0.5630, 0.7745, 0.3836, 0.2714, 0.9120, + 0.8423, 0.5016, 0.0365, 0.1538, 0.4632, 0.4705, 0.1960, + 0.0621, 0.5818, 0.7436, 0.7049, 0.2547, 0.7136, 0.4854, + 0.6972, 0.8742, 0.4889, 0.1266, 0.3965, 0.8480, 0.3949, + 0.2237, 0.6648, 0.4717, 0.3323, 0.6358, 0.4995, 0.0382, + 0.6581, 0.1458, 0.2292, 0.4686, 0.8858, 0.6781, 0.2618, + 0.8092, 0.6558, 0.8634, 0.7807, 0.2261, 0.9837, 0.3291, + 0.0461, 0.2869, 0.4145, 0.3646, 0.5953, 0.2180, 0.0938, + 0.0363, 0.7765, 0.7685, 0.7201, 0.1291, 0.6075, 0.0551, + 0.5780, 0.8844, 
0.6624, 0.6469, 0.4489, 0.4839, 0.7254, + 0.1189, 0.0221, 0.0949, 0.5389, 0.7826, 0.9657, 0.1309, + 0.4357, 0.2953, 0.3822, 0.3422, 0.2824, 0.1551, 0.3769, + 0.7963, 0.7335, 0.7200, 0.5477, 0.9971, 0.7656, 0.7009, + 0.4415, 0.3722, 0.8372, 0.6898, 0.4507, 0.9078, 0.6026, + 0.1948, 0.8051, 0.7007, 0.3757, 0.2947, 0.8832, 0.3545, + 0.8982, 0.6194, 0.8628, 0.7932, 0.4420, 0.6263, 0.1084, + 0.9280, 0.6953, 0.8438, 0.5079, 0.6797, 0.4695, 0.4882, + 0.0405, 0.9585, 0.6813, 0.6481, 0.8626, 0.3202, 0.4896, + 0.2720, 0.3578, 0.3427, 0.3651, 0.4992, 0.5512, 0.3105, + 0.2782, 0.8411, 0.6370, 0.6306, 0.1504, 0.8961, 0.4184, + 0.3611, 0.1219, 0.5984, 0.2975, 0.4269, 0.4172, 0.2465, + 0.1520, 0.6790, 0.6422, 0.4095, 0.9047, 0.8998, 0.9223, + 0.9677, 0.6900, 0.4485, 0.9418, 0.6051, 0.8188, 0.2070, + 0.2354, 0.5114, 0.6536, 0.3031, 0.8623, 0.5110, 0.3916, + 0.2410, 0.4749, 0.6854, 0.2508, 0.4230, 0.5440, 0.9638, + 0.9553, 0.5685, 0.0033, 0.4347, 0.6212, 0.0085, 0.5291, + 0.4839, 0.1671, 0.8202, 0.7597, 0.1218, 0.0892, 0.0038, + 0.7729, 0.5608, 0.6710, 0.1275, 0.4216, 0.8768, 0.5410, + 0.9480, 0.7271, 0.9592, 0.6500, 0.3142, 0.4128, 0.3751, + 0.7960, 0.7044, 0.6279, 0.8593, 0.4373, 0.5930, 0.0710, + 0.9933, 0.7874, 0.4969, 0.7723, 0.5060, 0.4951, 0.9938, + 0.1735, 0.2681, 0.5656, 0.5160, 0.0646, 0.9346, 0.1120, + 0.3977, 0.2682, 0.0630, 0.5012, 0.0665, 0.9114, 0.4135, + 0.7401, 0.5131, 0.2648, 0.6703, 0.8294, 0.1391, 0.0821, + 0.5445, 0.1275, 0.3653, 0.2153, 0.3795, 0.3805, 0.8597, + 0.6993, 0.3980, 0.6514, 0.3216, 0.3582, 0.3989, 0.8129, + 0.3725, 0.5783, 0.4660, 0.4170, 0.7458, 0.0339, 0.5013, + 0.3898, 0.7208, 0.3136, 0.5609, 0.2838, 0.2580, 0.7442, + 0.8922, 0.7068, 0.1947, 0.7608, 0.4739, 0.5507, 0.2497, + 0.5240, 0.4929, 0.5431, 0.5771, 0.5305, 0.2866, 0.5271, + 0.3870, 0.0164, 0.9674, 0.8514, 0.9314, 0.5889, 0.6201, + 0.4773, 0.8407, 0.5378, 0.1488, 0.7085, 0.5677, 0.6798, + 0.3955, 0.6448, 0.9775, 0.9333, 0.0839, 0.7466, 0.9914, + 0.6412, 0.6891, 0.4489, 0.8129, 0.0040, 0.1733, 0.4027, + 0.9507, 0.8529, 0.8990, 0.3280, 0.1092, 0.8370, 0.7069, + 0.1977, 0.9423, 0.8664, 0.3980, 0.2829, 0.7771, 0.5671, + 0.8737, 0.5975, 0.9560, 0.8458, 0.6644, 0.5465, 0.8816, + 0.3812, 0.1084, 0.4217, 0.7369, 0.5760, 0.7519, 0.7681, + 0.5632, 0.3303, 0.0164, 0.8602, 0.3311, 0.1529, 0.1060, + 0.9357, 0.4612, 0.1917, 0.3903, 0.3151, 0.5589, 0.3146, + 0.2859, 0.3759, 0.2309, 0.7163, 0.9242, 0.2431, 0.9469, + 0.7537, 0.1414, 0.1493, 0.2855, 0.6242, 0.5471, 0.3069, + 0.0082, 0.1997, 0.5926, 0.5560, 0.0189, 0.3735, 0.3183, + 0.0434, 0.5654, 0.2451, 0.1150, 0.8137, 0.4292, 0.1227, + 0.6254, 0.7803, 0.4394, 0.5156, 0.1965, 0.1529, 0.3605, + 0.4396, 0.2921, 0.4553, 0.4954, 0.4495, 0.4114, 0.6994, + 0.0402, 0.8686, 0.7799, 0.0799, 0.9058, 0.9904, 0.3757, + 0.5766, 0.8563, 0.3999, 0.0764, 0.1339, 0.2859, 0.9220, + 0.1372, 0.5098, 0.4436, 0.6043, 0.1046, 0.3047, 0.4362, + 0.8862, 0.2658, 0.3846, 0.8853, 0.2010, 0.0637, 0.7516, + 0.8739, 0.6536, 0.5557, 0.0019, 0.6673, 0.2605, 0.7656, + 0.2845, 0.7096, 0.1054, 0.8094, 0.9011, 0.0095, 0.4612, + 0.3009, 0.1121, 0.4638, 0.6538, 0.3057, 0.3029, 0.9791, + 0.9391, 0.2713, 0.9208, 0.8081, 0.9573, 0.5411, 0.0150, + 0.5008, 0.5982, 0.5124, 0.6222, 0.2512, 0.5804, 0.8259, + 0.8207, 0.8416, 0.9741, 0.9387, 0.1226, 0.5152, 0.4031, + 0.6112, 0.6101, 0.5564, 0.9427, 0.4266, 0.0362, 0.6724, + 0.1725, 0.6694, 0.1189, 0.6417, 0.1205, 0.0397, 0.9793, + 0.1935, 0.2486, 0.4492, 0.4217, 0.1563, 0.5550, 0.7021, + 0.3402, 0.5412, 0.4358, 0.6247, 0.6236, 0.8801, 0.8823, + 0.7117, 0.4558, 0.8653, 0.3245, 
0.2788, 0.5348, 0.7691, + 0.2454, 0.8368, 0.1853, 0.3516, 0.2712, 0.7348, 0.9175, + 0.8120, 0.2641, 0.6998, 0.6171, 0.2259, 0.0069, 0.3326, + 0.4369, 0.9441, 0.3206, 0.8807, 0.2807, 0.8370, 0.7341, + 0.9830, 0.9303, 0.7959, 0.7809, 0.0183, 0.8274, 0.4472, + 0.6717, 0.2722, 0.8454, 0.3657, 0.2428, 0.1641, 0.8040, + 0.3907, 0.6985, 0.1781, 0.9196, 0.8354, 0.6765, 0.8170, + 0.8622, 0.5979, 0.6590, 0.8547, 0.6768, 0.9637, 0.5364, + 0.6575, 0.6928, 0.0506, 0.5094, 0.8572, 0.6505, 0.1098, + 0.9111, 0.0404, 0.6170, 0.4561, 0.2598, 0.5996, 0.7542, + 0.6567, 0.3609, 0.1279, 0.9176, 0.7239, 0.8986, 0.9571, + 0.0599, 0.0161, 0.5733, 0.9994, 0.6168, 0.0569, 0.8098, + 0.1598, 0.9042, 0.2293, 0.6013, 0.4809, 0.6291, 0.4472, + 0.9610, 0.2924, 0.4591, 0.3643, 0.6817, 0.1028, 0.6984, + 0.5224, 0.8035, 0.4562, 0.6993, 0.3960, 0.1675, 0.4842, + 0.8168, 0.6750, 0.2827, 0.1016, 0.7698, 0.6292, 0.5174, + 0.8470, 0.8590, 0.0201, 0.1578, 0.3095, 0.3445, 0.5396, + 0.0354, 0.8137, 0.3273, 0.4281, 0.1615, 0.1839, 0.2116, + 0.9247, 0.8111, 0.0353, 0.8826, 0.1547, 0.6781, 0.1169, + 0.4152, 0.9039, 0.6496, 0.1482, 0.7520, 0.7951, 0.4736, + 0.0454, 0.0622, 0.6978, 0.2267, 0.9860, 0.6663, 0.3956, + 0.2141, 0.3549, 0.0910, 0.2909, 0.5470, 0.7548, 0.0972, + 0.4757, 0.3250, 0.5499, 0.2243, 0.5297, 0.8612, 0.2274, + 0.0708, 0.5436, 0.9682, 0.7714, 0.8643, 0.1466, 0.6872, + 0.5096, 0.0677, 0.4723, 0.2781, 0.2643, 0.7250, 0.5338, + 0.9605, 0.4880, 0.8087, 0.3404, 0.9499, 0.2186, 0.5648, + 0.1601, 0.4222, 0.4619, 0.9274, 0.9196, 0.5713, 0.0610, + 0.2794, 0.8313, 0.2162, 0.2106, 0.1214, 0.6217, 0.5001, + 0.6973, 0.2053, 0.6011, 0.1748, 0.9410, 0.1447, 0.2945, + 0.6250, 0.9228, 0.9360, 0.5967, 0.4091, 0.1808, 0.3765, + 0.1337, 0.5400, 0.9876, 0.6793, 0.7304, 0.3127, 0.5520, + 0.6348, 0.1406, 0.6950, 0.0929, 0.0627, 0.4506, 0.3951, + 0.3316, 0.5265, 0.5732, 0.8916, 0.5309, 0.1661, 0.8605, + 0.4698, 0.9510, 0.7370, 0.1038, 0.2103, 0.5296, 0.6083, + 0.3996, 0.3776, 0.7427, 0.9500, 0.4692, 0.4301, 0.2318, + 0.6475, 0.6818, 0.7210, 0.4957, 0.3573, 0.1896, 0.8362, + 0.0892, 0.9289, 0.0416, 0.7232, 0.0997, 0.1239, 0.9977, + 0.4358, 0.5742, 0.1304, 0.2148, 0.2201, 0.4924, 0.9838, + 0.4169, 0.4513, 0.6597, 0.5735, 0.7650, 0.1572, 0.3173, + 0.0146, 0.3963, 0.3763, 0.1653, 0.1574, 0.5134, 0.0387, + 0.9809, 0.8512, 0.4693, 0.5948, 0.2904, 0.1408, 0.4624, + 0.8130, 0.5553, 0.5373, 0.2494, 0.8360, 0.7498, 0.9334, + 0.9454, 0.3945, 0.9701, 0.6077, 0.5653, 0.6160, 0.2710, + 0.6122, 0.6190, 0.1168, 0.2814, 0.5760, 0.8999, 0.6450, + 0.0683, 0.5070, 0.2019, 0.8585, 0.1942, 0.2940, 0.5216, + 0.3103, 0.6126, 0.6675, 0.8710, 0.7201, 0.0100, 0.0375, + 0.0963, 0.7717, 0.1452, 0.0646, 0.5055, 0.9412, 0.1846, + 0.6680, 0.1879, 0.4989, 0.0299, 0.1019, 0.2317, 0.9361, + 0.3513, 0.7939, 0.6448, 0.0672, 0.3504, 0.7322, 0.1915, + 0.9575, 0.1252, 0.0016, 0.2806, 0.1718, 0.3189, 0.9448, + 0.8493, 0.4983, 0.7286, 0.9555, 0.2622, 0.8758, 0.8372, + 0.5610, 0.8622, 0.9444, 0.1900, 0.1986, 0.2019, 0.1261, + 0.5115, 0.8721, 0.0151, 0.3809, 0.2622, 0.9685, 0.2802, + 0.4487, 0.0746, 0.0535, 0.8313, 0.2256, 0.3443, 0.6934, + 0.8284, 0.7207, 0.7301, 0.8063, 0.2291, 0.1601, 0.6156, + 0.5280, 0.1061, 0.8350, 0.8964, 0.7324, 0.3965, 0.6242, + 0.2963, 0.1018, 0.1467, 0.4504, 0.5584, 0.6791, 0.9586, + 0.0520, 0.7818, 0.3798, 0.9374, 0.8768, 0.2057, 0.9814, + 0.1150, 0.7122, 0.3423, 0.3305, 0.0945, 0.8800, 0.5622, + 0.1210, 0.3190, 0.5406, 0.1019, 0.4162, 0.4732, 0.2433, + 0.6735, 0.8204, 0.0250, 0.4980, 0.9544, 0.9459, 0.0656, + 0.6166, 0.6686, 0.0882, 0.4563, 0.0171, 0.2508, 
0.3079, + 0.1192, 0.9060, 0.2876, 0.0179, 0.2806, 0.5330, 0.0535, + 0.8679, 0.6396, 0.9573, 0.3695, 0.7753, 0.1809, 0.4376, + 0.2260, 0.3963, 0.6636, 0.7233, 0.8930, 0.6234, 0.6296, + 0.0101, 0.1070, 0.4268, 0.6794, 0.1008, 0.6421, 0.1477, + 0.9139, 0.6810, 0.1847, 0.6845, 0.0817, 0.7445, 0.4639, + 0.9411, 0.7143, 0.1906, 0.2476, 0.7680, 0.7234]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.7076, 0.7623, 0.0310, ..., 0.6712, 0.8172, 0.8298]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.527426958084106 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([5766, 7973, 2780, 3211, 9594, 8202, 4276, 3468, 2763, + 8172, 125, 6130, 209, 4027, 1805, 4863, 7827, 7923, + 3604, 7708, 857, 2121, 5611, 3524, 1803, 5126, 6664, + 5422, 9198, 8270, 9855, 8562, 4744, 5249, 7489, 3063, + 6337, 838, 6681, 3140, 8354, 4214, 3457, 9437, 9530, + 7940, 1764, 8298, 8134, 3741, 2409, 877, 1958, 5798, + 4406, 8279, 5232, 156, 5823, 3467, 4772, 6553, 3983, + 1528, 7910, 4804, 3175, 9452, 8577, 1586, 8580, 2673, + 1982, 1239, 8183, 2424, 7278, 9218, 1945, 4169, 4883, + 8116, 3091, 1525, 9572, 6102, 7072, 6015, 2321, 4696, + 4138, 870, 5212, 8282, 6668, 2421, 8337, 2293, 6466, + 1478, 3627, 937, 2553, 5768, 1496, 2794, 8455, 9783, + 4843, 2503, 4723, 1482, 8959, 3694, 866, 8594, 5963, + 1988, 7810, 7838, 2751, 5201, 8677, 4864, 8139, 632, + 6644, 3097, 1394, 6059, 8155, 2846, 1777, 6456, 8176, + 7371, 3358, 6829, 157, 1846, 7927, 3646, 7977, 3389, + 8996, 5123, 7577, 810, 5309, 6645, 5201, 6517, 9349, + 1972, 397, 2696, 6505, 3505, 39, 1681, 7539, 7649, + 4541, 2592, 4090, 372, 8070, 8656, 550, 3575, 2398, + 6124, 3969, 4102, 973, 6050, 5302, 7479, 298, 1762, + 3079, 4897, 4669, 5001, 2187, 4099, 251, 8622, 440, + 1353, 2831, 2162, 1680, 7185, 1025, 5353, 3100, 7889, + 1938, 8777, 7175, 1776, 9811, 5332, 6540, 2814, 694, + 8330, 5599, 3505, 5819, 8034, 3497, 5576, 2073, 6030, + 138, 4627, 3752, 4267, 3405, 2313, 8469, 9762, 2892, + 4848, 2525, 691, 3619, 7610, 8942, 3854, 8504, 1471, + 3760, 4764, 1573, 7874, 7032, 7439, 9628, 5646, 1475, + 6274, 2342, 6803, 3382, 4207, 7742, 4043, 375, 1643, + 8059, 5392, 2503, 6652, 8805, 7787, 8553, 5137, 5953, + 6638, 7014, 333, 6667, 410, 6995, 3350, 5519, 9203, + 2250, 5521, 9060, 3468, 9685, 6676, 1913, 6068, 5291, + 8086, 4244, 6935, 8677, 3927, 9877, 1712, 4499, 7550, + 2208, 7466, 8899, 1163, 3567, 9330, 7614, 837, 7386, + 1207, 6186, 3769, 4615, 5503, 5696, 812, 9654, 7215, + 1250, 7888, 7297, 3472, 5343, 7643, 8090, 2799, 2243, + 5708, 3321, 551, 1353, 2112, 6927, 8579, 5133, 6802, + 7964, 3082, 527, 2094, 6787, 3945, 3280, 5614, 1362, + 6669, 1618, 3865, 1564, 7508, 4551, 2912, 9003, 3151, + 4468, 2159, 1953, 3730, 383, 4555, 2147, 9572, 9533, + 8992, 9380, 977, 1777, 2066, 7805, 2087, 4545, 800, + 1911, 4772, 1351, 4646, 9159, 8853, 6520, 9296, 8554, + 8993, 270, 9662, 5551, 5841, 6357, 8019, 5684, 9061, + 8044, 5258, 6707, 9853, 8476, 8954, 5654, 1915, 3233, + 175, 2245, 7482, 2372, 6187, 3278, 2076, 692, 1683, + 6682, 2987, 7871, 5412, 2871, 
896, 5481, 5299, 7647, + 9950, 4599, 3290, 7484, 3256, 6591, 149, 8225, 5726, + 7784, 393, 5157, 1112, 7343, 4079, 7528, 9702, 6988, + 4663, 7760, 4903, 2015, 7938, 905, 2607, 1033, 9160, + 514, 1784, 6839, 6591, 6451, 6516, 1438, 5978, 6160, + 9174, 4819, 8898, 2916, 5567, 4279, 340, 8595, 947, + 521, 5373, 1413, 6042, 1592, 5285, 695, 4327, 77, + 997, 854, 1558, 7313, 4062, 5510, 6943, 5376, 7868, + 8283, 4925, 1167, 4142, 5954, 4909, 5283, 8990, 4942, + 8381, 9851, 1222, 4712, 5720, 480, 5291, 6214, 2824, + 7553, 7284, 4984, 44, 6027, 6142, 8184, 2560, 2911, + 4092, 4130, 7559, 4036, 2656, 7028, 9951, 2315, 5328, + 2994, 882, 8401, 7911, 1158, 7357, 34, 6289, 4948, + 7136, 9135, 7311, 9058, 4734, 5930, 4968, 572, 6415, + 2254, 4762, 6785, 2544, 6717, 3467, 7226, 5846, 4206, + 5084, 6907, 3958, 1671, 4980, 7592, 9231, 9713, 5456, + 7720, 2621, 6631, 6005, 1750, 3047, 928, 5415, 8335, + 5248, 2008, 3093, 3399, 6956, 9536, 4775, 2141, 3347, + 2976, 4134, 6091, 6340, 7590, 6947, 8762, 8119, 901, + 5735, 9315, 9430, 4881, 5215, 7281, 9725, 2507, 4021, + 7807, 4231, 1772, 6609, 1408, 690, 8765, 6284, 2689, + 3768, 9517, 458, 1785, 5761, 2479, 9693, 59, 6684, + 9533, 3733, 6053, 4729, 1965, 4472, 8449, 3802, 4913, + 6197, 4917, 18, 4715, 3687, 1155, 4309, 303, 1629, + 0, 2952, 3872, 5574, 6212, 793, 4453, 8523, 519, + 1219, 5489, 8441, 8122, 8721, 7792, 332, 5298, 5893, + 481, 4870, 3944, 5109, 2865, 8524, 2512, 4975, 1275, + 4071, 8515, 696, 6081, 6543, 1541, 7327, 3413, 2145, + 2993, 7736, 9160, 3908, 3567, 7205, 9450, 1976, 985, + 1693, 4916, 5845, 8346, 4185, 5244, 6056, 3484, 4239, + 1549, 8660, 9160, 520, 7363, 7220, 1099, 2796, 738, + 3718, 7599, 8991, 1567, 4930, 9106, 4062, 4469, 1844, + 8012, 7785, 2836, 5066, 4422, 8475, 4181, 8001, 4746, + 4668, 5691, 7248, 6395, 332, 370, 8249, 3840, 4839, + 7860, 7458, 2064, 2321, 9156, 800, 5902, 9560, 2242, + 4945, 1905, 9861, 3851, 6720, 1160, 8577, 8167, 1481, + 9998, 2191, 3479, 9049, 3427, 5432, 604, 6459, 6684, + 7201, 5475, 4558, 6035, 9400, 1225, 8724, 7372, 2582, + 5844, 6264, 7388, 3557, 1080, 7232, 2970, 1724, 8444, + 5896, 7380, 7158, 3667, 2368, 2231, 752, 4210, 1229, + 1906, 4325, 9109, 2316, 4312, 6237, 9376, 2388, 5153, + 3608, 7996, 2247, 7622, 5112, 1190, 9688, 1046, 8634, + 5815, 3913, 7631, 682, 7706, 9675, 1212, 4143, 1979, + 8444, 8027, 466, 2232, 534, 3309, 6778, 8942, 8420, + 8137, 1049, 2527, 8576, 1275, 1282, 731, 1269, 9470, + 3681, 1173, 6631, 7480, 9196, 2302, 1949, 8787, 1129, + 1125, 4297, 4943, 4447, 1664, 6711, 8112, 1408, 7950, + 7599, 2171, 4712, 2838, 8879, 1125, 4047, 4182, 8087, + 1569, 5203, 1187, 2617, 8221, 1044, 8367, 2455, 9256, + 8542, 2734, 4154, 564, 862, 2938, 8181, 9553, 6213, + 4745, 4737, 1459, 7909, 3161, 1590, 3897, 6011, 6214, + 2569, 549, 3805, 446, 5247, 3137, 4796, 1746, 1189, + 3620, 3084, 3566, 8386, 5910, 6207, 6634, 5459, 6679, + 5492, 6169, 124, 5792, 2812, 9746, 2003, 1033, 4312, + 295, 1228, 2246, 1734, 5746, 506, 5980, 3733, 2354, + 387, 556, 1008, 8019, 8686, 6438, 7916, 7645, 2553, + 9979, 2435, 675, 7835, 4651, 751, 7124, 697, 7035, + 9518, 4087, 9620, 6975, 1731, 5914, 3484, 1352, 6257, + 6718, 908, 8601, 3268, 5350, 4527, 6830, 1658, 5034, + 5775, 558, 5664, 6090, 4634, 8462, 7963, 8837, 5514, + 4446, 4641, 8679, 1259, 6362, 624, 4563, 3516, 6882, + 5871, 3970, 1698, 7248, 2012, 4600, 7752, 7422, 3099, + 8312, 7899, 5221, 3299, 4879, 9154, 8020, 8920, 1984, + 9823, 3973, 7049, 3087, 1749, 9956, 1222, 1950, 4061, + 5333, 9764, 1607, 7203, 1789, 6608, 3583, 5351, 3495, + 6607, 
3337, 1557, 8046, 6830, 6440, 9293, 2777, 4730, + 3417, 3138, 4900, 2214, 2925, 3126, 1266, 4692, 4795, + 8310]), + values=tensor([0.3748, 0.7251, 0.6085, 0.0086, 0.0647, 0.8448, 0.3069, + 0.9220, 0.8517, 0.8690, 0.3399, 0.1551, 0.9748, 0.3433, + 0.3810, 0.4962, 0.9344, 0.8982, 0.2178, 0.6185, 0.3368, + 0.0322, 0.4531, 0.8663, 0.4863, 0.9404, 0.6500, 0.4692, + 0.0552, 0.1812, 0.8243, 0.9327, 0.6762, 0.7790, 0.5390, + 0.0307, 0.0611, 0.5630, 0.7745, 0.3836, 0.2714, 0.9120, + 0.8423, 0.5016, 0.0365, 0.1538, 0.4632, 0.4705, 0.1960, + 0.0621, 0.5818, 0.7436, 0.7049, 0.2547, 0.7136, 0.4854, + 0.6972, 0.8742, 0.4889, 0.1266, 0.3965, 0.8480, 0.3949, + 0.2237, 0.6648, 0.4717, 0.3323, 0.6358, 0.4995, 0.0382, + 0.6581, 0.1458, 0.2292, 0.4686, 0.8858, 0.6781, 0.2618, + 0.8092, 0.6558, 0.8634, 0.7807, 0.2261, 0.9837, 0.3291, + 0.0461, 0.2869, 0.4145, 0.3646, 0.5953, 0.2180, 0.0938, + 0.0363, 0.7765, 0.7685, 0.7201, 0.1291, 0.6075, 0.0551, + 0.5780, 0.8844, 0.6624, 0.6469, 0.4489, 0.4839, 0.7254, + 0.1189, 0.0221, 0.0949, 0.5389, 0.7826, 0.9657, 0.1309, + 0.4357, 0.2953, 0.3822, 0.3422, 0.2824, 0.1551, 0.3769, + 0.7963, 0.7335, 0.7200, 0.5477, 0.9971, 0.7656, 0.7009, + 0.4415, 0.3722, 0.8372, 0.6898, 0.4507, 0.9078, 0.6026, + 0.1948, 0.8051, 0.7007, 0.3757, 0.2947, 0.8832, 0.3545, + 0.8982, 0.6194, 0.8628, 0.7932, 0.4420, 0.6263, 0.1084, + 0.9280, 0.6953, 0.8438, 0.5079, 0.6797, 0.4695, 0.4882, + 0.0405, 0.9585, 0.6813, 0.6481, 0.8626, 0.3202, 0.4896, + 0.2720, 0.3578, 0.3427, 0.3651, 0.4992, 0.5512, 0.3105, + 0.2782, 0.8411, 0.6370, 0.6306, 0.1504, 0.8961, 0.4184, + 0.3611, 0.1219, 0.5984, 0.2975, 0.4269, 0.4172, 0.2465, + 0.1520, 0.6790, 0.6422, 0.4095, 0.9047, 0.8998, 0.9223, + 0.9677, 0.6900, 0.4485, 0.9418, 0.6051, 0.8188, 0.2070, + 0.2354, 0.5114, 0.6536, 0.3031, 0.8623, 0.5110, 0.3916, + 0.2410, 0.4749, 0.6854, 0.2508, 0.4230, 0.5440, 0.9638, + 0.9553, 0.5685, 0.0033, 0.4347, 0.6212, 0.0085, 0.5291, + 0.4839, 0.1671, 0.8202, 0.7597, 0.1218, 0.0892, 0.0038, + 0.7729, 0.5608, 0.6710, 0.1275, 0.4216, 0.8768, 0.5410, + 0.9480, 0.7271, 0.9592, 0.6500, 0.3142, 0.4128, 0.3751, + 0.7960, 0.7044, 0.6279, 0.8593, 0.4373, 0.5930, 0.0710, + 0.9933, 0.7874, 0.4969, 0.7723, 0.5060, 0.4951, 0.9938, + 0.1735, 0.2681, 0.5656, 0.5160, 0.0646, 0.9346, 0.1120, + 0.3977, 0.2682, 0.0630, 0.5012, 0.0665, 0.9114, 0.4135, + 0.7401, 0.5131, 0.2648, 0.6703, 0.8294, 0.1391, 0.0821, + 0.5445, 0.1275, 0.3653, 0.2153, 0.3795, 0.3805, 0.8597, + 0.6993, 0.3980, 0.6514, 0.3216, 0.3582, 0.3989, 0.8129, + 0.3725, 0.5783, 0.4660, 0.4170, 0.7458, 0.0339, 0.5013, + 0.3898, 0.7208, 0.3136, 0.5609, 0.2838, 0.2580, 0.7442, + 0.8922, 0.7068, 0.1947, 0.7608, 0.4739, 0.5507, 0.2497, + 0.5240, 0.4929, 0.5431, 0.5771, 0.5305, 0.2866, 0.5271, + 0.3870, 0.0164, 0.9674, 0.8514, 0.9314, 0.5889, 0.6201, + 0.4773, 0.8407, 0.5378, 0.1488, 0.7085, 0.5677, 0.6798, + 0.3955, 0.6448, 0.9775, 0.9333, 0.0839, 0.7466, 0.9914, + 0.6412, 0.6891, 0.4489, 0.8129, 0.0040, 0.1733, 0.4027, + 0.9507, 0.8529, 0.8990, 0.3280, 0.1092, 0.8370, 0.7069, + 0.1977, 0.9423, 0.8664, 0.3980, 0.2829, 0.7771, 0.5671, + 0.8737, 0.5975, 0.9560, 0.8458, 0.6644, 0.5465, 0.8816, + 0.3812, 0.1084, 0.4217, 0.7369, 0.5760, 0.7519, 0.7681, + 0.5632, 0.3303, 0.0164, 0.8602, 0.3311, 0.1529, 0.1060, + 0.9357, 0.4612, 0.1917, 0.3903, 0.3151, 0.5589, 0.3146, + 0.2859, 0.3759, 0.2309, 0.7163, 0.9242, 0.2431, 0.9469, + 0.7537, 0.1414, 0.1493, 0.2855, 0.6242, 0.5471, 0.3069, + 0.0082, 0.1997, 0.5926, 0.5560, 0.0189, 0.3735, 0.3183, + 0.0434, 0.5654, 0.2451, 0.1150, 0.8137, 0.4292, 0.1227, + 
0.6254, 0.7803, 0.4394, 0.5156, 0.1965, 0.1529, 0.3605, + 0.4396, 0.2921, 0.4553, 0.4954, 0.4495, 0.4114, 0.6994, + 0.0402, 0.8686, 0.7799, 0.0799, 0.9058, 0.9904, 0.3757, + 0.5766, 0.8563, 0.3999, 0.0764, 0.1339, 0.2859, 0.9220, + 0.1372, 0.5098, 0.4436, 0.6043, 0.1046, 0.3047, 0.4362, + 0.8862, 0.2658, 0.3846, 0.8853, 0.2010, 0.0637, 0.7516, + 0.8739, 0.6536, 0.5557, 0.0019, 0.6673, 0.2605, 0.7656, + 0.2845, 0.7096, 0.1054, 0.8094, 0.9011, 0.0095, 0.4612, + 0.3009, 0.1121, 0.4638, 0.6538, 0.3057, 0.3029, 0.9791, + 0.9391, 0.2713, 0.9208, 0.8081, 0.9573, 0.5411, 0.0150, + 0.5008, 0.5982, 0.5124, 0.6222, 0.2512, 0.5804, 0.8259, + 0.8207, 0.8416, 0.9741, 0.9387, 0.1226, 0.5152, 0.4031, + 0.6112, 0.6101, 0.5564, 0.9427, 0.4266, 0.0362, 0.6724, + 0.1725, 0.6694, 0.1189, 0.6417, 0.1205, 0.0397, 0.9793, + 0.1935, 0.2486, 0.4492, 0.4217, 0.1563, 0.5550, 0.7021, + 0.3402, 0.5412, 0.4358, 0.6247, 0.6236, 0.8801, 0.8823, + 0.7117, 0.4558, 0.8653, 0.3245, 0.2788, 0.5348, 0.7691, + 0.2454, 0.8368, 0.1853, 0.3516, 0.2712, 0.7348, 0.9175, + 0.8120, 0.2641, 0.6998, 0.6171, 0.2259, 0.0069, 0.3326, + 0.4369, 0.9441, 0.3206, 0.8807, 0.2807, 0.8370, 0.7341, + 0.9830, 0.9303, 0.7959, 0.7809, 0.0183, 0.8274, 0.4472, + 0.6717, 0.2722, 0.8454, 0.3657, 0.2428, 0.1641, 0.8040, + 0.3907, 0.6985, 0.1781, 0.9196, 0.8354, 0.6765, 0.8170, + 0.8622, 0.5979, 0.6590, 0.8547, 0.6768, 0.9637, 0.5364, + 0.6575, 0.6928, 0.0506, 0.5094, 0.8572, 0.6505, 0.1098, + 0.9111, 0.0404, 0.6170, 0.4561, 0.2598, 0.5996, 0.7542, + 0.6567, 0.3609, 0.1279, 0.9176, 0.7239, 0.8986, 0.9571, + 0.0599, 0.0161, 0.5733, 0.9994, 0.6168, 0.0569, 0.8098, + 0.1598, 0.9042, 0.2293, 0.6013, 0.4809, 0.6291, 0.4472, + 0.9610, 0.2924, 0.4591, 0.3643, 0.6817, 0.1028, 0.6984, + 0.5224, 0.8035, 0.4562, 0.6993, 0.3960, 0.1675, 0.4842, + 0.8168, 0.6750, 0.2827, 0.1016, 0.7698, 0.6292, 0.5174, + 0.8470, 0.8590, 0.0201, 0.1578, 0.3095, 0.3445, 0.5396, + 0.0354, 0.8137, 0.3273, 0.4281, 0.1615, 0.1839, 0.2116, + 0.9247, 0.8111, 0.0353, 0.8826, 0.1547, 0.6781, 0.1169, + 0.4152, 0.9039, 0.6496, 0.1482, 0.7520, 0.7951, 0.4736, + 0.0454, 0.0622, 0.6978, 0.2267, 0.9860, 0.6663, 0.3956, + 0.2141, 0.3549, 0.0910, 0.2909, 0.5470, 0.7548, 0.0972, + 0.4757, 0.3250, 0.5499, 0.2243, 0.5297, 0.8612, 0.2274, + 0.0708, 0.5436, 0.9682, 0.7714, 0.8643, 0.1466, 0.6872, + 0.5096, 0.0677, 0.4723, 0.2781, 0.2643, 0.7250, 0.5338, + 0.9605, 0.4880, 0.8087, 0.3404, 0.9499, 0.2186, 0.5648, + 0.1601, 0.4222, 0.4619, 0.9274, 0.9196, 0.5713, 0.0610, + 0.2794, 0.8313, 0.2162, 0.2106, 0.1214, 0.6217, 0.5001, + 0.6973, 0.2053, 0.6011, 0.1748, 0.9410, 0.1447, 0.2945, + 0.6250, 0.9228, 0.9360, 0.5967, 0.4091, 0.1808, 0.3765, + 0.1337, 0.5400, 0.9876, 0.6793, 0.7304, 0.3127, 0.5520, + 0.6348, 0.1406, 0.6950, 0.0929, 0.0627, 0.4506, 0.3951, + 0.3316, 0.5265, 0.5732, 0.8916, 0.5309, 0.1661, 0.8605, + 0.4698, 0.9510, 0.7370, 0.1038, 0.2103, 0.5296, 0.6083, + 0.3996, 0.3776, 0.7427, 0.9500, 0.4692, 0.4301, 0.2318, + 0.6475, 0.6818, 0.7210, 0.4957, 0.3573, 0.1896, 0.8362, + 0.0892, 0.9289, 0.0416, 0.7232, 0.0997, 0.1239, 0.9977, + 0.4358, 0.5742, 0.1304, 0.2148, 0.2201, 0.4924, 0.9838, + 0.4169, 0.4513, 0.6597, 0.5735, 0.7650, 0.1572, 0.3173, + 0.0146, 0.3963, 0.3763, 0.1653, 0.1574, 0.5134, 0.0387, + 0.9809, 0.8512, 0.4693, 0.5948, 0.2904, 0.1408, 0.4624, + 0.8130, 0.5553, 0.5373, 0.2494, 0.8360, 0.7498, 0.9334, + 0.9454, 0.3945, 0.9701, 0.6077, 0.5653, 0.6160, 0.2710, + 0.6122, 0.6190, 0.1168, 0.2814, 0.5760, 0.8999, 0.6450, + 0.0683, 0.5070, 0.2019, 0.8585, 0.1942, 0.2940, 0.5216, + 0.3103, 0.6126, 
0.6675, 0.8710, 0.7201, 0.0100, 0.0375, + 0.0963, 0.7717, 0.1452, 0.0646, 0.5055, 0.9412, 0.1846, + 0.6680, 0.1879, 0.4989, 0.0299, 0.1019, 0.2317, 0.9361, + 0.3513, 0.7939, 0.6448, 0.0672, 0.3504, 0.7322, 0.1915, + 0.9575, 0.1252, 0.0016, 0.2806, 0.1718, 0.3189, 0.9448, + 0.8493, 0.4983, 0.7286, 0.9555, 0.2622, 0.8758, 0.8372, + 0.5610, 0.8622, 0.9444, 0.1900, 0.1986, 0.2019, 0.1261, + 0.5115, 0.8721, 0.0151, 0.3809, 0.2622, 0.9685, 0.2802, + 0.4487, 0.0746, 0.0535, 0.8313, 0.2256, 0.3443, 0.6934, + 0.8284, 0.7207, 0.7301, 0.8063, 0.2291, 0.1601, 0.6156, + 0.5280, 0.1061, 0.8350, 0.8964, 0.7324, 0.3965, 0.6242, + 0.2963, 0.1018, 0.1467, 0.4504, 0.5584, 0.6791, 0.9586, + 0.0520, 0.7818, 0.3798, 0.9374, 0.8768, 0.2057, 0.9814, + 0.1150, 0.7122, 0.3423, 0.3305, 0.0945, 0.8800, 0.5622, + 0.1210, 0.3190, 0.5406, 0.1019, 0.4162, 0.4732, 0.2433, + 0.6735, 0.8204, 0.0250, 0.4980, 0.9544, 0.9459, 0.0656, + 0.6166, 0.6686, 0.0882, 0.4563, 0.0171, 0.2508, 0.3079, + 0.1192, 0.9060, 0.2876, 0.0179, 0.2806, 0.5330, 0.0535, + 0.8679, 0.6396, 0.9573, 0.3695, 0.7753, 0.1809, 0.4376, + 0.2260, 0.3963, 0.6636, 0.7233, 0.8930, 0.6234, 0.6296, + 0.0101, 0.1070, 0.4268, 0.6794, 0.1008, 0.6421, 0.1477, + 0.9139, 0.6810, 0.1847, 0.6845, 0.0817, 0.7445, 0.4639, + 0.9411, 0.7143, 0.1906, 0.2476, 0.7680, 0.7234]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.7076, 0.7623, 0.0310, ..., 0.6712, 0.8172, 0.8298]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.527426958084106 seconds + +[40.29, 41.7, 40.43, 39.93, 40.63, 41.16, 39.74, 39.77, 39.65, 39.51] +[97.79] +13.381757259368896 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 360318, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.527426958084106, 'TIME_S_1KI': 0.029217044272237598, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1308.6020423936845, 'W': 97.79} +[40.29, 41.7, 40.43, 39.93, 40.63, 41.16, 39.74, 39.77, 39.65, 39.51, 40.23, 41.08, 39.76, 40.17, 39.91, 40.85, 39.5, 40.14, 40.16, 39.37] +724.28 +36.214 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 360318, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.527426958084106, 'TIME_S_1KI': 0.029217044272237598, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1308.6020423936845, 'W': 97.79, 'J_1KI': 3.631797585448644, 'W_1KI': 0.2713991529704317, 'W_D': 61.57600000000001, 'J_D': 823.9950850028993, 'W_D_1KI': 0.17089348852957667, 'J_D_1KI': 0.00047428518289282433} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.json index 6ded063..2bce2c3 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 308023, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.28289532661438, "TIME_S_1KI": 0.03338353086170311, "BASELINE_TIME_S": 10, 
"BASELINE_DELAY_S": 10, "J": 1226.4741826629638, "W": 98.08, "J_1KI": 3.9817616952726382, "W_1KI": 0.3184177804904179, "W_D": 61.967, "J_D": 774.8870888772011, "W_D_1KI": 0.20117653551845155, "J_D_1KI": 0.0006531217977828004} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 319440, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.952518463134766, "TIME_S_1KI": 0.03428662178542063, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1362.7280988121033, "W": 98.16000000000001, "J_1KI": 4.265990792675004, "W_1KI": 0.3072877535687453, "W_D": 52.57025000000001, "J_D": 729.8182236815096, "W_D_1KI": 0.16457002880040073, "J_D_1KI": 0.0005151829100939166} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.output index df70699..10ed23c 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.019933462142944336} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.019902944564819336} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 4999, 5000, 5000]), - col_indices=tensor([4080, 6557, 3158, ..., 4357, 6307, 2550]), - values=tensor([0.9910, 0.3414, 0.4855, ..., 0.2598, 0.6108, 0.2815]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 4999, 4999, 5000]), + col_indices=tensor([2372, 5760, 6848, ..., 5311, 7243, 8029]), + values=tensor([0.3104, 0.6761, 0.3545, ..., 0.8200, 0.7445, 0.5520]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.2787, 0.7388, 0.8319, ..., 0.5413, 0.0496, 0.2437]) +tensor([0.5031, 0.8529, 0.5243, ..., 0.2679, 0.5423, 0.7335]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.019933462142944336 seconds +Time: 0.019902944564819336 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52675', '-ss', '10000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.7956035137176514} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52756', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.877525806427002} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
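A note on the iteration counts in these .output files: each file records a short calibration sequence before the measured run. The harness first times 100 iterations, then rescales the count toward the 10-second BASELINE_TIME_S and repeats until the measured TIME_S reaches that baseline (in this file: 100 -> 52756 -> 295036 -> 319440). The rescaling rule below is inferred from the printed counts, not read from spmv.py or batch.py, so the exact margin and rounding are assumptions.

    def next_iteration_count(current_iters: int, measured_s: float,
                             baseline_s: float = 10.0, margin: float = 1.05) -> int:
        # Scale so the next run should take ~baseline_s, with a ~5% safety margin.
        # e.g. 100 iters @ 0.0199 s -> 52756; 52756 iters @ 1.8775 s -> 295036;
        #      295036 iters @ 9.698 s -> 319440 (matches the counts printed in this file).
        return int(current_iters * baseline_s / measured_s * margin)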
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), - col_indices=tensor([ 408, 476, 3837, ..., 3097, 8388, 8856]), - values=tensor([0.3698, 0.9808, 0.6496, ..., 0.7839, 0.4021, 0.3346]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4997, 4999, 5000]), + col_indices=tensor([6754, 721, 8092, ..., 4456, 5626, 9090]), + values=tensor([0.2896, 0.1011, 0.3253, ..., 0.8144, 0.5407, 0.9332]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.1775, 0.8809, 0.7204, ..., 0.4994, 0.6943, 0.3851]) +tensor([0.9789, 0.3880, 0.5470, ..., 0.8477, 0.3284, 0.8468]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 1.7956035137176514 seconds +Time: 1.877525806427002 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '308023', '-ss', '10000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.28289532661438} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '295036', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.697813034057617} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 5000, 5000]), - col_indices=tensor([2483, 6584, 3017, ..., 870, 3138, 2052]), - values=tensor([0.7385, 0.7043, 0.9061, ..., 0.4377, 0.8515, 0.3180]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4998, 4999, 5000]), + col_indices=tensor([ 740, 7946, 4530, ..., 5174, 247, 935]), + values=tensor([0.0112, 0.8926, 0.5684, ..., 0.2376, 0.1963, 0.7042]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.1916, 0.9837, 0.2990, ..., 0.4110, 0.2807, 0.4933]) +tensor([0.7472, 0.6061, 0.9715, ..., 0.9591, 0.6644, 0.1656]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.28289532661438 seconds +Time: 9.697813034057617 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '319440', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.952518463134766} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 5000, 5000]), - col_indices=tensor([2483, 6584, 3017, ..., 870, 3138, 2052]), - values=tensor([0.7385, 0.7043, 0.9061, ..., 0.4377, 0.8515, 0.3180]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 5000, 5000]), + col_indices=tensor([2136, 3821, 7264, ..., 9985, 4627, 3401]), + values=tensor([0.8150, 0.5813, 0.6488, ..., 0.6046, 0.0821, 0.1459]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.1916, 0.9837, 0.2990, ..., 0.4110, 0.2807, 0.4933]) +tensor([0.7848, 0.8694, 0.6164, ..., 0.4553, 0.9967, 0.4418]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +72,29 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.28289532661438 seconds +Time: 10.952518463134766 seconds -[40.25, 39.88, 39.44, 44.89, 40.01, 39.55, 39.45, 39.48, 39.91, 41.29] -[98.08] -12.504834651947021 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 308023, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.28289532661438, 'TIME_S_1KI': 0.03338353086170311, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1226.4741826629638, 'W': 98.08} -[40.25, 39.88, 39.44, 44.89, 40.01, 39.55, 39.45, 39.48, 39.91, 41.29, 44.96, 39.78, 39.79, 40.18, 39.36, 39.46, 39.39, 39.37, 39.43, 39.28] -722.26 -36.113 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 308023, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.28289532661438, 'TIME_S_1KI': 0.03338353086170311, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1226.4741826629638, 'W': 98.08, 'J_1KI': 3.9817616952726382, 'W_1KI': 0.3184177804904179, 'W_D': 61.967, 'J_D': 774.8870888772011, 'W_D_1KI': 0.20117653551845155, 'J_D_1KI': 0.0006531217977828004} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 5000, 5000]), + col_indices=tensor([2136, 3821, 7264, ..., 9985, 4627, 3401]), + values=tensor([0.8150, 0.5813, 0.6488, ..., 0.6046, 0.0821, 0.1459]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.7848, 0.8694, 0.6164, ..., 0.4553, 0.9967, 0.4418]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.952518463134766 seconds + +[42.53, 39.89, 39.54, 39.38, 39.41, 39.39, 39.72, 39.55, 39.61, 40.35] +[98.16] +13.882723093032837 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 319440, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.952518463134766, 'TIME_S_1KI': 0.03428662178542063, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1362.7280988121033, 'W': 98.16000000000001} +[42.53, 39.89, 39.54, 39.38, 39.41, 39.39, 39.72, 39.55, 39.61, 40.35, 40.76, 45.98, 64.96, 63.35, 66.95, 64.56, 52.89, 67.11, 71.86, 71.65] +911.7950000000001 +45.58975 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 319440, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.952518463134766, 'TIME_S_1KI': 0.03428662178542063, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1362.7280988121033, 'W': 98.16000000000001, 'J_1KI': 4.265990792675004, 'W_1KI': 0.3072877535687453, 'W_D': 52.57025000000001, 'J_D': 729.8182236815096, 'W_D_1KI': 0.16457002880040073, 'J_D_1KI': 0.0005151829100939166} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.json index f68b6b9..5670ac8 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1275, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.370937824249268, "TIME_S_1KI": 8.134068881764131, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2317.954887828827, "W": 118.94, "J_1KI": 1818.0038335912368, "W_1KI": 93.28627450980392, "W_D": 82.976, "J_D": 1617.072681793213, "W_D_1KI": 65.07921568627451, "J_D_1KI": 51.04252210688197} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1374, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.242573499679565, "TIME_S_1KI": 8.18236790369692, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2407.0519640755656, "W": 121.43, "J_1KI": 1751.8573246547055, "W_1KI": 88.37700145560407, "W_D": 84.9805, "J_D": 1684.5300126255752, "W_D_1KI": 61.84898107714702, "J_D_1KI": 45.01381446662811} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.output index 8fe4739..813cac8 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.8229217529296875} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.8090236186981201} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 92, ..., 24999906, - 24999954, 25000000]), - col_indices=tensor([ 13687, 16103, 22085, ..., 466250, 497468, - 498839]), - values=tensor([0.1763, 0.0612, 0.1831, ..., 0.7206, 0.9735, 0.4201]), +tensor(crow_indices=tensor([ 0, 48, 94, ..., 24999925, + 24999965, 25000000]), + col_indices=tensor([ 23376, 31397, 31689, ..., 443361, 445058, + 487681]), + values=tensor([0.4839, 0.8919, 0.4062, ..., 0.7044, 0.3151, 0.1559]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.0392, 0.3068, 0.8540, ..., 0.0771, 0.2433, 0.8939]) +tensor([0.1445, 0.7363, 0.4463, ..., 0.9472, 0.1025, 0.0893]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 0.8229217529296875 seconds +Time: 0.8090236186981201 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1275', '-ss', '500000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.370937824249268} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1297', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.907588481903076} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
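A note on reading the CSR dumps in these outputs: crow_indices has one entry per row plus one, and row i owns the slots in the half-open range [crow_indices[i], crow_indices[i+1]) of col_indices/values, so repeated consecutive values in crow_indices mark empty rows and the final entry equals nnz. The toy below builds a small tensor of the same layout and runs the sparse-matrix-times-dense-vector product these benchmarks time; the 4x4 data is made up for illustration, and the exact call inside spmv.py is not shown in this diff.

    import torch

    # Toy CSR matrix in the same layout as the dumps above (values are arbitrary).
    crow_indices = torch.tensor([0, 1, 1, 3, 4])   # row 0: 1 entry, row 1: empty, row 2: 2, row 3: 1
    col_indices  = torch.tensor([2, 0, 3, 1])
    values       = torch.tensor([0.5, 0.25, 0.75, 1.0])
    a = torch.sparse_csr_tensor(crow_indices, col_indices, values, size=(4, 4))

    x = torch.rand(4)   # dense right-hand-side vector, like the tensor([...]) lines above
    y = a @ x           # SpMV; assumes a PyTorch build with sparse CSR matmul support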
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 89, ..., 24999893, - 24999957, 25000000]), - col_indices=tensor([ 25264, 35882, 38786, ..., 487781, 491680, - 492236]), - values=tensor([0.0901, 0.4292, 0.0295, ..., 0.7641, 0.5758, 0.3435]), +tensor(crow_indices=tensor([ 0, 35, 72, ..., 24999893, + 24999944, 25000000]), + col_indices=tensor([ 16655, 42218, 84405, ..., 471063, 480187, + 495612]), + values=tensor([0.8923, 0.1977, 0.5949, ..., 0.6111, 0.0213, 0.1253]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.7878, 0.6485, 0.9023, ..., 0.5055, 0.2764, 0.4227]) +tensor([0.7619, 0.7901, 0.5066, ..., 0.1051, 0.1233, 0.1900]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,20 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.370937824249268 seconds +Time: 9.907588481903076 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1374', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.242573499679565} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 89, ..., 24999893, - 24999957, 25000000]), - col_indices=tensor([ 25264, 35882, 38786, ..., 487781, 491680, - 492236]), - values=tensor([0.0901, 0.4292, 0.0295, ..., 0.7641, 0.5758, 0.3435]), +tensor(crow_indices=tensor([ 0, 39, 98, ..., 24999895, + 24999956, 25000000]), + col_indices=tensor([ 49186, 57840, 60335, ..., 478855, 479456, + 490342]), + values=tensor([0.8208, 0.1257, 0.0473, ..., 0.1778, 0.0330, 0.2399]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.7878, 0.6485, 0.9023, ..., 0.5055, 0.2764, 0.4227]) +tensor([0.2858, 0.5405, 0.2026, ..., 0.3109, 0.2614, 0.2825]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +59,31 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.370937824249268 seconds +Time: 11.242573499679565 seconds -[40.75, 40.02, 39.59, 39.62, 39.74, 39.84, 39.97, 39.74, 40.73, 39.5] -[118.94] -19.488438606262207 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1275, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.370937824249268, 'TIME_S_1KI': 8.134068881764131, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2317.954887828827, 'W': 118.94} -[40.75, 40.02, 39.59, 39.62, 39.74, 39.84, 39.97, 39.74, 40.73, 39.5, 41.45, 40.23, 40.12, 39.57, 39.77, 39.94, 39.75, 39.69, 40.29, 39.64] -719.28 -35.964 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1275, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.370937824249268, 'TIME_S_1KI': 8.134068881764131, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2317.954887828827, 'W': 118.94, 'J_1KI': 1818.0038335912368, 'W_1KI': 93.28627450980392, 'W_D': 82.976, 'J_D': 1617.072681793213, 'W_D_1KI': 65.07921568627451, 'J_D_1KI': 51.04252210688197} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 39, 98, ..., 24999895, + 24999956, 25000000]), + col_indices=tensor([ 49186, 57840, 60335, ..., 478855, 479456, + 490342]), + values=tensor([0.8208, 0.1257, 0.0473, ..., 0.1778, 0.0330, 0.2399]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2858, 0.5405, 0.2026, ..., 0.3109, 0.2614, 0.2825]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 11.242573499679565 seconds + +[41.4, 40.98, 40.32, 40.18, 40.32, 39.77, 39.88, 39.58, 40.12, 39.72] +[121.43] +19.822547674179077 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1374, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.242573499679565, 'TIME_S_1KI': 8.18236790369692, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2407.0519640755656, 'W': 121.43} +[41.4, 40.98, 40.32, 40.18, 40.32, 39.77, 39.88, 39.58, 40.12, 39.72, 41.41, 40.06, 39.79, 41.99, 39.71, 39.74, 45.22, 40.21, 39.84, 40.03] +728.99 +36.4495 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1374, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.242573499679565, 'TIME_S_1KI': 8.18236790369692, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2407.0519640755656, 'W': 121.43, 'J_1KI': 1751.8573246547055, 'W_1KI': 88.37700145560407, 'W_D': 84.9805, 'J_D': 1684.5300126255752, 'W_D_1KI': 61.84898107714702, 'J_D_1KI': 45.01381446662811} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json index 24823a1..6b52d9e 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 20602, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.051029920578003, "TIME_S_1KI": 0.4878667081146492, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2012.3513662075998, "W": 152.11, "J_1KI": 97.67747627451702, "W_1KI": 7.383263760799923, "W_D": 115.75800000000001, "J_D": 1531.429685421467, "W_D_1KI": 5.61877487622561, "J_D_1KI": 0.272729583352374} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 21617, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, 
"MATRIX_DENSITY": 1e-05, "TIME_S": 10.82520842552185, "TIME_S_1KI": 0.5007729298941505, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2091.1719660377503, "W": 152.99, "J_1KI": 96.73738104444419, "W_1KI": 7.07730027293334, "W_D": 117.02850000000001, "J_D": 1599.6255861654283, "W_D_1KI": 5.413725308784754, "J_D_1KI": 0.25043832672363203} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output index 60d167b..6c81aa1 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08110809326171875} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.07646036148071289} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 8, ..., 2499988, - 2499994, 2500000]), - col_indices=tensor([ 61750, 191731, 192878, ..., 292292, 347392, - 413452]), - values=tensor([0.4333, 0.7749, 0.6975, ..., 0.5571, 0.2303, 0.6423]), +tensor(crow_indices=tensor([ 0, 7, 9, ..., 2499989, + 2499996, 2500000]), + col_indices=tensor([102432, 128529, 159807, ..., 283424, 312882, + 427124]), + values=tensor([0.3590, 0.8739, 0.1654, ..., 0.5356, 0.8734, 0.7698]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7573, 0.7811, 0.2609, ..., 0.7028, 0.0683, 0.1077]) +tensor([0.5883, 0.7167, 0.7101, ..., 0.2003, 0.5848, 0.6856]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,41 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 0.08110809326171875 seconds +Time: 0.07646036148071289 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12945', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.597289562225342} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '13732', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.670015335083008} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 9, ..., 2499996, +tensor(crow_indices=tensor([ 0, 4, 16, ..., 2499988, + 2499996, 2500000]), + col_indices=tensor([ 59164, 71164, 341125, ..., 45240, 225921, + 431340]), + values=tensor([0.0543, 0.5224, 0.7852, ..., 0.1414, 0.5048, 0.3968]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.2230, 0.1381, 0.5919, ..., 0.8762, 0.3274, 0.6312]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 6.670015335083008 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21617', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.82520842552185} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 11, ..., 2499992, 2499997, 2500000]), - col_indices=tensor([304373, 374974, 396567, ..., 161828, 243938, - 306700]), - values=tensor([0.0234, 0.0111, 0.7752, ..., 0.4123, 0.0911, 0.7333]), + col_indices=tensor([ 6844, 10153, 255216, ..., 42121, 91491, + 461513]), + values=tensor([0.9924, 0.6578, 0.5329, ..., 0.0031, 0.7725, 0.4194]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.2563, 0.2400, 0.1997, ..., 0.9331, 0.1838, 0.9541]) +tensor([0.1428, 0.3987, 0.7169, ..., 0.0273, 0.0070, 0.3921]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,20 +59,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 6.597289562225342 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20602', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.051029920578003} +Time: 10.82520842552185 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 2499995, - 2499998, 2500000]), - col_indices=tensor([ 84683, 221772, 250792, ..., 457280, 123381, - 490345]), - values=tensor([0.6671, 0.6498, 0.8275, ..., 0.5282, 0.6912, 0.3058]), +tensor(crow_indices=tensor([ 0, 5, 11, ..., 2499992, + 2499997, 2500000]), + col_indices=tensor([ 6844, 10153, 255216, ..., 42121, 91491, + 461513]), + values=tensor([0.9924, 0.6578, 0.5329, ..., 0.0031, 0.7725, 0.4194]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8099, 0.6830, 0.6662, ..., 0.4435, 0.6731, 0.4595]) +tensor([0.1428, 0.3987, 0.7169, ..., 0.0273, 0.0070, 0.3921]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -59,31 +77,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.051029920578003 seconds +Time: 10.82520842552185 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 2499995, - 2499998, 2500000]), - col_indices=tensor([ 84683, 221772, 250792, ..., 457280, 123381, - 490345]), - values=tensor([0.6671, 0.6498, 0.8275, ..., 0.5282, 0.6912, 0.3058]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8099, 0.6830, 0.6662, ..., 0.4435, 0.6731, 0.4595]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 10.051029920578003 seconds - -[40.55, 45.24, 39.91, 39.85, 39.84, 39.77, 40.43, 40.43, 42.0, 39.73] -[152.11] -13.22957968711853 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 20602, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.051029920578003, 'TIME_S_1KI': 0.4878667081146492, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2012.3513662075998, 'W': 152.11} -[40.55, 45.24, 39.91, 39.85, 39.84, 39.77, 40.43, 40.43, 42.0, 39.73, 40.67, 40.21, 39.79, 39.83, 39.69, 40.04, 39.86, 39.69, 40.08, 39.81] -727.04 -36.352 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 20602, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.051029920578003, 'TIME_S_1KI': 0.4878667081146492, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2012.3513662075998, 'W': 152.11, 'J_1KI': 97.67747627451702, 'W_1KI': 7.383263760799923, 'W_D': 115.75800000000001, 'J_D': 1531.429685421467, 'W_D_1KI': 5.61877487622561, 'J_D_1KI': 0.272729583352374} +[40.4, 40.36, 39.63, 39.65, 40.33, 39.57, 40.07, 39.61, 39.74, 39.54] +[152.99] +13.668684005737305 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21617, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.82520842552185, 'TIME_S_1KI': 0.5007729298941505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2091.1719660377503, 'W': 152.99} +[40.4, 40.36, 39.63, 39.65, 40.33, 
39.57, 40.07, 39.61, 39.74, 39.54, 40.7, 39.74, 40.09, 39.86, 39.97, 40.05, 40.88, 39.9, 39.65, 39.62] +719.23 +35.9615 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21617, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.82520842552185, 'TIME_S_1KI': 0.5007729298941505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2091.1719660377503, 'W': 152.99, 'J_1KI': 96.73738104444419, 'W_1KI': 7.07730027293334, 'W_D': 117.02850000000001, 'J_D': 1599.6255861654283, 'W_D_1KI': 5.413725308784754, 'J_D_1KI': 0.25043832672363203} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.json index ac74c89..2eb0948 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2268, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.338330507278442, "TIME_S_1KI": 4.558346784514304, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1929.4526886749268, "W": 124.72, "J_1KI": 850.7286987102852, "W_1KI": 54.99118165784832, "W_D": 88.6155, "J_D": 1370.9061476368904, "W_D_1KI": 39.07208994708994, "J_D_1KI": 17.227552886723963} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2135, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.106867790222168, "TIME_S_1KI": 4.733895920478767, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1866.3490508079528, "W": 123.28, "J_1KI": 874.16817368054, "W_1KI": 57.742388758782205, "W_D": 87.33449999999999, "J_D": 1322.1662976783514, "W_D_1KI": 40.90608899297424, "J_D_1KI": 19.159760652446952} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.output index 4f74cbe..5e3d223 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,36 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.46280455589294434} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.491588830947876} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 21, 39, ..., 12499960, - 12499981, 12500000]), - col_indices=tensor([ 5530, 18658, 36900, ..., 388989, 426254, - 497258]), - values=tensor([0.8053, 0.3880, 0.4779, ..., 0.4773, 0.4279, 0.6817]), - size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.5886, 0.4606, 0.7255, ..., 0.1606, 0.2608, 0.5232]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 12500000 -Density: 5e-05 -Time: 0.46280455589294434 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2268', '-ss', '500000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.338330507278442} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 63, ..., 12499957, +tensor(crow_indices=tensor([ 0, 31, 56, ..., 12499950, 12499979, 12500000]), - col_indices=tensor([ 14790, 16334, 55074, ..., 466420, 486794, - 499923]), - values=tensor([0.8543, 0.1686, 0.8292, ..., 0.6567, 0.2357, 0.6950]), + col_indices=tensor([ 20037, 42363, 59004, ..., 453758, 466137, + 477307]), + values=tensor([0.6776, 0.0862, 0.2736, ..., 0.4363, 0.2767, 0.3172]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.8639, 0.3423, 0.4800, ..., 0.1443, 0.7816, 0.0060]) +tensor([0.5582, 0.5777, 0.9845, ..., 0.7624, 0.5547, 0.6204]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.338330507278442 seconds +Time: 0.491588830947876 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2135', '-ss', '500000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.106867790222168} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 63, ..., 12499957, - 12499979, 12500000]), - col_indices=tensor([ 14790, 16334, 55074, ..., 466420, 486794, - 499923]), - values=tensor([0.8543, 0.1686, 0.8292, ..., 0.6567, 0.2357, 0.6950]), +tensor(crow_indices=tensor([ 0, 24, 49, ..., 12499947, + 12499975, 12500000]), + col_indices=tensor([ 4589, 81496, 110665, ..., 429262, 449020, + 460708]), + values=tensor([0.3929, 0.7633, 0.9504, ..., 0.5778, 0.1433, 0.6727]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.8639, 0.3423, 0.4800, ..., 0.1443, 0.7816, 0.0060]) +tensor([0.3227, 0.6104, 0.4525, ..., 0.1890, 0.8017, 0.8134]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +38,31 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.338330507278442 seconds +Time: 10.106867790222168 seconds -[40.4, 42.56, 40.55, 40.15, 39.72, 40.38, 39.75, 39.66, 39.85, 39.57] -[124.72] -15.470274925231934 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2268, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.338330507278442, 'TIME_S_1KI': 4.558346784514304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1929.4526886749268, 'W': 124.72} -[40.4, 42.56, 40.55, 40.15, 39.72, 40.38, 39.75, 39.66, 39.85, 39.57, 40.6, 40.35, 40.27, 40.13, 39.75, 39.68, 39.73, 39.6, 39.89, 39.57] -722.09 -36.1045 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2268, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.338330507278442, 'TIME_S_1KI': 4.558346784514304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1929.4526886749268, 'W': 124.72, 'J_1KI': 850.7286987102852, 'W_1KI': 54.99118165784832, 'W_D': 88.6155, 'J_D': 1370.9061476368904, 'W_D_1KI': 39.07208994708994, 'J_D_1KI': 17.227552886723963} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 49, ..., 12499947, + 12499975, 12500000]), + col_indices=tensor([ 4589, 81496, 110665, ..., 429262, 449020, + 460708]), + values=tensor([0.3929, 0.7633, 0.9504, ..., 0.5778, 0.1433, 0.6727]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.3227, 0.6104, 0.4525, ..., 0.1890, 0.8017, 0.8134]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.106867790222168 seconds + +[41.05, 39.9, 39.99, 39.71, 39.62, 39.5, 41.67, 39.56, 39.56, 39.91] +[123.28] +15.139106512069702 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2135, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.106867790222168, 'TIME_S_1KI': 4.733895920478767, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1866.3490508079528, 'W': 123.28} +[41.05, 39.9, 39.99, 39.71, 39.62, 39.5, 41.67, 39.56, 39.56, 39.91, 40.93, 40.32, 39.68, 39.56, 39.71, 39.68, 39.87, 39.55, 40.03, 40.11] +718.9100000000001 +35.9455 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2135, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.106867790222168, 'TIME_S_1KI': 4.733895920478767, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1866.3490508079528, 'W': 123.28, 'J_1KI': 874.16817368054, 'W_1KI': 57.742388758782205, 'W_D': 87.33449999999999, 'J_D': 1322.1662976783514, 'W_D_1KI': 40.90608899297424, 'J_D_1KI': 19.159760652446952} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json index 366bd80..b4235dc 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 89538, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.289806604385376, "TIME_S_1KI": 0.11492111287258344, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1521.4007213592529, "W": 116.96, "J_1KI": 16.991676398392332, "W_1KI": 1.3062610288369183, "W_D": 80.71074999999999, "J_D": 1049.8751134699583, "W_D_1KI": 0.9014133663919228, "J_D_1KI": 0.010067383305322017} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 88363, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.692678213119507, "TIME_S_1KI": 0.1210085467120798, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1541.5747392845155, "W": 115.79, "J_1KI": 17.445930302100603, "W_1KI": 1.3103900954019216, "W_D": 79.897, "J_D": 1063.7118658313752, "W_D_1KI": 0.9041906680397905, "J_D_1KI": 0.010232684132949204} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output index 
aedb6df..f998b37 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.049301862716674805} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04586005210876465} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 13, ..., 249993, 249996, +tensor(crow_indices=tensor([ 0, 1, 4, ..., 249988, 249993, 250000]), - col_indices=tensor([ 1709, 19790, 28830, ..., 3831, 22257, 48856]), - values=tensor([0.9244, 0.7522, 0.6687, ..., 0.7540, 0.7318, 0.7260]), + col_indices=tensor([48756, 27344, 41514, ..., 26596, 29730, 34292]), + values=tensor([0.9696, 0.8272, 0.9747, ..., 0.0537, 0.5377, 0.5408]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0785, 0.8938, 0.5541, ..., 0.5935, 0.2052, 0.2232]) +tensor([0.4023, 0.3988, 0.8031, ..., 0.0466, 0.9788, 0.6602]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.049301862716674805 seconds +Time: 0.04586005210876465 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21297', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.7824935913085938} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '22895', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.7205493450164795} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 249989, 249993, +tensor(crow_indices=tensor([ 0, 4, 6, ..., 249991, 249994, 250000]), - col_indices=tensor([16415, 16632, 32449, ..., 45169, 45288, 48610]), - values=tensor([0.0101, 0.6954, 0.6241, ..., 0.3711, 0.7246, 0.3748]), + col_indices=tensor([ 1690, 7661, 14833, ..., 26670, 28443, 40583]), + values=tensor([0.3670, 0.4597, 0.1032, ..., 0.6580, 0.1615, 0.4832]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.6515, 0.7514, 0.0204, ..., 0.8861, 0.6124, 0.4798]) +tensor([0.0576, 0.8105, 0.5861, ..., 0.6869, 0.4943, 0.9931]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 2.7824935913085938 seconds +Time: 2.7205493450164795 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '80366', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.42440915107727} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '88363', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.692678213119507} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 10, ..., 249987, 249993, +tensor(crow_indices=tensor([ 0, 6, 12, ..., 249987, 249994, 250000]), - col_indices=tensor([ 2445, 24855, 26173, ..., 23560, 26333, 46130]), - values=tensor([0.2012, 0.2713, 0.8391, ..., 0.5844, 0.7972, 0.4463]), + col_indices=tensor([ 2153, 14633, 16101, ..., 25645, 38673, 48362]), + values=tensor([0.6843, 0.5058, 0.7514, ..., 0.4931, 0.4973, 0.4238]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5580, 0.1767, 0.6905, ..., 0.9860, 0.6709, 0.2165]) +tensor([0.1661, 0.3950, 0.7634, ..., 0.8727, 0.6472, 0.3253]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,19 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 9.42440915107727 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '89538', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.289806604385376} +Time: 10.692678213119507 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 249988, 249996, +tensor(crow_indices=tensor([ 0, 6, 12, ..., 249987, 249994, 250000]), - col_indices=tensor([ 2244, 34732, 7243, ..., 9132, 13610, 19520]), - values=tensor([0.6983, 0.0446, 0.9216, ..., 0.0232, 0.0374, 0.6300]), + col_indices=tensor([ 2153, 14633, 16101, ..., 25645, 38673, 48362]), + values=tensor([0.6843, 0.5058, 0.7514, ..., 0.4931, 0.4973, 0.4238]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.8539, 0.6321, 0.4259, ..., 0.2899, 0.6274, 0.3350]) +tensor([0.1661, 0.3950, 0.7634, ..., 0.8727, 0.6472, 0.3253]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -76,30 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.289806604385376 seconds +Time: 10.692678213119507 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 249988, 249996, - 250000]), - col_indices=tensor([ 2244, 34732, 7243, ..., 9132, 13610, 19520]), - values=tensor([0.6983, 0.0446, 0.9216, ..., 0.0232, 0.0374, 0.6300]), - size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.8539, 0.6321, 0.4259, ..., 0.2899, 0.6274, 0.3350]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 250000 -Density: 0.0001 -Time: 10.289806604385376 seconds - -[40.27, 39.54, 39.5, 39.65, 39.93, 39.44, 40.01, 39.51, 45.05, 39.37] -[116.96] -13.007872104644775 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 89538, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.289806604385376, 'TIME_S_1KI': 0.11492111287258344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1521.4007213592529, 'W': 116.96} -[40.27, 39.54, 39.5, 39.65, 39.93, 39.44, 40.01, 39.51, 45.05, 39.37, 40.45, 39.41, 39.86, 39.36, 39.91, 44.54, 39.53, 39.97, 39.84, 39.78] -724.985 -36.24925 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 89538, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.289806604385376, 'TIME_S_1KI': 0.11492111287258344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1521.4007213592529, 'W': 116.96, 'J_1KI': 16.991676398392332, 'W_1KI': 1.3062610288369183, 'W_D': 80.71074999999999, 'J_D': 1049.8751134699583, 'W_D_1KI': 0.9014133663919228, 'J_D_1KI': 0.010067383305322017} +[40.57, 39.5, 40.22, 39.42, 39.56, 39.38, 39.65, 39.51, 39.53, 40.4] +[115.79] +13.313539505004883 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 88363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.692678213119507, 'TIME_S_1KI': 
0.1210085467120798, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1541.5747392845155, 'W': 115.79} +[40.57, 39.5, 40.22, 39.42, 39.56, 39.38, 39.65, 39.51, 39.53, 40.4, 41.29, 39.45, 40.25, 39.78, 42.21, 39.36, 40.3, 39.38, 39.52, 39.42] +717.86 +35.893 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 88363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.692678213119507, 'TIME_S_1KI': 0.1210085467120798, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1541.5747392845155, 'W': 115.79, 'J_1KI': 17.445930302100603, 'W_1KI': 1.3103900954019216, 'W_D': 79.897, 'J_D': 1063.7118658313752, 'W_D_1KI': 0.9041906680397905, 'J_D_1KI': 0.010232684132949204} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json index 16ebf8c..d36f4c6 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 45908, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.364075660705566, "TIME_S_1KI": 0.22575750763931268, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1982.6904290771486, "W": 146.58, "J_1KI": 43.18834253457238, "W_1KI": 3.192907554238913, "W_D": 110.64150000000001, "J_D": 1496.5741786651613, "W_D_1KI": 2.410070140280561, "J_D_1KI": 0.05249782478610615} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 45817, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.300234079360962, "TIME_S_1KI": 0.22481249491151672, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1949.6090237426756, "W": 147.88, "J_1KI": 42.55208817126123, "W_1KI": 3.2276229347185543, "W_D": 111.8985, "J_D": 1475.238878437042, "W_D_1KI": 2.442292162297837, "J_D_1KI": 0.05330537054582005} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output index f9b44d5..3af936c 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.0605926513671875} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.05995535850524902} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 42, 99, ..., 2499902, - 2499955, 2500000]), - col_indices=tensor([ 1009, 1628, 5292, ..., 43455, 47256, 47946]), - values=tensor([0.2339, 0.7843, 0.8407, ..., 0.0388, 0.2390, 0.6904]), +tensor(crow_indices=tensor([ 0, 46, 101, ..., 2499911, + 2499946, 2500000]), + col_indices=tensor([ 837, 3436, 3784, ..., 49193, 49198, 49836]), + values=tensor([0.9403, 0.3671, 0.7748, ..., 0.9889, 0.1996, 0.6173]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3494, 0.3893, 0.8826, ..., 0.0693, 0.0070, 0.7582]) +tensor([0.0910, 0.9171, 0.2074, ..., 0.6372, 0.3068, 0.1250]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,39 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 0.0605926513671875 seconds +Time: 0.05995535850524902 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '17328', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.963207721710205} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '17513', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 4.013477563858032} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 85, ..., 2499905, +tensor(crow_indices=tensor([ 0, 46, 92, ..., 2499887, + 2499939, 2500000]), + col_indices=tensor([ 304, 816, 1770, ..., 46946, 47011, 48726]), + values=tensor([0.3138, 0.4297, 0.4205, ..., 0.5723, 0.1353, 0.1550]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3742, 0.5301, 0.3458, ..., 0.7724, 0.2652, 0.5224]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 4.013477563858032 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '45817', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.300234079360962} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 50, 95, ..., 2499900, 2499952, 2500000]), - col_indices=tensor([ 2138, 2192, 2629, ..., 48532, 49646, 49876]), - values=tensor([0.7824, 0.0061, 0.7967, ..., 0.1635, 0.4732, 0.5157]), + col_indices=tensor([ 291, 577, 802, ..., 47318, 48670, 49733]), + values=tensor([0.1845, 0.8430, 0.5042, ..., 0.4194, 0.3359, 0.4011]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8165, 0.7580, 0.0903, ..., 0.6290, 0.7559, 0.6116]) +tensor([0.6378, 0.1370, 0.7965, ..., 0.0471, 0.0852, 0.0122]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 3.963207721710205 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '45908', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.364075660705566} +Time: 10.300234079360962 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 99, ..., 2499901, - 2499955, 2500000]), - col_indices=tensor([ 2242, 2630, 4307, ..., 47333, 48170, 49131]), - values=tensor([0.3970, 0.2919, 0.1690, ..., 0.5693, 0.6652, 0.4283]), +tensor(crow_indices=tensor([ 0, 50, 95, ..., 2499900, + 2499952, 2500000]), + col_indices=tensor([ 291, 577, 802, ..., 47318, 48670, 49733]), + values=tensor([0.1845, 0.8430, 0.5042, ..., 0.4194, 0.3359, 0.4011]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7545, 0.7866, 0.4331, ..., 0.1722, 0.5406, 0.9467]) +tensor([0.6378, 0.1370, 0.7965, ..., 0.0471, 0.0852, 0.0122]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,30 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.364075660705566 seconds +Time: 10.300234079360962 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 99, ..., 2499901, - 2499955, 2500000]), - col_indices=tensor([ 2242, 2630, 4307, ..., 47333, 48170, 49131]), - values=tensor([0.3970, 0.2919, 0.1690, ..., 0.5693, 0.6652, 0.4283]), - size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7545, 0.7866, 0.4331, ..., 0.1722, 0.5406, 0.9467]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 2500000 -Density: 0.001 -Time: 10.364075660705566 seconds - -[40.59, 39.91, 39.52, 39.54, 39.7, 39.41, 39.47, 40.95, 40.05, 39.39] -[146.58] -13.526336669921875 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 45908, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.364075660705566, 'TIME_S_1KI': 0.22575750763931268, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1982.6904290771486, 'W': 146.58} -[40.59, 39.91, 39.52, 39.54, 39.7, 39.41, 39.47, 40.95, 40.05, 39.39, 40.78, 39.79, 39.69, 41.93, 40.24, 39.57, 39.58, 39.7, 39.52, 39.64] -718.77 -35.9385 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 45908, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.364075660705566, 'TIME_S_1KI': 0.22575750763931268, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1982.6904290771486, 'W': 146.58, 'J_1KI': 43.18834253457238, 'W_1KI': 3.192907554238913, 'W_D': 110.64150000000001, 'J_D': 1496.5741786651613, 'W_D_1KI': 2.410070140280561, 'J_D_1KI': 0.05249782478610615} +[46.28, 39.85, 39.61, 39.84, 39.94, 40.08, 39.93, 39.98, 39.68, 39.43] +[147.88] +13.183723449707031 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 45817, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.300234079360962, 'TIME_S_1KI': 0.22481249491151672, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1949.6090237426756, 'W': 147.88} +[46.28, 39.85, 39.61, 39.84, 39.94, 40.08, 39.93, 39.98, 39.68, 39.43, 40.17, 39.51, 40.01, 40.53, 39.54, 39.43, 39.57, 39.81, 39.43, 39.9] +719.63 +35.9815 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 45817, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.300234079360962, 'TIME_S_1KI': 0.22481249491151672, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1949.6090237426756, 'W': 147.88, 'J_1KI': 42.55208817126123, 'W_1KI': 3.2276229347185543, 'W_D': 111.8985, 'J_D': 1475.238878437042, 'W_D_1KI': 2.442292162297837, 'J_D_1KI': 0.05330537054582005} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.json index 20ca6ca..a7370ad 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1726, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, 
"MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.692668676376343, "TIME_S_1KI": 6.1950571705540805, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2256.2120828294755, "W": 117.97000000000001, "J_1KI": 1307.1912415002755, "W_1KI": 68.34878331402086, "W_D": 81.57950000000002, "J_D": 1560.2327168872362, "W_D_1KI": 47.26506373117035, "J_D_1KI": 27.384162069044237} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1804, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.344184398651123, "TIME_S_1KI": 6.28835055357601, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2300.324343466759, "W": 118.35000000000001, "J_1KI": 1275.1243589061855, "W_1KI": 65.60421286031043, "W_D": 82.3405, "J_D": 1600.421264074564, "W_D_1KI": 45.643292682926834, "J_D_1KI": 25.30116002379536} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.output index 939a7fb..1c44d60 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.6629180908203125} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.6471617221832275} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 514, 996, ..., 24998956, - 24999472, 25000000]), - col_indices=tensor([ 1, 94, 348, ..., 49850, 49922, 49959]), - values=tensor([0.7408, 0.6252, 0.3689, ..., 0.4667, 0.9642, 0.0582]), +tensor(crow_indices=tensor([ 0, 501, 980, ..., 24998975, + 24999493, 25000000]), + col_indices=tensor([ 65, 80, 124, ..., 49855, 49857, 49996]), + values=tensor([0.2347, 0.8281, 0.9357, ..., 0.1898, 0.4548, 0.5109]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.9247, 0.2733, 0.8266, ..., 0.5422, 0.1520, 0.6812]) +tensor([0.1705, 0.9477, 0.0426, ..., 0.3530, 0.9921, 0.0403]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 0.6629180908203125 seconds +Time: 0.6471617221832275 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1583', '-ss', '50000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.627561807632446} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1622', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.437881469726562} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 562, 1043, ..., 24999032, - 24999504, 25000000]), - col_indices=tensor([ 4, 50, 78, ..., 49916, 49920, 49965]), - values=tensor([0.0759, 0.2514, 0.9400, ..., 0.2240, 0.9432, 0.3438]), +tensor(crow_indices=tensor([ 0, 500, 1005, ..., 24999021, + 24999495, 25000000]), + col_indices=tensor([ 20, 485, 487, ..., 49816, 49880, 49908]), + values=tensor([0.8491, 0.8120, 0.9833, ..., 0.7969, 0.8199, 0.5818]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3732, 0.6785, 0.5695, ..., 0.6003, 0.8169, 0.4003]) +tensor([0.4544, 0.6573, 0.4768, ..., 0.0080, 0.3189, 0.0636]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 9.627561807632446 seconds +Time: 9.437881469726562 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1726', '-ss', '50000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.692668676376343} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1804', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.344184398651123} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 457, 970, ..., 24999002, - 24999486, 25000000]), - col_indices=tensor([ 100, 360, 480, ..., 49859, 49889, 49953]), - values=tensor([0.3856, 0.6378, 0.2660, ..., 0.6784, 0.6537, 0.7029]), +tensor(crow_indices=tensor([ 0, 496, 963, ..., 24998993, + 24999488, 25000000]), + col_indices=tensor([ 360, 922, 929, ..., 49746, 49786, 49807]), + values=tensor([0.4579, 0.0160, 0.6008, ..., 0.8509, 0.1795, 0.5359]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.4800, 0.0280, 0.3242, ..., 0.3544, 0.6298, 0.7207]) +tensor([0.1076, 0.4012, 0.5788, ..., 0.7338, 0.7512, 0.3105]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 10.692668676376343 seconds +Time: 11.344184398651123 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 457, 970, ..., 24999002, - 24999486, 25000000]), - col_indices=tensor([ 100, 360, 480, ..., 49859, 49889, 49953]), - values=tensor([0.3856, 0.6378, 0.2660, ..., 0.6784, 0.6537, 0.7029]), +tensor(crow_indices=tensor([ 0, 496, 963, ..., 24998993, + 24999488, 25000000]), + col_indices=tensor([ 360, 922, 929, ..., 49746, 49786, 49807]), + values=tensor([0.4579, 0.0160, 0.6008, ..., 0.8509, 0.1795, 0.5359]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.4800, 0.0280, 0.3242, ..., 0.3544, 0.6298, 0.7207]) +tensor([0.1076, 0.4012, 0.5788, ..., 0.7338, 0.7512, 0.3105]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 10.692668676376343 seconds +Time: 11.344184398651123 seconds -[41.47, 39.7, 39.74, 40.1, 40.28, 40.05, 40.37, 39.58, 39.73, 40.01] -[117.97] -19.125303745269775 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1726, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.692668676376343, 'TIME_S_1KI': 6.1950571705540805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2256.2120828294755, 'W': 117.97000000000001} -[41.47, 39.7, 39.74, 40.1, 40.28, 40.05, 40.37, 39.58, 39.73, 40.01, 41.18, 40.13, 45.73, 39.77, 39.88, 40.17, 39.76, 40.02, 41.27, 40.4] -727.81 -36.390499999999996 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1726, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.692668676376343, 'TIME_S_1KI': 6.1950571705540805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2256.2120828294755, 'W': 117.97000000000001, 'J_1KI': 1307.1912415002755, 'W_1KI': 68.34878331402086, 'W_D': 81.57950000000002, 'J_D': 1560.2327168872362, 'W_D_1KI': 47.26506373117035, 'J_D_1KI': 27.384162069044237} +[40.38, 40.24, 40.1, 39.7, 39.9, 41.15, 39.98, 39.73, 39.7, 39.9] +[118.35] +19.436623096466064 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1804, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.344184398651123, 'TIME_S_1KI': 6.28835055357601, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2300.324343466759, 'W': 118.35000000000001} +[40.38, 40.24, 40.1, 39.7, 39.9, 41.15, 39.98, 39.73, 39.7, 39.9, 40.49, 39.72, 40.3, 40.03, 39.74, 39.88, 39.79, 39.71, 40.16, 39.95] +720.19 +36.0095 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1804, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.344184398651123, 'TIME_S_1KI': 6.28835055357601, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2300.324343466759, 'W': 118.35000000000001, 'J_1KI': 1275.1243589061855, 'W_1KI': 65.60421286031043, 'W_D': 82.3405, 'J_D': 1600.421264074564, 'W_D_1KI': 45.643292682926834, 'J_D_1KI': 25.30116002379536} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 
0000000..dcc7159 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 355, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.030322074890137, "TIME_S_1KI": 28.254428379972214, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3807.8137480163573, "W": 95.36, "J_1KI": 10726.235909905232, "W_1KI": 268.61971830985914, "W_D": 58.324250000000006, "J_D": 2328.941705041349, "W_D_1KI": 164.293661971831, "J_D_1KI": 462.79904780797466} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..2499979 --- /dev/null +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.9571590423583984} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2454, 4967, ..., + 124995021, 124997489, 125000000]), + col_indices=tensor([ 26, 74, 80, ..., 49981, 49991, 49992]), + values=tensor([0.1549, 0.4800, 0.7967, ..., 0.3045, 0.9216, 0.3542]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.9869, 0.0702, 0.3082, ..., 0.7358, 0.2978, 0.9233]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 2.9571590423583984 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '355', '-ss', '50000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.030322074890137} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2458, 4991, ..., + 124995052, 124997552, 125000000]), + col_indices=tensor([ 4, 7, 30, ..., 49943, 49948, 49958]), + values=tensor([0.9374, 0.9751, 0.0126, ..., 0.3756, 0.9643, 0.1575]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.6622, 0.7788, 0.7272, ..., 0.9942, 0.4374, 0.8252]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 10.030322074890137 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2458, 4991, ..., + 124995052, 124997552, 125000000]), + col_indices=tensor([ 4, 7, 30, ..., 49943, 49948, 49958]), + values=tensor([0.9374, 0.9751, 0.0126, ..., 0.3756, 0.9643, 0.1575]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.6622, 0.7788, 0.7272, ..., 0.9942, 0.4374, 0.8252]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 10.030322074890137 seconds + +[56.69, 40.33, 39.94, 40.83, 39.8, 39.71, 45.34, 39.69, 39.84, 40.29] +[95.36] +39.93093276023865 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 355, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.030322074890137, 'TIME_S_1KI': 28.254428379972214, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3807.8137480163573, 'W': 95.36} +[56.69, 40.33, 39.94, 40.83, 39.8, 39.71, 45.34, 39.69, 39.84, 40.29, 40.99, 39.72, 40.01, 41.04, 39.84, 39.83, 45.74, 40.49, 39.69, 39.78] +740.7149999999999 +37.03574999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 355, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.030322074890137, 'TIME_S_1KI': 28.254428379972214, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3807.8137480163573, 'W': 95.36, 'J_1KI': 10726.235909905232, 'W_1KI': 268.61971830985914, 'W_D': 58.324250000000006, 'J_D': 2328.941705041349, 'W_D_1KI': 164.293661971831, 'J_D_1KI': 462.79904780797466} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json index 53cb9a4..43cc709 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 125418, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.786596775054932, "TIME_S_1KI": 0.0860051729022543, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1353.9629184293747, "W": 103.38, "J_1KI": 10.795602851499583, "W_1KI": 0.8242835956561259, "W_D": 
67.61375, "J_D": 885.5340518084168, "W_D_1KI": 0.5391072254381349, "J_D_1KI": 0.004298483674098893} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 126100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.468435764312744, "TIME_S_1KI": 0.09094715118408203, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1365.049603307247, "W": 103.35, "J_1KI": 10.825135632888557, "W_1KI": 0.8195876288659794, "W_D": 67.52475, "J_D": 891.8687295686602, "W_D_1KI": 0.5354857256145915, "J_D_1KI": 0.0042465164600681326} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output index b030af9..388fac8 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05780529975891113} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04126620292663574} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), - col_indices=tensor([26335, 27290, 38418, ..., 19756, 20120, 4010]), - values=tensor([0.3389, 0.0656, 0.4529, ..., 0.8287, 0.8944, 0.8355]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([24401, 39088, 19902, ..., 8711, 5895, 39358]), + values=tensor([0.9277, 0.4854, 0.8221, ..., 0.5577, 0.9470, 0.1496]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6330, 0.8862, 0.5805, ..., 0.8180, 0.2124, 0.8337]) +tensor([0.1757, 0.4572, 0.4112, ..., 0.1642, 0.8355, 0.9416]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.05780529975891113 seconds +Time: 0.04126620292663574 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18164', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.5206849575042725} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25444', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.118638038635254} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), - col_indices=tensor([47070, 16594, 28343, ..., 43440, 28747, 28655]), - values=tensor([0.5955, 0.4100, 0.8378, ..., 0.8449, 0.3361, 0.6219]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 25000, 25000]), + col_indices=tensor([33127, 16683, 48525, ..., 8852, 11318, 36089]), + values=tensor([0.1304, 0.0600, 0.7524, ..., 0.8598, 0.7086, 0.6482]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0973, 0.1697, 0.0749, ..., 0.0145, 0.6554, 0.2719]) +tensor([0.5990, 0.7765, 0.7637, ..., 0.8510, 0.0540, 0.9906]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 1.5206849575042725 seconds +Time: 2.118638038635254 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '125418', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.786596775054932} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '126100', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.468435764312744} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([27120, 12941, 15664, ..., 3161, 41560, 29450]), - values=tensor([0.4509, 0.2974, 0.8733, ..., 0.8770, 0.0483, 0.7990]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 25000, 25000, 25000]), + col_indices=tensor([ 2972, 25909, 36142, ..., 6702, 9594, 36895]), + values=tensor([0.1019, 0.7173, 0.0602, ..., 0.9921, 0.7905, 0.1013]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0817, 0.8974, 0.0414, ..., 0.9825, 0.3309, 0.2047]) +tensor([0.0322, 0.1505, 0.0180, ..., 0.8141, 0.7889, 0.6939]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.786596775054932 seconds +Time: 11.468435764312744 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([27120, 12941, 15664, ..., 3161, 41560, 29450]), - values=tensor([0.4509, 0.2974, 0.8733, ..., 0.8770, 0.0483, 0.7990]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 25000, 25000, 25000]), + col_indices=tensor([ 2972, 25909, 36142, ..., 6702, 9594, 36895]), + values=tensor([0.1019, 0.7173, 0.0602, ..., 0.9921, 0.7905, 0.1013]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0817, 0.8974, 0.0414, ..., 0.9825, 0.3309, 0.2047]) +tensor([0.0322, 0.1505, 0.0180, ..., 0.8141, 0.7889, 0.6939]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.786596775054932 seconds +Time: 11.468435764312744 seconds -[40.88, 40.06, 39.87, 40.02, 39.61, 39.65, 39.87, 39.87, 39.51, 39.91] -[103.38] -13.096952199935913 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 125418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.786596775054932, 'TIME_S_1KI': 0.0860051729022543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1353.9629184293747, 'W': 103.38} -[40.88, 40.06, 39.87, 40.02, 39.61, 39.65, 39.87, 39.87, 39.51, 39.91, 40.56, 39.51, 39.82, 39.66, 39.34, 39.38, 39.55, 39.32, 39.66, 39.9] -715.325 -35.76625 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 125418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.786596775054932, 'TIME_S_1KI': 0.0860051729022543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1353.9629184293747, 'W': 103.38, 'J_1KI': 10.795602851499583, 'W_1KI': 0.8242835956561259, 'W_D': 67.61375, 'J_D': 885.5340518084168, 'W_D_1KI': 0.5391072254381349, 'J_D_1KI': 0.004298483674098893} +[42.06, 39.46, 39.57, 39.5, 39.54, 39.4, 39.45, 39.76, 39.48, 39.85] +[103.35] +13.208027124404907 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 126100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.468435764312744, 'TIME_S_1KI': 0.09094715118408203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1365.049603307247, 'W': 103.35} +[42.06, 39.46, 39.57, 39.5, 39.54, 39.4, 39.45, 39.76, 39.48, 39.85, 41.35, 39.62, 39.91, 39.46, 40.32, 39.44, 39.48, 39.77, 41.03, 39.37] +716.5049999999999 +35.82525 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 126100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.468435764312744, 'TIME_S_1KI': 0.09094715118408203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1365.049603307247, 'W': 103.35, 'J_1KI': 10.825135632888557, 'W_1KI': 0.8195876288659794, 'W_D': 67.52475, 'J_D': 891.8687295686602, 'W_D_1KI': 0.5354857256145915, 'J_D_1KI': 0.0042465164600681326} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.json index d831091..53264f0 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 106823, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.303539752960205, "TIME_S_1KI": 0.09645431932224527, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1429.2441740632057, "W": 111.25, "J_1KI": 13.379554721953191, "W_1KI": 1.0414423860030144, "W_D": 75.065, "J_D": 964.37046225667, "W_D_1KI": 0.7027044737556518, "J_D_1KI": 0.006578213247668122} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 107353, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.681612730026245, "TIME_S_1KI": 0.09949989967701178, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1431.663604259491, "W": 111.45, "J_1KI": 13.336037225410477, "W_1KI": 1.0381638147047592, "W_D": 75.3235, "J_D": 967.5900717401504, "W_D_1KI": 0.7016431771818207, "J_D_1KI": 0.006535850671912482} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.output index a27a2d5..b6bc6a5 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.05483698844909668} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.04086589813232422} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 124996, 124998, +tensor(crow_indices=tensor([ 0, 2, 4, ..., 124997, 125000, 125000]), - col_indices=tensor([28119, 29640, 21715, ..., 29199, 13516, 45728]), - values=tensor([0.3782, 0.4368, 0.3959, ..., 0.8630, 0.5532, 0.4165]), + col_indices=tensor([41751, 43976, 3497, ..., 3909, 25887, 38459]), + values=tensor([0.4106, 0.2816, 0.3184, ..., 0.9617, 0.2316, 0.2041]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.8607, 0.2103, 0.4385, ..., 0.0263, 0.3906, 0.3161]) +tensor([0.5700, 0.9216, 0.5233, ..., 0.2364, 0.5823, 0.3818]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.05483698844909668 seconds +Time: 0.04086589813232422 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19147', '-ss', '50000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.8820090293884277} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25693', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.5129716396331787} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 6, ..., 124991, 124992, +tensor(crow_indices=tensor([ 0, 1, 4, ..., 124992, 124998, 125000]), - col_indices=tensor([32530, 36762, 311, ..., 24158, 32618, 44758]), - values=tensor([0.9615, 0.3318, 0.5732, ..., 0.8773, 0.1422, 0.4683]), + col_indices=tensor([46343, 20233, 31374, ..., 45113, 7654, 17213]), + values=tensor([0.9969, 0.7071, 0.9644, ..., 0.3342, 0.2384, 0.1403]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.1372, 0.3779, 0.3457, ..., 0.7036, 0.6193, 0.2501]) +tensor([0.7937, 0.4795, 0.0028, ..., 0.1875, 0.3993, 0.1011]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 1.8820090293884277 seconds +Time: 2.5129716396331787 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '106823', '-ss', '50000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.303539752960205} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '107353', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.681612730026245} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 124996, 124998, +tensor(crow_indices=tensor([ 0, 3, 5, ..., 124996, 124998, 125000]), - col_indices=tensor([16502, 37527, 11294, ..., 28497, 8084, 35661]), - values=tensor([0.2823, 0.8232, 0.0849, ..., 0.6885, 0.2665, 0.0851]), + col_indices=tensor([18651, 25437, 48526, ..., 23704, 10356, 37225]), + values=tensor([0.9489, 0.4951, 0.8868, ..., 0.5017, 0.1543, 0.5981]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.8734, 0.5898, 0.3749, ..., 0.2817, 0.4056, 0.3872]) +tensor([0.0752, 0.5705, 0.6730, ..., 0.8462, 0.4093, 0.2438]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.303539752960205 seconds +Time: 10.681612730026245 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 124996, 124998, +tensor(crow_indices=tensor([ 0, 3, 5, ..., 124996, 124998, 125000]), - col_indices=tensor([16502, 37527, 11294, ..., 28497, 8084, 35661]), - values=tensor([0.2823, 0.8232, 0.0849, ..., 0.6885, 0.2665, 0.0851]), + col_indices=tensor([18651, 25437, 48526, ..., 23704, 10356, 37225]), + values=tensor([0.9489, 0.4951, 0.8868, ..., 0.5017, 0.1543, 0.5981]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.8734, 0.5898, 0.3749, ..., 0.2817, 0.4056, 0.3872]) +tensor([0.0752, 0.5705, 0.6730, ..., 0.8462, 0.4093, 0.2438]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.303539752960205 seconds +Time: 10.681612730026245 seconds -[40.0, 39.65, 39.5, 39.28, 44.73, 39.57, 39.57, 39.75, 39.32, 39.43] -[111.25] -12.84713864326477 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 106823, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.303539752960205, 'TIME_S_1KI': 0.09645431932224527, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1429.2441740632057, 'W': 111.25} -[40.0, 39.65, 39.5, 39.28, 44.73, 39.57, 39.57, 39.75, 39.32, 39.43, 41.22, 44.93, 39.95, 39.46, 39.81, 39.42, 39.55, 39.68, 39.52, 39.37] -723.7 -36.185 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 106823, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.303539752960205, 'TIME_S_1KI': 0.09645431932224527, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1429.2441740632057, 'W': 111.25, 'J_1KI': 13.379554721953191, 'W_1KI': 1.0414423860030144, 'W_D': 75.065, 'J_D': 964.37046225667, 'W_D_1KI': 0.7027044737556518, 'J_D_1KI': 0.006578213247668122} +[40.06, 44.71, 39.63, 39.86, 39.97, 39.65, 40.56, 41.5, 40.08, 39.24] +[111.45] +12.845792770385742 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 107353, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.681612730026245, 'TIME_S_1KI': 0.09949989967701178, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1431.663604259491, 'W': 111.45} +[40.06, 44.71, 39.63, 39.86, 39.97, 39.65, 40.56, 41.5, 40.08, 39.24, 40.3, 39.38, 39.45, 39.74, 39.72, 39.41, 40.03, 39.65, 39.73, 39.32] +722.53 +36.1265 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 107353, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.681612730026245, 'TIME_S_1KI': 0.09949989967701178, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1431.663604259491, 'W': 111.45, 'J_1KI': 13.336037225410477, 'W_1KI': 1.0381638147047592, 'W_D': 75.3235, 'J_D': 967.5900717401504, 'W_D_1KI': 0.7016431771818207, 'J_D_1KI': 0.006535850671912482} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json index 59bfbf5..2af5597 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 423606, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.169308185577393, "TIME_S_1KI": 0.024006525369275677, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1199.050221145153, "W": 95.75, "J_1KI": 2.8305789369016328, "W_1KI": 0.2260355141334164, "W_D": 60.24975, "J_D": 754.4906116077303, "W_D_1KI": 0.14223063412699538, "J_D_1KI": 0.0003357616136858198} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 439130, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.833286046981812, "TIME_S_1KI": 0.024669883740536542, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1236.1940939807891, "W": 96.14, "J_1KI": 2.8150982487664, "W_1KI": 0.2189328900325644, "W_D": 60.80775, "J_D": 781.8824778267145, "W_D_1KI": 0.138473231161615, "J_D_1KI": 0.0003153353930763441} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output index ae86751..24549b0 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.018892288208007812} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03937959671020508} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 4, ..., 2500, 2500, 2500]), - col_indices=tensor([1108, 1116, 4456, ..., 2396, 548, 1385]), - values=tensor([0.8638, 0.8794, 0.8595, ..., 0.2787, 0.2270, 0.2436]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2500, 2500]), + col_indices=tensor([4278, 4518, 4983, ..., 2661, 3298, 613]), + values=tensor([0.7810, 0.3230, 0.0701, ..., 0.3279, 0.8382, 0.1710]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.1028, 0.3454, 0.0668, ..., 0.0203, 0.1099, 0.2752]) +tensor([0.6892, 0.5908, 0.0931, ..., 0.5677, 0.7492, 0.3206]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.018892288208007812 seconds +Time: 0.03937959671020508 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '55578', '-ss', '5000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.3776216506958008} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '26663', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6375362873077393} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2500, 2500]), - col_indices=tensor([1067, 4726, 2617, ..., 4515, 4937, 207]), - values=tensor([0.7749, 0.8447, 0.6931, ..., 0.5698, 0.5658, 0.7624]), +tensor(crow_indices=tensor([ 0, 1, 3, ..., 2499, 2500, 2500]), + col_indices=tensor([4464, 78, 2804, ..., 4372, 12, 2382]), + values=tensor([0.2558, 0.8624, 0.3310, ..., 0.1585, 0.2890, 0.7997]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.8259, 0.6183, 0.2744, ..., 0.6644, 0.1716, 0.4385]) +tensor([0.1645, 0.8666, 0.7353, ..., 0.9720, 0.2835, 0.6947]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 1.3776216506958008 seconds +Time: 0.6375362873077393 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '423606', '-ss', '5000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.169308185577393} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '439130', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.833286046981812} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), - col_indices=tensor([ 723, 3357, 4021, ..., 1038, 2195, 2669]), - values=tensor([0.5380, 0.6250, 0.5522, ..., 0.2239, 0.7354, 0.7870]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([4485, 3797, 96, ..., 993, 4029, 4486]), + values=tensor([0.6070, 0.3408, 0.8766, ..., 0.8191, 0.5508, 0.4514]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.8647, 0.0404, 0.2214, ..., 0.2716, 0.6887, 0.8481]) +tensor([0.5455, 0.6563, 0.3370, ..., 0.1536, 0.0207, 0.1893]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.169308185577393 seconds +Time: 10.833286046981812 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), - col_indices=tensor([ 723, 3357, 4021, ..., 1038, 2195, 2669]), - values=tensor([0.5380, 0.6250, 0.5522, ..., 0.2239, 0.7354, 0.7870]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([4485, 3797, 96, ..., 993, 4029, 4486]), + values=tensor([0.6070, 0.3408, 0.8766, ..., 0.8191, 0.5508, 0.4514]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.8647, 0.0404, 0.2214, ..., 0.2716, 0.6887, 0.8481]) +tensor([0.5455, 0.6563, 0.3370, ..., 0.1536, 0.0207, 0.1893]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.169308185577393 seconds +Time: 10.833286046981812 seconds -[40.09, 39.04, 39.2, 39.28, 39.5, 39.12, 39.02, 39.36, 39.0, 39.1] -[95.75] -12.522717714309692 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 423606, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.169308185577393, 'TIME_S_1KI': 0.024006525369275677, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1199.050221145153, 'W': 95.75} -[40.09, 39.04, 39.2, 39.28, 39.5, 39.12, 39.02, 39.36, 39.0, 39.1, 39.76, 39.19, 39.37, 38.99, 39.74, 39.45, 39.52, 39.05, 39.58, 44.24] -710.005 -35.50025 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 423606, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.169308185577393, 'TIME_S_1KI': 0.024006525369275677, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1199.050221145153, 'W': 95.75, 'J_1KI': 2.8305789369016328, 'W_1KI': 0.2260355141334164, 'W_D': 60.24975, 'J_D': 754.4906116077303, 'W_D_1KI': 0.14223063412699538, 'J_D_1KI': 0.0003357616136858198} +[40.81, 39.45, 39.13, 38.94, 38.98, 39.39, 39.0, 39.64, 39.01, 40.46] +[96.14] +12.858270168304443 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 439130, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.833286046981812, 'TIME_S_1KI': 0.024669883740536542, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1236.1940939807891, 'W': 96.14} +[40.81, 39.45, 39.13, 38.94, 38.98, 39.39, 39.0, 39.64, 39.01, 40.46, 39.76, 39.08, 39.11, 38.91, 39.52, 38.95, 39.43, 39.01, 39.09, 38.98] +706.645 +35.33225 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 439130, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.833286046981812, 'TIME_S_1KI': 0.024669883740536542, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1236.1940939807891, 'W': 96.14, 'J_1KI': 2.8150982487664, 'W_1KI': 0.2189328900325644, 'W_D': 60.80775, 'J_D': 781.8824778267145, 'W_D_1KI': 0.138473231161615, 'J_D_1KI': 0.0003153353930763441} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json index 1eeb5de..bfd0516 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json +++ 
b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 247437, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.32590627670288, "TIME_S_1KI": 0.04173145599365851, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1233.848487944603, "W": 98.03, "J_1KI": 4.986515710846005, "W_1KI": 0.39618165432008956, "W_D": 62.5795, "J_D": 787.6529781835079, "W_D_1KI": 0.2529108419516887, "J_D_1KI": 0.001022122164234487} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 244760, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.325824975967407, "TIME_S_1KI": 0.042187550972247946, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1282.2631218481063, "W": 98.38, "J_1KI": 5.238858971433674, "W_1KI": 0.40194476221604836, "W_D": 62.769499999999994, "J_D": 818.1237551010846, "W_D_1KI": 0.2564532603366563, "J_D_1KI": 0.0010477743926158533} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output index f512a45..edb881e 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.016702651977539062} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.01943659782409668} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 24993, 24996, 25000]), - col_indices=tensor([ 203, 3164, 3874, ..., 1660, 2575, 4898]), - values=tensor([0.2509, 0.0733, 0.7857, ..., 0.9782, 0.1584, 0.7182]), +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24986, 24993, 25000]), + col_indices=tensor([ 944, 1102, 1187, ..., 2452, 3392, 3609]), + values=tensor([0.7522, 0.3420, 0.2595, ..., 0.1979, 0.1722, 0.7557]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6754, 0.2035, 0.5445, ..., 0.8964, 0.5875, 0.7630]) +tensor([0.0567, 0.9477, 0.6395, ..., 0.6412, 0.4925, 0.4268]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.016702651977539062 seconds +Time: 0.01943659782409668 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '62864', '-ss', '5000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.6676299571990967} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '54021', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.317448377609253} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 24992, 24998, 25000]), - col_indices=tensor([1528, 1565, 2407, ..., 4843, 196, 1526]), - values=tensor([0.9615, 0.4377, 0.6921, ..., 0.0433, 0.3280, 0.0962]), +tensor(crow_indices=tensor([ 0, 4, 6, ..., 24991, 24996, 25000]), + col_indices=tensor([1724, 1740, 2512, ..., 2523, 3784, 4136]), + values=tensor([0.7377, 0.7615, 0.8356, ..., 0.4074, 0.7017, 0.6251]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1897, 0.1898, 0.6419, ..., 0.9248, 0.4513, 0.5147]) +tensor([0.1495, 0.6392, 0.8753, ..., 0.6190, 0.2728, 0.3068]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 2.6676299571990967 seconds +Time: 2.317448377609253 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '247437', '-ss', '5000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.32590627670288} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '244760', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.325824975967407} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 13, ..., 24992, 24996, 25000]), - col_indices=tensor([ 429, 548, 735, ..., 2923, 3331, 3611]), - values=tensor([0.1470, 0.7094, 0.7244, ..., 0.3013, 0.3840, 0.1701]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 24992, 24995, 25000]), + col_indices=tensor([1055, 3537, 4773, ..., 2200, 4406, 4831]), + values=tensor([0.3935, 0.5459, 0.0701, ..., 0.5783, 0.3040, 0.1605]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0349, 0.9280, 0.8549, ..., 0.2131, 0.1223, 0.0130]) +tensor([0.3300, 0.9008, 0.1999, ..., 0.9229, 0.2876, 0.9678]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.32590627670288 seconds +Time: 10.325824975967407 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 13, ..., 24992, 24996, 25000]), - col_indices=tensor([ 429, 548, 735, ..., 2923, 3331, 3611]), - values=tensor([0.1470, 0.7094, 0.7244, ..., 0.3013, 0.3840, 0.1701]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 24992, 24995, 25000]), + col_indices=tensor([1055, 3537, 4773, ..., 2200, 4406, 4831]), + values=tensor([0.3935, 0.5459, 0.0701, ..., 0.5783, 0.3040, 0.1605]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0349, 0.9280, 0.8549, ..., 0.2131, 0.1223, 0.0130]) +tensor([0.3300, 0.9008, 0.1999, ..., 0.9229, 0.2876, 0.9678]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.32590627670288 seconds +Time: 10.325824975967407 seconds -[40.5, 39.52, 39.15, 39.13, 39.57, 40.5, 39.17, 39.06, 39.52, 39.1] -[98.03] -12.586437702178955 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 247437, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.32590627670288, 'TIME_S_1KI': 0.04173145599365851, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1233.848487944603, 'W': 98.03} -[40.5, 39.52, 39.15, 39.13, 39.57, 40.5, 39.17, 39.06, 39.52, 39.1, 40.17, 39.6, 39.6, 39.15, 39.15, 39.05, 39.05, 39.08, 39.25, 39.15] -709.01 -35.4505 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 247437, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.32590627670288, 'TIME_S_1KI': 0.04173145599365851, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1233.848487944603, 'W': 98.03, 'J_1KI': 4.986515710846005, 'W_1KI': 0.39618165432008956, 'W_D': 62.5795, 'J_D': 787.6529781835079, 'W_D_1KI': 0.2529108419516887, 'J_D_1KI': 0.001022122164234487} +[39.76, 44.57, 39.04, 39.05, 39.19, 38.89, 39.14, 39.36, 39.43, 39.34] +[98.38] +13.033778429031372 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 244760, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.325824975967407, 'TIME_S_1KI': 0.042187550972247946, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.2631218481063, 'W': 98.38} +[39.76, 44.57, 39.04, 39.05, 39.19, 38.89, 39.14, 39.36, 39.43, 39.34, 40.19, 39.51, 39.02, 39.31, 39.14, 40.3, 39.01, 39.01, 38.96, 39.27] +712.21 +35.6105 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 244760, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.325824975967407, 'TIME_S_1KI': 0.042187550972247946, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.2631218481063, 'W': 98.38, 'J_1KI': 5.238858971433674, 'W_1KI': 0.40194476221604836, 'W_D': 62.769499999999994, 'J_D': 818.1237551010846, 'W_D_1KI': 0.2564532603366563, 'J_D_1KI': 0.0010477743926158533} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json index 685c3bf..eaa7f45 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json +++ 
b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 163068, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.72166895866394, "TIME_S_1KI": 0.07188209187985345, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1665.769057817459, "W": 116.82, "J_1KI": 10.215180524796153, "W_1KI": 0.7163882552064169, "W_D": 81.24949999999998, "J_D": 1158.5593482549189, "W_D_1KI": 0.4982553290651751, "J_D_1KI": 0.0030555064700933054} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 152771, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.930854320526123, "TIME_S_1KI": 0.07155058434209453, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1604.5886056470872, "W": 115.67, "J_1KI": 10.503227743793568, "W_1KI": 0.7571463170366104, "W_D": 79.54775000000001, "J_D": 1103.4962674406768, "W_D_1KI": 0.5206992819317804, "J_D_1KI": 0.0034083646891869554} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output index bc4d2ed..f40d406 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.02829742431640625} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.023181676864624023} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 58, 108, ..., 249919, 249959, +tensor(crow_indices=tensor([ 0, 55, 94, ..., 249894, 249941, 250000]), - col_indices=tensor([ 73, 104, 551, ..., 4719, 4888, 4958]), - values=tensor([0.4939, 0.4915, 0.0888, ..., 0.3493, 0.1552, 0.1459]), + col_indices=tensor([ 15, 20, 118, ..., 4709, 4950, 4982]), + values=tensor([0.3949, 0.6252, 0.7734, ..., 0.1433, 0.8658, 0.9936]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.1377, 0.0837, 0.3150, ..., 0.5794, 0.8670, 0.3865]) +tensor([0.9810, 0.2218, 0.5564, ..., 0.7421, 0.8578, 0.2367]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.02829742431640625 seconds +Time: 0.023181676864624023 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '37105', '-ss', '5000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.3891899585723877} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '45294', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.113070011138916} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 55, 99, ..., 249890, 249937, +tensor(crow_indices=tensor([ 0, 46, 98, ..., 249898, 249953, 250000]), - col_indices=tensor([ 6, 32, 44, ..., 4844, 4921, 4988]), - values=tensor([0.1281, 0.2469, 0.7745, ..., 0.0638, 0.9042, 0.9189]), + col_indices=tensor([ 1, 15, 17, ..., 4938, 4952, 4956]), + values=tensor([0.7179, 0.8487, 0.2483, ..., 0.2443, 0.9815, 0.3824]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.6302, 0.1474, 0.6987, ..., 0.1092, 0.0062, 0.2645]) +tensor([0.6142, 0.5826, 0.7574, ..., 0.9763, 0.8571, 0.0747]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 2.3891899585723877 seconds +Time: 3.113070011138916 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '163068', '-ss', '5000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.72166895866394} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '152771', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.930854320526123} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 94, ..., 249900, 249951, +tensor(crow_indices=tensor([ 0, 47, 98, ..., 249882, 249937, 250000]), - col_indices=tensor([ 17, 114, 188, ..., 4806, 4921, 4968]), - values=tensor([0.1229, 0.8785, 0.6808, ..., 0.9268, 0.7326, 0.7148]), + col_indices=tensor([ 114, 217, 246, ..., 4895, 4908, 4986]), + values=tensor([0.9429, 0.8691, 0.4109, ..., 0.0935, 0.8718, 0.7828]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4545, 0.7872, 0.8321, ..., 0.0206, 0.6423, 0.1627]) +tensor([0.2849, 0.2833, 0.3387, ..., 0.0747, 0.2812, 0.2237]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 11.72166895866394 seconds +Time: 10.930854320526123 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 94, ..., 249900, 249951, +tensor(crow_indices=tensor([ 0, 47, 98, ..., 249882, 249937, 250000]), - col_indices=tensor([ 17, 114, 188, ..., 4806, 4921, 4968]), - values=tensor([0.1229, 0.8785, 0.6808, ..., 0.9268, 0.7326, 0.7148]), + col_indices=tensor([ 114, 217, 246, ..., 4895, 4908, 4986]), + values=tensor([0.9429, 0.8691, 0.4109, ..., 0.0935, 0.8718, 0.7828]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4545, 0.7872, 0.8321, ..., 0.0206, 0.6423, 0.1627]) +tensor([0.2849, 0.2833, 0.3387, ..., 0.0747, 0.2812, 0.2237]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 11.72166895866394 seconds +Time: 10.930854320526123 seconds -[40.87, 39.29, 39.18, 39.72, 39.08, 39.54, 39.6, 39.54, 39.86, 39.27] -[116.82] -14.259279727935791 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 163068, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.72166895866394, 'TIME_S_1KI': 0.07188209187985345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1665.769057817459, 'W': 116.82} -[40.87, 39.29, 39.18, 39.72, 39.08, 39.54, 39.6, 39.54, 39.86, 39.27, 40.71, 39.19, 39.43, 39.27, 39.46, 39.83, 39.54, 39.53, 39.37, 39.11] -711.4100000000001 -35.5705 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 163068, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.72166895866394, 'TIME_S_1KI': 0.07188209187985345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1665.769057817459, 'W': 116.82, 'J_1KI': 10.215180524796153, 'W_1KI': 0.7163882552064169, 'W_D': 81.24949999999998, 'J_D': 1158.5593482549189, 'W_D_1KI': 0.4982553290651751, 'J_D_1KI': 0.0030555064700933054} +[39.91, 39.78, 40.82, 39.59, 39.84, 39.72, 39.23, 39.32, 39.18, 39.93] +[115.67] +13.872124195098877 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 152771, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.930854320526123, 'TIME_S_1KI': 0.07155058434209453, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1604.5886056470872, 'W': 115.67} +[39.91, 39.78, 40.82, 39.59, 39.84, 39.72, 39.23, 39.32, 39.18, 39.93, 40.03, 39.32, 39.26, 39.5, 39.74, 39.61, 39.66, 39.33, 40.94, 55.34] +722.4449999999999 +36.122249999999994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 152771, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.930854320526123, 'TIME_S_1KI': 0.07155058434209453, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1604.5886056470872, 'W': 115.67, 'J_1KI': 10.503227743793568, 'W_1KI': 0.7571463170366104, 'W_D': 79.54775000000001, 'J_D': 1103.4962674406768, 'W_D_1KI': 0.5206992819317804, 'J_D_1KI': 0.0034083646891869554} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json index 022fbf8..7ef43d0 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 90395, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.401180028915405, "TIME_S_1KI": 0.11506366534559882, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1713.355565071106, "W": 132.0, "J_1KI": 18.954096632237466, "W_1KI": 1.4602577576193374, "W_D": 96.25175, "J_D": 1249.344481138885, "W_D_1KI": 1.0647906410752808, "J_D_1KI": 0.011779309044474592} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 91453, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.426929712295532, "TIME_S_1KI": 0.11401408059107446, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1730.480533401966, "W": 133.63, "J_1KI": 18.922075092145324, "W_1KI": 1.4611877139076903, "W_D": 97.79375, "J_D": 1266.4085958495737, "W_D_1KI": 1.0693334280996796, "J_D_1KI": 0.011692710223827316} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output index 6ede2ab..1b0d7e5 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03801298141479492} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03647494316101074} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 279, 546, ..., 1249528, - 1249760, 1250000]), - col_indices=tensor([ 17, 21, 26, ..., 4944, 4980, 4991]), - values=tensor([0.9138, 0.1459, 0.7159, ..., 0.0773, 0.4834, 0.3377]), +tensor(crow_indices=tensor([ 0, 224, 472, ..., 1249485, + 1249755, 1250000]), + col_indices=tensor([ 42, 63, 65, ..., 4929, 4934, 4955]), + values=tensor([0.9997, 0.6564, 0.8793, ..., 0.2501, 0.6294, 0.5996]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.0568, 0.1587, 0.8688, ..., 0.8476, 0.8640, 0.6593]) +tensor([0.2116, 0.5433, 0.6548, ..., 0.8394, 0.9621, 0.4668]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 0.03801298141479492 seconds +Time: 0.03647494316101074 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27622', '-ss', '5000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 3.2084648609161377} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28786', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 3.304973840713501} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 251, 530, ..., 1249473, - 1249739, 1250000]), - col_indices=tensor([ 53, 63, 72, ..., 4941, 4984, 4995]), - values=tensor([0.4190, 0.6332, 0.1682, ..., 0.1102, 0.0295, 0.1696]), +tensor(crow_indices=tensor([ 0, 254, 516, ..., 1249479, + 1249742, 1250000]), + col_indices=tensor([ 25, 80, 95, ..., 4912, 4972, 4985]), + values=tensor([0.8354, 0.9904, 0.9019, ..., 0.7510, 0.5312, 0.2669]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.4081, 0.8253, 0.9060, ..., 0.4379, 0.8960, 0.7193]) +tensor([0.3432, 0.7192, 0.5992, ..., 0.6854, 0.7940, 0.1194]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 3.2084648609161377 seconds +Time: 3.304973840713501 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '90395', '-ss', '5000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.401180028915405} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '91453', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.426929712295532} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 225, 477, ..., 1249532, - 1249754, 1250000]), - col_indices=tensor([ 0, 4, 16, ..., 4911, 4963, 4980]), - values=tensor([0.5027, 0.8615, 0.8405, ..., 0.5051, 0.1395, 0.2376]), +tensor(crow_indices=tensor([ 0, 268, 515, ..., 1249493, + 1249744, 1250000]), + col_indices=tensor([ 7, 45, 98, ..., 4983, 4985, 4997]), + values=tensor([0.9678, 0.6032, 0.4563, ..., 0.0809, 0.2549, 0.8523]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.7929, 0.2058, 0.1103, ..., 0.0989, 0.8674, 0.8642]) +tensor([0.9236, 0.3779, 0.8296, ..., 0.1378, 0.9857, 0.0479]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.401180028915405 seconds +Time: 10.426929712295532 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 225, 477, ..., 1249532, - 1249754, 1250000]), - col_indices=tensor([ 0, 4, 16, ..., 4911, 4963, 4980]), - values=tensor([0.5027, 0.8615, 0.8405, ..., 0.5051, 0.1395, 0.2376]), +tensor(crow_indices=tensor([ 0, 268, 515, ..., 1249493, + 1249744, 1250000]), + col_indices=tensor([ 7, 45, 98, ..., 4983, 4985, 4997]), + values=tensor([0.9678, 0.6032, 0.4563, ..., 0.0809, 0.2549, 0.8523]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.7929, 0.2058, 0.1103, ..., 0.0989, 0.8674, 0.8642]) +tensor([0.9236, 0.3779, 0.8296, ..., 0.1378, 0.9857, 0.0479]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.401180028915405 seconds +Time: 10.426929712295532 seconds -[40.46, 40.0, 39.58, 39.65, 39.39, 39.96, 39.38, 39.29, 39.31, 39.34] -[132.0] -12.979966402053833 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 90395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.401180028915405, 'TIME_S_1KI': 0.11506366534559882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1713.355565071106, 'W': 132.0} -[40.46, 40.0, 39.58, 39.65, 39.39, 39.96, 39.38, 39.29, 39.31, 39.34, 40.66, 39.72, 41.06, 39.66, 39.37, 39.71, 39.69, 39.36, 39.44, 40.33] -714.9649999999999 -35.74825 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 90395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.401180028915405, 'TIME_S_1KI': 0.11506366534559882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1713.355565071106, 'W': 132.0, 'J_1KI': 18.954096632237466, 'W_1KI': 1.4602577576193374, 'W_D': 96.25175, 'J_D': 1249.344481138885, 'W_D_1KI': 1.0647906410752808, 'J_D_1KI': 0.011779309044474592} +[46.82, 39.84, 39.43, 39.46, 39.39, 39.51, 39.45, 40.88, 39.31, 39.7] +[133.63] +12.949790716171265 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91453, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.426929712295532, 'TIME_S_1KI': 0.11401408059107446, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1730.480533401966, 'W': 133.63} +[46.82, 39.84, 39.43, 39.46, 39.39, 39.51, 39.45, 40.88, 39.31, 39.7, 40.56, 39.85, 39.52, 39.47, 39.48, 39.65, 39.37, 39.3, 39.63, 39.29] +716.7249999999999 +35.83624999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91453, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.426929712295532, 'TIME_S_1KI': 0.11401408059107446, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1730.480533401966, 'W': 133.63, 'J_1KI': 18.922075092145324, 'W_1KI': 1.4611877139076903, 'W_D': 97.79375, 'J_D': 1266.4085958495737, 'W_D_1KI': 1.0693334280996796, 'J_D_1KI': 0.011692710223827316} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json index 28b4bfb..0659b36 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 52843, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.472357749938965, "TIME_S_1KI": 0.19817871335728413, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1834.1895242333412, "W": 138.65, "J_1KI": 34.710170206713116, "W_1KI": 2.623810154608936, "W_D": 102.62450000000001, "J_D": 1357.611127513051, "W_D_1KI": 1.9420642279961398, "J_D_1KI": 0.036751589198117815} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 52932, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.611177206039429, "TIME_S_1KI": 0.20046809502832744, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1857.9822360515595, "W": 137.3, "J_1KI": 35.10130424037557, "W_1KI": 2.5938940527469208, "W_D": 101.48075000000001, "J_D": 1373.266065558553, "W_D_1KI": 1.91719092420464, "J_D_1KI": 0.03621988445939394} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output index 05a194a..29e9499 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.05003714561462402} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.048235177993774414} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 498, 977, ..., 2499006, - 2499489, 2500000]), - col_indices=tensor([ 2, 20, 22, ..., 4935, 4945, 4946]), - values=tensor([0.7520, 0.3359, 0.1395, ..., 0.8155, 0.8337, 0.5892]), +tensor(crow_indices=tensor([ 0, 503, 960, ..., 2498968, + 2499463, 2500000]), + col_indices=tensor([ 5, 6, 43, ..., 4985, 4990, 4995]), + values=tensor([0.9774, 0.9396, 0.3252, ..., 0.3425, 0.9018, 0.9739]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3287, 0.7670, 0.4633, ..., 0.8662, 0.8996, 0.4236]) +tensor([0.9855, 0.5305, 0.7022, ..., 0.7874, 0.9309, 0.2173]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 0.05003714561462402 seconds +Time: 0.048235177993774414 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20984', '-ss', '5000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 4.169527769088745} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21768', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 4.318019390106201} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 527, 1017, ..., 2498957, - 2499469, 2500000]), - col_indices=tensor([ 0, 2, 46, ..., 4971, 4981, 4983]), - values=tensor([0.4264, 0.7891, 0.1289, ..., 0.9402, 0.9265, 0.8274]), +tensor(crow_indices=tensor([ 0, 497, 971, ..., 2498991, + 2499512, 2500000]), + col_indices=tensor([ 22, 34, 57, ..., 4955, 4959, 4978]), + values=tensor([0.4334, 0.8588, 0.8612, ..., 0.1945, 0.8902, 0.3845]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1592, 0.0229, 0.7345, ..., 0.9022, 0.9396, 0.4003]) +tensor([0.0333, 0.8449, 0.6470, ..., 0.5455, 0.8886, 0.2722]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 4.169527769088745 seconds +Time: 4.318019390106201 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52843', '-ss', '5000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.472357749938965} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52932', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.611177206039429} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 536, 1043, ..., 2499033, - 2499531, 2500000]), - col_indices=tensor([ 2, 4, 9, ..., 4990, 4992, 4998]), - values=tensor([0.3950, 0.1857, 0.2386, ..., 0.4312, 0.4990, 0.5416]), +tensor(crow_indices=tensor([ 0, 513, 1021, ..., 2498990, + 2499487, 2500000]), + col_indices=tensor([ 1, 6, 11, ..., 4985, 4989, 4999]), + values=tensor([0.0321, 0.9116, 0.7036, ..., 0.4498, 0.4675, 0.8894]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5233, 0.3838, 0.2090, ..., 0.9440, 0.1891, 0.8384]) +tensor([0.1398, 0.0564, 0.1701, ..., 0.8437, 0.6801, 0.5112]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.472357749938965 seconds +Time: 10.611177206039429 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 536, 1043, ..., 2499033, - 2499531, 2500000]), - col_indices=tensor([ 2, 4, 9, ..., 4990, 4992, 4998]), - values=tensor([0.3950, 0.1857, 0.2386, ..., 0.4312, 0.4990, 0.5416]), +tensor(crow_indices=tensor([ 0, 513, 1021, ..., 2498990, + 2499487, 2500000]), + col_indices=tensor([ 1, 6, 11, ..., 4985, 4989, 4999]), + values=tensor([0.0321, 0.9116, 0.7036, ..., 0.4498, 0.4675, 0.8894]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5233, 0.3838, 0.2090, ..., 0.9440, 0.1891, 0.8384]) +tensor([0.1398, 0.0564, 0.1701, ..., 0.8437, 0.6801, 0.5112]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.472357749938965 seconds +Time: 10.611177206039429 seconds -[40.4, 39.54, 40.0, 39.38, 39.54, 39.86, 39.47, 40.22, 39.37, 39.4] -[138.65] -13.228918313980103 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.472357749938965, 'TIME_S_1KI': 0.19817871335728413, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1834.1895242333412, 'W': 138.65} -[40.4, 39.54, 40.0, 39.38, 39.54, 39.86, 39.47, 40.22, 39.37, 39.4, 41.49, 40.09, 39.59, 39.6, 39.65, 39.39, 39.42, 39.69, 45.2, 39.71] -720.51 -36.0255 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.472357749938965, 'TIME_S_1KI': 0.19817871335728413, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1834.1895242333412, 'W': 138.65, 'J_1KI': 34.710170206713116, 'W_1KI': 2.623810154608936, 'W_D': 102.62450000000001, 'J_D': 1357.611127513051, 'W_D_1KI': 1.9420642279961398, 'J_D_1KI': 0.036751589198117815} +[40.25, 40.11, 39.45, 39.51, 39.44, 40.31, 40.2, 39.34, 39.39, 40.88] +[137.3] +13.532281398773193 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52932, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.611177206039429, 'TIME_S_1KI': 0.20046809502832744, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1857.9822360515595, 'W': 137.3} +[40.25, 40.11, 39.45, 39.51, 39.44, 40.31, 40.2, 39.34, 39.39, 40.88, 40.72, 39.81, 39.61, 39.72, 39.46, 39.86, 39.86, 39.84, 39.74, 39.62] +716.385 +35.81925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52932, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.611177206039429, 'TIME_S_1KI': 0.20046809502832744, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1857.9822360515595, 'W': 137.3, 'J_1KI': 35.10130424037557, 'W_1KI': 2.5938940527469208, 'W_D': 101.48075000000001, 'J_D': 1373.266065558553, 'W_D_1KI': 1.91719092420464, 'J_D_1KI': 0.03621988445939394} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.json index 3a471cf..5d59e41 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.json +++ 
b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28798, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.444172620773315, "TIME_S_1KI": 0.36267006808713503, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1937.146224603653, "W": 139.06, "J_1KI": 67.26669298575086, "W_1KI": 4.8288075560802834, "W_D": 102.9745, "J_D": 1434.4647195847035, "W_D_1KI": 3.5757517883186334, "J_D_1KI": 0.12416667089098664} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28675, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.367558717727661, "TIME_S_1KI": 0.36155392215266474, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1923.0010931968689, "W": 139.28, "J_1KI": 67.06193873397974, "W_1KI": 4.857192676547515, "W_D": 103.496, "J_D": 1428.9411339855194, "W_D_1KI": 3.6092763731473405, "J_D_1KI": 0.12586840010975905} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.output index fd0984f..5634c2b 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.06763482093811035} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.0704355239868164} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 956, 1961, ..., 4997943, - 4998975, 5000000]), - col_indices=tensor([ 6, 18, 19, ..., 4986, 4993, 4998]), - values=tensor([0.4638, 0.8169, 0.7421, ..., 0.5926, 0.1207, 0.0279]), +tensor(crow_indices=tensor([ 0, 1011, 2020, ..., 4998002, + 4999000, 5000000]), + col_indices=tensor([ 0, 2, 4, ..., 4985, 4986, 4995]), + values=tensor([0.5170, 0.7696, 0.0601, ..., 0.2830, 0.0857, 0.2394]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1902, 0.8341, 0.1608, ..., 0.1172, 0.3175, 0.0262]) +tensor([0.2191, 0.6347, 0.0324, ..., 0.6963, 0.4125, 0.5924]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 0.06763482093811035 seconds +Time: 0.0704355239868164 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15524', '-ss', '5000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.659992933273315} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14907', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.458455324172974} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 987, 2000, ..., 4998018, - 4999001, 5000000]), - col_indices=tensor([ 4, 16, 17, ..., 4983, 4988, 4998]), - values=tensor([0.3168, 0.3066, 0.1113, ..., 0.0328, 0.5136, 0.1275]), +tensor(crow_indices=tensor([ 0, 968, 1996, ..., 4998022, + 4999009, 5000000]), + col_indices=tensor([ 2, 4, 6, ..., 4985, 4989, 4995]), + values=tensor([0.4424, 0.0227, 0.5193, ..., 0.0048, 0.8974, 0.3541]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.9704, 0.5285, 0.3815, ..., 0.6149, 0.3291, 0.1983]) +tensor([0.6849, 0.5933, 0.5488, ..., 0.6403, 0.3844, 0.3278]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 5.659992933273315 seconds +Time: 5.458455324172974 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28798', '-ss', '5000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.444172620773315} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28675', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.367558717727661} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1005, 2014, ..., 4998103, - 4999056, 5000000]), - col_indices=tensor([ 4, 9, 14, ..., 4980, 4983, 4993]), - values=tensor([0.7293, 0.3445, 0.3834, ..., 0.7374, 0.4715, 0.7945]), +tensor(crow_indices=tensor([ 0, 1035, 2088, ..., 4997978, + 4999013, 5000000]), + col_indices=tensor([ 6, 12, 18, ..., 4989, 4993, 4999]), + values=tensor([0.0591, 0.4462, 0.0291, ..., 0.8101, 0.4669, 0.6210]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.9531, 0.9906, 0.0327, ..., 0.2819, 0.9884, 0.0185]) +tensor([0.3435, 0.0770, 0.4550, ..., 0.4506, 0.4973, 0.4864]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.444172620773315 seconds +Time: 10.367558717727661 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1005, 2014, ..., 4998103, - 4999056, 5000000]), - col_indices=tensor([ 4, 9, 14, ..., 4980, 4983, 4993]), - values=tensor([0.7293, 0.3445, 0.3834, ..., 0.7374, 0.4715, 0.7945]), +tensor(crow_indices=tensor([ 0, 1035, 2088, ..., 4997978, + 4999013, 5000000]), + col_indices=tensor([ 6, 12, 18, ..., 4989, 4993, 4999]), + values=tensor([0.0591, 0.4462, 0.0291, ..., 0.8101, 0.4669, 0.6210]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.9531, 0.9906, 0.0327, ..., 0.2819, 0.9884, 0.0185]) +tensor([0.3435, 0.0770, 0.4550, ..., 0.4506, 0.4973, 0.4864]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.444172620773315 seconds +Time: 10.367558717727661 seconds -[40.89, 39.68, 39.94, 39.87, 39.8, 39.94, 40.85, 40.96, 39.65, 40.01] -[139.06] -13.930290699005127 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28798, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.444172620773315, 'TIME_S_1KI': 0.36267006808713503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1937.146224603653, 'W': 139.06} -[40.89, 39.68, 39.94, 39.87, 39.8, 39.94, 40.85, 40.96, 39.65, 40.01, 41.32, 40.08, 40.09, 40.7, 40.16, 39.61, 39.76, 39.88, 39.81, 39.64] -721.71 -36.0855 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28798, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.444172620773315, 'TIME_S_1KI': 0.36267006808713503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1937.146224603653, 'W': 139.06, 'J_1KI': 67.26669298575086, 'W_1KI': 4.8288075560802834, 'W_D': 102.9745, 'J_D': 1434.4647195847035, 'W_D_1KI': 3.5757517883186334, 'J_D_1KI': 0.12416667089098664} +[40.24, 39.6, 39.51, 39.42, 39.55, 39.78, 39.64, 39.82, 39.87, 40.57] +[139.28] +13.80672812461853 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28675, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.367558717727661, 'TIME_S_1KI': 0.36155392215266474, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1923.0010931968689, 'W': 139.28} +[40.24, 39.6, 39.51, 39.42, 39.55, 39.78, 39.64, 39.82, 39.87, 40.57, 40.14, 39.5, 39.75, 39.83, 39.43, 39.58, 39.98, 39.51, 40.53, 39.81] +715.6800000000001 +35.784000000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28675, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.367558717727661, 'TIME_S_1KI': 0.36155392215266474, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1923.0010931968689, 'W': 139.28, 'J_1KI': 67.06193873397974, 'W_1KI': 4.857192676547515, 'W_D': 103.496, 'J_D': 1428.9411339855194, 'W_D_1KI': 3.6092763731473405, 'J_D_1KI': 0.12586840010975905} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.json index 6f0dd43..a3c3be2 100644 --- 
a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 18401, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.148674011230469, "TIME_S_1KI": 0.5515283958062317, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1912.232966468334, "W": 137.63, "J_1KI": 103.92005687018825, "W_1KI": 7.479484810608119, "W_D": 101.70774999999999, "J_D": 1413.1287691296934, "W_D_1KI": 5.5272947122439, "J_D_1KI": 0.3003801267454975} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 19347, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.572320699691772, "TIME_S_1KI": 0.5464578849274706, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2023.443679611683, "W": 136.87, "J_1KI": 104.58694782714028, "W_1KI": 7.07448183180855, "W_D": 100.796, "J_D": 1490.1368388261797, "W_D_1KI": 5.20990334418773, "J_D_1KI": 0.2692874008470424} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.output index 60a702d..6a49bae 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.093505859375} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.0887749195098877} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1476, 2946, ..., 7497092, - 7498499, 7500000]), - col_indices=tensor([ 3, 4, 9, ..., 4993, 4995, 4998]), - values=tensor([0.7692, 0.9577, 0.6421, ..., 0.4974, 0.8037, 0.4799]), +tensor(crow_indices=tensor([ 0, 1502, 3027, ..., 7497008, + 7498539, 7500000]), + col_indices=tensor([ 0, 1, 3, ..., 4996, 4998, 4999]), + values=tensor([0.3598, 0.2894, 0.8829, ..., 0.3025, 0.7495, 0.3483]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.8552, 0.1634, 0.3191, ..., 0.0243, 0.9305, 0.7580]) +tensor([0.0129, 0.4104, 0.6159, ..., 0.6777, 0.7347, 0.9375]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 0.093505859375 seconds +Time: 0.0887749195098877 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '11229', '-ss', '5000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.407301664352417} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '11827', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.910944223403931} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1491, 2991, ..., 7496987, - 7498491, 7500000]), - col_indices=tensor([ 6, 7, 10, ..., 4987, 4988, 4999]), - values=tensor([0.9932, 0.6823, 0.0941, ..., 0.3170, 0.1700, 0.5277]), +tensor(crow_indices=tensor([ 0, 1504, 3041, ..., 7497033, + 7498540, 7500000]), + col_indices=tensor([ 11, 12, 13, ..., 4992, 4996, 4997]), + values=tensor([0.6635, 0.8462, 0.1881, ..., 0.2418, 0.8339, 0.4223]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.7857, 0.3541, 0.7153, ..., 0.0858, 0.7918, 0.2952]) +tensor([0.1539, 0.9828, 0.7502, ..., 0.6822, 0.7232, 0.1164]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 6.407301664352417 seconds +Time: 6.910944223403931 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18401', '-ss', '5000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.148674011230469} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '17969', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.75194501876831} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1524, 2999, ..., 7496978, - 7498500, 7500000]), - col_indices=tensor([ 0, 1, 3, ..., 4975, 4979, 4999]), - values=tensor([0.7847, 0.2112, 0.1435, ..., 0.9949, 0.2225, 0.8434]), +tensor(crow_indices=tensor([ 0, 1521, 3049, ..., 7497131, + 7498579, 7500000]), + col_indices=tensor([ 1, 6, 11, ..., 4993, 4996, 4998]), + values=tensor([0.4767, 0.7925, 0.6152, ..., 0.5404, 0.2176, 0.3422]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.9436, 0.7820, 0.2976, ..., 0.7279, 0.8012, 0.5089]) +tensor([0.8753, 0.8267, 0.1281, ..., 0.9770, 0.8043, 0.2049]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.148674011230469 seconds +Time: 9.75194501876831 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19347', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.572320699691772} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1524, 2999, ..., 7496978, - 7498500, 7500000]), - col_indices=tensor([ 0, 1, 3, ..., 4975, 4979, 4999]), - values=tensor([0.7847, 0.2112, 0.1435, ..., 0.9949, 0.2225, 0.8434]), +tensor(crow_indices=tensor([ 0, 1485, 2948, ..., 7496987, + 7498475, 7500000]), + col_indices=tensor([ 9, 12, 14, ..., 4981, 4995, 4998]), + values=tensor([0.3812, 0.2057, 0.2607, ..., 0.9056, 0.6715, 0.4951]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.9436, 0.7820, 0.2976, ..., 0.7279, 0.8012, 0.5089]) +tensor([0.5119, 0.3228, 0.8802, ..., 0.4955, 0.2289, 0.1030]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +76,30 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.148674011230469 seconds +Time: 10.572320699691772 seconds -[40.88, 40.15, 40.48, 39.7, 39.9, 39.72, 39.73, 39.77, 39.68, 39.5] -[137.63] -13.894012689590454 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 18401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.148674011230469, 'TIME_S_1KI': 0.5515283958062317, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1912.232966468334, 'W': 137.63} -[40.88, 40.15, 40.48, 39.7, 39.9, 39.72, 39.73, 39.77, 39.68, 39.5, 40.73, 39.74, 41.44, 39.63, 39.52, 39.59, 39.98, 39.43, 39.67, 39.52] -718.445 -35.922250000000005 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 18401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.148674011230469, 'TIME_S_1KI': 0.5515283958062317, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1912.232966468334, 'W': 137.63, 'J_1KI': 103.92005687018825, 'W_1KI': 7.479484810608119, 'W_D': 101.70774999999999, 'J_D': 1413.1287691296934, 'W_D_1KI': 5.5272947122439, 'J_D_1KI': 0.3003801267454975} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1485, 2948, ..., 7496987, + 7498475, 7500000]), + col_indices=tensor([ 9, 12, 14, ..., 4981, 4995, 4998]), + values=tensor([0.3812, 0.2057, 0.2607, ..., 0.9056, 0.6715, 0.4951]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.5119, 0.3228, 0.8802, ..., 0.4955, 0.2289, 0.1030]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.572320699691772 seconds + +[41.41, 39.74, 39.87, 40.98, 39.93, 39.8, 39.75, 40.15, 40.46, 39.9] +[136.87] +14.783690214157104 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19347, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.572320699691772, 'TIME_S_1KI': 0.5464578849274706, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2023.443679611683, 'W': 136.87} +[41.41, 39.74, 39.87, 40.98, 39.93, 39.8, 39.75, 40.15, 40.46, 39.9, 40.45, 39.73, 39.75, 39.94, 39.71, 40.27, 40.08, 40.07, 40.21, 40.32] +721.48 +36.074 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19347, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.572320699691772, 'TIME_S_1KI': 0.5464578849274706, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2023.443679611683, 'W': 136.87, 'J_1KI': 104.58694782714028, 'W_1KI': 7.07448183180855, 'W_D': 100.796, 'J_D': 1490.1368388261797, 'W_D_1KI': 5.20990334418773, 'J_D_1KI': 0.2692874008470424} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.json index b33500c..70f565b 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4623, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.869210720062256, "TIME_S_1KI": 2.3511163140952314, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1715.391318473816, "W": 123.94000000000001, "J_1KI": 371.0558768059303, "W_1KI": 26.809431105342853, "W_D": 87.73350000000002, "J_D": 1214.2753287019732, "W_D_1KI": 18.977611940298512, "J_D_1KI": 4.105042600107833} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 5031, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 11.223548650741577, "TIME_S_1KI": 2.230878284782663, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1917.9567476463317, "W": 124.24, "J_1KI": 381.2277375564165, "W_1KI": 24.694891671635858, "W_D": 87.814, "J_D": 1355.629860252857, "W_D_1KI": 17.454581594116476, "J_D_1KI": 3.469406001613293} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.output index 915d5a7..c7e7247 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.output +++ 
b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.2556135654449463} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.2790210247039795} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2026, 4021, ..., 9995990, - 9997978, 10000000]), - col_indices=tensor([ 1, 6, 9, ..., 4991, 4993, 4997]), - values=tensor([0.3323, 0.6585, 0.3485, ..., 0.6316, 0.2886, 0.7495]), +tensor(crow_indices=tensor([ 0, 2000, 3996, ..., 9995976, + 9998017, 10000000]), + col_indices=tensor([ 1, 3, 4, ..., 4992, 4993, 4994]), + values=tensor([0.4344, 0.2681, 0.6913, ..., 0.2550, 0.4913, 0.3141]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3914, 0.7431, 0.0627, ..., 0.4218, 0.6007, 0.5832]) +tensor([0.3647, 0.9287, 0.3969, ..., 0.1409, 0.3592, 0.3689]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 0.2556135654449463 seconds +Time: 0.2790210247039795 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4107', '-ss', '5000', '-sd', '0.4', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 9.327423810958862} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3763', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 8.507438898086548} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2032, 4046, ..., 9995943, - 9997941, 10000000]), - col_indices=tensor([ 1, 2, 4, ..., 4988, 4990, 4991]), - values=tensor([0.7887, 0.1218, 0.0752, ..., 0.7697, 0.6176, 0.4928]), +tensor(crow_indices=tensor([ 0, 2023, 4020, ..., 9995940, + 9997949, 10000000]), + col_indices=tensor([ 1, 2, 4, ..., 4996, 4997, 4998]), + values=tensor([0.1831, 0.0629, 0.7984, ..., 0.9065, 0.2929, 0.1040]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3655, 0.9637, 0.0803, ..., 0.0942, 0.4831, 0.3974]) +tensor([0.7862, 0.3853, 0.1330, ..., 0.7853, 0.2639, 0.4428]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 9.327423810958862 seconds +Time: 8.507438898086548 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4623', '-ss', '5000', '-sd', '0.4', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.869210720062256} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4644', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 9.691264152526855} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2080, 4074, ..., 9996120, - 9998055, 10000000]), - col_indices=tensor([ 0, 1, 3, ..., 4988, 4989, 4998]), - values=tensor([0.8721, 0.2802, 0.5674, ..., 0.7807, 0.4474, 0.7441]), +tensor(crow_indices=tensor([ 0, 2009, 4023, ..., 9996049, + 9998045, 10000000]), + col_indices=tensor([ 0, 1, 2, ..., 4996, 4997, 4999]), + values=tensor([0.2930, 0.9632, 0.6423, ..., 0.4736, 0.0662, 0.9617]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.9197, 0.0161, 0.2580, ..., 0.5344, 0.2373, 0.6957]) +tensor([0.8864, 0.8642, 0.6244, ..., 0.5949, 0.5906, 0.1491]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.869210720062256 seconds +Time: 9.691264152526855 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5031', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 11.223548650741577} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2080, 4074, ..., 9996120, - 9998055, 10000000]), - col_indices=tensor([ 0, 1, 3, ..., 4988, 4989, 4998]), - values=tensor([0.8721, 0.2802, 0.5674, ..., 0.7807, 0.4474, 0.7441]), +tensor(crow_indices=tensor([ 0, 1956, 4022, ..., 9995993, + 9998029, 10000000]), + col_indices=tensor([ 6, 8, 9, ..., 4989, 4993, 4996]), + values=tensor([0.9230, 0.5039, 0.8574, ..., 0.4809, 0.6177, 0.8143]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.9197, 0.0161, 0.2580, ..., 0.5344, 0.2373, 0.6957]) +tensor([0.6063, 0.9276, 0.9282, ..., 0.5823, 0.2993, 0.8060]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +76,30 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.869210720062256 seconds +Time: 11.223548650741577 seconds -[40.45, 40.3, 45.54, 40.13, 39.83, 39.68, 39.66, 40.04, 39.63, 41.25] -[123.94] -13.840497970581055 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4623, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.869210720062256, 'TIME_S_1KI': 2.3511163140952314, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1715.391318473816, 'W': 123.94000000000001} -[40.45, 40.3, 45.54, 40.13, 39.83, 39.68, 39.66, 40.04, 39.63, 41.25, 41.52, 40.34, 39.67, 39.9, 39.74, 39.58, 39.63, 39.49, 39.57, 39.58] -724.1299999999999 -36.20649999999999 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4623, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.869210720062256, 'TIME_S_1KI': 2.3511163140952314, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1715.391318473816, 'W': 123.94000000000001, 'J_1KI': 371.0558768059303, 'W_1KI': 26.809431105342853, 'W_D': 87.73350000000002, 'J_D': 1214.2753287019732, 'W_D_1KI': 18.977611940298512, 'J_D_1KI': 4.105042600107833} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1956, 4022, ..., 9995993, + 9998029, 10000000]), + col_indices=tensor([ 6, 8, 9, ..., 4989, 4993, 4996]), + values=tensor([0.9230, 0.5039, 0.8574, ..., 0.4809, 0.6177, 0.8143]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6063, 0.9276, 0.9282, ..., 0.5823, 0.2993, 0.8060]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 11.223548650741577 seconds + +[41.4, 40.38, 40.49, 39.84, 39.9, 45.5, 40.66, 40.02, 39.82, 39.77] +[124.24] +15.437514066696167 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 5031, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 11.223548650741577, 'TIME_S_1KI': 2.230878284782663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1917.9567476463317, 'W': 124.24} +[41.4, 40.38, 40.49, 39.84, 39.9, 45.5, 40.66, 40.02, 39.82, 39.77, 40.48, 40.2, 41.05, 40.13, 40.42, 39.85, 39.76, 39.74, 39.96, 39.95] +728.5200000000001 +36.426 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 5031, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 11.223548650741577, 'TIME_S_1KI': 2.230878284782663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1917.9567476463317, 'W': 124.24, 'J_1KI': 381.2277375564165, 'W_1KI': 24.694891671635858, 'W_D': 87.814, 'J_D': 1355.629860252857, 'W_D_1KI': 17.454581594116476, 'J_D_1KI': 3.469406001613293} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.json index ef8b561..e95cfa6 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3775, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 11.108771324157715, "TIME_S_1KI": 2.94272088057158, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1948.3000312638283, "W": 122.57, "J_1KI": 516.1059685467094, "W_1KI": 32.46887417218543, "W_D": 86.1335, "J_D": 1369.1270355132817, "W_D_1KI": 22.816821192052977, "J_D_1KI": 6.044191044252445} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3639, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 11.423044204711914, "TIME_S_1KI": 3.1390613368265767, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1913.1958845329284, "W": 122.77, "J_1KI": 525.747701163212, "W_1KI": 33.737290464413306, "W_D": 86.43, "J_D": 1346.888656024933, "W_D_1KI": 23.75103050288541, "J_D_1KI": 6.526801457236991} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.output index 05d0f73..22d5b3a 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.output +++ 
b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.2961087226867676} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.34267544746398926} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2490, 4975, ..., 12494955, - 12497491, 12500000]), - col_indices=tensor([ 0, 1, 2, ..., 4994, 4996, 4998]), - values=tensor([0.9230, 0.7404, 0.0716, ..., 0.8209, 0.3183, 0.8676]), +tensor(crow_indices=tensor([ 0, 2521, 4985, ..., 12494959, + 12497459, 12500000]), + col_indices=tensor([ 0, 1, 3, ..., 4995, 4998, 4999]), + values=tensor([0.3077, 0.5792, 0.1197, ..., 0.8936, 0.1947, 0.7325]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.4086, 0.9880, 0.1016, ..., 0.8907, 0.8066, 0.6446]) +tensor([0.4180, 0.8469, 0.8947, ..., 0.8931, 0.7911, 0.6347]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 0.2961087226867676 seconds +Time: 0.34267544746398926 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3545', '-ss', '5000', '-sd', '0.5', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.858964443206787} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3064', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 8.839110374450684} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2500, 5017, ..., 12494975, - 12497542, 12500000]), - col_indices=tensor([ 0, 1, 2, ..., 4994, 4997, 4999]), - values=tensor([0.5138, 0.3202, 0.6371, ..., 0.0572, 0.7854, 0.0609]), +tensor(crow_indices=tensor([ 0, 2441, 4940, ..., 12494998, + 12497446, 12500000]), + col_indices=tensor([ 2, 3, 5, ..., 4995, 4998, 4999]), + values=tensor([0.9162, 0.0160, 0.4149, ..., 0.8237, 0.2759, 0.8628]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.5537, 0.0044, 0.4461, ..., 0.4637, 0.2205, 0.0434]) +tensor([0.9573, 0.7850, 0.3646, ..., 0.3146, 0.0155, 0.4417]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 9.858964443206787 seconds +Time: 8.839110374450684 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3775', '-ss', '5000', '-sd', '0.5', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 11.108771324157715} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3639', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 11.423044204711914} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2457, 4970, ..., 12495118, - 12497586, 12500000]), - col_indices=tensor([ 0, 5, 6, ..., 4996, 4997, 4999]), - values=tensor([0.5982, 0.0244, 0.5821, ..., 0.2791, 0.2569, 0.9852]), +tensor(crow_indices=tensor([ 0, 2446, 4933, ..., 12494981, + 12497497, 12500000]), + col_indices=tensor([ 2, 3, 4, ..., 4994, 4997, 4999]), + values=tensor([0.9027, 0.0284, 0.4936, ..., 0.0666, 0.4136, 0.7094]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.1130, 0.4034, 0.5297, ..., 0.0598, 0.7079, 0.8853]) +tensor([0.5919, 0.5153, 0.6363, ..., 0.8884, 0.6968, 0.4432]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 11.108771324157715 seconds +Time: 11.423044204711914 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2457, 4970, ..., 12495118, - 12497586, 12500000]), - col_indices=tensor([ 0, 5, 6, ..., 4996, 4997, 4999]), - values=tensor([0.5982, 0.0244, 0.5821, ..., 0.2791, 0.2569, 0.9852]), +tensor(crow_indices=tensor([ 0, 2446, 4933, ..., 12494981, + 12497497, 12500000]), + col_indices=tensor([ 2, 3, 4, ..., 4994, 4997, 4999]), + values=tensor([0.9027, 0.0284, 0.4936, ..., 0.0666, 0.4136, 0.7094]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.1130, 0.4034, 0.5297, ..., 0.0598, 0.7079, 0.8853]) +tensor([0.5919, 0.5153, 0.6363, ..., 0.8884, 0.6968, 0.4432]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 11.108771324157715 seconds +Time: 11.423044204711914 seconds -[40.6, 40.02, 45.54, 40.47, 40.31, 40.18, 40.34, 40.25, 40.23, 39.68] -[122.57] -15.89540696144104 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 11.108771324157715, 'TIME_S_1KI': 2.94272088057158, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1948.3000312638283, 'W': 122.57} -[40.6, 40.02, 45.54, 40.47, 40.31, 40.18, 40.34, 40.25, 40.23, 39.68, 40.3, 40.16, 40.89, 40.3, 40.11, 40.08, 40.03, 39.96, 39.79, 39.56] -728.73 -36.4365 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 11.108771324157715, 'TIME_S_1KI': 2.94272088057158, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1948.3000312638283, 'W': 122.57, 'J_1KI': 516.1059685467094, 'W_1KI': 32.46887417218543, 'W_D': 86.1335, 'J_D': 1369.1270355132817, 'W_D_1KI': 22.816821192052977, 'J_D_1KI': 6.044191044252445} +[41.72, 39.81, 39.85, 39.78, 45.53, 39.71, 40.25, 40.38, 39.72, 39.79] +[122.77] +15.583578109741211 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3639, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 11.423044204711914, 'TIME_S_1KI': 3.1390613368265767, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1913.1958845329284, 'W': 122.77} +[41.72, 39.81, 39.85, 39.78, 45.53, 39.71, 40.25, 40.38, 39.72, 39.79, 40.44, 39.88, 39.71, 39.85, 40.63, 41.4, 39.84, 39.98, 39.72, 39.57] +726.8 +36.339999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3639, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 11.423044204711914, 'TIME_S_1KI': 3.1390613368265767, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1913.1958845329284, 'W': 122.77, 'J_1KI': 525.747701163212, 'W_1KI': 33.737290464413306, 'W_D': 86.43, 'J_D': 1346.888656024933, 'W_D_1KI': 23.75103050288541, 'J_D_1KI': 6.526801457236991} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json index aad5720..9c857d9 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json +++ 
b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 475914, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.308423519134521, "TIME_S_1KI": 0.021660265340239036, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1189.9414702892302, "W": 94.85, "J_1KI": 2.5003287784961783, "W_1KI": 0.19930071399454524, "W_D": 59.248999999999995, "J_D": 743.3088262853622, "W_D_1KI": 0.12449518190261265, "J_D_1KI": 0.000261591762172604} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 490047, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.476571798324585, "TIME_S_1KI": 0.021378708161308168, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1229.4466451644898, "W": 95.2, "J_1KI": 2.50883414277506, "W_1KI": 0.19426708050452304, "W_D": 58.61600000000001, "J_D": 756.9878629512788, "W_D_1KI": 0.11961301671064206, "J_D_1KI": 0.00024408478515457101} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output index c0883dc..61661ae 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,75 +1,102 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04127812385559082} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.018640756607055664} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([ 980, 1760, 2093, 785, 4671, 1948, 723, 1459, 1601, - 3059, 3354, 3009, 3506, 1670, 2673, 2868, 3157, 4447, - 1531, 659, 4281, 2627, 1377, 2950, 2583, 1810, 1775, - 4655, 1859, 3752, 2371, 1562, 678, 1349, 716, 1427, - 4744, 619, 1269, 399, 1864, 2208, 1480, 456, 208, - 1221, 4437, 899, 3205, 761, 3187, 2953, 2815, 148, - 2514, 1056, 1872, 3736, 1283, 2888, 2704, 2532, 4106, - 3966, 441, 2333, 946, 4406, 4968, 4149, 1833, 4307, - 4337, 2109, 4989, 3863, 2823, 4489, 3013, 4755, 4523, - 3606, 244, 568, 1109, 1740, 4991, 4457, 3089, 4428, - 4324, 4720, 4209, 3750, 3625, 450, 2141, 1794, 3260, - 4714, 3864, 2541, 1784, 2782, 3268, 3685, 3828, 1583, - 1640, 2421, 3816, 1055, 144, 4272, 260, 1691, 1506, - 1638, 1666, 89, 0, 4364, 3266, 7, 888, 1591, - 2436, 524, 4077, 4995, 1729, 2451, 3127, 2410, 3894, - 4663, 2351, 1158, 1336, 2596, 2075, 768, 1048, 2564, - 3613, 2403, 1235, 967, 1065, 2176, 1047, 2886, 1645, - 3823, 3916, 3782, 98, 2899, 2287, 3273, 4032, 4889, - 1097, 3448, 4328, 2582, 4575, 4148, 3284, 2665, 249, - 1314, 3187, 3430, 4704, 97, 1632, 931, 382, 3849, - 3930, 2162, 1791, 4253, 2474, 3110, 4678, 4692, 474, - 3715, 235, 3222, 4417, 4989, 3151, 1089, 1264, 3742, - 4431, 906, 341, 4151, 340, 1004, 4160, 1654, 1155, - 188, 4818, 2576, 447, 1886, 1055, 2211, 1261, 3706, - 4104, 4081, 2022, 1659, 4405, 105, 257, 1519, 1573, - 2647, 334, 3953, 2722, 2554, 628, 1350, 4587, 1239, - 3896, 49, 2034, 3369, 750, 1198, 743, 3025, 4827, - 4181, 3975, 2556, 3695, 4820, 3552, 3237]), - values=tensor([0.9726, 0.4932, 0.3912, 0.9738, 0.2382, 0.2194, 0.1583, - 0.5409, 0.3403, 0.0032, 0.6572, 0.0849, 0.3926, 0.9886, - 0.6424, 0.9153, 0.1836, 0.7579, 0.8401, 0.5061, 0.5178, - 0.6259, 0.6574, 0.6207, 0.6672, 0.0690, 0.9817, 0.7086, - 0.9781, 0.5592, 0.4518, 0.3448, 0.6644, 0.1469, 0.4823, - 0.3431, 0.4389, 0.7337, 0.3527, 0.9829, 0.5078, 0.8332, - 0.7767, 0.9645, 0.7365, 0.4985, 0.9411, 0.0544, 0.1772, - 0.3375, 0.9957, 0.2067, 0.3235, 0.4169, 0.2185, 0.9745, - 0.4024, 0.1951, 0.5401, 0.1889, 0.4891, 0.1494, 0.3864, - 0.9968, 0.5170, 0.5194, 0.2311, 0.7378, 0.0181, 0.8480, - 0.8709, 0.1149, 0.7166, 0.5193, 0.8973, 0.0220, 0.9337, - 0.7205, 0.2663, 0.4873, 0.6865, 0.7454, 0.1670, 0.6733, - 0.5028, 0.8469, 0.5015, 0.0183, 0.9101, 0.5044, 0.6164, - 0.0686, 0.2021, 0.0101, 0.2356, 0.6960, 0.5089, 0.6651, - 0.9123, 0.3294, 0.9018, 0.9354, 0.4913, 0.5484, 0.4661, - 0.9948, 0.4189, 0.4723, 0.9512, 0.6341, 0.6798, 0.5802, - 0.6623, 0.0188, 0.0129, 0.1565, 0.2382, 0.5939, 0.9749, - 0.6008, 0.1917, 0.4414, 0.2563, 0.1692, 0.9585, 0.0472, - 0.8804, 0.5964, 0.2177, 0.3908, 0.2373, 0.0395, 0.7888, - 0.2774, 0.1837, 0.8164, 0.9307, 0.6092, 0.7632, 0.0856, - 0.4941, 0.2709, 0.6765, 0.8735, 0.4097, 0.3394, 0.4724, - 0.7144, 0.5758, 0.2577, 0.2371, 0.8221, 0.9059, 0.5592, - 0.3289, 0.0171, 0.1330, 0.9826, 0.1215, 0.7990, 0.5552, - 0.1342, 0.7031, 0.1802, 0.8204, 0.3147, 0.1663, 0.8508, - 0.4793, 0.5702, 0.1389, 0.1455, 0.9150, 0.8457, 0.2787, - 0.6364, 0.0265, 0.9823, 0.1357, 0.7315, 0.5366, 0.2059, - 0.1967, 0.4234, 0.3059, 0.0470, 0.8504, 0.4796, 0.2864, - 0.4369, 0.4481, 0.7929, 0.8452, 0.0958, 0.5059, 0.6459, - 0.8388, 0.7761, 0.1938, 0.8212, 0.1919, 0.8336, 0.3582, - 0.0664, 0.5404, 0.5179, 0.4249, 0.9245, 0.0841, 0.7587, - 0.7759, 0.0382, 0.0647, 0.6744, 0.6055, 0.1518, 0.6047, - 0.4888, 0.3468, 0.0096, 0.8517, 0.8972, 0.6357, 0.5150, - 0.0879, 0.9902, 0.5975, 0.0921, 0.1553, 
0.8301, 0.3740, - 0.3374, 0.9082, 0.3028, 0.9956, 0.7728, 0.1467, 0.8331, - 0.1315, 0.6195, 0.4346, 0.3339, 0.9836, 0.5643, 0.7400, - 0.8152, 0.3695, 0.3467, 0.0410, 0.0075]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.4444, 0.7904, 0.6664, ..., 0.7984, 0.5487, 0.5407]) + col_indices=tensor([2210, 2525, 4536, 254, 1226, 2449, 366, 1857, 151, + 2332, 406, 4430, 1222, 2463, 3588, 656, 1103, 2311, + 1013, 3976, 880, 2041, 3191, 3088, 1879, 747, 3371, + 4894, 2609, 3908, 900, 4902, 4688, 3776, 3376, 2737, + 4590, 470, 3093, 1817, 4755, 3959, 4287, 630, 2428, + 2757, 360, 1388, 4642, 1391, 4373, 2149, 4995, 4605, + 138, 1495, 3641, 1529, 432, 3938, 1759, 3968, 3449, + 2974, 3369, 1688, 2021, 3236, 4100, 1872, 2820, 3746, + 4655, 4530, 1527, 2442, 4614, 521, 2103, 4817, 701, + 1635, 4684, 803, 3732, 2925, 1132, 4505, 4864, 4238, + 4904, 4953, 2910, 4209, 3612, 2547, 3584, 4316, 4371, + 562, 489, 2362, 2065, 357, 4342, 2502, 3366, 4412, + 148, 2949, 4224, 4965, 3331, 4672, 2948, 1267, 525, + 4213, 3160, 49, 1361, 3985, 2981, 2288, 4311, 444, + 3771, 2532, 3596, 2610, 1798, 4280, 1667, 3358, 2273, + 4922, 2110, 4355, 1367, 2691, 914, 2442, 4114, 223, + 3620, 607, 3967, 3857, 857, 2774, 1965, 4635, 3432, + 2893, 4269, 1299, 3401, 3189, 13, 3490, 4233, 1096, + 745, 500, 3687, 203, 4633, 4718, 1863, 1974, 457, + 112, 1833, 1067, 3003, 266, 4865, 2174, 764, 4748, + 155, 1848, 1885, 4786, 4984, 112, 2189, 2836, 4023, + 3638, 4342, 2691, 1967, 637, 3457, 2734, 3557, 3527, + 975, 4077, 3661, 3379, 4171, 2284, 761, 4953, 3300, + 3739, 2615, 640, 853, 2049, 2480, 1850, 2394, 2087, + 2379, 3026, 1773, 1329, 2252, 359, 1436, 1370, 1237, + 3652, 1161, 3702, 4397, 552, 1770, 2638, 4962, 2578, + 1640, 2730, 1779, 3267, 430, 3515, 708, 4874, 633, + 1717, 3769, 2802, 4336, 3129, 3620, 550]), + values=tensor([7.5476e-01, 9.9584e-01, 3.6944e-01, 2.4679e-01, + 9.9076e-01, 1.7729e-01, 6.6372e-01, 6.4903e-02, + 3.9639e-04, 4.5557e-02, 7.6435e-01, 3.3696e-01, + 7.9507e-01, 7.9506e-01, 6.0193e-01, 3.2090e-01, + 5.3296e-01, 5.6075e-01, 5.8782e-01, 3.8643e-02, + 8.6068e-01, 4.0137e-02, 5.9857e-01, 2.3873e-01, + 6.3625e-01, 5.8456e-01, 5.5945e-01, 4.0661e-01, + 3.9285e-01, 1.2126e-01, 8.6957e-01, 2.5272e-01, + 6.7490e-02, 7.5861e-01, 5.8089e-01, 2.4459e-01, + 1.3448e-01, 2.8918e-01, 4.3072e-01, 7.5514e-01, + 5.7395e-01, 9.4086e-01, 4.3654e-01, 8.9058e-01, + 7.1311e-01, 1.4119e-01, 2.7015e-01, 5.1550e-01, + 1.1333e-01, 7.6211e-01, 6.2497e-01, 4.6967e-01, + 4.0660e-01, 1.3804e-01, 1.4416e-01, 2.9245e-01, + 3.3462e-02, 6.6380e-01, 4.8630e-01, 5.1554e-01, + 4.7333e-01, 1.4924e-01, 2.9121e-01, 5.6760e-01, + 8.7723e-01, 8.9333e-02, 1.5279e-01, 5.7785e-01, + 1.9690e-01, 7.5747e-01, 7.1229e-01, 8.4007e-01, + 8.0107e-01, 2.7292e-01, 6.8333e-01, 7.4695e-01, + 3.6860e-01, 9.7172e-01, 2.8281e-01, 8.3940e-01, + 7.3482e-01, 3.2709e-01, 7.4569e-01, 5.8429e-01, + 7.9576e-01, 5.8223e-02, 1.0638e-01, 3.1381e-01, + 7.8762e-01, 4.9287e-01, 6.5419e-01, 1.8791e-01, + 6.8885e-01, 6.2038e-01, 7.8875e-01, 2.5038e-02, + 7.6963e-01, 5.6537e-01, 2.4250e-01, 4.7930e-01, + 9.5267e-01, 7.1191e-01, 2.2721e-01, 3.2039e-01, + 5.6071e-01, 8.6419e-01, 2.2398e-01, 7.7169e-01, + 1.1949e-01, 2.9153e-01, 9.1543e-01, 1.1730e-01, + 8.7919e-01, 3.2979e-01, 9.3135e-01, 6.2645e-01, + 3.4387e-01, 4.7381e-01, 8.0364e-01, 5.7183e-01, + 7.2355e-02, 1.4174e-01, 3.7309e-01, 8.5822e-01, + 9.6114e-01, 2.4436e-01, 2.7893e-01, 9.9309e-01, + 3.3339e-01, 9.8719e-01, 9.6610e-01, 4.5518e-01, + 1.7038e-01, 9.6316e-01, 7.6532e-01, 5.0100e-01, + 8.1306e-01, 
2.5377e-01, 6.7973e-01, 8.3657e-01, + 3.7796e-01, 2.9066e-02, 8.6427e-01, 5.2938e-01, + 6.1862e-01, 2.3821e-01, 8.0913e-01, 2.2104e-01, + 9.1922e-01, 3.4906e-01, 4.6560e-01, 1.4396e-01, + 7.8011e-03, 2.3663e-01, 1.2759e-01, 4.9720e-01, + 9.5077e-01, 9.5756e-01, 8.1511e-01, 6.2892e-02, + 1.6198e-01, 8.0922e-01, 4.5516e-01, 5.2463e-02, + 2.1733e-02, 9.7583e-01, 9.7310e-01, 7.1556e-01, + 9.8564e-01, 9.0518e-01, 3.7118e-01, 3.8267e-02, + 4.1911e-01, 6.1618e-01, 9.7580e-01, 7.4712e-01, + 3.7697e-01, 7.4031e-01, 1.8301e-02, 6.3343e-01, + 4.1614e-02, 6.8194e-01, 7.1821e-01, 6.3265e-01, + 5.6130e-01, 4.4068e-01, 8.0311e-02, 5.3380e-01, + 5.4050e-02, 8.4785e-01, 6.2972e-02, 9.3009e-01, + 7.2763e-01, 9.0116e-02, 8.8710e-01, 4.0608e-01, + 3.2173e-01, 8.2870e-02, 9.7686e-01, 3.8941e-01, + 6.8869e-01, 2.4992e-01, 3.4820e-01, 6.0610e-01, + 3.7411e-01, 5.5247e-01, 5.2007e-01, 3.5724e-01, + 1.2643e-01, 2.1454e-01, 7.9156e-01, 2.7779e-01, + 8.9329e-01, 2.1892e-01, 8.1643e-01, 9.1950e-03, + 5.4722e-01, 5.2425e-01, 9.4202e-01, 6.0481e-02, + 4.4659e-01, 9.6288e-01, 7.1410e-01, 7.8926e-01, + 2.4573e-01, 7.6478e-01, 1.7824e-02, 6.6916e-01, + 4.9662e-01, 8.3600e-01, 8.6759e-01, 2.9561e-01, + 2.9478e-01, 8.4606e-01, 9.1135e-01, 1.7097e-01, + 9.2858e-01, 8.2406e-01, 1.2371e-01, 7.0161e-01, + 8.4881e-01, 3.4036e-01, 9.0372e-01, 6.9052e-01, + 1.6420e-01, 2.3091e-02, 4.1037e-01, 1.6786e-01, + 2.5768e-01, 2.9182e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.9379, 0.1980, 0.4828, ..., 0.6873, 0.0867, 0.0276]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -77,80 +104,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.04127812385559082 seconds +Time: 0.018640756607055664 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25437', '-ss', '5000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5612115859985352} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '56328', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.2069127559661865} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([4513, 4874, 3027, 172, 3763, 1079, 239, 4529, 4955, - 608, 1066, 2737, 4445, 4271, 3658, 687, 3210, 2517, - 2807, 1524, 3953, 1461, 3621, 1960, 4173, 4521, 4104, - 3884, 561, 4646, 4065, 4845, 1534, 1821, 1059, 2352, - 1896, 3528, 4564, 4230, 603, 1565, 2747, 4968, 3463, - 2434, 4028, 3145, 1082, 2478, 4336, 3976, 44, 4019, - 3984, 3758, 2982, 3832, 4392, 3709, 4379, 1452, 4965, - 778, 3505, 1776, 1029, 1733, 4085, 16, 3544, 611, - 487, 4244, 3327, 1553, 300, 454, 615, 1019, 1638, - 2606, 4708, 2838, 1644, 2517, 3419, 4512, 1409, 4514, - 98, 975, 2304, 4532, 578, 3866, 4888, 1913, 1427, - 2350, 3190, 471, 3972, 3754, 1263, 2752, 1137, 2063, - 1340, 4788, 4731, 1452, 253, 4150, 2425, 3321, 2705, - 2043, 4482, 3426, 39, 1965, 4794, 2137, 4312, 4929, - 4149, 4158, 2375, 1520, 2947, 1537, 114, 3706, 3423, - 2525, 3424, 2121, 1841, 2281, 281, 582, 587, 2819, - 1328, 2949, 349, 1435, 37, 2197, 94, 1382, 1590, - 4694, 734, 27, 2763, 3346, 997, 4500, 432, 3423, - 1345, 2417, 4661, 2561, 4893, 2705, 322, 3467, 246, - 371, 460, 4416, 3517, 2533, 3314, 4727, 4068, 2071, - 89, 3259, 446, 2255, 4308, 1260, 2389, 3030, 3160, - 928, 1241, 1770, 729, 2255, 2572, 3942, 459, 250, - 4351, 2164, 642, 3643, 4211, 2537, 3823, 703, 3549, - 2055, 653, 1366, 4832, 1570, 1682, 1477, 4511, 1980, - 2756, 1220, 4509, 2420, 623, 993, 3225, 1492, 4751, - 3238, 2111, 484, 1439, 2447, 2514, 4082, 4164, 1688, - 2786, 558, 866, 946, 3251, 3488, 4976, 3700, 3363, - 3272, 1906, 4483, 1350, 885, 3495, 1900]), - values=tensor([0.7997, 0.6693, 0.9766, 0.8132, 0.4494, 0.7325, 0.7457, - 0.7556, 0.9866, 0.9047, 0.1758, 0.6478, 0.3725, 0.9918, - 0.9691, 0.3899, 0.4189, 0.2206, 0.4455, 0.8056, 0.8850, - 0.7612, 0.3869, 0.5377, 0.9465, 0.2694, 0.7714, 0.3970, - 0.0074, 0.7396, 0.6772, 0.0796, 0.1408, 0.9831, 0.1256, - 0.1621, 0.7601, 0.7241, 0.4318, 0.4425, 0.2980, 0.3108, - 0.6562, 0.1074, 0.6925, 0.0813, 0.3905, 0.9992, 0.5983, - 0.9779, 0.9273, 0.2532, 0.6041, 0.4773, 0.3669, 0.1258, - 0.1311, 0.3773, 0.8821, 0.3058, 0.0597, 0.9674, 0.6666, - 0.7396, 0.6305, 0.7148, 0.8106, 0.4078, 0.2370, 0.3958, - 0.1089, 0.2905, 0.3401, 0.3586, 0.4906, 0.7927, 0.7147, - 0.3090, 0.2058, 0.2236, 0.1502, 0.7530, 0.1376, 0.7384, - 0.1472, 0.7286, 0.2760, 0.2427, 0.0512, 0.9512, 0.6082, - 0.0272, 0.0487, 0.6103, 0.0226, 0.4108, 0.7461, 0.2695, - 0.7017, 0.7638, 0.0407, 0.0211, 0.7975, 0.8394, 0.7173, - 0.8557, 0.1529, 0.3846, 0.8527, 0.7100, 0.7245, 0.3654, - 0.7311, 0.7142, 0.9375, 0.5321, 0.1856, 0.8364, 0.4430, - 0.4779, 0.6050, 0.4883, 0.1682, 0.5943, 0.1945, 0.2069, - 0.1270, 0.4728, 0.3645, 0.8206, 0.5488, 0.4640, 0.7881, - 0.3157, 0.0026, 0.7089, 0.5313, 0.9513, 0.9856, 0.8065, - 0.3755, 0.7881, 0.5548, 0.8650, 0.7960, 0.2190, 0.4494, - 0.4403, 0.9214, 0.6868, 0.3061, 0.1264, 0.0552, 0.0018, - 0.0148, 0.0640, 0.2837, 0.7551, 0.1142, 0.0916, 0.0958, - 0.1039, 0.5333, 0.1734, 0.0796, 0.6961, 0.6442, 0.5756, - 0.9631, 0.0278, 0.9861, 0.3260, 0.4440, 0.2476, 0.8157, - 0.9861, 0.8239, 0.5108, 0.1860, 0.3033, 0.0741, 0.8427, - 0.8545, 0.2806, 0.0718, 0.7219, 0.2946, 0.3868, 0.6450, - 0.1470, 0.4273, 0.0373, 0.4562, 0.7872, 0.0251, 0.8634, - 0.3919, 0.4240, 0.0414, 0.7931, 0.4445, 0.1790, 0.3828, - 0.7421, 0.3011, 0.1605, 0.1136, 0.9314, 0.3920, 0.9924, - 0.0352, 0.6870, 0.4156, 0.4859, 0.8722, 0.6951, 0.2675, - 0.8061, 0.5063, 0.5828, 0.5303, 0.7965, 0.2479, 0.8340, - 0.3931, 0.9858, 0.1292, 0.6472, 0.7465, 0.0833, 
0.0197, - 0.8484, 0.0914, 0.1498, 0.8894, 0.1548, 0.5990, 0.0393, - 0.7324, 0.7542, 0.7672, 0.8989, 0.1970, 0.1932, 0.9622, - 0.5932, 0.9630, 0.7336, 0.7453, 0.9290]), + col_indices=tensor([2739, 3202, 3055, 578, 567, 4686, 1473, 1022, 3270, + 3533, 1234, 506, 53, 1733, 4610, 1642, 1681, 2437, + 4098, 4708, 873, 1211, 283, 225, 4041, 2130, 3507, + 1689, 3878, 1145, 102, 4439, 3700, 4800, 1514, 4464, + 361, 1389, 2798, 3635, 1079, 546, 3169, 194, 1242, + 2303, 3833, 3675, 4440, 1193, 992, 2678, 2434, 1123, + 3960, 4738, 1570, 1609, 194, 1746, 1112, 3326, 4101, + 3988, 1765, 1243, 160, 2907, 4330, 3350, 3626, 319, + 1841, 4949, 1658, 4991, 4778, 2811, 2516, 2820, 2110, + 810, 2358, 2710, 137, 1629, 3317, 593, 1264, 3861, + 481, 1697, 4791, 2170, 570, 3448, 4121, 3526, 2487, + 1248, 673, 2034, 2225, 2560, 4585, 80, 4803, 1268, + 3101, 1016, 3468, 3442, 2146, 2599, 3246, 1650, 195, + 2194, 2682, 3364, 3234, 3624, 391, 1281, 4759, 2020, + 3538, 1029, 3451, 2215, 124, 4615, 461, 2016, 1450, + 2903, 1890, 1305, 4997, 4128, 449, 1396, 3693, 3709, + 2624, 690, 2685, 3084, 3516, 2519, 290, 4173, 3803, + 1743, 138, 1120, 1627, 3299, 1669, 105, 410, 689, + 464, 3723, 4716, 2130, 2697, 664, 2230, 4504, 3770, + 3410, 1309, 4287, 3302, 2009, 4633, 2459, 3174, 4471, + 3966, 1401, 914, 1795, 561, 2195, 1691, 760, 3533, + 1434, 4873, 4007, 4410, 2191, 3472, 3908, 3738, 3570, + 3532, 2156, 1233, 2736, 3638, 1415, 58, 334, 2921, + 2011, 262, 342, 1056, 2799, 1861, 681, 822, 2151, + 799, 4155, 1791, 3983, 662, 4605, 3645, 973, 3587, + 893, 1154, 3743, 3949, 941, 3032, 912, 726, 3824, + 883, 2346, 4532, 3991, 388, 4355, 1719, 4522, 1837, + 3604, 3347, 846, 2551, 437, 2642, 2900]), + values=tensor([0.8415, 0.8497, 0.5325, 0.7346, 0.8594, 0.9961, 0.4147, + 0.8812, 0.2603, 0.6584, 0.2870, 0.5384, 0.6003, 0.4548, + 0.1589, 0.6842, 0.5296, 0.2845, 0.9822, 0.3594, 0.0248, + 0.8764, 0.5750, 0.5945, 0.3458, 0.2786, 0.0410, 0.3660, + 0.7881, 0.4425, 0.0031, 0.4864, 0.3525, 0.4086, 0.3531, + 0.6835, 0.7236, 0.6206, 0.9715, 0.7456, 0.3004, 0.2403, + 0.4011, 0.7562, 0.1134, 0.0891, 0.1828, 0.5385, 0.0631, + 0.0780, 0.4577, 0.6325, 0.1179, 0.2074, 0.7301, 0.5777, + 0.0758, 0.5555, 0.5263, 0.8032, 0.7136, 0.4102, 0.3320, + 0.9061, 0.1644, 0.3754, 0.6712, 0.0240, 0.6623, 0.1917, + 0.6286, 0.0125, 0.8187, 0.9459, 0.5234, 0.3696, 0.8193, + 0.5105, 0.0832, 0.7938, 0.9363, 0.6516, 0.8234, 0.0815, + 0.6999, 0.7812, 0.4808, 0.8375, 0.3946, 0.8570, 0.0968, + 0.4370, 0.0089, 0.8479, 0.3085, 0.5083, 0.5439, 0.4513, + 0.1871, 0.8664, 0.3799, 0.5345, 0.9797, 0.5012, 0.8101, + 0.8701, 0.8009, 0.7757, 0.3170, 0.4455, 0.7223, 0.4012, + 0.8929, 0.3837, 0.5355, 0.7529, 0.6434, 0.0720, 0.3665, + 0.2879, 0.5139, 0.1957, 0.8268, 0.8936, 0.6475, 0.1279, + 0.8046, 0.5301, 0.3773, 0.0479, 0.5169, 0.9907, 0.3845, + 0.0939, 0.4771, 0.0822, 0.8391, 0.1262, 0.1913, 0.1918, + 0.1022, 0.3678, 0.6984, 0.9519, 0.6428, 0.0219, 0.8751, + 0.4062, 0.2851, 0.0969, 0.8054, 0.1547, 0.1901, 0.6736, + 0.1744, 0.5488, 0.5424, 0.2996, 0.4545, 0.8678, 0.6223, + 0.1872, 0.8862, 0.9836, 0.0042, 0.7344, 0.2182, 0.4164, + 0.9688, 0.0996, 0.6496, 0.7440, 0.1113, 0.5514, 0.4079, + 0.6311, 0.5176, 0.2140, 0.1630, 0.3379, 0.9718, 0.7356, + 0.1008, 0.7986, 0.6988, 0.8781, 0.4848, 0.6436, 0.9564, + 0.7858, 0.2942, 0.1256, 0.9203, 0.8771, 0.0282, 0.9093, + 0.5221, 0.4368, 0.9846, 0.3459, 0.8617, 0.5427, 0.1517, + 0.0640, 0.5443, 0.9876, 0.7632, 0.9902, 0.4944, 0.4427, + 0.6485, 0.6862, 0.4486, 0.8587, 0.5061, 0.2447, 0.9927, + 0.3863, 0.5940, 0.2797, 0.0030, 0.0565, 0.4906, 
0.3379, + 0.1449, 0.8743, 0.9193, 0.2953, 0.8320, 0.5265, 0.4018, + 0.6287, 0.1136, 0.0618, 0.5379, 0.8219, 0.8409, 0.5310, + 0.3028, 0.9560, 0.1131, 0.2959, 0.3203, 0.5288, 0.5615, + 0.1631, 0.0795, 0.4559, 0.7522, 0.3620]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.8285, 0.4420, 0.7749, ..., 0.7287, 0.4578, 0.9435]) +tensor([0.6507, 0.8665, 0.7088, ..., 0.1660, 0.7141, 0.7532]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -158,80 +185,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.5612115859985352 seconds +Time: 1.2069127559661865 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '475914', '-ss', '5000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.308423519134521} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '490047', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.476571798324585} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([ 698, 3054, 4592, 1629, 4601, 3886, 4804, 318, 1415, - 433, 1872, 1429, 1550, 3511, 4304, 3637, 1101, 2710, - 4079, 541, 1194, 97, 2807, 2811, 3206, 3991, 2286, - 2681, 4835, 38, 1361, 1702, 1987, 1831, 485, 140, - 4362, 3450, 2222, 295, 3370, 591, 1718, 4950, 1639, - 3575, 1461, 4389, 3994, 2356, 1105, 1104, 1761, 4007, - 4669, 3008, 4553, 4279, 1484, 3371, 4533, 863, 587, - 1360, 4727, 3879, 832, 240, 2132, 2582, 1372, 4190, - 3588, 2592, 4310, 2614, 1567, 1660, 604, 4488, 1313, - 3610, 3188, 2899, 3261, 1055, 1112, 4180, 1426, 3909, - 3409, 4510, 3025, 703, 1794, 225, 659, 2212, 1407, - 4739, 1542, 2238, 2858, 4535, 4405, 3841, 3716, 4156, - 4416, 2641, 4372, 1051, 980, 3180, 2782, 497, 394, - 605, 971, 455, 3831, 523, 3209, 733, 4726, 2765, - 226, 3470, 1720, 2299, 2372, 1447, 3202, 1153, 3498, - 4698, 2998, 1466, 363, 4324, 2506, 2090, 2285, 2204, - 303, 4406, 1500, 1826, 4080, 1378, 1816, 980, 66, - 4392, 4266, 2875, 275, 1828, 3108, 989, 3152, 2508, - 1550, 2008, 1066, 529, 18, 1, 3369, 4893, 2556, - 1068, 1671, 1086, 2667, 3511, 2891, 1749, 4140, 3150, - 43, 1921, 1837, 980, 1293, 1802, 2390, 249, 768, - 4105, 1721, 1435, 4658, 2745, 4338, 2320, 3879, 2641, - 3097, 1585, 3887, 4913, 4556, 2794, 2705, 299, 61, - 2384, 1270, 740, 2129, 3392, 3774, 4644, 4192, 4506, - 149, 4748, 3571, 1159, 4587, 1920, 982, 2347, 1650, - 1882, 1955, 3910, 197, 4484, 3655, 4387, 968, 3452, - 181, 2166, 1855, 2452, 189, 3074, 2522, 3426, 135, - 1267, 666, 4928, 2908, 4053, 4593, 448]), - values=tensor([0.8620, 0.7365, 0.7531, 0.5477, 0.5295, 0.4268, 0.2903, - 0.3296, 0.4852, 0.0295, 0.6605, 0.4770, 0.1707, 0.4638, - 0.1229, 0.6813, 0.2237, 0.9317, 0.9546, 
0.4741, 0.7915, - 0.3909, 0.2549, 0.7853, 0.8207, 0.9924, 0.8328, 0.0293, - 0.3281, 0.4028, 0.9335, 0.8141, 0.3687, 0.4243, 0.5386, - 0.2123, 0.0695, 0.2792, 0.2453, 0.4935, 0.1675, 0.4387, - 0.5777, 0.6384, 0.5870, 0.2050, 0.9519, 0.0161, 0.0462, - 0.8312, 0.5114, 0.5703, 0.5170, 0.0110, 0.6229, 0.7339, - 0.2337, 0.7709, 0.7844, 0.2062, 0.2004, 0.9990, 0.4625, - 0.4209, 0.7064, 0.0680, 0.6043, 0.0073, 0.1383, 0.5359, - 0.1641, 0.0316, 0.0479, 0.9788, 0.0764, 0.0936, 0.1603, - 0.1581, 0.8855, 0.4285, 0.9101, 0.6054, 0.5164, 0.1839, - 0.2783, 0.0513, 0.4451, 0.7375, 0.3333, 0.1348, 0.3539, - 0.0102, 0.1620, 0.4960, 0.1201, 0.8615, 0.2151, 0.0085, - 0.8133, 0.8439, 0.5713, 0.6595, 0.6728, 0.2738, 0.1487, - 0.3205, 0.6933, 0.0963, 0.6731, 0.6903, 0.0043, 0.7900, - 0.7911, 0.9496, 0.4295, 0.5758, 0.2659, 0.3025, 0.6145, - 0.1511, 0.7265, 0.9480, 0.6751, 0.8138, 0.6361, 0.4149, - 0.8899, 0.3218, 0.3413, 0.2054, 0.1555, 0.4398, 0.9946, - 0.6820, 0.1566, 0.7238, 0.9562, 0.1023, 0.0696, 0.9724, - 0.8182, 0.0031, 0.0289, 0.5187, 0.0063, 0.5262, 0.9232, - 0.3694, 0.0136, 0.3019, 0.9633, 0.8770, 0.0826, 0.1792, - 0.6372, 0.5719, 0.7979, 0.1369, 0.9923, 0.7514, 0.0627, - 0.3337, 0.0132, 0.9026, 0.1169, 0.4065, 0.7302, 0.3087, - 0.4276, 0.6874, 0.0705, 0.4727, 0.3286, 0.7188, 0.3727, - 0.5310, 0.1979, 0.9773, 0.3076, 0.3372, 0.2546, 0.3340, - 0.4532, 0.0609, 0.2279, 0.8651, 0.8162, 0.8251, 0.1216, - 0.3049, 0.0805, 0.1284, 0.1859, 0.3690, 0.3435, 0.7762, - 0.7083, 0.6529, 0.8556, 0.1421, 0.4528, 0.4045, 0.9221, - 0.6914, 0.6437, 0.8815, 0.0609, 0.9680, 0.2115, 0.5295, - 0.5418, 0.8646, 0.6735, 0.1927, 0.2578, 0.4564, 0.0603, - 0.1414, 0.3382, 0.0772, 0.6503, 0.3586, 0.8775, 0.8840, - 0.9215, 0.4825, 0.2733, 0.0423, 0.6825, 0.8144, 0.0837, - 0.2758, 0.6188, 0.3276, 0.0762, 0.7932, 0.5621, 0.9067, - 0.7339, 0.1976, 0.8462, 0.5736, 0.2659, 0.7486, 0.3053, - 0.7429, 0.4272, 0.8072, 0.5183, 0.4677]), + col_indices=tensor([2879, 2548, 4235, 2177, 1968, 4792, 2118, 3824, 3830, + 419, 2935, 858, 3221, 1158, 3295, 4115, 4212, 4937, + 4105, 1905, 430, 1434, 4104, 172, 3831, 777, 1804, + 1906, 3629, 4755, 3973, 3704, 574, 3092, 296, 4473, + 178, 1394, 364, 1101, 1225, 72, 2310, 4099, 4749, + 4433, 3811, 1052, 4063, 946, 4833, 3504, 2256, 3467, + 380, 2912, 3726, 4501, 3719, 1032, 1375, 4002, 185, + 3479, 2598, 4766, 3328, 793, 1360, 4472, 484, 3670, + 533, 4829, 1260, 1285, 2295, 963, 1454, 4716, 3791, + 4404, 1941, 4961, 4734, 3669, 4605, 847, 1484, 3477, + 947, 3738, 664, 4986, 4204, 3808, 3474, 2661, 3309, + 3369, 153, 935, 2927, 3740, 791, 4992, 711, 2043, + 194, 4833, 79, 4635, 839, 299, 575, 1530, 2233, + 4374, 1754, 4418, 1039, 4935, 1345, 3862, 3955, 777, + 3857, 4284, 1102, 3982, 3631, 1213, 324, 1586, 2281, + 4410, 4374, 2992, 3918, 2289, 1015, 2745, 4492, 594, + 4664, 3401, 2974, 3118, 4743, 4680, 4438, 2693, 4451, + 1606, 3511, 4742, 2749, 2647, 1217, 405, 537, 1399, + 199, 4498, 2424, 286, 693, 270, 889, 2160, 4138, + 3608, 1489, 2742, 4553, 2358, 2877, 4564, 3315, 4926, + 4195, 4940, 4664, 172, 3985, 745, 3, 3830, 4058, + 2581, 776, 405, 898, 2791, 3728, 3234, 1387, 4346, + 295, 3823, 1053, 2346, 4176, 1488, 434, 1655, 1692, + 2590, 3984, 3595, 999, 1056, 2826, 3138, 3494, 2904, + 1058, 2210, 3296, 2486, 2896, 138, 3985, 1060, 2585, + 4071, 3584, 758, 4715, 2930, 1732, 1000, 3747, 4019, + 1473, 2790, 3984, 2969, 781, 585, 781, 4422, 445, + 2047, 2372, 1079, 2925, 955, 284, 951]), + values=tensor([0.2472, 0.6408, 0.2221, 0.5181, 0.6350, 0.9033, 0.8956, + 0.7487, 0.8455, 0.1032, 0.8217, 0.9140, 
0.5672, 0.0509, + 0.3161, 0.8507, 0.6291, 0.4677, 0.4913, 0.1587, 0.9565, + 0.4922, 0.5437, 0.6340, 0.3083, 0.8605, 0.8132, 0.4845, + 0.2069, 0.0097, 0.3167, 0.5484, 0.6282, 0.4620, 0.0987, + 0.6949, 0.7304, 0.3076, 0.4382, 0.8483, 0.5431, 0.3061, + 0.7208, 0.7528, 0.0727, 0.1586, 0.4223, 0.7512, 0.1050, + 0.2044, 0.1480, 0.8843, 0.6474, 0.9923, 0.4300, 0.7404, + 0.8099, 0.0456, 0.9256, 0.2564, 0.9005, 0.6348, 0.2339, + 0.7885, 0.6327, 0.3555, 0.8037, 0.0679, 0.3358, 0.4488, + 0.2846, 0.4860, 0.3112, 0.9987, 0.3595, 0.5644, 0.7766, + 0.8640, 0.9246, 0.3503, 0.1256, 0.5337, 0.1297, 0.5515, + 0.6635, 0.5154, 0.0632, 0.7639, 0.9128, 0.6392, 0.8302, + 0.4439, 0.2348, 0.2796, 0.1102, 0.8665, 0.0654, 0.2969, + 0.6779, 0.5154, 0.8782, 0.6349, 0.2534, 0.1900, 0.1089, + 0.2132, 0.1606, 0.4838, 0.5131, 0.2693, 0.2911, 0.3925, + 0.1298, 0.2734, 0.2859, 0.2402, 0.7284, 0.6809, 0.1017, + 0.5006, 0.7387, 0.5250, 0.6054, 0.3207, 0.8317, 0.0063, + 0.8124, 0.1682, 0.9358, 0.7971, 0.6026, 0.8063, 0.0277, + 0.9101, 0.2706, 0.6666, 0.9401, 0.9468, 0.6874, 0.1204, + 0.1461, 0.6930, 0.8474, 0.3668, 0.2632, 0.3152, 0.9470, + 0.1754, 0.9084, 0.1945, 0.9896, 0.9045, 0.7615, 0.9816, + 0.3493, 0.9428, 0.7718, 0.8234, 0.0894, 0.6485, 0.0109, + 0.8460, 0.6278, 0.2100, 0.7409, 0.3213, 0.8051, 0.7234, + 0.6970, 0.4116, 0.6352, 0.0029, 0.6761, 0.6077, 0.8527, + 0.5089, 0.6432, 0.8485, 0.4372, 0.2321, 0.3474, 0.5371, + 0.5527, 0.4008, 0.4177, 0.1217, 0.7348, 0.6489, 0.0080, + 0.9129, 0.6431, 0.1973, 0.6075, 0.8143, 0.0232, 0.3884, + 0.5022, 0.1042, 0.6880, 0.6811, 0.2932, 0.7890, 0.6577, + 0.7526, 0.0327, 0.3684, 0.3929, 0.5193, 0.6389, 0.8937, + 0.2414, 0.4405, 0.1415, 0.9714, 0.7709, 0.7504, 0.6900, + 0.1775, 0.9758, 0.6293, 0.6471, 0.1301, 0.8606, 0.1224, + 0.5565, 0.3881, 0.8292, 0.5099, 0.9753, 0.4458, 0.1070, + 0.9912, 0.6060, 0.3669, 0.5306, 0.4360, 0.7428, 0.4271, + 0.8779, 0.6385, 0.2427, 0.9170, 0.1454, 0.1927, 0.3375, + 0.3438, 0.8555, 0.8565, 0.6394, 0.2035]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.4130, 0.3379, 0.7498, ..., 0.0848, 0.9618, 0.5893]) +tensor([0.4641, 0.6633, 0.6004, ..., 0.4285, 0.2216, 0.5300]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -239,77 +266,77 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.308423519134521 seconds +Time: 10.476571798324585 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([ 698, 3054, 4592, 1629, 4601, 3886, 4804, 318, 1415, - 433, 1872, 1429, 1550, 3511, 4304, 3637, 1101, 2710, - 4079, 541, 1194, 97, 2807, 2811, 3206, 3991, 2286, - 2681, 4835, 38, 1361, 1702, 1987, 1831, 485, 140, - 4362, 3450, 2222, 295, 3370, 591, 1718, 4950, 1639, - 3575, 1461, 4389, 3994, 2356, 1105, 1104, 1761, 4007, - 4669, 3008, 4553, 4279, 1484, 3371, 4533, 863, 587, - 1360, 4727, 3879, 832, 240, 2132, 2582, 1372, 4190, - 3588, 2592, 4310, 2614, 1567, 1660, 604, 4488, 1313, - 3610, 3188, 2899, 3261, 1055, 1112, 4180, 1426, 3909, - 3409, 4510, 3025, 703, 1794, 225, 659, 2212, 1407, - 4739, 1542, 2238, 2858, 4535, 4405, 3841, 3716, 4156, - 4416, 2641, 4372, 1051, 980, 3180, 2782, 497, 394, - 605, 971, 455, 3831, 523, 3209, 733, 4726, 2765, - 226, 3470, 1720, 2299, 2372, 1447, 3202, 1153, 3498, - 4698, 2998, 1466, 363, 4324, 2506, 2090, 2285, 2204, - 303, 4406, 1500, 1826, 4080, 1378, 1816, 980, 66, - 4392, 4266, 2875, 275, 1828, 3108, 989, 3152, 2508, - 1550, 2008, 1066, 529, 18, 1, 3369, 4893, 2556, - 1068, 1671, 1086, 2667, 3511, 2891, 1749, 4140, 3150, - 43, 1921, 1837, 980, 1293, 1802, 2390, 249, 768, - 4105, 1721, 1435, 4658, 2745, 4338, 2320, 3879, 2641, - 3097, 1585, 3887, 4913, 4556, 2794, 2705, 299, 61, - 2384, 1270, 740, 2129, 3392, 3774, 4644, 4192, 4506, - 149, 4748, 3571, 1159, 4587, 1920, 982, 2347, 1650, - 1882, 1955, 3910, 197, 4484, 3655, 4387, 968, 3452, - 181, 2166, 1855, 2452, 189, 3074, 2522, 3426, 135, - 1267, 666, 4928, 2908, 4053, 4593, 448]), - values=tensor([0.8620, 0.7365, 0.7531, 0.5477, 0.5295, 0.4268, 0.2903, - 0.3296, 0.4852, 0.0295, 0.6605, 0.4770, 0.1707, 0.4638, - 0.1229, 0.6813, 0.2237, 0.9317, 0.9546, 0.4741, 0.7915, - 0.3909, 0.2549, 0.7853, 0.8207, 0.9924, 0.8328, 0.0293, - 0.3281, 0.4028, 0.9335, 0.8141, 0.3687, 0.4243, 0.5386, - 0.2123, 0.0695, 0.2792, 0.2453, 0.4935, 0.1675, 0.4387, - 0.5777, 0.6384, 0.5870, 0.2050, 0.9519, 0.0161, 0.0462, - 0.8312, 0.5114, 0.5703, 0.5170, 0.0110, 0.6229, 0.7339, - 0.2337, 0.7709, 0.7844, 0.2062, 0.2004, 0.9990, 0.4625, - 0.4209, 0.7064, 0.0680, 0.6043, 0.0073, 0.1383, 0.5359, - 0.1641, 0.0316, 0.0479, 0.9788, 0.0764, 0.0936, 0.1603, - 0.1581, 0.8855, 0.4285, 0.9101, 0.6054, 0.5164, 0.1839, - 0.2783, 0.0513, 0.4451, 0.7375, 0.3333, 0.1348, 0.3539, - 0.0102, 0.1620, 0.4960, 0.1201, 0.8615, 0.2151, 0.0085, - 0.8133, 0.8439, 0.5713, 0.6595, 0.6728, 0.2738, 0.1487, - 0.3205, 0.6933, 0.0963, 0.6731, 0.6903, 0.0043, 0.7900, - 0.7911, 0.9496, 0.4295, 0.5758, 0.2659, 0.3025, 0.6145, - 0.1511, 0.7265, 0.9480, 0.6751, 0.8138, 0.6361, 0.4149, - 0.8899, 0.3218, 0.3413, 0.2054, 0.1555, 0.4398, 0.9946, - 0.6820, 0.1566, 0.7238, 0.9562, 0.1023, 0.0696, 0.9724, - 0.8182, 0.0031, 0.0289, 0.5187, 0.0063, 0.5262, 0.9232, - 0.3694, 0.0136, 0.3019, 0.9633, 0.8770, 0.0826, 0.1792, - 0.6372, 0.5719, 0.7979, 0.1369, 0.9923, 0.7514, 0.0627, - 0.3337, 0.0132, 0.9026, 0.1169, 0.4065, 0.7302, 0.3087, - 0.4276, 0.6874, 0.0705, 0.4727, 0.3286, 0.7188, 0.3727, - 0.5310, 0.1979, 0.9773, 0.3076, 0.3372, 0.2546, 0.3340, - 0.4532, 0.0609, 0.2279, 0.8651, 0.8162, 0.8251, 0.1216, - 0.3049, 0.0805, 0.1284, 0.1859, 0.3690, 0.3435, 0.7762, - 0.7083, 0.6529, 0.8556, 0.1421, 0.4528, 0.4045, 0.9221, - 0.6914, 0.6437, 0.8815, 0.0609, 0.9680, 0.2115, 0.5295, - 0.5418, 0.8646, 0.6735, 0.1927, 0.2578, 0.4564, 0.0603, - 0.1414, 0.3382, 0.0772, 0.6503, 0.3586, 0.8775, 0.8840, - 0.9215, 0.4825, 0.2733, 0.0423, 0.6825, 0.8144, 
0.0837, - 0.2758, 0.6188, 0.3276, 0.0762, 0.7932, 0.5621, 0.9067, - 0.7339, 0.1976, 0.8462, 0.5736, 0.2659, 0.7486, 0.3053, - 0.7429, 0.4272, 0.8072, 0.5183, 0.4677]), + col_indices=tensor([2879, 2548, 4235, 2177, 1968, 4792, 2118, 3824, 3830, + 419, 2935, 858, 3221, 1158, 3295, 4115, 4212, 4937, + 4105, 1905, 430, 1434, 4104, 172, 3831, 777, 1804, + 1906, 3629, 4755, 3973, 3704, 574, 3092, 296, 4473, + 178, 1394, 364, 1101, 1225, 72, 2310, 4099, 4749, + 4433, 3811, 1052, 4063, 946, 4833, 3504, 2256, 3467, + 380, 2912, 3726, 4501, 3719, 1032, 1375, 4002, 185, + 3479, 2598, 4766, 3328, 793, 1360, 4472, 484, 3670, + 533, 4829, 1260, 1285, 2295, 963, 1454, 4716, 3791, + 4404, 1941, 4961, 4734, 3669, 4605, 847, 1484, 3477, + 947, 3738, 664, 4986, 4204, 3808, 3474, 2661, 3309, + 3369, 153, 935, 2927, 3740, 791, 4992, 711, 2043, + 194, 4833, 79, 4635, 839, 299, 575, 1530, 2233, + 4374, 1754, 4418, 1039, 4935, 1345, 3862, 3955, 777, + 3857, 4284, 1102, 3982, 3631, 1213, 324, 1586, 2281, + 4410, 4374, 2992, 3918, 2289, 1015, 2745, 4492, 594, + 4664, 3401, 2974, 3118, 4743, 4680, 4438, 2693, 4451, + 1606, 3511, 4742, 2749, 2647, 1217, 405, 537, 1399, + 199, 4498, 2424, 286, 693, 270, 889, 2160, 4138, + 3608, 1489, 2742, 4553, 2358, 2877, 4564, 3315, 4926, + 4195, 4940, 4664, 172, 3985, 745, 3, 3830, 4058, + 2581, 776, 405, 898, 2791, 3728, 3234, 1387, 4346, + 295, 3823, 1053, 2346, 4176, 1488, 434, 1655, 1692, + 2590, 3984, 3595, 999, 1056, 2826, 3138, 3494, 2904, + 1058, 2210, 3296, 2486, 2896, 138, 3985, 1060, 2585, + 4071, 3584, 758, 4715, 2930, 1732, 1000, 3747, 4019, + 1473, 2790, 3984, 2969, 781, 585, 781, 4422, 445, + 2047, 2372, 1079, 2925, 955, 284, 951]), + values=tensor([0.2472, 0.6408, 0.2221, 0.5181, 0.6350, 0.9033, 0.8956, + 0.7487, 0.8455, 0.1032, 0.8217, 0.9140, 0.5672, 0.0509, + 0.3161, 0.8507, 0.6291, 0.4677, 0.4913, 0.1587, 0.9565, + 0.4922, 0.5437, 0.6340, 0.3083, 0.8605, 0.8132, 0.4845, + 0.2069, 0.0097, 0.3167, 0.5484, 0.6282, 0.4620, 0.0987, + 0.6949, 0.7304, 0.3076, 0.4382, 0.8483, 0.5431, 0.3061, + 0.7208, 0.7528, 0.0727, 0.1586, 0.4223, 0.7512, 0.1050, + 0.2044, 0.1480, 0.8843, 0.6474, 0.9923, 0.4300, 0.7404, + 0.8099, 0.0456, 0.9256, 0.2564, 0.9005, 0.6348, 0.2339, + 0.7885, 0.6327, 0.3555, 0.8037, 0.0679, 0.3358, 0.4488, + 0.2846, 0.4860, 0.3112, 0.9987, 0.3595, 0.5644, 0.7766, + 0.8640, 0.9246, 0.3503, 0.1256, 0.5337, 0.1297, 0.5515, + 0.6635, 0.5154, 0.0632, 0.7639, 0.9128, 0.6392, 0.8302, + 0.4439, 0.2348, 0.2796, 0.1102, 0.8665, 0.0654, 0.2969, + 0.6779, 0.5154, 0.8782, 0.6349, 0.2534, 0.1900, 0.1089, + 0.2132, 0.1606, 0.4838, 0.5131, 0.2693, 0.2911, 0.3925, + 0.1298, 0.2734, 0.2859, 0.2402, 0.7284, 0.6809, 0.1017, + 0.5006, 0.7387, 0.5250, 0.6054, 0.3207, 0.8317, 0.0063, + 0.8124, 0.1682, 0.9358, 0.7971, 0.6026, 0.8063, 0.0277, + 0.9101, 0.2706, 0.6666, 0.9401, 0.9468, 0.6874, 0.1204, + 0.1461, 0.6930, 0.8474, 0.3668, 0.2632, 0.3152, 0.9470, + 0.1754, 0.9084, 0.1945, 0.9896, 0.9045, 0.7615, 0.9816, + 0.3493, 0.9428, 0.7718, 0.8234, 0.0894, 0.6485, 0.0109, + 0.8460, 0.6278, 0.2100, 0.7409, 0.3213, 0.8051, 0.7234, + 0.6970, 0.4116, 0.6352, 0.0029, 0.6761, 0.6077, 0.8527, + 0.5089, 0.6432, 0.8485, 0.4372, 0.2321, 0.3474, 0.5371, + 0.5527, 0.4008, 0.4177, 0.1217, 0.7348, 0.6489, 0.0080, + 0.9129, 0.6431, 0.1973, 0.6075, 0.8143, 0.0232, 0.3884, + 0.5022, 0.1042, 0.6880, 0.6811, 0.2932, 0.7890, 0.6577, + 0.7526, 0.0327, 0.3684, 0.3929, 0.5193, 0.6389, 0.8937, + 0.2414, 0.4405, 0.1415, 0.9714, 0.7709, 0.7504, 0.6900, + 0.1775, 0.9758, 0.6293, 0.6471, 0.1301, 0.8606, 
0.1224, + 0.5565, 0.3881, 0.8292, 0.5099, 0.9753, 0.4458, 0.1070, + 0.9912, 0.6060, 0.3669, 0.5306, 0.4360, 0.7428, 0.4271, + 0.8779, 0.6385, 0.2427, 0.9170, 0.1454, 0.1927, 0.3375, + 0.3438, 0.8555, 0.8565, 0.6394, 0.2035]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.4130, 0.3379, 0.7498, ..., 0.0848, 0.9618, 0.5893]) +tensor([0.4641, 0.6633, 0.6004, ..., 0.4285, 0.2216, 0.5300]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -317,13 +344,13 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.308423519134521 seconds +Time: 10.476571798324585 seconds -[39.69, 38.64, 38.76, 38.66, 38.74, 43.79, 39.13, 39.04, 39.27, 38.98] -[94.85] -12.54550838470459 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 475914, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.308423519134521, 'TIME_S_1KI': 0.021660265340239036, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1189.9414702892302, 'W': 94.85} -[39.69, 38.64, 38.76, 38.66, 38.74, 43.79, 39.13, 39.04, 39.27, 38.98, 39.59, 39.06, 44.18, 38.71, 38.85, 39.28, 38.92, 38.81, 38.79, 40.52] -712.02 -35.601 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 475914, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.308423519134521, 'TIME_S_1KI': 0.021660265340239036, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1189.9414702892302, 'W': 94.85, 'J_1KI': 2.5003287784961783, 'W_1KI': 0.19930071399454524, 'W_D': 59.248999999999995, 'J_D': 743.3088262853622, 'W_D_1KI': 0.12449518190261265, 'J_D_1KI': 0.000261591762172604} +[41.03, 38.98, 39.26, 39.42, 39.05, 48.27, 44.68, 38.99, 43.85, 39.12] +[95.2] +12.914355516433716 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 490047, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.476571798324585, 'TIME_S_1KI': 0.021378708161308168, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1229.4466451644898, 'W': 95.2} +[41.03, 38.98, 39.26, 39.42, 39.05, 48.27, 44.68, 38.99, 43.85, 39.12, 41.47, 39.11, 39.34, 39.38, 39.11, 44.02, 39.04, 39.8, 39.1, 38.94] +731.68 +36.583999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 490047, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.476571798324585, 'TIME_S_1KI': 0.021378708161308168, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1229.4466451644898, 'W': 95.2, 'J_1KI': 2.50883414277506, 'W_1KI': 0.19426708050452304, 'W_D': 58.61600000000001, 'J_D': 756.9878629512788, 'W_D_1KI': 0.11961301671064206, 'J_D_1KI': 0.00024408478515457101} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.json index ae25247..257240e 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 463602, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": 
[5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.329545974731445, "TIME_S_1KI": 0.02228106430673605, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1198.2176897239685, "W": 95.11, "J_1KI": 2.5845826586683587, "W_1KI": 0.20515442124926123, "W_D": 59.724, "J_D": 752.4167101364135, "W_D_1KI": 0.12882601886963388, "J_D_1KI": 0.00027788063655815524} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 456925, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.096729040145874, "TIME_S_1KI": 0.022097125436660005, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1218.1025603413582, "W": 95.69, "J_1KI": 2.665869804325345, "W_1KI": 0.20942167751819227, "W_D": 60.22, "J_D": 766.5810030698776, "W_D_1KI": 0.13179405810581604, "J_D_1KI": 0.0002884369603453872} diff --git a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.output index 3c48d35..c94353e 100644 --- a/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_16core/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,51 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01933002471923828} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([ 393, 4092, 1605, ..., 4543, 205, 1898]), - values=tensor([0.0363, 0.1593, 0.8850, ..., 0.0884, 0.4054, 0.0261]), - size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.7170, 0.2316, 0.8921, ..., 0.0306, 0.1187, 0.4918]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250 -Density: 5e-05 -Time: 0.01933002471923828 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '54319', '-ss', '5000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.230255126953125} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([4735, 1903, 2985, ..., 3889, 4420, 4686]), - values=tensor([0.8501, 0.7899, 0.6223, ..., 0.9437, 0.2014, 0.9727]), - size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.5741, 0.3449, 0.6519, ..., 0.7953, 0.3519, 0.0286]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250 -Density: 5e-05 -Time: 1.230255126953125 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '463602', '-ss', '5000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.329545974731445} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.019748687744140625} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1248, 1249, 1250]), - col_indices=tensor([1727, 4803, 1040, ..., 3710, 1053, 4648]), - values=tensor([0.0640, 0.8338, 0.2393, ..., 0.0278, 0.9877, 0.3687]), + col_indices=tensor([1288, 418, 1217, ..., 3615, 1050, 3739]), + values=tensor([0.1525, 0.4681, 0.7924, ..., 0.1705, 0.9120, 0.8137]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.8593, 0.3881, 0.7226, ..., 0.2122, 0.1433, 0.4534]) +tensor([0.6959, 0.5396, 0.0275, ..., 0.0373, 0.0563, 0.7436]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.329545974731445 seconds +Time: 0.019748687744140625 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '53168', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.2217841148376465} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1248, 1249, 1250]), - col_indices=tensor([1727, 4803, 1040, ..., 3710, 1053, 4648]), - values=tensor([0.0640, 0.8338, 0.2393, ..., 0.0278, 0.9877, 0.3687]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([1720, 308, 494, ..., 419, 4959, 3638]), + values=tensor([0.7424, 0.0997, 0.6943, ..., 0.3613, 0.1237, 0.5099]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.8593, 0.3881, 0.7226, ..., 0.2122, 0.1433, 0.4534]) +tensor([0.4725, 0.9350, 0.5353, ..., 0.3161, 0.1856, 0.9919]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +34,48 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.329545974731445 seconds +Time: 1.2217841148376465 seconds -[40.79, 39.42, 39.16, 38.83, 39.21, 39.28, 39.68, 38.8, 39.35, 39.35] -[95.11] -12.598230361938477 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 463602, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.329545974731445, 'TIME_S_1KI': 0.02228106430673605, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1198.2176897239685, 'W': 95.11} -[40.79, 39.42, 39.16, 38.83, 39.21, 39.28, 39.68, 38.8, 39.35, 39.35, 40.35, 38.9, 38.98, 39.04, 38.98, 40.01, 39.46, 39.09, 39.7, 39.17] -707.72 -35.386 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 463602, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.329545974731445, 'TIME_S_1KI': 0.02228106430673605, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1198.2176897239685, 'W': 95.11, 'J_1KI': 2.5845826586683587, 'W_1KI': 0.20515442124926123, 'W_D': 59.724, 'J_D': 752.4167101364135, 'W_D_1KI': 0.12882601886963388, 'J_D_1KI': 0.00027788063655815524} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '456925', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.096729040145874} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([1672, 172, 4731, ..., 3430, 2179, 1580]), + values=tensor([0.3560, 0.5737, 0.3085, ..., 0.2088, 0.5679, 0.6187]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.9752, 0.9948, 0.1696, ..., 0.2234, 0.5842, 0.4674]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.096729040145874 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([1672, 172, 4731, ..., 3430, 2179, 1580]), + values=tensor([0.3560, 0.5737, 0.3085, ..., 0.2088, 0.5679, 0.6187]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.9752, 0.9948, 0.1696, ..., 0.2234, 0.5842, 0.4674]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.096729040145874 seconds + +[40.16, 39.39, 39.13, 40.27, 39.54, 39.09, 39.14, 39.03, 40.02, 39.45] +[95.69] +12.729674577713013 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 456925, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.096729040145874, 'TIME_S_1KI': 0.022097125436660005, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1218.1025603413582, 'W': 95.69} +[40.16, 39.39, 39.13, 40.27, 39.54, 39.09, 39.14, 39.03, 40.02, 39.45, 40.28, 39.58, 39.55, 39.03, 39.15, 39.03, 39.41, 39.02, 39.21, 39.73] +709.4 +35.47 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 456925, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.096729040145874, 'TIME_S_1KI': 0.022097125436660005, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1218.1025603413582, 'W': 95.69, 'J_1KI': 2.665869804325345, 'W_1KI': 0.20942167751819227, 'W_D': 60.22, 'J_D': 766.5810030698776, 'W_D_1KI': 0.13179405810581604, 'J_D_1KI': 0.0002884369603453872} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..622b9fa --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 276, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.459847450256348, "TIME_S_1KI": 37.89799800817517, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 817.4248892712593, "W": 57.93, "J_1KI": 2961.6843814176063, "W_1KI": 209.89130434782606, "W_D": 41.04475, "J_D": 579.1645127553344, "W_D_1KI": 148.7128623188406, "J_D_1KI": 538.8147185465239} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..57968d5 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 3.7949230670928955} + +tensor(indices=tensor([[ 
259, 92406, 67214, ..., 56126, 49758, 28077], + [18855, 67153, 56435, ..., 20353, 29983, 42584]]), + values=tensor([0.3156, 0.0350, 0.7564, ..., 0.8554, 0.7583, 0.2188]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.1576, 0.3481, 0.0886, ..., 0.2032, 0.6260, 0.4921]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 3.7949230670928955 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '276', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.459847450256348} + +tensor(indices=tensor([[19637, 46545, 80696, ..., 21067, 33035, 70657], + [76098, 99632, 25388, ..., 15886, 74696, 29574]]), + values=tensor([0.9176, 0.5660, 0.0023, ..., 0.9938, 0.3471, 0.0114]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2242, 0.3499, 0.5367, ..., 0.5506, 0.8782, 0.6678]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.459847450256348 seconds + +tensor(indices=tensor([[19637, 46545, 80696, ..., 21067, 33035, 70657], + [76098, 99632, 25388, ..., 15886, 74696, 29574]]), + values=tensor([0.9176, 0.5660, 0.0023, ..., 0.9938, 0.3471, 0.0114]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2242, 0.3499, 0.5367, ..., 0.5506, 0.8782, 0.6678]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.459847450256348 seconds + +[19.16, 19.05, 18.6, 18.49, 19.68, 18.59, 18.59, 18.46, 18.74, 18.83] +[57.93] +14.110562562942505 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 276, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.459847450256348, 'TIME_S_1KI': 37.89799800817517, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 817.4248892712593, 'W': 57.93} +[19.16, 19.05, 18.6, 18.49, 19.68, 18.59, 18.59, 18.46, 18.74, 18.83, 19.21, 18.6, 18.67, 18.46, 18.59, 18.45, 18.54, 18.66, 19.59, 18.69] +337.705 +16.88525 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 276, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.459847450256348, 'TIME_S_1KI': 37.89799800817517, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 817.4248892712593, 'W': 57.93, 'J_1KI': 2961.6843814176063, 'W_1KI': 209.89130434782606, 'W_D': 41.04475, 'J_D': 579.1645127553344, 'W_D_1KI': 148.7128623188406, 'J_D_1KI': 538.8147185465239} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..4bef977 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, 
"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 42.84994697570801, "TIME_S_1KI": 428.4994697570801, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2573.45484375, "W": 53.76, "J_1KI": 25734.548437499998, "W_1KI": 537.6, "W_D": 36.732, "J_D": 1758.335999267578, "W_D_1KI": 367.32, "J_D_1KI": 3673.2} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..37a66db --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 42.84994697570801} + +tensor(indices=tensor([[88148, 50636, 12278, ..., 44395, 13251, 88413], + [93628, 9107, 57736, ..., 46921, 76663, 4416]]), + values=tensor([0.1545, 0.0086, 0.8794, ..., 0.5253, 0.2705, 0.7992]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.1559, 0.8747, 0.7723, ..., 0.9967, 0.3874, 0.3742]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 42.84994697570801 seconds + +tensor(indices=tensor([[88148, 50636, 12278, ..., 44395, 13251, 88413], + [93628, 9107, 57736, ..., 46921, 76663, 4416]]), + values=tensor([0.1545, 0.0086, 0.8794, ..., 0.5253, 0.2705, 0.7992]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.1559, 0.8747, 0.7723, ..., 0.9967, 0.3874, 0.3742]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 42.84994697570801 seconds + +[18.8, 18.3, 18.52, 18.99, 18.36, 18.42, 18.8, 19.79, 18.46, 18.37] +[53.76] +47.86932373046875 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 42.84994697570801, 'TIME_S_1KI': 428.4994697570801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2573.45484375, 'W': 53.76} +[18.8, 18.3, 18.52, 18.99, 18.36, 18.42, 18.8, 19.79, 18.46, 18.37, 20.32, 18.48, 18.79, 18.38, 18.47, 18.42, 18.75, 23.03, 18.64, 18.43] +340.55999999999995 +17.028 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 42.84994697570801, 'TIME_S_1KI': 428.4994697570801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2573.45484375, 'W': 53.76, 'J_1KI': 25734.548437499998, 'W_1KI': 537.6, 'W_D': 36.732, 'J_D': 1758.335999267578, 'W_D_1KI': 367.32, 'J_D_1KI': 3673.2} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.01.json 
b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..ede5e39 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 412.5398921966553, "TIME_S_1KI": 4125.398921966553, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 24198.670872001647, "W": 53.279999999999994, "J_1KI": 241986.70872001647, "W_1KI": 532.8, "W_D": 36.422999999999995, "J_D": 16542.571118072745, "W_D_1KI": 364.22999999999996, "J_D_1KI": 3642.2999999999997} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..a0dbec0 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 412.5398921966553} + +tensor(indices=tensor([[71321, 41376, 38880, ..., 18381, 33336, 40558], + [74739, 76364, 52147, ..., 6055, 76752, 19403]]), + values=tensor([0.6997, 0.7294, 0.0218, ..., 0.6601, 0.3490, 0.6000]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.2890, 0.1028, 0.0427, ..., 0.6576, 0.0647, 0.6766]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 412.5398921966553 seconds + +tensor(indices=tensor([[71321, 41376, 38880, ..., 18381, 33336, 40558], + [74739, 76364, 52147, ..., 6055, 76752, 19403]]), + values=tensor([0.6997, 0.7294, 0.0218, ..., 0.6601, 0.3490, 0.6000]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.2890, 0.1028, 0.0427, ..., 0.6576, 0.0647, 0.6766]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 412.5398921966553 seconds + +[19.37, 18.78, 18.57, 18.88, 18.82, 18.81, 18.55, 18.97, 18.63, 18.37] +[53.28] +454.17925810813904 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 412.5398921966553, 'TIME_S_1KI': 4125.398921966553, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 24198.670872001647, 'W': 53.279999999999994} +[19.37, 18.78, 18.57, 18.88, 18.82, 18.81, 18.55, 18.97, 18.63, 18.37, 19.0, 18.87, 18.71, 18.6, 18.72, 18.58, 18.65, 18.72, 18.61, 18.6] +337.14 +16.857 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 
412.5398921966553, 'TIME_S_1KI': 4125.398921966553, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 24198.670872001647, 'W': 53.279999999999994, 'J_1KI': 241986.70872001647, 'W_1KI': 532.8, 'W_D': 36.422999999999995, 'J_D': 16542.571118072745, 'W_D_1KI': 364.22999999999996, 'J_D_1KI': 3642.2999999999997} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..8b20250 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2194, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.215775489807129, "TIME_S_1KI": 4.656233131179184, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1111.5062367582323, "W": 81.18, "J_1KI": 506.6117760976446, "W_1KI": 37.00091157702826, "W_D": 64.4195, "J_D": 882.0236021045446, "W_D_1KI": 29.36166818596171, "J_D_1KI": 13.382711114841255} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..a236b77 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.4785642623901367} + +tensor(indices=tensor([[54677, 54370, 622, ..., 38870, 89232, 95851], + [53225, 99442, 33615, ..., 22013, 89385, 25238]]), + values=tensor([0.3522, 0.6796, 0.7423, ..., 0.3629, 0.1983, 0.1916]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.4184, 0.8527, 0.3195, ..., 0.6891, 0.6136, 0.5192]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.4785642623901367 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '2194', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.215775489807129} + +tensor(indices=tensor([[73735, 45882, 7932, ..., 62765, 94691, 50180], + [ 7289, 82506, 87452, ..., 25331, 17803, 7265]]), + values=tensor([0.6305, 0.5778, 0.7222, ..., 0.1306, 0.4978, 0.1769]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6892, 0.5457, 0.5229, ..., 0.4861, 0.0039, 0.9339]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.215775489807129 seconds + +tensor(indices=tensor([[73735, 45882, 7932, ..., 62765, 
94691, 50180], + [ 7289, 82506, 87452, ..., 25331, 17803, 7265]]), + values=tensor([0.6305, 0.5778, 0.7222, ..., 0.1306, 0.4978, 0.1769]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6892, 0.5457, 0.5229, ..., 0.4861, 0.0039, 0.9339]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.215775489807129 seconds + +[19.3, 18.38, 18.9, 18.68, 18.51, 18.37, 18.92, 18.4, 18.84, 18.37] +[81.18] +13.691872835159302 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2194, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.215775489807129, 'TIME_S_1KI': 4.656233131179184, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1111.5062367582323, 'W': 81.18} +[19.3, 18.38, 18.9, 18.68, 18.51, 18.37, 18.92, 18.4, 18.84, 18.37, 19.16, 18.76, 18.34, 18.35, 18.76, 18.46, 18.78, 18.52, 18.59, 18.47] +335.21000000000004 +16.7605 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2194, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.215775489807129, 'TIME_S_1KI': 4.656233131179184, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1111.5062367582323, 'W': 81.18, 'J_1KI': 506.6117760976446, 'W_1KI': 37.00091157702826, 'W_D': 64.4195, 'J_D': 882.0236021045446, 'W_D_1KI': 29.36166818596171, 'J_D_1KI': 13.382711114841255} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..33e4c22 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 538, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.497490882873535, "TIME_S_1KI": 19.512064838054897, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 869.9857006072998, "W": 61.94, "J_1KI": 1617.0737929503714, "W_1KI": 115.13011152416357, "W_D": 45.07325, "J_D": 633.0817400693894, "W_D_1KI": 83.77927509293681, "J_D_1KI": 155.72355965229892} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..8e04606 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9510362148284912} + +tensor(indices=tensor([[67703, 92127, 38831, ..., 11246, 83214, 21303], + [23185, 76303, 44580, ..., 32673, 12011, 55424]]), + values=tensor([0.4544, 0.1257, 0.1042, ..., 0.4749, 0.4298, 
0.4851]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.0599, 0.2614, 0.5637, ..., 0.0251, 0.7289, 0.2075]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 1.9510362148284912 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '538', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.497490882873535} + +tensor(indices=tensor([[ 1800, 75339, 71391, ..., 38217, 18774, 99711], + [56368, 11982, 31186, ..., 69767, 67396, 4804]]), + values=tensor([0.5536, 0.2742, 0.9215, ..., 0.4908, 0.0958, 0.0743]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.2693, 0.4645, 0.9119, ..., 0.1622, 0.2320, 0.7709]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.497490882873535 seconds + +tensor(indices=tensor([[ 1800, 75339, 71391, ..., 38217, 18774, 99711], + [56368, 11982, 31186, ..., 69767, 67396, 4804]]), + values=tensor([0.5536, 0.2742, 0.9215, ..., 0.4908, 0.0958, 0.0743]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.2693, 0.4645, 0.9119, ..., 0.1622, 0.2320, 0.7709]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.497490882873535 seconds + +[18.79, 18.61, 18.38, 18.45, 18.73, 18.86, 19.57, 19.19, 18.58, 19.13] +[61.94] +14.04561996459961 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 538, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.497490882873535, 'TIME_S_1KI': 19.512064838054897, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 869.9857006072998, 'W': 61.94} +[18.79, 18.61, 18.38, 18.45, 18.73, 18.86, 19.57, 19.19, 18.58, 19.13, 19.05, 18.54, 18.91, 19.2, 18.36, 18.45, 18.65, 18.41, 18.77, 18.38] +337.335 +16.86675 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 538, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.497490882873535, 'TIME_S_1KI': 19.512064838054897, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 869.9857006072998, 'W': 61.94, 'J_1KI': 1617.0737929503714, 'W_1KI': 115.13011152416357, 'W_D': 45.07325, 'J_D': 633.0817400693894, 'W_D_1KI': 83.77927509293681, 'J_D_1KI': 155.72355965229892} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..3612954 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 29372, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 
0.0001, "TIME_S": 10.55020022392273, "TIME_S_1KI": 0.35919243578655624, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 759.2383503341675, "W": 54.24, "J_1KI": 25.84905182943509, "W_1KI": 1.846656679831132, "W_D": 37.11625, "J_D": 519.544255541861, "W_D_1KI": 1.263660969630941, "J_D_1KI": 0.04302263957615896} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..63fd63a --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04774022102355957} + +tensor(indices=tensor([[8621, 5456, 6763, ..., 9899, 8829, 8052], + [8094, 4277, 5280, ..., 3585, 4224, 9332]]), + values=tensor([0.3743, 0.4541, 0.6901, ..., 0.3693, 0.1054, 0.4083]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.3881, 0.6282, 0.8282, ..., 0.2549, 0.9387, 0.2470]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.04774022102355957 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '21994', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.862473249435425} + +tensor(indices=tensor([[8084, 218, 1416, ..., 3260, 6300, 8841], + [4003, 9040, 9537, ..., 8153, 4567, 1395]]), + values=tensor([0.6308, 0.8593, 0.2702, ..., 0.6769, 0.1946, 0.9528]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7031, 0.2192, 0.3440, ..., 0.0023, 0.7055, 0.0826]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 7.862473249435425 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '29372', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.55020022392273} + +tensor(indices=tensor([[3123, 7058, 4069, ..., 8347, 1643, 7446], + [1097, 2596, 5104, ..., 3408, 4899, 1493]]), + values=tensor([0.5132, 0.3285, 0.4436, ..., 0.5381, 0.1033, 0.1200]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.9020, 0.6942, 0.3490, ..., 0.8627, 0.6082, 0.2844]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.55020022392273 seconds + +tensor(indices=tensor([[3123, 7058, 4069, ..., 
8347, 1643, 7446], + [1097, 2596, 5104, ..., 3408, 4899, 1493]]), + values=tensor([0.5132, 0.3285, 0.4436, ..., 0.5381, 0.1033, 0.1200]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.9020, 0.6942, 0.3490, ..., 0.8627, 0.6082, 0.2844]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.55020022392273 seconds + +[19.05, 18.83, 18.43, 19.83, 18.83, 18.65, 18.78, 18.5, 18.99, 18.6] +[54.24] +13.997757196426392 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 29372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.55020022392273, 'TIME_S_1KI': 0.35919243578655624, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 759.2383503341675, 'W': 54.24} +[19.05, 18.83, 18.43, 19.83, 18.83, 18.65, 18.78, 18.5, 18.99, 18.6, 19.04, 18.55, 18.7, 18.71, 19.09, 18.86, 21.77, 19.21, 18.61, 19.58] +342.475 +17.12375 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 29372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.55020022392273, 'TIME_S_1KI': 0.35919243578655624, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 759.2383503341675, 'W': 54.24, 'J_1KI': 25.84905182943509, 'W_1KI': 1.846656679831132, 'W_D': 37.11625, 'J_D': 519.544255541861, 'W_D_1KI': 1.263660969630941, 'J_D_1KI': 0.04302263957615896} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..341fad9 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2885, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.179133892059326, "TIME_S_1KI": 3.528295976450373, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 741.2548218441009, "W": 54.35999999999999, "J_1KI": 256.93408036190675, "W_1KI": 18.842287694974, "W_D": 37.470749999999995, "J_D": 510.9524303829073, "W_D_1KI": 12.988128249566723, "J_D_1KI": 4.501950866400944} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..c48a950 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.363910436630249} + +tensor(indices=tensor([[9278, 1408, 1284, ..., 8003, 3323, 3188], + [2099, 9688, 1027, ..., 8690, 325, 485]]), + values=tensor([0.1559, 0.8015, 0.6542, ..., 0.1766, 0.2964, 0.0191]), + size=(10000, 10000), 
nnz=100000, layout=torch.sparse_coo) +tensor([0.6998, 0.5147, 0.8258, ..., 0.4739, 0.2636, 0.6361]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.363910436630249 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '2885', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.179133892059326} + +tensor(indices=tensor([[1099, 7524, 118, ..., 4494, 9102, 8694], + [7426, 8511, 2713, ..., 2140, 6919, 2106]]), + values=tensor([0.0932, 0.0015, 0.3645, ..., 0.0933, 0.0751, 0.1868]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.7707, 0.9146, 0.6318, ..., 0.3139, 0.5649, 0.1325]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.179133892059326 seconds + +tensor(indices=tensor([[1099, 7524, 118, ..., 4494, 9102, 8694], + [7426, 8511, 2713, ..., 2140, 6919, 2106]]), + values=tensor([0.0932, 0.0015, 0.3645, ..., 0.0933, 0.0751, 0.1868]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.7707, 0.9146, 0.6318, ..., 0.3139, 0.5649, 0.1325]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.179133892059326 seconds + +[19.07, 18.54, 19.14, 19.41, 18.63, 18.48, 18.94, 18.39, 18.67, 18.55] +[54.36] +13.6360342502594 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2885, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.179133892059326, 'TIME_S_1KI': 3.528295976450373, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 741.2548218441009, 'W': 54.35999999999999} +[19.07, 18.54, 19.14, 19.41, 18.63, 18.48, 18.94, 18.39, 18.67, 18.55, 19.74, 18.55, 18.56, 18.57, 19.04, 18.54, 18.68, 18.75, 18.98, 18.47] +337.78499999999997 +16.889249999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2885, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.179133892059326, 'TIME_S_1KI': 3.528295976450373, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 741.2548218441009, 'W': 54.35999999999999, 'J_1KI': 256.93408036190675, 'W_1KI': 18.842287694974, 'W_D': 37.470749999999995, 'J_D': 510.9524303829073, 'W_D_1KI': 12.988128249566723, 'J_D_1KI': 4.501950866400944} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..bd2079d --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 297, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.454929828643799, "TIME_S_1KI": 
35.2017839348276, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 767.9595693349838, "W": 54.5, "J_1KI": 2585.7224556733463, "W_1KI": 183.5016835016835, "W_D": 37.475750000000005, "J_D": 528.0708409267069, "W_D_1KI": 126.18097643097644, "J_D_1KI": 424.85177249487015} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..921bf5b --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.53249192237854} + +tensor(indices=tensor([[3496, 852, 4770, ..., 509, 4576, 4025], + [ 167, 7324, 4551, ..., 547, 6717, 3593]]), + values=tensor([0.8706, 0.9084, 0.2784, ..., 0.1695, 0.5914, 0.6041]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2662, 0.9166, 0.5141, ..., 0.0387, 0.6488, 0.3676]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 3.53249192237854 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '297', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.454929828643799} + +tensor(indices=tensor([[6046, 3973, 4226, ..., 779, 4675, 2232], + [1467, 7074, 9310, ..., 3097, 2073, 1856]]), + values=tensor([0.7224, 0.2456, 0.0192, ..., 0.9529, 0.4760, 0.5637]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.8396, 0.8390, 0.0451, ..., 0.1536, 0.8727, 0.3219]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.454929828643799 seconds + +tensor(indices=tensor([[6046, 3973, 4226, ..., 779, 4675, 2232], + [1467, 7074, 9310, ..., 3097, 2073, 1856]]), + values=tensor([0.7224, 0.2456, 0.0192, ..., 0.9529, 0.4760, 0.5637]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.8396, 0.8390, 0.0451, ..., 0.1536, 0.8727, 0.3219]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.454929828643799 seconds + +[18.7, 18.37, 18.72, 18.71, 18.71, 18.8, 19.32, 18.77, 18.69, 18.5] +[54.5] +14.091001272201538 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.454929828643799, 'TIME_S_1KI': 35.2017839348276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 767.9595693349838, 'W': 54.5} +[18.7, 18.37, 18.72, 18.71, 18.71, 18.8, 19.32, 18.77, 18.69, 18.5, 19.07, 18.62, 20.29, 
18.71, 18.59, 18.56, 18.82, 18.86, 18.77, 22.08] +340.48499999999996 +17.02425 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.454929828643799, 'TIME_S_1KI': 35.2017839348276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 767.9595693349838, 'W': 54.5, 'J_1KI': 2585.7224556733463, 'W_1KI': 183.5016835016835, 'W_D': 37.475750000000005, 'J_D': 528.0708409267069, 'W_D_1KI': 126.18097643097644, 'J_D_1KI': 424.85177249487015} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..6867b72 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 17.546292066574097, "TIME_S_1KI": 175.46292066574097, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1213.4392498111724, "W": 54.29, "J_1KI": 12134.392498111725, "W_1KI": 542.9, "W_D": 37.43899999999999, "J_D": 836.801474925041, "W_D_1KI": 374.38999999999993, "J_D_1KI": 3743.899999999999} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..12cc0f8 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 17.546292066574097} + +tensor(indices=tensor([[7712, 8176, 2733, ..., 9900, 3552, 719], + [5895, 6102, 1680, ..., 1426, 873, 1922]]), + values=tensor([0.7534, 0.5438, 0.6082, ..., 0.2470, 0.5768, 0.1059]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.0697, 0.4570, 0.5878, ..., 0.8004, 0.4211, 0.6165]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 17.546292066574097 seconds + +tensor(indices=tensor([[7712, 8176, 2733, ..., 9900, 3552, 719], + [5895, 6102, 1680, ..., 1426, 873, 1922]]), + values=tensor([0.7534, 0.5438, 0.6082, ..., 0.2470, 0.5768, 0.1059]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.0697, 0.4570, 0.5878, ..., 0.8004, 0.4211, 0.6165]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 17.546292066574097 seconds + +[19.55, 18.63, 18.88, 18.72, 18.8, 18.35, 18.79, 18.7, 18.8, 19.06] +[54.29] +22.35106372833252 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 
'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 17.546292066574097, 'TIME_S_1KI': 175.46292066574097, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1213.4392498111724, 'W': 54.29} +[19.55, 18.63, 18.88, 18.72, 18.8, 18.35, 18.79, 18.7, 18.8, 19.06, 19.43, 18.41, 18.47, 18.5, 18.66, 18.58, 18.61, 19.0, 18.85, 18.5] +337.02000000000004 +16.851000000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 17.546292066574097, 'TIME_S_1KI': 175.46292066574097, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1213.4392498111724, 'W': 54.29, 'J_1KI': 12134.392498111725, 'W_1KI': 542.9, 'W_D': 37.43899999999999, 'J_D': 836.801474925041, 'W_D_1KI': 374.38999999999993, 'J_D_1KI': 3743.899999999999} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..1a1ffe0 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 35.104506969451904, "TIME_S_1KI": 351.04506969451904, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2223.327211351395, "W": 54.02, "J_1KI": 22233.272113513947, "W_1KI": 540.2, "W_D": 37.23700000000001, "J_D": 1532.5811804718976, "W_D_1KI": 372.3700000000001, "J_D_1KI": 3723.700000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..81ebf24 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 35.104506969451904} + +tensor(indices=tensor([[5965, 1810, 9626, ..., 4502, 3774, 7838], + [1310, 6428, 4724, ..., 7910, 1197, 4169]]), + values=tensor([0.7596, 0.3928, 0.9709, ..., 0.9446, 0.5601, 0.7829]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9776, 0.9172, 0.8687, ..., 0.7797, 0.1327, 0.9713]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 35.104506969451904 seconds + +tensor(indices=tensor([[5965, 1810, 9626, ..., 4502, 3774, 7838], + [1310, 6428, 4724, ..., 7910, 1197, 4169]]), + values=tensor([0.7596, 0.3928, 0.9709, ..., 0.9446, 0.5601, 0.7829]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9776, 0.9172, 0.8687, ..., 0.7797, 0.1327, 0.9713]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 
0.1 +Time: 35.104506969451904 seconds + +[18.81, 18.41, 18.48, 18.45, 18.74, 18.25, 19.34, 18.38, 18.68, 18.43] +[54.02] +41.157482624053955 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 35.104506969451904, 'TIME_S_1KI': 351.04506969451904, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2223.327211351395, 'W': 54.02} +[18.81, 18.41, 18.48, 18.45, 18.74, 18.25, 19.34, 18.38, 18.68, 18.43, 18.65, 18.92, 18.76, 18.53, 18.58, 19.1, 18.46, 18.5, 18.66, 18.95] +335.65999999999997 +16.782999999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 35.104506969451904, 'TIME_S_1KI': 351.04506969451904, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2223.327211351395, 'W': 54.02, 'J_1KI': 22233.272113513947, 'W_1KI': 540.2, 'W_D': 37.23700000000001, 'J_D': 1532.5811804718976, 'W_D_1KI': 372.3700000000001, 'J_D_1KI': 3723.700000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..a4de997 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 70.16714549064636, "TIME_S_1KI": 701.6714549064636, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4225.564891979694, "W": 53.83, "J_1KI": 42255.64891979694, "W_1KI": 538.3, "W_D": 36.723749999999995, "J_D": 2882.7529017618294, "W_D_1KI": 367.23749999999995, "J_D_1KI": 3672.3749999999995} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..1afd81e --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 70.16714549064636} + +tensor(indices=tensor([[9700, 8094, 3775, ..., 8917, 6161, 6776], + [2703, 1006, 6246, ..., 5702, 7129, 7896]]), + values=tensor([0.2544, 0.6040, 0.8647, ..., 0.6605, 0.8051, 0.9149]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.7925, 0.4684, 0.4705, ..., 0.0795, 0.6524, 0.8229]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 70.16714549064636 seconds + +tensor(indices=tensor([[9700, 8094, 3775, ..., 8917, 6161, 6776], + [2703, 1006, 6246, ..., 5702, 7129, 7896]]), + values=tensor([0.2544, 
0.6040, 0.8647, ..., 0.6605, 0.8051, 0.9149]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.7925, 0.4684, 0.4705, ..., 0.0795, 0.6524, 0.8229]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 70.16714549064636 seconds + +[19.2, 18.73, 18.5, 22.24, 19.21, 18.45, 19.44, 18.56, 19.68, 18.43] +[53.83] +78.49832606315613 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 70.16714549064636, 'TIME_S_1KI': 701.6714549064636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4225.564891979694, 'W': 53.83} +[19.2, 18.73, 18.5, 22.24, 19.21, 18.45, 19.44, 18.56, 19.68, 18.43, 19.35, 18.45, 18.53, 18.48, 18.74, 18.7, 18.79, 18.88, 18.98, 18.55] +342.125 +17.10625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 70.16714549064636, 'TIME_S_1KI': 701.6714549064636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4225.564891979694, 'W': 53.83, 'J_1KI': 42255.64891979694, 'W_1KI': 538.3, 'W_D': 36.723749999999995, 'J_D': 2882.7529017618294, 'W_D_1KI': 367.23749999999995, 'J_D_1KI': 3672.3749999999995} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..4e52d36 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 105.21965885162354, "TIME_S_1KI": 1052.1965885162354, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6222.783360099792, "W": 53.8, "J_1KI": 62227.83360099792, "W_1KI": 537.9999999999999, "W_D": 36.875, "J_D": 4265.151234269142, "W_D_1KI": 368.75, "J_D_1KI": 3687.5} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..4bb698f --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 105.21965885162354} + +tensor(indices=tensor([[5407, 5787, 9186, ..., 8033, 8571, 4724], + [4914, 4898, 6139, ..., 2861, 8664, 3517]]), + values=tensor([0.6261, 0.7581, 0.0102, ..., 0.7531, 0.9323, 0.1407]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.2937, 0.3731, 0.4040, ..., 0.6645, 0.2192, 0.9273]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 105.21965885162354 seconds + +tensor(indices=tensor([[5407, 5787, 9186, ..., 8033, 8571, 4724], + [4914, 4898, 6139, ..., 2861, 8664, 3517]]), + values=tensor([0.6261, 0.7581, 0.0102, ..., 0.7531, 0.9323, 0.1407]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.2937, 0.3731, 0.4040, ..., 0.6645, 0.2192, 0.9273]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 105.21965885162354 seconds + +[19.36, 19.51, 18.76, 18.56, 18.66, 18.5, 18.64, 18.45, 18.82, 22.61] +[53.8] +115.66511821746826 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 105.21965885162354, 'TIME_S_1KI': 1052.1965885162354, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6222.783360099792, 'W': 53.8} +[19.36, 19.51, 18.76, 18.56, 18.66, 18.5, 18.64, 18.45, 18.82, 22.61, 19.22, 18.51, 18.41, 18.64, 19.06, 18.46, 18.63, 18.43, 18.64, 18.45] +338.5 +16.925 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 105.21965885162354, 'TIME_S_1KI': 1052.1965885162354, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6222.783360099792, 'W': 53.8, 'J_1KI': 62227.83360099792, 'W_1KI': 537.9999999999999, 'W_D': 36.875, 'J_D': 4265.151234269142, 'W_D_1KI': 368.75, 'J_D_1KI': 3687.5} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..62ec3b8 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 140.21517753601074, "TIME_S_1KI": 1402.1517753601074, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 8232.081338682176, "W": 53.82000000000001, "J_1KI": 82320.81338682176, "W_1KI": 538.2000000000002, "W_D": 36.60050000000001, "J_D": 5598.258882133724, "W_D_1KI": 366.0050000000001, "J_D_1KI": 3660.050000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..c569fce --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 140.21517753601074} + +tensor(indices=tensor([[ 911, 1747, 969, ..., 5021, 8798, 9712], + [7948, 8550, 3543, 
..., 8398, 3778, 5745]]), + values=tensor([0.9606, 0.0989, 0.4394, ..., 0.2887, 0.8749, 0.1670]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.7317, 0.4476, 0.2901, ..., 0.4493, 0.7492, 0.0926]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 140.21517753601074 seconds + +tensor(indices=tensor([[ 911, 1747, 969, ..., 5021, 8798, 9712], + [7948, 8550, 3543, ..., 8398, 3778, 5745]]), + values=tensor([0.9606, 0.0989, 0.4394, ..., 0.2887, 0.8749, 0.1670]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.7317, 0.4476, 0.2901, ..., 0.4493, 0.7492, 0.0926]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 140.21517753601074 seconds + +[23.09, 18.96, 18.67, 19.1, 18.53, 19.56, 18.66, 18.75, 18.42, 18.48] +[53.82] +152.95580339431763 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 140.21517753601074, 'TIME_S_1KI': 1402.1517753601074, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8232.081338682176, 'W': 53.82000000000001} +[23.09, 18.96, 18.67, 19.1, 18.53, 19.56, 18.66, 18.75, 18.42, 18.48, 19.01, 18.55, 18.89, 18.56, 19.07, 18.45, 20.33, 21.53, 18.57, 19.0] +344.39 +17.2195 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 140.21517753601074, 'TIME_S_1KI': 1402.1517753601074, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8232.081338682176, 'W': 53.82000000000001, 'J_1KI': 82320.81338682176, 'W_1KI': 538.2000000000002, 'W_D': 36.60050000000001, 'J_D': 5598.258882133724, 'W_D_1KI': 366.0050000000001, 'J_D_1KI': 3660.050000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..6c3cd55 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 175.4700062274933, "TIME_S_1KI": 1754.7000622749329, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 10255.436193523406, "W": 53.68, "J_1KI": 102554.36193523408, "W_1KI": 536.8, "W_D": 26.626000000000005, "J_D": 5086.833906273366, "W_D_1KI": 266.26000000000005, "J_D_1KI": 2662.6000000000004} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..82676b2 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.5', 
'-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 175.4700062274933} + +tensor(indices=tensor([[5161, 1288, 9022, ..., 8146, 2109, 6157], + [6072, 3357, 8597, ..., 2792, 9679, 7987]]), + values=tensor([0.3334, 0.3262, 0.6931, ..., 0.2841, 0.1700, 0.7507]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.2846, 0.5884, 0.8203, ..., 0.0667, 0.5552, 0.4961]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 175.4700062274933 seconds + +tensor(indices=tensor([[5161, 1288, 9022, ..., 8146, 2109, 6157], + [6072, 3357, 8597, ..., 2792, 9679, 7987]]), + values=tensor([0.3334, 0.3262, 0.6931, ..., 0.2841, 0.1700, 0.7507]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.2846, 0.5884, 0.8203, ..., 0.0667, 0.5552, 0.4961]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 175.4700062274933 seconds + +[18.95, 18.8, 18.54, 18.79, 18.57, 22.11, 19.66, 18.5, 19.13, 18.56] +[53.68] +191.04761910438538 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 175.4700062274933, 'TIME_S_1KI': 1754.7000622749329, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10255.436193523406, 'W': 53.68} +[18.95, 18.8, 18.54, 18.79, 18.57, 22.11, 19.66, 18.5, 19.13, 18.56, 18.9, 18.98, 28.6, 49.83, 43.67, 41.26, 45.63, 51.49, 52.72, 53.19] +541.0799999999999 +27.053999999999995 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 175.4700062274933, 'TIME_S_1KI': 1754.7000622749329, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10255.436193523406, 'W': 53.68, 'J_1KI': 102554.36193523408, 'W_1KI': 536.8, 'W_D': 26.626000000000005, 'J_D': 5086.833906273366, 'W_D_1KI': 266.26000000000005, 'J_D_1KI': 2662.6000000000004} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..1e3ff7b --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 245084, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.43880581855774, "TIME_S_1KI": 0.04259276745343531, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 754.0787694597244, "W": 54.17, "J_1KI": 3.076817619508921, "W_1KI": 0.22102626038419482, "W_D": 37.293000000000006, "J_D": 519.1408445534707, "W_D_1KI": 0.15216415596285357, "J_D_1KI": 0.0006208653194939432} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..459b6c1 --- /dev/null +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,962 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.01689934730529785} + +tensor(indices=tensor([[9853, 4476, 72, ..., 3689, 3187, 5788], + [4533, 6878, 2796, ..., 6432, 9670, 8061]]), + values=tensor([6.3614e-01, 3.2517e-02, 2.4696e-02, 6.3957e-03, + 6.3590e-02, 4.0325e-01, 6.5424e-01, 8.3406e-01, + 1.3681e-01, 8.4398e-01, 3.0200e-01, 9.1807e-01, + 7.1754e-01, 7.8713e-02, 7.4473e-01, 8.7099e-01, + 7.9428e-01, 7.0103e-01, 6.4743e-01, 6.6945e-02, + 8.9638e-01, 8.9165e-01, 1.1981e-01, 9.4339e-01, + 4.3492e-03, 1.1615e-01, 9.9467e-01, 9.0979e-01, + 7.3443e-01, 2.5051e-01, 5.8615e-01, 8.2826e-01, + 8.8167e-01, 6.1481e-01, 7.2996e-01, 4.2960e-01, + 7.1643e-01, 8.3073e-01, 8.2031e-01, 6.5400e-01, + 1.3393e-01, 2.5373e-01, 5.4830e-01, 9.9813e-02, + 6.1180e-01, 8.4680e-01, 5.6777e-01, 5.2341e-01, + 4.9315e-01, 2.2081e-01, 8.4444e-01, 3.9178e-01, + 9.5382e-01, 2.9935e-01, 7.2703e-01, 9.4373e-01, + 9.9236e-01, 6.7168e-01, 8.1513e-01, 5.3284e-01, + 5.5936e-01, 5.6993e-01, 7.4766e-01, 6.8866e-01, + 2.9239e-01, 9.4168e-02, 2.7169e-01, 1.5214e-01, + 5.7190e-01, 3.0660e-01, 1.2932e-02, 1.7694e-01, + 2.6496e-01, 2.5456e-01, 3.5786e-01, 8.8725e-01, + 6.9733e-01, 6.5755e-01, 9.4712e-01, 8.4843e-01, + 8.5588e-01, 6.1190e-01, 5.9108e-01, 5.2743e-01, + 6.7788e-01, 2.3084e-01, 9.1541e-01, 1.8166e-01, + 1.1407e-01, 1.4603e-01, 9.5483e-02, 6.9890e-01, + 7.3133e-01, 7.4997e-01, 7.1637e-01, 6.4288e-02, + 5.3543e-01, 1.0111e-01, 3.1740e-01, 1.0188e-01, + 9.2178e-01, 8.1508e-01, 7.1871e-02, 4.3475e-01, + 7.8215e-01, 3.9206e-01, 5.4081e-02, 7.7045e-01, + 4.7373e-01, 2.4179e-01, 1.9582e-01, 5.0879e-01, + 4.9792e-01, 2.0950e-01, 7.2829e-01, 1.2477e-01, + 6.9317e-02, 7.3876e-01, 2.4808e-01, 5.0109e-01, + 6.6316e-01, 2.2462e-01, 5.1248e-01, 5.4142e-01, + 6.4519e-01, 3.9267e-01, 9.9908e-01, 1.8465e-02, + 4.0206e-02, 8.8985e-01, 8.8272e-01, 8.1772e-01, + 9.7286e-02, 2.4231e-01, 2.6293e-02, 6.8303e-01, + 1.0245e-01, 6.5182e-01, 8.7578e-01, 5.0050e-01, + 1.8275e-01, 1.6349e-01, 2.0693e-01, 8.8773e-01, + 7.9760e-02, 3.6547e-01, 7.9603e-01, 1.9641e-01, + 8.7919e-01, 6.9649e-01, 6.7696e-01, 3.8051e-02, + 6.1156e-01, 6.7217e-01, 7.3506e-01, 8.2296e-02, + 6.1643e-01, 8.5249e-01, 4.9796e-01, 6.7720e-01, + 1.2539e-01, 2.0036e-01, 6.3894e-01, 5.3389e-01, + 8.4100e-01, 5.9784e-01, 1.0574e-01, 6.8701e-01, + 6.8544e-01, 5.1632e-01, 1.4811e-01, 8.4319e-01, + 1.2076e-01, 3.8438e-01, 4.0519e-01, 7.6938e-01, + 4.9949e-01, 2.5396e-01, 8.2159e-01, 8.5625e-01, + 9.3159e-01, 3.6287e-01, 4.9777e-01, 5.3518e-01, + 1.7629e-01, 7.1364e-01, 1.9284e-01, 9.0534e-03, + 2.3216e-01, 1.0360e-01, 5.7050e-01, 6.1117e-01, + 3.1070e-01, 1.3133e-01, 8.9440e-01, 6.7678e-01, + 2.9597e-01, 3.1734e-01, 9.8737e-01, 1.8305e-01, + 7.9739e-01, 9.4509e-01, 8.4833e-01, 8.2071e-01, + 9.0621e-01, 8.4571e-01, 6.0153e-01, 9.6963e-01, + 8.6332e-01, 5.3472e-01, 5.4098e-01, 1.0649e-01, + 2.8206e-01, 5.6226e-01, 4.1000e-01, 8.1669e-01, + 2.2991e-01, 1.2861e-01, 3.4323e-01, 8.5242e-01, + 8.0569e-01, 5.6161e-01, 5.4860e-02, 4.8439e-01, + 2.0491e-01, 3.2396e-01, 2.0674e-01, 1.7432e-01, + 
1.2986e-01, 7.7999e-01, 9.1568e-01, 4.0893e-01, + 2.1455e-01, 8.3914e-01, 8.5506e-01, 4.5780e-01, + 1.5185e-01, 6.1925e-01, 5.5619e-03, 4.8873e-01, + 6.4694e-01, 8.3204e-01, 2.9132e-03, 1.5133e-02, + 9.2834e-01, 7.6959e-01, 4.8813e-01, 9.6373e-01, + 8.8295e-01, 5.4935e-01, 8.2771e-01, 1.5431e-01, + 4.6543e-01, 3.3185e-01, 6.1738e-01, 7.5377e-01, + 7.5750e-01, 3.2325e-01, 3.2343e-01, 5.0003e-01, + 4.6757e-01, 6.6998e-01, 5.1249e-01, 4.0907e-01, + 2.6970e-01, 8.4332e-01, 5.9023e-01, 9.0239e-01, + 9.1419e-02, 4.6745e-02, 3.1538e-01, 2.4198e-01, + 5.8952e-01, 9.5825e-01, 3.7490e-01, 3.6831e-01, + 9.1699e-01, 4.4705e-01, 6.9993e-01, 9.4785e-01, + 3.4674e-01, 4.6683e-01, 1.3550e-01, 8.8344e-01, + 2.3591e-01, 8.0040e-01, 7.4565e-01, 1.3116e-01, + 3.0851e-01, 2.1148e-01, 7.1339e-01, 7.7268e-01, + 5.3023e-01, 1.5096e-01, 1.9008e-01, 4.2258e-01, + 4.3880e-02, 9.9625e-01, 6.5268e-01, 9.3214e-01, + 6.0462e-01, 6.0590e-01, 2.4244e-01, 2.0030e-02, + 9.7005e-01, 3.2129e-01, 9.1344e-01, 6.5269e-01, + 6.5313e-03, 5.8713e-02, 4.5140e-01, 8.5618e-01, + 8.1477e-01, 2.6962e-01, 8.9698e-01, 1.7518e-01, + 2.8771e-01, 7.5827e-01, 2.9931e-01, 7.5091e-01, + 6.4095e-01, 1.3392e-01, 8.3241e-01, 6.6076e-02, + 5.8655e-01, 9.1416e-01, 3.7666e-02, 5.8806e-01, + 1.7213e-01, 6.9282e-01, 6.0925e-01, 9.9452e-01, + 9.1816e-01, 1.5073e-03, 9.2778e-01, 3.8351e-01, + 8.3213e-01, 1.8939e-02, 5.2956e-02, 4.9424e-01, + 7.6340e-02, 7.0816e-02, 5.2035e-02, 8.9355e-01, + 1.2355e-01, 3.5347e-01, 2.5801e-01, 3.3109e-01, + 4.1189e-01, 6.0967e-01, 9.8937e-01, 8.6153e-01, + 3.6467e-01, 3.8743e-01, 4.8515e-01, 5.4930e-01, + 2.8873e-01, 3.0396e-01, 2.3511e-01, 7.2566e-02, + 7.3001e-01, 9.8679e-01, 6.8869e-01, 2.5189e-01, + 1.4864e-01, 3.4546e-01, 3.1564e-01, 1.8638e-01, + 6.1711e-01, 2.9499e-01, 8.6787e-01, 8.1499e-01, + 6.4730e-01, 3.2227e-01, 8.5172e-01, 1.4655e-01, + 2.1927e-01, 8.4406e-01, 7.2521e-01, 4.4432e-01, + 7.8207e-01, 3.6034e-01, 1.7951e-01, 8.7854e-01, + 6.6734e-01, 2.7620e-01, 1.4615e-01, 5.3458e-02, + 1.7542e-01, 1.7793e-01, 6.4086e-01, 8.6692e-01, + 3.2839e-01, 4.5069e-01, 8.2317e-01, 2.1250e-01, + 7.0694e-01, 6.7302e-01, 9.5702e-01, 7.3103e-01, + 7.8736e-01, 7.0146e-01, 6.7212e-01, 2.8053e-01, + 7.4374e-01, 9.4274e-01, 6.2094e-01, 4.0364e-01, + 9.5878e-01, 5.9362e-01, 7.3273e-01, 5.9966e-02, + 8.5189e-01, 3.2934e-01, 4.7011e-01, 3.2987e-01, + 4.6086e-01, 5.4738e-01, 7.8338e-01, 8.8702e-01, + 4.5097e-01, 1.8844e-01, 6.7592e-01, 9.4414e-01, + 1.7023e-02, 9.5294e-01, 4.8367e-01, 3.1641e-01, + 8.3009e-01, 1.1180e-01, 7.2191e-01, 2.3642e-01, + 3.1400e-01, 7.6823e-01, 1.6067e-01, 9.4444e-01, + 5.0333e-01, 8.2602e-01, 4.8021e-01, 3.0343e-01, + 8.3966e-01, 7.9552e-01, 7.3259e-01, 5.2477e-01, + 2.2425e-01, 9.8972e-01, 7.4880e-01, 6.4110e-01, + 4.2640e-01, 5.9708e-01, 4.6243e-01, 8.5599e-02, + 6.2118e-01, 4.6689e-02, 2.9981e-01, 3.8868e-01, + 6.6759e-01, 2.3877e-01, 4.3729e-01, 5.6909e-01, + 6.2497e-01, 2.3828e-01, 5.7817e-01, 4.4903e-01, + 8.5992e-01, 9.5931e-01, 6.7993e-01, 1.7306e-02, + 5.0888e-01, 4.1626e-02, 2.5020e-01, 7.8423e-01, + 6.9608e-01, 1.0226e-03, 2.6028e-01, 6.3262e-01, + 8.5445e-01, 4.2874e-01, 6.1986e-01, 7.9866e-01, + 7.2065e-01, 9.6003e-01, 5.2093e-01, 8.8470e-01, + 1.6517e-01, 8.4961e-01, 3.8976e-01, 5.7753e-01, + 2.9480e-01, 1.6744e-01, 6.2028e-01, 3.4208e-01, + 7.5540e-01, 9.5138e-02, 5.4323e-01, 3.0407e-01, + 3.2085e-01, 3.2156e-01, 8.7564e-01, 4.3136e-01, + 5.3226e-01, 1.1451e-01, 4.3950e-01, 3.2841e-01, + 2.6104e-01, 6.2731e-01, 7.5653e-01, 4.4593e-01, + 9.7255e-01, 9.6832e-01, 5.1877e-01, 1.3855e-01, + 
9.8016e-01, 5.8164e-01, 3.7397e-01, 4.8492e-01, + 8.1050e-01, 2.3061e-01, 8.8856e-01, 5.3247e-01, + 4.1591e-01, 8.0824e-01, 3.8023e-01, 2.8807e-01, + 5.1024e-03, 8.4755e-02, 2.8337e-01, 3.6151e-01, + 1.6139e-01, 3.1103e-01, 2.2235e-01, 4.7260e-01, + 6.6515e-01, 8.3183e-01, 2.6968e-01, 6.4002e-01, + 1.9927e-01, 5.4537e-01, 4.0424e-01, 6.2608e-01, + 4.5914e-01, 7.9082e-01, 8.1018e-01, 7.8784e-02, + 2.1390e-01, 3.4006e-01, 9.6087e-01, 3.7088e-01, + 3.9467e-01, 7.8375e-01, 7.5262e-01, 7.4814e-01, + 9.1578e-01, 7.6670e-01, 9.6066e-01, 5.8250e-01, + 5.1879e-01, 5.2738e-01, 1.8124e-01, 8.1565e-01, + 7.8998e-01, 4.6614e-01, 8.5005e-01, 2.3754e-01, + 1.5253e-01, 9.7522e-01, 9.9541e-01, 7.0295e-01, + 8.8043e-01, 1.0174e-01, 1.5106e-01, 5.6435e-02, + 2.0333e-01, 2.4863e-01, 3.9485e-01, 7.5788e-01, + 2.0744e-01, 8.7659e-01, 5.1666e-01, 6.5770e-01, + 5.9332e-01, 3.1169e-01, 1.7908e-01, 3.9231e-01, + 3.5565e-01, 6.7939e-01, 5.9180e-01, 6.9911e-01, + 3.6218e-01, 3.3214e-01, 4.0629e-02, 2.7968e-01, + 6.4853e-01, 2.5620e-01, 7.1704e-01, 6.5400e-01, + 8.1118e-01, 1.4613e-01, 7.5389e-01, 4.8293e-01, + 4.2422e-01, 7.6654e-01, 2.9025e-02, 8.5057e-02, + 1.7571e-01, 6.5626e-02, 9.7707e-01, 2.8433e-01, + 1.6557e-01, 4.4186e-01, 5.0239e-01, 9.7718e-01, + 4.8804e-01, 4.4476e-01, 2.5378e-01, 7.3241e-01, + 5.8390e-01, 4.3284e-01, 8.4050e-01, 8.6856e-01, + 9.0541e-01, 1.9578e-01, 2.8881e-01, 4.3512e-01, + 7.4289e-01, 1.1155e-01, 3.9864e-01, 6.6823e-01, + 9.6660e-01, 8.4797e-01, 9.9793e-01, 4.8311e-01, + 6.8505e-01, 1.2175e-01, 5.2102e-01, 6.8043e-03, + 7.5205e-01, 1.0196e-02, 8.0512e-01, 1.0146e-01, + 2.9048e-01, 3.0417e-01, 3.7578e-01, 2.0358e-01, + 2.8293e-01, 3.6585e-01, 3.0243e-01, 4.7272e-01, + 1.7454e-02, 2.6924e-01, 3.2347e-01, 3.8390e-01, + 6.9421e-01, 4.8097e-01, 9.0646e-01, 4.8484e-01, + 8.6715e-01, 3.6413e-01, 9.8464e-01, 7.7827e-01, + 2.1860e-01, 1.8266e-02, 6.7047e-01, 9.3040e-01, + 9.7638e-01, 2.3831e-01, 5.6178e-01, 3.3238e-02, + 6.4864e-01, 3.7287e-01, 2.7710e-01, 9.3842e-01, + 4.6454e-01, 5.3647e-01, 8.7118e-01, 2.6980e-01, + 6.6382e-01, 3.2424e-01, 3.2151e-01, 4.1227e-01, + 2.9922e-01, 5.4000e-02, 4.0543e-01, 7.4926e-01, + 8.2880e-01, 1.8861e-01, 5.3900e-01, 9.9388e-01, + 4.9049e-01, 1.3155e-01, 7.9963e-01, 5.2048e-01, + 4.5364e-01, 4.5588e-01, 2.6133e-01, 1.1612e-01, + 5.6066e-01, 8.3512e-01, 2.5035e-01, 3.7708e-01, + 7.2526e-01, 2.0981e-01, 8.6746e-01, 3.3657e-03, + 7.8514e-01, 1.1874e-01, 2.0931e-01, 6.1591e-01, + 7.6838e-01, 3.3318e-01, 3.3901e-02, 3.4181e-01, + 9.9697e-01, 2.9418e-01, 3.5710e-03, 1.9774e-01, + 9.9101e-04, 2.8553e-01, 4.7514e-01, 5.8622e-01, + 3.7307e-01, 6.7626e-02, 4.4578e-01, 5.0235e-03, + 9.6276e-01, 6.0679e-01, 3.2269e-01, 3.6944e-01, + 5.0319e-01, 7.2888e-01, 3.6048e-01, 2.2131e-01, + 5.5183e-01, 8.8462e-01, 3.2525e-01, 5.9906e-01, + 6.3283e-01, 2.1571e-01, 1.5326e-01, 6.0494e-01, + 9.3844e-01, 3.2085e-01, 5.2970e-01, 7.9144e-01, + 1.0353e-01, 9.5895e-01, 2.7595e-01, 8.3120e-01, + 7.6981e-01, 5.3640e-01, 9.0679e-01, 4.5289e-01, + 7.8577e-01, 3.9871e-01, 7.8118e-01, 3.4959e-01, + 5.0901e-01, 6.5280e-01, 6.4598e-01, 4.9167e-01, + 7.9295e-01, 9.8356e-01, 4.6616e-01, 9.5769e-01, + 5.3197e-01, 2.3024e-01, 4.0049e-02, 4.3386e-01, + 1.0517e-01, 3.5579e-03, 2.7359e-01, 3.2880e-01, + 8.7317e-01, 9.6289e-01, 5.6120e-01, 4.4357e-01, + 8.8679e-01, 2.0081e-01, 3.1285e-01, 7.2805e-01, + 2.5495e-01, 2.0463e-01, 4.6120e-01, 3.2724e-01, + 9.2376e-01, 7.4901e-01, 2.0056e-01, 5.7000e-01, + 3.8456e-01, 2.3684e-02, 6.6362e-01, 4.6989e-03, + 8.6396e-01, 5.3072e-01, 2.3571e-01, 7.4699e-01, + 
2.2721e-01, 4.0910e-01, 9.7213e-01, 5.4094e-01, + 5.7596e-01, 8.0829e-03, 5.5548e-01, 7.0431e-01, + 5.5110e-01, 7.4034e-01, 9.3683e-01, 3.1405e-01, + 8.0164e-01, 4.3491e-01, 3.7706e-01, 2.5971e-01, + 5.0807e-01, 7.0856e-01, 9.6109e-01, 5.0709e-01, + 5.4468e-01, 7.2530e-01, 9.2954e-01, 3.6222e-01, + 6.1441e-01, 2.2455e-01, 5.4603e-02, 7.9251e-01, + 8.1993e-01, 1.5136e-01, 8.1286e-01, 2.7948e-03, + 3.6461e-01, 5.5646e-01, 9.6299e-01, 2.3915e-01, + 6.0657e-01, 2.3519e-01, 3.6332e-01, 2.2229e-01, + 4.6246e-01, 4.8782e-01, 3.8525e-02, 6.3085e-01, + 3.2510e-01, 4.8011e-01, 7.1317e-01, 4.7815e-01, + 8.3429e-01, 3.9524e-01, 7.0533e-01, 6.9427e-01, + 5.6640e-01, 1.3141e-01, 4.3335e-01, 2.8945e-01, + 7.3689e-01, 1.4851e-01, 2.2800e-01, 6.8983e-01, + 5.7122e-01, 5.5374e-01, 8.5804e-01, 5.9915e-01, + 4.7460e-01, 1.6783e-01, 1.6540e-01, 7.8787e-01, + 4.2086e-02, 4.0988e-01, 9.3912e-01, 9.3896e-01, + 9.8888e-01, 3.6929e-01, 9.8947e-02, 5.3458e-01, + 8.0857e-01, 3.6843e-01, 7.4867e-01, 7.4775e-01, + 2.9785e-01, 3.3546e-01, 3.2525e-01, 4.2520e-01, + 3.9291e-02, 7.7609e-01, 5.1834e-01, 5.1010e-01, + 1.1126e-01, 9.0828e-01, 7.8530e-01, 3.1252e-01, + 8.3948e-01, 2.2775e-01, 9.5582e-01, 7.8528e-01, + 4.8126e-02, 1.6195e-02, 8.8208e-03, 8.8697e-01, + 6.0644e-02, 6.6374e-01, 2.6899e-01, 7.9662e-01, + 6.4808e-01, 3.3597e-01, 9.4519e-01, 4.5321e-01, + 8.5435e-01, 4.5918e-01, 6.9370e-01, 3.0860e-01, + 4.7609e-02, 2.2096e-01, 2.8266e-01, 1.5660e-01, + 9.1450e-02, 2.0529e-01, 5.2281e-01, 8.8692e-01, + 5.7158e-01, 9.5937e-01, 3.8145e-01, 4.6475e-01, + 5.0255e-01, 9.8462e-02, 7.9592e-01, 8.2020e-01, + 2.2993e-01, 1.1201e-01, 7.5941e-01, 9.4793e-01, + 6.6752e-01, 2.0885e-02, 6.9597e-01, 2.7655e-01, + 1.9442e-01, 8.5342e-01, 4.9165e-02, 5.2640e-01, + 7.3335e-01, 6.1741e-01, 2.8828e-02, 7.4344e-01, + 3.2360e-01, 2.1018e-01, 8.3251e-01, 7.5848e-02, + 1.8241e-01, 1.7904e-01, 9.6374e-01, 1.0560e-02, + 4.9459e-01, 9.4338e-01, 7.1954e-01, 4.9748e-01, + 2.1768e-01, 1.5498e-01, 2.2277e-01, 9.9462e-01, + 7.3566e-01, 4.7796e-01, 2.8839e-01, 4.4162e-03, + 3.2213e-01, 4.8705e-01, 4.8808e-01, 7.0851e-01, + 6.6927e-01, 9.3718e-01, 2.9866e-01, 7.1131e-01, + 9.1977e-01, 9.6189e-02, 3.1682e-01, 9.0588e-01, + 8.4421e-01, 3.2918e-01, 4.0158e-01, 3.0646e-01, + 8.0535e-01, 8.7492e-01, 1.7403e-01, 7.8186e-01, + 1.3068e-01, 3.5040e-01, 9.6516e-01, 3.1815e-01, + 9.9817e-02, 6.3904e-01, 7.8971e-01, 6.2744e-02, + 6.4283e-01, 8.5679e-01, 9.7898e-01, 9.7924e-01, + 7.4132e-02, 8.2956e-01, 6.7860e-01, 2.7951e-01, + 3.3412e-01, 6.7694e-01, 1.6457e-01, 5.8893e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.6927, 0.7795, 0.8659, ..., 0.3841, 0.4383, 0.0183]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.01689934730529785 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '62132', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.6618854999542236} + +tensor(indices=tensor([[7887, 8182, 2290, ..., 5669, 1929, 8718], + [5753, 2194, 998, ..., 7998, 2237, 3220]]), + values=tensor([0.3516, 0.1508, 0.2281, 0.5764, 0.0963, 0.0092, 0.1031, + 0.4135, 0.7167, 0.9185, 0.4667, 0.4162, 0.0687, 0.7294, + 
0.0044, 0.6731, 0.5891, 0.4442, 0.5025, 0.3410, 0.3352, + 0.6080, 0.7738, 0.4965, 0.6206, 0.0436, 0.6223, 0.5434, + 0.3842, 0.8924, 0.3801, 0.0851, 0.8896, 0.2859, 0.1616, + 0.2587, 0.0526, 0.1312, 0.4438, 0.9896, 0.9960, 0.7965, + 0.3859, 0.8268, 0.3324, 0.6216, 0.0615, 0.0086, 0.0089, + 0.1198, 0.0905, 0.5466, 0.3879, 0.8813, 0.9789, 0.5696, + 0.7641, 0.2942, 0.1609, 0.7201, 0.2161, 0.6605, 0.7781, + 0.3950, 0.2308, 0.5853, 0.8372, 0.9456, 0.2942, 0.5270, + 0.3390, 0.0440, 0.8859, 0.7725, 0.7197, 0.5013, 0.3129, + 0.4516, 0.2339, 0.3377, 0.7938, 0.1216, 0.8140, 0.2840, + 0.5583, 0.5610, 0.5425, 0.2026, 0.8527, 0.6351, 0.9918, + 0.0937, 0.9708, 0.1802, 0.2721, 0.5568, 0.2102, 0.1860, + 0.2510, 0.3101, 0.2073, 0.4087, 0.5450, 0.9005, 0.9144, + 0.8469, 0.3098, 0.4686, 0.9065, 0.6181, 0.2382, 0.6730, + 0.2456, 0.2176, 0.8717, 0.7462, 0.1084, 0.3189, 0.3640, + 0.7510, 0.8688, 0.7656, 0.2152, 0.6995, 0.9738, 0.4563, + 0.4816, 0.2711, 0.5962, 0.2707, 0.5409, 0.7526, 0.3763, + 0.3789, 0.8638, 0.1496, 0.7621, 0.2433, 0.5773, 0.3046, + 0.2659, 0.8226, 0.4167, 0.6819, 0.1541, 0.4790, 0.2258, + 0.0365, 0.9963, 0.2214, 0.1963, 0.6359, 0.5114, 0.6096, + 0.4646, 0.3974, 0.8742, 0.4305, 0.8326, 0.3379, 0.8568, + 0.5123, 0.6280, 0.6427, 0.3758, 0.9013, 0.6705, 0.5075, + 0.4271, 0.2459, 0.6210, 0.1916, 0.1321, 0.1664, 0.5000, + 0.2508, 0.1191, 0.1292, 0.2851, 0.1409, 0.4034, 0.3635, + 0.0523, 0.7040, 0.2888, 0.1409, 0.6635, 0.2008, 0.8983, + 0.7373, 0.9799, 0.6553, 0.3223, 0.4690, 0.7470, 0.3992, + 0.7529, 0.2557, 0.1067, 0.0018, 0.0683, 0.6000, 0.2432, + 0.4542, 0.7663, 0.6172, 0.0692, 0.4570, 0.0572, 0.4120, + 0.8550, 0.0266, 0.1564, 0.5359, 0.0953, 0.6235, 0.4229, + 0.2248, 0.8369, 0.6644, 0.2400, 0.5160, 0.5200, 0.2447, + 0.7097, 0.2014, 0.2419, 0.1668, 0.6224, 0.9958, 0.6851, + 0.4039, 0.1736, 0.7442, 0.3188, 0.1713, 0.3531, 0.6077, + 0.6999, 0.2412, 0.5980, 0.7473, 0.7825, 0.8620, 0.2338, + 0.6171, 0.8252, 0.9421, 0.0347, 0.9809, 0.0952, 0.1634, + 0.0348, 0.1895, 0.1820, 0.0667, 0.7317, 0.8273, 0.7891, + 0.7823, 0.7733, 0.2643, 0.0511, 0.8825, 0.1677, 0.2436, + 0.5100, 0.5638, 0.9744, 0.7548, 0.5934, 0.3548, 0.6378, + 0.5859, 0.7120, 0.5319, 0.4956, 0.6404, 0.6002, 0.9963, + 0.1276, 0.2019, 0.4417, 0.3613, 0.0536, 0.3673, 0.3656, + 0.0258, 0.4154, 0.8796, 0.9197, 0.5176, 0.6618, 0.9523, + 0.3958, 0.6110, 0.1718, 0.7288, 0.6452, 0.7441, 0.0125, + 0.6381, 0.4338, 0.8823, 0.6959, 0.9916, 0.4155, 0.8112, + 0.6575, 0.9736, 0.7723, 0.7804, 0.5721, 0.3150, 0.6933, + 0.6872, 0.3169, 0.1566, 0.6797, 0.3993, 0.2452, 0.5534, + 0.7312, 0.3468, 0.2182, 0.3908, 0.7886, 0.8885, 0.4691, + 0.7741, 0.4601, 0.0821, 0.1267, 0.9997, 0.0020, 0.5289, + 0.9700, 0.5299, 0.2801, 0.6535, 0.6001, 0.2840, 0.3211, + 0.5369, 0.6497, 0.3236, 0.8555, 0.3230, 0.9215, 0.1972, + 0.6691, 0.4462, 0.6308, 0.0974, 0.2460, 0.1048, 0.4941, + 0.0090, 0.0249, 0.6255, 0.1626, 0.5190, 0.6363, 0.5930, + 0.7929, 0.7978, 0.3811, 0.9480, 0.3989, 0.8968, 0.7730, + 0.6814, 0.9545, 0.2893, 0.6483, 0.8284, 0.0310, 0.5474, + 0.2796, 0.4105, 0.0939, 0.2059, 0.7259, 0.9569, 0.8443, + 0.1529, 0.3579, 0.2160, 0.0027, 0.5671, 0.5539, 0.9831, + 0.3201, 0.9215, 0.1565, 0.1009, 0.4567, 0.1403, 0.0583, + 0.3016, 0.1839, 0.6777, 0.4219, 0.6402, 0.5209, 0.5389, + 0.4062, 0.4971, 0.2115, 0.7395, 0.3435, 0.7668, 0.0483, + 0.5664, 0.3129, 0.6256, 0.8422, 0.3738, 0.9113, 0.9699, + 0.1269, 0.5831, 0.7057, 0.5476, 0.8736, 0.1715, 0.8554, + 0.6440, 0.3451, 0.7077, 0.7376, 0.2649, 0.0555, 0.8163, + 0.4821, 0.9663, 0.5832, 0.4287, 0.7069, 0.0735, 0.3079, + 0.2465, 0.7439, 
0.6424, 0.8806, 0.4530, 0.9305, 0.3727, + 0.8454, 0.1674, 0.5325, 0.3888, 0.2291, 0.9439, 0.6687, + 0.2074, 0.6729, 0.7995, 0.0249, 0.5850, 0.7793, 0.7360, + 0.6126, 0.3687, 0.9493, 0.4428, 0.3410, 0.0082, 0.0410, + 0.5119, 0.4350, 0.5573, 0.2804, 0.2848, 0.0306, 0.1391, + 0.5622, 0.2371, 0.0409, 0.0900, 0.0825, 0.8157, 0.6483, + 0.1039, 0.6947, 0.7612, 0.9127, 0.1372, 0.3169, 0.3188, + 0.3406, 0.5060, 0.7803, 0.5213, 0.2755, 0.9735, 0.2911, + 0.6679, 0.7638, 0.0645, 0.9604, 0.3757, 0.4407, 0.1906, + 0.4805, 0.4010, 0.9644, 0.1921, 0.5615, 0.6663, 0.9910, + 0.9097, 0.7742, 0.5486, 0.5683, 0.2933, 0.4117, 0.0064, + 0.6771, 0.9390, 0.1219, 0.5303, 0.7987, 0.8455, 0.8765, + 0.1241, 0.8949, 0.9586, 0.0622, 0.1831, 0.7818, 0.2523, + 0.5500, 0.2791, 0.2206, 0.1956, 0.4110, 0.2739, 0.4866, + 0.3773, 0.3999, 0.7364, 0.1098, 0.5864, 0.5411, 0.9857, + 0.1717, 0.9912, 0.7450, 0.5886, 0.5535, 0.6766, 0.5246, + 0.9446, 0.8342, 0.7633, 0.4168, 0.4259, 0.9347, 0.2694, + 0.7192, 0.7050, 0.9339, 0.5077, 0.3755, 0.6614, 0.2549, + 0.5840, 0.0365, 0.7121, 0.6359, 0.6419, 0.2092, 0.7709, + 0.8862, 0.2510, 0.2745, 0.4203, 0.8233, 0.4378, 0.6058, + 0.5718, 0.8337, 0.1651, 0.0327, 0.5763, 0.3080, 0.5720, + 0.3553, 0.2812, 0.2201, 0.7680, 0.3079, 0.4795, 0.5404, + 0.4205, 0.9427, 0.0966, 0.8312, 0.4399, 0.6358, 0.8885, + 0.3163, 0.3591, 0.7752, 0.4171, 0.4265, 0.7445, 0.3830, + 0.9671, 0.2663, 0.8282, 0.8781, 1.0000, 0.9762, 0.1062, + 0.3058, 0.0733, 0.4585, 0.7503, 0.5960, 0.3631, 0.8713, + 0.2833, 0.4733, 0.5447, 0.6830, 0.5025, 0.5183, 0.7402, + 0.2707, 0.1974, 0.6706, 0.2214, 0.0703, 0.4414, 0.2360, + 0.3173, 0.7586, 0.2291, 0.6614, 0.4310, 0.6851, 0.9765, + 0.9718, 0.0778, 0.5948, 0.0843, 0.5802, 0.1306, 0.1533, + 0.6938, 0.1940, 0.8610, 0.2390, 0.3822, 0.1327, 0.0735, + 0.7849, 0.4796, 0.4999, 0.4115, 0.3060, 0.4726, 0.6247, + 0.8012, 0.0909, 0.7773, 0.9442, 0.9270, 0.4482, 0.1332, + 0.5719, 0.8490, 0.4105, 0.6259, 0.4848, 0.4339, 0.6376, + 0.5020, 0.0343, 0.6299, 0.9106, 0.2726, 0.2144, 0.4801, + 0.3900, 0.2268, 0.3954, 0.5511, 0.9541, 0.6689, 0.7565, + 0.8568, 0.1411, 0.2296, 0.0901, 0.3838, 0.0465, 0.0961, + 0.2749, 0.1824, 0.3126, 0.4258, 0.5110, 0.7600, 0.0827, + 0.2856, 0.6543, 0.1508, 0.9099, 0.5977, 0.9639, 0.5828, + 0.0969, 0.2553, 0.2380, 0.3132, 0.5572, 0.5787, 0.3286, + 0.4692, 0.9633, 0.5399, 0.9158, 0.9140, 0.1522, 0.1673, + 0.0179, 0.4893, 0.3235, 0.3317, 0.6713, 0.7343, 0.2321, + 0.5042, 0.0815, 0.3180, 0.7584, 0.0130, 0.8022, 0.2554, + 0.0738, 0.8126, 0.7482, 0.6810, 0.6998, 0.9080, 0.3089, + 0.3657, 0.5456, 0.9525, 0.8166, 0.7800, 0.6155, 0.4805, + 0.4189, 0.3227, 0.4761, 0.2254, 0.3856, 0.6332, 0.2902, + 0.2857, 0.5744, 0.5469, 0.8609, 0.1707, 0.6860, 0.6534, + 0.2773, 0.3895, 0.9354, 0.4097, 0.0726, 0.1694, 0.8590, + 0.4472, 0.4979, 0.8852, 0.4400, 0.0803, 0.0729, 0.9684, + 0.5574, 0.7015, 0.7603, 0.4011, 0.6658, 0.0143, 0.4998, + 0.6113, 0.2100, 0.2450, 0.6712, 0.7047, 0.6507, 0.3094, + 0.6189, 0.3076, 0.8045, 0.9364, 0.3982, 0.4953, 0.9166, + 0.4947, 0.9033, 0.8260, 0.9963, 0.3306, 0.6814, 0.6560, + 0.9614, 0.0061, 0.4968, 0.9920, 0.6320, 0.8581, 0.3375, + 0.0994, 0.6088, 0.3018, 0.9916, 0.5871, 0.8456, 0.4310, + 0.8181, 0.7211, 0.5992, 0.5196, 0.2119, 0.4874, 0.8059, + 0.7799, 0.1034, 0.7507, 0.8051, 0.3035, 0.4783, 0.5497, + 0.6194, 0.4494, 0.3941, 0.6812, 0.6161, 0.5571, 0.5572, + 0.0882, 0.2507, 0.9171, 0.2793, 0.1722, 0.6964, 0.6546, + 0.3702, 0.3910, 0.1051, 0.7280, 0.0573, 0.2636, 0.5637, + 0.7734, 0.9886, 0.7615, 0.3922, 0.2007, 0.0622, 0.5015, + 0.5213, 0.7405, 0.5210, 0.1540, 
0.3407, 0.7068, 0.5817, + 0.5778, 0.0407, 0.5899, 0.1143, 0.6579, 0.8299, 0.0302, + 0.9172, 0.6358, 0.8428, 0.3201, 0.5864, 0.2069, 0.5762, + 0.2219, 0.7409, 0.0023, 0.7926, 0.5191, 0.0575, 0.6901, + 0.6262, 0.4344, 0.4915, 0.5826, 0.3205, 0.5526, 0.9569, + 0.6177, 0.4151, 0.6460, 0.5195, 0.4963, 0.7361, 0.1505, + 0.1704, 0.6601, 0.7841, 0.3539, 0.0877, 0.5589, 0.4459, + 0.1542, 0.4534, 0.9926, 0.0739, 0.6790, 0.2931, 0.1396, + 0.0068, 0.4048, 0.7593, 0.6175, 0.4181, 0.8310, 0.0178, + 0.0500, 0.2652, 0.2672, 0.8470, 0.2584, 0.7725, 0.0889, + 0.4215, 0.8721, 0.5689, 0.5291, 0.7033, 0.6136, 0.0565, + 0.5719, 0.8737, 0.2977, 0.7389, 0.1730, 0.4434, 0.1990, + 0.8990, 0.8550, 0.5627, 0.2113, 0.9593, 0.2781, 0.7762, + 0.4400, 0.2536, 0.0770, 0.9680, 0.2853, 0.2809, 0.3060, + 0.2553, 0.8622, 0.4694, 0.2729, 0.2321, 0.1356, 0.9008, + 0.1316, 0.4824, 0.3215, 0.8023, 0.8462, 0.4805, 0.7955, + 0.5352, 0.8455, 0.5927, 0.8112, 0.7726, 0.1017, 0.9079, + 0.5417, 0.6333, 0.5028, 0.4114, 0.1608, 0.4616, 0.9006, + 0.8264, 0.1019, 0.3966, 0.4288, 0.6316, 0.9100]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.4792, 0.1783, 0.2803, ..., 0.3679, 0.7916, 0.5162]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 2.6618854999542236 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '245084', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.43880581855774} + +tensor(indices=tensor([[ 75, 2283, 4386, ..., 4914, 8978, 4108], + [5864, 1653, 1720, ..., 6549, 6838, 7567]]), + values=tensor([7.3271e-01, 5.4680e-01, 1.5817e-01, 6.9331e-01, + 1.1846e-01, 1.3350e-01, 8.8129e-01, 4.5841e-01, + 8.2483e-01, 1.2462e-01, 6.0800e-01, 9.7921e-01, + 1.4789e-01, 6.8079e-01, 5.4726e-01, 2.3060e-01, + 6.3382e-01, 4.7766e-01, 7.1208e-01, 8.9536e-02, + 7.4618e-01, 6.5466e-01, 1.7416e-01, 1.1103e-01, + 4.4643e-01, 2.5366e-01, 4.4219e-01, 6.5522e-01, + 6.1001e-01, 3.5476e-02, 3.5495e-01, 4.8603e-01, + 1.5263e-01, 4.5825e-01, 3.4950e-01, 3.0839e-01, + 4.7679e-01, 2.2001e-01, 8.9524e-01, 4.8945e-01, + 7.1018e-01, 2.2409e-01, 6.3635e-01, 4.1382e-02, + 9.5898e-01, 2.7701e-01, 2.1051e-01, 7.0671e-01, + 2.0354e-01, 4.7440e-01, 7.9234e-01, 4.1302e-01, + 3.2418e-01, 1.6809e-01, 7.6865e-01, 7.6164e-01, + 6.7468e-01, 5.9160e-01, 2.9066e-01, 2.1660e-01, + 2.8635e-01, 4.5920e-01, 8.2088e-01, 8.4218e-01, + 5.8024e-01, 8.4741e-01, 5.2959e-01, 4.5521e-01, + 6.4944e-01, 1.0143e-02, 2.7338e-01, 9.8354e-03, + 5.4528e-01, 7.7215e-01, 4.5431e-01, 4.0141e-01, + 4.4784e-01, 3.8317e-01, 7.8518e-01, 7.8959e-01, + 5.1793e-01, 4.5287e-01, 1.5222e-01, 5.8794e-01, + 5.6619e-01, 2.4687e-01, 7.5138e-01, 9.0886e-01, + 8.0388e-02, 3.1824e-01, 7.2819e-01, 9.0560e-01, + 9.4639e-01, 8.4041e-01, 5.0709e-01, 6.0390e-01, + 7.8724e-01, 9.5523e-01, 3.9874e-01, 9.8421e-01, + 7.7028e-01, 7.2449e-01, 5.6752e-01, 5.3787e-01, + 6.7412e-01, 8.9320e-01, 3.3833e-01, 9.9498e-01, + 1.9607e-01, 6.0280e-01, 2.4460e-01, 9.6435e-01, + 2.0441e-01, 8.2772e-01, 1.2533e-01, 3.3767e-01, + 5.0446e-01, 6.8403e-01, 5.4899e-01, 8.5785e-02, + 6.8785e-01, 6.2367e-01, 9.1938e-01, 5.0608e-01, + 9.7173e-01, 7.6296e-01, 1.0997e-01, 9.8776e-01, + 
3.5418e-02, 7.2150e-01, 8.2975e-01, 6.3764e-02, + 5.8874e-01, 5.1752e-02, 5.9220e-01, 7.5572e-01, + 9.3355e-01, 9.9567e-01, 7.2881e-02, 9.4809e-01, + 6.6658e-01, 6.8606e-01, 9.1774e-03, 5.2742e-01, + 9.1452e-01, 4.9707e-01, 2.0530e-01, 8.2976e-01, + 2.5918e-02, 2.1205e-01, 2.3545e-01, 5.5144e-01, + 5.8130e-01, 9.7222e-01, 4.0536e-01, 4.6538e-01, + 6.8799e-02, 5.6699e-01, 6.9826e-01, 3.6998e-01, + 8.6577e-01, 8.4580e-01, 1.3178e-02, 8.5368e-01, + 5.5168e-01, 1.6175e-01, 1.8090e-01, 6.6854e-01, + 3.6887e-02, 2.1382e-01, 2.0327e-01, 5.1918e-02, + 7.1541e-03, 7.2048e-01, 4.3021e-01, 5.7585e-01, + 1.9144e-01, 4.2284e-01, 4.6674e-01, 9.4400e-01, + 3.8877e-01, 1.9133e-01, 7.5804e-01, 5.2758e-01, + 3.5542e-01, 5.0573e-01, 6.3177e-01, 8.4992e-01, + 7.9636e-01, 4.1913e-01, 5.7553e-01, 3.8064e-01, + 6.5808e-01, 4.5062e-01, 5.4069e-01, 4.5458e-01, + 6.2183e-01, 9.1873e-01, 8.1379e-01, 8.5159e-01, + 1.2224e-01, 9.8947e-01, 9.2314e-02, 7.3464e-02, + 3.2238e-01, 4.5188e-01, 7.7785e-01, 5.8015e-01, + 2.8869e-01, 3.4127e-01, 2.6761e-01, 9.6772e-01, + 5.5199e-01, 6.5145e-01, 1.4601e-01, 1.1511e-01, + 4.8640e-01, 9.0314e-01, 4.3423e-01, 8.7704e-01, + 3.5066e-01, 6.2618e-01, 1.6390e-01, 7.2149e-01, + 6.9213e-02, 6.1452e-01, 6.6724e-01, 6.0671e-01, + 8.9952e-01, 3.1951e-01, 2.2969e-01, 4.9852e-01, + 5.5602e-01, 6.2980e-01, 2.1961e-01, 1.9928e-01, + 8.6844e-01, 1.0758e-01, 9.8622e-01, 7.0191e-01, + 9.4400e-01, 3.5167e-01, 9.9127e-01, 6.1739e-01, + 8.7217e-01, 2.9334e-01, 1.2226e-01, 9.1248e-01, + 8.6220e-01, 5.6810e-01, 6.8955e-01, 7.8186e-01, + 3.6459e-01, 1.2297e-01, 9.8556e-01, 3.2692e-01, + 2.2346e-01, 2.4662e-01, 5.7337e-01, 7.8042e-01, + 2.2907e-01, 7.8836e-01, 1.4088e-01, 3.8740e-01, + 9.1388e-01, 5.7935e-01, 2.0861e-02, 8.1530e-01, + 7.9710e-01, 4.4029e-01, 1.0658e-01, 9.5761e-01, + 2.8864e-01, 3.9947e-01, 5.4107e-01, 2.6990e-01, + 8.8545e-01, 5.1718e-01, 4.6133e-01, 9.3391e-02, + 8.6382e-01, 7.1517e-01, 3.2465e-02, 7.5054e-01, + 9.7682e-01, 8.1731e-01, 7.8329e-01, 2.5542e-01, + 6.1717e-01, 3.1796e-01, 1.4612e-01, 4.1871e-01, + 7.1622e-01, 2.8065e-01, 6.2769e-01, 3.6860e-01, + 8.8004e-02, 9.2029e-01, 1.9003e-01, 4.7738e-01, + 3.4078e-01, 6.8289e-01, 5.3525e-01, 7.5292e-01, + 1.2578e-01, 4.5091e-01, 9.8752e-01, 3.4600e-01, + 6.9098e-01, 3.9832e-03, 3.1848e-01, 7.6371e-01, + 6.6326e-01, 9.8863e-01, 3.1192e-01, 7.1441e-01, + 2.2649e-01, 6.8873e-01, 5.1553e-01, 6.4627e-02, + 8.0703e-01, 4.3408e-01, 5.5619e-01, 3.3691e-01, + 5.1261e-01, 4.5326e-02, 6.8718e-01, 4.0987e-01, + 6.6507e-02, 5.1556e-01, 6.8097e-01, 2.6586e-01, + 7.1592e-01, 6.2343e-01, 5.7058e-01, 3.5484e-01, + 4.4529e-01, 6.6338e-01, 3.5208e-01, 2.5696e-01, + 2.4915e-01, 2.6818e-01, 2.1290e-01, 8.0998e-01, + 7.3676e-01, 8.7054e-01, 1.3324e-01, 1.4124e-01, + 4.6535e-01, 4.6796e-01, 9.4269e-01, 7.5364e-01, + 5.1780e-01, 3.6524e-02, 9.1992e-01, 5.3479e-01, + 9.0948e-01, 4.4646e-01, 1.6443e-01, 1.5041e-01, + 2.4518e-01, 8.4823e-01, 6.7005e-01, 7.1536e-01, + 9.9284e-01, 1.5132e-01, 7.1916e-01, 2.7753e-01, + 6.4226e-01, 4.5892e-01, 4.2318e-01, 5.7719e-01, + 2.9804e-01, 5.7249e-01, 6.7936e-01, 5.2128e-01, + 7.4381e-01, 2.4777e-02, 7.3096e-02, 9.4852e-01, + 5.3554e-01, 8.2975e-01, 2.6035e-02, 5.2134e-03, + 8.9467e-01, 9.3599e-01, 2.1152e-02, 2.2564e-02, + 1.2804e-02, 8.4371e-01, 8.5011e-02, 9.3834e-01, + 9.8269e-01, 8.8754e-01, 4.4519e-01, 3.2414e-01, + 4.8486e-03, 8.4570e-01, 1.9598e-01, 5.6008e-01, + 6.7285e-01, 5.8782e-01, 1.0868e-01, 4.4946e-02, + 2.9140e-01, 3.8100e-02, 7.5123e-01, 7.3014e-01, + 5.6542e-01, 7.6369e-01, 1.3236e-01, 5.1384e-01, + 
1.9924e-01, 3.5985e-01, 7.1673e-01, 2.6708e-01, + 9.1368e-01, 9.4055e-01, 8.8579e-01, 2.0018e-01, + 5.2963e-01, 7.5478e-01, 7.5532e-01, 1.0469e-02, + 2.1773e-01, 1.9842e-01, 1.9099e-01, 7.8952e-01, + 3.5206e-01, 1.8674e-01, 7.8673e-02, 4.9686e-01, + 2.6533e-01, 7.8752e-01, 4.0296e-01, 4.6912e-01, + 7.3044e-01, 9.3243e-01, 8.4926e-01, 8.6982e-01, + 1.4356e-01, 6.2410e-02, 6.2664e-01, 4.0935e-01, + 8.5079e-01, 3.0452e-01, 9.4226e-01, 5.0024e-01, + 3.8373e-02, 8.7342e-01, 1.4167e-01, 8.9646e-01, + 1.5678e-01, 7.7893e-01, 9.8193e-01, 9.1370e-01, + 2.0990e-01, 8.7695e-03, 9.2810e-01, 7.5912e-01, + 2.7285e-01, 6.2536e-01, 9.5261e-01, 2.4818e-01, + 4.7462e-01, 2.0497e-01, 8.6789e-01, 4.4037e-01, + 9.5126e-01, 9.1803e-01, 4.2632e-01, 2.2323e-01, + 1.2880e-02, 3.9843e-01, 2.8640e-01, 4.8695e-02, + 7.4227e-01, 1.7156e-01, 9.6676e-01, 4.4121e-01, + 6.6481e-01, 8.0715e-01, 3.1278e-01, 4.2137e-01, + 5.6372e-02, 7.2216e-01, 1.6298e-01, 6.9417e-02, + 9.5364e-02, 4.8961e-01, 9.4112e-01, 3.2162e-01, + 9.4008e-01, 6.3534e-01, 2.1342e-01, 9.7926e-02, + 9.2778e-01, 9.1802e-01, 6.7816e-02, 2.0407e-01, + 1.7389e-01, 1.8130e-01, 5.8292e-02, 8.3574e-01, + 5.7966e-01, 2.9410e-01, 7.6376e-01, 5.6396e-01, + 1.4521e-01, 8.8739e-01, 2.9351e-01, 8.0634e-01, + 6.1902e-01, 2.0047e-01, 1.8233e-01, 7.8520e-01, + 4.9119e-01, 4.7519e-01, 4.2019e-01, 6.0764e-01, + 3.7731e-01, 4.2470e-01, 9.1418e-01, 6.9032e-01, + 3.0260e-01, 8.6615e-01, 5.2264e-01, 9.0066e-01, + 1.9684e-01, 9.0541e-01, 9.0561e-01, 7.4386e-01, + 3.2478e-01, 6.3022e-01, 3.5306e-01, 6.1907e-01, + 1.9643e-01, 7.0280e-01, 9.3979e-01, 7.7306e-01, + 8.9292e-01, 6.4857e-02, 4.0880e-01, 3.1019e-01, + 2.0846e-01, 9.1245e-01, 5.9031e-01, 7.1376e-01, + 9.3256e-01, 9.3528e-01, 2.5866e-01, 9.5344e-01, + 9.2587e-02, 1.0317e-01, 1.5635e-01, 1.9409e-01, + 6.1521e-01, 3.8998e-01, 9.3360e-01, 5.4130e-01, + 9.4630e-01, 8.0619e-01, 6.8013e-01, 1.8101e-01, + 3.6336e-01, 3.6742e-01, 7.9749e-01, 1.4716e-01, + 5.2754e-01, 9.7110e-01, 9.3345e-01, 1.3102e-01, + 9.2571e-02, 2.0029e-01, 4.9620e-02, 5.4227e-01, + 9.5542e-01, 1.7904e-01, 5.2160e-01, 7.2533e-01, + 6.0648e-01, 5.7805e-01, 5.5186e-01, 2.4584e-01, + 2.5640e-01, 1.7564e-01, 7.9514e-01, 2.2396e-01, + 2.2121e-01, 5.7066e-01, 8.9176e-01, 3.3146e-01, + 1.6462e-01, 9.6012e-01, 6.0973e-01, 9.6240e-01, + 3.6285e-01, 5.3296e-01, 1.7822e-01, 1.1424e-01, + 7.4167e-01, 7.3771e-01, 1.1519e-01, 6.8285e-01, + 2.8223e-01, 9.3536e-01, 7.8112e-02, 8.8160e-01, + 6.4345e-01, 8.9490e-01, 4.3891e-01, 2.6843e-01, + 4.6105e-01, 8.0506e-01, 9.5685e-01, 7.9447e-01, + 5.5921e-01, 8.7030e-01, 1.1418e-01, 4.8335e-02, + 2.5109e-01, 8.1222e-01, 4.4395e-01, 1.3019e-01, + 1.6179e-01, 5.6817e-01, 1.1052e-01, 4.8152e-01, + 1.1024e-01, 3.6472e-01, 2.9609e-01, 8.0821e-01, + 5.0663e-01, 6.9621e-01, 3.0743e-01, 6.8817e-01, + 4.0507e-01, 2.7281e-01, 6.2851e-02, 4.5213e-01, + 5.2897e-01, 6.4944e-01, 9.0074e-01, 4.0042e-02, + 6.2494e-01, 2.7121e-01, 8.1937e-01, 5.7857e-01, + 7.9806e-01, 6.1742e-02, 5.3846e-01, 3.7882e-03, + 1.4363e-01, 7.8629e-01, 2.1149e-01, 3.3974e-01, + 3.1958e-01, 7.3129e-01, 6.0494e-01, 7.4047e-01, + 9.3645e-02, 6.8597e-01, 7.1879e-03, 9.7646e-01, + 7.6769e-01, 3.5479e-01, 3.8585e-01, 2.2733e-01, + 7.3644e-01, 9.7299e-01, 6.8710e-01, 2.4891e-01, + 2.1313e-01, 8.8574e-01, 8.3366e-01, 4.8621e-01, + 6.7528e-01, 6.4473e-01, 6.0127e-01, 8.9233e-01, + 9.6549e-01, 7.5386e-01, 6.3094e-01, 7.7293e-01, + 7.3511e-01, 3.6913e-03, 3.2480e-01, 7.6469e-02, + 5.3294e-01, 4.4696e-01, 3.5921e-01, 7.6895e-01, + 6.2602e-01, 1.6343e-01, 8.8208e-02, 6.1782e-01, + 
2.1211e-01, 3.1611e-01, 4.9721e-01, 8.5662e-01, + 9.4304e-01, 1.8235e-01, 4.6522e-02, 8.0198e-01, + 4.9012e-01, 7.3981e-01, 3.8062e-01, 6.6518e-01, + 6.1653e-01, 1.2138e-01, 1.5314e-01, 9.8037e-01, + 1.3732e-01, 7.3530e-01, 1.7058e-01, 2.0511e-01, + 2.6577e-01, 5.4597e-01, 8.2296e-01, 8.4843e-01, + 4.4386e-01, 6.9015e-01, 3.7735e-01, 6.1638e-01, + 4.7162e-01, 3.0717e-01, 2.1313e-01, 6.9194e-01, + 4.6844e-01, 7.2511e-01, 1.6532e-01, 3.7467e-01, + 1.3861e-02, 1.5076e-01, 8.8503e-02, 5.3177e-01, + 4.4478e-01, 7.8659e-01, 3.8870e-01, 4.5148e-01, + 2.4644e-02, 5.5146e-01, 7.7138e-01, 9.7220e-01, + 6.3764e-01, 1.0847e-01, 9.4025e-01, 6.2251e-01, + 8.6928e-01, 7.1726e-01, 9.4684e-01, 6.6387e-01, + 7.4207e-01, 4.8619e-01, 9.7141e-01, 5.4412e-01, + 4.8484e-01, 5.3163e-01, 7.9819e-01, 7.9347e-01, + 2.6304e-01, 7.4783e-01, 6.9812e-01, 7.6335e-01, + 4.4840e-01, 3.7021e-01, 1.0800e-01, 2.2051e-01, + 3.0787e-01, 8.8401e-01, 4.8227e-01, 1.5634e-01, + 1.3736e-01, 5.0755e-01, 6.6429e-01, 1.0834e-01, + 2.0602e-01, 3.3324e-01, 6.2908e-02, 6.3601e-01, + 4.1417e-01, 7.9469e-01, 2.8448e-01, 1.5039e-02, + 6.6509e-01, 8.3419e-01, 3.3007e-01, 5.0814e-01, + 2.7307e-01, 6.4758e-01, 5.0298e-02, 9.6439e-01, + 1.1064e-02, 9.2389e-01, 5.8570e-01, 3.4857e-01, + 7.8140e-03, 4.4029e-01, 2.3853e-01, 6.6548e-01, + 7.3576e-01, 3.8732e-01, 6.3491e-01, 8.1114e-02, + 9.2334e-01, 5.6117e-01, 7.5407e-01, 5.4776e-01, + 5.1204e-01, 8.8201e-02, 2.0300e-01, 2.1369e-01, + 2.6030e-01, 2.1350e-01, 4.3720e-01, 9.1220e-01, + 2.5777e-01, 7.4324e-01, 7.1020e-04, 2.4908e-02, + 4.0045e-01, 4.9609e-01, 6.9434e-01, 5.8426e-02, + 8.2077e-01, 1.0926e-01, 2.6383e-01, 1.9642e-01, + 4.7797e-01, 1.1698e-01, 4.3008e-01, 8.2206e-01, + 2.5568e-01, 6.9996e-01, 3.7332e-01, 9.9500e-01, + 1.0494e-04, 3.1565e-01, 4.3279e-01, 3.4100e-01, + 9.5552e-01, 5.3357e-01, 4.6383e-01, 4.2149e-01, + 3.4867e-01, 1.2785e-01, 1.5832e-01, 2.5010e-01, + 4.8354e-01, 7.0320e-01, 6.8084e-01, 8.9369e-01, + 3.1654e-01, 4.0261e-01, 6.1809e-01, 1.9917e-01, + 1.7076e-01, 4.1920e-01, 6.9643e-01, 6.6234e-02, + 1.8190e-01, 2.8127e-01, 3.7760e-01, 8.2359e-01, + 8.7141e-01, 2.9213e-01, 3.4824e-01, 3.0912e-01, + 6.2830e-01, 6.5493e-01, 6.0422e-01, 8.9167e-01, + 5.2132e-01, 3.6001e-02, 1.7081e-01, 1.2396e-01, + 6.1209e-01, 8.7004e-02, 6.6043e-01, 2.2333e-01, + 5.1931e-02, 9.7944e-01, 3.5911e-01, 4.7203e-01, + 1.1878e-01, 1.0429e-01, 8.2495e-01, 5.6685e-01, + 7.2175e-01, 8.2411e-01, 6.6799e-01, 8.4697e-01, + 1.6486e-01, 8.9227e-01, 9.8843e-01, 3.4119e-02, + 3.3093e-01, 9.6444e-01, 9.8898e-01, 7.5176e-01, + 8.9913e-01, 5.1186e-01, 3.6440e-01, 7.1048e-02, + 2.8075e-01, 4.0937e-01, 4.2175e-01, 8.5431e-01, + 6.4081e-02, 8.8947e-01, 5.1664e-01, 2.2864e-01, + 3.5224e-01, 7.9346e-01, 3.6058e-01, 2.6759e-01, + 9.4336e-01, 1.8016e-01, 9.4158e-01, 3.2123e-02, + 3.5958e-01, 5.1494e-02, 1.5518e-01, 5.6965e-02, + 3.4751e-01, 2.9463e-01, 2.4497e-01, 6.2229e-01, + 2.3576e-01, 8.9947e-01, 4.3768e-01, 3.7762e-01, + 3.2372e-01, 4.1907e-01, 7.5155e-01, 1.6171e-01, + 1.7686e-01, 4.3287e-01, 7.3812e-01, 6.9623e-01, + 1.9684e-01, 6.7418e-01, 4.6148e-01, 9.9591e-01, + 7.0593e-01, 8.0186e-01, 9.1362e-01, 8.6659e-01, + 5.0452e-01, 3.3030e-01, 7.7476e-01, 5.5164e-01, + 9.3358e-01, 9.8860e-01, 9.3657e-01, 7.4082e-01, + 2.3944e-01, 3.6592e-01, 3.4026e-01, 1.7513e-02, + 5.7559e-01, 3.9447e-02, 4.3351e-01, 1.3694e-01, + 5.1229e-01, 4.5732e-01, 6.6092e-01, 9.9220e-01, + 8.2560e-01, 9.4070e-01, 3.7395e-01, 9.9231e-01, + 2.7295e-01, 1.4919e-01, 6.1311e-01, 2.9976e-01, + 1.0148e-01, 2.9291e-01, 1.4068e-01, 6.7935e-01, + 
1.5932e-01, 6.9052e-01, 2.9227e-02, 5.0253e-01, + 1.4331e-01, 3.7997e-01, 8.4284e-01, 1.7361e-02, + 4.9768e-01, 5.0386e-01, 2.8837e-01, 2.2671e-01, + 7.4528e-02, 3.8154e-01, 7.3463e-01, 6.1408e-01, + 3.3312e-01, 3.6890e-01, 7.8808e-01, 7.3319e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8274, 0.3562, 0.5731, ..., 0.7654, 0.1963, 0.3806]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.43880581855774 seconds + +tensor(indices=tensor([[ 75, 2283, 4386, ..., 4914, 8978, 4108], + [5864, 1653, 1720, ..., 6549, 6838, 7567]]), + values=tensor([7.3271e-01, 5.4680e-01, 1.5817e-01, 6.9331e-01, + 1.1846e-01, 1.3350e-01, 8.8129e-01, 4.5841e-01, + 8.2483e-01, 1.2462e-01, 6.0800e-01, 9.7921e-01, + 1.4789e-01, 6.8079e-01, 5.4726e-01, 2.3060e-01, + 6.3382e-01, 4.7766e-01, 7.1208e-01, 8.9536e-02, + 7.4618e-01, 6.5466e-01, 1.7416e-01, 1.1103e-01, + 4.4643e-01, 2.5366e-01, 4.4219e-01, 6.5522e-01, + 6.1001e-01, 3.5476e-02, 3.5495e-01, 4.8603e-01, + 1.5263e-01, 4.5825e-01, 3.4950e-01, 3.0839e-01, + 4.7679e-01, 2.2001e-01, 8.9524e-01, 4.8945e-01, + 7.1018e-01, 2.2409e-01, 6.3635e-01, 4.1382e-02, + 9.5898e-01, 2.7701e-01, 2.1051e-01, 7.0671e-01, + 2.0354e-01, 4.7440e-01, 7.9234e-01, 4.1302e-01, + 3.2418e-01, 1.6809e-01, 7.6865e-01, 7.6164e-01, + 6.7468e-01, 5.9160e-01, 2.9066e-01, 2.1660e-01, + 2.8635e-01, 4.5920e-01, 8.2088e-01, 8.4218e-01, + 5.8024e-01, 8.4741e-01, 5.2959e-01, 4.5521e-01, + 6.4944e-01, 1.0143e-02, 2.7338e-01, 9.8354e-03, + 5.4528e-01, 7.7215e-01, 4.5431e-01, 4.0141e-01, + 4.4784e-01, 3.8317e-01, 7.8518e-01, 7.8959e-01, + 5.1793e-01, 4.5287e-01, 1.5222e-01, 5.8794e-01, + 5.6619e-01, 2.4687e-01, 7.5138e-01, 9.0886e-01, + 8.0388e-02, 3.1824e-01, 7.2819e-01, 9.0560e-01, + 9.4639e-01, 8.4041e-01, 5.0709e-01, 6.0390e-01, + 7.8724e-01, 9.5523e-01, 3.9874e-01, 9.8421e-01, + 7.7028e-01, 7.2449e-01, 5.6752e-01, 5.3787e-01, + 6.7412e-01, 8.9320e-01, 3.3833e-01, 9.9498e-01, + 1.9607e-01, 6.0280e-01, 2.4460e-01, 9.6435e-01, + 2.0441e-01, 8.2772e-01, 1.2533e-01, 3.3767e-01, + 5.0446e-01, 6.8403e-01, 5.4899e-01, 8.5785e-02, + 6.8785e-01, 6.2367e-01, 9.1938e-01, 5.0608e-01, + 9.7173e-01, 7.6296e-01, 1.0997e-01, 9.8776e-01, + 3.5418e-02, 7.2150e-01, 8.2975e-01, 6.3764e-02, + 5.8874e-01, 5.1752e-02, 5.9220e-01, 7.5572e-01, + 9.3355e-01, 9.9567e-01, 7.2881e-02, 9.4809e-01, + 6.6658e-01, 6.8606e-01, 9.1774e-03, 5.2742e-01, + 9.1452e-01, 4.9707e-01, 2.0530e-01, 8.2976e-01, + 2.5918e-02, 2.1205e-01, 2.3545e-01, 5.5144e-01, + 5.8130e-01, 9.7222e-01, 4.0536e-01, 4.6538e-01, + 6.8799e-02, 5.6699e-01, 6.9826e-01, 3.6998e-01, + 8.6577e-01, 8.4580e-01, 1.3178e-02, 8.5368e-01, + 5.5168e-01, 1.6175e-01, 1.8090e-01, 6.6854e-01, + 3.6887e-02, 2.1382e-01, 2.0327e-01, 5.1918e-02, + 7.1541e-03, 7.2048e-01, 4.3021e-01, 5.7585e-01, + 1.9144e-01, 4.2284e-01, 4.6674e-01, 9.4400e-01, + 3.8877e-01, 1.9133e-01, 7.5804e-01, 5.2758e-01, + 3.5542e-01, 5.0573e-01, 6.3177e-01, 8.4992e-01, + 7.9636e-01, 4.1913e-01, 5.7553e-01, 3.8064e-01, + 6.5808e-01, 4.5062e-01, 5.4069e-01, 4.5458e-01, + 6.2183e-01, 9.1873e-01, 8.1379e-01, 8.5159e-01, + 1.2224e-01, 9.8947e-01, 9.2314e-02, 7.3464e-02, + 3.2238e-01, 4.5188e-01, 7.7785e-01, 5.8015e-01, + 2.8869e-01, 3.4127e-01, 2.6761e-01, 9.6772e-01, + 5.5199e-01, 6.5145e-01, 1.4601e-01, 1.1511e-01, + 4.8640e-01, 9.0314e-01, 4.3423e-01, 8.7704e-01, + 3.5066e-01, 6.2618e-01, 1.6390e-01, 7.2149e-01, + 6.9213e-02, 6.1452e-01, 6.6724e-01, 6.0671e-01, + 8.9952e-01, 3.1951e-01, 
2.2969e-01, 4.9852e-01, + 5.5602e-01, 6.2980e-01, 2.1961e-01, 1.9928e-01, + 8.6844e-01, 1.0758e-01, 9.8622e-01, 7.0191e-01, + 9.4400e-01, 3.5167e-01, 9.9127e-01, 6.1739e-01, + 8.7217e-01, 2.9334e-01, 1.2226e-01, 9.1248e-01, + 8.6220e-01, 5.6810e-01, 6.8955e-01, 7.8186e-01, + 3.6459e-01, 1.2297e-01, 9.8556e-01, 3.2692e-01, + 2.2346e-01, 2.4662e-01, 5.7337e-01, 7.8042e-01, + 2.2907e-01, 7.8836e-01, 1.4088e-01, 3.8740e-01, + 9.1388e-01, 5.7935e-01, 2.0861e-02, 8.1530e-01, + 7.9710e-01, 4.4029e-01, 1.0658e-01, 9.5761e-01, + 2.8864e-01, 3.9947e-01, 5.4107e-01, 2.6990e-01, + 8.8545e-01, 5.1718e-01, 4.6133e-01, 9.3391e-02, + 8.6382e-01, 7.1517e-01, 3.2465e-02, 7.5054e-01, + 9.7682e-01, 8.1731e-01, 7.8329e-01, 2.5542e-01, + 6.1717e-01, 3.1796e-01, 1.4612e-01, 4.1871e-01, + 7.1622e-01, 2.8065e-01, 6.2769e-01, 3.6860e-01, + 8.8004e-02, 9.2029e-01, 1.9003e-01, 4.7738e-01, + 3.4078e-01, 6.8289e-01, 5.3525e-01, 7.5292e-01, + 1.2578e-01, 4.5091e-01, 9.8752e-01, 3.4600e-01, + 6.9098e-01, 3.9832e-03, 3.1848e-01, 7.6371e-01, + 6.6326e-01, 9.8863e-01, 3.1192e-01, 7.1441e-01, + 2.2649e-01, 6.8873e-01, 5.1553e-01, 6.4627e-02, + 8.0703e-01, 4.3408e-01, 5.5619e-01, 3.3691e-01, + 5.1261e-01, 4.5326e-02, 6.8718e-01, 4.0987e-01, + 6.6507e-02, 5.1556e-01, 6.8097e-01, 2.6586e-01, + 7.1592e-01, 6.2343e-01, 5.7058e-01, 3.5484e-01, + 4.4529e-01, 6.6338e-01, 3.5208e-01, 2.5696e-01, + 2.4915e-01, 2.6818e-01, 2.1290e-01, 8.0998e-01, + 7.3676e-01, 8.7054e-01, 1.3324e-01, 1.4124e-01, + 4.6535e-01, 4.6796e-01, 9.4269e-01, 7.5364e-01, + 5.1780e-01, 3.6524e-02, 9.1992e-01, 5.3479e-01, + 9.0948e-01, 4.4646e-01, 1.6443e-01, 1.5041e-01, + 2.4518e-01, 8.4823e-01, 6.7005e-01, 7.1536e-01, + 9.9284e-01, 1.5132e-01, 7.1916e-01, 2.7753e-01, + 6.4226e-01, 4.5892e-01, 4.2318e-01, 5.7719e-01, + 2.9804e-01, 5.7249e-01, 6.7936e-01, 5.2128e-01, + 7.4381e-01, 2.4777e-02, 7.3096e-02, 9.4852e-01, + 5.3554e-01, 8.2975e-01, 2.6035e-02, 5.2134e-03, + 8.9467e-01, 9.3599e-01, 2.1152e-02, 2.2564e-02, + 1.2804e-02, 8.4371e-01, 8.5011e-02, 9.3834e-01, + 9.8269e-01, 8.8754e-01, 4.4519e-01, 3.2414e-01, + 4.8486e-03, 8.4570e-01, 1.9598e-01, 5.6008e-01, + 6.7285e-01, 5.8782e-01, 1.0868e-01, 4.4946e-02, + 2.9140e-01, 3.8100e-02, 7.5123e-01, 7.3014e-01, + 5.6542e-01, 7.6369e-01, 1.3236e-01, 5.1384e-01, + 1.9924e-01, 3.5985e-01, 7.1673e-01, 2.6708e-01, + 9.1368e-01, 9.4055e-01, 8.8579e-01, 2.0018e-01, + 5.2963e-01, 7.5478e-01, 7.5532e-01, 1.0469e-02, + 2.1773e-01, 1.9842e-01, 1.9099e-01, 7.8952e-01, + 3.5206e-01, 1.8674e-01, 7.8673e-02, 4.9686e-01, + 2.6533e-01, 7.8752e-01, 4.0296e-01, 4.6912e-01, + 7.3044e-01, 9.3243e-01, 8.4926e-01, 8.6982e-01, + 1.4356e-01, 6.2410e-02, 6.2664e-01, 4.0935e-01, + 8.5079e-01, 3.0452e-01, 9.4226e-01, 5.0024e-01, + 3.8373e-02, 8.7342e-01, 1.4167e-01, 8.9646e-01, + 1.5678e-01, 7.7893e-01, 9.8193e-01, 9.1370e-01, + 2.0990e-01, 8.7695e-03, 9.2810e-01, 7.5912e-01, + 2.7285e-01, 6.2536e-01, 9.5261e-01, 2.4818e-01, + 4.7462e-01, 2.0497e-01, 8.6789e-01, 4.4037e-01, + 9.5126e-01, 9.1803e-01, 4.2632e-01, 2.2323e-01, + 1.2880e-02, 3.9843e-01, 2.8640e-01, 4.8695e-02, + 7.4227e-01, 1.7156e-01, 9.6676e-01, 4.4121e-01, + 6.6481e-01, 8.0715e-01, 3.1278e-01, 4.2137e-01, + 5.6372e-02, 7.2216e-01, 1.6298e-01, 6.9417e-02, + 9.5364e-02, 4.8961e-01, 9.4112e-01, 3.2162e-01, + 9.4008e-01, 6.3534e-01, 2.1342e-01, 9.7926e-02, + 9.2778e-01, 9.1802e-01, 6.7816e-02, 2.0407e-01, + 1.7389e-01, 1.8130e-01, 5.8292e-02, 8.3574e-01, + 5.7966e-01, 2.9410e-01, 7.6376e-01, 5.6396e-01, + 1.4521e-01, 8.8739e-01, 2.9351e-01, 8.0634e-01, + 6.1902e-01, 2.0047e-01, 
1.8233e-01, 7.8520e-01, + 4.9119e-01, 4.7519e-01, 4.2019e-01, 6.0764e-01, + 3.7731e-01, 4.2470e-01, 9.1418e-01, 6.9032e-01, + 3.0260e-01, 8.6615e-01, 5.2264e-01, 9.0066e-01, + 1.9684e-01, 9.0541e-01, 9.0561e-01, 7.4386e-01, + 3.2478e-01, 6.3022e-01, 3.5306e-01, 6.1907e-01, + 1.9643e-01, 7.0280e-01, 9.3979e-01, 7.7306e-01, + 8.9292e-01, 6.4857e-02, 4.0880e-01, 3.1019e-01, + 2.0846e-01, 9.1245e-01, 5.9031e-01, 7.1376e-01, + 9.3256e-01, 9.3528e-01, 2.5866e-01, 9.5344e-01, + 9.2587e-02, 1.0317e-01, 1.5635e-01, 1.9409e-01, + 6.1521e-01, 3.8998e-01, 9.3360e-01, 5.4130e-01, + 9.4630e-01, 8.0619e-01, 6.8013e-01, 1.8101e-01, + 3.6336e-01, 3.6742e-01, 7.9749e-01, 1.4716e-01, + 5.2754e-01, 9.7110e-01, 9.3345e-01, 1.3102e-01, + 9.2571e-02, 2.0029e-01, 4.9620e-02, 5.4227e-01, + 9.5542e-01, 1.7904e-01, 5.2160e-01, 7.2533e-01, + 6.0648e-01, 5.7805e-01, 5.5186e-01, 2.4584e-01, + 2.5640e-01, 1.7564e-01, 7.9514e-01, 2.2396e-01, + 2.2121e-01, 5.7066e-01, 8.9176e-01, 3.3146e-01, + 1.6462e-01, 9.6012e-01, 6.0973e-01, 9.6240e-01, + 3.6285e-01, 5.3296e-01, 1.7822e-01, 1.1424e-01, + 7.4167e-01, 7.3771e-01, 1.1519e-01, 6.8285e-01, + 2.8223e-01, 9.3536e-01, 7.8112e-02, 8.8160e-01, + 6.4345e-01, 8.9490e-01, 4.3891e-01, 2.6843e-01, + 4.6105e-01, 8.0506e-01, 9.5685e-01, 7.9447e-01, + 5.5921e-01, 8.7030e-01, 1.1418e-01, 4.8335e-02, + 2.5109e-01, 8.1222e-01, 4.4395e-01, 1.3019e-01, + 1.6179e-01, 5.6817e-01, 1.1052e-01, 4.8152e-01, + 1.1024e-01, 3.6472e-01, 2.9609e-01, 8.0821e-01, + 5.0663e-01, 6.9621e-01, 3.0743e-01, 6.8817e-01, + 4.0507e-01, 2.7281e-01, 6.2851e-02, 4.5213e-01, + 5.2897e-01, 6.4944e-01, 9.0074e-01, 4.0042e-02, + 6.2494e-01, 2.7121e-01, 8.1937e-01, 5.7857e-01, + 7.9806e-01, 6.1742e-02, 5.3846e-01, 3.7882e-03, + 1.4363e-01, 7.8629e-01, 2.1149e-01, 3.3974e-01, + 3.1958e-01, 7.3129e-01, 6.0494e-01, 7.4047e-01, + 9.3645e-02, 6.8597e-01, 7.1879e-03, 9.7646e-01, + 7.6769e-01, 3.5479e-01, 3.8585e-01, 2.2733e-01, + 7.3644e-01, 9.7299e-01, 6.8710e-01, 2.4891e-01, + 2.1313e-01, 8.8574e-01, 8.3366e-01, 4.8621e-01, + 6.7528e-01, 6.4473e-01, 6.0127e-01, 8.9233e-01, + 9.6549e-01, 7.5386e-01, 6.3094e-01, 7.7293e-01, + 7.3511e-01, 3.6913e-03, 3.2480e-01, 7.6469e-02, + 5.3294e-01, 4.4696e-01, 3.5921e-01, 7.6895e-01, + 6.2602e-01, 1.6343e-01, 8.8208e-02, 6.1782e-01, + 2.1211e-01, 3.1611e-01, 4.9721e-01, 8.5662e-01, + 9.4304e-01, 1.8235e-01, 4.6522e-02, 8.0198e-01, + 4.9012e-01, 7.3981e-01, 3.8062e-01, 6.6518e-01, + 6.1653e-01, 1.2138e-01, 1.5314e-01, 9.8037e-01, + 1.3732e-01, 7.3530e-01, 1.7058e-01, 2.0511e-01, + 2.6577e-01, 5.4597e-01, 8.2296e-01, 8.4843e-01, + 4.4386e-01, 6.9015e-01, 3.7735e-01, 6.1638e-01, + 4.7162e-01, 3.0717e-01, 2.1313e-01, 6.9194e-01, + 4.6844e-01, 7.2511e-01, 1.6532e-01, 3.7467e-01, + 1.3861e-02, 1.5076e-01, 8.8503e-02, 5.3177e-01, + 4.4478e-01, 7.8659e-01, 3.8870e-01, 4.5148e-01, + 2.4644e-02, 5.5146e-01, 7.7138e-01, 9.7220e-01, + 6.3764e-01, 1.0847e-01, 9.4025e-01, 6.2251e-01, + 8.6928e-01, 7.1726e-01, 9.4684e-01, 6.6387e-01, + 7.4207e-01, 4.8619e-01, 9.7141e-01, 5.4412e-01, + 4.8484e-01, 5.3163e-01, 7.9819e-01, 7.9347e-01, + 2.6304e-01, 7.4783e-01, 6.9812e-01, 7.6335e-01, + 4.4840e-01, 3.7021e-01, 1.0800e-01, 2.2051e-01, + 3.0787e-01, 8.8401e-01, 4.8227e-01, 1.5634e-01, + 1.3736e-01, 5.0755e-01, 6.6429e-01, 1.0834e-01, + 2.0602e-01, 3.3324e-01, 6.2908e-02, 6.3601e-01, + 4.1417e-01, 7.9469e-01, 2.8448e-01, 1.5039e-02, + 6.6509e-01, 8.3419e-01, 3.3007e-01, 5.0814e-01, + 2.7307e-01, 6.4758e-01, 5.0298e-02, 9.6439e-01, + 1.1064e-02, 9.2389e-01, 5.8570e-01, 3.4857e-01, + 7.8140e-03, 4.4029e-01, 
2.3853e-01, 6.6548e-01, + 7.3576e-01, 3.8732e-01, 6.3491e-01, 8.1114e-02, + 9.2334e-01, 5.6117e-01, 7.5407e-01, 5.4776e-01, + 5.1204e-01, 8.8201e-02, 2.0300e-01, 2.1369e-01, + 2.6030e-01, 2.1350e-01, 4.3720e-01, 9.1220e-01, + 2.5777e-01, 7.4324e-01, 7.1020e-04, 2.4908e-02, + 4.0045e-01, 4.9609e-01, 6.9434e-01, 5.8426e-02, + 8.2077e-01, 1.0926e-01, 2.6383e-01, 1.9642e-01, + 4.7797e-01, 1.1698e-01, 4.3008e-01, 8.2206e-01, + 2.5568e-01, 6.9996e-01, 3.7332e-01, 9.9500e-01, + 1.0494e-04, 3.1565e-01, 4.3279e-01, 3.4100e-01, + 9.5552e-01, 5.3357e-01, 4.6383e-01, 4.2149e-01, + 3.4867e-01, 1.2785e-01, 1.5832e-01, 2.5010e-01, + 4.8354e-01, 7.0320e-01, 6.8084e-01, 8.9369e-01, + 3.1654e-01, 4.0261e-01, 6.1809e-01, 1.9917e-01, + 1.7076e-01, 4.1920e-01, 6.9643e-01, 6.6234e-02, + 1.8190e-01, 2.8127e-01, 3.7760e-01, 8.2359e-01, + 8.7141e-01, 2.9213e-01, 3.4824e-01, 3.0912e-01, + 6.2830e-01, 6.5493e-01, 6.0422e-01, 8.9167e-01, + 5.2132e-01, 3.6001e-02, 1.7081e-01, 1.2396e-01, + 6.1209e-01, 8.7004e-02, 6.6043e-01, 2.2333e-01, + 5.1931e-02, 9.7944e-01, 3.5911e-01, 4.7203e-01, + 1.1878e-01, 1.0429e-01, 8.2495e-01, 5.6685e-01, + 7.2175e-01, 8.2411e-01, 6.6799e-01, 8.4697e-01, + 1.6486e-01, 8.9227e-01, 9.8843e-01, 3.4119e-02, + 3.3093e-01, 9.6444e-01, 9.8898e-01, 7.5176e-01, + 8.9913e-01, 5.1186e-01, 3.6440e-01, 7.1048e-02, + 2.8075e-01, 4.0937e-01, 4.2175e-01, 8.5431e-01, + 6.4081e-02, 8.8947e-01, 5.1664e-01, 2.2864e-01, + 3.5224e-01, 7.9346e-01, 3.6058e-01, 2.6759e-01, + 9.4336e-01, 1.8016e-01, 9.4158e-01, 3.2123e-02, + 3.5958e-01, 5.1494e-02, 1.5518e-01, 5.6965e-02, + 3.4751e-01, 2.9463e-01, 2.4497e-01, 6.2229e-01, + 2.3576e-01, 8.9947e-01, 4.3768e-01, 3.7762e-01, + 3.2372e-01, 4.1907e-01, 7.5155e-01, 1.6171e-01, + 1.7686e-01, 4.3287e-01, 7.3812e-01, 6.9623e-01, + 1.9684e-01, 6.7418e-01, 4.6148e-01, 9.9591e-01, + 7.0593e-01, 8.0186e-01, 9.1362e-01, 8.6659e-01, + 5.0452e-01, 3.3030e-01, 7.7476e-01, 5.5164e-01, + 9.3358e-01, 9.8860e-01, 9.3657e-01, 7.4082e-01, + 2.3944e-01, 3.6592e-01, 3.4026e-01, 1.7513e-02, + 5.7559e-01, 3.9447e-02, 4.3351e-01, 1.3694e-01, + 5.1229e-01, 4.5732e-01, 6.6092e-01, 9.9220e-01, + 8.2560e-01, 9.4070e-01, 3.7395e-01, 9.9231e-01, + 2.7295e-01, 1.4919e-01, 6.1311e-01, 2.9976e-01, + 1.0148e-01, 2.9291e-01, 1.4068e-01, 6.7935e-01, + 1.5932e-01, 6.9052e-01, 2.9227e-02, 5.0253e-01, + 1.4331e-01, 3.7997e-01, 8.4284e-01, 1.7361e-02, + 4.9768e-01, 5.0386e-01, 2.8837e-01, 2.2671e-01, + 7.4528e-02, 3.8154e-01, 7.3463e-01, 6.1408e-01, + 3.3312e-01, 3.6890e-01, 7.8808e-01, 7.3319e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8274, 0.3562, 0.5731, ..., 0.7654, 0.1963, 0.3806]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.43880581855774 seconds + +[18.87, 18.88, 18.62, 18.53, 18.6, 18.99, 18.78, 18.79, 18.51, 19.26] +[54.17] +13.920597553253174 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 245084, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.43880581855774, 'TIME_S_1KI': 0.04259276745343531, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 754.0787694597244, 'W': 54.17} +[18.87, 18.88, 18.62, 18.53, 18.6, 18.99, 18.78, 18.79, 18.51, 19.26, 19.16, 18.68, 18.86, 19.0, 18.65, 18.55, 18.69, 18.64, 18.86, 18.53] +337.53999999999996 +16.877 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 245084, 'MATRIX_TYPE': 'synthetic', 
'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.43880581855774, 'TIME_S_1KI': 0.04259276745343531, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 754.0787694597244, 'W': 54.17, 'J_1KI': 3.076817619508921, 'W_1KI': 0.22102626038419482, 'W_D': 37.293000000000006, 'J_D': 519.1408445534707, 'W_D_1KI': 0.15216415596285357, 'J_D_1KI': 0.0006208653194939432} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..eed56bf --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 57307, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.488039493560791, "TIME_S_1KI": 0.18301498060552446, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 752.8469358444214, "W": 54.18, "J_1KI": 13.137085100326686, "W_1KI": 0.945434240145183, "W_D": 37.037000000000006, "J_D": 514.6399402523042, "W_D_1KI": 0.6462910290191426, "J_D_1KI": 0.01127769782084462} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..cd4ab9c --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.030483007431030273} + +tensor(indices=tensor([[9082, 5330, 1512, ..., 3284, 6824, 2716], + [4275, 6555, 5954, ..., 101, 7859, 2785]]), + values=tensor([0.6080, 0.6826, 0.1706, ..., 0.1752, 0.3853, 0.5537]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.0332, 0.8842, 0.7703, ..., 0.1727, 0.8378, 0.3817]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.030483007431030273 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '34445', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.311120271682739} + +tensor(indices=tensor([[9468, 3923, 6376, ..., 4943, 3935, 3556], + [3509, 9230, 3239, ..., 3460, 1571, 2853]]), + values=tensor([0.2308, 0.6081, 0.0151, ..., 0.5369, 0.1924, 0.6647]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.8348, 0.9189, 0.6141, ..., 0.2864, 0.8825, 0.6909]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 
5000 +Density: 5e-05 +Time: 6.311120271682739 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '57307', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.488039493560791} + +tensor(indices=tensor([[1357, 4226, 9041, ..., 7211, 2903, 1512], + [ 698, 8560, 1501, ..., 4840, 7181, 4483]]), + values=tensor([0.8865, 0.1975, 0.1253, ..., 0.2308, 0.7279, 0.8573]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.0766, 0.7259, 0.7366, ..., 0.7003, 0.3554, 0.0669]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.488039493560791 seconds + +tensor(indices=tensor([[1357, 4226, 9041, ..., 7211, 2903, 1512], + [ 698, 8560, 1501, ..., 4840, 7181, 4483]]), + values=tensor([0.8865, 0.1975, 0.1253, ..., 0.2308, 0.7279, 0.8573]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.0766, 0.7259, 0.7366, ..., 0.7003, 0.3554, 0.0669]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.488039493560791 seconds + +[18.99, 18.42, 18.65, 23.07, 18.88, 18.45, 19.01, 18.49, 18.82, 18.43] +[54.18] +13.895292282104492 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57307, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.488039493560791, 'TIME_S_1KI': 0.18301498060552446, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 752.8469358444214, 'W': 54.18} +[18.99, 18.42, 18.65, 23.07, 18.88, 18.45, 19.01, 18.49, 18.82, 18.43, 19.06, 18.66, 19.28, 18.51, 20.13, 18.47, 18.81, 19.02, 18.58, 18.74] +342.85999999999996 +17.142999999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57307, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.488039493560791, 'TIME_S_1KI': 0.18301498060552446, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 752.8469358444214, 'W': 54.18, 'J_1KI': 13.137085100326686, 'W_1KI': 0.945434240145183, 'W_D': 37.037000000000006, 'J_D': 514.6399402523042, 'W_D_1KI': 0.6462910290191426, 'J_D_1KI': 0.01127769782084462} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..4581c49 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 118.6995735168457, "TIME_S_1KI": 1186.995735168457, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6896.754456710815, "W": 53.6, "J_1KI": 68967.54456710815, "W_1KI": 536.0, "W_D": 36.755250000000004, "J_D": 4729.327131436945, "W_D_1KI": 
367.55250000000007, "J_D_1KI": 3675.525000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..5813911 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 118.6995735168457} + +tensor(indices=tensor([[ 54064, 250916, 302761, ..., 264257, 361767, 348907], + [274088, 489704, 318844, ..., 456716, 387863, 28111]]), + values=tensor([0.8185, 0.4924, 0.8014, ..., 0.7517, 0.3044, 0.7081]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6197, 0.0798, 0.2763, ..., 0.5470, 0.5269, 0.6980]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 118.6995735168457 seconds + +tensor(indices=tensor([[ 54064, 250916, 302761, ..., 264257, 361767, 348907], + [274088, 489704, 318844, ..., 456716, 387863, 28111]]), + values=tensor([0.8185, 0.4924, 0.8014, ..., 0.7517, 0.3044, 0.7081]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6197, 0.0798, 0.2763, ..., 0.5470, 0.5269, 0.6980]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 118.6995735168457 seconds + +[19.03, 18.57, 18.77, 18.58, 18.81, 18.48, 18.73, 18.49, 18.53, 18.7] +[53.6] +128.67079210281372 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 118.6995735168457, 'TIME_S_1KI': 1186.995735168457, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6896.754456710815, 'W': 53.6} +[19.03, 18.57, 18.77, 18.58, 18.81, 18.48, 18.73, 18.49, 18.53, 18.7, 18.93, 18.71, 18.57, 18.96, 19.07, 19.08, 18.68, 18.45, 18.7, 18.77] +336.895 +16.844749999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 118.6995735168457, 'TIME_S_1KI': 1186.995735168457, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6896.754456710815, 'W': 53.6, 'J_1KI': 68967.54456710815, 'W_1KI': 536.0, 'W_D': 36.755250000000004, 'J_D': 4729.327131436945, 'W_D_1KI': 367.55250000000007, 'J_D_1KI': 3675.525000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..99b9838 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.544205665588379, "TIME_S_1KI": 115.44205665588379, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 859.7113027667999, "W": 55.14, "J_1KI": 8597.113027668, "W_1KI": 551.4, "W_D": 38.2685, "J_D": 596.6605366327763, "W_D_1KI": 382.68500000000006, "J_D_1KI": 3826.850000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..b874877 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.544205665588379} + +tensor(indices=tensor([[316163, 297120, 103270, ..., 186453, 89053, 102036], + [364628, 260758, 332133, ..., 184022, 96855, 109835]]), + values=tensor([0.8767, 0.3246, 0.9305, ..., 0.2458, 0.9154, 0.1682]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1078, 0.6308, 0.1035, ..., 0.0924, 0.8588, 0.7930]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.544205665588379 seconds + +tensor(indices=tensor([[316163, 297120, 103270, ..., 186453, 89053, 102036], + [364628, 260758, 332133, ..., 184022, 96855, 109835]]), + values=tensor([0.8767, 0.3246, 0.9305, ..., 0.2458, 0.9154, 0.1682]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1078, 0.6308, 0.1035, ..., 0.0924, 0.8588, 0.7930]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.544205665588379 seconds + +[19.07, 18.83, 18.61, 18.8, 18.72, 18.93, 18.58, 18.35, 18.58, 18.62] +[55.14] +15.591427326202393 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.544205665588379, 'TIME_S_1KI': 115.44205665588379, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 859.7113027667999, 'W': 55.14} +[19.07, 18.83, 18.61, 18.8, 18.72, 18.93, 18.58, 18.35, 18.58, 18.62, 19.1, 18.67, 18.52, 18.55, 18.64, 18.75, 18.73, 19.74, 18.68, 18.71] +337.43 +16.8715 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.544205665588379, 'TIME_S_1KI': 115.44205665588379, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 859.7113027667999, 'W': 55.14, 'J_1KI': 8597.113027668, 'W_1KI': 551.4, 'W_D': 38.2685, 'J_D': 596.6605366327763, 'W_D_1KI': 382.68500000000006, 'J_D_1KI': 3826.850000000001} diff --git 
a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..0a002a9 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 59.49291944503784, "TIME_S_1KI": 594.9291944503784, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3602.7526541447637, "W": 53.73, "J_1KI": 36027.52654144764, "W_1KI": 537.3, "W_D": 36.9345, "J_D": 2476.5655668064355, "W_D_1KI": 369.34499999999997, "J_D_1KI": 3693.45} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..ab75b94 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 59.49291944503784} + +tensor(indices=tensor([[459605, 174156, 114019, ..., 322148, 190286, 152633], + [399185, 40378, 380596, ..., 391860, 331984, 49769]]), + values=tensor([0.0552, 0.2228, 0.4925, ..., 0.8715, 0.1326, 0.0625]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3630, 0.6242, 0.1713, ..., 0.5080, 0.3083, 0.7477]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 59.49291944503784 seconds + +tensor(indices=tensor([[459605, 174156, 114019, ..., 322148, 190286, 152633], + [399185, 40378, 380596, ..., 391860, 331984, 49769]]), + values=tensor([0.0552, 0.2228, 0.4925, ..., 0.8715, 0.1326, 0.0625]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3630, 0.6242, 0.1713, ..., 0.5080, 0.3083, 0.7477]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 59.49291944503784 seconds + +[19.43, 18.68, 18.84, 18.76, 18.58, 18.37, 18.48, 18.58, 18.63, 18.59] +[53.73] +67.05290627479553 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 59.49291944503784, 'TIME_S_1KI': 594.9291944503784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3602.7526541447637, 'W': 53.73} +[19.43, 18.68, 18.84, 18.76, 18.58, 18.37, 18.48, 18.58, 18.63, 18.59, 19.12, 18.61, 18.54, 18.8, 18.55, 18.59, 18.77, 18.57, 18.72, 18.54] +335.90999999999997 +16.795499999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 
'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 59.49291944503784, 'TIME_S_1KI': 594.9291944503784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3602.7526541447637, 'W': 53.73, 'J_1KI': 36027.52654144764, 'W_1KI': 537.3, 'W_D': 36.9345, 'J_D': 2476.5655668064355, 'W_D_1KI': 369.34499999999997, 'J_D_1KI': 3693.45} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..8734f45 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1039, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.407158851623535, "TIME_S_1KI": 10.016514775383575, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 965.9580666804313, "W": 69.77, "J_1KI": 929.6997754383362, "W_1KI": 67.15110683349373, "W_D": 53.03375, "J_D": 734.2465044978261, "W_D_1KI": 51.04307025986525, "J_D_1KI": 49.12711285838812} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..64306f3 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.0100657939910889} + +tensor(indices=tensor([[31523, 33077, 49188, ..., 38675, 46247, 24669], + [47180, 34171, 35720, ..., 12997, 28474, 29386]]), + values=tensor([0.4505, 0.8160, 0.7651, ..., 0.8177, 0.8287, 0.7807]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.6975, 0.1008, 0.1539, ..., 0.1438, 0.5719, 0.0024]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 1.0100657939910889 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '1039', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.407158851623535} + +tensor(indices=tensor([[13448, 21655, 12218, ..., 16748, 44846, 5783], + [44752, 43200, 42220, ..., 44140, 48432, 25953]]), + values=tensor([0.1842, 0.0210, 0.4791, ..., 0.2640, 0.5463, 0.7789]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.6918, 0.8208, 0.5134, ..., 0.8677, 0.9029, 0.2905]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.407158851623535 seconds + 
+tensor(indices=tensor([[13448, 21655, 12218, ..., 16748, 44846, 5783], + [44752, 43200, 42220, ..., 44140, 48432, 25953]]), + values=tensor([0.1842, 0.0210, 0.4791, ..., 0.2640, 0.5463, 0.7789]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.6918, 0.8208, 0.5134, ..., 0.8677, 0.9029, 0.2905]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.407158851623535 seconds + +[18.92, 18.86, 18.34, 18.6, 18.57, 18.59, 18.73, 18.32, 18.45, 18.54] +[69.77] +13.84489130973816 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1039, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.407158851623535, 'TIME_S_1KI': 10.016514775383575, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 965.9580666804313, 'W': 69.77} +[18.92, 18.86, 18.34, 18.6, 18.57, 18.59, 18.73, 18.32, 18.45, 18.54, 19.39, 18.35, 18.82, 18.62, 18.42, 18.27, 18.45, 18.71, 18.44, 19.52] +334.725 +16.736250000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1039, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.407158851623535, 'TIME_S_1KI': 10.016514775383575, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 965.9580666804313, 'W': 69.77, 'J_1KI': 929.6997754383362, 'W_1KI': 67.15110683349373, 'W_D': 53.03375, 'J_D': 734.2465044978261, 'W_D_1KI': 51.04307025986525, 'J_D_1KI': 49.12711285838812} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..10b0ac1 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 116, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.434862852096558, "TIME_S_1KI": 89.9557142422117, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 806.8828873872757, "W": 55.65, "J_1KI": 6955.886960235135, "W_1KI": 479.7413793103448, "W_D": 38.929, "J_D": 564.4410408463478, "W_D_1KI": 335.5948275862069, "J_D_1KI": 2893.058858501784} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..1ad5d61 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.010148525238037} + +tensor(indices=tensor([[ 9192, 33431, 5676, ..., 41573, 48614, 46926], + [ 2246, 12786, 2198, ..., 19105, 28099, 31621]]), + values=tensor([0.4125, 0.6162, 
0.6441, ..., 0.2133, 0.0567, 0.7355]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.9719, 0.8993, 0.6885, ..., 0.8108, 0.5742, 0.8846]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 9.010148525238037 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '116', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.434862852096558} + +tensor(indices=tensor([[10622, 45431, 4673, ..., 9888, 46780, 29591], + [29830, 11410, 39605, ..., 5208, 38247, 32159]]), + values=tensor([0.9671, 0.4227, 0.4464, ..., 0.8694, 0.5307, 0.0834]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5753, 0.0311, 0.2664, ..., 0.3468, 0.2274, 0.3010]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.434862852096558 seconds + +tensor(indices=tensor([[10622, 45431, 4673, ..., 9888, 46780, 29591], + [29830, 11410, 39605, ..., 5208, 38247, 32159]]), + values=tensor([0.9671, 0.4227, 0.4464, ..., 0.8694, 0.5307, 0.0834]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5753, 0.0311, 0.2664, ..., 0.3468, 0.2274, 0.3010]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.434862852096558 seconds + +[18.64, 19.07, 18.4, 18.46, 18.48, 18.21, 18.73, 18.52, 18.41, 18.55] +[55.65] +14.499243259429932 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.434862852096558, 'TIME_S_1KI': 89.9557142422117, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 806.8828873872757, 'W': 55.65} +[18.64, 19.07, 18.4, 18.46, 18.48, 18.21, 18.73, 18.52, 18.41, 18.55, 18.84, 18.51, 18.59, 18.37, 18.54, 18.86, 18.82, 18.58, 18.5, 18.71] +334.41999999999996 +16.720999999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.434862852096558, 'TIME_S_1KI': 89.9557142422117, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 806.8828873872757, 'W': 55.65, 'J_1KI': 6955.886960235135, 'W_1KI': 479.7413793103448, 'W_D': 38.929, 'J_D': 564.4410408463478, 'W_D_1KI': 335.5948275862069, 'J_D_1KI': 2893.058858501784} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..88e1cd6 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, 
"MATRIX_DENSITY": 0.01, "TIME_S": 89.05617833137512, "TIME_S_1KI": 890.5617833137512, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5383.155348339081, "W": 53.839999999999996, "J_1KI": 53831.55348339081, "W_1KI": 538.4, "W_D": 37.0565, "J_D": 3705.0686509236098, "W_D_1KI": 370.565, "J_D_1KI": 3705.65} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..d1b3ab0 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 89.05617833137512} + +tensor(indices=tensor([[25851, 31529, 28608, ..., 12178, 47469, 26738], + [ 7344, 26804, 2262, ..., 17020, 22352, 37461]]), + values=tensor([0.7135, 0.5801, 0.4814, ..., 0.2742, 0.9893, 0.4743]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.0606, 0.7618, 0.8293, ..., 0.2579, 0.2009, 0.1562]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 89.05617833137512 seconds + +tensor(indices=tensor([[25851, 31529, 28608, ..., 12178, 47469, 26738], + [ 7344, 26804, 2262, ..., 17020, 22352, 37461]]), + values=tensor([0.7135, 0.5801, 0.4814, ..., 0.2742, 0.9893, 0.4743]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.0606, 0.7618, 0.8293, ..., 0.2579, 0.2009, 0.1562]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 89.05617833137512 seconds + +[19.31, 18.87, 18.42, 18.43, 18.75, 18.6, 18.69, 18.47, 18.87, 18.47] +[53.84] +99.98431181907654 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 89.05617833137512, 'TIME_S_1KI': 890.5617833137512, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5383.155348339081, 'W': 53.839999999999996} +[19.31, 18.87, 18.42, 18.43, 18.75, 18.6, 18.69, 18.47, 18.87, 18.47, 18.93, 18.45, 18.6, 18.41, 19.13, 18.51, 18.61, 18.42, 18.72, 18.73] +335.66999999999996 +16.783499999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 89.05617833137512, 'TIME_S_1KI': 890.5617833137512, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5383.155348339081, 'W': 53.839999999999996, 'J_1KI': 53831.55348339081, 'W_1KI': 538.4, 'W_D': 37.0565, 'J_D': 3705.0686509236098, 'W_D_1KI': 370.565, 'J_D_1KI': 3705.65} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..f18df01 --- /dev/null +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 444.78580713272095, "TIME_S_1KI": 4447.8580713272095, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 25976.00573214054, "W": 53.67, "J_1KI": 259760.0573214054, "W_1KI": 536.7, "W_D": 36.655750000000005, "J_D": 17741.195679446817, "W_D_1KI": 366.55750000000006, "J_D_1KI": 3665.5750000000003} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..a30ee4f --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 444.78580713272095} + +tensor(indices=tensor([[29603, 34511, 23006, ..., 23494, 8188, 14199], + [16882, 37671, 41885, ..., 26286, 9404, 36377]]), + values=tensor([0.2189, 0.6909, 0.1034, ..., 0.1533, 0.4464, 0.0522]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.6073, 0.3302, 0.6540, ..., 0.2403, 0.0233, 0.1677]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 444.78580713272095 seconds + +tensor(indices=tensor([[29603, 34511, 23006, ..., 23494, 8188, 14199], + [16882, 37671, 41885, ..., 26286, 9404, 36377]]), + values=tensor([0.2189, 0.6909, 0.1034, ..., 0.1533, 0.4464, 0.0522]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.6073, 0.3302, 0.6540, ..., 0.2403, 0.0233, 0.1677]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 444.78580713272095 seconds + +[19.33, 18.31, 19.35, 18.4, 18.6, 18.53, 18.51, 18.25, 18.58, 18.44] +[53.67] +483.99488973617554 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 444.78580713272095, 'TIME_S_1KI': 4447.8580713272095, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25976.00573214054, 'W': 53.67} +[19.33, 18.31, 19.35, 18.4, 18.6, 18.53, 18.51, 18.25, 18.58, 18.44, 19.58, 18.5, 18.57, 18.58, 18.7, 22.68, 19.04, 18.5, 19.25, 18.52] +340.28499999999997 +17.014249999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 444.78580713272095, 'TIME_S_1KI': 4447.8580713272095, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25976.00573214054, 'W': 53.67, 'J_1KI': 259760.0573214054, 'W_1KI': 536.7, 'W_D': 
36.655750000000005, 'J_D': 17741.195679446817, 'W_D_1KI': 366.55750000000006, 'J_D_1KI': 3665.5750000000003} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..ffd2a22 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 9390, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.600325584411621, "TIME_S_1KI": 1.1288951634091182, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1132.2721235394476, "W": 81.35, "J_1KI": 120.58276076032456, "W_1KI": 8.663471778487752, "W_D": 55.831999999999994, "J_D": 777.0991665821075, "W_D_1KI": 5.945899893503727, "J_D_1KI": 0.6332161760919837} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..d98ed48 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.125227689743042} + +tensor(indices=tensor([[26684, 156, 45803, ..., 17345, 41881, 24481], + [10562, 38430, 30967, ..., 5071, 20662, 261]]), + values=tensor([0.8725, 0.3150, 0.6120, ..., 0.7598, 0.1667, 0.8221]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.4073, 0.4312, 0.0425, ..., 0.0130, 0.8266, 0.4352]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.125227689743042 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '8384', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.374225616455078} + +tensor(indices=tensor([[ 8808, 3686, 7654, ..., 25230, 46937, 20698], + [12679, 30137, 4471, ..., 31618, 8019, 28488]]), + values=tensor([0.7390, 0.8583, 0.7898, ..., 0.0845, 0.9755, 0.2548]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.2489, 0.5279, 0.9447, ..., 0.6654, 0.8101, 0.5127]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.374225616455078 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '9390', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": 
"coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.600325584411621} + +tensor(indices=tensor([[ 1563, 39257, 1881, ..., 26396, 10695, 8348], + [23935, 44588, 37393, ..., 41309, 29701, 28280]]), + values=tensor([0.3254, 0.4269, 0.6363, ..., 0.2801, 0.9656, 0.2835]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0843, 0.6518, 0.0483, ..., 0.5908, 0.5508, 0.4834]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.600325584411621 seconds + +tensor(indices=tensor([[ 1563, 39257, 1881, ..., 26396, 10695, 8348], + [23935, 44588, 37393, ..., 41309, 29701, 28280]]), + values=tensor([0.3254, 0.4269, 0.6363, ..., 0.2801, 0.9656, 0.2835]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0843, 0.6518, 0.0483, ..., 0.5908, 0.5508, 0.4834]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.600325584411621 seconds + +[44.75, 46.67, 43.9, 41.34, 41.8, 43.97, 44.11, 30.11, 18.93, 18.69] +[81.35] +13.918526411056519 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9390, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.600325584411621, 'TIME_S_1KI': 1.1288951634091182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.2721235394476, 'W': 81.35} +[44.75, 46.67, 43.9, 41.34, 41.8, 43.97, 44.11, 30.11, 18.93, 18.69, 19.47, 18.61, 18.62, 19.06, 18.65, 18.44, 18.52, 18.47, 18.53, 18.35] +510.36 +25.518 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9390, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.600325584411621, 'TIME_S_1KI': 1.1288951634091182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.2721235394476, 'W': 81.35, 'J_1KI': 120.58276076032456, 'W_1KI': 8.663471778487752, 'W_D': 55.831999999999994, 'J_D': 777.0991665821075, 'W_D_1KI': 5.945899893503727, 'J_D_1KI': 0.6332161760919837} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..0c3aed5 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1937, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.259681940078735, "TIME_S_1KI": 5.2966865978723465, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1120.4161442756654, "W": 81.45, "J_1KI": 578.428572160901, "W_1KI": 42.049561177077955, "W_D": 64.54225, "J_D": 887.8352226872444, "W_D_1KI": 33.32072792978833, "J_D_1KI": 17.202234346818962} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..8f57ac2 --- /dev/null +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.5420258045196533} + +tensor(indices=tensor([[ 2988, 20794, 17413, ..., 46199, 1932, 40249], + [27526, 29823, 23479, ..., 24916, 27370, 15952]]), + values=tensor([0.8998, 0.6924, 0.4185, ..., 0.6004, 0.4433, 0.5493]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.9585, 0.8675, 0.1609, ..., 0.0520, 0.4359, 0.3841]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.5420258045196533 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '1937', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.259681940078735} + +tensor(indices=tensor([[39861, 11301, 29084, ..., 9951, 26435, 10636], + [39676, 27590, 41109, ..., 6264, 35631, 17253]]), + values=tensor([0.4879, 0.8845, 0.5320, ..., 0.2499, 0.2533, 0.5319]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.3393, 0.7650, 0.7758, ..., 0.8198, 0.4559, 0.3967]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.259681940078735 seconds + +tensor(indices=tensor([[39861, 11301, 29084, ..., 9951, 26435, 10636], + [39676, 27590, 41109, ..., 6264, 35631, 17253]]), + values=tensor([0.4879, 0.8845, 0.5320, ..., 0.2499, 0.2533, 0.5319]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.3393, 0.7650, 0.7758, ..., 0.8198, 0.4559, 0.3967]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.259681940078735 seconds + +[19.18, 18.53, 20.24, 18.58, 18.74, 19.01, 18.42, 19.24, 18.54, 18.66] +[81.45] +13.755876541137695 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1937, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.259681940078735, 'TIME_S_1KI': 5.2966865978723465, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1120.4161442756654, 'W': 81.45} +[19.18, 18.53, 20.24, 18.58, 18.74, 19.01, 18.42, 19.24, 18.54, 18.66, 18.95, 19.14, 18.49, 18.66, 18.6, 18.43, 18.52, 18.55, 18.56, 19.02] +338.155 +16.90775 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1937, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.259681940078735, 'TIME_S_1KI': 5.2966865978723465, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1120.4161442756654, 'W': 81.45, 'J_1KI': 
578.428572160901, 'W_1KI': 42.049561177077955, 'W_D': 64.54225, 'J_D': 887.8352226872444, 'W_D_1KI': 33.32072792978833, 'J_D_1KI': 17.202234346818962} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..ed09269 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 110590, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.501764297485352, "TIME_S_1KI": 0.09496124692544851, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 753.7730596852302, "W": 54.19, "J_1KI": 6.81592422176716, "W_1KI": 0.490008138168008, "W_D": 37.28125, "J_D": 518.5754176303744, "W_D_1KI": 0.33711230671850984, "J_D_1KI": 0.003048307321805858} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..c21d2db --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.021326065063476562} + +tensor(indices=tensor([[1489, 1399, 3199, ..., 1338, 3692, 2037], + [ 50, 1696, 1964, ..., 175, 4016, 3269]]), + values=tensor([0.3226, 0.2986, 0.6059, ..., 0.9821, 0.7268, 0.1980]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.3840, 0.8665, 0.8556, ..., 0.4860, 0.5832, 0.1565]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.021326065063476562 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '49235', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.674599647521973} + +tensor(indices=tensor([[2878, 3865, 1234, ..., 4701, 1464, 2354], + [3379, 4688, 4096, ..., 2237, 208, 2335]]), + values=tensor([0.7381, 0.1747, 0.2573, ..., 0.2561, 0.9894, 0.8209]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.3278, 0.2209, 0.8626, ..., 0.8784, 0.5498, 0.8512]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 4.674599647521973 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '110590', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": 
"coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.501764297485352} + +tensor(indices=tensor([[3210, 1146, 1293, ..., 4286, 1879, 1028], + [2077, 1203, 2292, ..., 3308, 1309, 755]]), + values=tensor([0.7380, 0.1860, 0.1172, ..., 0.2266, 0.0661, 0.4039]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.2445, 0.0287, 0.9117, ..., 0.5079, 0.8174, 0.6108]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.501764297485352 seconds + +tensor(indices=tensor([[3210, 1146, 1293, ..., 4286, 1879, 1028], + [2077, 1203, 2292, ..., 3308, 1309, 755]]), + values=tensor([0.7380, 0.1860, 0.1172, ..., 0.2266, 0.0661, 0.4039]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.2445, 0.0287, 0.9117, ..., 0.5079, 0.8174, 0.6108]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.501764297485352 seconds + +[19.06, 19.14, 19.64, 18.45, 18.65, 18.72, 18.55, 18.52, 18.64, 18.69] +[54.19] +13.909818410873413 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 110590, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.501764297485352, 'TIME_S_1KI': 0.09496124692544851, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.7730596852302, 'W': 54.19} +[19.06, 19.14, 19.64, 18.45, 18.65, 18.72, 18.55, 18.52, 18.64, 18.69, 19.33, 18.42, 18.61, 18.81, 18.78, 18.89, 18.79, 18.75, 18.84, 18.87] +338.175 +16.90875 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 110590, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.501764297485352, 'TIME_S_1KI': 0.09496124692544851, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.7730596852302, 'W': 54.19, 'J_1KI': 6.81592422176716, 'W_1KI': 0.490008138168008, 'W_D': 37.28125, 'J_D': 518.5754176303744, 'W_D_1KI': 0.33711230671850984, 'J_D_1KI': 0.003048307321805858} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..5b7a026 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 11849, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.476124286651611, "TIME_S_1KI": 0.8841357318467052, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 758.6586921501159, "W": 54.62, "J_1KI": 64.02723370327588, "W_1KI": 4.609671702253355, "W_D": 37.47324999999999, "J_D": 520.4944495718478, "W_D_1KI": 3.162566461304751, "J_D_1KI": 0.2669057693733438} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..dae494e --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 
'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.10011768341064453} + +tensor(indices=tensor([[3026, 3606, 3274, ..., 750, 4825, 3528], + [ 198, 4380, 3647, ..., 3878, 3826, 2900]]), + values=tensor([0.7092, 0.7691, 0.9124, ..., 0.8563, 0.6046, 0.9069]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.3019, 0.2649, 0.8018, ..., 0.6055, 0.0687, 0.5660]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.10011768341064453 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '10487', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.293036460876465} + +tensor(indices=tensor([[2197, 3097, 2265, ..., 4453, 2081, 3796], + [1822, 4792, 1896, ..., 4084, 2805, 1538]]), + values=tensor([0.8348, 0.8176, 0.1314, ..., 0.9112, 0.8747, 0.2644]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5416, 0.5434, 0.9042, ..., 0.9585, 0.4923, 0.1121]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.293036460876465 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '11849', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.476124286651611} + +tensor(indices=tensor([[2420, 2332, 3084, ..., 828, 1698, 2078], + [3849, 4114, 4371, ..., 2942, 3118, 3258]]), + values=tensor([0.2696, 0.3007, 0.2328, ..., 0.6484, 0.2954, 0.9868]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9698, 0.2781, 0.9168, ..., 0.0955, 0.2837, 0.8036]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.476124286651611 seconds + +tensor(indices=tensor([[2420, 2332, 3084, ..., 828, 1698, 2078], + [3849, 4114, 4371, ..., 2942, 3118, 3258]]), + values=tensor([0.2696, 0.3007, 0.2328, ..., 0.6484, 0.2954, 0.9868]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9698, 0.2781, 0.9168, ..., 0.0955, 0.2837, 0.8036]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.476124286651611 seconds + +[18.99, 18.52, 18.65, 21.86, 19.34, 18.74, 19.23, 18.49, 18.85, 18.64] +[54.62] +13.88976001739502 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 11849, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 
'MATRIX_DENSITY': 0.001, 'TIME_S': 10.476124286651611, 'TIME_S_1KI': 0.8841357318467052, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 758.6586921501159, 'W': 54.62} +[18.99, 18.52, 18.65, 21.86, 19.34, 18.74, 19.23, 18.49, 18.85, 18.64, 19.82, 18.48, 19.46, 18.5, 19.96, 18.82, 18.84, 18.6, 18.49, 18.76] +342.935 +17.14675 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 11849, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.476124286651611, 'TIME_S_1KI': 0.8841357318467052, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 758.6586921501159, 'W': 54.62, 'J_1KI': 64.02723370327588, 'W_1KI': 4.609671702253355, 'W_D': 37.47324999999999, 'J_D': 520.4944495718478, 'W_D_1KI': 3.162566461304751, 'J_D_1KI': 0.2669057693733438} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..de2c625 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1184, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.404863119125366, "TIME_S_1KI": 8.787891147909937, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 752.6750787305832, "W": 54.27, "J_1KI": 635.7053029819115, "W_1KI": 45.836148648648646, "W_D": 37.423750000000005, "J_D": 519.0330565255881, "W_D_1KI": 31.607896959459467, "J_D_1KI": 26.69585891846239} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..eac492b --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.8865766525268555} + +tensor(indices=tensor([[2266, 3665, 75, ..., 4062, 2508, 79], + [4792, 1699, 495, ..., 4464, 4904, 2966]]), + values=tensor([0.3826, 0.8595, 0.1537, ..., 0.6749, 0.0648, 0.2729]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.1090, 0.5736, 0.6623, ..., 0.3668, 0.2557, 0.3649]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.8865766525268555 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '1184', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.404863119125366} + +tensor(indices=tensor([[3597, 317, 2030, ..., 4325, 1071, 
2230], + [3493, 1084, 1982, ..., 4467, 2119, 2803]]), + values=tensor([0.0374, 0.9123, 0.6869, ..., 0.1318, 0.8225, 0.8492]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.0022, 0.8842, 0.7844, ..., 0.6607, 0.2717, 0.2593]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.404863119125366 seconds + +tensor(indices=tensor([[3597, 317, 2030, ..., 4325, 1071, 2230], + [3493, 1084, 1982, ..., 4467, 2119, 2803]]), + values=tensor([0.0374, 0.9123, 0.6869, ..., 0.1318, 0.8225, 0.8492]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.0022, 0.8842, 0.7844, ..., 0.6607, 0.2717, 0.2593]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.404863119125366 seconds + +[19.18, 18.45, 19.2, 18.49, 18.87, 18.56, 18.76, 18.55, 18.84, 18.46] +[54.27] +13.869081974029541 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1184, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.404863119125366, 'TIME_S_1KI': 8.787891147909937, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 752.6750787305832, 'W': 54.27} +[19.18, 18.45, 19.2, 18.49, 18.87, 18.56, 18.76, 18.55, 18.84, 18.46, 19.18, 18.59, 18.81, 18.58, 18.56, 18.53, 18.91, 18.56, 18.95, 18.61] +336.92499999999995 +16.846249999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1184, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.404863119125366, 'TIME_S_1KI': 8.787891147909937, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 752.6750787305832, 'W': 54.27, 'J_1KI': 635.7053029819115, 'W_1KI': 45.836148648648646, 'W_D': 37.423750000000005, 'J_D': 519.0330565255881, 'W_D_1KI': 31.607896959459467, 'J_D_1KI': 26.69585891846239} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..a8877f7 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 238, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.45671272277832, "TIME_S_1KI": 43.93576774276605, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 774.8330591201782, "W": 54.01, "J_1KI": 3255.6010887402444, "W_1KI": 226.9327731092437, "W_D": 36.69324999999999, "J_D": 526.4051684236525, "W_D_1KI": 154.17331932773106, "J_D_1KI": 647.78705599887} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..39d5cc5 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', 
'0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 4.403820753097534} + +tensor(indices=tensor([[2166, 1268, 629, ..., 4120, 1007, 2512], + [ 415, 1940, 789, ..., 2792, 1763, 1545]]), + values=tensor([0.6506, 0.4904, 0.9076, ..., 0.7086, 0.5413, 0.2455]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.8562, 0.6438, 0.6195, ..., 0.6516, 0.0272, 0.2293]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 4.403820753097534 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '238', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.45671272277832} + +tensor(indices=tensor([[2164, 4063, 3219, ..., 1753, 4930, 3141], + [2346, 231, 349, ..., 3835, 88, 2454]]), + values=tensor([0.2722, 0.8635, 0.7075, ..., 0.8216, 0.5234, 0.5410]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.1210, 0.9125, 0.3696, ..., 0.7491, 0.4661, 0.2966]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.45671272277832 seconds + +tensor(indices=tensor([[2164, 4063, 3219, ..., 1753, 4930, 3141], + [2346, 231, 349, ..., 3835, 88, 2454]]), + values=tensor([0.2722, 0.8635, 0.7075, ..., 0.8216, 0.5234, 0.5410]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.1210, 0.9125, 0.3696, ..., 0.7491, 0.4661, 0.2966]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.45671272277832 seconds + +[19.19, 18.6, 18.82, 18.52, 18.92, 23.07, 18.67, 18.63, 19.42, 19.34] +[54.01] +14.34610366821289 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.45671272277832, 'TIME_S_1KI': 43.93576774276605, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 774.8330591201782, 'W': 54.01} +[19.19, 18.6, 18.82, 18.52, 18.92, 23.07, 18.67, 18.63, 19.42, 19.34, 22.56, 19.85, 18.6, 19.27, 18.66, 18.87, 19.04, 18.62, 18.89, 18.68] +346.33500000000004 +17.316750000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.45671272277832, 'TIME_S_1KI': 43.93576774276605, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 774.8330591201782, 'W': 54.01, 'J_1KI': 3255.6010887402444, 'W_1KI': 226.9327731092437, 'W_D': 36.69324999999999, 'J_D': 526.4051684236525, 'W_D_1KI': 154.17331932773106, 'J_D_1KI': 647.78705599887} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..7e731ad --- /dev/null +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 119, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.456920862197876, "TIME_S_1KI": 87.87328455628467, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 791.9394631242752, "W": 54.53, "J_1KI": 6654.953471632565, "W_1KI": 458.2352941176471, "W_D": 37.447250000000004, "J_D": 543.8465993119479, "W_D_1KI": 314.6827731092437, "J_D_1KI": 2644.3930513381824} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..4debb38 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.776594400405884} + +tensor(indices=tensor([[1054, 4831, 4827, ..., 1808, 4094, 2990], + [3235, 156, 130, ..., 75, 689, 2883]]), + values=tensor([0.8814, 0.1772, 0.2854, ..., 0.3901, 0.7521, 0.8354]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7139, 0.6677, 0.5066, ..., 0.6673, 0.2028, 0.0852]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 8.776594400405884 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '119', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.456920862197876} + +tensor(indices=tensor([[4425, 4377, 383, ..., 3921, 3701, 4194], + [4770, 1880, 1819, ..., 2593, 542, 4216]]), + values=tensor([0.3845, 0.5269, 0.3479, ..., 0.1245, 0.6492, 0.0895]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.9783, 0.9967, 0.0811, ..., 0.3461, 0.5111, 0.2827]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.456920862197876 seconds + +tensor(indices=tensor([[4425, 4377, 383, ..., 3921, 3701, 4194], + [4770, 1880, 1819, ..., 2593, 542, 4216]]), + values=tensor([0.3845, 0.5269, 0.3479, ..., 0.1245, 0.6492, 0.0895]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.9783, 0.9967, 0.0811, ..., 0.3461, 0.5111, 0.2827]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.456920862197876 seconds + +[18.66, 18.46, 18.87, 18.55, 18.84, 18.42, 19.13, 18.56, 18.8, 18.65] +[54.53] +14.52300500869751 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 119, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 
'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.456920862197876, 'TIME_S_1KI': 87.87328455628467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 791.9394631242752, 'W': 54.53} +[18.66, 18.46, 18.87, 18.55, 18.84, 18.42, 19.13, 18.56, 18.8, 18.65, 19.22, 19.94, 18.88, 18.64, 18.77, 18.59, 18.61, 18.65, 22.12, 19.12] +341.655 +17.082749999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 119, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.456920862197876, 'TIME_S_1KI': 87.87328455628467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 791.9394631242752, 'W': 54.53, 'J_1KI': 6654.953471632565, 'W_1KI': 458.2352941176471, 'W_D': 37.447250000000004, 'J_D': 543.8465993119479, 'W_D_1KI': 314.6827731092437, 'J_D_1KI': 2644.3930513381824} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..bed1137 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 17.56717610359192, "TIME_S_1KI": 175.6717610359192, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1204.4688155555725, "W": 54.160000000000004, "J_1KI": 12044.688155555725, "W_1KI": 541.6000000000001, "W_D": 37.317750000000004, "J_D": 829.9125949353577, "W_D_1KI": 373.17750000000007, "J_D_1KI": 3731.7750000000005} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..9ed476e --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 17.56717610359192} + +tensor(indices=tensor([[ 986, 2884, 1593, ..., 3297, 638, 632], + [2510, 1321, 4999, ..., 1993, 3985, 54]]), + values=tensor([0.5730, 0.8361, 0.4689, ..., 0.9783, 0.6525, 0.8309]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.3841, 0.5606, 0.9926, ..., 0.8077, 0.4532, 0.2709]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 17.56717610359192 seconds + +tensor(indices=tensor([[ 986, 2884, 1593, ..., 3297, 638, 632], + [2510, 1321, 4999, ..., 1993, 3985, 54]]), + values=tensor([0.5730, 0.8361, 0.4689, ..., 0.9783, 0.6525, 0.8309]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.3841, 0.5606, 0.9926, ..., 0.8077, 0.4532, 0.2709]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 
0.2 +Time: 17.56717610359192 seconds + +[18.93, 18.74, 18.77, 18.46, 18.56, 18.59, 18.64, 18.56, 18.83, 18.82] +[54.16] +22.239084482192993 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 17.56717610359192, 'TIME_S_1KI': 175.6717610359192, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1204.4688155555725, 'W': 54.160000000000004} +[18.93, 18.74, 18.77, 18.46, 18.56, 18.59, 18.64, 18.56, 18.83, 18.82, 18.92, 18.43, 18.57, 18.63, 19.08, 18.98, 18.92, 18.83, 18.65, 18.54] +336.845 +16.84225 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 17.56717610359192, 'TIME_S_1KI': 175.6717610359192, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1204.4688155555725, 'W': 54.160000000000004, 'J_1KI': 12044.688155555725, 'W_1KI': 541.6000000000001, 'W_D': 37.317750000000004, 'J_D': 829.9125949353577, 'W_D_1KI': 373.17750000000007, 'J_D_1KI': 3731.7750000000005} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..d3fe69a --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 26.314939260482788, "TIME_S_1KI": 263.1493926048279, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1704.6602046489716, "W": 54.15, "J_1KI": 17046.602046489716, "W_1KI": 541.5, "W_D": 37.14875, "J_D": 1169.4551390111446, "W_D_1KI": 371.48749999999995, "J_D_1KI": 3714.8749999999995} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..4255392 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 26.314939260482788} + +tensor(indices=tensor([[3862, 3181, 2555, ..., 3526, 2757, 1246], + [3276, 1342, 283, ..., 1567, 2573, 3445]]), + values=tensor([0.9874, 0.2580, 0.5181, ..., 0.3005, 0.2950, 0.3540]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.6924, 0.7904, 0.1421, ..., 0.2673, 0.3923, 0.7382]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 26.314939260482788 seconds + +tensor(indices=tensor([[3862, 3181, 2555, ..., 3526, 2757, 1246], + [3276, 1342, 283, ..., 1567, 2573, 3445]]), + values=tensor([0.9874, 0.2580, 0.5181, ..., 0.3005, 
0.2950, 0.3540]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.6924, 0.7904, 0.1421, ..., 0.2673, 0.3923, 0.7382]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 26.314939260482788 seconds + +[18.89, 18.3, 18.66, 18.8, 18.61, 18.6, 18.56, 18.83, 18.63, 18.48] +[54.15] +31.48033618927002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 26.314939260482788, 'TIME_S_1KI': 263.1493926048279, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1704.6602046489716, 'W': 54.15} +[18.89, 18.3, 18.66, 18.8, 18.61, 18.6, 18.56, 18.83, 18.63, 18.48, 22.95, 19.15, 18.54, 19.57, 18.76, 19.43, 18.62, 18.49, 19.03, 18.57] +340.025 +17.00125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 26.314939260482788, 'TIME_S_1KI': 263.1493926048279, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1704.6602046489716, 'W': 54.15, 'J_1KI': 17046.602046489716, 'W_1KI': 541.5, 'W_D': 37.14875, 'J_D': 1169.4551390111446, 'W_D_1KI': 371.48749999999995, 'J_D_1KI': 3714.8749999999995} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..2d0da51 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 35.06979584693909, "TIME_S_1KI": 350.69795846939087, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2216.3210521268848, "W": 54.02000000000001, "J_1KI": 22163.210521268848, "W_1KI": 540.2000000000002, "W_D": 37.01150000000001, "J_D": 1518.4999374452834, "W_D_1KI": 370.1150000000001, "J_D_1KI": 3701.150000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..347098c --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 35.06979584693909} + +tensor(indices=tensor([[2887, 2260, 1800, ..., 3729, 1798, 1093], + [ 38, 352, 3922, ..., 2925, 1088, 3829]]), + values=tensor([0.2553, 0.3460, 0.9039, ..., 0.9518, 0.5579, 0.4722]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.6033, 0.3586, 0.9716, ..., 0.9546, 0.5618, 0.6769]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 
5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 35.06979584693909 seconds + +tensor(indices=tensor([[2887, 2260, 1800, ..., 3729, 1798, 1093], + [ 38, 352, 3922, ..., 2925, 1088, 3829]]), + values=tensor([0.2553, 0.3460, 0.9039, ..., 0.9518, 0.5579, 0.4722]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.6033, 0.3586, 0.9716, ..., 0.9546, 0.5618, 0.6769]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 35.06979584693909 seconds + +[20.19, 18.51, 19.37, 18.71, 19.66, 18.63, 18.73, 18.52, 18.49, 18.46] +[54.02] +41.02778697013855 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 35.06979584693909, 'TIME_S_1KI': 350.69795846939087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2216.3210521268848, 'W': 54.02000000000001} +[20.19, 18.51, 19.37, 18.71, 19.66, 18.63, 18.73, 18.52, 18.49, 18.46, 19.02, 19.11, 18.8, 19.36, 18.91, 18.77, 18.75, 18.73, 18.75, 19.07] +340.16999999999996 +17.008499999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 35.06979584693909, 'TIME_S_1KI': 350.69795846939087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2216.3210521268848, 'W': 54.02000000000001, 'J_1KI': 22163.210521268848, 'W_1KI': 540.2000000000002, 'W_D': 37.01150000000001, 'J_D': 1518.4999374452834, 'W_D_1KI': 370.1150000000001, 'J_D_1KI': 3701.150000000001} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..3458276 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 43.865360498428345, "TIME_S_1KI": 438.65360498428345, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2714.299004650116, "W": 54.05, "J_1KI": 27142.990046501156, "W_1KI": 540.5, "W_D": 37.03825, "J_D": 1859.9978743567465, "W_D_1KI": 370.3825, "J_D_1KI": 3703.8250000000003} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..71de153 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 43.865360498428345} + +tensor(indices=tensor([[3939, 3535, 1415, ..., 4951, 3793, 4406], + [3504, 4136, 3541, ..., 2190, 3440, 
4821]]), + values=tensor([0.3608, 0.9417, 0.0695, ..., 0.6702, 0.1591, 0.7388]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.0465, 0.7407, 0.4101, ..., 0.8617, 0.2262, 0.7182]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 43.865360498428345 seconds + +tensor(indices=tensor([[3939, 3535, 1415, ..., 4951, 3793, 4406], + [3504, 4136, 3541, ..., 2190, 3440, 4821]]), + values=tensor([0.3608, 0.9417, 0.0695, ..., 0.6702, 0.1591, 0.7388]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.0465, 0.7407, 0.4101, ..., 0.8617, 0.2262, 0.7182]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 43.865360498428345 seconds + +[18.89, 18.55, 18.69, 18.69, 18.48, 18.44, 18.77, 18.61, 18.97, 18.65] +[54.05] +50.21829795837402 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 43.865360498428345, 'TIME_S_1KI': 438.65360498428345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2714.299004650116, 'W': 54.05} +[18.89, 18.55, 18.69, 18.69, 18.48, 18.44, 18.77, 18.61, 18.97, 18.65, 19.18, 18.9, 18.59, 18.58, 18.44, 18.74, 18.52, 19.12, 22.28, 19.01] +340.235 +17.01175 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 43.865360498428345, 'TIME_S_1KI': 438.65360498428345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2714.299004650116, 'W': 54.05, 'J_1KI': 27142.990046501156, 'W_1KI': 540.5, 'W_D': 37.03825, 'J_D': 1859.9978743567465, 'W_D_1KI': 370.3825, 'J_D_1KI': 3703.8250000000003} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..029137a --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 647175, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.114959955215454, "TIME_S_1KI": 0.015629404651316035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 733.7914706420898, "W": 54.08, "J_1KI": 1.1338377882985127, "W_1KI": 0.08356317842932746, "W_D": 36.693999999999996, "J_D": 497.8872822437286, "W_D_1KI": 0.05669872909182214, "J_D_1KI": 8.760957869482311e-05} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..a00aeb7 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,429 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": 
"coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.013978004455566406} + +tensor(indices=tensor([[3868, 801, 3405, 3873, 4709, 3205, 3214, 1464, 4801, + 2186, 510, 3693, 3409, 4038, 4599, 3322, 4144, 1936, + 3269, 1276, 4480, 113, 3908, 345, 2641, 4253, 779, + 3437, 891, 2470, 353, 4741, 2510, 4, 1658, 4367, + 4161, 3299, 3858, 2302, 1649, 866, 4270, 4084, 3491, + 2962, 604, 2900, 4662, 858, 2855, 134, 4975, 1716, + 2082, 4563, 239, 2248, 3312, 1509, 1380, 2193, 4705, + 2150, 1770, 539, 4711, 4145, 3635, 1097, 696, 218, + 2387, 1947, 4301, 2192, 4890, 4203, 1829, 1537, 2805, + 3562, 3213, 3417, 1273, 121, 3567, 1054, 2791, 2213, + 315, 4268, 4445, 575, 4118, 3494, 149, 2139, 4377, + 528, 219, 1817, 3220, 1757, 1296, 3178, 644, 4664, + 1055, 122, 4976, 4462, 3412, 2048, 4818, 3058, 2012, + 1497, 34, 2171, 4873, 2356, 4654, 3269, 4573, 2131, + 4702, 3685, 80, 2750, 3517, 3281, 1725, 2309, 4030, + 2087, 1125, 1957, 251, 1530, 1578, 4800, 4473, 2987, + 1000, 912, 4488, 564, 4198, 3212, 4105, 151, 2988, + 1322, 2844, 2895, 4158, 2732, 60, 4013, 3931, 4488, + 2004, 2871, 2377, 611, 2861, 2566, 2197, 3892, 3047, + 4855, 2707, 434, 1311, 2480, 2583, 1198, 4844, 1401, + 2022, 3266, 1194, 2267, 2906, 2449, 886, 2808, 1621, + 4244, 3927, 104, 1394, 809, 3559, 2538, 3339, 1175, + 534, 1226, 1181, 3300, 1976, 3886, 3665, 4737, 4252, + 3489, 2352, 3967, 3795, 2024, 527, 490, 2564, 3458, + 4060, 1211, 1222, 62, 916, 1581, 4008, 691, 1953, + 411, 4469, 517, 1082, 4109, 2341, 197, 1479, 3890, + 211, 968, 978, 2120, 1947, 4021, 4354, 4586, 3579, + 3580, 3228, 3434, 1567, 208, 262, 3756], + [3180, 349, 4147, 4535, 2563, 119, 1439, 4442, 3649, + 4413, 2672, 3866, 3254, 3399, 4654, 3029, 2842, 1239, + 2423, 4098, 148, 2081, 410, 504, 4360, 3050, 3181, + 4409, 2425, 2233, 920, 1791, 4239, 2512, 4096, 1318, + 4791, 957, 4997, 2311, 163, 4572, 899, 2936, 4768, + 1938, 613, 4604, 1121, 4925, 3386, 790, 4323, 4928, + 3241, 4251, 4444, 4687, 2467, 4403, 2822, 2392, 3057, + 4191, 653, 4804, 2893, 2744, 2992, 1191, 3147, 282, + 1451, 4886, 3472, 4347, 4129, 1012, 2708, 2580, 4561, + 1049, 2185, 3611, 4053, 1126, 4035, 1886, 1706, 871, + 2326, 3902, 518, 1809, 719, 631, 632, 558, 2679, + 2085, 1268, 4, 1618, 2961, 4343, 2262, 4997, 4884, + 1442, 1677, 1019, 196, 1769, 1757, 1595, 562, 1160, + 2541, 1416, 932, 4989, 605, 3367, 2083, 2655, 2146, + 4515, 2356, 4686, 977, 3198, 1624, 4231, 352, 3858, + 283, 2582, 2389, 2270, 3651, 1841, 3004, 482, 3899, + 3615, 4409, 2996, 4364, 1272, 1686, 500, 1020, 3693, + 1598, 1146, 2735, 3546, 1030, 87, 1512, 1837, 3190, + 3484, 4076, 2294, 3262, 1849, 321, 537, 2535, 4815, + 3926, 1727, 3872, 3780, 393, 4288, 3924, 701, 2399, + 4379, 80, 1008, 2484, 2110, 1015, 112, 1295, 4839, + 886, 1426, 846, 4019, 2160, 448, 3330, 4952, 1262, + 2285, 4885, 149, 2758, 4825, 213, 1492, 113, 2328, + 2240, 3709, 1492, 1143, 1958, 3851, 1377, 2456, 3057, + 89, 2858, 851, 2835, 2364, 2921, 1273, 2271, 2899, + 1321, 23, 793, 2132, 3963, 705, 3454, 855, 3926, + 2652, 3803, 2181, 2741, 291, 1082, 4703, 2239, 2040, + 1440, 3876, 1896, 3451, 3152, 2919, 2445]]), + values=tensor([0.6770, 0.7147, 0.9077, 0.4027, 0.1369, 0.8748, 0.3752, + 0.6886, 0.7040, 0.2461, 0.9592, 0.3802, 0.9636, 0.8043, + 0.1602, 0.2768, 0.4000, 0.0556, 0.4284, 0.6427, 0.9898, + 0.5235, 0.7517, 0.6109, 0.6773, 0.5601, 0.1309, 0.5376, + 0.4089, 0.2698, 0.2282, 0.7436, 0.0226, 0.3639, 0.4799, + 0.1749, 0.8845, 0.8426, 0.6687, 0.2920, 0.4277, 0.6928, + 
0.4819, 0.2109, 0.9768, 0.2023, 0.9326, 0.2179, 0.3429, + 0.2316, 0.2746, 0.0478, 0.2458, 0.4790, 0.4442, 0.1697, + 0.5388, 0.9006, 0.3307, 0.9814, 0.3361, 0.3415, 0.3613, + 0.3202, 0.5327, 0.3659, 0.0864, 0.2038, 0.9975, 0.5180, + 0.0033, 0.9738, 0.5953, 0.8298, 0.8531, 0.6033, 0.7035, + 0.5546, 0.5838, 0.3182, 0.7263, 0.1602, 0.1762, 0.8931, + 0.9099, 0.8118, 0.1823, 0.0237, 0.6517, 0.0981, 0.2668, + 0.9956, 0.3185, 0.5643, 0.1451, 0.8132, 0.1520, 0.6640, + 0.9144, 0.5296, 0.5763, 0.7921, 0.5633, 0.3664, 0.6460, + 0.8658, 0.5268, 0.2981, 0.9140, 0.6410, 0.7949, 0.3939, + 0.7212, 0.1931, 0.2551, 0.3881, 0.5762, 0.0784, 0.5545, + 0.6491, 0.7914, 0.4847, 0.9005, 0.0125, 0.2522, 0.2097, + 0.4390, 0.3403, 0.4090, 0.5372, 0.0110, 0.9707, 0.5907, + 0.2502, 0.8509, 0.3005, 0.3490, 0.0304, 0.2708, 0.8440, + 0.9189, 0.4212, 0.8745, 0.3524, 0.8390, 0.0412, 0.5954, + 0.9013, 0.1220, 0.3528, 0.7424, 0.3248, 0.4503, 0.4874, + 0.1349, 0.7230, 0.3691, 0.2308, 0.8225, 0.4428, 0.6754, + 0.4979, 0.2020, 0.3436, 0.4140, 0.6938, 0.6041, 0.2378, + 0.4796, 0.5560, 0.9547, 0.0313, 0.9419, 0.4836, 0.0767, + 0.9294, 0.7333, 0.5524, 0.5691, 0.9956, 0.0893, 0.5833, + 0.6843, 0.8765, 0.4747, 0.5323, 0.3848, 0.9949, 0.3110, + 0.4933, 0.0027, 0.9801, 0.8268, 0.1391, 0.9262, 0.3421, + 0.2742, 0.4650, 0.4322, 0.1281, 0.2080, 0.3790, 0.1802, + 0.4479, 0.6665, 0.4330, 0.8598, 0.4979, 0.7343, 0.4200, + 0.5932, 0.1774, 0.0074, 0.0278, 0.7518, 0.1070, 0.2248, + 0.9799, 0.0421, 0.9231, 0.7902, 0.3792, 0.6363, 0.2078, + 0.4978, 0.1650, 0.4327, 0.9819, 0.8667, 0.6953, 0.8989, + 0.2625, 0.5164, 0.3700, 0.3542, 0.0770, 0.2697, 0.2457, + 0.6477, 0.0564, 0.9130, 0.0884, 0.9538, 0.9637, 0.6829, + 0.7439, 0.1615, 0.3005, 0.2174, 0.8441]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.2744, 0.7051, 0.1601, ..., 0.0152, 0.0952, 0.7788]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.013978004455566406 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '75118', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.2187411785125732} + +tensor(indices=tensor([[ 307, 1112, 4675, 3230, 516, 3437, 4033, 924, 2833, + 98, 3567, 3778, 1927, 2847, 182, 1431, 235, 1616, + 2652, 3172, 886, 778, 1417, 1774, 3920, 3294, 2595, + 3590, 2893, 4547, 4904, 3007, 967, 900, 3963, 1896, + 741, 1758, 1960, 1607, 3722, 907, 3983, 11, 978, + 1260, 1308, 390, 3944, 721, 2192, 1921, 1335, 2174, + 2549, 2930, 1761, 4765, 2589, 4796, 2782, 3922, 987, + 4272, 4651, 46, 1438, 4655, 385, 4748, 837, 4168, + 11, 1520, 3599, 2446, 4495, 3577, 4941, 4185, 4264, + 2835, 2918, 2392, 2254, 2614, 2830, 3068, 4221, 3599, + 1045, 1432, 3312, 4672, 4923, 1976, 1051, 4180, 4167, + 2168, 737, 4439, 3673, 1998, 2854, 537, 1291, 671, + 4190, 1766, 331, 3994, 1849, 1102, 3412, 1467, 866, + 4929, 1501, 164, 1808, 4185, 1575, 3408, 3554, 2261, + 3122, 1483, 1885, 3255, 3538, 3960, 1526, 313, 1106, + 1157, 1447, 4513, 3325, 4739, 3669, 1490, 1371, 1778, + 1600, 3832, 4956, 1942, 2887, 3085, 1874, 700, 1017, + 3610, 2134, 3356, 503, 3624, 2420, 1100, 2272, 2438, + 1369, 3698, 2177, 2401, 3224, 4083, 4513, 1934, 3956, + 2632, 3587, 1839, 1335, 4059, 
2652, 1633, 1965, 4259, + 577, 4344, 398, 2173, 2457, 3616, 1103, 2193, 4891, + 3816, 3068, 346, 1364, 847, 2345, 24, 2948, 2821, + 392, 1864, 3937, 27, 3876, 1906, 3221, 3985, 3314, + 4616, 1462, 1249, 1864, 243, 405, 2091, 2621, 1068, + 4427, 2134, 44, 4785, 628, 4531, 3181, 1745, 4759, + 4829, 4343, 3561, 994, 601, 26, 3818, 2084, 1722, + 3199, 2137, 87, 1742, 2022, 62, 712, 1272, 4437, + 2842, 530, 359, 2664, 3949, 4277, 4061], + [4966, 879, 1357, 1654, 4198, 2068, 404, 726, 1038, + 1023, 3887, 3746, 2112, 1005, 4568, 3023, 1144, 3082, + 4762, 59, 749, 895, 135, 2263, 2567, 2669, 4769, + 55, 4486, 3901, 1958, 988, 2818, 2612, 458, 3164, + 3009, 3632, 1535, 1872, 4339, 1640, 4390, 2688, 244, + 4233, 1623, 2863, 2769, 2986, 637, 3952, 1452, 3935, + 2120, 2996, 4179, 3623, 1885, 999, 4953, 2050, 314, + 3347, 832, 596, 1715, 3784, 4195, 1553, 2616, 74, + 2852, 1115, 3654, 2515, 1065, 1040, 1976, 4875, 4550, + 926, 844, 2032, 2458, 4851, 1593, 2614, 4386, 2933, + 98, 4482, 1270, 972, 764, 1905, 1017, 3137, 2972, + 577, 3033, 1205, 1673, 4218, 2425, 1502, 1860, 2966, + 244, 1928, 3060, 2782, 3239, 1032, 2238, 1320, 4703, + 138, 3014, 4944, 2514, 497, 2889, 3925, 4539, 3058, + 4185, 4554, 1242, 3668, 3929, 2519, 2107, 2396, 1439, + 3595, 272, 3842, 1107, 4874, 3068, 1705, 4598, 2727, + 2031, 2046, 1175, 4256, 915, 2829, 3986, 2197, 2919, + 345, 363, 1799, 4260, 3394, 2678, 3391, 4980, 225, + 4733, 1321, 1374, 579, 2342, 749, 3844, 4737, 2689, + 4321, 618, 1470, 2588, 4307, 3657, 3466, 1356, 4734, + 2037, 4911, 2787, 2787, 1522, 3689, 981, 2828, 1776, + 2558, 3112, 3923, 3286, 3897, 4222, 3630, 2241, 2095, + 4034, 649, 588, 558, 2494, 18, 2157, 1794, 2310, + 2567, 4291, 4222, 1032, 16, 4545, 2334, 3382, 3167, + 3297, 2514, 4773, 4660, 4022, 4892, 10, 2475, 3690, + 1801, 4236, 584, 2369, 3106, 1633, 632, 593, 1127, + 4694, 811, 1185, 1980, 915, 2144, 4758, 3894, 2344, + 4750, 77, 3485, 1010, 1230, 3054, 3683]]), + values=tensor([0.4566, 0.4669, 0.3776, 0.0013, 0.4606, 0.1593, 0.6614, + 0.5896, 0.3059, 0.1410, 0.5047, 0.2805, 0.5060, 0.9032, + 0.2844, 0.2360, 0.3916, 0.0981, 0.7348, 0.0528, 0.5709, + 0.0069, 0.3725, 0.7553, 0.5955, 0.4263, 0.9230, 0.6077, + 0.9176, 0.2330, 0.6965, 0.3324, 0.9678, 0.3629, 0.6485, + 0.4249, 0.9277, 0.4169, 0.5933, 0.2899, 0.8279, 0.0451, + 0.9891, 0.6749, 0.1013, 0.2748, 0.8689, 0.0827, 0.7124, + 0.4486, 0.4053, 0.6901, 0.9615, 0.5891, 0.2439, 0.3594, + 0.6535, 0.3718, 0.6083, 0.6685, 0.3816, 0.6461, 0.7892, + 0.7307, 0.5841, 0.4648, 0.9265, 0.5235, 0.6258, 0.4468, + 0.7003, 0.1500, 0.9843, 0.8308, 0.0472, 0.0989, 0.6743, + 0.0248, 0.6156, 0.8812, 0.5912, 0.6431, 0.3961, 0.0357, + 0.8454, 0.6431, 0.5469, 0.9097, 0.5691, 0.4815, 0.3263, + 0.8203, 0.5533, 0.4086, 0.9512, 0.9790, 0.8422, 0.2243, + 0.0308, 0.2771, 0.2439, 0.8306, 0.6975, 0.7012, 0.7852, + 0.5070, 0.2320, 0.0989, 0.5885, 0.0093, 0.6656, 0.8031, + 0.2352, 0.9199, 0.2314, 0.3704, 0.6814, 0.2352, 0.0175, + 0.9816, 0.4038, 0.2333, 0.8592, 0.0859, 0.1684, 0.9928, + 0.6166, 0.6834, 0.8358, 0.4578, 0.5097, 0.0218, 0.9270, + 0.4755, 0.5979, 0.8389, 0.4194, 0.0366, 0.7044, 0.6176, + 0.0971, 0.9202, 0.1903, 0.6196, 0.1311, 0.6463, 0.3790, + 0.5768, 0.0964, 0.7004, 0.0045, 0.1837, 0.2147, 0.3736, + 0.7554, 0.4959, 0.9341, 0.6359, 0.5032, 0.4115, 0.5189, + 0.3218, 0.7607, 0.1938, 0.1371, 0.4927, 0.4376, 0.7736, + 0.8584, 0.3420, 0.7735, 0.2993, 0.8219, 0.2846, 0.1685, + 0.8344, 0.3239, 0.4257, 0.6719, 0.6185, 0.3429, 0.7688, + 0.3576, 0.9256, 0.7589, 0.2161, 0.8456, 0.0672, 0.5782, + 0.2441, 0.8816, 0.5409, 
0.2090, 0.5541, 0.6047, 0.6940, + 0.0651, 0.3659, 0.2711, 0.2609, 0.4024, 0.2039, 0.8513, + 0.9085, 0.2570, 0.6915, 0.4149, 0.3618, 0.9079, 0.7008, + 0.2502, 0.0102, 0.9506, 0.5842, 0.4539, 0.9304, 0.2939, + 0.3550, 0.3731, 0.8605, 0.0204, 0.9800, 0.1046, 0.3431, + 0.4191, 0.1812, 0.8176, 0.6629, 0.5720, 0.7936, 0.7789, + 0.8060, 0.5551, 0.5658, 0.4333, 0.9549, 0.8713, 0.3505, + 0.3475, 0.3863, 0.6786, 0.4952, 0.9817, 0.4541, 0.1264, + 0.7796, 0.0929, 0.9332, 0.2699, 0.3433]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.9389, 0.8826, 0.1190, ..., 0.5371, 0.0521, 0.4610]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.2187411785125732 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '647175', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.114959955215454} + +tensor(indices=tensor([[1070, 554, 3623, 914, 4972, 1707, 178, 2210, 3763, + 70, 3108, 4275, 3395, 2527, 3258, 3508, 2191, 4853, + 1150, 4924, 1328, 2639, 4830, 1513, 310, 1716, 3634, + 2960, 903, 801, 4771, 4278, 140, 3345, 768, 4608, + 305, 1533, 3780, 2595, 1446, 4790, 3293, 1560, 406, + 1129, 1499, 3991, 1706, 2568, 3997, 2538, 430, 1836, + 2858, 145, 3506, 3768, 1105, 4026, 2259, 699, 355, + 4025, 2982, 175, 2855, 2239, 4251, 3315, 905, 2553, + 4498, 683, 723, 2788, 194, 2843, 814, 3740, 1727, + 2143, 3554, 1085, 269, 1180, 3679, 3098, 921, 1365, + 1890, 2487, 2476, 3748, 3987, 3439, 383, 1969, 3698, + 1784, 2313, 1958, 4906, 3194, 4893, 1049, 1296, 1957, + 3481, 1223, 1839, 2719, 3925, 4723, 3676, 4495, 944, + 125, 4607, 533, 2416, 4637, 3951, 1195, 4090, 4426, + 1663, 2061, 4227, 4648, 4598, 630, 752, 3727, 4559, + 253, 4778, 704, 2491, 1843, 4385, 1095, 3765, 1341, + 926, 4833, 4151, 2324, 3321, 3162, 838, 397, 3738, + 538, 2249, 3693, 3325, 3283, 4357, 2507, 178, 340, + 3288, 248, 1785, 2067, 2560, 3904, 87, 3926, 3424, + 481, 498, 2205, 4509, 3094, 1184, 2988, 3277, 1078, + 2549, 1316, 3969, 4643, 303, 86, 4831, 9, 51, + 2028, 3206, 3762, 2416, 4705, 2876, 4088, 2083, 4572, + 1724, 536, 1908, 3190, 1783, 3346, 6, 4507, 3641, + 4226, 199, 4928, 2038, 2816, 94, 1358, 7, 3783, + 2758, 2471, 272, 267, 3430, 1307, 2035, 2316, 3025, + 1165, 1529, 2976, 3382, 371, 4211, 1222, 137, 3141, + 2371, 2431, 1435, 2566, 3880, 4724, 128, 3441, 3117, + 303, 4380, 1774, 4747, 4552, 1653, 4650], + [4647, 1673, 4544, 1987, 1582, 1625, 1616, 232, 3250, + 3078, 2559, 4039, 3326, 106, 4677, 148, 4084, 661, + 2357, 2941, 1719, 1455, 2334, 1737, 2897, 3189, 680, + 2129, 3326, 4541, 4120, 343, 2498, 4878, 1839, 4934, + 4182, 3000, 2850, 3907, 311, 4042, 2721, 1461, 4750, + 3937, 207, 255, 3947, 1306, 3685, 2352, 3416, 4802, + 1298, 35, 1621, 791, 2523, 2693, 483, 1098, 2240, + 3905, 3254, 952, 2184, 4983, 4399, 712, 4246, 1912, + 4900, 4079, 1579, 4724, 4536, 4809, 4940, 3020, 26, + 2268, 1817, 2988, 4977, 4373, 4236, 3578, 1732, 3459, + 3903, 1333, 3176, 3454, 1390, 1068, 3409, 3562, 1015, + 2260, 4675, 3812, 3092, 3175, 3937, 3553, 21, 2913, + 3713, 3824, 4559, 2676, 1770, 1420, 3167, 3121, 4571, + 3308, 4827, 695, 37, 597, 4797, 15, 1242, 4020, + 4519, 105, 3454, 1554, 893, 4934, 3374, 790, 4791, + 1540, 
535, 3681, 3761, 4873, 2134, 4412, 4265, 3255, + 2532, 3497, 3968, 2990, 3296, 1950, 3264, 188, 1470, + 114, 41, 701, 607, 791, 2140, 3411, 1002, 1402, + 4259, 3518, 2727, 952, 1779, 3563, 4655, 998, 2072, + 1379, 3446, 935, 2956, 4136, 1050, 749, 2418, 2840, + 4882, 4422, 4301, 4965, 4249, 4117, 2221, 3873, 2795, + 3215, 629, 3174, 1302, 3051, 2132, 3671, 3173, 2221, + 3969, 3109, 1673, 1043, 1201, 894, 3392, 3615, 2775, + 2538, 1503, 4995, 4469, 1588, 1602, 351, 4230, 2692, + 4772, 3712, 2383, 923, 532, 3465, 2114, 3806, 410, + 2698, 3143, 596, 3270, 4475, 3045, 4107, 929, 1786, + 1684, 1446, 315, 1697, 2303, 1830, 4373, 972, 2607, + 2739, 2783, 2015, 4841, 1704, 2825, 4668]]), + values=tensor([0.8577, 0.0561, 0.0215, 0.4220, 0.5376, 0.8234, 0.2864, + 0.1892, 0.5188, 0.2133, 0.6609, 0.7259, 0.4829, 0.5298, + 0.4015, 0.1798, 0.5983, 0.4425, 0.1202, 0.7435, 0.2416, + 0.9206, 0.6011, 0.4707, 0.6840, 0.9609, 0.0603, 0.1903, + 0.1445, 0.8315, 0.2454, 0.8229, 0.5669, 0.6394, 0.5789, + 0.3988, 0.0930, 0.5869, 0.6666, 0.2623, 0.3474, 0.8785, + 0.4621, 0.7925, 0.2096, 0.8206, 0.5510, 0.6579, 0.8806, + 0.7843, 0.4799, 0.2851, 0.5456, 0.0101, 0.1915, 0.3870, + 0.3596, 0.8460, 0.7732, 0.2015, 0.1849, 0.1487, 0.3484, + 0.4353, 0.2210, 0.4792, 0.0416, 0.6315, 0.5813, 0.5249, + 0.7866, 0.5566, 0.0763, 0.2130, 0.4762, 0.5636, 0.3171, + 0.1750, 0.8376, 0.1553, 0.5325, 0.3541, 0.3722, 0.3419, + 0.1416, 0.0765, 0.4616, 0.1855, 0.7445, 0.7366, 0.7038, + 0.0422, 0.9483, 0.2099, 0.9756, 0.0612, 0.8093, 0.2532, + 0.1465, 0.6488, 0.1045, 0.0284, 0.5921, 0.7163, 0.5598, + 0.7401, 0.0315, 0.2238, 0.2897, 0.7555, 0.9286, 0.3605, + 0.3056, 0.9832, 0.1729, 0.0109, 0.4480, 0.1822, 0.8832, + 0.1951, 0.7406, 0.2004, 0.2937, 0.3182, 0.2204, 0.1512, + 0.2427, 0.7837, 0.7944, 0.9406, 0.6225, 0.8711, 0.5675, + 0.6512, 0.9850, 0.0772, 0.5053, 0.4504, 0.7641, 0.3931, + 0.6837, 0.0167, 0.7797, 0.4860, 0.5280, 0.6330, 0.0290, + 0.7362, 0.2203, 0.8746, 0.2172, 0.2995, 0.5333, 0.4682, + 0.7128, 0.9358, 0.8186, 0.8676, 0.0905, 0.0929, 0.0240, + 0.7021, 0.9674, 0.5404, 0.1689, 0.2290, 0.0757, 0.3316, + 0.7712, 0.3301, 0.1136, 0.3724, 0.9170, 0.0695, 0.6790, + 0.3427, 0.8334, 0.3770, 0.1598, 0.2051, 0.6493, 0.4032, + 0.0755, 0.0979, 0.6668, 0.6010, 0.9093, 0.6818, 0.7120, + 0.3803, 0.8846, 0.1716, 0.1166, 0.8292, 0.4714, 0.9191, + 0.3141, 0.6310, 0.2200, 0.4181, 0.1060, 0.3698, 0.3381, + 0.6072, 0.2206, 0.9606, 0.5453, 0.4985, 0.3088, 0.2218, + 0.1496, 0.2462, 0.4853, 0.1743, 0.6497, 0.3831, 0.8912, + 0.4132, 0.8368, 0.2873, 0.3494, 0.3461, 0.5114, 0.7938, + 0.3291, 0.8901, 0.2024, 0.8759, 0.8400, 0.5867, 0.1099, + 0.9241, 0.1273, 0.4545, 0.7869, 0.4669, 0.1542, 0.5581, + 0.1450, 0.9442, 0.5666, 0.1076, 0.6024, 0.3686, 0.3983, + 0.9459, 0.5736, 0.0977, 0.1824, 0.1527]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.0362, 0.9277, 0.2849, ..., 0.1066, 0.3769, 0.7796]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.114959955215454 seconds + +tensor(indices=tensor([[1070, 554, 3623, 914, 4972, 1707, 178, 2210, 3763, + 70, 3108, 4275, 3395, 2527, 3258, 3508, 2191, 4853, + 1150, 4924, 1328, 2639, 4830, 1513, 310, 1716, 3634, + 2960, 903, 801, 4771, 4278, 140, 3345, 768, 4608, + 305, 1533, 3780, 2595, 1446, 4790, 3293, 1560, 406, + 1129, 1499, 3991, 1706, 2568, 3997, 2538, 430, 1836, + 2858, 145, 3506, 3768, 1105, 4026, 2259, 699, 355, + 4025, 2982, 175, 2855, 2239, 4251, 3315, 905, 2553, + 4498, 683, 723, 2788, 194, 2843, 
814, 3740, 1727, + 2143, 3554, 1085, 269, 1180, 3679, 3098, 921, 1365, + 1890, 2487, 2476, 3748, 3987, 3439, 383, 1969, 3698, + 1784, 2313, 1958, 4906, 3194, 4893, 1049, 1296, 1957, + 3481, 1223, 1839, 2719, 3925, 4723, 3676, 4495, 944, + 125, 4607, 533, 2416, 4637, 3951, 1195, 4090, 4426, + 1663, 2061, 4227, 4648, 4598, 630, 752, 3727, 4559, + 253, 4778, 704, 2491, 1843, 4385, 1095, 3765, 1341, + 926, 4833, 4151, 2324, 3321, 3162, 838, 397, 3738, + 538, 2249, 3693, 3325, 3283, 4357, 2507, 178, 340, + 3288, 248, 1785, 2067, 2560, 3904, 87, 3926, 3424, + 481, 498, 2205, 4509, 3094, 1184, 2988, 3277, 1078, + 2549, 1316, 3969, 4643, 303, 86, 4831, 9, 51, + 2028, 3206, 3762, 2416, 4705, 2876, 4088, 2083, 4572, + 1724, 536, 1908, 3190, 1783, 3346, 6, 4507, 3641, + 4226, 199, 4928, 2038, 2816, 94, 1358, 7, 3783, + 2758, 2471, 272, 267, 3430, 1307, 2035, 2316, 3025, + 1165, 1529, 2976, 3382, 371, 4211, 1222, 137, 3141, + 2371, 2431, 1435, 2566, 3880, 4724, 128, 3441, 3117, + 303, 4380, 1774, 4747, 4552, 1653, 4650], + [4647, 1673, 4544, 1987, 1582, 1625, 1616, 232, 3250, + 3078, 2559, 4039, 3326, 106, 4677, 148, 4084, 661, + 2357, 2941, 1719, 1455, 2334, 1737, 2897, 3189, 680, + 2129, 3326, 4541, 4120, 343, 2498, 4878, 1839, 4934, + 4182, 3000, 2850, 3907, 311, 4042, 2721, 1461, 4750, + 3937, 207, 255, 3947, 1306, 3685, 2352, 3416, 4802, + 1298, 35, 1621, 791, 2523, 2693, 483, 1098, 2240, + 3905, 3254, 952, 2184, 4983, 4399, 712, 4246, 1912, + 4900, 4079, 1579, 4724, 4536, 4809, 4940, 3020, 26, + 2268, 1817, 2988, 4977, 4373, 4236, 3578, 1732, 3459, + 3903, 1333, 3176, 3454, 1390, 1068, 3409, 3562, 1015, + 2260, 4675, 3812, 3092, 3175, 3937, 3553, 21, 2913, + 3713, 3824, 4559, 2676, 1770, 1420, 3167, 3121, 4571, + 3308, 4827, 695, 37, 597, 4797, 15, 1242, 4020, + 4519, 105, 3454, 1554, 893, 4934, 3374, 790, 4791, + 1540, 535, 3681, 3761, 4873, 2134, 4412, 4265, 3255, + 2532, 3497, 3968, 2990, 3296, 1950, 3264, 188, 1470, + 114, 41, 701, 607, 791, 2140, 3411, 1002, 1402, + 4259, 3518, 2727, 952, 1779, 3563, 4655, 998, 2072, + 1379, 3446, 935, 2956, 4136, 1050, 749, 2418, 2840, + 4882, 4422, 4301, 4965, 4249, 4117, 2221, 3873, 2795, + 3215, 629, 3174, 1302, 3051, 2132, 3671, 3173, 2221, + 3969, 3109, 1673, 1043, 1201, 894, 3392, 3615, 2775, + 2538, 1503, 4995, 4469, 1588, 1602, 351, 4230, 2692, + 4772, 3712, 2383, 923, 532, 3465, 2114, 3806, 410, + 2698, 3143, 596, 3270, 4475, 3045, 4107, 929, 1786, + 1684, 1446, 315, 1697, 2303, 1830, 4373, 972, 2607, + 2739, 2783, 2015, 4841, 1704, 2825, 4668]]), + values=tensor([0.8577, 0.0561, 0.0215, 0.4220, 0.5376, 0.8234, 0.2864, + 0.1892, 0.5188, 0.2133, 0.6609, 0.7259, 0.4829, 0.5298, + 0.4015, 0.1798, 0.5983, 0.4425, 0.1202, 0.7435, 0.2416, + 0.9206, 0.6011, 0.4707, 0.6840, 0.9609, 0.0603, 0.1903, + 0.1445, 0.8315, 0.2454, 0.8229, 0.5669, 0.6394, 0.5789, + 0.3988, 0.0930, 0.5869, 0.6666, 0.2623, 0.3474, 0.8785, + 0.4621, 0.7925, 0.2096, 0.8206, 0.5510, 0.6579, 0.8806, + 0.7843, 0.4799, 0.2851, 0.5456, 0.0101, 0.1915, 0.3870, + 0.3596, 0.8460, 0.7732, 0.2015, 0.1849, 0.1487, 0.3484, + 0.4353, 0.2210, 0.4792, 0.0416, 0.6315, 0.5813, 0.5249, + 0.7866, 0.5566, 0.0763, 0.2130, 0.4762, 0.5636, 0.3171, + 0.1750, 0.8376, 0.1553, 0.5325, 0.3541, 0.3722, 0.3419, + 0.1416, 0.0765, 0.4616, 0.1855, 0.7445, 0.7366, 0.7038, + 0.0422, 0.9483, 0.2099, 0.9756, 0.0612, 0.8093, 0.2532, + 0.1465, 0.6488, 0.1045, 0.0284, 0.5921, 0.7163, 0.5598, + 0.7401, 0.0315, 0.2238, 0.2897, 0.7555, 0.9286, 0.3605, + 0.3056, 0.9832, 0.1729, 0.0109, 0.4480, 0.1822, 0.8832, + 0.1951, 0.7406, 
0.2004, 0.2937, 0.3182, 0.2204, 0.1512, + 0.2427, 0.7837, 0.7944, 0.9406, 0.6225, 0.8711, 0.5675, + 0.6512, 0.9850, 0.0772, 0.5053, 0.4504, 0.7641, 0.3931, + 0.6837, 0.0167, 0.7797, 0.4860, 0.5280, 0.6330, 0.0290, + 0.7362, 0.2203, 0.8746, 0.2172, 0.2995, 0.5333, 0.4682, + 0.7128, 0.9358, 0.8186, 0.8676, 0.0905, 0.0929, 0.0240, + 0.7021, 0.9674, 0.5404, 0.1689, 0.2290, 0.0757, 0.3316, + 0.7712, 0.3301, 0.1136, 0.3724, 0.9170, 0.0695, 0.6790, + 0.3427, 0.8334, 0.3770, 0.1598, 0.2051, 0.6493, 0.4032, + 0.0755, 0.0979, 0.6668, 0.6010, 0.9093, 0.6818, 0.7120, + 0.3803, 0.8846, 0.1716, 0.1166, 0.8292, 0.4714, 0.9191, + 0.3141, 0.6310, 0.2200, 0.4181, 0.1060, 0.3698, 0.3381, + 0.6072, 0.2206, 0.9606, 0.5453, 0.4985, 0.3088, 0.2218, + 0.1496, 0.2462, 0.4853, 0.1743, 0.6497, 0.3831, 0.8912, + 0.4132, 0.8368, 0.2873, 0.3494, 0.3461, 0.5114, 0.7938, + 0.3291, 0.8901, 0.2024, 0.8759, 0.8400, 0.5867, 0.1099, + 0.9241, 0.1273, 0.4545, 0.7869, 0.4669, 0.1542, 0.5581, + 0.1450, 0.9442, 0.5666, 0.1076, 0.6024, 0.3686, 0.3983, + 0.9459, 0.5736, 0.0977, 0.1824, 0.1527]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.0362, 0.9277, 0.2849, ..., 0.1066, 0.3769, 0.7796]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.114959955215454 seconds + +[19.01, 18.62, 18.75, 18.53, 18.58, 23.31, 19.17, 18.5, 19.28, 18.61] +[54.08] +13.568629264831543 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 647175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.114959955215454, 'TIME_S_1KI': 0.015629404651316035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 733.7914706420898, 'W': 54.08} +[19.01, 18.62, 18.75, 18.53, 18.58, 23.31, 19.17, 18.5, 19.28, 18.61, 19.23, 22.35, 19.18, 18.71, 19.45, 19.51, 18.94, 18.5, 18.62, 18.59] +347.72 +17.386000000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 647175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.114959955215454, 'TIME_S_1KI': 0.015629404651316035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 733.7914706420898, 'W': 54.08, 'J_1KI': 1.1338377882985127, 'W_1KI': 0.08356317842932746, 'W_D': 36.693999999999996, 'J_D': 497.8872822437286, 'W_D_1KI': 0.05669872909182214, 'J_D_1KI': 8.760957869482311e-05} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..97e24d5 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 206108, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.516620397567749, "TIME_S_1KI": 0.051024804459641304, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 755.539121131897, "W": 54.27, "J_1KI": 3.6657437903036123, "W_1KI": 0.26330855667902264, "W_D": 37.44975000000001, "J_D": 521.3700239839555, "W_D_1KI": 0.18169964290566115, "J_D_1KI": 0.0008815749165760725} diff --git 
a/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..ad34e24 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01703357696533203} + +tensor(indices=tensor([[ 475, 3738, 3288, ..., 4069, 887, 1359], + [1010, 4738, 4553, ..., 4388, 225, 2926]]), + values=tensor([0.2526, 0.0196, 0.5767, ..., 0.5177, 0.2605, 0.1150]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.0360, 0.3764, 0.3903, ..., 0.6391, 0.0436, 0.4518]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.01703357696533203 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '61642', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.140289306640625} + +tensor(indices=tensor([[ 167, 1713, 4757, ..., 62, 1670, 2419], + [ 610, 1517, 4376, ..., 3286, 2079, 2105]]), + values=tensor([0.4242, 0.0107, 0.6695, ..., 0.4660, 0.2421, 0.5912]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.7928, 0.1057, 0.9545, ..., 0.4896, 0.0992, 0.9309]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 3.140289306640625 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '206108', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.516620397567749} + +tensor(indices=tensor([[2695, 3871, 432, ..., 1583, 3020, 2131], + [2211, 835, 658, ..., 3988, 3418, 3891]]), + values=tensor([0.1995, 0.5778, 0.7653, ..., 0.2548, 0.7811, 0.9660]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.4693, 0.9791, 0.1618, ..., 0.4783, 0.1670, 0.5419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.516620397567749 seconds + +tensor(indices=tensor([[2695, 3871, 432, ..., 1583, 3020, 2131], + [2211, 835, 658, ..., 3988, 3418, 3891]]), + values=tensor([0.1995, 0.5778, 0.7653, ..., 0.2548, 0.7811, 0.9660]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.4693, 0.9791, 0.1618, ..., 0.4783, 0.1670, 0.5419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 
+Time: 10.516620397567749 seconds + +[19.2, 18.48, 18.65, 18.58, 19.01, 18.56, 18.65, 18.53, 18.98, 18.52] +[54.27] +13.921855926513672 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 206108, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.516620397567749, 'TIME_S_1KI': 0.051024804459641304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 755.539121131897, 'W': 54.27} +[19.2, 18.48, 18.65, 18.58, 19.01, 18.56, 18.65, 18.53, 18.98, 18.52, 19.01, 18.42, 18.89, 18.41, 18.81, 18.5, 19.05, 18.68, 18.47, 18.74] +336.405 +16.820249999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 206108, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.516620397567749, 'TIME_S_1KI': 0.051024804459641304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 755.539121131897, 'W': 54.27, 'J_1KI': 3.6657437903036123, 'W_1KI': 0.26330855667902264, 'W_D': 37.44975000000001, 'J_D': 521.3700239839555, 'W_D_1KI': 0.18169964290566115, 'J_D_1KI': 0.0008815749165760725} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json index c7b5a2b..ffb2e2a 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 33105, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.270436525344849, "TIME_S_1KI": 0.31023822761953934, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1243.494291753769, "W": 88.28, "J_1KI": 37.56212933858236, "W_1KI": 2.6666666666666665, "W_D": 71.58725, "J_D": 1008.3635788100362, "W_D_1KI": 2.1624301465035494, "J_D_1KI": 0.0653203487842788} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 30731, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.384181499481201, "TIME_S_1KI": 0.3379057466233185, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1280.0619927191735, "W": 90.17, "J_1KI": 41.65376957206643, "W_1KI": 2.934170707103576, "W_D": 73.17975, "J_D": 1038.866769565165, "W_D_1KI": 2.3813006410465003, "J_D_1KI": 0.07748855035783087} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output index 31fde2c..3ceb939 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, 
"MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.047393798828125} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.046231746673583984} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 25, ..., 999973, - 999987, 1000000]), - col_indices=tensor([ 2538, 10020, 11588, ..., 84720, 92719, 95287]), - values=tensor([0.8172, 0.5815, 0.2513, ..., 0.2819, 0.8178, 0.1271]), +tensor(crow_indices=tensor([ 0, 12, 30, ..., 999983, + 999990, 1000000]), + col_indices=tensor([ 1195, 1262, 6291, ..., 68939, 88156, 98049]), + values=tensor([0.1288, 0.6989, 0.0067, ..., 0.2774, 0.2181, 0.0994]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9323, 0.3660, 0.2073, ..., 0.7127, 0.8566, 0.0523]) +tensor([0.9227, 0.2479, 0.1465, ..., 0.1264, 0.7704, 0.2389]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,39 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 0.047393798828125 seconds +Time: 0.046231746673583984 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22154', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.0265889167785645} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22711', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.759751558303833} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 20, ..., 999976, - 999985, 1000000]), - col_indices=tensor([10428, 14843, 15503, ..., 86013, 91025, 96391]), - values=tensor([0.0148, 0.3731, 0.6426, ..., 0.4125, 0.5086, 0.3848]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0982, 0.0147, 0.0440, ..., 0.9267, 0.0489, 0.6248]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 7.0265889167785645 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33105', '-ss', '100000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.270436525344849} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 18, ..., 999974, +tensor(crow_indices=tensor([ 0, 13, 26, ..., 999975, 999988, 1000000]), - col_indices=tensor([ 697, 33076, 59577, ..., 88840, 91058, 94574]), - values=tensor([0.8969, 0.3012, 0.5025, ..., 0.5812, 0.6517, 0.5598]), + col_indices=tensor([ 54, 3894, 8242, ..., 85217, 87443, 91730]), + values=tensor([0.7440, 0.9626, 0.1209, ..., 0.0767, 0.1943, 0.4883]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0089, 0.5150, 0.8606, ..., 0.9603, 0.9290, 0.1786]) +tensor([0.9335, 0.7866, 0.9277, ..., 0.2681, 0.0485, 0.1026]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.270436525344849 seconds +Time: 7.759751558303833 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '30731', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.384181499481201} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
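The UserWarning just above comes from the to_sparse_csr() call on line 75 of spmv.py; PyTorch still flags sparse CSR support as beta, so the warning repeats once per process. What gets timed is an SpMV over a synthetic random matrix, matching the sparse tensor and dense vector printed in every one of these runs. The snippet below is a minimal stand-in under that reading; make_synthetic and time_spmv are placeholder names, not the real spmv.py API.

import time
import torch

def make_synthetic(n, density, fmt):
    # Random n x n sparse matrix at the given density, as 'coo' or 'csr'.
    nnz = int(n * n * density)
    idx = torch.randint(0, n, (2, nnz))   # duplicate coordinates possible; fine for a sketch
    val = torch.rand(nnz)
    m = torch.sparse_coo_tensor(idx, val, (n, n))
    if fmt == "csr":
        # The conversion that triggers the beta-state warning above.
        m = m.to_sparse_csr().type(torch.float32)
    return m

def time_spmv(matrix, iterations):
    # Multiply by a random dense column `iterations` times; return elapsed seconds.
    x = torch.rand((matrix.shape[1], 1))
    start = time.time()
    for _ in range(iterations):
        y = matrix @ x                    # sparse-dense matmul, COO or CSR layout
    return time.time() - start

print(time_spmv(make_synthetic(10000, 0.0001, "csr"), 100))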
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 18, ..., 999974, - 999988, 1000000]), - col_indices=tensor([ 697, 33076, 59577, ..., 88840, 91058, 94574]), - values=tensor([0.8969, 0.3012, 0.5025, ..., 0.5812, 0.6517, 0.5598]), +tensor(crow_indices=tensor([ 0, 6, 15, ..., 999978, + 999990, 1000000]), + col_indices=tensor([ 5042, 44146, 46975, ..., 74653, 86841, 94052]), + values=tensor([0.0034, 0.2057, 0.9151, ..., 0.0568, 0.4580, 0.2811]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0089, 0.5150, 0.8606, ..., 0.9603, 0.9290, 0.1786]) +tensor([0.0913, 0.1633, 0.2108, ..., 0.2844, 0.2437, 0.6726]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +56,30 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.270436525344849 seconds +Time: 10.384181499481201 seconds -[18.44, 17.49, 17.8, 17.92, 17.7, 20.94, 18.06, 17.75, 21.44, 18.07] -[88.28] -14.085798501968384 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33105, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.270436525344849, 'TIME_S_1KI': 0.31023822761953934, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1243.494291753769, 'W': 88.28} -[18.44, 17.49, 17.8, 17.92, 17.7, 20.94, 18.06, 17.75, 21.44, 18.07, 18.18, 18.15, 17.95, 18.02, 21.88, 18.26, 18.15, 17.92, 17.92, 18.32] -333.855 -16.69275 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33105, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.270436525344849, 'TIME_S_1KI': 0.31023822761953934, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1243.494291753769, 'W': 88.28, 'J_1KI': 37.56212933858236, 'W_1KI': 2.6666666666666665, 'W_D': 71.58725, 'J_D': 1008.3635788100362, 'W_D_1KI': 2.1624301465035494, 'J_D_1KI': 0.0653203487842788} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 15, ..., 999978, + 999990, 1000000]), + col_indices=tensor([ 5042, 44146, 46975, ..., 74653, 86841, 94052]), + values=tensor([0.0034, 0.2057, 0.9151, ..., 0.0568, 0.4580, 0.2811]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0913, 0.1633, 0.2108, ..., 0.2844, 0.2437, 0.6726]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.384181499481201 seconds + +[19.26, 18.61, 18.82, 18.38, 18.86, 18.56, 18.72, 18.5, 18.64, 18.52] +[90.17] +14.196096181869507 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 30731, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.384181499481201, 'TIME_S_1KI': 0.3379057466233185, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1280.0619927191735, 'W': 90.17} +[19.26, 18.61, 18.82, 18.38, 18.86, 18.56, 18.72, 18.5, 18.64, 18.52, 20.22, 18.46, 18.71, 18.42, 18.71, 18.56, 18.79, 20.64, 20.22, 18.41] +339.805 +16.99025 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 30731, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.384181499481201, 'TIME_S_1KI': 0.3379057466233185, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1280.0619927191735, 'W': 90.17, 'J_1KI': 41.65376957206643, 'W_1KI': 2.934170707103576, 'W_D': 73.17975, 'J_D': 1038.866769565165, 'W_D_1KI': 2.3813006410465003, 'J_D_1KI': 0.07748855035783087} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json index 8d42109..d019bd1 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2748, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.477163553237915, "TIME_S_1KI": 3.8126504924446563, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1424.6626942634582, "W": 81.1, "J_1KI": 518.4362060638495, "W_1KI": 29.512372634643373, "W_D": 64.67649999999999, "J_D": 1136.1553236193656, "W_D_1KI": 23.535844250363898, "J_D_1KI": 8.564717703916994} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2634, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.097931861877441, "TIME_S_1KI": 3.83368711536729, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1385.399602217674, "W": 83.66, "J_1KI": 525.9679583210608, "W_1KI": 31.761579347000758, "W_D": 66.5715, "J_D": 1102.4160843776465, "W_D_1KI": 25.273917995444194, "J_D_1KI": 9.595261197966664} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output index 280d994..d785c4e 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.4098339080810547} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.3985879421234131} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 105, 217, ..., 9999774, - 9999878, 10000000]), - col_indices=tensor([ 2925, 3045, 3251, ..., 98848, 99298, 99703]), - values=tensor([0.4813, 0.4380, 0.0490, ..., 0.5758, 0.0326, 0.9259]), +tensor(crow_indices=tensor([ 0, 87, 172, ..., 9999773, + 9999880, 10000000]), + col_indices=tensor([ 1874, 1886, 2437, ..., 99368, 99870, 99904]), + values=tensor([0.7346, 0.5677, 0.7870, ..., 0.6229, 0.4001, 0.9653]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.0648, 0.1204, 0.6207, ..., 0.1724, 0.6764, 0.4459]) +tensor([0.5593, 0.9092, 0.2146, ..., 0.7858, 0.8030, 0.2816]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,39 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 0.4098339080810547 seconds +Time: 0.3985879421234131 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2562', '-ss', '100000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.787212610244751} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2634', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.097931861877441} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 90, 201, ..., 9999780, - 9999895, 10000000]), - col_indices=tensor([ 1242, 4056, 4707, ..., 96589, 97728, 98727]), - values=tensor([0.8897, 0.2716, 0.4760, ..., 0.6356, 0.3047, 0.7796]), - size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4134, 0.7359, 0.5031, ..., 0.9568, 0.9528, 0.4063]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 10000000 -Density: 0.001 -Time: 9.787212610244751 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2748', '-ss', '100000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.477163553237915} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 82, 171, ..., 9999801, +tensor(crow_indices=tensor([ 0, 97, 205, ..., 9999773, 9999897, 10000000]), - col_indices=tensor([ 1661, 2279, 2856, ..., 99449, 99691, 99739]), - values=tensor([0.1663, 0.0376, 0.1009, ..., 0.7118, 0.9261, 0.1836]), + col_indices=tensor([ 2815, 3659, 3819, ..., 99680, 99902, 99908]), + values=tensor([0.9090, 0.6219, 0.8306, ..., 0.7078, 0.9813, 0.3499]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7724, 0.0559, 0.5235, ..., 0.7708, 0.2517, 0.0642]) +tensor([0.4504, 0.4366, 0.8174, ..., 0.8474, 0.0095, 0.3536]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.477163553237915 seconds +Time: 10.097931861877441 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 82, 171, ..., 9999801, +tensor(crow_indices=tensor([ 0, 97, 205, ..., 9999773, 9999897, 10000000]), - col_indices=tensor([ 1661, 2279, 2856, ..., 99449, 99691, 99739]), - values=tensor([0.1663, 0.0376, 0.1009, ..., 0.7118, 0.9261, 0.1836]), + col_indices=tensor([ 2815, 3659, 3819, ..., 99680, 99902, 99908]), + values=tensor([0.9090, 0.6219, 0.8306, ..., 0.7078, 0.9813, 0.3499]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7724, 0.0559, 0.5235, ..., 0.7708, 0.2517, 0.0642]) +tensor([0.4504, 0.4366, 0.8174, ..., 0.8474, 0.0095, 0.3536]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.477163553237915 seconds +Time: 10.097931861877441 seconds -[18.37, 17.8, 17.96, 17.86, 22.53, 18.03, 17.75, 18.1, 18.23, 17.82] -[81.1] -17.56674098968506 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.477163553237915, 'TIME_S_1KI': 3.8126504924446563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1424.6626942634582, 'W': 81.1} -[18.37, 17.8, 17.96, 17.86, 22.53, 18.03, 17.75, 18.1, 18.23, 17.82, 18.37, 17.83, 17.94, 18.39, 17.8, 17.92, 17.99, 17.94, 18.19, 17.86] -328.47 -16.4235 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.477163553237915, 'TIME_S_1KI': 3.8126504924446563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1424.6626942634582, 'W': 81.1, 'J_1KI': 518.4362060638495, 'W_1KI': 29.512372634643373, 'W_D': 64.67649999999999, 'J_D': 1136.1553236193656, 'W_D_1KI': 23.535844250363898, 'J_D_1KI': 8.564717703916994} +[19.1, 18.42, 18.45, 18.83, 18.64, 19.33, 22.53, 19.65, 18.77, 18.43] +[83.66] +16.55988049507141 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2634, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.097931861877441, 'TIME_S_1KI': 3.83368711536729, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1385.399602217674, 'W': 83.66} +[19.1, 18.42, 18.45, 18.83, 18.64, 19.33, 22.53, 19.65, 18.77, 18.43, 18.95, 18.64, 18.76, 18.76, 18.59, 18.84, 18.6, 18.55, 18.85, 18.64] +341.77 +17.0885 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2634, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.097931861877441, 'TIME_S_1KI': 3.83368711536729, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1385.399602217674, 'W': 83.66, 'J_1KI': 525.9679583210608, 'W_1KI': 31.761579347000758, 'W_D': 66.5715, 'J_D': 1102.4160843776465, 'W_D_1KI': 25.273917995444194, 'J_D_1KI': 9.595261197966664} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..2bf9ffc --- /dev/null +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 305, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.454807043075562, "TIME_S_1KI": 34.278055878936264, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2894.4997478580476, "W": 65.14, "J_1KI": 9490.163107731303, "W_1KI": 213.5737704918033, "W_D": 47.742999999999995, "J_D": 2121.4630252070424, "W_D_1KI": 156.53442622950817, "J_D_1KI": 513.227626981994} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..535d52d --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.442110300064087} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 998, 1943, ..., + 99998007, 99998988, 100000000]), + col_indices=tensor([ 14, 86, 254, ..., 99837, 99963, 99972]), + values=tensor([0.4084, 0.5337, 0.1918, ..., 0.2591, 0.7499, 0.7582]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.0487, 0.8713, 0.6713, ..., 0.4052, 0.6741, 0.8037]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 3.442110300064087 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '305', '-ss', '100000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.454807043075562} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
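Every .output file in this patch shows the same calibration pattern as the run above: a 100-iteration probe, then the iteration count is scaled up until the timed loop crosses the 10-second BASELINE_TIME_S target, and the final count is what the summary records as ITERATIONS. The logged counts are consistent with scaling each step by roughly 10.5 s divided by the measured time (100 x 10.5 / 3.442 ≈ 305 in the 0.01-density run above; the same rule reproduces the 100 -> 22711 -> 30731 progression earlier in this file set). The loop below is a hedged reconstruction of that rule, not code lifted from the benchmark driver; run_spmv stands in for whatever launches spmv.py and returns the reported TIME_S.

TARGET_S = 10        # 'BASELINE_TIME_S' in the summaries
HEADROOM = 1.05      # the logged counts match a ~10.5 s scaling target

def calibrate(run_spmv, start_iters=100):
    iters = start_iters
    elapsed = run_spmv(iters)                         # e.g. 100 iterations -> 3.44 s
    while elapsed < TARGET_S:
        iters = int(iters * TARGET_S * HEADROOM / elapsed)
        elapsed = run_spmv(iters)                     # e.g. 305 iterations -> 10.45 s
    return iters, elapsed                             # recorded as ITERATIONS / TIME_S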
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 936, 2004, ..., + 99997969, 99998972, 100000000]), + col_indices=tensor([ 114, 337, 399, ..., 99809, 99864, 99916]), + values=tensor([0.1247, 0.0736, 0.4797, ..., 0.5713, 0.1398, 0.9744]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.2448, 0.2958, 0.9514, ..., 0.8807, 0.6919, 0.0958]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.454807043075562 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 936, 2004, ..., + 99997969, 99998972, 100000000]), + col_indices=tensor([ 114, 337, 399, ..., 99809, 99864, 99916]), + values=tensor([0.1247, 0.0736, 0.4797, ..., 0.5713, 0.1398, 0.9744]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.2448, 0.2958, 0.9514, ..., 0.8807, 0.6919, 0.0958]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.454807043075562 seconds + +[18.95, 18.95, 18.47, 18.47, 18.56, 18.56, 22.56, 20.46, 19.09, 19.2] +[65.14] +44.43505907058716 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 305, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.454807043075562, 'TIME_S_1KI': 34.278055878936264, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2894.4997478580476, 'W': 65.14} +[18.95, 18.95, 18.47, 18.47, 18.56, 18.56, 22.56, 20.46, 19.09, 19.2, 18.7, 18.78, 23.07, 19.93, 18.61, 18.97, 18.63, 18.45, 18.49, 18.93] +347.94000000000005 +17.397000000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 305, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.454807043075562, 'TIME_S_1KI': 34.278055878936264, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2894.4997478580476, 'W': 65.14, 'J_1KI': 9490.163107731303, 'W_1KI': 213.5737704918033, 'W_D': 47.742999999999995, 'J_D': 2121.4630252070424, 'W_D_1KI': 156.53442622950817, 'J_D_1KI': 513.227626981994} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json index d39bedd..62c8a18 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 65044, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.857811450958252, "TIME_S_1KI": 0.1669302541503944, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1160.4457891416548, "W": 82.65999999999998, "J_1KI": 17.840935199890147, 
"W_1KI": 1.2708320521493142, "W_D": 66.37149999999998, "J_D": 931.7750749336478, "W_D_1KI": 1.0204092614230365, "J_D_1KI": 0.015687984463179334} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 63380, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.448274612426758, "TIME_S_1KI": 0.16485128766845625, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1245.8409766912462, "W": 89.9, "J_1KI": 19.656689439748284, "W_1KI": 1.418428526349006, "W_D": 73.0515, "J_D": 1012.353193645835, "W_D_1KI": 1.1525954559798044, "J_D_1KI": 0.018185475796462677} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output index a139e8c..634073e 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03411436080932617} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03290224075317383} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99997, 99998, +tensor(crow_indices=tensor([ 0, 1, 2, ..., 99998, 99998, 100000]), - col_indices=tensor([61814, 31861, 93735, ..., 37976, 26709, 88923]), - values=tensor([0.4964, 0.9275, 0.0463, ..., 0.6388, 0.5613, 0.1901]), + col_indices=tensor([54942, 27722, 96194, ..., 71284, 7275, 46774]), + values=tensor([0.2018, 0.2814, 0.6673, ..., 0.3211, 0.5324, 0.9423]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.9466, 0.9805, 0.4146, ..., 0.4981, 0.9805, 0.0095]) +tensor([0.2274, 0.1599, 0.8719, ..., 0.7214, 0.9707, 0.2763]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.03411436080932617 seconds +Time: 0.03290224075317383 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '30778', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.968464136123657} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '31912', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.286698579788208} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 99993, 99997, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99998, 100000, 100000]), - col_indices=tensor([86302, 87189, 44148, ..., 4090, 44893, 91495]), - values=tensor([0.5947, 0.5243, 0.4325, ..., 0.8552, 0.8488, 0.7980]), + col_indices=tensor([93916, 16703, 19935, ..., 93618, 1127, 15891]), + values=tensor([0.6597, 0.5386, 0.9885, ..., 0.0840, 0.8294, 0.9651]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2800, 0.7786, 0.6115, ..., 0.5946, 0.9897, 0.5537]) +tensor([0.8297, 0.7040, 0.7892, ..., 0.5694, 0.6960, 0.4166]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 4.968464136123657 seconds +Time: 5.286698579788208 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '65044', '-ss', '100000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.857811450958252} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '63380', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.448274612426758} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 99999, 99999, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99995, 99998, 100000]), - col_indices=tensor([27172, 43192, 23755, ..., 52370, 88374, 3897]), - values=tensor([0.0211, 0.7600, 0.3262, ..., 0.1220, 0.7210, 0.9662]), + col_indices=tensor([52857, 69814, 79622, ..., 57272, 6879, 75944]), + values=tensor([0.8320, 0.0214, 0.5046, ..., 0.3100, 0.5363, 0.2559]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8254, 0.5550, 0.3634, ..., 0.5298, 0.8710, 0.0274]) +tensor([0.4864, 0.2626, 0.9920, ..., 0.6643, 0.2369, 0.7561]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.857811450958252 seconds +Time: 10.448274612426758 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 99999, 99999, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99995, 99998, 100000]), - col_indices=tensor([27172, 43192, 23755, ..., 52370, 88374, 3897]), - values=tensor([0.0211, 0.7600, 0.3262, ..., 0.1220, 0.7210, 0.9662]), + col_indices=tensor([52857, 69814, 79622, ..., 57272, 6879, 75944]), + values=tensor([0.8320, 0.0214, 0.5046, ..., 0.3100, 0.5363, 0.2559]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8254, 0.5550, 0.3634, ..., 0.5298, 0.8710, 0.0274]) +tensor([0.4864, 0.2626, 0.9920, ..., 0.6643, 0.2369, 0.7561]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.857811450958252 seconds +Time: 10.448274612426758 seconds -[18.23, 17.9, 18.0, 17.99, 18.15, 18.02, 18.22, 19.12, 18.05, 18.53] -[82.66] -14.038782835006714 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 65044, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.857811450958252, 'TIME_S_1KI': 0.1669302541503944, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1160.4457891416548, 'W': 82.65999999999998} -[18.23, 17.9, 18.0, 17.99, 18.15, 18.02, 18.22, 19.12, 18.05, 18.53, 18.28, 17.9, 18.17, 17.79, 18.21, 17.83, 17.84, 17.79, 18.22, 18.1] -325.77 -16.2885 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 65044, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.857811450958252, 'TIME_S_1KI': 0.1669302541503944, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1160.4457891416548, 'W': 82.65999999999998, 'J_1KI': 17.840935199890147, 'W_1KI': 1.2708320521493142, 'W_D': 66.37149999999998, 'J_D': 931.7750749336478, 'W_D_1KI': 1.0204092614230365, 'J_D_1KI': 0.015687984463179334} +[19.19, 18.69, 18.45, 18.47, 18.61, 18.61, 18.61, 19.04, 18.85, 18.85] +[89.9] +13.858075380325317 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 63380, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.448274612426758, 'TIME_S_1KI': 0.16485128766845625, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.8409766912462, 'W': 89.9} +[19.19, 18.69, 18.45, 18.47, 18.61, 18.61, 18.61, 19.04, 18.85, 18.85, 19.75, 18.58, 18.57, 18.93, 18.64, 18.51, 18.65, 18.72, 18.83, 18.63] +336.96999999999997 +16.848499999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 63380, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.448274612426758, 'TIME_S_1KI': 0.16485128766845625, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.8409766912462, 'W': 89.9, 'J_1KI': 19.656689439748284, 'W_1KI': 1.418428526349006, 'W_D': 73.0515, 'J_D': 1012.353193645835, 'W_D_1KI': 1.1525954559798044, 'J_D_1KI': 0.018185475796462677} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.json index 4c80d3b..fe215d7 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 45510, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.42077898979187, "TIME_S_1KI": 0.22897778487787013, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1214.5695349788666, "W": 87.39, "J_1KI": 26.687970445591443, "W_1KI": 1.920237310481213, "W_D": 71.04925, "J_D": 987.4614318926334, "W_D_1KI": 1.561178861788618, "J_D_1KI": 0.03430408397689778} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 44511, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.551278114318848, "TIME_S_1KI": 0.2370487770285738, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1257.1718712615966, "W": 90.27, "J_1KI": 28.244071606155703, "W_1KI": 2.0280380130754194, "W_D": 73.12225, "J_D": 1018.3586558475494, "W_D_1KI": 1.6427905461571295, "J_D_1KI": 0.03690751827991125} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.output index d883b1c..13147a5 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.039800167083740234} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.03855562210083008} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 10, ..., 499993, 499997, +tensor(crow_indices=tensor([ 0, 5, 15, ..., 499991, 499992, 500000]), - col_indices=tensor([ 4828, 48889, 52503, ..., 31911, 36084, 76746]), - values=tensor([0.4793, 0.0828, 0.1169, ..., 0.5530, 0.3033, 0.0718]), + col_indices=tensor([ 4960, 13928, 21214, ..., 73878, 78273, 81286]), + values=tensor([0.1301, 0.5462, 0.1608, ..., 0.3095, 0.2509, 0.1525]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.5059, 0.6298, 0.1664, ..., 0.1879, 0.5431, 0.8952]) +tensor([0.0485, 0.4431, 0.3633, ..., 0.2811, 0.8933, 0.3293]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 0.039800167083740234 seconds +Time: 0.03855562210083008 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '26381', '-ss', '100000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.086489200592041} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27233', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.424152374267578} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 499989, 499994, +tensor(crow_indices=tensor([ 0, 8, 17, ..., 499990, 499995, 500000]), - col_indices=tensor([14519, 22983, 80951, ..., 84187, 95762, 97051]), - values=tensor([0.1542, 0.8524, 0.3039, ..., 0.4189, 0.6409, 0.4295]), + col_indices=tensor([ 37, 25573, 30814, ..., 36843, 53237, 94063]), + values=tensor([0.0210, 0.1202, 0.2131, ..., 0.0650, 0.9267, 0.3226]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.0352, 0.2725, 0.4170, ..., 0.1491, 0.4370, 0.8032]) +tensor([0.5660, 0.0682, 0.3989, ..., 0.3041, 0.2596, 0.1166]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 6.086489200592041 seconds +Time: 6.424152374267578 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '45510', '-ss', '100000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.42077898979187} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '44511', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.551278114318848} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 19, ..., 499990, 499995, +tensor(crow_indices=tensor([ 0, 5, 8, ..., 499992, 499997, 500000]), - col_indices=tensor([14786, 31808, 59751, ..., 39791, 89593, 95677]), - values=tensor([0.6756, 0.3891, 0.0863, ..., 0.6881, 0.4209, 0.2818]), + col_indices=tensor([ 63, 39760, 47695, ..., 39492, 46376, 76986]), + values=tensor([0.5378, 0.5224, 0.7674, ..., 0.6916, 0.3031, 0.4245]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.4948, 0.2234, 0.2049, ..., 0.0447, 0.7948, 0.3022]) +tensor([0.4779, 0.0684, 0.1618, ..., 0.9444, 0.2606, 0.1176]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.42077898979187 seconds +Time: 10.551278114318848 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 19, ..., 499990, 499995, +tensor(crow_indices=tensor([ 0, 5, 8, ..., 499992, 499997, 500000]), - col_indices=tensor([14786, 31808, 59751, ..., 39791, 89593, 95677]), - values=tensor([0.6756, 0.3891, 0.0863, ..., 0.6881, 0.4209, 0.2818]), + col_indices=tensor([ 63, 39760, 47695, ..., 39492, 46376, 76986]), + values=tensor([0.5378, 0.5224, 0.7674, ..., 0.6916, 0.3031, 0.4245]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.4948, 0.2234, 0.2049, ..., 0.0447, 0.7948, 0.3022]) +tensor([0.4779, 0.0684, 0.1618, ..., 0.9444, 0.2606, 0.1176]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.42077898979187 seconds +Time: 10.551278114318848 seconds -[19.52, 19.27, 18.28, 17.93, 17.92, 18.18, 17.97, 18.17, 18.27, 18.17] -[87.39] -13.898266792297363 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 45510, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.42077898979187, 'TIME_S_1KI': 0.22897778487787013, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1214.5695349788666, 'W': 87.39} -[19.52, 19.27, 18.28, 17.93, 17.92, 18.18, 17.97, 18.17, 18.27, 18.17, 18.18, 17.95, 17.9, 18.1, 18.19, 18.03, 17.88, 17.96, 18.05, 17.66] -326.815 -16.34075 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 45510, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.42077898979187, 'TIME_S_1KI': 0.22897778487787013, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1214.5695349788666, 'W': 87.39, 'J_1KI': 26.687970445591443, 'W_1KI': 1.920237310481213, 'W_D': 71.04925, 'J_D': 987.4614318926334, 'W_D_1KI': 1.561178861788618, 'J_D_1KI': 0.03430408397689778} +[19.01, 18.3, 18.41, 18.76, 20.61, 20.75, 19.07, 18.89, 18.69, 18.25] +[90.27] +13.926795959472656 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 44511, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.551278114318848, 'TIME_S_1KI': 0.2370487770285738, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1257.1718712615966, 'W': 90.27} +[19.01, 18.3, 18.41, 18.76, 20.61, 20.75, 19.07, 18.89, 18.69, 18.25, 20.63, 20.31, 18.94, 19.02, 18.94, 18.38, 18.66, 18.47, 18.45, 18.72] +342.95500000000004 +17.147750000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 44511, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.551278114318848, 'TIME_S_1KI': 0.2370487770285738, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1257.1718712615966, 'W': 90.27, 'J_1KI': 28.244071606155703, 'W_1KI': 2.0280380130754194, 'W_D': 73.12225, 'J_D': 1018.3586558475494, 'W_D_1KI': 1.6427905461571295, 'J_D_1KI': 0.03690751827991125} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json index df907c2..d8ae514 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 251263, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.831458568572998, "TIME_S_1KI": 0.04310805239359952, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1035.0438996100424, "W": 74.32999999999998, "J_1KI": 4.119364568639403, "W_1KI": 0.2958254896264073, "W_D": 58.06174999999998, "J_D": 808.5088139134047, "W_D_1KI": 0.2310795859318721, "J_D_1KI": 0.0009196721599752933} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 253116, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.375891208648682, "TIME_S_1KI": 0.04099263266110669, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1177.9051132917405, "W": 82.7, "J_1KI": 4.653617761389009, "W_1KI": 0.32672766636640904, "W_D": 65.55350000000001, "J_D": 933.6856450322869, "W_D_1KI": 0.25898599851451515, "J_D_1KI": 0.0010231909421550402} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output index bb6d734..55827ff 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.019912242889404297} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02077317237854004} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 9999, 9999, 10000]), - col_indices=tensor([1836, 4826, 4334, ..., 9720, 1658, 3253]), - values=tensor([0.6220, 0.1290, 0.9015, ..., 0.3260, 0.3650, 0.7979]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 10000, 10000, 10000]), + col_indices=tensor([7897, 2161, 4159, ..., 8292, 1674, 8120]), + values=tensor([0.6051, 0.7025, 0.1799, ..., 0.7244, 0.2043, 0.9406]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.5720, 0.7293, 0.6280, ..., 0.0388, 0.6575, 0.1842]) +tensor([0.0769, 0.9771, 0.4968, ..., 0.7484, 0.1210, 0.2610]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.019912242889404297 seconds +Time: 0.02077317237854004 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52731', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.203561544418335} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50545', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.0967555046081543} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 5, ..., 9999, 9999, 10000]), - col_indices=tensor([1066, 3027, 5018, ..., 516, 4404, 8191]), - values=tensor([0.9408, 0.5840, 0.0232, ..., 0.8231, 0.8506, 0.7636]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9996, 9998, 10000]), + col_indices=tensor([2369, 543, 2531, ..., 9604, 4226, 7578]), + values=tensor([0.6904, 0.7524, 0.6457, ..., 0.1985, 0.4076, 0.1702]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.2181, 0.5672, 0.6634, ..., 0.2110, 0.3174, 0.6218]) +tensor([0.3802, 0.7278, 0.1089, ..., 0.9979, 0.0210, 0.2529]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 2.203561544418335 seconds +Time: 2.0967555046081543 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '251263', '-ss', '10000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.831458568572998} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '253116', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.375891208648682} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9999, 10000]), - col_indices=tensor([6180, 2035, 3071, ..., 490, 6496, 2315]), - values=tensor([0.2073, 0.5439, 0.2551, ..., 0.7953, 0.4550, 0.0057]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 9998, 10000, 10000]), + col_indices=tensor([3651, 7635, 677, ..., 4204, 2003, 9081]), + values=tensor([0.0091, 0.3889, 0.0620, ..., 0.0091, 0.2713, 0.4199]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.4967, 0.8117, 0.4603, ..., 0.8210, 0.9832, 0.0501]) +tensor([0.5126, 0.2678, 0.0619, ..., 0.9520, 0.1902, 0.4690]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.831458568572998 seconds +Time: 10.375891208648682 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9999, 10000]), - col_indices=tensor([6180, 2035, 3071, ..., 490, 6496, 2315]), - values=tensor([0.2073, 0.5439, 0.2551, ..., 0.7953, 0.4550, 0.0057]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 9998, 10000, 10000]), + col_indices=tensor([3651, 7635, 677, ..., 4204, 2003, 9081]), + values=tensor([0.0091, 0.3889, 0.0620, ..., 0.0091, 0.2713, 0.4199]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.4967, 0.8117, 0.4603, ..., 0.8210, 0.9832, 0.0501]) +tensor([0.5126, 0.2678, 0.0619, ..., 0.9520, 0.1902, 0.4690]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.831458568572998 seconds +Time: 10.375891208648682 seconds -[18.25, 18.21, 18.06, 17.61, 18.2, 17.92, 18.19, 17.85, 18.22, 17.78] -[74.33] -13.924981832504272 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 251263, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.831458568572998, 'TIME_S_1KI': 0.04310805239359952, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1035.0438996100424, 'W': 74.32999999999998} -[18.25, 18.21, 18.06, 17.61, 18.2, 17.92, 18.19, 17.85, 18.22, 17.78, 19.64, 17.79, 18.1, 18.15, 18.23, 17.78, 18.02, 18.14, 18.11, 17.9] -325.365 -16.268250000000002 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 251263, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.831458568572998, 'TIME_S_1KI': 0.04310805239359952, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1035.0438996100424, 'W': 74.32999999999998, 'J_1KI': 4.119364568639403, 'W_1KI': 0.2958254896264073, 'W_D': 58.06174999999998, 'J_D': 808.5088139134047, 'W_D_1KI': 0.2310795859318721, 'J_D_1KI': 0.0009196721599752933} +[19.24, 18.44, 19.49, 18.43, 18.88, 18.47, 18.56, 18.63, 18.72, 20.67] +[82.7] +14.243108987808228 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.375891208648682, 'TIME_S_1KI': 0.04099263266110669, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1177.9051132917405, 'W': 82.7} +[19.24, 18.44, 19.49, 18.43, 18.88, 18.47, 18.56, 18.63, 18.72, 20.67, 18.97, 18.57, 18.9, 18.61, 18.67, 22.43, 19.29, 18.63, 19.39, 18.76] +342.92999999999995 +17.146499999999996 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.375891208648682, 'TIME_S_1KI': 0.04099263266110669, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1177.9051132917405, 'W': 82.7, 'J_1KI': 4.653617761389009, 'W_1KI': 0.32672766636640904, 'W_D': 65.55350000000001, 'J_D': 933.6856450322869, 'W_D_1KI': 0.25898599851451515, 'J_D_1KI': 0.0010231909421550402} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json index 610ea48..76a952a 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 195195, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.530660390853882, "TIME_S_1KI": 0.05394943718258092, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1115.5897302937508, "W": 79.81, "J_1KI": 5.7152577181472415, "W_1KI": 0.40887317810394735, "W_D": 63.323, "J_D": 885.1332977244854, "W_D_1KI": 0.3244089244089244, "J_D_1KI": 0.0016619735362531027} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 192145, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.452706813812256, "TIME_S_1KI": 0.05440009791465954, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1239.9078391647338, "W": 88.07, "J_1KI": 6.452979984723692, "W_1KI": 0.45835176559369223, "W_D": 71.1265, "J_D": 1001.3660147876739, "W_D_1KI": 0.37017096463608207, "J_D_1KI": 0.00192651885105562} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output index d29559f..4a3271a 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02070331573486328} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.021983623504638672} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 12, 18, ..., 99984, 99988, +tensor(crow_indices=tensor([ 0, 7, 15, ..., 99984, 99994, 100000]), - col_indices=tensor([ 729, 732, 881, ..., 6002, 8211, 9107]), - values=tensor([0.1473, 0.0535, 0.1985, ..., 0.7529, 0.2592, 0.5040]), + col_indices=tensor([ 435, 4137, 4610, ..., 2205, 2567, 7007]), + values=tensor([0.9001, 0.7437, 0.7565, ..., 0.0290, 0.0508, 0.5447]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7970, 0.9066, 0.2901, ..., 0.5249, 0.8444, 0.0204]) +tensor([0.8078, 0.4866, 0.3722, ..., 0.5482, 0.6717, 0.7643]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.02070331573486328 seconds +Time: 0.021983623504638672 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50716', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.728132724761963} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '47762', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.6100127696990967} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 15, 22, ..., 99985, 99992, +tensor(crow_indices=tensor([ 0, 6, 13, ..., 99978, 99986, 100000]), - col_indices=tensor([ 992, 2241, 2699, ..., 7485, 9702, 9755]), - values=tensor([0.1587, 0.3354, 0.8907, ..., 0.7458, 0.3952, 0.4445]), + col_indices=tensor([ 692, 1536, 1925, ..., 6926, 7604, 9271]), + values=tensor([0.2408, 0.0654, 0.4638, ..., 0.3016, 0.2922, 0.8002]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7531, 0.7793, 0.1410, ..., 0.7186, 0.3031, 0.2892]) +tensor([0.4827, 0.2535, 0.7045, ..., 0.0246, 0.7771, 0.7807]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 2.728132724761963 seconds +Time: 2.6100127696990967 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '195195', '-ss', '10000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.530660390853882} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '192145', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.452706813812256} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 99979, 99991, +tensor(crow_indices=tensor([ 0, 14, 27, ..., 99983, 99991, 100000]), - col_indices=tensor([ 74, 913, 1678, ..., 8042, 8094, 8596]), - values=tensor([0.3009, 0.6152, 0.9919, ..., 0.0065, 0.7111, 0.2350]), + col_indices=tensor([ 156, 519, 905, ..., 8338, 9210, 9603]), + values=tensor([0.5955, 0.0542, 0.7725, ..., 0.9735, 0.5428, 0.7094]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1186, 0.1112, 0.0471, ..., 0.5653, 0.6270, 0.7376]) +tensor([0.3824, 0.6115, 0.2638, ..., 0.8702, 0.5681, 0.8528]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.530660390853882 seconds +Time: 10.452706813812256 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 99979, 99991, +tensor(crow_indices=tensor([ 0, 14, 27, ..., 99983, 99991, 100000]), - col_indices=tensor([ 74, 913, 1678, ..., 8042, 8094, 8596]), - values=tensor([0.3009, 0.6152, 0.9919, ..., 0.0065, 0.7111, 0.2350]), + col_indices=tensor([ 156, 519, 905, ..., 8338, 9210, 9603]), + values=tensor([0.5955, 0.0542, 0.7725, ..., 0.9735, 0.5428, 0.7094]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1186, 0.1112, 0.0471, ..., 0.5653, 0.6270, 0.7376]) +tensor([0.3824, 0.6115, 0.2638, ..., 0.8702, 0.5681, 0.8528]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.530660390853882 seconds +Time: 10.452706813812256 seconds -[18.85, 21.66, 18.61, 18.04, 18.26, 18.19, 18.11, 17.83, 18.59, 17.86] -[79.81] -13.978069543838501 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 195195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.530660390853882, 'TIME_S_1KI': 0.05394943718258092, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1115.5897302937508, 'W': 79.81} -[18.85, 21.66, 18.61, 18.04, 18.26, 18.19, 18.11, 17.83, 18.59, 17.86, 18.27, 17.86, 18.11, 17.94, 18.2, 17.78, 18.07, 17.82, 17.85, 18.66] -329.74 -16.487000000000002 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 195195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.530660390853882, 'TIME_S_1KI': 0.05394943718258092, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1115.5897302937508, 'W': 79.81, 'J_1KI': 5.7152577181472415, 'W_1KI': 0.40887317810394735, 'W_D': 63.323, 'J_D': 885.1332977244854, 'W_D_1KI': 0.3244089244089244, 'J_D_1KI': 0.0016619735362531027} +[18.87, 18.51, 19.31, 18.76, 18.76, 18.65, 19.3, 18.75, 18.8, 18.73] +[88.07] +14.078662872314453 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 192145, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.452706813812256, 'TIME_S_1KI': 0.05440009791465954, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1239.9078391647338, 'W': 88.07} +[18.87, 18.51, 19.31, 18.76, 18.76, 18.65, 19.3, 18.75, 18.8, 18.73, 19.62, 18.55, 18.88, 18.66, 18.93, 18.62, 18.78, 18.63, 19.05, 18.64] +338.87 +16.9435 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 192145, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.452706813812256, 'TIME_S_1KI': 0.05440009791465954, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1239.9078391647338, 'W': 88.07, 'J_1KI': 6.452979984723692, 'W_1KI': 0.45835176559369223, 'W_D': 71.1265, 'J_D': 1001.3660147876739, 'W_D_1KI': 0.37017096463608207, 'J_D_1KI': 0.00192651885105562} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json index 9a2b891..3ad103d 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json 
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 58200, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.524534702301025, "TIME_S_1KI": 0.18083392959280112, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1230.7857535743713, "W": 87.63, "J_1KI": 21.147521539078543, "W_1KI": 1.5056701030927835, "W_D": 71.18625, "J_D": 999.8290807986259, "W_D_1KI": 1.223131443298969, "J_D_1KI": 0.02101600418039466} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 57161, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.708652973175049, "TIME_S_1KI": 0.1873419459627202, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1288.4431428408623, "W": 90.87, "J_1KI": 22.540598359735874, "W_1KI": 1.5897202638162384, "W_D": 62.09775, "J_D": 880.4822292653918, "W_D_1KI": 1.086365703889015, "J_D_1KI": 0.01900536561447517} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output index b0a27e2..c398e44 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,34 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.032953500747680664} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.034076690673828125} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 87, 187, ..., 999811, +tensor(crow_indices=tensor([ 0, 107, 208, ..., 999803, + 999917, 1000000]), + col_indices=tensor([ 77, 189, 287, ..., 9483, 9488, 9787]), + values=tensor([0.8729, 0.2143, 0.4098, ..., 0.2067, 0.2304, 0.4743]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8489, 0.8735, 0.2882, ..., 0.7921, 0.8211, 0.7022]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.034076690673828125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '30812', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.659840822219849} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 89, 189, ..., 999802, 999895, 1000000]), - col_indices=tensor([ 60, 162, 170, ..., 9440, 9828, 9931]), - values=tensor([0.9691, 0.2545, 0.9233, ..., 0.5616, 0.3084, 0.5234]), + col_indices=tensor([ 64, 86, 225, ..., 9919, 9935, 9940]), + values=tensor([0.6271, 0.3388, 0.3681, ..., 0.0346, 0.0873, 0.5226]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0498, 0.1923, 0.6628, ..., 0.9993, 0.6267, 0.7810]) +tensor([0.1570, 0.5305, 0.5278, ..., 0.7249, 0.7100, 0.4427]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,39 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 0.032953500747680664 seconds +Time: 5.659840822219849 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '31863', '-ss', '10000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.748403549194336} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '57161', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.708652973175049} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 110, 213, ..., 999801, - 999901, 1000000]), - col_indices=tensor([ 77, 119, 129, ..., 9737, 9950, 9990]), - values=tensor([0.4475, 0.9311, 0.1906, ..., 0.1630, 0.9417, 0.6731]), - size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2219, 0.3377, 0.8817, ..., 0.6372, 0.3631, 0.6898]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000000 -Density: 0.01 -Time: 5.748403549194336 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '58200', '-ss', '10000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.524534702301025} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 116, 213, ..., 999804, +tensor(crow_indices=tensor([ 0, 110, 202, ..., 999807, 999907, 1000000]), - col_indices=tensor([ 96, 100, 135, ..., 9713, 9783, 9969]), - values=tensor([0.2374, 0.5111, 0.2281, ..., 0.8006, 0.1634, 0.0785]), + col_indices=tensor([ 52, 75, 82, ..., 9616, 9679, 9992]), + values=tensor([0.7610, 0.2228, 0.0334, ..., 0.9210, 0.2713, 0.2170]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.4187, 0.6286, 0.6485, ..., 0.1996, 0.5955, 0.8769]) +tensor([0.2927, 0.9778, 0.1836, ..., 0.2052, 0.7640, 0.7077]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.524534702301025 seconds +Time: 10.708652973175049 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 116, 213, ..., 999804, +tensor(crow_indices=tensor([ 0, 110, 202, ..., 999807, 999907, 1000000]), - col_indices=tensor([ 96, 100, 135, ..., 9713, 9783, 9969]), - values=tensor([0.2374, 0.5111, 0.2281, ..., 0.8006, 0.1634, 0.0785]), + col_indices=tensor([ 52, 75, 82, ..., 9616, 9679, 9992]), + values=tensor([0.7610, 0.2228, 0.0334, ..., 0.9210, 0.2713, 0.2170]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.4187, 0.6286, 0.6485, ..., 0.1996, 0.5955, 0.8769]) +tensor([0.2927, 0.9778, 0.1836, ..., 0.2052, 0.7640, 0.7077]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.524534702301025 seconds +Time: 10.708652973175049 seconds -[18.15, 17.92, 18.14, 18.3, 18.24, 17.98, 17.94, 18.28, 17.96, 17.99] -[87.63] -14.045255661010742 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 58200, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.524534702301025, 'TIME_S_1KI': 0.18083392959280112, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.7857535743713, 'W': 87.63} -[18.15, 17.92, 18.14, 18.3, 18.24, 17.98, 17.94, 18.28, 17.96, 17.99, 18.28, 18.07, 17.88, 17.88, 17.9, 18.02, 21.74, 18.02, 18.41, 17.97] -328.875 -16.44375 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 58200, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.524534702301025, 'TIME_S_1KI': 0.18083392959280112, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.7857535743713, 'W': 87.63, 'J_1KI': 21.147521539078543, 'W_1KI': 1.5056701030927835, 'W_D': 71.18625, 'J_D': 999.8290807986259, 'W_D_1KI': 1.223131443298969, 'J_D_1KI': 0.02101600418039466} +[50.63, 43.53, 43.67, 43.68, 44.99, 46.29, 46.23, 45.2, 45.28, 44.72] +[90.87] +14.178971529006958 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57161, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.708652973175049, 'TIME_S_1KI': 0.1873419459627202, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1288.4431428408623, 'W': 90.87} +[50.63, 43.53, 43.67, 43.68, 44.99, 46.29, 46.23, 45.2, 45.28, 44.72, 18.99, 18.86, 18.72, 18.73, 18.8, 18.58, 18.67, 18.89, 18.72, 18.87] +575.4450000000002 +28.772250000000007 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57161, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.708652973175049, 'TIME_S_1KI': 0.1873419459627202, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1288.4431428408623, 'W': 90.87, 'J_1KI': 22.540598359735874, 'W_1KI': 1.5897202638162384, 'W_D': 62.09775, 'J_D': 880.4822292653918, 'W_D_1KI': 1.086365703889015, 'J_D_1KI': 0.01900536561447517} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json index 6f3f4c5..5df6b87 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json 
+++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8756, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.44620156288147, "TIME_S_1KI": 1.193033527053617, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1333.577045211792, "W": 83.47, "J_1KI": 152.30436788622566, "W_1KI": 9.532891731384193, "W_D": 66.97425, "J_D": 1070.0290214481354, "W_D_1KI": 7.648955002284148, "J_D_1KI": 0.8735672684198433} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8729, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.783016443252563, "TIME_S_1KI": 1.2353094791216135, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1352.3832885718346, "W": 87.53, "J_1KI": 154.9299219351397, "W_1KI": 10.027494558368657, "W_D": 70.42325, "J_D": 1088.075247651279, "W_D_1KI": 8.067733990147781, "J_D_1KI": 0.924244929562124} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output index e9fd20d..23aad01 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.14447855949401855} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.13656830787658691} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 507, 1042, ..., 4998981, - 4999514, 5000000]), - col_indices=tensor([ 8, 38, 72, ..., 9951, 9971, 9980]), - values=tensor([0.6058, 0.5976, 0.8000, ..., 0.1658, 0.6430, 0.8003]), +tensor(crow_indices=tensor([ 0, 509, 1006, ..., 4999003, + 4999502, 5000000]), + col_indices=tensor([ 3, 5, 16, ..., 9942, 9959, 9980]), + values=tensor([0.0344, 0.2328, 0.6267, ..., 0.4030, 0.4110, 0.2937]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.9071, 0.4156, 0.9536, ..., 0.8291, 0.1377, 0.0392]) +tensor([0.6327, 0.2020, 0.0705, ..., 0.9187, 0.0315, 0.1283]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 0.14447855949401855 seconds +Time: 0.13656830787658691 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7267', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 8.714413404464722} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7688', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.24746060371399} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 473, 945, ..., 4999048, - 4999511, 5000000]), - col_indices=tensor([ 4, 25, 47, ..., 9937, 9967, 9993]), - values=tensor([0.2180, 0.8351, 0.6646, ..., 0.1409, 0.2302, 0.7325]), +tensor(crow_indices=tensor([ 0, 516, 1004, ..., 4998937, + 4999450, 5000000]), + col_indices=tensor([ 6, 10, 16, ..., 9961, 9963, 9994]), + values=tensor([0.1497, 0.2572, 0.1920, ..., 0.0397, 0.6799, 0.4086]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.9797, 0.6380, 0.6196, ..., 0.0914, 0.5364, 0.9534]) +tensor([0.8056, 0.3224, 0.7213, ..., 0.7847, 0.3449, 0.8828]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 8.714413404464722 seconds +Time: 9.24746060371399 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8756', '-ss', '10000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.44620156288147} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8729', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.783016443252563} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 484, 1014, ..., 4998952, - 4999472, 5000000]), - col_indices=tensor([ 28, 62, 89, ..., 9928, 9935, 9940]), - values=tensor([0.3908, 0.2484, 0.4500, ..., 0.3668, 0.3711, 0.8718]), +tensor(crow_indices=tensor([ 0, 513, 1011, ..., 4999049, + 4999518, 5000000]), + col_indices=tensor([ 19, 50, 54, ..., 9882, 9895, 9931]), + values=tensor([0.6614, 0.3405, 0.4062, ..., 0.2762, 0.6907, 0.8766]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.8601, 0.7620, 0.0732, ..., 0.0545, 0.3750, 0.1934]) +tensor([0.3093, 0.3070, 0.7107, ..., 0.6329, 0.2063, 0.9404]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.44620156288147 seconds +Time: 10.783016443252563 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 484, 1014, ..., 4998952, - 4999472, 5000000]), - col_indices=tensor([ 28, 62, 89, ..., 9928, 9935, 9940]), - values=tensor([0.3908, 0.2484, 0.4500, ..., 0.3668, 0.3711, 0.8718]), +tensor(crow_indices=tensor([ 0, 513, 1011, ..., 4999049, + 4999518, 5000000]), + col_indices=tensor([ 19, 50, 54, ..., 9882, 9895, 9931]), + values=tensor([0.6614, 0.3405, 0.4062, ..., 0.2762, 0.6907, 0.8766]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.8601, 0.7620, 0.0732, ..., 0.0545, 0.3750, 0.1934]) +tensor([0.3093, 0.3070, 0.7107, ..., 0.6329, 0.2063, 0.9404]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.44620156288147 seconds +Time: 10.783016443252563 seconds -[18.13, 18.04, 18.48, 17.86, 18.0, 18.3, 18.04, 17.84, 17.94, 18.14] -[83.47] -15.976722717285156 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8756, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.44620156288147, 'TIME_S_1KI': 1.193033527053617, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.577045211792, 'W': 83.47} -[18.13, 18.04, 18.48, 17.86, 18.0, 18.3, 18.04, 17.84, 17.94, 18.14, 18.29, 18.21, 17.99, 18.15, 21.9, 18.49, 18.31, 17.94, 18.08, 18.13] -329.91499999999996 -16.495749999999997 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8756, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.44620156288147, 'TIME_S_1KI': 1.193033527053617, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.577045211792, 'W': 83.47, 'J_1KI': 152.30436788622566, 'W_1KI': 9.532891731384193, 'W_D': 66.97425, 'J_D': 1070.0290214481354, 'W_D_1KI': 7.648955002284148, 'J_D_1KI': 0.8735672684198433} +[19.4, 22.3, 19.22, 18.54, 19.29, 18.66, 18.82, 18.66, 18.68, 18.54] +[87.53] +15.450511693954468 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8729, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.783016443252563, 'TIME_S_1KI': 1.2353094791216135, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1352.3832885718346, 'W': 87.53} +[19.4, 22.3, 19.22, 18.54, 19.29, 18.66, 18.82, 18.66, 18.68, 18.54, 19.2, 18.66, 18.77, 18.65, 18.7, 18.7, 18.9, 18.67, 19.02, 18.65] +342.13500000000005 +17.10675 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8729, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.783016443252563, 'TIME_S_1KI': 1.2353094791216135, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1352.3832885718346, 'W': 87.53, 'J_1KI': 154.9299219351397, 'W_1KI': 10.027494558368657, 'W_D': 70.42325, 'J_D': 1088.075247651279, 'W_D_1KI': 8.067733990147781, 'J_D_1KI': 0.924244929562124} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json index 646ac12..20e810f 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2958, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.525683641433716, "TIME_S_1KI": 3.558378512993143, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1502.5020074129104, "W": 79.35, "J_1KI": 507.94523577177495, "W_1KI": 26.825557809330626, "W_D": 62.803999999999995, "J_D": 1189.2014628047941, "W_D_1KI": 21.231913455037187, "J_D_1KI": 7.177793595347258} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2874, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.399726152420044, "TIME_S_1KI": 3.9665017927696744, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1398.2346516990663, "W": 84.76, "J_1KI": 486.51170901150533, "W_1KI": 29.491997216423105, "W_D": 67.9355, "J_D": 1120.6910120399, "W_D_1KI": 23.637961029923453, "J_D_1KI": 8.224760274851583} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output index cd10b80..79c2bf9 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.38048672676086426} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.40368223190307617} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 978, 2001, ..., 9997917, - 9998993, 10000000]), - col_indices=tensor([ 10, 12, 31, ..., 9968, 9976, 9993]), - values=tensor([0.1521, 0.7718, 0.5784, ..., 0.3138, 0.0420, 0.0283]), +tensor(crow_indices=tensor([ 0, 987, 1967, ..., 9997992, + 9999008, 10000000]), + col_indices=tensor([ 7, 16, 26, ..., 9960, 9968, 9979]), + values=tensor([0.6123, 0.2290, 0.8484, ..., 0.2566, 0.4522, 0.9117]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8445, 0.5776, 0.6277, ..., 0.5230, 0.9454, 0.0151]) +tensor([0.9379, 0.7537, 0.6942, ..., 0.2092, 0.6278, 0.4526]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 0.38048672676086426 seconds +Time: 0.40368223190307617 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2759', '-ss', '10000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.790999174118042} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2601', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.502490997314453} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 961, 1957, ..., 9997943, - 9998964, 10000000]), - col_indices=tensor([ 0, 2, 15, ..., 9987, 9990, 9997]), - values=tensor([0.8878, 0.8149, 0.0468, ..., 0.0944, 0.2051, 0.2941]), +tensor(crow_indices=tensor([ 0, 1026, 2004, ..., 9997979, + 9999022, 10000000]), + col_indices=tensor([ 4, 15, 19, ..., 9953, 9954, 9967]), + values=tensor([0.9619, 0.8012, 0.0780, ..., 0.6886, 0.8242, 0.5458]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8720, 0.1434, 0.3774, ..., 0.9472, 0.6076, 0.2537]) +tensor([0.2108, 0.1126, 0.9693, ..., 0.1000, 0.3114, 0.1665]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 9.790999174118042 seconds +Time: 9.502490997314453 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2958', '-ss', '10000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.525683641433716} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2874', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.399726152420044} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1002, 2060, ..., 9998101, - 9999084, 10000000]), - col_indices=tensor([ 6, 12, 22, ..., 9993, 9996, 9999]), - values=tensor([0.4647, 0.6377, 0.7581, ..., 0.1422, 0.4549, 0.7257]), +tensor(crow_indices=tensor([ 0, 993, 2025, ..., 9997969, + 9999011, 10000000]), + col_indices=tensor([ 6, 13, 15, ..., 9973, 9981, 9991]), + values=tensor([0.2056, 0.1415, 0.5151, ..., 0.4464, 0.8622, 0.1462]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2726, 0.9874, 0.6365, ..., 0.6635, 0.0461, 0.2273]) +tensor([0.0942, 0.3792, 0.4904, ..., 0.5152, 0.5684, 0.8787]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.525683641433716 seconds +Time: 11.399726152420044 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1002, 2060, ..., 9998101, - 9999084, 10000000]), - col_indices=tensor([ 6, 12, 22, ..., 9993, 9996, 9999]), - values=tensor([0.4647, 0.6377, 0.7581, ..., 0.1422, 0.4549, 0.7257]), +tensor(crow_indices=tensor([ 0, 993, 2025, ..., 9997969, + 9999011, 10000000]), + col_indices=tensor([ 6, 13, 15, ..., 9973, 9981, 9991]), + values=tensor([0.2056, 0.1415, 0.5151, ..., 0.4464, 0.8622, 0.1462]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2726, 0.9874, 0.6365, ..., 0.6635, 0.0461, 0.2273]) +tensor([0.0942, 0.3792, 0.4904, ..., 0.5152, 0.5684, 0.8787]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.525683641433716 seconds +Time: 11.399726152420044 seconds -[18.46, 22.29, 18.08, 17.85, 18.08, 18.14, 17.95, 17.97, 18.17, 18.1] -[79.35] -18.935122966766357 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2958, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.525683641433716, 'TIME_S_1KI': 3.558378512993143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.5020074129104, 'W': 79.35} -[18.46, 22.29, 18.08, 17.85, 18.08, 18.14, 17.95, 17.97, 18.17, 18.1, 18.4, 17.86, 18.25, 18.05, 17.97, 17.82, 20.01, 18.07, 17.96, 17.84] -330.91999999999996 -16.546 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2958, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.525683641433716, 'TIME_S_1KI': 3.558378512993143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.5020074129104, 'W': 79.35, 'J_1KI': 507.94523577177495, 'W_1KI': 26.825557809330626, 'W_D': 62.803999999999995, 'J_D': 1189.2014628047941, 'W_D_1KI': 21.231913455037187, 'J_D_1KI': 7.177793595347258} +[19.05, 18.99, 18.84, 18.46, 18.61, 18.71, 18.6, 18.73, 18.61, 19.17] +[84.76] +16.496397495269775 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2874, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.399726152420044, 'TIME_S_1KI': 3.9665017927696744, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1398.2346516990663, 'W': 84.76} +[19.05, 18.99, 18.84, 18.46, 18.61, 18.71, 18.6, 18.73, 18.61, 19.17, 19.07, 18.75, 18.82, 18.42, 19.03, 18.69, 18.45, 18.24, 18.54, 18.71] +336.48999999999995 +16.824499999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2874, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.399726152420044, 'TIME_S_1KI': 3.9665017927696744, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1398.2346516990663, 'W': 84.76, 'J_1KI': 486.51170901150533, 'W_1KI': 29.491997216423105, 'W_D': 67.9355, 'J_D': 1120.6910120399, 'W_D_1KI': 23.637961029923453, 'J_D_1KI': 8.224760274851583} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.json index 600b3de..201d3d0 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1431, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.125213623046875, "TIME_S_1KI": 7.07562098046602, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1986.3526240086555, "W": 63.01, "J_1KI": 1388.087088755175, "W_1KI": 44.032145352900066, "W_D": 46.759, "J_D": 1474.0495531823635, "W_D_1KI": 32.67575122292103, "J_D_1KI": 22.83420770294971} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1479, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.389569997787476, "TIME_S_1KI": 7.024726164832641, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1529.2858192181588, "W": 80.81, "J_1KI": 1033.999877767518, "W_1KI": 54.63826910074375, "W_D": 63.61325000000001, "J_D": 1203.846567743838, "W_D_1KI": 43.01098715348209, "J_D_1KI": 29.081127216688362} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.output index b3d4b08..19322b1 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.7335808277130127} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.709862232208252} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1938, 3929, ..., 19996007, - 19998018, 20000000]), - col_indices=tensor([ 10, 23, 25, ..., 9992, 9994, 9995]), - values=tensor([0.0730, 0.5628, 0.7699, ..., 0.2806, 0.9097, 0.3889]), +tensor(crow_indices=tensor([ 0, 1995, 3996, ..., 19995943, + 19997984, 20000000]), + col_indices=tensor([ 1, 15, 31, ..., 9973, 9977, 9994]), + values=tensor([0.9351, 0.4585, 0.8626, ..., 0.1490, 0.3791, 0.5352]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.2309, 0.6404, 0.8370, ..., 0.6670, 0.0943, 0.6898]) +tensor([0.1274, 0.7816, 0.0225, ..., 0.9691, 0.3129, 0.2104]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 0.7335808277130127 seconds +Time: 0.709862232208252 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1431', '-ss', '10000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.125213623046875} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1479', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.389569997787476} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2012, 4019, ..., 19995967, - 19998000, 20000000]), - col_indices=tensor([ 4, 7, 8, ..., 9979, 9980, 9988]), - values=tensor([0.3444, 0.9651, 0.7506, ..., 0.6074, 0.5252, 0.1862]), +tensor(crow_indices=tensor([ 0, 2030, 3992, ..., 19996023, + 19997977, 20000000]), + col_indices=tensor([ 0, 4, 14, ..., 9988, 9997, 9999]), + values=tensor([0.5125, 0.9592, 0.8688, ..., 0.5338, 0.8764, 0.3338]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.7493, 0.9404, 0.6976, ..., 0.3307, 0.3774, 0.7329]) +tensor([0.9961, 0.7654, 0.4620, ..., 0.1667, 0.2329, 0.0326]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.125213623046875 seconds +Time: 10.389569997787476 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2012, 4019, ..., 19995967, - 19998000, 20000000]), - col_indices=tensor([ 4, 7, 8, ..., 9979, 9980, 9988]), - values=tensor([0.3444, 0.9651, 0.7506, ..., 0.6074, 0.5252, 0.1862]), +tensor(crow_indices=tensor([ 0, 2030, 3992, ..., 19996023, + 19997977, 20000000]), + col_indices=tensor([ 0, 4, 14, ..., 9988, 9997, 9999]), + values=tensor([0.5125, 0.9592, 0.8688, ..., 0.5338, 0.8764, 0.3338]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.7493, 0.9404, 0.6976, ..., 0.3307, 0.3774, 0.7329]) +tensor([0.9961, 0.7654, 0.4620, ..., 0.1667, 0.2329, 0.0326]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.125213623046875 seconds +Time: 10.389569997787476 seconds -[18.03, 17.87, 18.44, 17.79, 18.02, 17.71, 17.95, 17.85, 17.83, 17.74] -[63.01] -31.524402856826782 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1431, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.125213623046875, 'TIME_S_1KI': 7.07562098046602, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1986.3526240086555, 'W': 63.01} -[18.03, 17.87, 18.44, 17.79, 18.02, 17.71, 17.95, 17.85, 17.83, 17.74, 18.68, 17.68, 18.59, 19.66, 17.91, 17.67, 18.09, 18.04, 17.79, 17.81] -325.02 -16.250999999999998 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1431, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.125213623046875, 'TIME_S_1KI': 7.07562098046602, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1986.3526240086555, 'W': 63.01, 'J_1KI': 1388.087088755175, 'W_1KI': 44.032145352900066, 'W_D': 46.759, 'J_D': 1474.0495531823635, 'W_D_1KI': 32.67575122292103, 'J_D_1KI': 22.83420770294971} +[18.82, 18.73, 24.37, 19.34, 18.75, 19.29, 18.77, 18.45, 18.74, 18.59] +[80.81] +18.92446255683899 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1479, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.389569997787476, 'TIME_S_1KI': 7.024726164832641, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1529.2858192181588, 'W': 80.81} +[18.82, 18.73, 24.37, 19.34, 18.75, 19.29, 18.77, 18.45, 18.74, 18.59, 18.95, 18.48, 19.06, 18.92, 18.82, 18.63, 19.11, 18.42, 18.59, 18.57] +343.93499999999995 +17.196749999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1479, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.389569997787476, 'TIME_S_1KI': 7.024726164832641, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1529.2858192181588, 'W': 80.81, 'J_1KI': 1033.999877767518, 'W_1KI': 54.63826910074375, 'W_D': 63.61325000000001, 'J_D': 1203.846567743838, 'W_D_1KI': 43.01098715348209, 'J_D_1KI': 29.081127216688362} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.json index 913ab31..a29eca9 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 887, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.198672533035278, "TIME_S_1KI": 11.497939721573031, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3521.5318365383146, "W": 51.94, "J_1KI": 3970.1599059056534, "W_1KI": 58.55693348365276, "W_D": 35.649249999999995, "J_D": 2417.019037807345, "W_D_1KI": 40.19081172491544, "J_D_1KI": 45.31094895706363} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 998, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.352726221084595, "TIME_S_1KI": 10.373473167419434, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1634.5226301193238, "W": 77.59, "J_1KI": 1637.7982265724686, "W_1KI": 77.74549098196393, "W_D": 60.511500000000005, "J_D": 1274.7443759822847, "W_D_1KI": 60.63276553106213, "J_D_1KI": 60.754274079220565} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.output index 697edf6..74b4d68 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.9959039688110352} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.0511415004730225} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2964, 5964, ..., 29994054, - 29997049, 30000000]), - col_indices=tensor([ 0, 6, 7, ..., 9989, 9993, 9996]), - values=tensor([0.3352, 0.3012, 0.1376, ..., 0.4634, 0.9038, 0.2157]), +tensor(crow_indices=tensor([ 0, 3013, 6009, ..., 29993988, + 29997010, 30000000]), + col_indices=tensor([ 3, 10, 11, ..., 9994, 9996, 9998]), + values=tensor([0.5882, 0.0748, 0.3485, ..., 0.9795, 0.2531, 0.2438]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3407, 0.2089, 0.1462, ..., 0.7488, 0.0030, 0.5159]) +tensor([0.3271, 0.7082, 0.9239, ..., 0.2840, 0.5244, 0.2057]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 1.9959039688110352 seconds +Time: 1.0511415004730225 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '526', '-ss', '10000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 7.3105573654174805} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '998', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.352726221084595} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2980, 5935, ..., 29993957, - 29996987, 30000000]), - col_indices=tensor([ 2, 4, 5, ..., 9985, 9987, 9990]), - values=tensor([0.7275, 0.2529, 0.2202, ..., 0.8048, 0.1786, 0.5578]), +tensor(crow_indices=tensor([ 0, 2925, 5880, ..., 29993881, + 29996945, 30000000]), + col_indices=tensor([ 7, 9, 15, ..., 9991, 9992, 9996]), + values=tensor([0.5335, 0.0122, 0.5796, ..., 0.5217, 0.7536, 0.5371]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3722, 0.8340, 0.1775, ..., 0.2787, 0.3419, 0.3614]) +tensor([0.2204, 0.0258, 0.7235, ..., 0.5781, 0.5536, 0.8900]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 7.3105573654174805 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '755', '-ss', '10000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 8.937042713165283} +Time: 10.352726221084595 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3082, 6089, ..., 29994069, - 29996991, 30000000]), - col_indices=tensor([ 3, 5, 7, ..., 9989, 9990, 9999]), - values=tensor([0.1400, 0.4959, 0.3565, ..., 0.5786, 0.8662, 0.8079]), +tensor(crow_indices=tensor([ 0, 2925, 5880, ..., 29993881, + 29996945, 30000000]), + col_indices=tensor([ 7, 9, 15, ..., 9991, 9992, 9996]), + values=tensor([0.5335, 0.0122, 0.5796, ..., 0.5217, 0.7536, 0.5371]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3795, 0.0549, 0.6386, ..., 0.9156, 0.7490, 0.5099]) +tensor([0.2204, 0.0258, 0.7235, ..., 0.5781, 0.5536, 0.8900]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,50 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 8.937042713165283 seconds +Time: 10.352726221084595 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '887', '-ss', '10000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.198672533035278} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3035, 5994, ..., 29994113, - 29997092, 30000000]), - col_indices=tensor([ 5, 10, 14, ..., 9987, 9996, 9999]), - values=tensor([0.8098, 0.4554, 0.6671, ..., 0.4349, 0.8044, 0.2223]), - size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3423, 0.6279, 0.0055, ..., 0.7400, 0.5417, 0.1422]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 30000000 -Density: 0.3 -Time: 10.198672533035278 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3035, 5994, ..., 29994113, - 29997092, 30000000]), - col_indices=tensor([ 5, 10, 14, ..., 9987, 9996, 9999]), - values=tensor([0.8098, 0.4554, 0.6671, ..., 0.4349, 0.8044, 0.2223]), - size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3423, 0.6279, 0.0055, ..., 0.7400, 0.5417, 0.1422]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 30000000 -Density: 0.3 -Time: 10.198672533035278 seconds - -[18.62, 17.81, 18.39, 18.03, 17.98, 18.26, 18.01, 18.0, 17.9, 17.68] -[51.94] -67.79999685287476 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 887, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.198672533035278, 'TIME_S_1KI': 11.497939721573031, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3521.5318365383146, 'W': 51.94} -[18.62, 17.81, 18.39, 18.03, 17.98, 18.26, 18.01, 18.0, 17.9, 17.68, 18.52, 17.91, 18.3, 18.31, 18.44, 18.04, 17.96, 18.17, 17.99, 17.81] -325.815 -16.29075 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 887, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.198672533035278, 'TIME_S_1KI': 11.497939721573031, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3521.5318365383146, 'W': 51.94, 'J_1KI': 3970.1599059056534, 'W_1KI': 58.55693348365276, 'W_D': 35.649249999999995, 'J_D': 2417.019037807345, 'W_D_1KI': 40.19081172491544, 'J_D_1KI': 45.31094895706363} +[20.76, 18.85, 19.04, 18.56, 18.79, 18.54, 18.88, 18.6, 18.72, 18.53] +[77.59] +21.066150665283203 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 998, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.352726221084595, 'TIME_S_1KI': 10.373473167419434, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1634.5226301193238, 'W': 77.59} +[20.76, 18.85, 19.04, 18.56, 18.79, 18.54, 18.88, 18.6, 18.72, 18.53, 19.33, 18.53, 18.72, 18.46, 18.58, 18.72, 18.36, 18.5, 18.43, 27.96] +341.57 +17.0785 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 998, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.352726221084595, 'TIME_S_1KI': 
10.373473167419434, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1634.5226301193238, 'W': 77.59, 'J_1KI': 1637.7982265724686, 'W_1KI': 77.74549098196393, 'W_D': 60.511500000000005, 'J_D': 1274.7443759822847, 'W_D_1KI': 60.63276553106213, 'J_D_1KI': 60.754274079220565} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..c3c441f --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 785, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.66882610321045, "TIME_S_1KI": 13.590861277975096, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1798.7167282104492, "W": 75.63, "J_1KI": 2291.3588894400627, "W_1KI": 96.34394904458598, "W_D": 58.558, "J_D": 1392.6914474487305, "W_D_1KI": 74.59617834394905, "J_D_1KI": 95.0269787821007} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..670751b --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 1.3374521732330322} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3951, 7984, ..., 39992020, + 39996017, 40000000]), + col_indices=tensor([ 4, 5, 12, ..., 9995, 9997, 9999]), + values=tensor([0.6637, 0.1161, 0.0076, ..., 0.1828, 0.4930, 0.5063]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.4339, 0.0820, 0.1001, ..., 0.8568, 0.2207, 0.1791]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 1.3374521732330322 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '785', '-ss', '10000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.66882610321045} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4024, 7985, ..., 39991914, + 39996029, 40000000]), + col_indices=tensor([ 0, 1, 2, ..., 9994, 9995, 9996]), + values=tensor([0.8779, 0.6492, 0.8537, ..., 0.0562, 0.0638, 0.0669]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.8660, 0.7163, 0.6180, ..., 0.0766, 0.1126, 0.5921]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.66882610321045 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4024, 7985, ..., 39991914, + 39996029, 40000000]), + col_indices=tensor([ 0, 1, 2, ..., 9994, 9995, 9996]), + values=tensor([0.8779, 0.6492, 0.8537, ..., 0.0562, 0.0638, 0.0669]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.8660, 0.7163, 0.6180, ..., 0.0766, 0.1126, 0.5921]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.66882610321045 seconds + +[18.79, 18.81, 18.54, 18.57, 18.41, 18.56, 18.57, 18.37, 18.57, 18.5] +[75.63] +23.783111572265625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 785, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.66882610321045, 'TIME_S_1KI': 13.590861277975096, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1798.7167282104492, 'W': 75.63} +[18.79, 18.81, 18.54, 18.57, 18.41, 18.56, 18.57, 18.37, 18.57, 18.5, 19.16, 18.66, 18.58, 18.42, 19.27, 18.85, 22.54, 20.35, 18.51, 19.27] +341.43999999999994 +17.071999999999996 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 785, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.66882610321045, 'TIME_S_1KI': 13.590861277975096, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1798.7167282104492, 'W': 75.63, 'J_1KI': 2291.3588894400627, 'W_1KI': 96.34394904458598, 'W_D': 58.558, 'J_D': 1392.6914474487305, 'W_D_1KI': 74.59617834394905, 'J_D_1KI': 95.0269787821007} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..b0932eb --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 636, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.683557987213135, "TIME_S_1KI": 16.79804714970619, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1927.4814306020737, "W": 73.89, "J_1KI": 3030.6311801919396, "W_1KI": 116.17924528301887, "W_D": 56.59825, "J_D": 1476.4119079655409, "W_D_1KI": 
88.99095911949685, "J_D_1KI": 139.9228916973221} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..39e266d --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 1.6492679119110107} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4994, 10028, ..., 49990032, + 49995032, 50000000]), + col_indices=tensor([ 0, 1, 4, ..., 9995, 9998, 9999]), + values=tensor([0.1973, 0.5130, 0.6236, ..., 0.3108, 0.1154, 0.1821]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.8813, 0.9082, 0.9703, ..., 0.1753, 0.3020, 0.5225]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 1.6492679119110107 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '636', '-ss', '10000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.683557987213135} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5002, 10029, ..., 49990100, + 49995028, 50000000]), + col_indices=tensor([ 6, 9, 11, ..., 9997, 9998, 9999]), + values=tensor([0.8163, 0.5188, 0.6591, ..., 0.6776, 0.3373, 0.8589]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.7220, 0.4730, 0.7069, ..., 0.0189, 0.3508, 0.7316]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.683557987213135 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5002, 10029, ..., 49990100, + 49995028, 50000000]), + col_indices=tensor([ 6, 9, 11, ..., 9997, 9998, 9999]), + values=tensor([0.8163, 0.5188, 0.6591, ..., 0.6776, 0.3373, 0.8589]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.7220, 0.4730, 0.7069, ..., 0.0189, 0.3508, 0.7316]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.683557987213135 seconds + +[18.98, 18.64, 19.64, 25.32, 19.4, 19.01, 19.04, 18.8, 18.69, 18.52] +[73.89] +26.085822582244873 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 636, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.683557987213135, 'TIME_S_1KI': 16.79804714970619, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1927.4814306020737, 'W': 73.89} +[18.98, 18.64, 19.64, 25.32, 19.4, 19.01, 19.04, 18.8, 18.69, 18.52, 19.25, 18.51, 18.6, 18.89, 18.62, 18.41, 18.58, 18.78, 18.72, 19.62] +345.83500000000004 +17.29175 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 636, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.683557987213135, 'TIME_S_1KI': 16.79804714970619, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1927.4814306020737, 'W': 73.89, 'J_1KI': 3030.6311801919396, 'W_1KI': 116.17924528301887, 'W_D': 56.59825, 'J_D': 1476.4119079655409, 'W_D_1KI': 88.99095911949685, 'J_D_1KI': 139.9228916973221} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json index 4a5c029..63563c0 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 285101, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.263301372528076, "TIME_S_1KI": 0.03599882628446788, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1023.6102543663977, "W": 73.21, "J_1KI": 3.5903425605886956, "W_1KI": 0.2567861915601839, "W_D": 56.916999999999994, "J_D": 795.8041913368701, "W_D_1KI": 0.1996380230164047, "J_D_1KI": 0.0007002361374264022} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 283081, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.251019954681396, "TIME_S_1KI": 0.03621232069507101, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1131.9865839004517, "W": 81.68, "J_1KI": 3.9988080581192373, "W_1KI": 0.28853932266736376, "W_D": 64.571, "J_D": 894.8764166140556, "W_D_1KI": 0.2281007909396957, "J_D_1KI": 0.0008057792325860644} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output index aea26a1..caf5fe8 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,752 +1,373 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019815444946289062} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([ 147, 628, 1125, 5287, 8823, 6934, 2121, 9045, 1741, - 1008, 777, 1781, 8765, 5338, 5590, 4011, 9135, 3712, - 8048, 6348, 8740, 9035, 822, 3133, 9984, 8122, 9554, - 3712, 3272, 5515, 8959, 6829, 83, 3899, 7199, 7801, - 5285, 8079, 5441, 7100, 7667, 5056, 6862, 2526, 7349, - 1728, 3499, 3354, 7526, 5044, 3630, 6886, 4310, 5869, - 2649, 6497, 3797, 8787, 1590, 3717, 600, 9128, 9514, - 219, 9480, 7496, 7514, 5942, 3564, 2833, 1156, 3271, - 9892, 8323, 4817, 8727, 4029, 1225, 635, 3756, 3854, - 1580, 9309, 2348, 2287, 6728, 2416, 8735, 1996, 9982, - 2260, 9743, 6241, 6259, 3363, 2128, 9721, 8593, 9287, - 6303, 6167, 7208, 4450, 1275, 2370, 2881, 3104, 7161, - 9133, 226, 5026, 8446, 4133, 9547, 9259, 5653, 4323, - 9414, 5933, 1536, 7990, 6794, 7612, 5173, 6568, 8498, - 2655, 2146, 8320, 4457, 6510, 276, 9447, 6969, 2617, - 7175, 1400, 1929, 524, 929, 472, 5246, 4656, 4620, - 489, 4946, 342, 261, 5266, 9154, 4961, 841, 6798, - 6887, 9276, 203, 2231, 9861, 5260, 5410, 4989, 3777, - 5518, 2156, 807, 5251, 7532, 8301, 8516, 2536, 7883, - 4202, 2101, 5422, 6588, 505, 5736, 1011, 9457, 514, - 9520, 284, 2054, 2592, 5274, 6220, 4951, 8010, 3966, - 4007, 1997, 1966, 7251, 3207, 5838, 9132, 4982, 4877, - 8655, 8284, 876, 5569, 757, 2680, 1245, 1459, 64, - 365, 4927, 7024, 7351, 643, 3222, 4840, 6222, 7194, - 1143, 9570, 6110, 514, 6176, 3515, 236, 14, 9751, - 3159, 5832, 1147, 6202, 9999, 7112, 2023, 1940, 2795, - 6251, 8906, 5208, 3243, 7604, 6854, 2354, 2890, 2740, - 454, 4997, 9488, 8203, 5802, 49, 4850, 8249, 7823, - 1150, 8721, 9200, 1431, 3413, 5962, 300, 7898, 4190, - 1359, 832, 9165, 8870, 2860, 5248, 4896, 4884, 9315, - 6156, 4153, 5251, 9843, 5642, 7880, 3504, 3324, 7744, - 7101, 8647, 7753, 6224, 4352, 1703, 8312, 455, 3521, - 536, 6524, 6799, 7876, 7371, 8323, 7238, 5827, 4495, - 6175, 5872, 9038, 7184, 3438, 9862, 9699, 746, 3390, - 8840, 1555, 6503, 5323, 281, 1915, 41, 9952, 630, - 2524, 81, 7205, 5517, 7630, 9980, 8564, 2454, 5345, - 6893, 2025, 7310, 6297, 7720, 9244, 9072, 6121, 526, - 8438, 6441, 784, 8517, 5423, 2370, 1040, 9954, 5590, - 9371, 3627, 9962, 8967, 738, 9590, 2685, 5587, 8151, - 98, 9310, 2936, 529, 1987, 5122, 8678, 5271, 6797, - 8491, 7804, 9346, 6958, 1197, 3159, 3495, 4161, 7372, - 2305, 7596, 9495, 2623, 4621, 1213, 574, 4322, 5020, - 8442, 5191, 8488, 8804, 3945, 8995, 9875, 8614, 9285, - 5845, 4077, 4304, 1222, 7152, 3293, 357, 9313, 2693, - 5120, 942, 8204, 3155, 4829, 
2464, 4701, 3688, 607, - 5172, 151, 3028, 4014, 1336, 976, 4925, 5627, 1156, - 9625, 5767, 8183, 5688, 8528, 9294, 4747, 8880, 7735, - 5253, 9274, 6989, 3528, 8002, 6326, 509, 3423, 8496, - 9738, 8213, 8077, 7550, 7738, 8069, 93, 3909, 5783, - 9543, 120, 9437, 6763, 7953, 9131, 7685, 7049, 6280, - 5195, 6701, 8174, 8541, 1693, 1873, 1258, 8665, 484, - 4711, 8704, 5880, 6139, 5452, 3962, 584, 5287, 6792, - 2514, 4003, 9542, 4836, 1519, 4148, 4206, 2568, 4631, - 2566, 3432, 2366, 2787, 6206, 7242, 5947, 1135, 3254, - 2113, 6329, 5453, 9815, 5777, 9459, 1217, 265, 3194, - 4738, 1004, 6119, 7621, 718, 5091, 7111, 8995, 5329, - 9627, 9467, 2122, 4961, 1162, 6369, 2607, 1279, 7314, - 4043, 9227, 6168, 4214, 1156, 6289, 4153, 2703, 9977, - 4203, 4485, 2363, 2942, 2937, 7682, 1361, 3609, 9757, - 8109, 4417, 1482, 5111, 1389, 2296, 737, 5089, 1163, - 4190, 4070, 610, 4301, 95, 453, 2406, 4010, 8161, - 762, 2233, 5838, 5351, 8383, 1707, 4709, 6703, 5945, - 4949, 1124, 3676, 9285, 7158, 3499, 7227, 1568, 2973, - 1428, 2316, 6991, 9226, 8007, 735, 3310, 6820, 9505, - 7011, 4908, 3243, 2472, 1968, 5864, 7326, 9622, 2967, - 5217, 8990, 3943, 387, 5835, 3583, 1763, 7032, 3775, - 4192, 6405, 7533, 1351, 805, 4502, 8291, 5499, 4398, - 7000, 8794, 1522, 2359, 7891, 6226, 7994, 1689, 6323, - 3184, 7685, 4860, 6403, 9802, 6255, 1821, 7732, 4494, - 8136, 7965, 2804, 7520, 2384, 6618, 7928, 6910, 9527, - 5014, 9950, 9409, 3928, 7865, 282, 2164, 9709, 3423, - 4150, 8746, 2581, 6098, 4292, 1480, 5919, 9694, 5887, - 6173, 9603, 4877, 6138, 1293, 3703, 2957, 2971, 131, - 2304, 7972, 1723, 1558, 5610, 5607, 4064, 6099, 1287, - 6409, 1303, 2598, 7669, 21, 1272, 118, 8324, 1938, - 2731, 4676, 7652, 6310, 6662, 2345, 232, 4946, 4044, - 5562, 4317, 383, 7955, 1674, 2856, 3908, 692, 9659, - 2014, 9423, 4192, 7578, 2859, 1638, 9429, 4145, 8698, - 9380, 7375, 7178, 5811, 1590, 199, 2988, 8205, 9471, - 7617, 1811, 5995, 2386, 2786, 4693, 6295, 485, 8968, - 3066, 5805, 3154, 3848, 1759, 3221, 5904, 1069, 3533, - 309, 7347, 7846, 6274, 7652, 6373, 3481, 6353, 9650, - 1516, 8787, 7982, 5148, 4709, 8309, 4232, 9115, 1418, - 5274, 395, 5709, 3174, 68, 3804, 7295, 9536, 3414, - 9104, 924, 3572, 6817, 8563, 561, 9344, 3220, 2063, - 7444, 7018, 707, 7026, 2578, 179, 5410, 8966, 9805, - 9112, 5656, 3110, 3676, 1235, 9544, 1495, 5008, 7289, - 727, 5468, 5698, 8696, 3288, 2439, 2448, 7548, 1409, - 3012, 7997, 4416, 7301, 2138, 3939, 8274, 9381, 88, - 583, 8683, 9292, 6899, 6134, 1702, 4060, 3625, 8216, - 7762, 6579, 8766, 6500, 9863, 474, 5904, 1723, 5347, - 8855, 8569, 5341, 7635, 8364, 1259, 7057, 2725, 6056, - 7618, 8691, 7207, 2875, 7923, 4624, 1246, 7177, 9717, - 3416, 2221, 2213, 5331, 7348, 4898, 7801, 7311, 6564, - 8018, 3122, 3971, 2426, 7210, 2126, 5178, 5966, 4092, - 1656, 7574, 6717, 1713, 5351, 7205, 1072, 1567, 4916, - 3143, 3039, 494, 5212, 2734, 8298, 5666, 1616, 991, - 5392, 219, 4095, 7599, 4336, 3201, 4103, 4232, 7641, - 6901, 3161, 911, 2271, 3833, 2362, 450, 702, 4414, - 1985, 9874, 7045, 303, 2861, 6503, 8031, 6998, 981, - 6383, 3804, 2132, 7446, 3522, 8554, 7230, 7247, 6842, - 8740, 6185, 4789, 9401, 7169, 4828, 5223, 2972, 315, - 9277, 5159, 6582, 3209, 3738, 2614, 6186, 7932, 8653, - 8758, 7882, 7681, 6252, 7053, 4207, 5682, 8498, 8562, - 9350, 9645, 4139, 5790, 9077, 5393, 6059, 4141, 5770, - 4443, 1642, 3655, 1526, 4326, 4199, 2823, 9283, 4707, - 4922, 7330, 7654, 5307, 5122, 5645, 5858, 9038, 8521, - 4752, 9830, 517, 6062, 7448, 3954, 8600, 4313, 3222, - 3628, 5629, 9654, 6445, 4277, 9969, 3299, 6212, 4242, 
- 5088, 3231, 2968, 3697, 4789, 7868, 6625, 5588, 531, - 6783, 3545, 9416, 3488, 5893, 600, 2516, 5328, 2390, - 6963]), - values=tensor([5.2560e-01, 4.5182e-01, 2.3298e-01, 1.2745e-01, - 6.7432e-01, 1.6305e-01, 6.7850e-01, 4.8297e-02, - 4.2986e-02, 4.6503e-01, 5.1950e-01, 9.2514e-01, - 3.0211e-02, 5.8682e-01, 1.6128e-01, 9.9806e-01, - 6.5284e-01, 4.7123e-01, 9.0892e-01, 7.2129e-01, - 1.4353e-01, 7.5709e-01, 6.9051e-01, 2.2834e-01, - 3.7105e-01, 5.2802e-01, 6.8136e-01, 5.9089e-01, - 3.2433e-01, 6.7476e-01, 6.9094e-01, 4.2556e-01, - 1.5749e-01, 7.8965e-01, 5.0067e-01, 9.5478e-01, - 7.8984e-01, 3.4707e-01, 4.1861e-02, 9.5964e-01, - 7.4705e-02, 6.3404e-01, 6.7261e-01, 1.7905e-01, - 8.5368e-01, 1.8088e-01, 7.0364e-01, 1.3954e-01, - 6.4433e-01, 2.6339e-01, 5.1852e-01, 1.7690e-01, - 3.6218e-01, 8.9644e-01, 9.8381e-01, 4.4953e-01, - 5.3938e-02, 8.2339e-01, 4.5796e-01, 9.4213e-02, - 3.3168e-02, 3.7108e-01, 8.3725e-01, 1.4729e-01, - 5.8007e-01, 6.0673e-01, 9.9643e-01, 7.2226e-01, - 7.1339e-01, 8.4565e-01, 4.7404e-01, 6.2064e-01, - 5.8324e-02, 2.8401e-01, 5.6260e-01, 8.1231e-01, - 5.0034e-01, 4.1114e-01, 4.3338e-01, 9.7312e-01, - 6.0643e-01, 4.1709e-01, 6.0714e-02, 7.0392e-01, - 3.0608e-01, 9.7941e-01, 3.7985e-01, 5.9718e-01, - 7.0541e-01, 6.7003e-01, 8.0980e-01, 1.1730e-02, - 5.2983e-01, 1.1390e-01, 9.7596e-01, 4.4723e-01, - 1.4768e-01, 6.1947e-01, 9.3278e-01, 6.5142e-02, - 3.7915e-01, 8.5646e-01, 4.6630e-01, 1.1336e-01, - 5.5405e-01, 6.7251e-01, 2.2618e-01, 5.7297e-02, - 8.3920e-01, 6.4694e-01, 2.9986e-01, 7.2391e-01, - 3.8923e-01, 8.9915e-02, 1.7642e-01, 1.2665e-01, - 1.1954e-01, 1.2088e-01, 1.4362e-01, 9.5926e-01, - 9.6244e-01, 8.2548e-01, 6.8499e-01, 7.3075e-01, - 5.3887e-01, 9.8974e-01, 3.7081e-01, 1.6977e-01, - 7.0021e-02, 8.2089e-01, 2.9793e-01, 4.7021e-02, - 7.1563e-01, 1.4888e-01, 3.8166e-01, 8.0958e-01, - 5.9825e-01, 4.8717e-02, 9.7864e-02, 4.0284e-01, - 9.2565e-01, 5.9953e-01, 5.3686e-01, 3.4093e-01, - 2.5458e-01, 6.5661e-01, 1.5050e-01, 1.0070e-01, - 9.2754e-01, 2.6429e-02, 8.6963e-01, 3.9150e-01, - 3.0459e-01, 9.0345e-01, 8.7288e-01, 1.4733e-01, - 5.5407e-01, 4.2790e-01, 3.4163e-01, 8.7761e-01, - 8.6974e-01, 9.4480e-01, 8.5739e-01, 3.5148e-01, - 7.1780e-01, 5.6444e-01, 3.1054e-01, 2.7980e-01, - 7.6360e-01, 2.0659e-01, 8.7043e-01, 8.6826e-01, - 6.1911e-01, 2.4894e-01, 1.7003e-01, 9.8128e-01, - 1.0649e-01, 4.0739e-01, 1.6335e-01, 5.4390e-01, - 5.5668e-01, 7.5228e-03, 5.5173e-01, 4.2472e-01, - 6.3659e-01, 1.2333e-02, 1.6682e-01, 7.0490e-01, - 1.7784e-01, 2.4643e-01, 9.9365e-01, 5.7578e-01, - 1.4710e-01, 7.3059e-01, 1.0914e-01, 1.5979e-02, - 8.4679e-01, 8.8970e-02, 4.2280e-01, 3.2340e-01, - 9.5658e-01, 1.8028e-01, 7.6493e-02, 9.1788e-01, - 3.5415e-01, 8.1832e-01, 9.8353e-01, 6.2593e-01, - 5.0861e-01, 6.5047e-01, 9.9291e-01, 2.6001e-01, - 5.3761e-01, 3.3600e-02, 1.8506e-01, 2.9837e-01, - 4.7576e-01, 3.3503e-01, 1.7338e-02, 1.0681e-02, - 4.8168e-01, 3.3971e-01, 3.0402e-01, 3.5616e-01, - 6.9091e-02, 4.0202e-01, 7.9475e-01, 9.6864e-01, - 5.8040e-01, 5.7299e-01, 8.5674e-01, 6.6006e-01, - 2.7936e-01, 1.3390e-01, 1.8119e-01, 7.3696e-01, - 2.1656e-02, 5.0719e-01, 7.9270e-01, 7.9608e-01, - 4.6938e-02, 6.4955e-01, 1.1024e-01, 3.3072e-01, - 7.0495e-01, 7.9866e-01, 2.9836e-01, 5.3080e-02, - 4.5780e-01, 3.2462e-01, 2.3343e-01, 3.4916e-01, - 4.9296e-01, 2.0091e-01, 7.6547e-01, 1.3016e-01, - 6.0404e-02, 8.2176e-01, 8.6472e-01, 1.0995e-01, - 3.7648e-01, 7.3952e-01, 9.6968e-01, 6.3923e-01, - 7.2826e-01, 7.1776e-01, 6.8745e-01, 2.7331e-01, - 1.2680e-01, 3.7805e-02, 4.0262e-01, 8.2898e-01, - 3.9871e-01, 
5.1570e-01, 9.1900e-01, 4.6037e-01, - 6.9803e-01, 3.7678e-01, 6.0374e-01, 8.0866e-01, - 1.2782e-01, 1.3656e-01, 6.7064e-01, 9.7129e-01, - 9.1569e-01, 8.7380e-01, 6.8149e-01, 7.9829e-01, - 9.4074e-01, 4.9967e-01, 7.6367e-01, 4.1375e-01, - 3.3762e-01, 8.4138e-01, 5.7388e-01, 1.0199e-02, - 6.1160e-01, 2.7149e-01, 7.9430e-01, 6.0606e-01, - 3.8213e-01, 6.9915e-01, 7.9717e-01, 6.4264e-01, - 1.2570e-01, 4.2091e-01, 7.7556e-03, 4.2787e-01, - 2.4645e-01, 7.0922e-01, 9.7475e-01, 1.7749e-01, - 7.7920e-01, 8.8611e-01, 7.3655e-01, 2.6627e-02, - 4.3960e-01, 7.0921e-01, 1.2495e-01, 7.9556e-01, - 2.4612e-01, 2.1105e-01, 1.3608e-01, 2.6228e-01, - 5.4161e-01, 9.6676e-01, 7.1936e-01, 5.0925e-01, - 3.3548e-01, 4.0907e-01, 2.5859e-01, 7.8072e-01, - 5.3413e-01, 6.4004e-01, 4.0338e-01, 4.6080e-01, - 2.2626e-01, 6.6418e-02, 4.6412e-01, 3.7270e-01, - 4.9191e-01, 1.9996e-01, 7.9189e-01, 1.1419e-01, - 3.0279e-01, 5.9447e-01, 1.0009e-01, 3.9196e-01, - 8.7322e-02, 3.6585e-01, 5.0558e-01, 5.9756e-01, - 5.1223e-01, 7.7059e-01, 5.6411e-01, 8.6785e-01, - 4.9763e-01, 2.7060e-01, 4.6230e-01, 2.8485e-01, - 5.6333e-01, 6.4385e-01, 2.4189e-01, 4.3952e-01, - 9.1360e-01, 6.8688e-01, 2.3180e-01, 3.6477e-01, - 1.8153e-01, 7.8891e-01, 9.7111e-01, 5.2860e-01, - 7.2300e-01, 8.4822e-02, 6.4799e-01, 7.4738e-01, - 9.3333e-01, 3.0742e-01, 2.0477e-01, 7.5682e-01, - 3.8206e-01, 7.5470e-01, 1.5177e-01, 4.5879e-01, - 2.0623e-01, 2.9029e-01, 6.1429e-01, 8.4124e-01, - 3.1046e-01, 6.1053e-01, 5.9478e-01, 7.4185e-01, - 8.5712e-01, 1.5446e-01, 6.2325e-01, 1.5679e-01, - 8.9475e-02, 5.3521e-01, 5.0307e-02, 4.9942e-01, - 1.6993e-01, 9.1214e-01, 1.0981e-01, 5.4953e-01, - 1.3202e-04, 2.0668e-02, 2.6934e-02, 2.8142e-02, - 5.9963e-01, 6.2907e-01, 1.9020e-01, 8.4416e-01, - 4.7828e-01, 6.2037e-02, 8.4804e-01, 2.8713e-01, - 8.7914e-01, 5.2289e-02, 8.8096e-01, 7.7464e-02, - 1.8152e-01, 4.5361e-01, 9.2307e-02, 7.7685e-01, - 4.8882e-01, 3.8849e-02, 2.4650e-01, 9.4929e-01, - 4.8155e-01, 2.3940e-01, 6.0404e-01, 2.8524e-01, - 3.1995e-01, 1.7781e-01, 3.6728e-01, 9.4366e-01, - 6.2074e-01, 6.7714e-01, 9.4887e-01, 9.6136e-01, - 6.5217e-01, 4.4854e-01, 3.6396e-01, 2.0775e-01, - 9.0861e-01, 1.7247e-01, 2.0122e-01, 3.1052e-01, - 9.7944e-01, 4.6799e-01, 5.9893e-01, 1.8430e-01, - 6.1849e-01, 6.7723e-01, 8.1238e-01, 8.5110e-01, - 7.7606e-01, 4.8711e-01, 3.1304e-01, 7.0698e-01, - 6.6410e-01, 1.2702e-01, 6.6704e-01, 9.5507e-01, - 8.2579e-01, 9.9875e-01, 2.2652e-01, 7.2346e-01, - 5.4255e-01, 7.3418e-01, 5.4150e-02, 7.6473e-01, - 8.4496e-01, 2.7827e-02, 3.1883e-01, 6.2547e-01, - 3.3673e-01, 7.0332e-01, 7.4524e-01, 1.7202e-01, - 4.7220e-01, 2.5484e-01, 6.3754e-01, 2.6972e-01, - 5.7555e-01, 2.5729e-02, 8.9240e-01, 9.3243e-02, - 2.0048e-01, 3.9184e-01, 7.9079e-01, 7.6417e-01, - 3.6297e-01, 1.0701e-01, 7.3829e-01, 5.7379e-01, - 7.8135e-01, 5.9905e-01, 1.7596e-02, 2.0084e-01, - 2.6002e-01, 5.7793e-01, 1.1292e-01, 5.5716e-01, - 1.2785e-01, 3.4844e-01, 2.6200e-01, 9.5494e-01, - 8.4409e-01, 8.8543e-01, 8.3785e-01, 2.5559e-01, - 3.7511e-01, 8.7394e-02, 3.0524e-01, 1.7522e-01, - 7.9539e-01, 3.3646e-02, 5.2387e-01, 3.7442e-01, - 1.3978e-02, 1.8117e-01, 5.3870e-02, 9.4073e-01, - 9.3325e-01, 1.3931e-01, 9.5806e-03, 2.1513e-01, - 7.5372e-01, 8.8366e-01, 7.3370e-02, 3.8591e-01, - 4.2813e-01, 6.9476e-01, 8.6124e-01, 4.2190e-01, - 5.7470e-01, 3.1136e-01, 8.5208e-01, 7.0394e-01, - 1.8636e-01, 5.9006e-01, 5.1801e-01, 5.2659e-01, - 5.0269e-01, 7.7233e-01, 7.8972e-01, 8.3201e-01, - 4.3437e-01, 6.4845e-01, 5.5414e-01, 4.4734e-01, - 5.1213e-01, 1.6050e-01, 7.1213e-01, 1.6252e-01, - 8.9777e-01, 
6.6440e-01, 4.4346e-02, 7.4956e-01, - 5.2652e-01, 8.8268e-02, 2.4019e-01, 1.9801e-01, - 8.9331e-01, 7.6786e-01, 1.5987e-01, 2.2736e-01, - 7.3131e-01, 6.1032e-01, 3.1343e-01, 4.4597e-02, - 5.9683e-01, 1.9413e-01, 1.9830e-01, 1.4040e-01, - 1.3606e-01, 6.0248e-02, 7.0841e-02, 8.1665e-01, - 1.6396e-01, 1.9495e-01, 7.9996e-01, 3.2143e-01, - 8.0826e-01, 7.2033e-01, 6.2158e-01, 2.8202e-01, - 8.5930e-01, 8.9293e-01, 4.3542e-01, 5.5467e-01, - 6.4518e-01, 6.6044e-01, 2.4172e-01, 6.8673e-01, - 2.6429e-01, 6.5201e-01, 7.9924e-01, 9.2253e-01, - 9.3220e-01, 3.3890e-01, 4.3856e-01, 4.7886e-01, - 2.4920e-01, 1.1320e-01, 8.1552e-01, 4.8103e-01, - 9.9413e-01, 1.2151e-01, 7.3907e-01, 5.9053e-01, - 3.2489e-01, 7.0052e-01, 1.4705e-01, 2.5163e-01, - 6.2341e-01, 2.4832e-01, 1.0102e-01, 6.1087e-01, - 4.9169e-01, 6.3088e-01, 6.5857e-02, 1.2103e-01, - 5.3802e-01, 4.3476e-01, 8.5162e-02, 9.5909e-02, - 2.7539e-01, 6.3053e-01, 8.2462e-01, 6.8849e-01, - 6.8116e-01, 4.4265e-01, 2.5334e-01, 5.7535e-01, - 7.3906e-01, 2.6868e-01, 8.2246e-01, 7.5326e-01, - 2.3140e-01, 6.7734e-01, 1.5208e-01, 4.3675e-01, - 8.5422e-01, 1.3327e-01, 2.9050e-01, 4.6933e-02, - 2.8711e-01, 6.4456e-01, 6.6952e-01, 9.7136e-01, - 8.7064e-01, 4.4199e-01, 9.0821e-01, 2.2048e-01, - 6.2945e-02, 6.9894e-01, 4.6657e-01, 4.9677e-01, - 5.3803e-01, 1.6106e-01, 1.9190e-01, 9.6721e-01, - 8.6755e-01, 6.7380e-01, 6.3082e-01, 6.2585e-01, - 9.8280e-01, 3.2073e-01, 7.9868e-01, 5.3679e-01, - 2.3558e-01, 7.8221e-01, 9.6604e-01, 1.5512e-01, - 2.7020e-01, 5.4598e-01, 7.2565e-01, 9.0695e-01, - 7.1888e-01, 5.5173e-01, 2.1659e-01, 7.2627e-01, - 1.6656e-02, 2.4187e-01, 2.0943e-01, 7.3096e-01, - 5.3434e-01, 5.8941e-01, 7.8362e-01, 3.0880e-01, - 5.8657e-01, 5.3713e-01, 1.5416e-01, 7.7206e-01, - 7.7405e-01, 2.2208e-01, 2.7616e-02, 4.1381e-02, - 3.6096e-01, 2.9990e-01, 7.2871e-01, 4.6144e-01, - 2.6081e-01, 1.1160e-01, 4.6879e-02, 9.6260e-01, - 6.3027e-02, 9.2736e-01, 5.1303e-01, 7.8810e-01, - 9.4326e-01, 8.3384e-01, 5.2389e-01, 3.7333e-01, - 7.0704e-01, 2.0490e-02, 9.7841e-01, 4.6449e-01, - 2.0052e-01, 1.6703e-01, 2.3432e-02, 8.7757e-01, - 4.3288e-01, 9.0692e-01, 8.0465e-01, 6.8254e-01, - 7.1185e-01, 6.0360e-01, 3.0568e-01, 3.3586e-01, - 8.0849e-01, 9.2920e-01, 1.8771e-01, 1.6081e-01, - 6.2577e-01, 8.0494e-01, 2.5995e-01, 6.9688e-01, - 8.1340e-01, 7.6095e-01, 4.3346e-01, 9.4688e-01, - 7.6554e-02, 3.0534e-01, 1.9986e-02, 6.3233e-01, - 7.6106e-01, 4.6982e-01, 6.3199e-01, 3.8163e-01, - 4.8463e-01, 8.4737e-01, 9.7787e-01, 6.8558e-02, - 8.4902e-01, 8.4066e-02, 9.5446e-01, 9.5276e-01, - 1.1721e-01, 6.6180e-01, 1.7366e-01, 5.8689e-01, - 3.5482e-01, 9.7331e-01, 3.4294e-01, 2.8538e-01, - 5.1595e-01, 3.1709e-01, 5.5250e-01, 5.6823e-01, - 8.1852e-01, 7.0949e-01, 8.0023e-01, 1.9327e-01, - 6.3159e-01, 8.3741e-01, 4.2915e-01, 4.5280e-01, - 3.6497e-02, 4.6729e-01, 1.7655e-01, 9.0173e-01, - 8.8293e-01, 3.8356e-01, 3.7646e-01, 7.9430e-01, - 6.1564e-01, 8.3823e-01, 6.4739e-01, 2.5501e-01, - 1.7745e-02, 2.0888e-01, 1.6213e-01, 1.4289e-01, - 4.3243e-01, 8.9590e-02, 6.2675e-01, 1.6139e-01, - 4.9212e-02, 7.8304e-01, 3.4728e-01, 1.1716e-01, - 6.6748e-01, 6.8148e-01, 6.0697e-01, 9.6199e-01, - 2.0651e-01, 7.4950e-01, 1.2579e-01, 8.0246e-01, - 1.6417e-01, 7.2462e-01, 2.5278e-02, 4.6134e-02, - 6.2831e-01, 5.8610e-01, 2.5098e-01, 3.0610e-01, - 5.9318e-01, 7.4726e-01, 6.9463e-01, 9.1050e-01, - 5.1888e-01, 7.0514e-01, 7.0205e-01, 7.2207e-01, - 8.0443e-01, 8.7596e-02, 4.7888e-01, 8.3782e-01, - 2.4008e-01, 8.0627e-01, 8.5026e-01, 9.1172e-01, - 8.6693e-01, 5.8864e-01, 7.3220e-01, 9.5311e-01, - 2.3545e-01, 
5.9781e-01, 4.2475e-01, 5.7120e-01, - 7.9825e-01, 1.1148e-01, 9.6872e-01, 1.8660e-01, - 1.2651e-01, 6.4452e-01, 6.0136e-01, 4.0304e-01, - 3.2066e-01, 9.6919e-01, 8.9438e-01, 1.9197e-01, - 6.2802e-01, 1.2447e-01, 9.2235e-01, 5.3612e-01, - 7.6692e-01, 6.0437e-01, 3.9696e-01, 2.4947e-01, - 4.7984e-01, 2.4304e-01, 8.7832e-01, 4.4963e-01, - 1.4427e-01, 5.7088e-01, 7.6046e-01, 6.8213e-01, - 8.0719e-01, 4.0864e-01, 5.2984e-01, 6.0712e-01, - 9.0793e-01, 7.5984e-01, 6.8009e-01, 9.3405e-03, - 9.6069e-02, 1.2806e-01, 3.5111e-01, 6.9311e-01, - 8.2085e-01, 8.2835e-01, 9.1010e-01, 6.9786e-01, - 8.9484e-01, 6.1428e-01, 5.8339e-01, 7.7197e-01, - 9.0580e-01, 9.3461e-01, 5.1191e-01, 6.0450e-01, - 1.1201e-01, 9.4214e-01, 1.2487e-01, 9.0601e-01, - 2.9108e-01, 6.7647e-01, 9.4078e-01, 2.8280e-01, - 1.9018e-01, 8.2373e-01, 1.9392e-01, 2.5619e-01, - 4.7560e-01, 9.5113e-02, 7.0953e-01, 1.8132e-01, - 7.8736e-01, 2.9743e-01, 6.6220e-01, 7.0073e-01, - 4.2373e-01, 8.5630e-02, 1.5746e-01, 2.2218e-01, - 7.1860e-01, 7.1300e-01, 2.1873e-01, 5.3912e-01, - 3.1159e-01, 4.0946e-01, 3.6675e-01, 1.7526e-01, - 3.9314e-01, 6.8523e-01, 8.9266e-01, 2.6076e-01, - 1.9742e-01, 7.1643e-01, 5.7968e-02, 2.9522e-01, - 2.6531e-01, 9.4045e-01, 2.8546e-01, 5.0832e-01, - 2.7122e-01, 9.1348e-02, 4.6177e-01, 5.4269e-01, - 9.6392e-01, 5.7908e-01, 4.4952e-02, 4.4291e-01, - 1.9518e-01, 5.7031e-01, 4.6612e-01, 6.5682e-01, - 7.5169e-01, 7.1827e-01, 6.9783e-01, 8.7240e-01, - 9.7588e-01, 4.6460e-01, 4.9550e-01, 1.1087e-01, - 4.9333e-01, 4.5081e-01, 5.3065e-02, 1.0936e-01, - 2.3607e-01, 3.1568e-01, 4.5569e-02, 3.3065e-01, - 6.3115e-01, 3.4755e-01, 4.1582e-01, 6.8376e-01, - 7.2990e-02, 4.2965e-01, 9.9497e-01, 2.6721e-01, - 7.7370e-02, 9.9769e-01, 8.7087e-01, 8.0430e-01, - 2.4436e-01, 8.4694e-02, 2.4607e-01, 3.7822e-01, - 4.9074e-01, 2.9051e-01, 4.6776e-01, 9.0178e-01, - 1.2981e-01, 3.4897e-01, 4.9289e-01, 4.9671e-01, - 4.4983e-01, 5.6051e-01, 9.3221e-01, 5.3076e-01, - 3.0281e-01, 7.1212e-01, 7.5584e-01, 8.5415e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7030, 0.4856, 0.8302, ..., 0.1030, 0.0064, 0.5187]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 0.019815444946289062 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52988', '-ss', '10000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.9514954090118408} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019530296325683594} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([1913, 7654, 2578, 7438, 161, 1784, 4399, 9342, 5973, - 9317, 2386, 5390, 6826, 8246, 3533, 5774, 7850, 3210, - 5098, 9081, 1419, 9854, 4633, 4094, 2460, 1982, 5224, - 7488, 9470, 9542, 7636, 1813, 3516, 6772, 9387, 3527, - 2143, 2579, 3758, 6675, 2400, 6857, 5478, 3695, 3795, - 3756, 3719, 3404, 9547, 8983, 7175, 4037, 9581, 7185, - 929, 5259, 781, 9068, 9465, 4965, 5893, 2696, 2105, - 4213, 4999, 2582, 8142, 1368, 9056, 5530, 8794, 3411, - 8012, 2760, 7505, 2582, 7967, 1442, 5100, 4547, 8139, - 4597, 3901, 8732, 616, 6824, 2406, 5427, 99, 1530, - 6480, 9734, 2031, 8017, 9192, 5440, 6935, 2243, 5699, - 2761, 4735, 1992, 3936, 8939, 2912, 3975, 1293, 2526, - 1242, 1543, 6007, 2170, 8914, 4313, 7065, 1733, 3488, - 4650, 2435, 7570, 6849, 1021, 3506, 2267, 4325, 8183, - 4797, 2128, 7956, 4452, 1357, 2467, 9292, 7529, 2665, - 6939, 4926, 53, 3719, 2716, 4371, 322, 8247, 6141, - 3643, 4161, 1218, 4631, 45, 5094, 2416, 6978, 9777, - 8969, 1493, 6000, 920, 357, 7359, 4419, 5789, 2824, - 9510, 2214, 2666, 6941, 5654, 5280, 2914, 7629, 6757, - 5329, 6096, 4785, 7842, 272, 553, 1821, 2930, 6978, - 9552, 3585, 2668, 4929, 8592, 9008, 3436, 6246, 2869, - 9508, 6305, 674, 5548, 2178, 4623, 2766, 9426, 6636, - 2734, 5700, 6697, 4397, 1950, 4820, 2898, 9874, 4309, - 7607, 72, 4311, 993, 3186, 2078, 2317, 7450, 1239, - 2314, 5387, 5556, 1929, 9624, 7824, 8662, 8410, 6503, - 1443, 483, 7970, 4732, 6257, 7787, 3330, 448, 9261, - 4951, 1319, 3950, 7599, 1261, 2621, 8652, 8132, 3731, - 4079, 251, 4113, 1307, 6290, 2770, 662, 6841, 6792, - 6294, 900, 5110, 4306, 6618, 9974, 8342, 9695, 3143, - 349, 8326, 9528, 3464, 1504, 8313, 3204, 7429, 1054, - 9404, 1518, 4565, 6436, 3850, 3090, 1967, 9212, 1515, - 5737, 4332, 1307, 7812, 5169, 7957, 9263, 7549, 8305, - 2285, 6467, 7071, 9999, 316, 9584, 950, 4240, 3058, - 1871, 7709, 7377, 8353, 8543, 8976, 4692, 7226, 6989, - 5913, 9913, 4304, 5467, 6510, 8272, 6971, 8286, 4630, - 2781, 8362, 8692, 4774, 6297, 7725, 1668, 6756, 7100, - 1790, 4362, 6628, 6041, 4431, 2206, 3651, 7414, 1881, - 4217, 723, 6273, 1887, 8487, 8200, 432, 2818, 2742, - 7506, 5950, 5296, 9718, 1865, 3321, 5712, 5309, 379, - 7907, 5022, 6883, 7634, 8109, 8581, 2524, 5363, 4385, - 9481, 7050, 3722, 6905, 4123, 1193, 980, 1309, 149, - 1486, 4890, 4713, 1439, 1191, 9691, 6210, 5838, 8904, - 3708, 9438, 9896, 5287, 3948, 1147, 1698, 4269, 5066, - 6607, 1091, 9583, 1892, 4545, 7581, 5502, 5593, 3633, - 8334, 7742, 989, 5454, 5419, 8285, 7032, 5460, 8123, - 7047, 9497, 2742, 7941, 4354, 2115, 2169, 3579, 2975, - 9962, 9131, 790, 9536, 934, 6531, 4130, 6559, 8852, - 1912, 4091, 5108, 4143, 2911, 8718, 7157, 9474, 440, - 4893, 2348, 2435, 3348, 2896, 5362, 2471, 3557, 3045, - 5520, 967, 2083, 6596, 2972, 3187, 3743, 3692, 5305, - 3194, 8270, 1499, 3112, 1629, 7988, 1569, 6475, 4212, - 9528, 5004, 1871, 4699, 4105, 275, 5746, 4518, 8991, - 612, 6367, 1921, 7216, 7507, 1384, 2081, 8222, 1925, - 7535, 1383, 9622, 5576, 8183, 8495, 5340, 2634, 2966, - 1601, 3637, 3327, 1226, 7790, 4507, 184, 2680, 4609, - 3145, 7485, 1595, 8527, 372, 8745, 5290, 9948, 6519, - 4807, 6324, 2180, 9833, 1872, 4100, 7766, 6247, 620, - 5081, 2716, 5159, 2362, 801, 9878, 6460, 9085, 7887, - 3582, 5814, 6271, 8162, 1564, 2970, 6301, 6686, 5968, - 1206, 5937, 9353, 9579, 7815, 8300, 2535, 8236, 8452, - 3825, 4328, 576, 4303, 7075, 155, 3636, 8510, 2678, - 5217, 3095, 9634, 4084, 3672, 3215, 5812, 
8839, 9347, - 309, 7836, 94, 6329, 4746, 3609, 9296, 317, 2851, - 46, 1020, 3989, 3035, 3996, 5929, 1518, 5537, 6352, - 4863, 4746, 9733, 302, 4341, 4989, 4874, 7046, 5778, - 5478, 7701, 7095, 9882, 3225, 2841, 7350, 9700, 4660, - 1226, 2708, 7505, 6041, 8700, 5831, 1161, 7332, 1204, - 9006, 5168, 5623, 3352, 6653, 1866, 2464, 4026, 7957, - 3439, 5787, 6667, 6239, 4086, 1384, 5108, 9156, 1458, - 2719, 6125, 3779, 1062, 9282, 1403, 7331, 1773, 6329, - 9804, 5498, 6916, 3918, 4475, 3365, 6047, 5222, 3317, - 6537, 509, 4180, 521, 4580, 4289, 5484, 7623, 3868, - 4419, 5070, 6414, 6858, 2146, 7503, 7930, 3541, 7363, - 8451, 3603, 8358, 1634, 6410, 6234, 8188, 7703, 3620, - 2497, 3571, 3652, 8113, 179, 7177, 3202, 9610, 1271, - 7092, 4329, 9137, 5792, 7968, 9987, 4895, 24, 7376, - 4933, 4134, 7890, 2095, 8514, 3695, 7529, 3931, 9993, - 6758, 9985, 2616, 3644, 5578, 7035, 179, 108, 2456, - 5883, 9468, 4254, 1026, 9434, 5692, 8912, 5618, 8685, - 7416, 7309, 9289, 2276, 7230, 1611, 6179, 8849, 4226, - 9492, 2313, 5622, 5323, 5301, 5407, 5574, 1863, 9491, - 8356, 8284, 7695, 2173, 8063, 7848, 2350, 8642, 9823, - 4277, 9963, 1573, 8018, 1052, 830, 8774, 1018, 172, - 1853, 1423, 6245, 8795, 9938, 6596, 9610, 862, 6125, - 2315, 4519, 6858, 5217, 8716, 1961, 2531, 1268, 7387, - 6952, 7916, 7660, 6607, 736, 3473, 3680, 805, 4228, - 6961, 6256, 8564, 3586, 5472, 6691, 3409, 2877, 3014, - 1535, 4145, 4790, 81, 5553, 8859, 301, 6125, 1877, - 154, 4300, 2111, 1019, 1624, 3701, 9220, 8623, 2362, - 829, 739, 7388, 915, 693, 7458, 9852, 8907, 5923, - 5956, 4414, 5133, 4520, 6933, 1176, 2370, 2636, 3929, - 4820, 9022, 540, 5654, 5463, 6385, 2215, 1288, 212, - 1094, 7155, 4886, 3573, 3936, 9921, 639, 1616, 3331, - 4763, 9667, 7478, 1120, 5091, 8732, 148, 6718, 9795, - 7275, 5297, 2632, 6832, 1603, 2702, 4697, 9374, 3919, - 6930, 8975, 1858, 1630, 4399, 4416, 2167, 8305, 3364, - 9978, 4771, 8200, 8150, 408, 9331, 1152, 4518, 4395, - 6258, 7579, 7384, 2197, 8376, 1280, 5621, 5003, 9089, - 7213, 8905, 4272, 872, 6014, 5018, 3312, 2473, 8110, - 2480, 4059, 1406, 2485, 4649, 4024, 1652, 5673, 1179, - 7991, 1865, 5736, 2314, 6696, 5611, 1890, 165, 7210, - 251, 3129, 8459, 6624, 5937, 9617, 3100, 1031, 2156, - 7215, 1201, 3895, 8947, 3151, 6187, 5537, 5189, 1415, - 2673, 3741, 3405, 3568, 1498, 8041, 8172, 9828, 2910, - 6307, 2704, 3112, 4789, 5911, 386, 6854, 7793, 6204, - 9467, 5350, 6683, 8303, 7420, 9997, 6967, 7155, 6481, - 8863, 6132, 9098, 3951, 6636, 6217, 281, 2164, 3250, - 1107, 2597, 8420, 140, 9581, 4141, 400, 4846, 3527, - 491, 8715, 153, 7242, 9297, 5207, 161, 4132, 8004, - 200, 9190, 3169, 6815, 3771, 6417, 8492, 4422, 9154, - 9220, 4572, 4257, 9842, 7045, 319, 3874, 3294, 9153, - 8782]), - values=tensor([3.8610e-01, 8.5377e-01, 1.8511e-02, 7.1095e-01, - 8.1683e-01, 1.2439e-01, 3.5277e-01, 4.2816e-01, - 7.4779e-01, 1.4578e-01, 3.9159e-01, 7.8942e-02, - 5.3719e-01, 6.3787e-01, 5.1539e-01, 9.1352e-01, - 9.8974e-01, 5.5580e-01, 9.2020e-01, 5.8107e-01, - 8.2675e-01, 8.9227e-01, 2.6957e-01, 4.1450e-01, - 6.4995e-01, 1.4668e-01, 8.5365e-01, 5.6586e-02, - 6.0310e-01, 1.6855e-01, 7.1945e-01, 4.5680e-01, - 6.4410e-01, 8.7497e-01, 2.0513e-01, 8.1387e-01, - 4.2941e-01, 9.2609e-02, 5.4848e-01, 6.0317e-01, - 4.9057e-02, 7.6647e-01, 8.1898e-01, 3.6237e-01, - 2.6095e-01, 8.1706e-01, 1.8677e-01, 6.6214e-01, - 3.4065e-02, 6.2301e-03, 8.1049e-01, 6.6202e-01, - 9.2002e-01, 9.4613e-01, 4.2801e-01, 2.5149e-01, - 3.7474e-01, 4.6613e-02, 7.6286e-01, 9.5591e-01, - 5.6819e-01, 7.2473e-01, 4.5420e-01, 1.5093e-02, - 7.5841e-01, 
8.6923e-01, 8.2963e-01, 9.1618e-01, - 6.3268e-01, 3.7006e-01, 2.3403e-01, 3.7409e-01, - 9.4862e-01, 3.9135e-01, 3.1036e-01, 4.5561e-01, - 7.5923e-01, 4.9378e-01, 2.4860e-01, 2.0726e-02, - 9.3079e-01, 5.3296e-01, 8.8197e-01, 7.4151e-01, - 1.9571e-01, 6.5860e-01, 4.6394e-01, 1.0891e-02, - 7.8440e-01, 2.8027e-01, 7.0002e-01, 4.8993e-01, - 6.0351e-01, 3.4450e-01, 2.4156e-01, 9.3632e-01, - 4.7883e-01, 5.3698e-01, 6.6794e-01, 6.3277e-01, - 1.6690e-01, 9.6142e-01, 8.1668e-01, 7.2409e-01, - 9.5039e-01, 4.1018e-01, 7.6718e-02, 1.1277e-01, - 7.6653e-01, 5.7939e-02, 8.8132e-01, 2.3071e-01, - 1.1627e-01, 4.4793e-01, 1.8370e-01, 5.1300e-01, - 9.5201e-01, 8.6851e-01, 4.2478e-01, 4.8308e-01, - 3.7728e-01, 8.5856e-01, 4.4730e-01, 3.2080e-01, - 5.4767e-02, 9.7707e-01, 2.2574e-01, 5.5285e-01, - 5.6905e-01, 3.6943e-01, 7.5952e-03, 3.3257e-01, - 7.2686e-01, 4.2285e-01, 6.9822e-01, 4.3587e-01, - 7.2988e-01, 6.7748e-01, 1.8964e-01, 8.7304e-01, - 6.2990e-01, 5.3509e-01, 1.3803e-01, 8.4206e-01, - 4.7388e-01, 9.0321e-02, 3.6124e-01, 6.9940e-01, - 3.5166e-01, 6.4487e-02, 6.4762e-01, 8.7857e-01, - 8.1881e-01, 5.3164e-02, 2.4895e-01, 3.6164e-01, - 1.3464e-01, 2.9827e-01, 7.3571e-01, 3.0674e-01, - 6.0528e-01, 6.7903e-01, 3.1282e-01, 7.3350e-01, - 2.3174e-01, 3.6415e-01, 9.6921e-01, 5.4876e-01, - 5.4685e-01, 2.5853e-01, 9.7955e-01, 9.8906e-01, - 2.3474e-01, 7.2967e-01, 9.9754e-01, 4.1724e-01, - 6.2470e-01, 1.6830e-01, 9.6734e-01, 3.9468e-01, - 8.9653e-01, 4.4589e-01, 3.6177e-01, 3.5755e-01, - 4.1151e-01, 5.2403e-04, 8.0776e-01, 2.9056e-01, - 2.7543e-01, 9.3152e-01, 7.3236e-01, 6.5313e-01, - 1.0061e-01, 6.2182e-01, 6.9045e-01, 5.5055e-01, - 1.5520e-02, 1.7346e-02, 3.4910e-03, 6.4128e-01, - 7.5031e-01, 7.5949e-01, 9.3468e-02, 7.6496e-01, - 4.8529e-01, 6.2334e-01, 9.4296e-01, 3.5853e-01, - 7.2756e-01, 8.7206e-01, 5.2816e-01, 4.4438e-02, - 5.6329e-01, 2.2828e-01, 8.4849e-01, 9.4579e-01, - 8.4626e-01, 6.7086e-01, 2.8491e-01, 6.4624e-01, - 8.7513e-01, 2.6528e-02, 4.4953e-01, 9.1746e-01, - 7.8027e-01, 2.2730e-01, 7.9321e-01, 9.8797e-01, - 2.2814e-02, 8.7637e-01, 3.8994e-01, 3.2441e-01, - 6.8336e-01, 7.9926e-01, 3.8718e-01, 3.4786e-03, - 1.1534e-01, 7.4916e-01, 2.7534e-01, 9.8439e-02, - 9.1131e-02, 9.1836e-01, 6.1330e-01, 1.3102e-01, - 5.4847e-03, 8.6561e-01, 5.7958e-01, 8.0518e-01, - 2.8465e-01, 7.7917e-01, 4.3560e-01, 5.6084e-01, - 7.5286e-01, 6.3866e-01, 7.2910e-01, 3.0899e-01, - 9.1412e-01, 2.6555e-03, 2.3826e-01, 5.6212e-01, - 9.9504e-01, 5.0756e-01, 9.6585e-01, 8.8996e-01, - 4.2407e-01, 7.6316e-01, 5.6013e-01, 6.6055e-01, - 8.3748e-01, 6.2385e-01, 1.5983e-01, 9.3548e-01, - 6.6605e-01, 9.8112e-01, 5.8093e-01, 5.0434e-02, - 4.9205e-01, 5.1871e-01, 1.2512e-01, 5.0351e-01, - 1.8898e-01, 9.4631e-01, 4.2293e-02, 3.1088e-01, - 3.4520e-01, 5.0879e-01, 3.5372e-01, 7.2930e-01, - 5.2299e-01, 7.0660e-01, 7.7671e-01, 8.5673e-01, - 5.0209e-01, 7.6491e-01, 1.9923e-01, 1.5535e-01, - 8.8992e-01, 6.8837e-01, 2.4943e-02, 3.0610e-01, - 4.3221e-01, 4.6599e-01, 1.3047e-01, 4.3773e-01, - 3.6251e-01, 4.6205e-01, 1.0793e-01, 3.0838e-01, - 6.4493e-02, 5.2499e-01, 2.5719e-01, 7.0242e-01, - 2.3217e-01, 2.9568e-01, 8.8686e-01, 6.9929e-01, - 5.0218e-01, 6.2510e-01, 5.9748e-01, 8.6062e-01, - 9.3881e-01, 5.5598e-01, 8.9438e-01, 2.0280e-01, - 8.2777e-01, 9.5075e-01, 4.8198e-01, 6.2637e-01, - 6.3249e-01, 8.9193e-01, 9.1039e-01, 6.4972e-02, - 8.9820e-01, 5.2690e-01, 5.8321e-01, 7.9388e-01, - 7.0610e-01, 2.6038e-01, 2.6840e-01, 7.2624e-01, - 7.5041e-01, 4.5002e-01, 8.1630e-01, 5.7319e-02, - 9.1830e-01, 5.0848e-01, 8.3542e-01, 7.1332e-01, - 7.1294e-01, 
9.2191e-01, 5.1896e-01, 1.2938e-01, - 9.5730e-01, 3.2242e-02, 6.5653e-01, 8.2194e-02, - 8.7798e-01, 8.3940e-01, 2.2781e-01, 5.9478e-01, - 7.2513e-01, 5.8600e-01, 5.5875e-01, 5.7178e-01, - 3.3916e-01, 9.8096e-01, 7.7440e-01, 8.1663e-01, - 4.5610e-01, 6.9842e-01, 8.7732e-01, 6.5963e-01, - 4.5872e-01, 1.5583e-01, 6.3053e-01, 8.6648e-01, - 3.9767e-01, 7.5734e-01, 7.1577e-01, 1.4568e-01, - 4.0268e-02, 7.2023e-01, 4.8936e-01, 9.0829e-01, - 5.9611e-01, 6.9061e-01, 8.1805e-01, 8.9041e-01, - 3.1423e-01, 1.5868e-01, 6.1366e-01, 9.2687e-01, - 8.9448e-01, 1.4864e-01, 7.1271e-01, 4.6104e-01, - 7.6188e-03, 2.2407e-01, 9.2907e-01, 1.1651e-01, - 6.1987e-01, 2.4907e-01, 9.6456e-01, 8.0986e-01, - 6.9956e-01, 9.0349e-01, 1.2001e-01, 3.6328e-01, - 1.6739e-01, 7.2792e-01, 8.4913e-01, 1.3978e-01, - 3.6502e-01, 8.3758e-03, 8.5686e-01, 1.3986e-01, - 8.7100e-01, 4.5934e-01, 7.1594e-01, 6.7867e-01, - 8.6897e-01, 6.6908e-01, 8.4732e-01, 6.6558e-01, - 5.1856e-01, 4.0954e-01, 6.9826e-01, 4.7976e-01, - 5.2070e-01, 1.3197e-01, 1.4248e-01, 7.8979e-01, - 3.8558e-01, 1.1534e-01, 7.6786e-01, 4.1531e-01, - 9.6341e-01, 3.9001e-01, 8.2289e-02, 8.0837e-01, - 7.1545e-01, 2.5727e-01, 7.6374e-01, 8.4542e-01, - 8.5587e-02, 5.2770e-01, 6.5768e-01, 8.0251e-01, - 2.2113e-01, 9.9737e-01, 6.1830e-01, 3.1632e-01, - 1.2613e-01, 4.1147e-01, 2.1405e-01, 1.2762e-01, - 1.1111e-01, 5.4527e-01, 1.6500e-01, 4.2444e-01, - 5.8801e-01, 1.6257e-01, 6.6900e-01, 7.6794e-01, - 4.0036e-01, 5.3006e-02, 3.5051e-02, 9.5344e-01, - 4.8371e-01, 4.9565e-01, 9.0943e-01, 2.3101e-01, - 2.0090e-01, 4.8225e-01, 3.0712e-02, 7.5834e-01, - 6.7334e-01, 1.1353e-01, 3.9036e-02, 6.9230e-01, - 9.0893e-02, 7.5872e-01, 8.7894e-01, 5.9393e-01, - 2.6585e-01, 1.6676e-01, 4.8532e-01, 5.9465e-01, - 4.4222e-01, 2.0170e-01, 9.2328e-01, 6.8154e-01, - 8.4402e-01, 7.0562e-01, 2.6520e-01, 8.8657e-01, - 5.4459e-01, 7.8087e-01, 2.0055e-01, 8.1105e-01, - 3.4705e-01, 7.8759e-01, 8.9155e-01, 6.5331e-01, - 6.7577e-01, 1.4852e-01, 9.2796e-01, 9.0692e-01, - 6.3434e-01, 3.1111e-01, 5.7140e-01, 9.0780e-01, - 4.6895e-01, 2.0287e-01, 3.3299e-01, 7.6187e-01, - 5.7104e-01, 2.4415e-01, 7.6380e-01, 1.7908e-01, - 8.0681e-01, 1.2670e-02, 1.6363e-01, 2.2970e-01, - 6.1572e-01, 5.6717e-01, 4.6578e-01, 8.6430e-01, - 2.4153e-01, 9.1603e-01, 5.3253e-01, 5.0666e-03, - 9.1421e-02, 5.1296e-02, 1.0304e-01, 9.9405e-01, - 9.5872e-01, 1.3353e-01, 3.5694e-01, 5.4543e-01, - 6.2537e-01, 2.8650e-01, 7.9959e-01, 8.2562e-01, - 2.7264e-01, 5.2583e-01, 6.3161e-01, 1.9397e-02, - 6.9587e-01, 3.3762e-01, 4.4200e-01, 1.8451e-01, - 3.6916e-01, 2.8108e-02, 7.7783e-02, 2.3773e-01, - 9.0051e-01, 2.8460e-01, 8.1089e-01, 9.4479e-01, - 9.7155e-01, 4.0098e-01, 6.8592e-01, 7.5205e-01, - 2.9211e-02, 5.6538e-02, 9.3352e-01, 7.7910e-03, - 8.0618e-01, 4.0845e-01, 2.0409e-02, 3.5837e-01, - 1.8328e-01, 3.9909e-01, 4.1918e-01, 4.8170e-01, - 9.9960e-01, 5.9804e-01, 2.0861e-01, 1.0751e-02, - 8.0381e-01, 5.3288e-01, 1.3326e-01, 8.6459e-01, - 5.5114e-01, 2.9667e-01, 7.3363e-01, 3.0714e-01, - 9.1825e-01, 7.6928e-02, 2.7699e-01, 6.2910e-01, - 3.6845e-01, 3.7640e-01, 6.0170e-01, 9.6457e-01, - 4.3097e-02, 6.8313e-01, 1.2273e-01, 5.5638e-01, - 9.3322e-01, 2.6998e-02, 6.8256e-01, 2.8860e-01, - 3.2874e-01, 6.0891e-02, 4.4244e-02, 5.2327e-01, - 7.6976e-02, 6.1998e-02, 3.1748e-01, 4.9074e-01, - 3.2159e-01, 1.1840e-01, 3.3449e-01, 9.8168e-01, - 2.0415e-01, 5.4059e-01, 6.0272e-01, 5.1307e-01, - 1.8527e-01, 4.1767e-01, 7.1985e-01, 5.0042e-01, - 8.5363e-01, 4.6183e-01, 7.7202e-01, 7.8648e-01, - 6.9129e-01, 9.0911e-01, 3.2770e-01, 5.1269e-01, - 6.9536e-01, 
3.6706e-01, 9.1723e-01, 3.9701e-01, - 6.7993e-01, 3.2314e-01, 6.8715e-02, 8.2246e-02, - 2.8073e-01, 1.1831e-01, 6.2561e-01, 3.6529e-01, - 3.3884e-01, 3.6135e-01, 5.4802e-01, 6.7573e-01, - 5.0165e-01, 8.1397e-01, 8.5096e-01, 1.1027e-01, - 6.5757e-01, 6.0696e-01, 7.2836e-01, 5.8805e-01, - 9.2758e-01, 2.4110e-01, 3.3509e-01, 7.7407e-01, - 9.2691e-01, 5.3329e-01, 8.2629e-01, 7.0255e-01, - 7.7681e-01, 6.8983e-01, 2.5614e-02, 6.3758e-01, - 7.2682e-01, 8.5240e-01, 5.2949e-01, 7.1705e-01, - 8.5069e-01, 4.2186e-01, 3.0347e-01, 9.0121e-01, - 8.2273e-01, 4.1961e-01, 5.9519e-01, 7.2062e-01, - 5.8577e-01, 8.5471e-01, 4.2713e-01, 6.4293e-02, - 6.3663e-01, 6.6423e-01, 2.2498e-01, 5.1988e-01, - 6.5365e-01, 9.3245e-01, 1.7852e-01, 8.1156e-01, - 6.7345e-02, 2.4846e-01, 9.9602e-01, 9.1934e-02, - 4.1853e-01, 3.7241e-01, 9.4301e-01, 1.7991e-01, - 5.9621e-01, 6.9136e-01, 5.2793e-01, 1.9118e-01, - 1.9793e-01, 6.0317e-01, 9.6882e-01, 1.9873e-01, - 8.3014e-01, 7.9369e-01, 2.1870e-01, 1.9086e-01, - 6.9388e-01, 7.9719e-01, 4.1397e-01, 5.7192e-01, - 4.9351e-01, 2.4139e-01, 5.9855e-01, 7.0341e-01, - 1.4905e-02, 2.4436e-01, 9.2685e-01, 4.5334e-01, - 7.4665e-01, 5.1727e-01, 8.2996e-01, 2.0033e-01, - 9.2888e-01, 5.0175e-01, 5.4601e-01, 6.1251e-01, - 5.4993e-01, 9.6161e-01, 1.2794e-01, 3.2116e-01, - 8.4421e-01, 7.0886e-01, 3.5791e-01, 6.6184e-01, - 1.8923e-02, 2.5903e-01, 8.1313e-01, 4.1493e-01, - 8.8826e-01, 5.0309e-01, 5.5726e-01, 7.3682e-01, - 1.2897e-01, 2.6581e-01, 4.6832e-01, 7.7950e-01, - 2.3775e-01, 6.2503e-01, 2.5789e-01, 4.4719e-01, - 3.2605e-01, 1.6738e-01, 1.3471e-01, 5.8312e-01, - 2.6249e-01, 9.6756e-01, 4.5744e-01, 2.1122e-01, - 5.6749e-01, 1.6200e-01, 2.4114e-01, 3.4166e-01, - 3.6612e-01, 1.0509e-02, 5.1611e-01, 9.0580e-01, - 3.3763e-01, 9.2852e-01, 1.7939e-02, 8.7145e-01, - 8.8206e-01, 5.6772e-01, 3.3696e-01, 3.5279e-01, - 5.1465e-01, 6.8700e-01, 3.4390e-01, 6.7643e-01, - 6.6427e-01, 3.5840e-01, 1.5537e-02, 1.8473e-01, - 4.9930e-01, 3.1875e-01, 4.9058e-01, 9.2293e-01, - 5.6647e-01, 8.1066e-01, 5.2922e-01, 6.1174e-01, - 9.4076e-01, 4.0053e-01, 5.6874e-01, 9.8550e-01, - 7.6898e-01, 4.4491e-01, 9.4084e-01, 7.9641e-01, - 1.9048e-01, 6.7339e-01, 2.7576e-01, 3.5103e-01, - 8.8099e-01, 2.7326e-01, 8.0563e-01, 9.3555e-01, - 3.1474e-01, 4.7528e-01, 2.1165e-01, 5.4853e-01, - 6.8904e-01, 6.7265e-01, 1.7309e-02, 1.6399e-01, - 7.6108e-01, 2.3543e-01, 2.6175e-02, 9.0095e-01, - 6.2691e-01, 2.8392e-01, 7.7971e-01, 8.3351e-01, - 4.8417e-01, 5.2438e-01, 8.9569e-01, 3.2099e-01, - 1.8007e-01, 6.4459e-01, 3.2528e-02, 8.0234e-01, - 4.2430e-01, 2.4034e-01, 6.8400e-01, 6.6685e-01, - 8.1383e-01, 3.3149e-01, 3.7153e-01, 4.3221e-01, - 5.1683e-01, 4.8905e-01, 5.5635e-01, 4.2207e-01, - 8.0436e-01, 1.6030e-01, 4.4301e-01, 8.0845e-01, - 8.7237e-01, 2.8558e-01, 6.1831e-01, 7.4397e-01, - 6.6461e-01, 7.2410e-01, 9.6853e-01, 3.9492e-01, - 3.8188e-01, 5.0831e-01, 2.1636e-01, 3.5060e-01, - 2.3646e-01, 5.6253e-01, 6.9733e-01, 2.7720e-01, - 5.3757e-01, 8.8048e-01, 7.1650e-01, 9.6798e-01, - 7.7762e-01, 5.5153e-01, 6.1393e-01, 4.9133e-01, - 6.8381e-01, 6.6995e-02, 2.2135e-01, 5.2027e-01, - 2.3479e-01, 9.4917e-01, 4.1574e-01, 8.1023e-01, - 2.7657e-01, 4.8275e-01, 6.6333e-01, 6.5800e-01, - 9.8084e-01, 7.7307e-01, 2.9222e-02, 7.2481e-01, - 1.9828e-02, 2.7554e-01, 9.6906e-01, 1.0409e-01, - 6.4309e-01, 9.2824e-01, 7.2999e-01, 5.6517e-01, - 5.0434e-01, 4.7459e-01, 7.8320e-01, 3.3435e-01, - 2.4506e-01, 9.0578e-01, 5.1393e-01, 6.3417e-01, - 9.1206e-01, 1.7725e-01, 7.9767e-01, 8.8333e-01, - 7.5073e-01, 5.1574e-01, 5.4984e-01, 7.6118e-01, - 2.1850e-01, 
1.6803e-01, 4.0066e-01, 8.0536e-02, - 8.9116e-01, 5.0132e-01, 9.0927e-01, 8.7983e-01, - 1.8210e-01, 9.5577e-01, 5.7504e-01, 6.1325e-01, - 5.4429e-01, 2.3767e-01, 3.5357e-01, 7.5911e-01, - 8.6730e-01, 4.9418e-02, 1.7969e-01, 8.5584e-01, - 3.1254e-01, 9.5390e-01, 3.9608e-01, 7.2622e-01, - 3.6906e-01, 5.6365e-01, 4.7089e-01, 9.1569e-02, - 7.5723e-01, 6.6632e-01, 1.8609e-01, 1.0110e-01, - 5.6893e-01, 9.3812e-01, 9.0428e-01, 1.8241e-01, - 1.4446e-01, 5.7043e-01, 8.7504e-03, 9.7478e-01, - 5.0913e-01, 2.0822e-01, 1.6866e-01, 8.7459e-01, - 7.2484e-02, 2.6479e-01, 6.7042e-01, 7.2394e-01, - 1.3267e-01, 5.8664e-01, 6.4844e-01, 1.5520e-01, - 4.8434e-01, 9.0714e-02, 9.9916e-02, 4.3195e-02, - 4.7733e-01, 6.8749e-01, 8.3543e-01, 4.4062e-01, - 5.9982e-01, 2.5620e-01, 3.7227e-01, 6.7200e-01, - 7.5098e-01, 9.3886e-01, 8.9364e-01, 7.4407e-02, - 1.5111e-01, 3.7773e-01, 3.3716e-01, 8.3074e-01, - 6.6617e-01, 1.1146e-01, 5.2723e-02, 8.9229e-01, - 9.9407e-01, 7.6735e-01, 1.3311e-01, 5.2952e-01, - 7.5053e-02, 7.9242e-01, 2.9142e-01, 5.4645e-01]), + col_indices=tensor([6383, 9274, 6714, 1877, 8353, 1605, 5999, 1796, 5365, + 3930, 9520, 2847, 2823, 9314, 878, 1171, 789, 4855, + 1239, 4945, 9512, 2414, 4242, 3280, 9138, 2705, 8386, + 1358, 7167, 6776, 3077, 2706, 1601, 9776, 440, 1987, + 2962, 1560, 963, 8733, 6314, 9472, 1198, 5524, 9588, + 6620, 5915, 9628, 1948, 2856, 7361, 8102, 1921, 317, + 3635, 8351, 398, 3096, 5442, 3754, 9816, 9759, 1764, + 7897, 1381, 8358, 7399, 1750, 6958, 6244, 5827, 3658, + 5358, 8935, 3033, 2361, 9740, 1904, 3442, 5265, 3486, + 7837, 5896, 6077, 940, 859, 7624, 8137, 9568, 7872, + 5275, 1394, 1371, 3037, 8722, 3615, 613, 3546, 922, + 2374, 6475, 9833, 8388, 4024, 8752, 504, 2530, 3393, + 7229, 2927, 9056, 8184, 9133, 3219, 3757, 663, 6699, + 9687, 4268, 8207, 7564, 6066, 4606, 6348, 7399, 792, + 1254, 9195, 1633, 5578, 1160, 6151, 2930, 4510, 1744, + 2676, 7637, 2953, 714, 2517, 9553, 2625, 4743, 1087, + 1174, 8209, 6889, 9533, 5016, 7802, 7355, 3252, 1863, + 3071, 4348, 2093, 3516, 9242, 2258, 6136, 5702, 3486, + 1591, 5799, 7265, 6648, 9292, 2493, 2123, 5933, 8709, + 1232, 7419, 9165, 8089, 7038, 8363, 8018, 9122, 5419, + 1304, 4248, 6241, 2754, 873, 5120, 7174, 5059, 1279, + 6941, 1281, 4873, 6045, 6310, 8096, 887, 7297, 4791, + 466, 6605, 1029, 9605, 9584, 9826, 8585, 2104, 2883, + 3035, 2165, 8305, 6031, 496, 9626, 7411, 8066, 2156, + 1215, 1966, 4336, 7383, 9518, 5430, 8869, 6466, 2586, + 6361, 7582, 5192, 2624, 9619, 300, 5410, 5645, 8155, + 7063, 6516, 590, 9678, 1227, 9210, 7409, 5200, 8674, + 7052, 4617, 852, 7617, 5330, 6043, 4879, 7775, 2536, + 6658, 5503, 485, 469, 872, 2272, 5436, 1205, 3792, + 2189, 2718, 3742, 8715, 1010, 601, 9723, 540, 7367, + 6201, 1601, 8971, 8680, 9487, 8319, 8794, 3130, 7235, + 684, 8393, 8938, 5496, 1418, 1174, 6820, 9098, 7841, + 8048, 4878, 5911, 2411, 2497, 5560, 5730, 1980, 4820, + 8192, 5034, 9739, 1519, 7929, 5706, 4698, 2337, 3314, + 4406, 5270, 2083, 1170, 1073, 8319, 5028, 4714, 8179, + 8887, 4754, 5306, 8128, 6764, 3411, 5129, 5445, 7033, + 291, 5899, 1513, 1936, 9728, 887, 9094, 1581, 5755, + 5499, 9400, 897, 4218, 831, 181, 6010, 792, 1733, + 3180, 1605, 4324, 6737, 8890, 2740, 5139, 5523, 3922, + 3880, 1990, 1793, 620, 969, 9387, 4728, 1714, 9998, + 1726, 4870, 2377, 4546, 5011, 7007, 7838, 2461, 6873, + 5307, 9832, 7342, 8896, 2269, 2149, 796, 1775, 4367, + 595, 208, 7675, 2101, 6763, 5116, 2725, 3048, 6503, + 111, 3377, 501, 3532, 8108, 4955, 3891, 2732, 9902, + 3935, 6940, 1454, 2342, 5394, 3415, 6997, 4265, 2768, + 1891, 551, 
6399, 9970, 212, 676, 115, 7509, 1994, + 1365, 6507, 446, 6103, 1616, 475, 4972, 6080, 3907, + 5266, 1863, 6262, 4594, 2730, 2136, 1306, 8829, 7400, + 6951, 9597, 2871, 425, 4270, 4075, 7813, 5724, 491, + 1546, 4808, 2667, 424, 4869, 533, 7802, 7978, 5359, + 7323, 6788, 2236, 4507, 6700, 1908, 4483, 2707, 2187, + 9, 7538, 8665, 6317, 269, 122, 6768, 1608, 4519, + 3268, 7042, 2581, 8723, 4880, 8168, 5481, 5502, 2412, + 8435, 4371, 6894, 4642, 269, 8016, 5244, 2864, 1385, + 4290, 2264, 5959, 3490, 7620, 5669, 5644, 264, 3156, + 5796, 1462, 8783, 8243, 8014, 7490, 7635, 956, 7963, + 1670, 5536, 7556, 3571, 2411, 8733, 6892, 3054, 2242, + 3319, 6152, 5627, 1599, 6407, 221, 4578, 8552, 4250, + 1430, 1510, 9155, 8127, 8022, 2793, 8499, 4458, 4865, + 2331, 1798, 1880, 4029, 7261, 9028, 8250, 2196, 6678, + 9700, 9925, 2589, 6132, 6173, 1348, 7431, 7704, 2733, + 2454, 2442, 9114, 9585, 1124, 6162, 3354, 4947, 5191, + 6780, 1395, 1249, 4025, 5842, 9222, 9573, 6477, 2997, + 8691, 2790, 6282, 9276, 8351, 4542, 2857, 9867, 9280, + 605, 5983, 3802, 3249, 5638, 3630, 6295, 3636, 5109, + 3839, 2127, 1993, 9573, 733, 2829, 8284, 7837, 104, + 1260, 6063, 1339, 2800, 8811, 9573, 5199, 9566, 5397, + 2383, 1093, 1605, 7321, 7738, 4474, 1195, 2341, 6590, + 3031, 3539, 9543, 8855, 2899, 8584, 3666, 6057, 3097, + 1710, 2080, 1680, 2035, 3246, 3794, 3961, 7935, 8012, + 230, 2567, 6131, 2119, 6917, 4531, 5025, 9437, 487, + 5420, 3130, 6759, 3954, 1927, 2372, 7845, 4030, 5649, + 5496, 4682, 3981, 1241, 7520, 4578, 6285, 6325, 2618, + 2620, 9385, 1559, 398, 80, 8118, 8181, 9899, 9218, + 1363, 6328, 7389, 7318, 1576, 9402, 6398, 1317, 7863, + 9165, 2489, 9976, 4333, 5646, 1829, 5444, 2310, 5630, + 9885, 7792, 9532, 2518, 5988, 7325, 2272, 7606, 3562, + 9655, 2142, 9852, 3257, 5574, 761, 777, 9007, 5342, + 8394, 7902, 5591, 1014, 3304, 3523, 4385, 201, 613, + 7277, 2294, 839, 3228, 3438, 3320, 825, 1976, 2535, + 2160, 7131, 3754, 4231, 9282, 3433, 746, 5109, 821, + 2629, 6240, 3118, 5594, 2724, 8893, 2770, 9089, 2264, + 2772, 98, 6612, 2634, 4949, 4014, 5966, 7771, 5023, + 6816, 8449, 8334, 3904, 6096, 220, 1871, 1209, 6030, + 2988, 8807, 8859, 5498, 2156, 6162, 2956, 3255, 4520, + 551, 5056, 354, 5688, 2184, 5560, 6871, 3051, 5077, + 365, 7938, 1908, 6673, 3829, 1353, 309, 3434, 6400, + 4462, 102, 7040, 4039, 8295, 5924, 6295, 4128, 2709, + 8915, 3956, 4, 4662, 4129, 5107, 7924, 8101, 1577, + 8320, 3615, 9639, 2384, 2903, 1764, 3274, 1823, 9907, + 4188, 4421, 4831, 8665, 696, 9707, 9091, 5499, 8953, + 4239, 6095, 188, 1724, 1052, 1976, 5671, 331, 9384, + 3313, 407, 3377, 3450, 515, 3069, 1312, 5701, 3274, + 9772, 9870, 1055, 7069, 1837, 1109, 3098, 3376, 8274, + 6961, 5904, 9264, 8976, 2046, 113, 4884, 1350, 5017, + 9850, 380, 1793, 8048, 1576, 3554, 1715, 3908, 6361, + 1522, 247, 1651, 2578, 4144, 3634, 6361, 4906, 7126, + 9942, 7325, 2639, 9079, 1897, 793, 2892, 6303, 3136, + 3242, 3791, 9557, 6849, 3703, 9298, 587, 5095, 5259, + 7680, 9748, 4933, 6433, 1694, 3211, 3059, 2472, 9440, + 7705, 5477, 6483, 8927, 5027, 5316, 6102, 4259, 6605, + 5177, 4412, 813, 1943, 1193, 3348, 2607, 6021, 2242, + 6863, 8185, 7503, 1843, 206, 6765, 3597, 7177, 4395, + 6164, 8066, 1818, 4615, 3322, 8069, 1307, 8534, 1659, + 1019, 7920, 7538, 2486, 4900, 8346, 5227, 4947, 8886, + 792, 195, 1183, 4459, 5657, 1794, 5647, 2133, 6487, + 5427, 944, 454, 1934, 1445, 9041, 7978, 9901, 2979, + 4007, 3749, 1538, 4357, 9121, 1266, 3599, 3349, 8206, + 6917, 8038, 9766, 4896, 5840, 7474, 6525, 2424, 7892, + 726, 7949, 2883, 6597, 2250, 1594, 4029, 
9164, 1981, + 6711, 8340, 6414, 7782, 3037, 4276, 7404, 8953, 2703, + 5322]), + values=tensor([1.0172e-01, 2.0291e-01, 5.0788e-01, 7.5343e-01, + 2.8887e-02, 4.5989e-01, 9.0792e-01, 5.6796e-01, + 3.8480e-01, 9.3370e-01, 2.1291e-01, 2.1005e-01, + 4.8619e-01, 4.8840e-01, 8.2772e-01, 5.9982e-01, + 4.3242e-01, 1.9964e-02, 5.8658e-01, 9.8603e-01, + 9.0572e-01, 2.7327e-01, 8.6683e-01, 4.5107e-01, + 6.3337e-01, 9.7708e-01, 4.5076e-01, 7.7090e-01, + 3.8583e-01, 3.4535e-01, 1.0137e-01, 3.3772e-01, + 5.7964e-01, 6.4588e-01, 1.5629e-01, 4.4453e-01, + 7.5256e-01, 4.0597e-01, 3.7064e-01, 4.0525e-01, + 1.3423e-01, 8.5332e-01, 3.9740e-01, 5.1268e-01, + 3.8336e-01, 9.0349e-03, 4.5925e-01, 6.9530e-01, + 8.4604e-01, 2.6220e-01, 5.1738e-01, 7.5711e-01, + 1.0444e-01, 7.2189e-01, 3.8648e-01, 9.0680e-01, + 2.0340e-01, 5.9320e-01, 3.0855e-01, 8.3742e-01, + 5.1916e-01, 2.8005e-01, 9.0532e-02, 7.1539e-01, + 5.3585e-01, 2.0187e-01, 8.1360e-01, 8.8180e-01, + 6.0232e-01, 5.3635e-01, 5.9378e-01, 2.6039e-01, + 6.8202e-01, 1.1946e-01, 7.3709e-01, 7.4553e-01, + 1.5187e-01, 6.3926e-01, 6.4444e-01, 6.5945e-01, + 6.0463e-01, 7.7537e-01, 3.8424e-01, 1.5474e-01, + 5.5407e-01, 2.3577e-01, 2.2865e-01, 3.7094e-01, + 6.2494e-01, 6.4704e-01, 7.6411e-01, 1.5001e-01, + 9.9450e-01, 6.4388e-01, 1.6830e-01, 5.0931e-01, + 5.1682e-01, 7.1058e-01, 1.2550e-01, 5.0820e-01, + 1.2769e-01, 2.7652e-01, 8.4249e-01, 7.6136e-01, + 4.4034e-01, 8.1325e-01, 4.7786e-01, 4.4585e-01, + 9.8290e-01, 6.4397e-01, 5.9684e-01, 7.4755e-01, + 9.3206e-01, 7.3601e-01, 7.1645e-01, 6.1850e-01, + 4.8790e-01, 3.8227e-01, 9.5079e-01, 3.4351e-01, + 6.2039e-01, 8.6747e-01, 2.4123e-01, 8.8493e-01, + 2.7700e-01, 3.7887e-01, 6.4731e-02, 7.2510e-01, + 6.2100e-01, 8.2008e-01, 8.9062e-01, 3.6719e-01, + 8.8582e-02, 6.7116e-01, 6.2448e-01, 9.8017e-01, + 1.4951e-01, 2.8197e-01, 1.6727e-01, 8.0102e-01, + 9.5490e-01, 4.4567e-01, 6.8016e-01, 1.7576e-01, + 4.0546e-01, 7.8618e-01, 4.1426e-01, 3.2969e-01, + 5.4109e-01, 6.6685e-01, 6.5734e-01, 4.4100e-01, + 6.6909e-01, 6.5982e-01, 1.5156e-01, 1.4792e-01, + 7.2379e-01, 9.5439e-01, 4.2359e-01, 2.4194e-01, + 5.8815e-01, 8.5528e-01, 8.4777e-01, 2.4329e-01, + 6.8267e-01, 9.2453e-01, 1.8318e-01, 8.4302e-01, + 5.5891e-01, 8.9433e-02, 9.4595e-01, 7.7506e-01, + 6.9545e-01, 5.4637e-01, 2.1200e-01, 9.6321e-01, + 8.4821e-01, 6.1581e-01, 4.8304e-01, 5.8504e-01, + 2.2829e-01, 4.8178e-01, 3.0840e-01, 1.0705e-01, + 2.7605e-01, 3.8084e-02, 6.2027e-01, 7.6792e-01, + 8.0614e-01, 6.0375e-01, 2.4001e-01, 1.8151e-01, + 3.6253e-01, 9.3944e-01, 8.1835e-01, 4.9776e-01, + 8.9759e-01, 3.5034e-02, 8.5087e-01, 7.7182e-01, + 7.1234e-01, 7.6312e-02, 9.9754e-01, 9.8260e-01, + 1.8777e-01, 4.4051e-01, 8.5192e-02, 1.2853e-01, + 3.6787e-01, 5.9349e-02, 1.1331e-01, 5.8029e-01, + 2.2840e-01, 2.6804e-01, 3.7637e-01, 9.7706e-01, + 8.4800e-01, 7.4790e-01, 7.8013e-01, 5.3943e-01, + 2.8307e-01, 2.0774e-01, 5.1149e-01, 8.3038e-01, + 8.3591e-01, 3.5520e-01, 4.0826e-01, 3.8253e-01, + 9.0709e-01, 4.1980e-02, 1.5234e-01, 6.8713e-01, + 5.0322e-01, 7.1516e-01, 5.9083e-02, 6.9041e-01, + 6.6428e-01, 2.1388e-01, 9.9842e-01, 2.2547e-01, + 8.4151e-01, 5.3777e-01, 5.9855e-01, 8.9719e-01, + 2.3899e-01, 9.2750e-01, 7.2911e-01, 9.3399e-01, + 6.7851e-01, 9.4539e-01, 3.2884e-01, 5.8840e-01, + 3.4729e-01, 7.0883e-01, 7.4526e-01, 8.5380e-02, + 1.0709e-01, 5.2472e-01, 6.6905e-01, 2.5688e-01, + 3.5506e-01, 5.6273e-01, 6.8897e-01, 2.9106e-01, + 3.1131e-01, 1.9292e-01, 6.3099e-01, 3.7207e-01, + 9.0646e-01, 2.3996e-01, 6.9820e-01, 9.6323e-01, + 3.9366e-01, 7.2790e-02, 1.1659e-02, 2.2438e-01, + 
5.2232e-01, 7.1127e-01, 1.8416e-01, 8.4241e-01, + 5.4006e-01, 5.3537e-01, 1.7104e-01, 5.5774e-01, + 6.7393e-01, 6.6086e-01, 9.8136e-01, 6.3804e-01, + 8.4662e-01, 6.2611e-01, 1.9275e-01, 5.4222e-01, + 3.9854e-01, 3.9150e-01, 2.3447e-02, 9.3221e-01, + 8.5552e-02, 7.7426e-01, 5.9156e-01, 5.9357e-01, + 6.5440e-01, 3.1204e-03, 7.2965e-02, 1.2815e-01, + 6.1375e-01, 3.9290e-01, 7.3969e-01, 1.8341e-02, + 3.0123e-01, 7.3395e-02, 3.7094e-01, 3.6816e-03, + 4.3551e-01, 4.2500e-01, 9.1091e-01, 5.7215e-01, + 7.9613e-01, 2.8807e-01, 5.6742e-01, 1.5464e-01, + 9.1068e-01, 4.9303e-01, 4.3928e-01, 4.8248e-01, + 8.9812e-02, 1.0599e-01, 2.3276e-01, 5.0910e-04, + 6.4726e-01, 2.8003e-01, 6.8147e-01, 9.5861e-01, + 9.1664e-01, 6.9091e-01, 9.9642e-02, 6.5848e-01, + 9.7178e-01, 2.1680e-02, 3.0681e-01, 3.7676e-01, + 1.6026e-01, 7.6453e-01, 2.5057e-01, 8.2409e-01, + 6.1549e-01, 5.2332e-03, 1.6371e-01, 5.9321e-01, + 2.2698e-01, 4.2453e-01, 4.3058e-01, 5.2856e-01, + 3.8744e-01, 7.9877e-01, 6.0112e-01, 2.1771e-02, + 8.1726e-01, 7.2955e-01, 2.0791e-01, 6.7164e-01, + 4.4680e-01, 7.2193e-01, 9.4657e-01, 8.6552e-01, + 4.8617e-03, 5.7976e-01, 6.2864e-01, 4.8684e-02, + 6.2091e-01, 1.0214e-01, 6.3268e-05, 3.9941e-01, + 8.3198e-02, 8.4986e-01, 7.7637e-01, 3.0430e-01, + 7.2244e-01, 3.7223e-01, 1.2736e-02, 8.4226e-01, + 1.0132e-01, 3.7455e-01, 8.4925e-01, 7.2407e-01, + 7.4307e-01, 8.4997e-02, 8.1314e-01, 1.8099e-01, + 6.3386e-01, 6.6152e-01, 3.1652e-01, 8.4241e-01, + 7.8623e-01, 5.4581e-02, 8.9822e-01, 7.4640e-01, + 5.7991e-01, 9.6135e-01, 6.2166e-01, 9.8668e-01, + 6.3014e-02, 5.4185e-01, 1.2958e-02, 7.0761e-01, + 7.6812e-01, 8.6145e-01, 8.9733e-01, 9.9713e-01, + 6.2770e-01, 6.6090e-02, 6.8205e-01, 8.6057e-01, + 3.8954e-01, 7.9391e-01, 6.3376e-01, 3.1313e-01, + 9.5983e-01, 4.9517e-01, 9.8872e-01, 7.0193e-01, + 9.0731e-01, 9.7114e-01, 9.8860e-01, 7.1761e-01, + 9.7612e-01, 4.0387e-01, 4.8370e-01, 4.6904e-01, + 2.3920e-01, 1.1252e-01, 7.3034e-01, 5.5871e-01, + 9.4539e-01, 8.3507e-01, 1.5855e-01, 2.3442e-01, + 5.9937e-01, 7.0363e-01, 3.2310e-01, 8.4604e-01, + 4.9275e-01, 7.2605e-03, 9.4643e-01, 5.8938e-01, + 6.5456e-01, 7.9821e-01, 7.2550e-01, 9.6326e-02, + 6.7549e-01, 9.6211e-01, 2.6435e-01, 1.2144e-01, + 6.0175e-01, 3.2498e-01, 7.6349e-02, 5.3745e-01, + 5.8739e-01, 6.9290e-01, 3.5692e-01, 4.2637e-01, + 1.8315e-01, 6.0577e-01, 8.0275e-01, 7.7750e-01, + 3.5764e-01, 9.6740e-01, 5.4969e-01, 8.8072e-01, + 7.3039e-01, 3.3487e-02, 6.8732e-01, 4.0520e-01, + 9.2759e-01, 4.3387e-01, 8.2575e-02, 3.9573e-01, + 7.5306e-01, 6.4364e-01, 6.7135e-01, 3.1436e-01, + 8.0201e-01, 3.5584e-01, 9.2251e-01, 8.1706e-01, + 8.6434e-01, 8.9677e-01, 7.0093e-01, 7.2436e-01, + 7.5830e-01, 1.5639e-01, 4.7409e-01, 7.7152e-01, + 4.9498e-01, 6.7002e-01, 4.3165e-01, 9.5675e-01, + 6.4495e-01, 4.1482e-01, 9.9856e-01, 7.4340e-02, + 6.0686e-01, 4.7745e-01, 3.0539e-01, 8.1667e-01, + 3.2145e-01, 4.3643e-02, 3.7164e-01, 8.5987e-01, + 5.1085e-01, 3.9723e-01, 7.3196e-01, 3.6231e-01, + 7.0578e-01, 6.5640e-01, 3.3261e-01, 2.7356e-01, + 2.5233e-01, 5.6456e-01, 6.1233e-01, 3.8882e-01, + 5.0591e-01, 9.6537e-01, 2.9281e-01, 4.2849e-01, + 2.4580e-01, 5.4993e-01, 4.3272e-01, 3.2715e-03, + 9.9686e-02, 2.6291e-01, 7.2115e-01, 6.4610e-01, + 6.8384e-01, 3.7948e-01, 8.1704e-02, 5.2375e-01, + 6.6153e-01, 5.7655e-01, 9.9413e-01, 7.5641e-01, + 2.0624e-01, 8.3019e-01, 6.4660e-01, 5.2393e-01, + 8.6354e-01, 8.1274e-01, 3.8680e-01, 6.7270e-01, + 1.0272e-01, 7.1769e-01, 4.3218e-01, 3.3794e-03, + 8.3390e-01, 6.4310e-01, 7.8722e-01, 2.1376e-01, + 1.2048e-01, 3.8322e-01, 5.3423e-01, 7.1435e-01, + 
8.0634e-02, 3.4556e-01, 8.2995e-01, 8.5185e-01, + 9.9964e-01, 1.6223e-03, 2.4923e-01, 3.6135e-01, + 1.0278e-01, 3.1553e-01, 6.2215e-01, 2.3291e-02, + 8.2469e-01, 6.8813e-01, 3.8969e-01, 2.0494e-01, + 5.2271e-01, 3.9839e-01, 9.6730e-01, 1.9877e-01, + 3.4252e-01, 5.2499e-01, 1.6078e-01, 4.4559e-02, + 3.9080e-01, 9.5308e-01, 8.6412e-01, 9.5870e-01, + 5.1206e-02, 6.4203e-01, 6.4852e-01, 7.2702e-01, + 9.4061e-01, 9.9486e-01, 2.4314e-01, 6.0516e-01, + 7.5232e-01, 5.5067e-01, 8.0897e-01, 6.9596e-01, + 1.6588e-01, 3.4356e-01, 6.1360e-01, 4.7207e-01, + 6.6337e-01, 4.2291e-01, 3.8269e-01, 7.6708e-01, + 6.0732e-01, 9.5632e-01, 5.8238e-01, 9.7415e-01, + 7.4368e-02, 3.9985e-01, 8.1989e-01, 5.2322e-01, + 2.8029e-02, 1.9588e-01, 2.4038e-01, 1.5924e-01, + 1.5873e-01, 2.0225e-01, 1.1911e-01, 7.4882e-01, + 6.2579e-01, 5.4242e-01, 6.2275e-01, 1.0356e-01, + 8.6937e-01, 1.9999e-01, 3.6278e-01, 7.6021e-01, + 9.6207e-02, 5.2341e-01, 6.3853e-01, 9.9934e-01, + 1.1806e-01, 3.2399e-01, 3.2023e-01, 1.4012e-01, + 9.3048e-01, 6.8626e-01, 2.3896e-01, 4.5986e-01, + 6.4903e-01, 4.6528e-01, 5.2530e-01, 7.8358e-01, + 5.7127e-01, 6.8708e-01, 4.3009e-01, 9.9883e-01, + 1.5120e-02, 3.4172e-01, 2.6804e-01, 8.5220e-01, + 9.4124e-01, 1.4929e-01, 3.6621e-02, 7.0726e-01, + 7.1630e-01, 8.7582e-01, 4.8970e-01, 2.6406e-01, + 1.5238e-01, 8.7218e-01, 5.7268e-02, 9.6314e-01, + 8.0361e-01, 7.5257e-01, 8.1039e-01, 7.5487e-01, + 2.7375e-01, 6.1534e-01, 7.8576e-01, 4.4655e-01, + 5.2124e-01, 7.9735e-01, 6.0807e-01, 4.4702e-01, + 1.9301e-01, 5.5222e-02, 3.1458e-01, 3.9861e-01, + 1.6485e-01, 9.3511e-01, 6.0067e-01, 3.6715e-01, + 9.2020e-01, 7.4341e-01, 1.5771e-01, 6.1633e-01, + 4.8488e-01, 6.7205e-01, 1.7327e-01, 5.5586e-01, + 7.9531e-01, 4.6991e-02, 8.3507e-01, 8.5850e-01, + 8.4924e-01, 2.5937e-01, 5.3613e-02, 9.2144e-01, + 6.9568e-01, 3.0273e-01, 8.1196e-01, 5.1218e-01, + 9.4006e-01, 7.8376e-01, 5.8712e-02, 7.0592e-03, + 5.0176e-01, 1.3069e-01, 4.6889e-01, 9.0454e-01, + 8.2837e-01, 1.3682e-01, 1.8028e-02, 2.9677e-01, + 7.2965e-02, 1.0027e-01, 7.9009e-01, 7.3391e-01, + 6.1875e-01, 4.2086e-01, 2.9228e-01, 4.3285e-01, + 5.2312e-01, 4.6791e-02, 1.4070e-01, 6.7578e-02, + 8.2689e-01, 2.2397e-01, 3.6294e-01, 7.4357e-01, + 2.0005e-02, 9.3685e-01, 9.5401e-02, 3.6227e-01, + 4.7559e-01, 8.2005e-01, 8.9021e-01, 1.0144e-01, + 2.0500e-01, 1.4629e-01, 7.9197e-01, 7.1978e-01, + 6.2116e-01, 1.0509e-01, 8.8501e-01, 5.2396e-01, + 3.8857e-01, 2.0386e-01, 1.7474e-01, 3.3768e-01, + 8.2283e-01, 5.6083e-01, 3.1505e-01, 9.3114e-01, + 9.7148e-01, 8.7002e-01, 5.5684e-01, 7.1164e-01, + 8.2687e-01, 4.9607e-01, 9.1544e-01, 3.8587e-02, + 5.4751e-01, 8.8256e-01, 2.9869e-01, 4.7488e-01, + 5.8408e-01, 9.1065e-01, 8.4555e-01, 7.0363e-01, + 5.5905e-01, 9.7573e-01, 1.3309e-01, 8.4825e-01, + 9.4575e-01, 1.1327e-01, 9.3295e-01, 1.9130e-02, + 5.5364e-02, 5.7852e-01, 4.7674e-01, 5.9520e-01, + 8.6953e-02, 9.8342e-01, 8.2338e-01, 2.3050e-01, + 9.0447e-02, 7.6427e-01, 6.9691e-01, 2.6394e-01, + 7.5784e-01, 4.9559e-01, 6.2825e-01, 7.4522e-02, + 5.1107e-01, 1.1304e-01, 7.8157e-01, 9.7539e-01, + 3.4088e-01, 2.2997e-01, 1.0902e-01, 6.5399e-01, + 9.8973e-01, 4.7259e-01, 8.9469e-01, 1.1628e-01, + 5.3043e-01, 2.2320e-02, 8.6500e-01, 7.5384e-01, + 7.2027e-01, 8.4837e-01, 8.3077e-01, 1.1058e-01, + 3.5021e-02, 7.7009e-01, 4.2844e-01, 3.1769e-01, + 9.3794e-01, 7.2023e-01, 2.8052e-01, 6.8778e-01, + 3.1839e-02, 1.1138e-01, 5.4302e-01, 8.4319e-01, + 5.3055e-02, 6.3191e-01, 3.2862e-01, 8.3556e-01, + 7.0264e-01, 3.4505e-01, 1.0167e-01, 7.0651e-01, + 4.4482e-01, 7.6390e-01, 3.2450e-01, 1.8771e-01, + 
3.6165e-02, 5.2383e-01, 4.3474e-01, 2.6129e-01, + 6.0815e-01, 1.4995e-01, 8.8191e-01, 8.8865e-01, + 4.2800e-01, 3.6454e-04, 2.9078e-01, 2.3304e-01, + 8.0764e-01, 6.5338e-01, 1.1279e-01, 9.0925e-01, + 7.2542e-01, 2.7958e-01, 7.7309e-01, 4.1984e-01, + 6.3408e-01, 5.0658e-01, 6.6426e-01, 3.6068e-01, + 5.8737e-01, 5.1918e-01, 7.7424e-01, 9.5141e-01, + 4.6963e-02, 9.2598e-01, 1.2805e-02, 9.4582e-01, + 4.1881e-01, 6.2873e-01, 8.5533e-03, 3.8925e-01, + 8.1800e-01, 8.9552e-01, 9.6179e-01, 1.5072e-01, + 8.6834e-02, 9.2337e-01, 3.9199e-01, 7.5084e-01, + 8.4849e-01, 8.9455e-02, 7.9788e-01, 7.3268e-01, + 9.1954e-01, 1.2344e-02, 6.8678e-01, 6.3772e-02, + 5.3177e-01, 3.9728e-01, 1.1637e-01, 3.3620e-01, + 3.9649e-01, 9.0691e-01, 4.2514e-01, 2.8786e-01, + 5.9926e-01, 1.5363e-01, 6.3389e-01, 4.0988e-01, + 2.4228e-01, 3.6812e-01, 6.5290e-01, 8.2280e-01, + 4.5959e-01, 1.3245e-01, 8.0702e-02, 2.6937e-01, + 4.1334e-02, 4.6912e-01, 6.2878e-01, 9.3970e-01, + 1.4751e-01, 9.7979e-01, 8.8617e-01, 2.6783e-01, + 8.1664e-01, 6.1463e-01, 2.6794e-01, 5.7205e-01, + 5.6052e-01, 9.3478e-01, 3.1484e-01, 2.9561e-01, + 1.8499e-01, 4.9045e-01, 7.0567e-01, 5.5256e-01, + 8.4212e-01, 1.0943e-02, 4.7614e-01, 3.5642e-01, + 1.0228e-01, 9.1014e-01, 2.8703e-01, 8.2803e-01, + 3.9363e-01, 7.8274e-01, 2.6727e-01, 5.3601e-01, + 9.4923e-01, 8.2550e-01, 2.7582e-01, 5.1527e-01, + 6.7777e-01, 2.2556e-01, 1.4264e-01, 8.3865e-01, + 1.8537e-01, 5.7133e-01, 9.3149e-01, 7.9399e-01, + 3.9901e-02, 5.0303e-01, 1.0031e-01, 5.4979e-01, + 4.7959e-01, 4.5465e-02, 1.0153e-01, 6.4665e-01, + 6.6263e-02, 8.5004e-02, 4.1010e-01, 2.9905e-01, + 5.4532e-01, 7.6095e-01, 6.3094e-01, 9.3862e-01, + 7.8628e-01, 6.1402e-01, 9.7641e-01, 9.7867e-01, + 4.2284e-01, 3.2599e-01, 1.0796e-01, 8.2963e-01, + 2.3001e-01, 1.3105e-01, 7.4818e-01, 2.3227e-01, + 1.6548e-02, 2.2911e-02, 3.8742e-01, 3.5041e-01, + 3.6706e-02, 1.3530e-01, 3.4437e-01, 9.7773e-01, + 6.1926e-01, 3.4844e-01, 3.6060e-01, 7.5940e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.6534, 0.1395, 0.5723, ..., 0.2846, 0.6527, 0.4839]) +tensor([0.4174, 0.1958, 0.7347, ..., 0.7767, 0.9832, 0.8001]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -754,271 +375,271 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 1.9514954090118408 seconds +Time: 0.019530296325683594 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '285101', '-ss', '10000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.263301372528076} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53762', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.9941279888153076} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7666, 3284, 1699, 7007, 1865, 642, 2301, 7889, 3112, - 4841, 4554, 2643, 3511, 9405, 9949, 4868, 7053, 8979, - 7155, 3080, 3345, 5376, 5682, 4591, 5457, 4012, 9907, - 4608, 8275, 6785, 214, 8256, 6706, 6096, 396, 213, - 9694, 8210, 4776, 4951, 9163, 2885, 6391, 5210, 5362, - 2964, 8727, 2087, 6653, 557, 2807, 404, 7235, 7348, - 5591, 9937, 5151, 9696, 3619, 4512, 6156, 4095, 5236, - 6999, 8669, 5309, 6087, 4118, 2455, 1780, 9742, 3315, - 7998, 8891, 4943, 9849, 3316, 2894, 3974, 2248, 8409, - 9978, 2602, 229, 3004, 5173, 479, 6004, 6080, 4963, - 6862, 9389, 8433, 2783, 2848, 968, 8951, 3760, 8407, - 7534, 2609, 3709, 9648, 2412, 7425, 8619, 7347, 9064, - 4963, 1354, 5818, 7386, 7369, 2381, 5800, 6012, 3532, - 214, 9362, 4731, 8238, 4276, 2221, 3400, 540, 4177, - 2164, 1191, 3257, 9047, 364, 2899, 2221, 8232, 4361, - 1480, 3950, 9412, 570, 6740, 5525, 7995, 856, 1549, - 3242, 5640, 6435, 1568, 3375, 4060, 6793, 1686, 9865, - 9598, 3307, 6572, 3171, 2008, 7637, 4922, 386, 6366, - 7528, 2850, 6318, 2345, 368, 5786, 9710, 7772, 1761, - 6140, 301, 570, 8828, 8741, 9119, 4206, 3780, 4780, - 707, 3068, 8924, 3477, 4065, 1036, 560, 3678, 9660, - 4487, 8282, 6249, 7109, 3781, 7458, 2555, 3159, 5856, - 7260, 2615, 6118, 4364, 9114, 9280, 8926, 3395, 7988, - 2698, 8725, 6447, 7235, 3203, 5300, 4394, 3058, 7417, - 5517, 1817, 6921, 9211, 8306, 7080, 8460, 7810, 7848, - 4752, 270, 8368, 8576, 7374, 8967, 2824, 9004, 8383, - 4024, 4694, 919, 6859, 4607, 3536, 5606, 5137, 6430, - 1226, 8149, 7263, 7513, 1278, 5694, 380, 1738, 433, - 9251, 916, 5723, 4516, 8117, 800, 5078, 9205, 3076, - 2156, 1929, 248, 8896, 6899, 3847, 2048, 8775, 3158, - 9717, 4304, 9305, 9027, 8444, 7211, 3882, 2571, 7542, - 1042, 2864, 4697, 4933, 9597, 2106, 1874, 1663, 8974, - 5845, 9264, 40, 4452, 2566, 3363, 7670, 7586, 7479, - 5981, 1928, 2498, 8107, 6502, 5725, 1280, 4382, 7811, - 4150, 2407, 5646, 7384, 6368, 9330, 4277, 4360, 9293, - 8848, 3141, 3719, 4630, 418, 68, 3077, 304, 5534, - 3069, 4046, 9645, 797, 7332, 2456, 818, 6040, 233, - 1356, 6755, 4249, 1643, 5939, 6192, 5023, 2182, 3038, - 615, 962, 6584, 6465, 4700, 5188, 378, 2580, 7736, - 8469, 7964, 4215, 5350, 5035, 723, 2366, 9255, 3296, - 2487, 9749, 9356, 2120, 2719, 1534, 5352, 4142, 2840, - 3874, 3309, 2282, 4165, 8221, 3151, 7532, 9093, 1914, - 875, 3267, 7484, 5334, 246, 69, 1760, 3158, 9485, - 2968, 8655, 8484, 1925, 6269, 8467, 4053, 8242, 2491, - 9174, 9141, 8207, 3765, 7176, 8536, 4547, 5155, 9600, - 59, 7491, 3224, 3860, 3405, 5155, 5872, 5087, 1030, - 5823, 67, 9402, 2052, 3137, 8203, 5381, 5014, 2445, - 2598, 6064, 583, 6264, 8514, 2132, 8834, 5724, 5040, - 3049, 7546, 5472, 9227, 9155, 4856, 8701, 9805, 8423, - 5471, 9572, 1489, 1146, 2738, 6131, 4467, 3775, 7623, - 3128, 1626, 4245, 5851, 8311, 5249, 9163, 5646, 9700, - 5912, 1064, 3427, 9742, 4822, 1609, 1247, 3225, 1349, - 1112, 7136, 4620, 9398, 180, 852, 8483, 99, 5880, - 6936, 3985, 7199, 43, 619, 1433, 378, 5613, 8778, - 9714, 6327, 2214, 4270, 2616, 7836, 6036, 4038, 1349, - 3609, 500, 8725, 5242, 3213, 7450, 9547, 6518, 3900, - 9820, 6085, 3210, 7810, 5855, 7609, 1890, 2354, 6091, - 8514, 7897, 3931, 2880, 6281, 3385, 5921, 9952, 5094, - 8855, 172, 3302, 3010, 4642, 2734, 9166, 8421, 2234, - 8565, 528, 8176, 6482, 994, 3099, 9724, 6379, 405, - 8321, 239, 6594, 996, 3440, 8976, 435, 2375, 
6431, - 3253, 1630, 7853, 2956, 8895, 986, 6476, 1010, 8489, - 8021, 872, 1473, 1100, 5352, 9179, 2417, 9244, 9167, - 7160, 3376, 6327, 3920, 5098, 7118, 4103, 8332, 3090, - 9060, 6774, 7505, 4316, 2580, 8806, 3234, 5576, 1591, - 9446, 2876, 3861, 9007, 4047, 9387, 1828, 9480, 300, - 5810, 1681, 7603, 9696, 3967, 205, 4720, 6724, 1706, - 1179, 7731, 9912, 9119, 3539, 3641, 2691, 1738, 2069, - 64, 5901, 6907, 7329, 5038, 8087, 2346, 1417, 9802, - 2245, 9442, 3124, 6856, 7605, 3350, 9096, 9607, 6580, - 7026, 4159, 2283, 388, 6926, 8970, 9000, 5902, 3616, - 8251, 2022, 1387, 2040, 3499, 7352, 9723, 4720, 4072, - 3030, 8642, 1561, 3550, 728, 3483, 9957, 2852, 9769, - 530, 8950, 8791, 1868, 3201, 4039, 7668, 6271, 5458, - 8517, 9781, 4046, 3225, 9954, 206, 6894, 7597, 7497, - 6306, 3298, 6872, 2066, 7591, 1890, 8097, 1280, 7096, - 8226, 1359, 216, 3321, 8096, 7997, 7290, 8655, 8538, - 5466, 3483, 6990, 5527, 3778, 5009, 8586, 5007, 7530, - 2615, 6501, 9092, 3603, 3220, 7154, 9702, 5045, 8352, - 2617, 771, 5086, 2751, 4678, 9150, 7949, 9854, 737, - 4795, 2071, 9389, 7313, 9799, 3943, 6232, 1410, 8989, - 7328, 6562, 7366, 5222, 441, 8345, 6712, 3926, 515, - 5956, 5254, 6129, 5268, 1272, 9590, 5597, 1532, 744, - 7243, 1823, 6966, 9821, 5101, 1854, 836, 8195, 8533, - 9772, 6888, 999, 42, 8452, 7406, 756, 2186, 7921, - 7527, 6586, 9178, 9190, 6661, 1985, 2124, 5708, 2885, - 5344, 8420, 6388, 4916, 5751, 5447, 1612, 7115, 4012, - 4211, 4491, 2685, 8469, 3264, 3933, 8307, 2069, 5721, - 6785, 7552, 3662, 9294, 3272, 4972, 4038, 8502, 7978, - 730, 6090, 2443, 3573, 9783, 6945, 8456, 7714, 2278, - 1207, 9072, 1346, 555, 8623, 6821, 2442, 455, 1202, - 8844, 3296, 1101, 8043, 8852, 2122, 3173, 1731, 9938, - 5593, 93, 8956, 6237, 8545, 5229, 9239, 8804, 3713, - 4281, 5382, 8333, 8801, 93, 2212, 5217, 7239, 9312, - 2195, 1969, 8659, 674, 2456, 1219, 684, 5633, 7633, - 1822, 5479, 5138, 2448, 4428, 1533, 6135, 585, 2626, - 1535, 2390, 2292, 3199, 9797, 3083, 7782, 3749, 5972, - 5041, 873, 1762, 6907, 2439, 176, 7886, 8698, 5610, - 4063, 7434, 7641, 5504, 8492, 8226, 4607, 3891, 971, - 446, 5185, 8037, 5578, 2205, 6200, 5210, 7918, 8694, - 8369, 5656, 8140, 720, 6980, 4361, 6048, 5637, 1525, - 3287, 4786, 6431, 419, 584, 7972, 6800, 6611, 3925, - 1361, 345, 5934, 221, 3976, 5263, 273, 1124, 6638, - 9587, 1234, 7547, 9811, 1651, 2418, 9523, 5719, 2357, - 9754, 4924, 9608, 7139, 1769, 2001, 6705, 554, 4744, - 2302, 8692, 6947, 8654, 8683, 9773, 4469, 8661, 3479, - 222, 2813, 8116, 1105, 4667, 9744, 3524, 7923, 7688, - 5748, 4643, 3806, 589, 8275, 7300, 9905, 8566, 3005, - 8277, 3362, 9117, 4507, 4746, 196, 1702, 5992, 1090, - 7587, 5382, 7431, 8949, 1611, 5830, 5183, 8125, 3201, - 6255, 9408, 1748, 1554, 1906, 729, 8466, 4725, 4398, - 3941]), - values=tensor([0.6959, 0.7960, 0.0781, 0.6007, 0.3460, 0.7293, 0.2445, - 0.3377, 0.4176, 0.3625, 0.7235, 0.1442, 0.0335, 0.4983, - 0.5500, 0.1056, 0.9940, 0.8727, 0.8069, 0.4130, 0.1455, - 0.6420, 0.2494, 0.3495, 0.7133, 0.4395, 0.2694, 0.2841, - 0.5549, 0.0490, 0.7986, 0.5242, 0.3692, 0.2460, 0.8859, - 0.0105, 0.6123, 0.7972, 0.2878, 0.9495, 0.0416, 0.7368, - 0.5235, 0.2640, 0.4232, 0.7400, 0.4660, 0.1817, 0.7149, - 0.8459, 0.3355, 0.5095, 0.8904, 0.2582, 0.0507, 0.5007, - 0.7876, 0.1015, 0.4881, 0.3695, 0.9348, 0.8654, 0.8541, - 0.1701, 0.5411, 0.8559, 0.0858, 0.6724, 0.4755, 0.9937, - 0.3688, 0.4551, 0.1705, 0.5343, 0.7652, 0.5023, 0.4761, - 0.8553, 0.5042, 0.1378, 0.3402, 0.0214, 0.3317, 0.7760, - 0.6136, 0.0754, 0.7266, 0.3108, 0.1557, 0.5342, 0.2368, - 0.6255, 
0.6338, 0.2103, 0.3547, 0.7898, 0.3532, 0.6879, - 0.5756, 0.5446, 0.6549, 0.1833, 0.3545, 0.3075, 0.8999, - 0.0866, 0.3178, 0.1771, 0.9904, 0.5584, 0.7474, 0.3985, - 0.7736, 0.7195, 0.1836, 0.1731, 0.6012, 0.6546, 0.5119, - 0.7668, 0.7804, 0.2026, 0.4119, 0.1043, 0.5762, 0.3249, - 0.0350, 0.9469, 0.9597, 0.3663, 0.4396, 0.3526, 0.6112, - 0.2028, 0.6398, 0.7385, 0.7867, 0.3703, 0.9189, 0.5053, - 0.2346, 0.3574, 0.9106, 0.6490, 0.0571, 0.7581, 0.0037, - 0.2000, 0.0078, 0.3345, 0.5987, 0.1644, 0.3401, 0.1628, - 0.6190, 0.0170, 0.3230, 0.7145, 0.0065, 0.2218, 0.4568, - 0.5352, 0.7506, 0.8970, 0.0769, 0.7770, 0.0375, 0.5961, - 0.1851, 0.1482, 0.0403, 0.1972, 0.9194, 0.1678, 0.8901, - 0.8766, 0.0092, 0.2714, 0.5465, 0.2373, 0.1403, 0.0755, - 0.7137, 0.4544, 0.3006, 0.0421, 0.8286, 0.0333, 0.4192, - 0.1514, 0.5585, 0.9974, 0.8611, 0.3646, 0.5144, 0.1233, - 0.0881, 0.9866, 0.2420, 0.9365, 0.3756, 0.2095, 0.4280, - 0.2119, 0.0313, 0.7899, 0.6265, 0.8099, 0.3894, 0.1829, - 0.4932, 0.0535, 0.2941, 0.7543, 0.3779, 0.6784, 0.2573, - 0.6186, 0.1475, 0.2682, 0.6713, 0.2567, 0.7592, 0.7252, - 0.8305, 0.0652, 0.2366, 0.0143, 0.5795, 0.7446, 0.7182, - 0.2430, 0.3788, 0.2324, 0.6863, 0.2975, 0.5231, 0.1994, - 0.6772, 0.0117, 0.2661, 0.4983, 0.2255, 0.1217, 0.5970, - 0.4181, 0.1888, 0.5662, 0.8459, 0.4818, 0.5664, 0.7551, - 0.3425, 0.7495, 0.5635, 0.3037, 0.9253, 0.1746, 0.0949, - 0.1444, 0.1382, 0.6207, 0.2484, 0.6872, 0.6011, 0.8909, - 0.5037, 0.7646, 0.8169, 0.6714, 0.6011, 0.0228, 0.7584, - 0.5712, 0.9573, 0.5293, 0.0187, 0.0769, 0.3750, 0.5318, - 0.5469, 0.0475, 0.6832, 0.1012, 0.3177, 0.3205, 0.9912, - 0.7967, 0.3297, 0.8600, 0.8649, 0.1408, 0.1008, 0.6301, - 0.4827, 0.2363, 0.4056, 0.0155, 0.7202, 0.0029, 0.5561, - 0.7290, 0.5676, 0.5965, 0.5363, 0.0921, 0.1850, 0.8448, - 0.7451, 0.5778, 0.2613, 0.9353, 0.9780, 0.5328, 0.8762, - 0.7506, 0.6058, 0.6343, 0.8670, 0.6461, 0.0050, 0.0322, - 0.7176, 0.9509, 0.0141, 0.2849, 0.4851, 0.7572, 0.2430, - 0.0132, 0.9420, 0.2551, 0.1494, 0.1934, 0.9914, 0.0389, - 0.5433, 0.7196, 0.7758, 0.0818, 0.2780, 0.1470, 0.9575, - 0.0039, 0.7218, 0.1816, 0.4767, 0.9657, 0.5165, 0.5374, - 0.9692, 0.8055, 0.2454, 0.3490, 0.9352, 0.0487, 0.6894, - 0.9618, 0.0152, 0.3404, 0.6613, 0.6038, 0.4468, 0.2839, - 0.1984, 0.0949, 0.4107, 0.3440, 0.6650, 0.9782, 0.7647, - 0.3472, 0.8008, 0.6515, 0.2253, 0.1516, 0.0721, 0.0532, - 0.1489, 0.2701, 0.4286, 0.6556, 0.2800, 0.4837, 0.1700, - 0.4031, 0.7582, 0.2764, 0.1452, 0.4317, 0.8870, 0.8904, - 0.2127, 0.1224, 0.0877, 0.6062, 0.7479, 0.8289, 0.8979, - 0.5176, 0.3928, 0.1454, 0.7837, 0.1713, 0.5346, 0.7913, - 0.1056, 0.5996, 0.7813, 0.5215, 0.9208, 0.9514, 0.2525, - 0.4305, 0.2552, 0.7349, 0.3762, 0.7076, 0.1290, 0.1830, - 0.8907, 0.4787, 0.2419, 0.4726, 0.6765, 0.2643, 0.8126, - 0.8757, 0.7952, 0.1205, 0.1298, 0.3806, 0.3136, 0.8469, - 0.0054, 0.5516, 0.9247, 0.4707, 0.9515, 0.8050, 0.4300, - 0.7760, 0.9682, 0.9549, 0.3683, 0.9720, 0.3750, 0.1016, - 0.1056, 0.6301, 0.1820, 0.6373, 0.4706, 0.8360, 0.8820, - 0.8684, 0.5608, 0.4950, 0.7219, 0.7477, 0.3138, 0.4091, - 0.3672, 0.6820, 0.1598, 0.4262, 0.8790, 0.5580, 0.7601, - 0.6733, 0.7879, 0.3866, 0.8117, 0.6549, 0.4184, 0.9377, - 0.0728, 0.7220, 0.5621, 0.8045, 0.9104, 0.1723, 0.5263, - 0.7555, 0.7751, 0.2045, 0.2067, 0.3395, 0.0619, 0.6870, - 0.6295, 0.5145, 0.4071, 0.2642, 0.1755, 0.1193, 0.9114, - 0.5765, 0.1704, 0.3481, 0.6444, 0.1809, 0.0182, 0.0117, - 0.5466, 0.1851, 0.8523, 0.0951, 0.8918, 0.4279, 0.3555, - 0.1480, 0.9159, 0.1610, 0.2426, 0.6025, 0.7344, 0.3367, - 0.2722, 0.6946, 0.3251, 
0.3742, 0.1985, 0.3410, 0.7549, - 0.7152, 0.6307, 0.2442, 0.6188, 0.4939, 0.3315, 0.6606, - 0.3168, 0.5301, 0.3465, 0.9986, 0.1887, 0.1622, 0.9650, - 0.5986, 0.8953, 0.8288, 0.8866, 0.2675, 0.7017, 0.2466, - 0.8798, 0.3267, 0.8028, 0.3919, 0.3126, 0.5504, 0.3778, - 0.7414, 0.0821, 0.0301, 0.4972, 0.4197, 0.6222, 0.4811, - 0.0587, 0.6802, 0.1914, 0.3226, 0.5795, 0.4956, 0.3432, - 0.0689, 0.7911, 0.3203, 0.5275, 0.7748, 0.2775, 0.7469, - 0.8369, 0.2113, 0.0507, 0.9763, 0.3517, 0.6939, 0.7093, - 0.5765, 0.2753, 0.5658, 0.3861, 0.2320, 0.5725, 0.7333, - 0.9639, 0.9626, 0.9957, 0.2482, 0.2599, 0.2589, 0.0011, - 0.5195, 0.4786, 0.6863, 0.9629, 0.0159, 0.8498, 0.9892, - 0.4985, 0.5712, 0.8487, 0.3862, 0.6221, 0.2142, 0.4490, - 0.8922, 0.7035, 0.5839, 0.3975, 0.4180, 0.6609, 0.9340, - 0.0330, 0.6173, 0.9389, 0.6227, 0.3648, 0.5424, 0.3871, - 0.5239, 0.6250, 0.8573, 0.0643, 0.3775, 0.5397, 0.5294, - 0.8842, 0.4971, 0.4772, 0.0587, 0.4167, 0.3990, 0.8149, - 0.3392, 0.7700, 0.3194, 0.6137, 0.4234, 0.6017, 0.1367, - 0.5706, 0.9656, 0.6379, 0.8556, 0.0899, 0.6243, 0.0430, - 0.0444, 0.2529, 0.6988, 0.6018, 0.0937, 0.9108, 0.0874, - 0.8000, 0.6934, 0.6987, 0.3747, 0.5361, 0.5595, 0.4715, - 0.8963, 0.2831, 0.7115, 0.8126, 0.3770, 0.9565, 0.0805, - 0.5965, 0.4760, 0.3946, 0.5740, 0.5718, 0.8089, 0.3195, - 0.6564, 0.2940, 0.1074, 0.1914, 0.3616, 0.2781, 0.5799, - 0.2506, 0.9786, 0.8031, 0.2290, 0.7132, 0.8597, 0.1502, - 0.5501, 0.8473, 0.0961, 0.3091, 0.2671, 0.6716, 0.2749, - 0.8922, 0.9331, 0.7831, 0.3662, 0.4411, 0.0261, 0.1996, - 0.1425, 0.1518, 0.1002, 0.5879, 0.8248, 0.4447, 0.4024, - 0.2709, 0.9656, 0.2968, 0.8687, 0.7429, 0.2038, 0.6212, - 0.5590, 0.3980, 0.6056, 0.7683, 0.9384, 0.6645, 0.3018, - 0.9914, 0.4717, 0.9312, 0.2458, 0.6373, 0.3662, 0.7338, - 0.8539, 0.2373, 0.8468, 0.9313, 0.9985, 0.9108, 0.2928, - 0.5675, 0.4918, 0.6788, 0.6532, 0.8578, 0.9756, 0.7977, - 0.7426, 0.3820, 0.0559, 0.7421, 0.6276, 0.5143, 0.1295, - 0.1380, 0.3699, 0.7837, 0.8925, 0.0521, 0.7792, 0.8213, - 0.9725, 0.3183, 0.1008, 0.4133, 0.0178, 0.2000, 0.5122, - 0.9307, 0.7232, 0.2629, 0.4049, 0.1523, 0.9357, 0.6589, - 0.3894, 0.5017, 0.4405, 0.3895, 0.3989, 0.4152, 0.3842, - 0.2309, 0.0256, 0.1505, 0.6988, 0.2384, 0.4692, 0.2583, - 0.3660, 0.9101, 0.3800, 0.5037, 0.1464, 0.3283, 0.7269, - 0.8936, 0.2059, 0.3661, 0.4488, 0.0114, 0.3810, 0.0566, - 0.9065, 0.6607, 0.4611, 0.6537, 0.1070, 0.8951, 0.1591, - 0.3640, 0.0681, 0.1975, 0.8229, 0.3451, 0.9813, 0.1129, - 0.2130, 0.1791, 0.6985, 0.3325, 0.8236, 0.5894, 0.8602, - 0.7245, 0.8692, 0.0849, 0.3757, 0.4767, 0.2866, 0.8776, - 0.1222, 0.4554, 0.1437, 0.8286, 0.4194, 0.9171, 0.1793, - 0.4714, 0.7967, 0.3271, 0.9508, 0.9596, 0.9553, 0.2671, - 0.5226, 0.2545, 0.2904, 0.3931, 0.1604, 0.9966, 0.9778, - 0.1750, 0.2027, 0.1076, 0.4041, 0.9413, 0.1702, 0.9720, - 0.8515, 0.2862, 0.5026, 0.1329, 0.7104, 0.6485, 0.6173, - 0.1418, 0.0456, 0.2708, 0.2491, 0.4879, 0.6711, 0.3460, - 0.2430, 0.0081, 0.6377, 0.4017, 0.6538, 0.0909, 0.0154, - 0.6563, 0.2168, 0.7672, 0.2347, 0.2597, 0.5759, 0.0490, - 0.0102, 0.2013, 0.8005, 0.6998, 0.2945, 0.4318, 0.5007, - 0.1644, 0.7497, 0.5000, 0.5912, 0.9818, 0.5953, 0.9114, - 0.1285, 0.8707, 0.3809, 0.7743, 0.4282, 0.7606, 0.5688, - 0.3152, 0.8989, 0.1583, 0.8766, 0.6326, 0.8886, 0.2620, - 0.3086, 0.3602, 0.7866, 0.9547, 0.2244, 0.2724, 0.9583, - 0.7470, 0.9623, 0.1429, 0.8508, 0.5799, 0.9955, 0.6647, - 0.1399, 0.1801, 0.9233, 0.2772, 0.2445, 0.0531, 0.7452, - 0.0969, 0.7391, 0.2375, 0.8335, 0.2274, 0.5999, 0.0299, - 0.1018, 0.7622, 0.6825, 0.5068, 0.5262, 
0.2390, 0.3450, - 0.7486, 0.6337, 0.3874, 0.1886, 0.2751, 0.8412, 0.0794, - 0.2398, 0.8937, 0.9405, 0.8703, 0.2503, 0.9532, 0.7538, - 0.3676, 0.7234, 0.4557, 0.9672, 0.5687, 0.6227, 0.7700, - 0.6581, 0.6226, 0.6058, 0.8459, 0.4844, 0.8195, 0.3510, - 0.5321, 0.7146, 0.4273, 0.4330, 0.8640, 0.8088, 0.5159, - 0.8375, 0.9589, 0.0615, 0.2675, 0.6890, 0.9951, 0.0718, - 0.5084, 0.2519, 0.6662, 0.9841, 0.4845, 0.3961]), + col_indices=tensor([3017, 6525, 28, 24, 3613, 511, 2077, 9343, 1981, + 2675, 3814, 2518, 3592, 7187, 8177, 1609, 2633, 8891, + 8208, 2654, 3721, 325, 9626, 9666, 7654, 4126, 8546, + 343, 6234, 2907, 1277, 8538, 1341, 6982, 5535, 2735, + 4763, 125, 4086, 4100, 9950, 6695, 2647, 9028, 6623, + 9386, 620, 3592, 5539, 6767, 1794, 9464, 5125, 5152, + 1491, 3070, 5768, 7821, 8521, 6246, 3608, 4797, 3181, + 8724, 2229, 4477, 4570, 7312, 5082, 5170, 711, 5095, + 9319, 2603, 5108, 1722, 6812, 9225, 4313, 9423, 4221, + 4617, 2777, 2957, 7778, 792, 2569, 294, 6465, 4588, + 2237, 6047, 2870, 2844, 1818, 4818, 3605, 4362, 1894, + 4298, 6163, 3226, 9137, 9510, 358, 1792, 8418, 7898, + 6869, 9364, 3218, 6282, 3742, 4692, 5470, 3051, 6701, + 549, 5500, 9879, 8770, 700, 6254, 8525, 2783, 9471, + 2499, 4443, 9518, 7061, 9344, 2867, 534, 2853, 5833, + 62, 3163, 6069, 1861, 5628, 8713, 9621, 9799, 9599, + 5524, 4671, 8800, 3406, 9182, 2190, 503, 5312, 7247, + 108, 5162, 9847, 9303, 1724, 7280, 280, 6945, 8532, + 7398, 5906, 6174, 917, 5369, 7903, 297, 1846, 5559, + 2098, 2473, 2679, 6172, 118, 5785, 8722, 8025, 732, + 28, 9033, 2229, 9614, 9805, 4751, 52, 9662, 5287, + 3074, 8490, 2683, 7972, 4740, 960, 5117, 3007, 7200, + 2145, 7084, 922, 3319, 8132, 3356, 963, 3323, 2604, + 9375, 3774, 7748, 3069, 4337, 3104, 5040, 9132, 7887, + 1736, 3895, 9880, 8928, 9105, 1827, 140, 1487, 5977, + 4223, 9192, 9230, 7170, 6400, 9089, 5588, 7501, 6582, + 1473, 2224, 4943, 6800, 9798, 1589, 1848, 8901, 5244, + 1687, 7059, 549, 6690, 594, 3275, 50, 2262, 7509, + 4998, 5923, 7762, 7873, 7048, 767, 5886, 9526, 9325, + 9283, 3137, 2952, 1200, 6264, 9812, 7137, 3266, 8892, + 3589, 5083, 7229, 8601, 8443, 1426, 1435, 8787, 158, + 899, 8335, 6631, 5469, 6114, 1489, 9417, 4142, 4494, + 1969, 9761, 8580, 5421, 8075, 3214, 6768, 3925, 4761, + 6092, 8138, 7757, 7193, 7009, 8188, 3802, 3399, 3997, + 2104, 1338, 3951, 7823, 9324, 3128, 8753, 7905, 3285, + 8234, 638, 8378, 7299, 4833, 5865, 921, 6284, 2635, + 703, 8291, 5995, 7675, 6888, 5153, 1250, 8132, 1259, + 9413, 7150, 8115, 4847, 1054, 2093, 1552, 1686, 2419, + 8462, 6170, 7929, 5348, 2829, 7222, 2608, 3885, 8847, + 3219, 6860, 4529, 189, 3926, 7008, 8567, 8358, 9485, + 3157, 9546, 2907, 1744, 3907, 923, 5943, 7272, 1716, + 5797, 6225, 7896, 1233, 5347, 7954, 4805, 6623, 1538, + 4405, 8767, 1061, 312, 6438, 8714, 4621, 9826, 9497, + 8825, 8760, 8220, 2821, 2749, 9979, 1089, 6252, 2955, + 5640, 5836, 2584, 5075, 4075, 8342, 9970, 2919, 9855, + 1612, 4375, 8337, 7206, 5014, 198, 1574, 9968, 8967, + 1255, 3566, 477, 112, 7216, 8755, 6436, 3348, 3106, + 271, 7241, 4242, 7076, 996, 3898, 3552, 5621, 9141, + 7884, 9176, 3841, 1828, 1646, 5619, 2159, 3453, 6294, + 264, 7985, 9663, 4971, 9721, 5164, 5815, 4635, 9706, + 6743, 8738, 2321, 7232, 2072, 8935, 3259, 235, 8062, + 6386, 8466, 7703, 958, 6870, 6378, 2013, 5363, 4804, + 2901, 5281, 245, 7348, 33, 6406, 9808, 9231, 1147, + 2971, 5481, 9666, 9263, 8519, 4492, 5589, 8436, 5821, + 4660, 3869, 7092, 7957, 8086, 7690, 5631, 2271, 3737, + 3129, 5383, 7311, 9240, 3952, 7668, 1857, 7274, 904, + 1059, 3514, 8292, 2400, 6965, 8327, 
9962, 4672, 6954, + 1539, 4140, 5912, 1212, 4257, 989, 8821, 6253, 7583, + 198, 593, 1543, 8222, 9857, 2765, 4495, 1300, 3735, + 435, 6718, 9670, 7230, 136, 2268, 1703, 7143, 8025, + 6375, 9671, 8618, 8170, 131, 3676, 8663, 258, 1397, + 1164, 8598, 648, 2945, 5492, 8130, 1070, 11, 2176, + 3626, 5349, 1385, 2743, 2843, 4686, 1546, 6975, 5883, + 3087, 3418, 4348, 160, 8024, 1828, 5561, 4603, 4318, + 6835, 5997, 6618, 7557, 8134, 8173, 4450, 6737, 8169, + 9970, 5886, 4123, 8377, 4010, 5814, 942, 2386, 3378, + 6689, 7653, 1827, 1563, 5570, 1553, 4924, 3945, 4340, + 1185, 9582, 5563, 6958, 3323, 5917, 8601, 4545, 9818, + 6458, 9431, 8270, 4381, 2988, 5272, 2575, 1491, 3996, + 4347, 6606, 9960, 6210, 6296, 8836, 1888, 2207, 7915, + 9315, 4812, 2396, 6723, 9680, 2433, 1615, 9255, 7643, + 4050, 114, 1302, 6327, 4496, 8071, 1290, 4626, 2923, + 7328, 5021, 3259, 1143, 5739, 2333, 2227, 5658, 1089, + 8665, 6252, 5851, 9332, 1317, 2965, 3120, 1788, 1832, + 8569, 702, 5149, 8463, 382, 7805, 4566, 2218, 9171, + 9944, 2509, 7858, 8093, 9300, 6417, 3886, 3681, 4310, + 5640, 6047, 5327, 41, 939, 1431, 3468, 3774, 4037, + 2667, 3699, 8821, 1295, 2588, 8820, 4836, 8093, 9270, + 6380, 2270, 3194, 144, 746, 1243, 1579, 1349, 1123, + 1790, 2594, 8724, 2636, 359, 2884, 3039, 750, 8496, + 6354, 6046, 6105, 2116, 2766, 3077, 9878, 1532, 5253, + 7987, 9516, 5569, 3622, 8204, 4170, 2851, 4813, 1595, + 4050, 7523, 4099, 2104, 1902, 6175, 3253, 7821, 2983, + 9194, 437, 7973, 6120, 6374, 9958, 4821, 9823, 709, + 5878, 4639, 8171, 5667, 5420, 7480, 1399, 3234, 9138, + 7644, 5614, 4005, 5956, 4655, 3784, 810, 8327, 3165, + 5164, 3785, 5663, 4563, 3851, 6097, 1925, 1045, 4104, + 5370, 7188, 4191, 586, 9062, 1038, 4353, 5984, 5573, + 2508, 4171, 4288, 6172, 1837, 3190, 6276, 9516, 8394, + 1990, 3566, 6641, 1318, 8729, 7971, 963, 5115, 4891, + 746, 1047, 6412, 6823, 3824, 4678, 6494, 6928, 7675, + 3129, 3286, 4921, 929, 8386, 8079, 4445, 8466, 565, + 1207, 1342, 3019, 3276, 2813, 3143, 1685, 4374, 2422, + 8196, 8378, 2499, 8037, 9099, 4450, 3604, 459, 7804, + 9696, 657, 4975, 7312, 1499, 6023, 4858, 541, 1495, + 1562, 7862, 8909, 2533, 872, 9556, 4959, 3010, 7336, + 490, 195, 2949, 2152, 6440, 9407, 2927, 9332, 2564, + 6448, 9536, 2849, 2968, 2234, 4166, 5509, 2929, 7878, + 775, 7417, 7559, 28, 9004, 4685, 1316, 4295, 9983, + 2905, 6012, 7634, 8010, 2605, 6861, 909, 6110, 7088, + 9354, 563, 5337, 7788, 6381, 9326, 5819, 5496, 5034, + 1898, 6626, 3502, 2435, 5287, 5082, 7445, 1999, 8418, + 8435, 6772, 1057, 3473, 7675, 3597, 3297, 2048, 3313, + 1144, 3932, 9686, 3864, 7127, 5666, 6192, 3082, 5777, + 8710, 8592, 4599, 8217, 5669, 7525, 2748, 1716, 8993, + 2527, 8016, 1510, 2622, 9804, 9795, 562, 8967, 1840, + 7315, 7406, 9468, 1272, 3286, 2524, 6716, 4155, 7918, + 1345, 4808, 5071, 4023, 6670, 551, 4142, 52, 1156, + 202, 5844, 9030, 7221, 7721, 7179, 6727, 4502, 5454, + 7710, 428, 6999, 6019, 5058, 5064, 6086, 6304, 5493, + 6229, 6126, 7296, 9430, 2240, 2275, 2824, 644, 7215, + 7156]), + values=tensor([0.4658, 0.9270, 0.5490, 0.5664, 0.9467, 0.0273, 0.3687, + 0.5148, 0.1792, 0.2224, 0.5315, 0.1442, 0.4447, 0.5133, + 0.9765, 0.3152, 0.5951, 0.0062, 0.6802, 0.4787, 0.8011, + 0.7302, 0.6825, 0.0093, 0.8127, 0.4729, 0.6237, 0.1471, + 0.0997, 0.0059, 0.8515, 0.2133, 0.1282, 0.0739, 0.8972, + 0.4136, 0.0293, 0.8080, 0.1319, 0.1439, 0.0081, 0.5090, + 0.0536, 0.5635, 0.8032, 0.8099, 0.6537, 0.0730, 0.7060, + 0.5633, 0.7581, 0.4032, 0.4497, 0.3382, 0.8007, 0.8440, + 0.0640, 0.9379, 0.2532, 0.7521, 0.8116, 0.1231, 0.4688, + 0.1014, 
0.3135, 0.2680, 0.8085, 0.3528, 0.4832, 0.1234, + 0.7331, 0.6661, 0.4684, 0.6604, 0.9194, 0.7040, 0.8534, + 0.8945, 0.6165, 0.2059, 0.9799, 0.6705, 0.7486, 0.1136, + 0.5135, 0.6040, 0.7904, 0.9107, 0.4826, 0.1377, 0.3000, + 0.3347, 0.0268, 0.8130, 0.9601, 0.9985, 0.0244, 0.1425, + 0.0659, 0.1727, 0.5276, 0.2948, 0.9156, 0.7379, 0.6327, + 0.8973, 0.2955, 0.3767, 0.8936, 0.8826, 0.4752, 0.6822, + 0.2934, 0.6979, 0.1928, 0.6380, 0.0229, 0.0190, 0.0822, + 0.1070, 0.3340, 0.7901, 0.4474, 0.5430, 0.0677, 0.6342, + 0.4259, 0.5601, 0.4038, 0.3170, 0.9036, 0.6650, 0.2453, + 0.1882, 0.6635, 0.4837, 0.1116, 0.6044, 0.5844, 0.0594, + 0.5447, 0.6317, 0.8995, 0.5180, 0.3047, 0.7088, 0.0165, + 0.8860, 0.5332, 0.9713, 0.3607, 0.0440, 0.7790, 0.3922, + 0.9995, 0.0019, 0.6340, 0.2705, 0.4705, 0.4874, 0.5527, + 0.6054, 0.8495, 0.6730, 0.4685, 0.3425, 0.6567, 0.6535, + 0.7722, 0.7787, 0.1481, 0.6840, 0.6216, 0.0623, 0.9740, + 0.7622, 0.8784, 0.7484, 0.6001, 0.4961, 0.0240, 0.0204, + 0.8633, 0.1443, 0.9868, 0.8662, 0.4203, 0.1556, 0.1537, + 0.8208, 0.3737, 0.9026, 0.5644, 0.7591, 0.0461, 0.2373, + 0.9137, 0.0193, 0.7276, 0.5926, 0.6514, 0.2656, 0.8533, + 0.3861, 0.0833, 0.2941, 0.9189, 0.3001, 0.1377, 0.9873, + 0.6463, 0.9792, 0.6549, 0.1544, 0.3919, 0.6413, 0.8828, + 0.0314, 0.9964, 0.4246, 0.6474, 0.5815, 0.2231, 0.8021, + 0.6237, 0.2034, 0.7623, 0.3836, 0.2860, 0.8203, 0.7801, + 0.0973, 0.4525, 0.3874, 0.2251, 0.7039, 0.9267, 0.0409, + 0.6409, 0.3719, 0.9407, 0.0841, 0.9175, 0.7810, 0.0298, + 0.3496, 0.1179, 0.5229, 0.4784, 0.5349, 0.2567, 0.3711, + 0.4227, 0.5798, 0.6057, 0.6297, 0.0531, 0.2666, 0.8683, + 0.3069, 0.8094, 0.7765, 0.7275, 0.5282, 0.2447, 0.1244, + 0.8161, 0.4764, 0.6404, 0.9573, 0.3589, 0.9508, 0.1070, + 0.3943, 0.8685, 0.3093, 0.1782, 0.4598, 0.1793, 0.6449, + 0.3879, 0.6837, 0.2053, 0.7864, 0.1555, 0.9447, 0.3140, + 0.3562, 0.8878, 0.3690, 0.1968, 0.0213, 0.7780, 0.1362, + 0.8874, 0.5695, 0.8913, 0.5893, 0.4487, 0.1703, 0.3019, + 0.4180, 0.8376, 0.5071, 0.2915, 0.4153, 0.8636, 0.1962, + 0.4795, 0.0441, 0.1997, 0.1331, 0.2398, 0.2421, 0.9247, + 0.4866, 0.1045, 0.6710, 0.6637, 0.2824, 0.1935, 0.8970, + 0.8809, 0.8849, 0.1387, 0.3534, 0.9388, 0.3924, 0.1111, + 0.2107, 0.5125, 0.1255, 0.3357, 0.6101, 0.0588, 0.2270, + 0.4589, 0.5225, 0.8822, 0.4568, 0.4541, 0.3673, 0.6895, + 0.3405, 0.4459, 0.2832, 0.9914, 0.7111, 0.8177, 0.6897, + 0.9745, 0.9038, 0.2610, 0.7419, 0.6419, 0.6219, 0.7057, + 0.8928, 0.8297, 0.8534, 0.9111, 0.4132, 0.4025, 0.0413, + 0.0027, 0.3796, 0.5498, 0.4564, 0.0574, 0.4767, 0.2362, + 0.2260, 0.1677, 0.2243, 0.2712, 0.3503, 0.7225, 0.2351, + 0.7172, 0.7637, 0.4307, 0.9831, 0.6868, 0.8843, 0.7751, + 0.5843, 0.1912, 0.5561, 0.1069, 0.2612, 0.4789, 0.5445, + 0.9592, 0.2543, 0.3663, 0.7849, 0.7526, 0.9587, 0.6559, + 0.5205, 0.2149, 0.0696, 0.0231, 0.7235, 0.5479, 0.1070, + 0.1785, 0.1437, 0.3983, 0.2482, 0.9706, 0.9861, 0.9049, + 0.8366, 0.1358, 0.3228, 0.7937, 0.1255, 0.3342, 0.7089, + 0.7727, 0.3914, 0.8876, 0.9441, 0.6583, 0.6166, 0.1400, + 0.3536, 0.2608, 0.5951, 0.2415, 0.6768, 0.2513, 0.8661, + 0.6152, 0.7759, 0.3983, 0.2024, 0.0463, 0.9265, 0.1147, + 0.6857, 0.4634, 0.4162, 0.2533, 0.3638, 0.4190, 0.7810, + 0.3877, 0.9652, 0.9534, 0.5941, 0.8275, 0.6836, 0.5207, + 0.6464, 0.6842, 0.8917, 0.4933, 0.0258, 0.8420, 0.0482, + 0.6634, 0.0617, 0.8271, 0.6414, 0.6519, 0.2043, 0.8593, + 0.7451, 0.1994, 0.3969, 0.7583, 0.7018, 0.3403, 0.0454, + 0.3834, 0.0657, 0.1787, 0.0031, 0.7500, 0.6098, 0.5618, + 0.7078, 0.3698, 0.1777, 0.2119, 0.8224, 0.6315, 0.0443, + 0.3275, 0.8441, 0.8362, 
0.2077, 0.1283, 0.0729, 0.6541, + 0.4056, 0.3429, 0.4087, 0.6412, 0.9957, 0.5052, 0.3058, + 0.6131, 0.4482, 0.7311, 0.4849, 0.8557, 0.2864, 0.7378, + 0.8314, 0.5412, 0.5503, 0.4478, 0.8842, 0.6391, 0.1029, + 0.4805, 0.1758, 0.4669, 0.4469, 0.3488, 0.0367, 0.0410, + 0.7396, 0.0879, 0.6424, 0.2920, 0.4366, 0.0294, 0.1272, + 0.2395, 0.3911, 0.8344, 0.5476, 0.3737, 0.8843, 0.6396, + 0.4827, 0.1382, 0.9810, 0.2164, 0.6001, 0.9513, 0.1294, + 0.1069, 0.3102, 0.0689, 0.9482, 0.7489, 0.0116, 0.8200, + 0.5082, 0.4683, 0.8662, 0.5692, 0.7622, 0.5110, 0.4814, + 0.2753, 0.8573, 0.0744, 0.1622, 0.2648, 0.6746, 0.3785, + 0.0111, 0.8476, 0.3978, 0.5884, 0.5899, 0.5490, 0.0583, + 0.2957, 0.3061, 0.0426, 0.0487, 0.3074, 0.5200, 0.0843, + 0.5315, 0.8763, 0.7982, 0.5827, 0.5094, 0.8024, 0.6666, + 0.6665, 0.9386, 0.1689, 0.0077, 0.6749, 0.6850, 0.5247, + 0.9956, 0.7889, 0.7216, 0.4705, 0.6236, 0.0946, 0.5038, + 0.1530, 0.7348, 0.7342, 0.3383, 0.4565, 0.2888, 0.3342, + 0.0713, 0.7769, 0.5812, 0.3484, 0.1290, 0.1278, 0.2420, + 0.1885, 0.6706, 0.4659, 0.7411, 0.4491, 0.8411, 0.3267, + 0.2326, 0.2227, 0.9353, 0.8981, 0.7944, 0.5231, 0.9386, + 0.3370, 0.1551, 0.2120, 0.0043, 0.6096, 0.3259, 0.0676, + 0.8205, 0.2308, 0.1255, 0.5308, 0.2718, 0.9880, 0.3003, + 0.7498, 0.8631, 0.0693, 0.6604, 0.2453, 0.8982, 0.1108, + 0.3505, 0.2345, 0.3182, 0.2311, 0.8986, 0.7388, 0.8421, + 0.8756, 0.7090, 0.7166, 0.4911, 0.5885, 0.8511, 0.1037, + 0.2988, 0.4383, 0.0887, 0.1277, 0.2984, 0.3893, 0.0778, + 0.9634, 0.2267, 0.5031, 0.6240, 0.7679, 0.6331, 0.4827, + 0.1686, 0.8944, 0.8967, 0.9982, 0.0123, 0.1139, 0.9882, + 0.2802, 0.6838, 0.0050, 0.6512, 0.0341, 0.1465, 0.6131, + 0.6526, 0.8094, 0.9845, 0.7225, 0.2824, 0.1967, 0.0838, + 0.3967, 0.9265, 0.2471, 0.5886, 0.5597, 0.0322, 0.0602, + 0.0296, 0.3152, 0.5806, 0.3540, 0.1364, 0.2465, 0.0481, + 0.9054, 0.4020, 0.4646, 0.6669, 0.9666, 0.9322, 0.2825, + 0.6623, 0.0243, 0.6081, 0.2479, 0.4742, 0.6375, 0.3096, + 0.9206, 0.9436, 0.9511, 0.1787, 0.6827, 0.9171, 0.8117, + 0.3415, 0.6329, 0.3421, 0.7824, 0.1833, 0.8064, 0.3602, + 0.2387, 0.8481, 0.5893, 0.4878, 0.9482, 0.8424, 0.2857, + 0.3799, 0.5910, 0.0186, 0.3411, 0.5488, 0.9465, 0.1227, + 0.0764, 0.1977, 0.7671, 0.1037, 0.3711, 0.6139, 0.9592, + 0.4451, 0.7680, 0.8791, 0.4279, 0.3947, 0.5403, 0.5523, + 0.9111, 0.2238, 0.4612, 0.0473, 0.8741, 0.2792, 0.4863, + 0.4417, 0.1727, 0.5207, 0.7036, 0.8263, 0.0685, 0.5237, + 0.9832, 0.8605, 0.9291, 0.0733, 0.5975, 0.3205, 0.6822, + 0.7343, 0.0285, 0.3679, 0.1022, 0.9092, 0.6772, 0.5990, + 0.8805, 0.3345, 0.1082, 0.5831, 0.7243, 0.0043, 0.6104, + 0.5221, 0.2665, 0.6547, 0.5966, 0.5735, 0.6547, 0.7912, + 0.4232, 0.4892, 0.2963, 0.8855, 0.8826, 0.8135, 0.8936, + 0.3412, 0.6738, 0.6646, 0.2358, 0.4376, 0.5636, 0.7424, + 0.3569, 0.5638, 0.8467, 0.8789, 0.4973, 0.2098, 0.7970, + 0.5817, 0.9466, 0.4594, 0.3389, 0.0539, 0.4635, 0.0705, + 0.7166, 0.6538, 0.4355, 0.7811, 0.6070, 0.9496, 0.3375, + 0.4482, 0.8927, 0.4858, 0.7911, 0.3411, 0.7049, 0.8307, + 0.9476, 0.6876, 0.1934, 0.9667, 0.4999, 0.6343, 0.2795, + 0.6768, 0.1129, 0.4401, 0.9022, 0.0740, 0.4167, 0.6813, + 0.5662, 0.6817, 0.9802, 0.8007, 0.6661, 0.3594, 0.9923, + 0.4568, 0.4293, 0.7070, 0.9026, 0.7849, 0.3776, 0.7445, + 0.2091, 0.2842, 0.6006, 0.7205, 0.1956, 0.7626, 0.0090, + 0.1955, 0.6769, 0.6449, 0.6677, 0.9844, 0.9060, 0.4388, + 0.0320, 0.5383, 0.1067, 0.0816, 0.6384, 0.6027, 0.7422, + 0.2486, 0.7529, 0.0022, 0.6737, 0.9765, 0.1942, 0.0687, + 0.4339, 0.0010, 0.3537, 0.5434, 0.9251, 0.5814, 0.3943, + 0.0832, 0.2526, 0.6999, 0.8348, 0.2505, 
0.3392, 0.0236, + 0.8039, 0.1209, 0.5745, 0.9476, 0.2789, 0.6989, 0.6458, + 0.8753, 0.3179, 0.5945, 0.8746, 0.3311, 0.9774, 0.4684, + 0.9539, 0.4889, 0.5601, 0.7761, 0.0460, 0.5329, 0.8928, + 0.2310, 0.4027, 0.7458, 0.7100, 0.9267, 0.4303, 0.2954, + 0.2504, 0.4169, 0.0397, 0.6713, 0.2575, 0.8986, 0.0728, + 0.6795, 0.8951, 0.8317, 0.1696, 0.1261, 0.4741, 0.1195, + 0.2846, 0.6550, 0.2205, 0.6540, 0.4671, 0.0459, 0.6185, + 0.0039, 0.4488, 0.2437, 0.2279, 0.0034, 0.0921, 0.1372, + 0.0296, 0.1398, 0.6599, 0.0791, 0.6720, 0.7434, 0.5575, + 0.6773, 0.4435, 0.7117, 0.8455, 0.4233, 0.3456, 0.5199, + 0.6632, 0.4549, 0.9847, 0.6801, 0.4013, 0.0164]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4905, 0.9858, 0.4305, ..., 0.2183, 0.4525, 0.0992]) +tensor([0.8565, 0.5607, 0.9219, ..., 0.4382, 0.2051, 0.0014]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1026,268 +647,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.263301372528076 seconds +Time: 1.9941279888153076 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '283081', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.251019954681396} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7666, 3284, 1699, 7007, 1865, 642, 2301, 7889, 3112, - 4841, 4554, 2643, 3511, 9405, 9949, 4868, 7053, 8979, - 7155, 3080, 3345, 5376, 5682, 4591, 5457, 4012, 9907, - 4608, 8275, 6785, 214, 8256, 6706, 6096, 396, 213, - 9694, 8210, 4776, 4951, 9163, 2885, 6391, 5210, 5362, - 2964, 8727, 2087, 6653, 557, 2807, 404, 7235, 7348, - 5591, 9937, 5151, 9696, 3619, 4512, 6156, 4095, 5236, - 6999, 8669, 5309, 6087, 4118, 2455, 1780, 9742, 3315, - 7998, 8891, 4943, 9849, 3316, 2894, 3974, 2248, 8409, - 9978, 2602, 229, 3004, 5173, 479, 6004, 6080, 4963, - 6862, 9389, 8433, 2783, 2848, 968, 8951, 3760, 8407, - 7534, 2609, 3709, 9648, 2412, 7425, 8619, 7347, 9064, - 4963, 1354, 5818, 7386, 7369, 2381, 5800, 6012, 3532, - 214, 9362, 4731, 8238, 4276, 2221, 3400, 540, 4177, - 2164, 1191, 3257, 9047, 364, 2899, 2221, 8232, 4361, - 1480, 3950, 9412, 570, 6740, 5525, 7995, 856, 1549, - 3242, 5640, 6435, 1568, 3375, 4060, 6793, 1686, 9865, - 9598, 3307, 6572, 3171, 2008, 7637, 4922, 386, 6366, - 7528, 2850, 6318, 2345, 368, 5786, 9710, 7772, 1761, - 6140, 301, 570, 8828, 8741, 9119, 4206, 3780, 4780, - 707, 3068, 8924, 3477, 4065, 1036, 560, 3678, 9660, - 4487, 8282, 6249, 7109, 3781, 7458, 2555, 3159, 5856, - 7260, 2615, 6118, 4364, 9114, 9280, 8926, 3395, 7988, - 2698, 8725, 6447, 7235, 3203, 5300, 4394, 3058, 7417, - 5517, 1817, 6921, 9211, 8306, 7080, 8460, 7810, 7848, - 4752, 270, 8368, 8576, 7374, 8967, 2824, 9004, 8383, - 4024, 4694, 919, 6859, 4607, 3536, 5606, 5137, 6430, - 1226, 8149, 7263, 7513, 1278, 5694, 380, 1738, 433, - 9251, 916, 5723, 4516, 8117, 800, 5078, 9205, 3076, - 2156, 1929, 248, 8896, 6899, 3847, 2048, 8775, 3158, - 9717, 4304, 9305, 9027, 8444, 7211, 3882, 2571, 7542, - 1042, 2864, 4697, 4933, 9597, 2106, 1874, 1663, 8974, - 5845, 9264, 40, 4452, 2566, 3363, 7670, 7586, 7479, - 5981, 1928, 2498, 8107, 6502, 5725, 1280, 4382, 7811, - 4150, 2407, 5646, 7384, 6368, 9330, 4277, 4360, 9293, - 8848, 3141, 3719, 4630, 418, 68, 3077, 304, 5534, - 3069, 4046, 9645, 797, 7332, 2456, 818, 6040, 233, - 1356, 6755, 4249, 1643, 5939, 6192, 5023, 2182, 3038, - 615, 962, 6584, 6465, 4700, 5188, 378, 2580, 7736, - 8469, 7964, 4215, 5350, 5035, 723, 2366, 9255, 3296, - 2487, 9749, 9356, 2120, 2719, 1534, 5352, 4142, 2840, - 3874, 3309, 2282, 4165, 8221, 3151, 7532, 9093, 1914, - 875, 3267, 7484, 5334, 246, 69, 1760, 3158, 9485, - 2968, 8655, 8484, 1925, 6269, 8467, 4053, 8242, 2491, - 9174, 9141, 8207, 3765, 7176, 8536, 4547, 5155, 9600, - 59, 7491, 3224, 3860, 3405, 5155, 5872, 5087, 1030, - 5823, 67, 9402, 2052, 3137, 8203, 5381, 5014, 2445, - 2598, 6064, 583, 6264, 8514, 2132, 8834, 5724, 5040, - 3049, 7546, 5472, 9227, 9155, 4856, 8701, 9805, 8423, - 5471, 9572, 1489, 1146, 2738, 6131, 4467, 3775, 7623, - 3128, 1626, 4245, 5851, 8311, 5249, 9163, 5646, 9700, - 5912, 1064, 3427, 9742, 4822, 1609, 1247, 3225, 1349, - 1112, 7136, 4620, 9398, 180, 852, 8483, 99, 5880, - 6936, 3985, 7199, 43, 619, 1433, 378, 5613, 8778, - 9714, 6327, 2214, 4270, 2616, 7836, 6036, 4038, 1349, - 3609, 500, 8725, 5242, 3213, 7450, 9547, 6518, 3900, - 9820, 6085, 3210, 7810, 5855, 7609, 1890, 2354, 6091, - 8514, 7897, 3931, 2880, 6281, 3385, 5921, 9952, 5094, - 8855, 172, 3302, 3010, 4642, 2734, 9166, 8421, 2234, - 8565, 528, 8176, 6482, 994, 3099, 9724, 6379, 405, - 8321, 239, 6594, 996, 3440, 8976, 435, 2375, 6431, - 3253, 1630, 7853, 2956, 8895, 986, 6476, 1010, 8489, - 8021, 872, 
1473, 1100, 5352, 9179, 2417, 9244, 9167, - 7160, 3376, 6327, 3920, 5098, 7118, 4103, 8332, 3090, - 9060, 6774, 7505, 4316, 2580, 8806, 3234, 5576, 1591, - 9446, 2876, 3861, 9007, 4047, 9387, 1828, 9480, 300, - 5810, 1681, 7603, 9696, 3967, 205, 4720, 6724, 1706, - 1179, 7731, 9912, 9119, 3539, 3641, 2691, 1738, 2069, - 64, 5901, 6907, 7329, 5038, 8087, 2346, 1417, 9802, - 2245, 9442, 3124, 6856, 7605, 3350, 9096, 9607, 6580, - 7026, 4159, 2283, 388, 6926, 8970, 9000, 5902, 3616, - 8251, 2022, 1387, 2040, 3499, 7352, 9723, 4720, 4072, - 3030, 8642, 1561, 3550, 728, 3483, 9957, 2852, 9769, - 530, 8950, 8791, 1868, 3201, 4039, 7668, 6271, 5458, - 8517, 9781, 4046, 3225, 9954, 206, 6894, 7597, 7497, - 6306, 3298, 6872, 2066, 7591, 1890, 8097, 1280, 7096, - 8226, 1359, 216, 3321, 8096, 7997, 7290, 8655, 8538, - 5466, 3483, 6990, 5527, 3778, 5009, 8586, 5007, 7530, - 2615, 6501, 9092, 3603, 3220, 7154, 9702, 5045, 8352, - 2617, 771, 5086, 2751, 4678, 9150, 7949, 9854, 737, - 4795, 2071, 9389, 7313, 9799, 3943, 6232, 1410, 8989, - 7328, 6562, 7366, 5222, 441, 8345, 6712, 3926, 515, - 5956, 5254, 6129, 5268, 1272, 9590, 5597, 1532, 744, - 7243, 1823, 6966, 9821, 5101, 1854, 836, 8195, 8533, - 9772, 6888, 999, 42, 8452, 7406, 756, 2186, 7921, - 7527, 6586, 9178, 9190, 6661, 1985, 2124, 5708, 2885, - 5344, 8420, 6388, 4916, 5751, 5447, 1612, 7115, 4012, - 4211, 4491, 2685, 8469, 3264, 3933, 8307, 2069, 5721, - 6785, 7552, 3662, 9294, 3272, 4972, 4038, 8502, 7978, - 730, 6090, 2443, 3573, 9783, 6945, 8456, 7714, 2278, - 1207, 9072, 1346, 555, 8623, 6821, 2442, 455, 1202, - 8844, 3296, 1101, 8043, 8852, 2122, 3173, 1731, 9938, - 5593, 93, 8956, 6237, 8545, 5229, 9239, 8804, 3713, - 4281, 5382, 8333, 8801, 93, 2212, 5217, 7239, 9312, - 2195, 1969, 8659, 674, 2456, 1219, 684, 5633, 7633, - 1822, 5479, 5138, 2448, 4428, 1533, 6135, 585, 2626, - 1535, 2390, 2292, 3199, 9797, 3083, 7782, 3749, 5972, - 5041, 873, 1762, 6907, 2439, 176, 7886, 8698, 5610, - 4063, 7434, 7641, 5504, 8492, 8226, 4607, 3891, 971, - 446, 5185, 8037, 5578, 2205, 6200, 5210, 7918, 8694, - 8369, 5656, 8140, 720, 6980, 4361, 6048, 5637, 1525, - 3287, 4786, 6431, 419, 584, 7972, 6800, 6611, 3925, - 1361, 345, 5934, 221, 3976, 5263, 273, 1124, 6638, - 9587, 1234, 7547, 9811, 1651, 2418, 9523, 5719, 2357, - 9754, 4924, 9608, 7139, 1769, 2001, 6705, 554, 4744, - 2302, 8692, 6947, 8654, 8683, 9773, 4469, 8661, 3479, - 222, 2813, 8116, 1105, 4667, 9744, 3524, 7923, 7688, - 5748, 4643, 3806, 589, 8275, 7300, 9905, 8566, 3005, - 8277, 3362, 9117, 4507, 4746, 196, 1702, 5992, 1090, - 7587, 5382, 7431, 8949, 1611, 5830, 5183, 8125, 3201, - 6255, 9408, 1748, 1554, 1906, 729, 8466, 4725, 4398, - 3941]), - values=tensor([0.6959, 0.7960, 0.0781, 0.6007, 0.3460, 0.7293, 0.2445, - 0.3377, 0.4176, 0.3625, 0.7235, 0.1442, 0.0335, 0.4983, - 0.5500, 0.1056, 0.9940, 0.8727, 0.8069, 0.4130, 0.1455, - 0.6420, 0.2494, 0.3495, 0.7133, 0.4395, 0.2694, 0.2841, - 0.5549, 0.0490, 0.7986, 0.5242, 0.3692, 0.2460, 0.8859, - 0.0105, 0.6123, 0.7972, 0.2878, 0.9495, 0.0416, 0.7368, - 0.5235, 0.2640, 0.4232, 0.7400, 0.4660, 0.1817, 0.7149, - 0.8459, 0.3355, 0.5095, 0.8904, 0.2582, 0.0507, 0.5007, - 0.7876, 0.1015, 0.4881, 0.3695, 0.9348, 0.8654, 0.8541, - 0.1701, 0.5411, 0.8559, 0.0858, 0.6724, 0.4755, 0.9937, - 0.3688, 0.4551, 0.1705, 0.5343, 0.7652, 0.5023, 0.4761, - 0.8553, 0.5042, 0.1378, 0.3402, 0.0214, 0.3317, 0.7760, - 0.6136, 0.0754, 0.7266, 0.3108, 0.1557, 0.5342, 0.2368, - 0.6255, 0.6338, 0.2103, 0.3547, 0.7898, 0.3532, 0.6879, - 0.5756, 0.5446, 0.6549, 
0.1833, 0.3545, 0.3075, 0.8999, - 0.0866, 0.3178, 0.1771, 0.9904, 0.5584, 0.7474, 0.3985, - 0.7736, 0.7195, 0.1836, 0.1731, 0.6012, 0.6546, 0.5119, - 0.7668, 0.7804, 0.2026, 0.4119, 0.1043, 0.5762, 0.3249, - 0.0350, 0.9469, 0.9597, 0.3663, 0.4396, 0.3526, 0.6112, - 0.2028, 0.6398, 0.7385, 0.7867, 0.3703, 0.9189, 0.5053, - 0.2346, 0.3574, 0.9106, 0.6490, 0.0571, 0.7581, 0.0037, - 0.2000, 0.0078, 0.3345, 0.5987, 0.1644, 0.3401, 0.1628, - 0.6190, 0.0170, 0.3230, 0.7145, 0.0065, 0.2218, 0.4568, - 0.5352, 0.7506, 0.8970, 0.0769, 0.7770, 0.0375, 0.5961, - 0.1851, 0.1482, 0.0403, 0.1972, 0.9194, 0.1678, 0.8901, - 0.8766, 0.0092, 0.2714, 0.5465, 0.2373, 0.1403, 0.0755, - 0.7137, 0.4544, 0.3006, 0.0421, 0.8286, 0.0333, 0.4192, - 0.1514, 0.5585, 0.9974, 0.8611, 0.3646, 0.5144, 0.1233, - 0.0881, 0.9866, 0.2420, 0.9365, 0.3756, 0.2095, 0.4280, - 0.2119, 0.0313, 0.7899, 0.6265, 0.8099, 0.3894, 0.1829, - 0.4932, 0.0535, 0.2941, 0.7543, 0.3779, 0.6784, 0.2573, - 0.6186, 0.1475, 0.2682, 0.6713, 0.2567, 0.7592, 0.7252, - 0.8305, 0.0652, 0.2366, 0.0143, 0.5795, 0.7446, 0.7182, - 0.2430, 0.3788, 0.2324, 0.6863, 0.2975, 0.5231, 0.1994, - 0.6772, 0.0117, 0.2661, 0.4983, 0.2255, 0.1217, 0.5970, - 0.4181, 0.1888, 0.5662, 0.8459, 0.4818, 0.5664, 0.7551, - 0.3425, 0.7495, 0.5635, 0.3037, 0.9253, 0.1746, 0.0949, - 0.1444, 0.1382, 0.6207, 0.2484, 0.6872, 0.6011, 0.8909, - 0.5037, 0.7646, 0.8169, 0.6714, 0.6011, 0.0228, 0.7584, - 0.5712, 0.9573, 0.5293, 0.0187, 0.0769, 0.3750, 0.5318, - 0.5469, 0.0475, 0.6832, 0.1012, 0.3177, 0.3205, 0.9912, - 0.7967, 0.3297, 0.8600, 0.8649, 0.1408, 0.1008, 0.6301, - 0.4827, 0.2363, 0.4056, 0.0155, 0.7202, 0.0029, 0.5561, - 0.7290, 0.5676, 0.5965, 0.5363, 0.0921, 0.1850, 0.8448, - 0.7451, 0.5778, 0.2613, 0.9353, 0.9780, 0.5328, 0.8762, - 0.7506, 0.6058, 0.6343, 0.8670, 0.6461, 0.0050, 0.0322, - 0.7176, 0.9509, 0.0141, 0.2849, 0.4851, 0.7572, 0.2430, - 0.0132, 0.9420, 0.2551, 0.1494, 0.1934, 0.9914, 0.0389, - 0.5433, 0.7196, 0.7758, 0.0818, 0.2780, 0.1470, 0.9575, - 0.0039, 0.7218, 0.1816, 0.4767, 0.9657, 0.5165, 0.5374, - 0.9692, 0.8055, 0.2454, 0.3490, 0.9352, 0.0487, 0.6894, - 0.9618, 0.0152, 0.3404, 0.6613, 0.6038, 0.4468, 0.2839, - 0.1984, 0.0949, 0.4107, 0.3440, 0.6650, 0.9782, 0.7647, - 0.3472, 0.8008, 0.6515, 0.2253, 0.1516, 0.0721, 0.0532, - 0.1489, 0.2701, 0.4286, 0.6556, 0.2800, 0.4837, 0.1700, - 0.4031, 0.7582, 0.2764, 0.1452, 0.4317, 0.8870, 0.8904, - 0.2127, 0.1224, 0.0877, 0.6062, 0.7479, 0.8289, 0.8979, - 0.5176, 0.3928, 0.1454, 0.7837, 0.1713, 0.5346, 0.7913, - 0.1056, 0.5996, 0.7813, 0.5215, 0.9208, 0.9514, 0.2525, - 0.4305, 0.2552, 0.7349, 0.3762, 0.7076, 0.1290, 0.1830, - 0.8907, 0.4787, 0.2419, 0.4726, 0.6765, 0.2643, 0.8126, - 0.8757, 0.7952, 0.1205, 0.1298, 0.3806, 0.3136, 0.8469, - 0.0054, 0.5516, 0.9247, 0.4707, 0.9515, 0.8050, 0.4300, - 0.7760, 0.9682, 0.9549, 0.3683, 0.9720, 0.3750, 0.1016, - 0.1056, 0.6301, 0.1820, 0.6373, 0.4706, 0.8360, 0.8820, - 0.8684, 0.5608, 0.4950, 0.7219, 0.7477, 0.3138, 0.4091, - 0.3672, 0.6820, 0.1598, 0.4262, 0.8790, 0.5580, 0.7601, - 0.6733, 0.7879, 0.3866, 0.8117, 0.6549, 0.4184, 0.9377, - 0.0728, 0.7220, 0.5621, 0.8045, 0.9104, 0.1723, 0.5263, - 0.7555, 0.7751, 0.2045, 0.2067, 0.3395, 0.0619, 0.6870, - 0.6295, 0.5145, 0.4071, 0.2642, 0.1755, 0.1193, 0.9114, - 0.5765, 0.1704, 0.3481, 0.6444, 0.1809, 0.0182, 0.0117, - 0.5466, 0.1851, 0.8523, 0.0951, 0.8918, 0.4279, 0.3555, - 0.1480, 0.9159, 0.1610, 0.2426, 0.6025, 0.7344, 0.3367, - 0.2722, 0.6946, 0.3251, 0.3742, 0.1985, 0.3410, 0.7549, - 0.7152, 0.6307, 0.2442, 0.6188, 0.4939, 
0.3315, 0.6606, - 0.3168, 0.5301, 0.3465, 0.9986, 0.1887, 0.1622, 0.9650, - 0.5986, 0.8953, 0.8288, 0.8866, 0.2675, 0.7017, 0.2466, - 0.8798, 0.3267, 0.8028, 0.3919, 0.3126, 0.5504, 0.3778, - 0.7414, 0.0821, 0.0301, 0.4972, 0.4197, 0.6222, 0.4811, - 0.0587, 0.6802, 0.1914, 0.3226, 0.5795, 0.4956, 0.3432, - 0.0689, 0.7911, 0.3203, 0.5275, 0.7748, 0.2775, 0.7469, - 0.8369, 0.2113, 0.0507, 0.9763, 0.3517, 0.6939, 0.7093, - 0.5765, 0.2753, 0.5658, 0.3861, 0.2320, 0.5725, 0.7333, - 0.9639, 0.9626, 0.9957, 0.2482, 0.2599, 0.2589, 0.0011, - 0.5195, 0.4786, 0.6863, 0.9629, 0.0159, 0.8498, 0.9892, - 0.4985, 0.5712, 0.8487, 0.3862, 0.6221, 0.2142, 0.4490, - 0.8922, 0.7035, 0.5839, 0.3975, 0.4180, 0.6609, 0.9340, - 0.0330, 0.6173, 0.9389, 0.6227, 0.3648, 0.5424, 0.3871, - 0.5239, 0.6250, 0.8573, 0.0643, 0.3775, 0.5397, 0.5294, - 0.8842, 0.4971, 0.4772, 0.0587, 0.4167, 0.3990, 0.8149, - 0.3392, 0.7700, 0.3194, 0.6137, 0.4234, 0.6017, 0.1367, - 0.5706, 0.9656, 0.6379, 0.8556, 0.0899, 0.6243, 0.0430, - 0.0444, 0.2529, 0.6988, 0.6018, 0.0937, 0.9108, 0.0874, - 0.8000, 0.6934, 0.6987, 0.3747, 0.5361, 0.5595, 0.4715, - 0.8963, 0.2831, 0.7115, 0.8126, 0.3770, 0.9565, 0.0805, - 0.5965, 0.4760, 0.3946, 0.5740, 0.5718, 0.8089, 0.3195, - 0.6564, 0.2940, 0.1074, 0.1914, 0.3616, 0.2781, 0.5799, - 0.2506, 0.9786, 0.8031, 0.2290, 0.7132, 0.8597, 0.1502, - 0.5501, 0.8473, 0.0961, 0.3091, 0.2671, 0.6716, 0.2749, - 0.8922, 0.9331, 0.7831, 0.3662, 0.4411, 0.0261, 0.1996, - 0.1425, 0.1518, 0.1002, 0.5879, 0.8248, 0.4447, 0.4024, - 0.2709, 0.9656, 0.2968, 0.8687, 0.7429, 0.2038, 0.6212, - 0.5590, 0.3980, 0.6056, 0.7683, 0.9384, 0.6645, 0.3018, - 0.9914, 0.4717, 0.9312, 0.2458, 0.6373, 0.3662, 0.7338, - 0.8539, 0.2373, 0.8468, 0.9313, 0.9985, 0.9108, 0.2928, - 0.5675, 0.4918, 0.6788, 0.6532, 0.8578, 0.9756, 0.7977, - 0.7426, 0.3820, 0.0559, 0.7421, 0.6276, 0.5143, 0.1295, - 0.1380, 0.3699, 0.7837, 0.8925, 0.0521, 0.7792, 0.8213, - 0.9725, 0.3183, 0.1008, 0.4133, 0.0178, 0.2000, 0.5122, - 0.9307, 0.7232, 0.2629, 0.4049, 0.1523, 0.9357, 0.6589, - 0.3894, 0.5017, 0.4405, 0.3895, 0.3989, 0.4152, 0.3842, - 0.2309, 0.0256, 0.1505, 0.6988, 0.2384, 0.4692, 0.2583, - 0.3660, 0.9101, 0.3800, 0.5037, 0.1464, 0.3283, 0.7269, - 0.8936, 0.2059, 0.3661, 0.4488, 0.0114, 0.3810, 0.0566, - 0.9065, 0.6607, 0.4611, 0.6537, 0.1070, 0.8951, 0.1591, - 0.3640, 0.0681, 0.1975, 0.8229, 0.3451, 0.9813, 0.1129, - 0.2130, 0.1791, 0.6985, 0.3325, 0.8236, 0.5894, 0.8602, - 0.7245, 0.8692, 0.0849, 0.3757, 0.4767, 0.2866, 0.8776, - 0.1222, 0.4554, 0.1437, 0.8286, 0.4194, 0.9171, 0.1793, - 0.4714, 0.7967, 0.3271, 0.9508, 0.9596, 0.9553, 0.2671, - 0.5226, 0.2545, 0.2904, 0.3931, 0.1604, 0.9966, 0.9778, - 0.1750, 0.2027, 0.1076, 0.4041, 0.9413, 0.1702, 0.9720, - 0.8515, 0.2862, 0.5026, 0.1329, 0.7104, 0.6485, 0.6173, - 0.1418, 0.0456, 0.2708, 0.2491, 0.4879, 0.6711, 0.3460, - 0.2430, 0.0081, 0.6377, 0.4017, 0.6538, 0.0909, 0.0154, - 0.6563, 0.2168, 0.7672, 0.2347, 0.2597, 0.5759, 0.0490, - 0.0102, 0.2013, 0.8005, 0.6998, 0.2945, 0.4318, 0.5007, - 0.1644, 0.7497, 0.5000, 0.5912, 0.9818, 0.5953, 0.9114, - 0.1285, 0.8707, 0.3809, 0.7743, 0.4282, 0.7606, 0.5688, - 0.3152, 0.8989, 0.1583, 0.8766, 0.6326, 0.8886, 0.2620, - 0.3086, 0.3602, 0.7866, 0.9547, 0.2244, 0.2724, 0.9583, - 0.7470, 0.9623, 0.1429, 0.8508, 0.5799, 0.9955, 0.6647, - 0.1399, 0.1801, 0.9233, 0.2772, 0.2445, 0.0531, 0.7452, - 0.0969, 0.7391, 0.2375, 0.8335, 0.2274, 0.5999, 0.0299, - 0.1018, 0.7622, 0.6825, 0.5068, 0.5262, 0.2390, 0.3450, - 0.7486, 0.6337, 0.3874, 0.1886, 0.2751, 0.8412, 0.0794, 
- 0.2398, 0.8937, 0.9405, 0.8703, 0.2503, 0.9532, 0.7538, - 0.3676, 0.7234, 0.4557, 0.9672, 0.5687, 0.6227, 0.7700, - 0.6581, 0.6226, 0.6058, 0.8459, 0.4844, 0.8195, 0.3510, - 0.5321, 0.7146, 0.4273, 0.4330, 0.8640, 0.8088, 0.5159, - 0.8375, 0.9589, 0.0615, 0.2675, 0.6890, 0.9951, 0.0718, - 0.5084, 0.2519, 0.6662, 0.9841, 0.4845, 0.3961]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 999, 1000, 1000]), + col_indices=tensor([6337, 6253, 6887, 9242, 4768, 6015, 4529, 9548, 1595, + 4824, 8428, 3021, 6404, 9778, 1690, 6193, 9247, 3032, + 2497, 1684, 4366, 1179, 3199, 1210, 8827, 260, 8366, + 6198, 7506, 3241, 875, 1272, 4944, 5203, 1187, 5756, + 211, 2088, 9817, 9835, 6021, 3840, 2185, 4006, 1537, + 2995, 6201, 8435, 8923, 8907, 7515, 7430, 5822, 9922, + 2044, 7750, 7906, 1082, 6451, 154, 4143, 213, 6863, + 5696, 3741, 3326, 9758, 2705, 5798, 105, 5494, 3610, + 3202, 5160, 7226, 8546, 5325, 1924, 7174, 7551, 1995, + 855, 1391, 4170, 9209, 1447, 5275, 2681, 1810, 8276, + 7234, 5988, 3014, 3695, 4405, 9809, 1200, 4282, 548, + 2865, 8197, 9815, 4638, 337, 5602, 690, 7426, 3370, + 7307, 9723, 9052, 2016, 3607, 4832, 2053, 7790, 7536, + 6270, 8193, 8651, 2372, 9909, 9570, 6080, 9324, 7410, + 5175, 8046, 7677, 2213, 7718, 7630, 3775, 5050, 6485, + 850, 2674, 7769, 5577, 3838, 5701, 1650, 9534, 7011, + 1890, 7869, 8541, 3508, 7428, 310, 6110, 4723, 8960, + 2453, 8119, 4785, 590, 935, 3774, 5197, 7485, 8842, + 6189, 4511, 4178, 9552, 2567, 9282, 4565, 4874, 3063, + 138, 6925, 2428, 3891, 6900, 6790, 5627, 1703, 3014, + 9860, 3749, 6146, 2541, 3788, 9915, 628, 5299, 318, + 3903, 6794, 6177, 7188, 570, 9964, 7280, 1104, 9401, + 6043, 7474, 1969, 7967, 907, 9905, 707, 1999, 4973, + 8937, 9361, 3946, 8868, 8995, 8271, 8640, 2016, 9553, + 6026, 1566, 7895, 5868, 6156, 9464, 3169, 7935, 208, + 2840, 6989, 282, 4248, 7859, 2346, 3311, 5676, 652, + 7209, 2733, 9915, 1803, 5814, 8336, 3664, 7836, 1371, + 1609, 8360, 4353, 1056, 489, 4044, 5190, 9267, 4537, + 7884, 4517, 6422, 7329, 6479, 5051, 3973, 8436, 253, + 5317, 6149, 2483, 8523, 2586, 3339, 5210, 7172, 9964, + 5416, 545, 8960, 9405, 4565, 6937, 6131, 6599, 4989, + 7748, 863, 847, 4903, 4347, 8933, 508, 4128, 7115, + 5357, 5191, 4282, 6775, 1030, 3238, 3809, 1005, 2495, + 4552, 3779, 1572, 9609, 7137, 259, 4849, 334, 4810, + 4903, 6322, 7942, 9657, 4189, 5075, 8964, 634, 1590, + 4380, 4733, 6906, 6146, 3898, 3758, 6545, 9449, 4029, + 7898, 3163, 3639, 1942, 9232, 9128, 3242, 6813, 6174, + 7219, 2775, 5894, 8490, 9712, 1365, 5562, 9335, 2632, + 4451, 5670, 1434, 5293, 7618, 4448, 5133, 6206, 7403, + 9277, 3917, 8082, 4877, 8977, 4087, 8282, 4673, 4174, + 8024, 4123, 7809, 6063, 8967, 7477, 5136, 2097, 5627, + 1222, 6507, 3842, 508, 3319, 4641, 6974, 1751, 7198, + 8348, 1442, 35, 1644, 8065, 1995, 673, 7425, 3797, + 6591, 9260, 4093, 2659, 4276, 7434, 2094, 2302, 5152, + 1175, 7236, 9174, 5000, 8426, 9863, 4907, 5968, 7517, + 6799, 5216, 6239, 5631, 9895, 4625, 662, 9866, 4327, + 4338, 678, 2458, 275, 6859, 1032, 8384, 329, 4693, + 3648, 5273, 9909, 942, 8364, 9995, 2715, 1897, 2171, + 1546, 2525, 3810, 2805, 8453, 3081, 9965, 1581, 1935, + 4572, 5694, 5361, 5098, 8858, 9344, 5261, 8527, 1107, + 1160, 9856, 4177, 7423, 494, 296, 269, 2605, 8887, + 3358, 9924, 7644, 3963, 8756, 7728, 1689, 1517, 8896, + 738, 5342, 9971, 7299, 233, 3569, 4255, 7068, 8925, + 2434, 1166, 9286, 5417, 6075, 3889, 6567, 9735, 6644, + 6788, 4533, 5705, 9314, 2299, 4410, 8484, 148, 8607, + 7159, 3505, 2676, 3474, 2218, 4608, 7333, 8149, 4788, + 2661, 9885, 9028, 4675, 6760, 438, 
449, 1577, 7010, + 6021, 2054, 8064, 8112, 696, 4240, 9566, 9924, 8269, + 430, 8850, 51, 5782, 4715, 8674, 3485, 5265, 8332, + 3575, 6476, 305, 5968, 5003, 537, 2220, 1503, 9358, + 8506, 8170, 66, 5381, 8645, 4683, 2387, 8102, 1837, + 6751, 3809, 6175, 8349, 5990, 4193, 6255, 9433, 3491, + 5504, 6724, 9423, 2713, 9402, 7125, 2047, 9087, 4430, + 2982, 3288, 5762, 7384, 1945, 4164, 4058, 5582, 7730, + 233, 6512, 5793, 2294, 8055, 2942, 4065, 2856, 2579, + 2004, 9512, 654, 4457, 7364, 7533, 264, 3614, 8663, + 2785, 6683, 9346, 6694, 2353, 6233, 7366, 6692, 8681, + 7898, 8152, 8797, 5218, 9637, 3345, 6199, 9330, 6921, + 1265, 6277, 1965, 4451, 4004, 1426, 1705, 3872, 2572, + 72, 2018, 3157, 1057, 4137, 3275, 1963, 3933, 4474, + 5712, 4713, 5460, 249, 886, 4523, 8892, 6793, 7692, + 8370, 4264, 6526, 3842, 803, 3573, 1941, 4436, 9920, + 2420, 5483, 2367, 8018, 7268, 1307, 6340, 2067, 1686, + 8589, 9747, 1983, 1688, 5896, 3769, 1189, 2727, 1016, + 3715, 1681, 6867, 8527, 4960, 8462, 8940, 168, 3808, + 6150, 8565, 7884, 8405, 4063, 3171, 1186, 5059, 8903, + 6602, 875, 4517, 7168, 8441, 6379, 4580, 6052, 8218, + 7332, 1238, 8354, 4989, 8887, 4935, 1443, 6584, 9867, + 2425, 267, 8798, 1150, 4533, 553, 7057, 5527, 7207, + 6414, 5989, 9638, 1960, 9222, 2058, 22, 1474, 3538, + 3890, 768, 5469, 4336, 982, 2699, 3204, 5837, 4786, + 6115, 9621, 2847, 703, 8979, 6693, 515, 187, 5765, + 2434, 9356, 9680, 3382, 7846, 7488, 613, 5938, 1825, + 3669, 1229, 3490, 7397, 3003, 2656, 8622, 4760, 705, + 4617, 6800, 342, 6968, 3077, 3096, 5013, 1452, 8703, + 262, 1019, 9628, 9426, 9699, 2398, 6727, 4819, 4621, + 7759, 2580, 5304, 3929, 2764, 7905, 991, 4293, 8126, + 7711, 9992, 2619, 9355, 9384, 1466, 2134, 6993, 2634, + 8618, 1513, 8251, 8332, 1426, 68, 1411, 8056, 7127, + 7954, 3753, 5389, 8765, 5868, 2958, 1390, 8317, 5798, + 5311, 1034, 2359, 9360, 51, 8700, 9949, 4824, 6110, + 215, 3236, 3437, 1136, 9944, 5098, 279, 62, 8279, + 4624, 6047, 8365, 4369, 1334, 9676, 381, 8188, 2016, + 8679, 4525, 2209, 6727, 5230, 9965, 892, 3143, 4106, + 40, 3303, 4214, 691, 3846, 9913, 5788, 3178, 2173, + 8316, 9434, 6626, 3122, 5836, 3842, 3987, 1995, 9148, + 3515, 1475, 7570, 7970, 4351, 10, 715, 9759, 797, + 4144, 6357, 4175, 9929, 1530, 4865, 7625, 1650, 7175, + 1441, 4178, 6150, 3161, 334, 4730, 4184, 9595, 7705, + 430, 8106, 8496, 6095, 4084, 489, 2087, 6737, 3907, + 3469, 2240, 9853, 8414, 9187, 8777, 8268, 9967, 3862, + 1163, 2985, 5790, 1525, 6521, 9596, 9952, 3337, 6163, + 6802, 6571, 4350, 8511, 2081, 5623, 9045, 3126, 3216, + 1296, 2667, 5515, 6956, 2110, 7575, 9209, 4764, 5757, + 643, 7008, 5202, 9361, 7777, 8025, 4650, 1583, 9443, + 7103, 8949, 1343, 518, 6597, 6770, 1562, 9497, 160, + 580, 6210, 4358, 6102, 1255, 4401, 3687, 3603, 8736, + 8151, 3736, 8179, 474, 3654, 6081, 6883, 5062, 5121, + 1157, 1241, 6973, 3927, 2212, 1429, 9900, 1999, 8912, + 212, 7231, 7655, 3276, 4066, 3390, 5316, 7132, 8466, + 3851, 3041, 2473, 5050, 3587, 8845, 3409, 2223, 3030, + 3844]), + values=tensor([8.2393e-01, 1.5019e-01, 8.5274e-02, 1.3200e-01, + 6.9695e-01, 8.8007e-01, 2.6063e-01, 2.6633e-01, + 1.5895e-01, 3.2459e-01, 7.9944e-01, 7.9665e-01, + 5.7807e-01, 9.3262e-01, 4.0176e-01, 3.9150e-01, + 6.4199e-01, 5.2198e-01, 5.0451e-02, 5.6095e-01, + 1.9517e-01, 5.8229e-01, 5.9680e-01, 6.3858e-01, + 6.0167e-01, 5.9238e-01, 5.4524e-01, 5.5850e-01, + 4.6533e-01, 8.0528e-01, 4.3126e-01, 3.8328e-02, + 2.6529e-01, 6.6051e-01, 1.2248e-01, 8.8354e-01, + 4.6105e-01, 1.4615e-01, 5.3722e-01, 2.9376e-01, + 1.1100e-01, 2.8144e-01, 6.9849e-01, 
6.0340e-01, + 2.6582e-01, 1.3794e-01, 5.4512e-01, 2.6792e-01, + 9.2262e-01, 9.4629e-01, 4.5982e-01, 8.0294e-01, + 9.6663e-01, 3.5252e-01, 1.4888e-01, 3.9557e-01, + 5.5774e-01, 8.2395e-01, 2.9728e-01, 1.6107e-01, + 4.9380e-02, 3.3702e-01, 2.9645e-01, 9.9303e-01, + 5.1034e-01, 4.1965e-01, 4.9080e-01, 9.5543e-01, + 6.9234e-01, 3.4877e-02, 6.6442e-01, 9.4141e-01, + 4.0285e-01, 6.6260e-03, 7.4368e-01, 7.0828e-01, + 5.0894e-01, 8.5305e-01, 5.5481e-02, 8.2447e-01, + 1.6340e-01, 7.3925e-01, 4.3103e-01, 4.8413e-01, + 2.9843e-01, 1.0276e-01, 8.2942e-01, 2.2059e-01, + 7.2107e-01, 5.7189e-01, 7.2616e-01, 8.6178e-02, + 2.1037e-02, 1.1812e-01, 7.5470e-01, 4.6164e-01, + 5.4577e-01, 3.3790e-02, 4.1304e-01, 9.4406e-01, + 5.2676e-01, 5.6383e-01, 4.2839e-01, 5.2438e-01, + 3.0384e-01, 6.3788e-01, 9.1300e-01, 3.1034e-01, + 4.0981e-01, 4.7271e-01, 4.5633e-01, 4.7760e-01, + 1.1020e-01, 2.0795e-02, 2.1505e-02, 3.2759e-01, + 8.9685e-01, 2.9522e-02, 7.9562e-01, 8.1609e-02, + 6.4739e-01, 1.1012e-01, 1.0288e-01, 2.9924e-01, + 6.1639e-01, 4.0882e-01, 2.4966e-01, 7.7845e-01, + 8.5243e-01, 1.1680e-01, 8.5183e-03, 4.1251e-01, + 1.3189e-01, 3.6699e-01, 2.2821e-01, 8.8812e-01, + 8.9146e-01, 2.6685e-01, 7.8972e-01, 6.2874e-01, + 6.5315e-01, 7.5277e-01, 4.3741e-01, 5.3894e-01, + 5.8431e-01, 2.6621e-01, 8.8110e-01, 6.5876e-01, + 2.2655e-01, 6.8296e-01, 6.9625e-01, 9.3140e-01, + 4.0692e-01, 5.1780e-02, 5.5894e-02, 1.2944e-01, + 3.6693e-01, 4.1609e-01, 1.7315e-01, 2.7629e-02, + 2.5275e-01, 2.8886e-01, 5.8279e-01, 2.8189e-01, + 3.6442e-01, 2.6982e-03, 7.7985e-01, 2.9648e-01, + 1.4814e-01, 7.0420e-01, 2.0899e-01, 9.6353e-01, + 6.4830e-01, 4.0625e-01, 6.1961e-02, 9.6137e-01, + 7.4018e-01, 5.0172e-02, 2.1344e-01, 6.1846e-02, + 3.1852e-02, 1.9861e-02, 1.7409e-01, 7.6915e-01, + 3.3864e-01, 8.0804e-01, 9.1722e-01, 5.3964e-01, + 9.9293e-01, 6.2878e-01, 8.7881e-01, 6.7693e-01, + 6.7201e-02, 1.6527e-01, 9.3481e-01, 4.3980e-01, + 4.0867e-01, 2.9267e-01, 6.8959e-01, 2.4241e-01, + 7.8145e-03, 5.9585e-01, 6.0192e-01, 9.7289e-01, + 5.1873e-01, 3.8243e-01, 4.5731e-01, 9.5420e-02, + 5.2338e-01, 5.8730e-01, 1.0387e-02, 1.6742e-01, + 1.0675e-01, 2.3090e-01, 9.0040e-04, 8.6833e-01, + 8.5394e-01, 8.6286e-01, 3.5583e-01, 3.8844e-01, + 2.8401e-02, 6.9319e-01, 9.6281e-01, 9.4490e-01, + 5.9538e-01, 5.8005e-01, 8.4878e-01, 3.8606e-01, + 7.1957e-01, 8.7779e-01, 4.6601e-02, 6.9247e-01, + 2.8323e-01, 4.4013e-01, 3.4905e-01, 7.0103e-01, + 8.4005e-01, 1.8965e-01, 9.6502e-01, 6.9016e-01, + 2.7636e-01, 9.9683e-01, 6.3166e-01, 3.5829e-01, + 3.5663e-01, 7.4335e-01, 3.2532e-01, 4.8880e-01, + 4.8332e-01, 5.0647e-01, 1.3356e-01, 5.7495e-01, + 7.3269e-01, 3.5103e-01, 4.5958e-01, 7.1566e-01, + 9.0254e-01, 6.5628e-01, 7.9665e-01, 6.7599e-01, + 6.3119e-01, 7.7948e-01, 4.1314e-01, 1.4388e-01, + 6.3143e-01, 9.8648e-01, 1.1911e-01, 8.6919e-01, + 8.6949e-01, 4.0618e-01, 9.9670e-01, 3.7990e-01, + 6.2679e-02, 9.6655e-01, 6.6141e-01, 4.4495e-01, + 5.4496e-01, 9.9103e-01, 8.1184e-01, 4.9858e-01, + 7.0636e-02, 2.9484e-01, 6.2061e-01, 5.2765e-01, + 9.4633e-01, 6.7516e-01, 8.7235e-01, 8.8024e-03, + 7.8317e-01, 7.1939e-01, 1.3820e-01, 5.7895e-02, + 8.5905e-01, 2.1049e-01, 3.6238e-01, 4.8634e-01, + 7.7892e-01, 1.8014e-02, 6.1106e-01, 8.9913e-01, + 4.7975e-01, 5.3033e-01, 4.9454e-02, 6.5441e-01, + 4.3588e-01, 4.5978e-01, 7.9851e-01, 5.8266e-01, + 8.6943e-01, 1.8774e-01, 9.4717e-01, 1.4729e-01, + 1.6653e-01, 3.4556e-01, 9.3043e-01, 7.5589e-01, + 3.3972e-02, 4.5488e-01, 2.9637e-02, 1.0942e-01, + 5.9212e-01, 8.9300e-01, 1.1449e-01, 7.3050e-02, + 6.8841e-01, 1.1163e-01, 8.9769e-01, 
1.8155e-01, + 1.6967e-02, 5.1254e-01, 4.3338e-01, 8.5761e-01, + 5.3372e-01, 4.9336e-01, 7.0137e-01, 7.6005e-01, + 8.2441e-02, 9.6106e-01, 6.4389e-01, 2.7839e-01, + 1.8078e-02, 1.6927e-01, 1.7373e-01, 8.2545e-01, + 9.8833e-01, 6.1096e-01, 3.9049e-01, 9.8441e-01, + 7.2349e-01, 3.1303e-01, 2.0875e-02, 3.7698e-01, + 9.5393e-01, 2.3365e-01, 1.5516e-01, 1.5819e-01, + 5.1214e-01, 6.8187e-01, 6.8530e-01, 3.6052e-01, + 2.5738e-01, 3.9612e-01, 7.4286e-01, 7.2272e-01, + 6.6211e-01, 7.2587e-01, 9.4744e-01, 2.2103e-01, + 5.6878e-01, 1.0684e-01, 7.3965e-01, 1.8014e-01, + 9.5019e-01, 9.4053e-01, 9.3201e-01, 7.2668e-01, + 9.9669e-01, 3.2227e-02, 9.0540e-01, 6.3320e-01, + 9.0097e-02, 7.6524e-01, 2.0366e-01, 7.8086e-02, + 3.4722e-02, 4.3548e-01, 8.4004e-01, 3.7208e-01, + 2.7096e-01, 9.7185e-01, 2.4165e-01, 2.0683e-01, + 7.3755e-01, 3.5581e-01, 3.0437e-01, 7.8542e-01, + 5.2676e-02, 5.3939e-01, 8.0517e-01, 9.7260e-02, + 5.7463e-01, 5.7621e-01, 8.1216e-01, 5.0011e-01, + 8.1083e-01, 9.3175e-01, 7.0638e-01, 3.9207e-01, + 3.2076e-01, 6.4083e-01, 6.5601e-01, 4.3012e-02, + 7.3732e-01, 4.1352e-01, 8.6784e-01, 7.5795e-01, + 2.1025e-01, 8.5163e-01, 9.1445e-01, 3.1700e-01, + 1.3861e-01, 2.3234e-01, 1.5243e-01, 5.9649e-01, + 8.1250e-01, 7.9491e-01, 9.1780e-01, 2.5034e-01, + 6.0943e-01, 8.0713e-01, 2.3685e-01, 4.8337e-02, + 7.6438e-01, 2.8810e-01, 9.0052e-01, 2.0727e-02, + 1.5070e-01, 2.4464e-01, 6.7879e-01, 5.0227e-01, + 4.1695e-01, 5.9048e-01, 7.5940e-01, 8.1938e-01, + 3.3075e-02, 2.3685e-01, 9.4062e-01, 7.0140e-01, + 7.4609e-01, 5.3763e-01, 4.1158e-01, 3.8017e-01, + 2.0844e-01, 4.9449e-01, 3.9290e-01, 9.4230e-01, + 3.6105e-01, 2.0854e-01, 8.3946e-01, 7.9189e-01, + 5.7396e-01, 4.5173e-01, 3.3482e-01, 8.9965e-01, + 7.6627e-01, 4.6385e-01, 8.8760e-01, 7.1382e-01, + 9.2541e-01, 6.3113e-01, 9.1679e-01, 1.4940e-01, + 2.6223e-01, 6.7949e-01, 2.3126e-01, 4.4777e-01, + 4.8690e-01, 4.6878e-01, 9.2394e-01, 9.0957e-02, + 7.4730e-02, 8.1567e-01, 1.4386e-01, 8.4355e-02, + 6.3291e-01, 1.5873e-01, 1.3449e-01, 2.7177e-01, + 9.8144e-01, 8.8629e-02, 4.1631e-01, 8.2361e-01, + 3.9964e-01, 5.3625e-01, 6.4722e-01, 3.1974e-01, + 7.4877e-01, 4.1946e-01, 1.6767e-01, 9.5301e-01, + 1.1349e-01, 9.3522e-01, 5.6833e-01, 2.4655e-01, + 5.7035e-01, 9.4009e-01, 2.7573e-01, 6.8067e-02, + 3.8545e-01, 3.5183e-01, 9.3628e-01, 2.2545e-01, + 5.2786e-01, 5.1525e-01, 8.8308e-01, 3.0606e-01, + 8.8008e-01, 2.0526e-01, 1.3389e-01, 9.1710e-01, + 4.0652e-01, 3.3488e-01, 1.5011e-02, 1.1502e-01, + 4.1701e-01, 9.8652e-01, 4.1692e-01, 2.7305e-01, + 5.2481e-01, 7.1756e-01, 8.2327e-01, 6.5926e-01, + 7.9342e-01, 1.5732e-01, 9.6197e-01, 9.1746e-01, + 9.7947e-01, 1.6791e-01, 4.2411e-01, 2.8344e-01, + 4.7260e-01, 9.8220e-01, 2.1073e-01, 9.6048e-01, + 9.8275e-01, 7.2793e-01, 8.3146e-01, 3.0882e-01, + 5.6035e-01, 7.5521e-02, 3.4334e-01, 5.8304e-01, + 1.1692e-01, 6.2285e-01, 5.7510e-01, 2.3869e-01, + 2.1560e-01, 6.7674e-01, 8.3941e-01, 6.5056e-01, + 7.4357e-01, 4.0811e-01, 8.7028e-01, 1.4760e-01, + 7.6489e-01, 2.6693e-01, 6.0770e-01, 6.7832e-01, + 9.1858e-01, 1.8055e-01, 2.9557e-01, 1.5732e-01, + 9.1102e-02, 1.7503e-01, 1.1015e-01, 9.3398e-01, + 8.2965e-01, 8.8696e-01, 7.3273e-01, 3.4421e-01, + 4.3937e-02, 7.7633e-01, 9.4005e-01, 7.7454e-01, + 2.4800e-01, 1.9198e-01, 1.1211e-02, 9.5570e-01, + 2.5893e-01, 9.6021e-01, 1.3908e-01, 1.1804e-01, + 2.0655e-01, 2.5434e-01, 2.1264e-01, 1.7281e-01, + 7.1567e-02, 6.7535e-01, 3.7410e-01, 4.8327e-01, + 6.8231e-01, 1.9468e-01, 4.1573e-01, 2.3954e-01, + 5.0490e-01, 6.1368e-01, 2.3484e-02, 5.2961e-01, + 2.6565e-01, 2.7586e-01, 4.9329e-01, 
6.2392e-01, + 3.7622e-01, 5.0887e-01, 6.8648e-01, 3.3176e-01, + 2.8325e-01, 4.1615e-01, 4.9558e-01, 3.2988e-01, + 8.4707e-02, 3.4267e-01, 8.4326e-01, 8.9372e-01, + 5.0974e-01, 2.8287e-01, 5.9186e-01, 3.0996e-01, + 7.6409e-01, 7.0890e-01, 2.2737e-01, 3.6292e-01, + 3.8446e-02, 1.6772e-01, 5.9850e-02, 7.5418e-01, + 1.5142e-01, 9.2527e-02, 4.2487e-01, 6.2427e-01, + 4.8719e-01, 3.6504e-01, 2.3157e-01, 4.2160e-01, + 6.3014e-01, 7.5451e-01, 6.6881e-01, 5.0257e-01, + 7.0750e-01, 3.3808e-04, 1.1733e-01, 2.5561e-01, + 2.8932e-01, 2.9238e-01, 4.9687e-01, 5.6535e-01, + 5.6395e-01, 7.1426e-01, 5.3330e-01, 6.5220e-01, + 5.9182e-01, 4.5308e-01, 4.0896e-01, 1.8641e-01, + 3.5124e-01, 1.6527e-01, 6.9706e-01, 2.1298e-01, + 1.7984e-01, 4.8733e-01, 3.4480e-01, 9.8324e-01, + 4.6122e-01, 6.0673e-01, 6.6544e-01, 5.2080e-01, + 2.4066e-01, 1.9742e-01, 5.4138e-01, 8.0746e-01, + 6.3658e-01, 7.1402e-01, 4.2173e-01, 8.2907e-01, + 7.5547e-01, 3.7213e-01, 6.6507e-01, 2.5612e-01, + 1.5830e-01, 7.5655e-01, 9.7480e-01, 4.0051e-01, + 5.7804e-03, 8.2709e-01, 4.2182e-01, 4.4408e-01, + 8.9222e-01, 3.6208e-01, 8.9519e-01, 2.5600e-01, + 1.8632e-01, 9.8365e-01, 8.1851e-02, 5.2123e-01, + 9.5804e-01, 5.6319e-03, 5.9916e-01, 1.5515e-01, + 9.1388e-01, 8.6155e-01, 4.1283e-01, 2.0695e-01, + 6.8297e-01, 5.4021e-01, 1.9099e-01, 5.1440e-01, + 8.1793e-01, 1.3168e-01, 1.8958e-01, 6.2631e-01, + 8.6577e-01, 4.4099e-01, 1.4760e-01, 1.3065e-01, + 7.5405e-01, 2.6639e-01, 4.7266e-01, 8.4939e-01, + 3.1579e-01, 9.1292e-02, 6.1113e-01, 9.2818e-02, + 2.0669e-01, 1.2783e-01, 9.3739e-01, 1.3247e-01, + 2.9160e-02, 6.9776e-01, 8.8792e-01, 7.3849e-01, + 3.1319e-01, 9.7556e-01, 4.3062e-01, 2.6838e-01, + 9.6352e-01, 4.5384e-01, 5.3614e-01, 7.6115e-01, + 7.8346e-02, 9.7936e-01, 2.0486e-01, 3.3608e-01, + 9.7405e-01, 6.3823e-01, 5.2497e-01, 7.9240e-01, + 5.6798e-01, 4.8818e-01, 9.9144e-01, 9.0264e-01, + 7.2435e-01, 1.8029e-01, 1.9755e-01, 6.8914e-01, + 6.9843e-01, 3.5573e-01, 5.2406e-01, 1.1539e-01, + 8.4006e-01, 1.1812e-01, 5.2865e-01, 7.6911e-01, + 6.8536e-01, 6.7350e-01, 5.9683e-01, 6.0014e-01, + 9.6642e-01, 1.8403e-01, 3.0091e-01, 5.1426e-02, + 9.1433e-01, 3.3716e-01, 5.0284e-01, 7.4283e-01, + 2.7395e-02, 1.2681e-01, 6.1057e-01, 9.2038e-01, + 1.7242e-01, 5.0040e-01, 1.4167e-01, 9.2800e-01, + 3.1623e-01, 6.6496e-01, 4.6361e-01, 4.2210e-01, + 5.4576e-01, 7.4592e-01, 6.7106e-01, 2.2625e-01, + 1.7871e-01, 2.0941e-01, 4.0171e-01, 5.2365e-02, + 9.3492e-02, 6.2121e-01, 5.6627e-01, 4.8088e-01, + 3.4543e-01, 6.5256e-01, 3.8111e-01, 5.0820e-01, + 6.4190e-01, 9.4368e-01, 9.4619e-01, 1.4001e-01, + 9.6033e-01, 2.7958e-01, 7.0197e-01, 6.1710e-01, + 2.7678e-01, 5.4832e-01, 7.9254e-01, 8.3342e-01, + 6.2200e-01, 8.3286e-01, 9.9807e-01, 6.2790e-02, + 8.0453e-01, 1.0971e-01, 8.1520e-01, 3.9444e-01, + 6.3046e-01, 4.7325e-01, 4.5247e-01, 6.8061e-02, + 1.5491e-01, 4.0141e-03, 7.3446e-01, 6.2330e-02, + 9.0610e-01, 8.6523e-01, 4.7703e-01, 7.2769e-01, + 5.6920e-02, 4.7380e-01, 6.2567e-01, 9.8501e-01, + 6.2170e-01, 1.8872e-01, 1.3379e-02, 3.4054e-01, + 7.6416e-01, 3.8986e-01, 5.3500e-01, 9.8245e-01, + 7.0761e-01, 2.2468e-01, 5.4399e-01, 3.9694e-02, + 7.6734e-01, 1.0932e-01, 1.2210e-01, 7.1353e-01, + 8.0572e-01, 2.3582e-01, 9.8215e-01, 1.1015e-01, + 8.9113e-03, 7.1177e-01, 1.7986e-01, 4.5091e-01, + 1.7810e-01, 7.7559e-01, 9.4792e-01, 3.8139e-01, + 1.1546e-01, 7.6492e-01, 7.9508e-01, 8.6260e-01, + 9.6169e-01, 8.0818e-02, 2.8676e-02, 7.0994e-01, + 5.3818e-01, 2.6338e-01, 1.4087e-01, 6.8694e-01, + 2.4939e-01, 7.6196e-01, 6.6186e-01, 8.5655e-01, + 2.6657e-01, 6.6963e-01, 1.3788e-01, 
4.1556e-01, + 1.9527e-01, 3.4537e-01, 1.4042e-01, 6.6402e-01, + 3.1351e-01, 6.6071e-01, 2.9595e-01, 9.6000e-01, + 7.2292e-01, 5.6025e-01, 7.7751e-01, 1.0276e-01, + 6.5010e-01, 1.5068e-01, 5.8440e-01, 3.1344e-01, + 1.3546e-01, 2.9211e-01, 4.6813e-01, 2.4935e-02, + 9.6457e-02, 7.4214e-01, 4.3047e-02, 2.1731e-01, + 3.8465e-01, 9.4445e-01, 6.5976e-01, 8.8821e-01, + 5.0696e-01, 5.1792e-01, 4.7107e-01, 5.6703e-01, + 1.5516e-01, 8.0371e-01, 6.5735e-02, 9.3342e-01, + 7.5234e-01, 9.3810e-01, 8.0659e-02, 7.2725e-01, + 7.4287e-01, 8.0905e-01, 5.7371e-01, 8.0566e-01, + 8.6814e-01, 9.0878e-01, 8.1847e-02, 9.7577e-01, + 5.2302e-01, 6.0315e-01, 7.6834e-01, 9.3738e-01, + 5.8105e-01, 6.2760e-01, 7.9012e-01, 7.6416e-01, + 9.0308e-01, 7.2939e-01, 6.2535e-01, 8.1521e-01, + 4.6661e-01, 6.4054e-01, 6.4950e-01, 1.2072e-01, + 6.6340e-01, 6.0429e-01, 4.3164e-02, 8.3696e-02, + 8.7069e-01, 8.1124e-01, 5.2189e-01, 2.7111e-01, + 7.0551e-01, 6.3514e-02, 9.6973e-01, 6.3092e-01, + 5.8354e-01, 1.5335e-01, 7.9160e-01, 1.2117e-02, + 6.9520e-01, 6.4075e-01, 6.0235e-01, 6.8160e-02, + 2.6503e-01, 6.1870e-01, 6.3837e-01, 1.2293e-01, + 7.3640e-01, 2.3190e-01, 5.3413e-01, 4.5806e-01, + 2.4139e-01, 2.6287e-02, 3.6355e-01, 4.1787e-01, + 3.1233e-01, 4.8046e-01, 9.4417e-01, 6.1386e-01, + 4.9249e-01, 3.1649e-01, 9.6072e-01, 7.6183e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4905, 0.9858, 0.4305, ..., 0.2183, 0.4525, 0.0992]) +tensor([0.0260, 0.2662, 0.7797, ..., 0.3158, 0.7779, 0.5618]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1295,13 +1026,389 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.263301372528076 seconds +Time: 10.251019954681396 seconds -[18.52, 17.73, 17.82, 17.87, 17.98, 18.03, 17.91, 19.05, 18.64, 18.02] -[73.21] -13.981836557388306 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 285101, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.263301372528076, 'TIME_S_1KI': 0.03599882628446788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1023.6102543663977, 'W': 73.21} -[18.52, 17.73, 17.82, 17.87, 17.98, 18.03, 17.91, 19.05, 18.64, 18.02, 18.85, 18.33, 18.07, 17.99, 17.95, 17.9, 17.98, 18.04, 17.93, 17.89] -325.86 -16.293 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 285101, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.263301372528076, 'TIME_S_1KI': 0.03599882628446788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1023.6102543663977, 'W': 73.21, 'J_1KI': 3.5903425605886956, 'W_1KI': 0.2567861915601839, 'W_D': 56.916999999999994, 'J_D': 795.8041913368701, 'W_D_1KI': 0.1996380230164047, 'J_D_1KI': 0.0007002361374264022} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 999, 1000, 1000]), + col_indices=tensor([6337, 6253, 6887, 9242, 4768, 6015, 4529, 9548, 1595, + 4824, 8428, 3021, 6404, 9778, 1690, 6193, 9247, 3032, + 2497, 1684, 4366, 1179, 3199, 1210, 8827, 260, 8366, + 6198, 7506, 3241, 875, 1272, 4944, 5203, 1187, 5756, + 211, 2088, 9817, 9835, 6021, 3840, 2185, 4006, 1537, + 2995, 6201, 8435, 8923, 8907, 7515, 7430, 5822, 9922, + 2044, 7750, 7906, 1082, 6451, 154, 4143, 213, 6863, + 5696, 3741, 3326, 9758, 2705, 5798, 105, 5494, 3610, + 3202, 5160, 7226, 8546, 5325, 1924, 7174, 7551, 1995, + 855, 1391, 4170, 9209, 1447, 5275, 2681, 1810, 8276, + 7234, 5988, 3014, 3695, 4405, 9809, 1200, 4282, 548, + 2865, 8197, 9815, 4638, 337, 5602, 690, 7426, 3370, + 7307, 9723, 9052, 2016, 3607, 4832, 2053, 7790, 7536, + 6270, 8193, 8651, 2372, 9909, 9570, 6080, 9324, 7410, + 5175, 8046, 7677, 2213, 7718, 7630, 3775, 5050, 6485, + 850, 2674, 7769, 5577, 3838, 5701, 1650, 9534, 7011, + 1890, 7869, 8541, 3508, 7428, 310, 6110, 4723, 8960, + 2453, 8119, 4785, 590, 935, 3774, 5197, 7485, 8842, + 6189, 4511, 4178, 9552, 2567, 9282, 4565, 4874, 3063, + 138, 6925, 2428, 3891, 6900, 6790, 5627, 1703, 3014, + 9860, 3749, 6146, 2541, 3788, 9915, 628, 5299, 318, + 3903, 6794, 6177, 7188, 570, 9964, 7280, 1104, 9401, + 6043, 7474, 1969, 7967, 907, 9905, 707, 1999, 4973, + 8937, 9361, 3946, 8868, 8995, 8271, 8640, 2016, 9553, + 6026, 1566, 7895, 5868, 6156, 9464, 3169, 7935, 208, + 2840, 6989, 282, 4248, 7859, 2346, 3311, 5676, 652, + 7209, 2733, 9915, 1803, 5814, 8336, 3664, 7836, 1371, + 1609, 8360, 4353, 1056, 489, 4044, 5190, 9267, 4537, + 7884, 4517, 6422, 7329, 6479, 5051, 3973, 8436, 253, + 5317, 6149, 2483, 8523, 2586, 3339, 5210, 7172, 9964, + 5416, 545, 8960, 9405, 4565, 6937, 6131, 6599, 4989, + 7748, 863, 847, 4903, 4347, 8933, 508, 4128, 7115, + 5357, 5191, 4282, 6775, 1030, 3238, 3809, 1005, 2495, + 4552, 3779, 1572, 9609, 7137, 259, 4849, 334, 4810, + 4903, 6322, 7942, 9657, 4189, 5075, 8964, 634, 1590, + 4380, 4733, 6906, 6146, 3898, 3758, 6545, 9449, 4029, + 7898, 3163, 3639, 1942, 9232, 9128, 3242, 6813, 6174, + 7219, 2775, 5894, 8490, 9712, 1365, 5562, 9335, 2632, + 4451, 5670, 1434, 5293, 7618, 4448, 5133, 6206, 7403, + 9277, 3917, 8082, 4877, 8977, 4087, 8282, 4673, 4174, + 8024, 4123, 7809, 6063, 8967, 7477, 5136, 2097, 5627, + 1222, 6507, 3842, 508, 3319, 4641, 6974, 1751, 7198, + 8348, 1442, 35, 1644, 8065, 1995, 673, 7425, 3797, + 6591, 9260, 4093, 2659, 4276, 7434, 2094, 2302, 5152, + 1175, 7236, 9174, 5000, 8426, 9863, 4907, 5968, 7517, + 6799, 5216, 6239, 5631, 9895, 4625, 662, 9866, 4327, + 4338, 678, 2458, 275, 6859, 1032, 8384, 329, 4693, + 3648, 5273, 9909, 942, 8364, 9995, 2715, 1897, 2171, + 1546, 2525, 3810, 2805, 8453, 3081, 9965, 1581, 1935, + 4572, 5694, 5361, 5098, 8858, 9344, 5261, 8527, 1107, + 1160, 9856, 4177, 7423, 494, 296, 269, 2605, 8887, + 3358, 9924, 7644, 3963, 8756, 7728, 1689, 1517, 8896, + 738, 5342, 9971, 7299, 233, 3569, 4255, 7068, 8925, + 2434, 1166, 9286, 5417, 6075, 3889, 6567, 9735, 6644, + 6788, 4533, 5705, 9314, 2299, 4410, 8484, 148, 8607, + 7159, 3505, 2676, 3474, 2218, 4608, 7333, 8149, 4788, + 2661, 9885, 9028, 4675, 6760, 438, 449, 1577, 7010, + 6021, 2054, 8064, 8112, 696, 4240, 9566, 9924, 8269, + 430, 8850, 51, 5782, 4715, 8674, 3485, 5265, 8332, + 3575, 6476, 305, 5968, 5003, 537, 2220, 1503, 9358, + 8506, 8170, 66, 5381, 8645, 4683, 2387, 8102, 1837, + 6751, 3809, 6175, 8349, 5990, 4193, 6255, 9433, 3491, + 
5504, 6724, 9423, 2713, 9402, 7125, 2047, 9087, 4430, + 2982, 3288, 5762, 7384, 1945, 4164, 4058, 5582, 7730, + 233, 6512, 5793, 2294, 8055, 2942, 4065, 2856, 2579, + 2004, 9512, 654, 4457, 7364, 7533, 264, 3614, 8663, + 2785, 6683, 9346, 6694, 2353, 6233, 7366, 6692, 8681, + 7898, 8152, 8797, 5218, 9637, 3345, 6199, 9330, 6921, + 1265, 6277, 1965, 4451, 4004, 1426, 1705, 3872, 2572, + 72, 2018, 3157, 1057, 4137, 3275, 1963, 3933, 4474, + 5712, 4713, 5460, 249, 886, 4523, 8892, 6793, 7692, + 8370, 4264, 6526, 3842, 803, 3573, 1941, 4436, 9920, + 2420, 5483, 2367, 8018, 7268, 1307, 6340, 2067, 1686, + 8589, 9747, 1983, 1688, 5896, 3769, 1189, 2727, 1016, + 3715, 1681, 6867, 8527, 4960, 8462, 8940, 168, 3808, + 6150, 8565, 7884, 8405, 4063, 3171, 1186, 5059, 8903, + 6602, 875, 4517, 7168, 8441, 6379, 4580, 6052, 8218, + 7332, 1238, 8354, 4989, 8887, 4935, 1443, 6584, 9867, + 2425, 267, 8798, 1150, 4533, 553, 7057, 5527, 7207, + 6414, 5989, 9638, 1960, 9222, 2058, 22, 1474, 3538, + 3890, 768, 5469, 4336, 982, 2699, 3204, 5837, 4786, + 6115, 9621, 2847, 703, 8979, 6693, 515, 187, 5765, + 2434, 9356, 9680, 3382, 7846, 7488, 613, 5938, 1825, + 3669, 1229, 3490, 7397, 3003, 2656, 8622, 4760, 705, + 4617, 6800, 342, 6968, 3077, 3096, 5013, 1452, 8703, + 262, 1019, 9628, 9426, 9699, 2398, 6727, 4819, 4621, + 7759, 2580, 5304, 3929, 2764, 7905, 991, 4293, 8126, + 7711, 9992, 2619, 9355, 9384, 1466, 2134, 6993, 2634, + 8618, 1513, 8251, 8332, 1426, 68, 1411, 8056, 7127, + 7954, 3753, 5389, 8765, 5868, 2958, 1390, 8317, 5798, + 5311, 1034, 2359, 9360, 51, 8700, 9949, 4824, 6110, + 215, 3236, 3437, 1136, 9944, 5098, 279, 62, 8279, + 4624, 6047, 8365, 4369, 1334, 9676, 381, 8188, 2016, + 8679, 4525, 2209, 6727, 5230, 9965, 892, 3143, 4106, + 40, 3303, 4214, 691, 3846, 9913, 5788, 3178, 2173, + 8316, 9434, 6626, 3122, 5836, 3842, 3987, 1995, 9148, + 3515, 1475, 7570, 7970, 4351, 10, 715, 9759, 797, + 4144, 6357, 4175, 9929, 1530, 4865, 7625, 1650, 7175, + 1441, 4178, 6150, 3161, 334, 4730, 4184, 9595, 7705, + 430, 8106, 8496, 6095, 4084, 489, 2087, 6737, 3907, + 3469, 2240, 9853, 8414, 9187, 8777, 8268, 9967, 3862, + 1163, 2985, 5790, 1525, 6521, 9596, 9952, 3337, 6163, + 6802, 6571, 4350, 8511, 2081, 5623, 9045, 3126, 3216, + 1296, 2667, 5515, 6956, 2110, 7575, 9209, 4764, 5757, + 643, 7008, 5202, 9361, 7777, 8025, 4650, 1583, 9443, + 7103, 8949, 1343, 518, 6597, 6770, 1562, 9497, 160, + 580, 6210, 4358, 6102, 1255, 4401, 3687, 3603, 8736, + 8151, 3736, 8179, 474, 3654, 6081, 6883, 5062, 5121, + 1157, 1241, 6973, 3927, 2212, 1429, 9900, 1999, 8912, + 212, 7231, 7655, 3276, 4066, 3390, 5316, 7132, 8466, + 3851, 3041, 2473, 5050, 3587, 8845, 3409, 2223, 3030, + 3844]), + values=tensor([8.2393e-01, 1.5019e-01, 8.5274e-02, 1.3200e-01, + 6.9695e-01, 8.8007e-01, 2.6063e-01, 2.6633e-01, + 1.5895e-01, 3.2459e-01, 7.9944e-01, 7.9665e-01, + 5.7807e-01, 9.3262e-01, 4.0176e-01, 3.9150e-01, + 6.4199e-01, 5.2198e-01, 5.0451e-02, 5.6095e-01, + 1.9517e-01, 5.8229e-01, 5.9680e-01, 6.3858e-01, + 6.0167e-01, 5.9238e-01, 5.4524e-01, 5.5850e-01, + 4.6533e-01, 8.0528e-01, 4.3126e-01, 3.8328e-02, + 2.6529e-01, 6.6051e-01, 1.2248e-01, 8.8354e-01, + 4.6105e-01, 1.4615e-01, 5.3722e-01, 2.9376e-01, + 1.1100e-01, 2.8144e-01, 6.9849e-01, 6.0340e-01, + 2.6582e-01, 1.3794e-01, 5.4512e-01, 2.6792e-01, + 9.2262e-01, 9.4629e-01, 4.5982e-01, 8.0294e-01, + 9.6663e-01, 3.5252e-01, 1.4888e-01, 3.9557e-01, + 5.5774e-01, 8.2395e-01, 2.9728e-01, 1.6107e-01, + 4.9380e-02, 3.3702e-01, 2.9645e-01, 9.9303e-01, + 5.1034e-01, 4.1965e-01, 4.9080e-01, 
9.5543e-01, + 6.9234e-01, 3.4877e-02, 6.6442e-01, 9.4141e-01, + 4.0285e-01, 6.6260e-03, 7.4368e-01, 7.0828e-01, + 5.0894e-01, 8.5305e-01, 5.5481e-02, 8.2447e-01, + 1.6340e-01, 7.3925e-01, 4.3103e-01, 4.8413e-01, + 2.9843e-01, 1.0276e-01, 8.2942e-01, 2.2059e-01, + 7.2107e-01, 5.7189e-01, 7.2616e-01, 8.6178e-02, + 2.1037e-02, 1.1812e-01, 7.5470e-01, 4.6164e-01, + 5.4577e-01, 3.3790e-02, 4.1304e-01, 9.4406e-01, + 5.2676e-01, 5.6383e-01, 4.2839e-01, 5.2438e-01, + 3.0384e-01, 6.3788e-01, 9.1300e-01, 3.1034e-01, + 4.0981e-01, 4.7271e-01, 4.5633e-01, 4.7760e-01, + 1.1020e-01, 2.0795e-02, 2.1505e-02, 3.2759e-01, + 8.9685e-01, 2.9522e-02, 7.9562e-01, 8.1609e-02, + 6.4739e-01, 1.1012e-01, 1.0288e-01, 2.9924e-01, + 6.1639e-01, 4.0882e-01, 2.4966e-01, 7.7845e-01, + 8.5243e-01, 1.1680e-01, 8.5183e-03, 4.1251e-01, + 1.3189e-01, 3.6699e-01, 2.2821e-01, 8.8812e-01, + 8.9146e-01, 2.6685e-01, 7.8972e-01, 6.2874e-01, + 6.5315e-01, 7.5277e-01, 4.3741e-01, 5.3894e-01, + 5.8431e-01, 2.6621e-01, 8.8110e-01, 6.5876e-01, + 2.2655e-01, 6.8296e-01, 6.9625e-01, 9.3140e-01, + 4.0692e-01, 5.1780e-02, 5.5894e-02, 1.2944e-01, + 3.6693e-01, 4.1609e-01, 1.7315e-01, 2.7629e-02, + 2.5275e-01, 2.8886e-01, 5.8279e-01, 2.8189e-01, + 3.6442e-01, 2.6982e-03, 7.7985e-01, 2.9648e-01, + 1.4814e-01, 7.0420e-01, 2.0899e-01, 9.6353e-01, + 6.4830e-01, 4.0625e-01, 6.1961e-02, 9.6137e-01, + 7.4018e-01, 5.0172e-02, 2.1344e-01, 6.1846e-02, + 3.1852e-02, 1.9861e-02, 1.7409e-01, 7.6915e-01, + 3.3864e-01, 8.0804e-01, 9.1722e-01, 5.3964e-01, + 9.9293e-01, 6.2878e-01, 8.7881e-01, 6.7693e-01, + 6.7201e-02, 1.6527e-01, 9.3481e-01, 4.3980e-01, + 4.0867e-01, 2.9267e-01, 6.8959e-01, 2.4241e-01, + 7.8145e-03, 5.9585e-01, 6.0192e-01, 9.7289e-01, + 5.1873e-01, 3.8243e-01, 4.5731e-01, 9.5420e-02, + 5.2338e-01, 5.8730e-01, 1.0387e-02, 1.6742e-01, + 1.0675e-01, 2.3090e-01, 9.0040e-04, 8.6833e-01, + 8.5394e-01, 8.6286e-01, 3.5583e-01, 3.8844e-01, + 2.8401e-02, 6.9319e-01, 9.6281e-01, 9.4490e-01, + 5.9538e-01, 5.8005e-01, 8.4878e-01, 3.8606e-01, + 7.1957e-01, 8.7779e-01, 4.6601e-02, 6.9247e-01, + 2.8323e-01, 4.4013e-01, 3.4905e-01, 7.0103e-01, + 8.4005e-01, 1.8965e-01, 9.6502e-01, 6.9016e-01, + 2.7636e-01, 9.9683e-01, 6.3166e-01, 3.5829e-01, + 3.5663e-01, 7.4335e-01, 3.2532e-01, 4.8880e-01, + 4.8332e-01, 5.0647e-01, 1.3356e-01, 5.7495e-01, + 7.3269e-01, 3.5103e-01, 4.5958e-01, 7.1566e-01, + 9.0254e-01, 6.5628e-01, 7.9665e-01, 6.7599e-01, + 6.3119e-01, 7.7948e-01, 4.1314e-01, 1.4388e-01, + 6.3143e-01, 9.8648e-01, 1.1911e-01, 8.6919e-01, + 8.6949e-01, 4.0618e-01, 9.9670e-01, 3.7990e-01, + 6.2679e-02, 9.6655e-01, 6.6141e-01, 4.4495e-01, + 5.4496e-01, 9.9103e-01, 8.1184e-01, 4.9858e-01, + 7.0636e-02, 2.9484e-01, 6.2061e-01, 5.2765e-01, + 9.4633e-01, 6.7516e-01, 8.7235e-01, 8.8024e-03, + 7.8317e-01, 7.1939e-01, 1.3820e-01, 5.7895e-02, + 8.5905e-01, 2.1049e-01, 3.6238e-01, 4.8634e-01, + 7.7892e-01, 1.8014e-02, 6.1106e-01, 8.9913e-01, + 4.7975e-01, 5.3033e-01, 4.9454e-02, 6.5441e-01, + 4.3588e-01, 4.5978e-01, 7.9851e-01, 5.8266e-01, + 8.6943e-01, 1.8774e-01, 9.4717e-01, 1.4729e-01, + 1.6653e-01, 3.4556e-01, 9.3043e-01, 7.5589e-01, + 3.3972e-02, 4.5488e-01, 2.9637e-02, 1.0942e-01, + 5.9212e-01, 8.9300e-01, 1.1449e-01, 7.3050e-02, + 6.8841e-01, 1.1163e-01, 8.9769e-01, 1.8155e-01, + 1.6967e-02, 5.1254e-01, 4.3338e-01, 8.5761e-01, + 5.3372e-01, 4.9336e-01, 7.0137e-01, 7.6005e-01, + 8.2441e-02, 9.6106e-01, 6.4389e-01, 2.7839e-01, + 1.8078e-02, 1.6927e-01, 1.7373e-01, 8.2545e-01, + 9.8833e-01, 6.1096e-01, 3.9049e-01, 9.8441e-01, + 7.2349e-01, 3.1303e-01, 2.0875e-02, 
3.7698e-01, + 9.5393e-01, 2.3365e-01, 1.5516e-01, 1.5819e-01, + 5.1214e-01, 6.8187e-01, 6.8530e-01, 3.6052e-01, + 2.5738e-01, 3.9612e-01, 7.4286e-01, 7.2272e-01, + 6.6211e-01, 7.2587e-01, 9.4744e-01, 2.2103e-01, + 5.6878e-01, 1.0684e-01, 7.3965e-01, 1.8014e-01, + 9.5019e-01, 9.4053e-01, 9.3201e-01, 7.2668e-01, + 9.9669e-01, 3.2227e-02, 9.0540e-01, 6.3320e-01, + 9.0097e-02, 7.6524e-01, 2.0366e-01, 7.8086e-02, + 3.4722e-02, 4.3548e-01, 8.4004e-01, 3.7208e-01, + 2.7096e-01, 9.7185e-01, 2.4165e-01, 2.0683e-01, + 7.3755e-01, 3.5581e-01, 3.0437e-01, 7.8542e-01, + 5.2676e-02, 5.3939e-01, 8.0517e-01, 9.7260e-02, + 5.7463e-01, 5.7621e-01, 8.1216e-01, 5.0011e-01, + 8.1083e-01, 9.3175e-01, 7.0638e-01, 3.9207e-01, + 3.2076e-01, 6.4083e-01, 6.5601e-01, 4.3012e-02, + 7.3732e-01, 4.1352e-01, 8.6784e-01, 7.5795e-01, + 2.1025e-01, 8.5163e-01, 9.1445e-01, 3.1700e-01, + 1.3861e-01, 2.3234e-01, 1.5243e-01, 5.9649e-01, + 8.1250e-01, 7.9491e-01, 9.1780e-01, 2.5034e-01, + 6.0943e-01, 8.0713e-01, 2.3685e-01, 4.8337e-02, + 7.6438e-01, 2.8810e-01, 9.0052e-01, 2.0727e-02, + 1.5070e-01, 2.4464e-01, 6.7879e-01, 5.0227e-01, + 4.1695e-01, 5.9048e-01, 7.5940e-01, 8.1938e-01, + 3.3075e-02, 2.3685e-01, 9.4062e-01, 7.0140e-01, + 7.4609e-01, 5.3763e-01, 4.1158e-01, 3.8017e-01, + 2.0844e-01, 4.9449e-01, 3.9290e-01, 9.4230e-01, + 3.6105e-01, 2.0854e-01, 8.3946e-01, 7.9189e-01, + 5.7396e-01, 4.5173e-01, 3.3482e-01, 8.9965e-01, + 7.6627e-01, 4.6385e-01, 8.8760e-01, 7.1382e-01, + 9.2541e-01, 6.3113e-01, 9.1679e-01, 1.4940e-01, + 2.6223e-01, 6.7949e-01, 2.3126e-01, 4.4777e-01, + 4.8690e-01, 4.6878e-01, 9.2394e-01, 9.0957e-02, + 7.4730e-02, 8.1567e-01, 1.4386e-01, 8.4355e-02, + 6.3291e-01, 1.5873e-01, 1.3449e-01, 2.7177e-01, + 9.8144e-01, 8.8629e-02, 4.1631e-01, 8.2361e-01, + 3.9964e-01, 5.3625e-01, 6.4722e-01, 3.1974e-01, + 7.4877e-01, 4.1946e-01, 1.6767e-01, 9.5301e-01, + 1.1349e-01, 9.3522e-01, 5.6833e-01, 2.4655e-01, + 5.7035e-01, 9.4009e-01, 2.7573e-01, 6.8067e-02, + 3.8545e-01, 3.5183e-01, 9.3628e-01, 2.2545e-01, + 5.2786e-01, 5.1525e-01, 8.8308e-01, 3.0606e-01, + 8.8008e-01, 2.0526e-01, 1.3389e-01, 9.1710e-01, + 4.0652e-01, 3.3488e-01, 1.5011e-02, 1.1502e-01, + 4.1701e-01, 9.8652e-01, 4.1692e-01, 2.7305e-01, + 5.2481e-01, 7.1756e-01, 8.2327e-01, 6.5926e-01, + 7.9342e-01, 1.5732e-01, 9.6197e-01, 9.1746e-01, + 9.7947e-01, 1.6791e-01, 4.2411e-01, 2.8344e-01, + 4.7260e-01, 9.8220e-01, 2.1073e-01, 9.6048e-01, + 9.8275e-01, 7.2793e-01, 8.3146e-01, 3.0882e-01, + 5.6035e-01, 7.5521e-02, 3.4334e-01, 5.8304e-01, + 1.1692e-01, 6.2285e-01, 5.7510e-01, 2.3869e-01, + 2.1560e-01, 6.7674e-01, 8.3941e-01, 6.5056e-01, + 7.4357e-01, 4.0811e-01, 8.7028e-01, 1.4760e-01, + 7.6489e-01, 2.6693e-01, 6.0770e-01, 6.7832e-01, + 9.1858e-01, 1.8055e-01, 2.9557e-01, 1.5732e-01, + 9.1102e-02, 1.7503e-01, 1.1015e-01, 9.3398e-01, + 8.2965e-01, 8.8696e-01, 7.3273e-01, 3.4421e-01, + 4.3937e-02, 7.7633e-01, 9.4005e-01, 7.7454e-01, + 2.4800e-01, 1.9198e-01, 1.1211e-02, 9.5570e-01, + 2.5893e-01, 9.6021e-01, 1.3908e-01, 1.1804e-01, + 2.0655e-01, 2.5434e-01, 2.1264e-01, 1.7281e-01, + 7.1567e-02, 6.7535e-01, 3.7410e-01, 4.8327e-01, + 6.8231e-01, 1.9468e-01, 4.1573e-01, 2.3954e-01, + 5.0490e-01, 6.1368e-01, 2.3484e-02, 5.2961e-01, + 2.6565e-01, 2.7586e-01, 4.9329e-01, 6.2392e-01, + 3.7622e-01, 5.0887e-01, 6.8648e-01, 3.3176e-01, + 2.8325e-01, 4.1615e-01, 4.9558e-01, 3.2988e-01, + 8.4707e-02, 3.4267e-01, 8.4326e-01, 8.9372e-01, + 5.0974e-01, 2.8287e-01, 5.9186e-01, 3.0996e-01, + 7.6409e-01, 7.0890e-01, 2.2737e-01, 3.6292e-01, + 3.8446e-02, 1.6772e-01, 5.9850e-02, 
7.5418e-01, + 1.5142e-01, 9.2527e-02, 4.2487e-01, 6.2427e-01, + 4.8719e-01, 3.6504e-01, 2.3157e-01, 4.2160e-01, + 6.3014e-01, 7.5451e-01, 6.6881e-01, 5.0257e-01, + 7.0750e-01, 3.3808e-04, 1.1733e-01, 2.5561e-01, + 2.8932e-01, 2.9238e-01, 4.9687e-01, 5.6535e-01, + 5.6395e-01, 7.1426e-01, 5.3330e-01, 6.5220e-01, + 5.9182e-01, 4.5308e-01, 4.0896e-01, 1.8641e-01, + 3.5124e-01, 1.6527e-01, 6.9706e-01, 2.1298e-01, + 1.7984e-01, 4.8733e-01, 3.4480e-01, 9.8324e-01, + 4.6122e-01, 6.0673e-01, 6.6544e-01, 5.2080e-01, + 2.4066e-01, 1.9742e-01, 5.4138e-01, 8.0746e-01, + 6.3658e-01, 7.1402e-01, 4.2173e-01, 8.2907e-01, + 7.5547e-01, 3.7213e-01, 6.6507e-01, 2.5612e-01, + 1.5830e-01, 7.5655e-01, 9.7480e-01, 4.0051e-01, + 5.7804e-03, 8.2709e-01, 4.2182e-01, 4.4408e-01, + 8.9222e-01, 3.6208e-01, 8.9519e-01, 2.5600e-01, + 1.8632e-01, 9.8365e-01, 8.1851e-02, 5.2123e-01, + 9.5804e-01, 5.6319e-03, 5.9916e-01, 1.5515e-01, + 9.1388e-01, 8.6155e-01, 4.1283e-01, 2.0695e-01, + 6.8297e-01, 5.4021e-01, 1.9099e-01, 5.1440e-01, + 8.1793e-01, 1.3168e-01, 1.8958e-01, 6.2631e-01, + 8.6577e-01, 4.4099e-01, 1.4760e-01, 1.3065e-01, + 7.5405e-01, 2.6639e-01, 4.7266e-01, 8.4939e-01, + 3.1579e-01, 9.1292e-02, 6.1113e-01, 9.2818e-02, + 2.0669e-01, 1.2783e-01, 9.3739e-01, 1.3247e-01, + 2.9160e-02, 6.9776e-01, 8.8792e-01, 7.3849e-01, + 3.1319e-01, 9.7556e-01, 4.3062e-01, 2.6838e-01, + 9.6352e-01, 4.5384e-01, 5.3614e-01, 7.6115e-01, + 7.8346e-02, 9.7936e-01, 2.0486e-01, 3.3608e-01, + 9.7405e-01, 6.3823e-01, 5.2497e-01, 7.9240e-01, + 5.6798e-01, 4.8818e-01, 9.9144e-01, 9.0264e-01, + 7.2435e-01, 1.8029e-01, 1.9755e-01, 6.8914e-01, + 6.9843e-01, 3.5573e-01, 5.2406e-01, 1.1539e-01, + 8.4006e-01, 1.1812e-01, 5.2865e-01, 7.6911e-01, + 6.8536e-01, 6.7350e-01, 5.9683e-01, 6.0014e-01, + 9.6642e-01, 1.8403e-01, 3.0091e-01, 5.1426e-02, + 9.1433e-01, 3.3716e-01, 5.0284e-01, 7.4283e-01, + 2.7395e-02, 1.2681e-01, 6.1057e-01, 9.2038e-01, + 1.7242e-01, 5.0040e-01, 1.4167e-01, 9.2800e-01, + 3.1623e-01, 6.6496e-01, 4.6361e-01, 4.2210e-01, + 5.4576e-01, 7.4592e-01, 6.7106e-01, 2.2625e-01, + 1.7871e-01, 2.0941e-01, 4.0171e-01, 5.2365e-02, + 9.3492e-02, 6.2121e-01, 5.6627e-01, 4.8088e-01, + 3.4543e-01, 6.5256e-01, 3.8111e-01, 5.0820e-01, + 6.4190e-01, 9.4368e-01, 9.4619e-01, 1.4001e-01, + 9.6033e-01, 2.7958e-01, 7.0197e-01, 6.1710e-01, + 2.7678e-01, 5.4832e-01, 7.9254e-01, 8.3342e-01, + 6.2200e-01, 8.3286e-01, 9.9807e-01, 6.2790e-02, + 8.0453e-01, 1.0971e-01, 8.1520e-01, 3.9444e-01, + 6.3046e-01, 4.7325e-01, 4.5247e-01, 6.8061e-02, + 1.5491e-01, 4.0141e-03, 7.3446e-01, 6.2330e-02, + 9.0610e-01, 8.6523e-01, 4.7703e-01, 7.2769e-01, + 5.6920e-02, 4.7380e-01, 6.2567e-01, 9.8501e-01, + 6.2170e-01, 1.8872e-01, 1.3379e-02, 3.4054e-01, + 7.6416e-01, 3.8986e-01, 5.3500e-01, 9.8245e-01, + 7.0761e-01, 2.2468e-01, 5.4399e-01, 3.9694e-02, + 7.6734e-01, 1.0932e-01, 1.2210e-01, 7.1353e-01, + 8.0572e-01, 2.3582e-01, 9.8215e-01, 1.1015e-01, + 8.9113e-03, 7.1177e-01, 1.7986e-01, 4.5091e-01, + 1.7810e-01, 7.7559e-01, 9.4792e-01, 3.8139e-01, + 1.1546e-01, 7.6492e-01, 7.9508e-01, 8.6260e-01, + 9.6169e-01, 8.0818e-02, 2.8676e-02, 7.0994e-01, + 5.3818e-01, 2.6338e-01, 1.4087e-01, 6.8694e-01, + 2.4939e-01, 7.6196e-01, 6.6186e-01, 8.5655e-01, + 2.6657e-01, 6.6963e-01, 1.3788e-01, 4.1556e-01, + 1.9527e-01, 3.4537e-01, 1.4042e-01, 6.6402e-01, + 3.1351e-01, 6.6071e-01, 2.9595e-01, 9.6000e-01, + 7.2292e-01, 5.6025e-01, 7.7751e-01, 1.0276e-01, + 6.5010e-01, 1.5068e-01, 5.8440e-01, 3.1344e-01, + 1.3546e-01, 2.9211e-01, 4.6813e-01, 2.4935e-02, + 9.6457e-02, 7.4214e-01, 4.3047e-02, 
2.1731e-01, + 3.8465e-01, 9.4445e-01, 6.5976e-01, 8.8821e-01, + 5.0696e-01, 5.1792e-01, 4.7107e-01, 5.6703e-01, + 1.5516e-01, 8.0371e-01, 6.5735e-02, 9.3342e-01, + 7.5234e-01, 9.3810e-01, 8.0659e-02, 7.2725e-01, + 7.4287e-01, 8.0905e-01, 5.7371e-01, 8.0566e-01, + 8.6814e-01, 9.0878e-01, 8.1847e-02, 9.7577e-01, + 5.2302e-01, 6.0315e-01, 7.6834e-01, 9.3738e-01, + 5.8105e-01, 6.2760e-01, 7.9012e-01, 7.6416e-01, + 9.0308e-01, 7.2939e-01, 6.2535e-01, 8.1521e-01, + 4.6661e-01, 6.4054e-01, 6.4950e-01, 1.2072e-01, + 6.6340e-01, 6.0429e-01, 4.3164e-02, 8.3696e-02, + 8.7069e-01, 8.1124e-01, 5.2189e-01, 2.7111e-01, + 7.0551e-01, 6.3514e-02, 9.6973e-01, 6.3092e-01, + 5.8354e-01, 1.5335e-01, 7.9160e-01, 1.2117e-02, + 6.9520e-01, 6.4075e-01, 6.0235e-01, 6.8160e-02, + 2.6503e-01, 6.1870e-01, 6.3837e-01, 1.2293e-01, + 7.3640e-01, 2.3190e-01, 5.3413e-01, 4.5806e-01, + 2.4139e-01, 2.6287e-02, 3.6355e-01, 4.1787e-01, + 3.1233e-01, 4.8046e-01, 9.4417e-01, 6.1386e-01, + 4.9249e-01, 3.1649e-01, 9.6072e-01, 7.6183e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.0260, 0.2662, 0.7797, ..., 0.3158, 0.7779, 0.5618]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.251019954681396 seconds + +[19.15, 18.55, 18.85, 18.7, 18.83, 18.64, 18.66, 18.71, 18.71, 18.69] +[81.68] +13.858797550201416 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 283081, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.251019954681396, 'TIME_S_1KI': 0.03621232069507101, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1131.9865839004517, 'W': 81.68} +[19.15, 18.55, 18.85, 18.7, 18.83, 18.64, 18.66, 18.71, 18.71, 18.69, 19.67, 19.87, 19.02, 18.63, 19.42, 18.75, 18.68, 18.46, 19.14, 23.61] +342.18 +17.109 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 283081, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.251019954681396, 'TIME_S_1KI': 0.03621232069507101, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1131.9865839004517, 'W': 81.68, 'J_1KI': 3.9988080581192373, 'W_1KI': 0.28853932266736376, 'W_D': 64.571, 'J_D': 894.8764166140556, 'W_D_1KI': 0.2281007909396957, 'J_D_1KI': 0.0008057792325860644} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.json index 0ec114e..b5ab784 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 259324, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.257395267486572, "TIME_S_1KI": 0.03955436159972302, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1007.3763615226745, "W": 73.52, "J_1KI": 3.8846244910716883, "W_1KI": 0.28350634727213836, "W_D": 56.727999999999994, "J_D": 777.2911620845794, "W_D_1KI": 0.21875337415742466, "J_D_1KI": 0.000843552367530289} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 260903, "MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.26060175895691, "TIME_S_1KI": 0.03932726629803762, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1139.2804224228857, "W": 81.91, "J_1KI": 4.366681956217007, "W_1KI": 0.3139480956524072, "W_D": 65.07875, "J_D": 905.1757513216138, "W_D_1KI": 0.24943657221266138, "J_D_1KI": 0.0009560509929462728} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.output index 838ccfc..c8b3970 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.021072864532470703} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.022187471389770508} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 4999, 5000]), - col_indices=tensor([6834, 1931, 4346, ..., 6725, 2972, 1681]), - values=tensor([0.7465, 0.7749, 0.3553, ..., 0.3449, 0.2710, 0.4644]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4999, 5000, 5000]), + col_indices=tensor([1918, 5095, 6627, ..., 7262, 3668, 8808]), + values=tensor([0.5666, 0.3017, 0.3210, ..., 0.1526, 0.0926, 0.2620]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.4472, 0.0239, 0.4773, ..., 0.7523, 0.1836, 0.2389]) +tensor([0.7773, 0.7111, 0.8277, ..., 0.4499, 0.1514, 0.4790]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.021072864532470703 seconds +Time: 0.022187471389770508 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '49827', '-ss', '10000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.0174834728240967} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '47324', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9045445919036865} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 5000, 5000]), - col_indices=tensor([6306, 9296, 8522, ..., 5641, 7164, 5943]), - values=tensor([0.9605, 0.0866, 0.1892, ..., 0.4816, 0.2836, 0.0365]), + col_indices=tensor([5402, 7, 882, ..., 1424, 1990, 6900]), + values=tensor([0.9801, 0.1240, 0.1724, ..., 0.6076, 0.6381, 0.9238]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.7120, 0.8205, 0.0862, ..., 0.5109, 0.7192, 0.7608]) +tensor([0.8469, 0.8145, 0.0221, ..., 0.4733, 0.3926, 0.1930]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 2.0174834728240967 seconds +Time: 1.9045445919036865 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '259324', '-ss', '10000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.257395267486572} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '260903', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.26060175895691} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 5000, 5000]), - col_indices=tensor([2130, 3883, 5256, ..., 7000, 8219, 5132]), - values=tensor([0.8393, 0.8650, 0.4056, ..., 0.4895, 0.0562, 0.7603]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4997, 4998, 5000]), + col_indices=tensor([1861, 320, 3544, ..., 2680, 2737, 8798]), + values=tensor([0.9330, 0.5695, 0.5821, ..., 0.6374, 0.5841, 0.7944]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.9495, 0.1374, 0.1837, ..., 0.6231, 0.7099, 0.0387]) +tensor([0.6764, 0.7170, 0.1559, ..., 0.5275, 0.9424, 0.9186]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.257395267486572 seconds +Time: 10.26060175895691 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 5000, 5000]), - col_indices=tensor([2130, 3883, 5256, ..., 7000, 8219, 5132]), - values=tensor([0.8393, 0.8650, 0.4056, ..., 0.4895, 0.0562, 0.7603]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4997, 4998, 5000]), + col_indices=tensor([1861, 320, 3544, ..., 2680, 2737, 8798]), + values=tensor([0.9330, 0.5695, 0.5821, ..., 0.6374, 0.5841, 0.7944]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.9495, 0.1374, 0.1837, ..., 0.6231, 0.7099, 0.0387]) +tensor([0.6764, 0.7170, 0.1559, ..., 0.5275, 0.9424, 0.9186]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.257395267486572 seconds +Time: 10.26060175895691 seconds -[18.17, 18.06, 17.92, 18.22, 18.12, 18.42, 22.11, 19.11, 18.46, 18.0] -[73.52] -13.702072381973267 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 259324, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.257395267486572, 'TIME_S_1KI': 0.03955436159972302, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1007.3763615226745, 'W': 73.52} -[18.17, 18.06, 17.92, 18.22, 18.12, 18.42, 22.11, 19.11, 18.46, 18.0, 18.1, 18.11, 18.16, 22.77, 18.06, 18.3, 17.88, 17.93, 18.0, 18.15] -335.84000000000003 -16.792 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 259324, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.257395267486572, 'TIME_S_1KI': 0.03955436159972302, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1007.3763615226745, 'W': 73.52, 'J_1KI': 3.8846244910716883, 'W_1KI': 0.28350634727213836, 'W_D': 56.727999999999994, 'J_D': 777.2911620845794, 'W_D_1KI': 0.21875337415742466, 'J_D_1KI': 0.000843552367530289} +[19.13, 18.94, 18.64, 18.55, 18.48, 18.76, 18.52, 18.73, 18.63, 18.84] +[81.91] +13.908929586410522 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 260903, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.26060175895691, 'TIME_S_1KI': 0.03932726629803762, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1139.2804224228857, 'W': 81.91} +[19.13, 18.94, 18.64, 18.55, 18.48, 18.76, 18.52, 18.73, 18.63, 18.84, 19.04, 18.5, 18.67, 18.84, 18.66, 18.62, 18.96, 18.69, 18.74, 18.38] +336.625 +16.83125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 260903, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.26060175895691, 'TIME_S_1KI': 0.03932726629803762, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1139.2804224228857, 'W': 81.91, 'J_1KI': 4.366681956217007, 'W_1KI': 0.3139480956524072, 'W_D': 65.07875, 'J_D': 905.1757513216138, 'W_D_1KI': 0.24943657221266138, 'J_D_1KI': 0.0009560509929462728} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.json index da0856a..8565a48 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 665, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.126577615737915, "TIME_S_1KI": 15.22793626426754, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3144.6024104833605, "W": 46.7, "J_1KI": 4728.725429298286, "W_1KI": 70.22556390977444, "W_D": 20.655250000000002, "J_D": 1390.846872358382, "W_D_1KI": 31.060526315789478, "J_D_1KI": 46.707558369608236} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 743, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.847487449645996, "TIME_S_1KI": 14.599579340034987, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1707.3248128795624, "W": 78.28, "J_1KI": 2297.879963498738, "W_1KI": 105.35666218034993, "W_D": 61.384750000000004, "J_D": 1338.8312060220837, "W_D_1KI": 82.61742934051145, "J_D_1KI": 111.19438673016346} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.output index 4beed36..ef0fdc6 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,36 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.0304152965545654} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.4131314754486084} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 59, 94, ..., 24999900, - 24999951, 25000000]), - col_indices=tensor([ 1276, 15885, 34398, ..., 446460, 484343, - 488114]), - values=tensor([0.3408, 0.7505, 0.4683, ..., 0.6426, 0.2990, 0.1628]), - size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.4984, 0.2348, 0.7546, ..., 0.4897, 0.9555, 0.5266]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 25000000 -Density: 0.0001 -Time: 2.0304152965545654 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '517', '-ss', '500000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.159608364105225} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 46, 98, ..., 24999895, +tensor(crow_indices=tensor([ 0, 45, 94, ..., 24999909, 24999952, 25000000]), - col_indices=tensor([ 1859, 10480, 11583, ..., 471819, 483100, - 486034]), - values=tensor([0.5566, 0.4872, 0.1210, ..., 0.2476, 0.9480, 0.3070]), + col_indices=tensor([ 333, 411, 6379, ..., 461667, 473356, + 487603]), + values=tensor([0.3560, 0.8516, 0.0241, ..., 0.9709, 0.9869, 0.6984]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.1091, 0.0853, 0.9295, ..., 0.2076, 0.8766, 0.1664]) +tensor([0.0647, 0.7295, 0.8675, ..., 0.7068, 0.2858, 0.5210]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 8.159608364105225 seconds +Time: 1.4131314754486084 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '665', '-ss', '500000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.126577615737915} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '743', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.847487449645996} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 66, 119, ..., 24999902, - 24999949, 25000000]), - col_indices=tensor([ 3829, 12709, 24306, ..., 491038, 494248, - 495364]), - values=tensor([0.8354, 0.9747, 0.5569, ..., 0.5257, 0.7884, 0.2877]), +tensor(crow_indices=tensor([ 0, 52, 106, ..., 24999903, + 24999952, 25000000]), + col_indices=tensor([ 253, 4284, 6146, ..., 481164, 481548, + 482908]), + values=tensor([0.9604, 0.9584, 0.2027, ..., 0.8146, 0.5069, 0.5308]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.8209, 0.7651, 0.4978, ..., 0.6892, 0.4643, 0.4864]) +tensor([0.2685, 0.5529, 0.3924, ..., 0.7041, 0.0047, 0.4106]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -59,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.126577615737915 seconds +Time: 10.847487449645996 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 66, 119, ..., 24999902, - 24999949, 25000000]), - col_indices=tensor([ 3829, 12709, 24306, ..., 491038, 494248, - 495364]), - values=tensor([0.8354, 0.9747, 0.5569, ..., 0.5257, 0.7884, 0.2877]), +tensor(crow_indices=tensor([ 0, 52, 106, ..., 24999903, + 24999952, 25000000]), + col_indices=tensor([ 253, 4284, 6146, ..., 481164, 481548, + 482908]), + values=tensor([0.9604, 0.9584, 0.2027, ..., 0.8146, 0.5069, 0.5308]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.8209, 0.7651, 0.4978, ..., 0.6892, 0.4643, 0.4864]) +tensor([0.2685, 0.5529, 0.3924, ..., 0.7041, 0.0047, 0.4106]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -77,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.126577615737915 seconds +Time: 10.847487449645996 seconds -[18.36, 18.09, 18.07, 17.97, 17.94, 18.13, 18.13, 17.77, 17.92, 18.01] -[46.7] -67.33624005317688 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 665, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.126577615737915, 'TIME_S_1KI': 15.22793626426754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3144.6024104833605, 'W': 46.7} -[18.36, 18.09, 18.07, 17.97, 17.94, 18.13, 18.13, 17.77, 17.92, 18.01, 39.86, 40.69, 39.54, 40.28, 39.05, 39.85, 39.62, 39.36, 40.17, 40.4] -520.895 -26.04475 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 665, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.126577615737915, 'TIME_S_1KI': 15.22793626426754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3144.6024104833605, 'W': 46.7, 'J_1KI': 4728.725429298286, 'W_1KI': 70.22556390977444, 'W_D': 20.655250000000002, 'J_D': 1390.846872358382, 'W_D_1KI': 31.060526315789478, 'J_D_1KI': 46.707558369608236} +[19.4, 18.5, 18.39, 18.48, 18.93, 18.52, 18.42, 18.25, 18.93, 18.28] +[78.28] +21.81048560142517 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 743, 'MATRIX_TYPE': 'synthetic', 
'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.847487449645996, 'TIME_S_1KI': 14.599579340034987, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1707.3248128795624, 'W': 78.28} +[19.4, 18.5, 18.39, 18.48, 18.93, 18.52, 18.42, 18.25, 18.93, 18.28, 18.83, 18.79, 18.7, 18.45, 18.42, 21.07, 19.15, 18.58, 18.85, 18.44] +337.905 +16.895249999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 743, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.847487449645996, 'TIME_S_1KI': 14.599579340034987, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1707.3248128795624, 'W': 78.28, 'J_1KI': 2297.879963498738, 'W_1KI': 105.35666218034993, 'W_D': 61.384750000000004, 'J_D': 1338.8312060220837, 'W_D_1KI': 82.61742934051145, 'J_D_1KI': 111.19438673016346} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json index ac8e083..9cdb73b 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8088, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.521591186523438, "TIME_S_1KI": 1.30088911801724, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1272.5733924508095, "W": 87.25, "J_1KI": 157.34092389352244, "W_1KI": 10.787586547972305, "W_D": 70.764, "J_D": 1032.1190090932846, "W_D_1KI": 8.749258160237389, "J_D_1KI": 1.0817579327692124} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 7744, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.456795454025269, "TIME_S_1KI": 1.350309330323511, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1300.6653230977058, "W": 89.23, "J_1KI": 167.95781548265828, "W_1KI": 11.522469008264462, "W_D": 72.1055, "J_D": 1051.0492374159098, "W_D_1KI": 9.311144111570249, "J_D_1KI": 1.2023688160602077} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output index a65a620..b742134 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,57 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.14249539375305176} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, 
"MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.15149736404418945} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 6, ..., 2499988, - 2499994, 2500000]), - col_indices=tensor([159074, 199303, 338786, ..., 336877, 376694, - 404714]), - values=tensor([0.7251, 0.9700, 0.9965, ..., 0.4798, 0.8363, 0.5285]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5063, 0.7490, 0.8579, ..., 0.3117, 0.7674, 0.7165]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 0.14249539375305176 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7368', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.564131021499634} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 11, ..., 2499988, - 2499994, 2500000]), - col_indices=tensor([ 45977, 46883, 132654, ..., 283974, 337716, - 438050]), - values=tensor([0.6941, 0.4659, 0.2903, ..., 0.1328, 0.8033, 0.9427]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8274, 0.6187, 0.7071, ..., 0.9433, 0.5745, 0.3570]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 9.564131021499634 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8088', '-ss', '500000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.521591186523438} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 2499988, +tensor(crow_indices=tensor([ 0, 6, 9, ..., 2499992, 2499995, 2500000]), - col_indices=tensor([ 55151, 55855, 262240, ..., 129037, 280325, - 497898]), - values=tensor([0.5548, 0.3291, 0.4545, ..., 0.6191, 0.2200, 0.6842]), + col_indices=tensor([ 74157, 220860, 221973, ..., 150255, 261218, + 336444]), + values=tensor([0.4232, 0.0404, 0.7971, ..., 0.3800, 0.9766, 0.9632]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8740, 0.8697, 0.8262, ..., 0.4420, 0.1114, 0.8177]) +tensor([0.7557, 0.5265, 0.2382, ..., 0.3943, 0.8940, 0.5498]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -59,17 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.521591186523438 seconds +Time: 0.15149736404418945 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '6930', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.39523720741272} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 2499988, +tensor(crow_indices=tensor([ 0, 3, 9, ..., 2499985, 2499995, 2500000]), - col_indices=tensor([ 55151, 55855, 262240, ..., 129037, 280325, - 497898]), - values=tensor([0.5548, 0.3291, 0.4545, ..., 0.6191, 0.2200, 0.6842]), + col_indices=tensor([204165, 362178, 474289, ..., 280949, 390948, + 429692]), + values=tensor([0.7877, 0.4298, 0.2604, ..., 0.9690, 0.2975, 0.5629]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8740, 0.8697, 0.8262, ..., 0.4420, 0.1114, 0.8177]) +tensor([0.2673, 0.3483, 0.3518, ..., 0.9196, 0.5251, 0.0617]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -77,13 +38,52 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.521591186523438 seconds +Time: 9.39523720741272 seconds -[19.45, 17.88, 18.22, 21.5, 18.64, 18.15, 18.16, 17.8, 17.97, 17.95] -[87.25] -14.585368394851685 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8088, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.521591186523438, 'TIME_S_1KI': 1.30088911801724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1272.5733924508095, 'W': 87.25} -[19.45, 17.88, 18.22, 21.5, 18.64, 18.15, 18.16, 17.8, 17.97, 17.95, 18.5, 17.88, 17.95, 18.22, 18.0, 18.18, 18.21, 18.1, 18.01, 17.8] -329.72 -16.486 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8088, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.521591186523438, 'TIME_S_1KI': 1.30088911801724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 
'J': 1272.5733924508095, 'W': 87.25, 'J_1KI': 157.34092389352244, 'W_1KI': 10.787586547972305, 'W_D': 70.764, 'J_D': 1032.1190090932846, 'W_D_1KI': 8.749258160237389, 'J_D_1KI': 1.0817579327692124} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7744', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.456795454025269} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 2499990, + 2499993, 2500000]), + col_indices=tensor([ 84916, 193491, 208640, ..., 274348, 297691, + 374693]), + values=tensor([0.3724, 0.0478, 0.9524, ..., 0.8845, 0.7053, 0.9023]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0947, 0.8980, 0.4248, ..., 0.5144, 0.1147, 0.5545]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.456795454025269 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 2499990, + 2499993, 2500000]), + col_indices=tensor([ 84916, 193491, 208640, ..., 274348, 297691, + 374693]), + values=tensor([0.3724, 0.0478, 0.9524, ..., 0.8845, 0.7053, 0.9023]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0947, 0.8980, 0.4248, ..., 0.5144, 0.1147, 0.5545]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.456795454025269 seconds + +[19.09, 18.64, 19.23, 18.57, 18.57, 18.65, 18.89, 18.58, 18.6, 21.3] +[89.23] +14.576547384262085 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 7744, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.456795454025269, 'TIME_S_1KI': 1.350309330323511, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.6653230977058, 'W': 89.23} +[19.09, 18.64, 19.23, 18.57, 18.57, 18.65, 18.89, 18.58, 18.6, 21.3, 18.87, 18.69, 18.86, 18.42, 18.71, 22.08, 19.26, 18.57, 19.16, 18.76] +342.49 +17.1245 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 7744, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.456795454025269, 'TIME_S_1KI': 1.350309330323511, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.6653230977058, 'W': 89.23, 'J_1KI': 167.95781548265828, 'W_1KI': 11.522469008264462, 'W_D': 72.1055, 'J_D': 1051.0492374159098, 'W_D_1KI': 9.311144111570249, 'J_D_1KI': 1.2023688160602077} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.json index 17825c9..cd41306 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1356, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.575294256210327, "TIME_S_1KI": 7.798889569476643, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1502.6908141350748, "W": 76.43, "J_1KI": 1108.179066471294, "W_1KI": 56.36430678466077, "W_D": 60.11625000000001, "J_D": 1181.9460507032277, "W_D_1KI": 44.333517699115056, "J_D_1KI": 32.694334586368036} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1328, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.82228398323059, "TIME_S_1KI": 8.149310228336288, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1475.9286375498773, "W": 83.17, "J_1KI": 1111.3920463477991, "W_1KI": 62.628012048192765, "W_D": 66.243, "J_D": 1175.5433538200855, "W_D_1KI": 49.88177710843373, "J_D_1KI": 37.561579147916966} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.output index b27b9d0..34a0ce8 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,15 +1,36 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.7741575241088867} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.790607213973999} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 27, 54, ..., 12499941, +tensor(crow_indices=tensor([ 0, 21, 47, ..., 12499944, + 12499967, 12500000]), + col_indices=tensor([ 1494, 57305, 78344, ..., 463720, 471221, + 471958]), + values=tensor([0.3917, 0.2908, 0.7809, ..., 0.7032, 0.0658, 0.8610]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4834, 0.6730, 0.9333, ..., 0.5341, 0.2922, 0.8114]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 0.790607213973999 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1328', '-ss', '500000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.82228398323059} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 19, 44, ..., 12499949, 12499975, 12500000]), - col_indices=tensor([ 19879, 19996, 22547, ..., 457855, 459779, - 462945]), - values=tensor([0.0262, 0.3741, 0.0922, ..., 0.5524, 0.1014, 0.8276]), + col_indices=tensor([ 57604, 61315, 85606, ..., 382989, 409177, + 486939]), + values=tensor([0.6373, 0.9438, 0.2505, ..., 0.8754, 0.9247, 0.8731]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.7209, 0.9008, 0.7814, ..., 0.2206, 0.4926, 0.1534]) +tensor([0.4492, 0.4704, 0.5551, ..., 0.2025, 0.2950, 0.9971]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 0.7741575241088867 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1356', '-ss', '500000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.575294256210327} +Time: 10.82228398323059 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 59, ..., 12499949, - 12499973, 12500000]), - col_indices=tensor([ 13892, 45461, 46784, ..., 469557, 488276, - 489508]), - values=tensor([0.8469, 0.7554, 0.2394, ..., 0.6309, 0.5261, 0.2516]), +tensor(crow_indices=tensor([ 0, 19, 44, ..., 12499949, + 12499975, 12500000]), + col_indices=tensor([ 57604, 61315, 85606, ..., 382989, 409177, + 486939]), + values=tensor([0.6373, 0.9438, 0.2505, ..., 0.8754, 0.9247, 0.8731]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9763, 0.3043, 0.0965, ..., 0.1822, 0.5455, 0.4604]) +tensor([0.4492, 0.4704, 0.5551, ..., 0.2025, 0.2950, 0.9971]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,31 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.575294256210327 seconds +Time: 10.82228398323059 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 59, ..., 12499949, - 12499973, 12500000]), - col_indices=tensor([ 13892, 45461, 46784, ..., 469557, 488276, - 489508]), - values=tensor([0.8469, 0.7554, 0.2394, ..., 0.6309, 0.5261, 0.2516]), - size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9763, 0.3043, 0.0965, ..., 0.1822, 0.5455, 0.4604]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 12500000 -Density: 5e-05 -Time: 10.575294256210327 seconds - -[18.28, 17.9, 18.0, 17.69, 18.19, 17.78, 17.99, 17.71, 17.86, 17.72] -[76.43] -19.66100764274597 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1356, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.575294256210327, 'TIME_S_1KI': 7.798889569476643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.6908141350748, 'W': 76.43} -[18.28, 17.9, 18.0, 17.69, 18.19, 17.78, 17.99, 17.71, 17.86, 17.72, 22.73, 18.23, 18.33, 18.05, 18.19, 17.82, 18.25, 18.16, 17.83, 17.86] -326.275 -16.31375 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1356, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.575294256210327, 'TIME_S_1KI': 7.798889569476643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.6908141350748, 'W': 76.43, 'J_1KI': 1108.179066471294, 'W_1KI': 56.36430678466077, 'W_D': 60.11625000000001, 'J_D': 1181.9460507032277, 'W_D_1KI': 44.333517699115056, 'J_D_1KI': 32.694334586368036} +[22.8, 18.6, 18.68, 18.35, 18.69, 18.46, 18.87, 18.58, 18.66, 18.46] +[83.17] +17.745925664901733 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1328, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.82228398323059, 'TIME_S_1KI': 8.149310228336288, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1475.9286375498773, 'W': 83.17} +[22.8, 18.6, 18.68, 18.35, 18.69, 18.46, 18.87, 18.58, 18.66, 18.46, 19.23, 18.49, 19.15, 18.46, 19.64, 18.46, 18.68, 18.56, 18.73, 18.47] +338.54 +16.927 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1328, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.82228398323059, 'TIME_S_1KI': 8.149310228336288, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1475.9286375498773, 'W': 83.17, 'J_1KI': 1111.3920463477991, 'W_1KI': 62.628012048192765, 'W_D': 66.243, 'J_D': 1175.5433538200855, 'W_D_1KI': 49.88177710843373, 'J_D_1KI': 37.561579147916966} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json index 8609bc2..58e9731 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 80207, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], 
"MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.527606010437012, "TIME_S_1KI": 0.13125545164932004, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1181.3196375966072, "W": 83.35, "J_1KI": 14.728385771773127, "W_1KI": 1.039186105950852, "W_D": 67.15325, "J_D": 951.7630828246474, "W_D_1KI": 0.8372492425848118, "J_D_1KI": 0.010438605640216089} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 78502, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.604196548461914, "TIME_S_1KI": 0.13508186477366071, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1248.9033283519743, "W": 89.76999999999998, "J_1KI": 15.909191209803243, "W_1KI": 1.1435377442612924, "W_D": 72.76324999999999, "J_D": 1012.3010483090875, "W_D_1KI": 0.926896766961351, "J_D_1KI": 0.011807301303933034} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output index 381203d..143ee83 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03568005561828613} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.029583454132080078} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 9, ..., 249987, 249994, +tensor(crow_indices=tensor([ 0, 6, 14, ..., 249987, 249994, 250000]), - col_indices=tensor([ 1312, 19953, 25282, ..., 26652, 33001, 38879]), - values=tensor([0.3658, 0.9367, 0.4335, ..., 0.7027, 0.8564, 0.9906]), + col_indices=tensor([ 9923, 19449, 22561, ..., 21964, 36889, 39975]), + values=tensor([0.4210, 0.0559, 0.4971, ..., 0.9048, 0.5499, 0.1509]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4942, 0.6881, 0.2872, ..., 0.3001, 0.6556, 0.7300]) +tensor([0.0733, 0.6322, 0.8044, ..., 0.8642, 0.7618, 0.4999]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.03568005561828613 seconds +Time: 0.029583454132080078 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '29428', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 3.8524389266967773} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '35492', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.747204542160034} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 8, ..., 249998, 250000, +tensor(crow_indices=tensor([ 0, 8, 11, ..., 249987, 249995, 250000]), - col_indices=tensor([ 4346, 9215, 13661, ..., 37674, 16332, 22572]), - values=tensor([0.5552, 0.4398, 0.7001, ..., 0.6234, 0.7005, 0.0878]), + col_indices=tensor([ 252, 9477, 12548, ..., 34072, 35023, 41292]), + values=tensor([0.7567, 0.0178, 0.4836, ..., 0.7947, 0.4700, 0.9389]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5304, 0.8900, 0.0447, ..., 0.3418, 0.1958, 0.2486]) +tensor([0.1654, 0.8334, 0.6400, ..., 0.5149, 0.8662, 0.5728]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 3.8524389266967773 seconds +Time: 4.747204542160034 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '80207', '-ss', '50000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.527606010437012} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '78502', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.604196548461914} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 19, ..., 249993, 249998, +tensor(crow_indices=tensor([ 0, 4, 8, ..., 249989, 249995, 250000]), - col_indices=tensor([ 8045, 12111, 14477, ..., 47402, 12160, 19361]), - values=tensor([0.9649, 0.3819, 0.5636, ..., 0.2633, 0.1370, 0.0196]), + col_indices=tensor([ 1101, 25106, 38381, ..., 17831, 22084, 28136]), + values=tensor([0.1529, 0.1331, 0.6574, ..., 0.5721, 0.3973, 0.7056]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0927, 0.4142, 0.0895, ..., 0.8219, 0.5339, 0.8064]) +tensor([0.8762, 0.0918, 0.6182, ..., 0.1694, 0.9682, 0.4170]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.527606010437012 seconds +Time: 10.604196548461914 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 19, ..., 249993, 249998, +tensor(crow_indices=tensor([ 0, 4, 8, ..., 249989, 249995, 250000]), - col_indices=tensor([ 8045, 12111, 14477, ..., 47402, 12160, 19361]), - values=tensor([0.9649, 0.3819, 0.5636, ..., 0.2633, 0.1370, 0.0196]), + col_indices=tensor([ 1101, 25106, 38381, ..., 17831, 22084, 28136]), + values=tensor([0.1529, 0.1331, 0.6574, ..., 0.5721, 0.3973, 0.7056]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0927, 0.4142, 0.0895, ..., 0.8219, 0.5339, 0.8064]) +tensor([0.8762, 0.0918, 0.6182, ..., 0.1694, 0.9682, 0.4170]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.527606010437012 seconds +Time: 10.604196548461914 seconds -[18.32, 17.98, 17.86, 18.86, 18.68, 17.81, 17.68, 18.11, 17.76, 17.85] -[83.35] -14.173001050949097 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 80207, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.527606010437012, 'TIME_S_1KI': 0.13125545164932004, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1181.3196375966072, 'W': 83.35} -[18.32, 17.98, 17.86, 18.86, 18.68, 17.81, 17.68, 18.11, 17.76, 17.85, 18.01, 18.02, 17.88, 17.81, 17.97, 17.93, 17.96, 17.74, 17.82, 17.95] -323.93499999999995 -16.196749999999998 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 80207, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.527606010437012, 'TIME_S_1KI': 0.13125545164932004, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1181.3196375966072, 'W': 83.35, 'J_1KI': 14.728385771773127, 'W_1KI': 1.039186105950852, 'W_D': 67.15325, 'J_D': 951.7630828246474, 'W_D_1KI': 0.8372492425848118, 'J_D_1KI': 0.010438605640216089} +[18.91, 18.66, 18.4, 18.85, 18.71, 18.62, 18.6, 18.6, 18.59, 18.36] +[89.77] +13.912257194519043 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 78502, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.604196548461914, 'TIME_S_1KI': 0.13508186477366071, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1248.9033283519743, 'W': 89.76999999999998} +[18.91, 18.66, 18.4, 18.85, 18.71, 18.62, 18.6, 18.6, 18.59, 18.36, 19.02, 18.67, 23.43, 18.59, 18.63, 18.45, 18.84, 18.48, 18.52, 18.7] +340.135 +17.00675 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 78502, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.604196548461914, 'TIME_S_1KI': 0.13508186477366071, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1248.9033283519743, 'W': 89.76999999999998, 'J_1KI': 15.909191209803243, 'W_1KI': 1.1435377442612924, 'W_D': 72.76324999999999, 'J_D': 1012.3010483090875, 'W_D_1KI': 0.926896766961351, 'J_D_1KI': 0.011807301303933034} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json index 1ef0389..fe8b0b3 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 17086, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.693793773651123, "TIME_S_1KI": 0.6258804737007564, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1274.9557392597198, "W": 87.4, "J_1KI": 74.61990748330328, "W_1KI": 5.1152990752663, "W_D": 70.92850000000001, "J_D": 1034.6761802297833, "W_D_1KI": 4.151264192906474, "J_D_1KI": 0.24296290488742092} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 16566, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.542575359344482, "TIME_S_1KI": 0.6363983677015865, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1301.1103634548188, "W": 89.24, "J_1KI": 78.5410095046975, "W_1KI": 5.38693710008451, "W_D": 72.376, "J_D": 1055.234913328171, "W_D_1KI": 4.368948448629724, "J_D_1KI": 0.26372983512191983} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output index 168fd00..b103663 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output @@ -1,15 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07488131523132324} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07738423347473145} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 64, 108, ..., 2499910, - 2499955, 2500000]), - col_indices=tensor([ 984, 1625, 1972, ..., 46651, 48149, 48861]), - values=tensor([5.1121e-01, 5.6272e-01, 1.1145e-04, ..., - 9.0355e-01, 4.1789e-01, 4.2355e-01]), +tensor(crow_indices=tensor([ 0, 42, 96, ..., 2499904, + 2499957, 2500000]), + col_indices=tensor([ 1691, 2079, 2423, ..., 45303, 47027, 48713]), + values=tensor([0.6144, 0.7881, 0.6854, ..., 0.3269, 0.0689, 0.3794]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3839, 0.9792, 0.8841, ..., 0.7211, 0.2437, 0.3590]) +tensor([0.1047, 0.4397, 0.6737, ..., 0.5492, 0.2785, 0.2061]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -17,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 0.07488131523132324 seconds +Time: 0.07738423347473145 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '14022', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.616644144058228} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '13568', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.599735021591187} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 54, 105, ..., 2499907, - 2499960, 2500000]), - col_indices=tensor([ 369, 1157, 3425, ..., 45077, 46820, 49764]), - values=tensor([0.6429, 0.4063, 0.5775, ..., 0.7664, 0.6925, 0.8507]), +tensor(crow_indices=tensor([ 0, 57, 103, ..., 2499904, + 2499953, 2500000]), + col_indices=tensor([ 50, 533, 1501, ..., 46106, 47815, 49257]), + values=tensor([0.9408, 0.2538, 0.6981, ..., 0.4395, 0.0590, 0.3961]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4381, 0.1550, 0.0791, ..., 0.0177, 0.9903, 0.0608]) +tensor([0.1785, 0.5003, 0.9612, ..., 0.8268, 0.6661, 0.8175]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -37,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 8.616644144058228 seconds +Time: 8.599735021591187 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '17086', '-ss', '50000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.693793773651123} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '16566', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.542575359344482} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 52, 91, ..., 2499883, - 2499936, 2500000]), - col_indices=tensor([ 188, 2361, 2646, ..., 48274, 48923, 49377]), - values=tensor([0.2734, 0.1056, 0.9298, ..., 0.4005, 0.9270, 0.9473]), +tensor(crow_indices=tensor([ 0, 51, 94, ..., 2499892, + 2499941, 2500000]), + col_indices=tensor([ 1586, 1774, 2808, ..., 47697, 48009, 49307]), + values=tensor([0.9728, 0.9906, 0.5047, ..., 0.0535, 0.3566, 0.5066]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1965, 0.7813, 0.8576, ..., 0.6695, 0.1581, 0.1443]) +tensor([0.7723, 0.7483, 0.0194, ..., 0.7269, 0.0365, 0.8768]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -57,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.693793773651123 seconds +Time: 10.542575359344482 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 52, 91, ..., 2499883, - 2499936, 2500000]), - col_indices=tensor([ 188, 2361, 2646, ..., 48274, 48923, 49377]), - values=tensor([0.2734, 0.1056, 0.9298, ..., 0.4005, 0.9270, 0.9473]), +tensor(crow_indices=tensor([ 0, 51, 94, ..., 2499892, + 2499941, 2500000]), + col_indices=tensor([ 1586, 1774, 2808, ..., 47697, 48009, 49307]), + values=tensor([0.9728, 0.9906, 0.5047, ..., 0.0535, 0.3566, 0.5066]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1965, 0.7813, 0.8576, ..., 0.6695, 0.1581, 0.1443]) +tensor([0.7723, 0.7483, 0.0194, ..., 0.7269, 0.0365, 0.8768]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -74,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.693793773651123 seconds +Time: 10.542575359344482 seconds -[18.43, 18.16, 17.96, 18.08, 18.33, 18.04, 18.36, 18.18, 18.22, 18.06] -[87.4] -14.587594270706177 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17086, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.693793773651123, 'TIME_S_1KI': 0.6258804737007564, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.9557392597198, 'W': 87.4} -[18.43, 18.16, 17.96, 18.08, 18.33, 18.04, 18.36, 18.18, 18.22, 18.06, 18.48, 17.93, 19.19, 18.85, 18.16, 18.04, 18.63, 18.25, 18.43, 18.27] -329.43 -16.4715 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17086, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.693793773651123, 'TIME_S_1KI': 0.6258804737007564, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.9557392597198, 'W': 87.4, 'J_1KI': 74.61990748330328, 'W_1KI': 5.1152990752663, 'W_D': 70.92850000000001, 'J_D': 1034.6761802297833, 'W_D_1KI': 4.151264192906474, 'J_D_1KI': 0.24296290488742092} +[19.58, 18.68, 19.21, 18.55, 18.74, 18.46, 19.06, 18.57, 19.35, 18.59] +[89.24] +14.579900979995728 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 16566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.542575359344482, 'TIME_S_1KI': 0.6363983677015865, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1301.1103634548188, 'W': 89.24} +[19.58, 18.68, 19.21, 18.55, 18.74, 18.46, 19.06, 18.57, 19.35, 18.59, 18.96, 18.5, 18.54, 18.83, 18.59, 18.61, 18.59, 18.56, 18.69, 18.37] +337.28 +16.863999999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 16566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.542575359344482, 'TIME_S_1KI': 0.6363983677015865, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1301.1103634548188, 'W': 89.24, 'J_1KI': 78.5410095046975, 'W_1KI': 5.38693710008451, 'W_D': 72.376, 'J_D': 1055.234913328171, 'W_D_1KI': 4.368948448629724, 'J_D_1KI': 0.26372983512191983} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.json index 6f4838a..66c32ba 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1159, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.524485111236572, "TIME_S_1KI": 9.080660147745101, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2883.053437728882, "W": 54.72, "J_1KI": 2487.5353215952387, "W_1KI": 47.21311475409836, "W_D": 38.2975, "J_D": 2017.7949384397268, "W_D_1KI": 33.043572044866266, "J_D_1KI": 28.51041591446615} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1171, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.663051128387451, "TIME_S_1KI": 9.10593606181678, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1689.2026331734658, "W": 77.67, "J_1KI": 1442.5300027100477, "W_1KI": 66.32792485055508, "W_D": 60.839, "J_D": 1323.1543581774235, "W_D_1KI": 51.95473953885568, "J_D_1KI": 44.36783905965472} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.output index c966aa4..dc4d5d3 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.2965989112854004} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.8961431980133057} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 462, 963, ..., 24999009, - 24999507, 25000000]), - col_indices=tensor([ 19, 54, 59, ..., 49770, 49789, 49840]), - values=tensor([0.0062, 0.3047, 0.0339, ..., 0.6533, 0.8264, 0.4065]), +tensor(crow_indices=tensor([ 0, 519, 989, ..., 24998978, + 24999454, 25000000]), + col_indices=tensor([ 45, 174, 218, ..., 49244, 49258, 49545]), + values=tensor([0.1125, 0.2142, 0.3311, ..., 0.0504, 0.1981, 0.8104]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.0572, 0.3375, 0.5398, ..., 0.2388, 0.0349, 0.7555]) +tensor([0.3205, 0.7140, 0.4617, ..., 0.1872, 0.0736, 0.0707]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 1.2965989112854004 seconds +Time: 0.8961431980133057 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '809', '-ss', '50000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.848743677139282} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1171', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.663051128387451} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 486, 980, ..., 24998984, - 24999490, 25000000]), - col_indices=tensor([ 15, 197, 386, ..., 49782, 49793, 49889]), - values=tensor([0.3923, 0.3887, 0.8681, ..., 0.2288, 0.1762, 0.4981]), +tensor(crow_indices=tensor([ 0, 544, 1055, ..., 24999059, + 24999508, 25000000]), + col_indices=tensor([ 351, 404, 563, ..., 49538, 49715, 49727]), + values=tensor([0.9378, 0.6332, 0.6319, ..., 0.7390, 0.2560, 0.4027]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.2965, 0.2935, 0.4053, ..., 0.9117, 0.1428, 0.4127]) +tensor([0.0290, 0.6833, 0.3576, ..., 0.6974, 0.2953, 0.9792]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 7.848743677139282 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1082', '-ss', '50000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.800410747528076} +Time: 10.663051128387451 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 497, 982, ..., 24999036, - 24999517, 25000000]), - col_indices=tensor([ 46, 129, 426, ..., 49653, 49766, 49830]), - values=tensor([0.6195, 0.6207, 0.9497, ..., 0.4637, 0.0557, 0.5508]), +tensor(crow_indices=tensor([ 0, 544, 1055, ..., 24999059, + 24999508, 25000000]), + col_indices=tensor([ 351, 404, 563, ..., 49538, 49715, 49727]), + values=tensor([0.9378, 0.6332, 0.6319, ..., 0.7390, 0.2560, 0.4027]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.1589, 0.3084, 0.3696, ..., 0.3780, 0.7461, 0.4084]) +tensor([0.0290, 0.6833, 0.3576, ..., 0.6974, 0.2953, 0.9792]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,50 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 9.800410747528076 seconds +Time: 10.663051128387451 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1159', '-ss', '50000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.524485111236572} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 513, 1013, ..., 24998968, - 24999473, 25000000]), - col_indices=tensor([ 48, 124, 131, ..., 49410, 49843, 49893]), - values=tensor([0.3835, 0.3241, 0.0409, ..., 0.5767, 0.7491, 0.8402]), - size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3332, 0.0589, 0.5895, ..., 0.7200, 0.0490, 0.9504]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000000 -Density: 0.01 -Time: 10.524485111236572 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 513, 1013, ..., 24998968, - 24999473, 25000000]), - col_indices=tensor([ 48, 124, 131, ..., 49410, 49843, 49893]), - values=tensor([0.3835, 0.3241, 0.0409, ..., 0.5767, 0.7491, 0.8402]), - size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3332, 0.0589, 0.5895, ..., 0.7200, 0.0490, 0.9504]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000000 -Density: 0.01 -Time: 10.524485111236572 seconds - -[18.54, 17.98, 18.07, 17.85, 18.22, 17.93, 18.48, 18.6, 18.31, 18.9] -[54.72] -52.68738007545471 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1159, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.524485111236572, 'TIME_S_1KI': 9.080660147745101, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2883.053437728882, 'W': 54.72} -[18.54, 17.98, 18.07, 17.85, 18.22, 17.93, 18.48, 18.6, 18.31, 18.9, 18.48, 18.15, 18.06, 18.06, 17.98, 17.84, 18.37, 17.81, 17.96, 21.64] -328.45 -16.4225 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1159, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.524485111236572, 'TIME_S_1KI': 9.080660147745101, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2883.053437728882, 'W': 54.72, 'J_1KI': 2487.5353215952387, 'W_1KI': 47.21311475409836, 'W_D': 38.2975, 'J_D': 2017.7949384397268, 'W_D_1KI': 33.043572044866266, 'J_D_1KI': 28.51041591446615} +[19.29, 18.98, 18.82, 18.67, 18.7, 18.48, 18.8, 18.48, 18.69, 18.72] +[77.67] +21.748456716537476 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1171, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.663051128387451, 'TIME_S_1KI': 9.10593606181678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1689.2026331734658, 'W': 77.67} +[19.29, 18.98, 18.82, 18.67, 18.7, 18.48, 18.8, 18.48, 18.69, 18.72, 19.17, 18.51, 18.58, 18.49, 18.99, 18.48, 18.57, 18.54, 19.01, 18.48] +336.62 +16.831 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1171, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.663051128387451, 
'TIME_S_1KI': 9.10593606181678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1689.2026331734658, 'W': 77.67, 'J_1KI': 1442.5300027100477, 'W_1KI': 66.32792485055508, 'W_D': 60.839, 'J_D': 1323.1543581774235, 'W_D_1KI': 51.95473953885568, 'J_D_1KI': 44.36783905965472} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..5d1cbb5 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 246, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.44187068939209, "TIME_S_1KI": 42.44662881866703, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3199.6955910372735, "W": 63.97, "J_1KI": 13006.892646492983, "W_1KI": 260.040650406504, "W_D": 47.1205, "J_D": 2356.90567605865, "W_D_1KI": 191.54674796747966, "J_D_1KI": 778.6453169409742} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..c913954 --- /dev/null +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 4.265462636947632} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2506, 5031, ..., + 124994966, 124997482, 125000000]), + col_indices=tensor([ 0, 23, 38, ..., 49921, 49929, 49974]), + values=tensor([0.8348, 0.2779, 0.9906, ..., 0.7530, 0.5252, 0.2131]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.8800, 0.4802, 0.4436, ..., 0.3999, 0.1613, 0.9688]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 4.265462636947632 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '246', '-ss', '50000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.44187068939209} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2491, 4960, ..., + 124994953, 124997533, 125000000]), + col_indices=tensor([ 7, 10, 21, ..., 49960, 49969, 49999]), + values=tensor([0.3142, 0.3593, 0.3701, ..., 0.0388, 0.8323, 0.4619]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.9622, 0.4731, 0.9509, ..., 0.9547, 0.9339, 0.1027]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 10.44187068939209 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2491, 4960, ..., + 124994953, 124997533, 125000000]), + col_indices=tensor([ 7, 10, 21, ..., 49960, 49969, 49999]), + values=tensor([0.3142, 0.3593, 0.3701, ..., 0.0388, 0.8323, 0.4619]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.9622, 0.4731, 0.9509, ..., 0.9547, 0.9339, 0.1027]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 10.44187068939209 seconds + +[18.92, 18.86, 18.6, 19.01, 18.68, 18.68, 19.09, 18.37, 18.68, 18.77] +[63.97] +50.01868987083435 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 246, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.44187068939209, 'TIME_S_1KI': 42.44662881866703, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3199.6955910372735, 'W': 63.97} +[18.92, 18.86, 18.6, 19.01, 18.68, 18.68, 19.09, 18.37, 18.68, 18.77, 18.88, 18.32, 18.42, 18.71, 18.57, 19.28, 18.78, 18.72, 18.64, 18.59] +336.99 +16.8495 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 246, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.44187068939209, 'TIME_S_1KI': 42.44662881866703, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3199.6955910372735, 'W': 63.97, 'J_1KI': 13006.892646492983, 'W_1KI': 260.040650406504, 'W_D': 47.1205, 'J_D': 2356.90567605865, 'W_D_1KI': 191.54674796747966, 'J_D_1KI': 778.6453169409742} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json index be4ba65..b1b48d4 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 113077, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.7483811378479, "TIME_S_1KI": 
0.09505364608052831, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1073.5031987619402, "W": 76.43000000000002, "J_1KI": 9.49355924513332, "W_1KI": 0.6759111048223778, "W_D": 60.05525000000002, "J_D": 843.5104406312707, "W_D_1KI": 0.5311004890472866, "J_D_1KI": 0.004696803850891751} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 110042, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.494260787963867, "TIME_S_1KI": 0.09536595834284971, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1150.5043627023697, "W": 84.45, "J_1KI": 10.455138608007577, "W_1KI": 0.7674342523763654, "W_D": 67.43625, "J_D": 918.7175823479892, "W_D_1KI": 0.6128228312826012, "J_D_1KI": 0.0055689903062703435} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output index 8dbecc1..22265c6 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.02812027931213379} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.025803804397583008} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([42362, 37248, 40868, ..., 37764, 10134, 17711]), - values=tensor([0.4763, 0.9715, 0.1475, ..., 0.3126, 0.8815, 0.0115]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 25000, 25000]), + col_indices=tensor([19109, 3237, 16845, ..., 27846, 2234, 15836]), + values=tensor([0.7799, 0.7161, 0.7964, ..., 0.4451, 0.9674, 0.7259]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9973, 0.6491, 0.6388, ..., 0.7622, 0.1974, 0.5505]) +tensor([0.5700, 0.8969, 0.4532, ..., 0.0768, 0.3223, 0.3140]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.02812027931213379 seconds +Time: 0.025803804397583008 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '37339', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.4671623706817627} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '40691', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.8826541900634766} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), - col_indices=tensor([ 629, 39590, 3531, ..., 29068, 27842, 31077]), - values=tensor([0.8879, 0.6863, 0.1252, ..., 0.4874, 0.7763, 0.0925]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([22975, 26462, 32598, ..., 46764, 26072, 23370]), + values=tensor([0.8868, 0.3719, 0.2452, ..., 0.3502, 0.5196, 0.5481]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1798, 0.4085, 0.6464, ..., 0.4512, 0.1603, 0.8018]) +tensor([0.5003, 0.6252, 0.5103, ..., 0.7624, 0.8677, 0.3765]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 3.4671623706817627 seconds +Time: 3.8826541900634766 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '113077', '-ss', '50000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.7483811378479} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '110042', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.494260787963867} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 24998, 24999, 25000]), - col_indices=tensor([ 3755, 45041, 41651, ..., 28239, 26624, 23506]), - values=tensor([0.7255, 0.9443, 0.1927, ..., 0.4549, 0.6422, 0.3790]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24998, 24998, 25000]), + col_indices=tensor([ 968, 37612, 48250, ..., 42979, 11526, 36099]), + values=tensor([0.0287, 0.5894, 0.3848, ..., 0.9306, 0.7000, 0.7540]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1750, 0.3712, 0.8832, ..., 0.2728, 0.8510, 0.3193]) +tensor([0.9632, 0.8456, 0.5500, ..., 0.5594, 0.3118, 0.7403]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.7483811378479 seconds +Time: 10.494260787963867 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 24998, 24999, 25000]), - col_indices=tensor([ 3755, 45041, 41651, ..., 28239, 26624, 23506]), - values=tensor([0.7255, 0.9443, 0.1927, ..., 0.4549, 0.6422, 0.3790]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24998, 24998, 25000]), + col_indices=tensor([ 968, 37612, 48250, ..., 42979, 11526, 36099]), + values=tensor([0.0287, 0.5894, 0.3848, ..., 0.9306, 0.7000, 0.7540]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1750, 0.3712, 0.8832, ..., 0.2728, 0.8510, 0.3193]) +tensor([0.9632, 0.8456, 0.5500, ..., 0.5594, 0.3118, 0.7403]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.7483811378479 seconds +Time: 10.494260787963867 seconds -[18.23, 17.8, 18.68, 17.79, 18.26, 17.97, 18.22, 17.87, 18.18, 18.24] -[76.43] -14.045573711395264 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 113077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.7483811378479, 'TIME_S_1KI': 0.09505364608052831, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1073.5031987619402, 'W': 76.43000000000002} -[18.23, 17.8, 18.68, 17.79, 18.26, 17.97, 18.22, 17.87, 18.18, 18.24, 18.73, 17.83, 18.03, 17.81, 18.07, 18.07, 17.89, 17.96, 20.53, 17.87] -327.495 -16.37475 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 113077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.7483811378479, 'TIME_S_1KI': 0.09505364608052831, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1073.5031987619402, 'W': 76.43000000000002, 'J_1KI': 9.49355924513332, 'W_1KI': 0.6759111048223778, 'W_D': 60.05525000000002, 'J_D': 843.5104406312707, 'W_D_1KI': 0.5311004890472866, 'J_D_1KI': 0.004696803850891751} +[19.13, 18.37, 21.18, 19.25, 18.77, 19.21, 18.57, 18.51, 18.4, 18.72] +[84.45] +13.623497486114502 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 110042, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.494260787963867, 'TIME_S_1KI': 0.09536595834284971, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1150.5043627023697, 'W': 84.45} +[19.13, 18.37, 21.18, 19.25, 18.77, 19.21, 18.57, 18.51, 18.4, 18.72, 18.82, 19.16, 18.6, 18.62, 18.99, 18.66, 18.81, 18.6, 18.74, 19.0] +340.27500000000003 +17.01375 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 110042, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.494260787963867, 'TIME_S_1KI': 0.09536595834284971, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1150.5043627023697, 'W': 84.45, 'J_1KI': 10.455138608007577, 'W_1KI': 0.7674342523763654, 'W_D': 67.43625, 'J_D': 918.7175823479892, 'W_D_1KI': 0.6128228312826012, 'J_D_1KI': 0.0055689903062703435} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.json index e084c9d..b3c7367 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 88156, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.868364572525024, "TIME_S_1KI": 0.12328559113985461, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1132.9122091054917, "W": 79.86, "J_1KI": 12.851220666834834, "W_1KI": 0.9058940968283498, "W_D": 63.193749999999994, "J_D": 896.4809781387447, "W_D_1KI": 0.716840033576841, "J_D_1KI": 0.00813149455030674} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 87980, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.691504955291748, "TIME_S_1KI": 0.12152199312675321, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1230.567190117836, "W": 88.47, "J_1KI": 13.98689690972762, "W_1KI": 1.0055694476017276, "W_D": 71.44725, "J_D": 993.790456359744, "W_D_1KI": 0.8120851329847691, "J_D_1KI": 0.009230337951634112} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.output index a0c02dc..b6f37ce 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.028945207595825195} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.028501510620117188} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 124995, 124996, +tensor(crow_indices=tensor([ 0, 3, 5, ..., 124997, 125000, 125000]), - col_indices=tensor([ 6853, 13029, 2913, ..., 16543, 38720, 48018]), - values=tensor([0.0310, 0.8074, 0.8860, ..., 0.7640, 0.7803, 0.3703]), + col_indices=tensor([ 688, 22534, 35293, ..., 27965, 30917, 45363]), + values=tensor([0.8089, 0.8662, 0.3715, ..., 0.9018, 0.4368, 0.1391]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.5471, 0.4507, 0.3833, ..., 0.1733, 0.0716, 0.3889]) +tensor([0.4275, 0.6873, 0.5905, ..., 0.8775, 0.0153, 0.6240]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.028945207595825195 seconds +Time: 0.028501510620117188 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '36275', '-ss', '50000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.320595741271973} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '36840', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.396669387817383} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 6, ..., 124997, 124999, +tensor(crow_indices=tensor([ 0, 3, 7, ..., 124995, 124997, 125000]), - col_indices=tensor([26872, 2155, 12844, ..., 14460, 31839, 14088]), - values=tensor([0.4897, 0.3509, 0.0171, ..., 0.2036, 0.0300, 0.0283]), + col_indices=tensor([10255, 14562, 30918, ..., 6027, 29124, 40379]), + values=tensor([0.9249, 0.3245, 0.9331, ..., 0.1161, 0.7409, 0.4879]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.1984, 0.6261, 0.3986, ..., 0.2975, 0.8868, 0.2739]) +tensor([0.7659, 0.9381, 0.3858, ..., 0.5909, 0.8665, 0.5688]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 4.320595741271973 seconds +Time: 4.396669387817383 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '88156', '-ss', '50000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.868364572525024} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '87980', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.691504955291748} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 124997, 124998, +tensor(crow_indices=tensor([ 0, 4, 6, ..., 124996, 124998, 125000]), - col_indices=tensor([17427, 27540, 29335, ..., 48451, 4975, 32778]), - values=tensor([0.3820, 0.7973, 0.1142, ..., 0.5863, 0.3733, 0.5873]), + col_indices=tensor([ 9926, 12121, 27292, ..., 4019, 43243, 48084]), + values=tensor([0.5351, 0.6475, 0.3719, ..., 0.2513, 0.9989, 0.4768]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.7507, 0.0731, 0.4946, ..., 0.5316, 0.6298, 0.9971]) +tensor([0.0373, 0.7318, 0.2383, ..., 0.3868, 0.6719, 0.1788]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.868364572525024 seconds +Time: 10.691504955291748 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 6, ..., 124997, 124998, +tensor(crow_indices=tensor([ 0, 4, 6, ..., 124996, 124998, 125000]), - col_indices=tensor([17427, 27540, 29335, ..., 48451, 4975, 32778]), - values=tensor([0.3820, 0.7973, 0.1142, ..., 0.5863, 0.3733, 0.5873]), + col_indices=tensor([ 9926, 12121, 27292, ..., 4019, 43243, 48084]), + values=tensor([0.5351, 0.6475, 0.3719, ..., 0.2513, 0.9989, 0.4768]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.7507, 0.0731, 0.4946, ..., 0.5316, 0.6298, 0.9971]) +tensor([0.0373, 0.7318, 0.2383, ..., 0.3868, 0.6719, 0.1788]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.868364572525024 seconds +Time: 10.691504955291748 seconds -[18.49, 17.88, 18.37, 17.92, 18.86, 17.84, 18.14, 20.87, 18.57, 18.13] -[79.86] -14.186228513717651 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 88156, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.868364572525024, 'TIME_S_1KI': 0.12328559113985461, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.9122091054917, 'W': 79.86} -[18.49, 17.88, 18.37, 17.92, 18.86, 17.84, 18.14, 20.87, 18.57, 18.13, 18.45, 17.94, 18.01, 21.62, 18.32, 18.14, 18.03, 18.0, 18.36, 17.84] -333.32500000000005 -16.66625 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 88156, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.868364572525024, 'TIME_S_1KI': 0.12328559113985461, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.9122091054917, 'W': 79.86, 'J_1KI': 12.851220666834834, 'W_1KI': 0.9058940968283498, 'W_D': 63.193749999999994, 'J_D': 896.4809781387447, 'W_D_1KI': 0.716840033576841, 'J_D_1KI': 0.00813149455030674} +[19.17, 18.37, 18.62, 19.0, 18.76, 19.72, 18.55, 18.79, 18.81, 18.44] +[88.47] +13.90942907333374 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 87980, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.691504955291748, 'TIME_S_1KI': 0.12152199312675321, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.567190117836, 'W': 88.47} +[19.17, 18.37, 18.62, 19.0, 18.76, 19.72, 18.55, 18.79, 18.81, 18.44, 18.89, 18.83, 18.65, 18.38, 18.48, 18.74, 18.64, 18.42, 21.9, 19.09] +340.45500000000004 +17.022750000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 87980, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.691504955291748, 'TIME_S_1KI': 0.12152199312675321, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.567190117836, 'W': 88.47, 'J_1KI': 13.98689690972762, 'W_1KI': 1.0055694476017276, 'W_D': 71.44725, 'J_D': 993.790456359744, 'W_D_1KI': 0.8120851329847691, 'J_D_1KI': 0.009230337951634112} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json index 63a6b21..aacc2c9 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 339415, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.661308526992798, "TIME_S_1KI": 0.031410834898259646, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1042.263754928112, "W": 73.63, "J_1KI": 3.0707651545397576, "W_1KI": 0.2169320743043177, "W_D": 56.882, "J_D": 805.1887397503853, "W_D_1KI": 0.1675883505443189, "J_D_1KI": 0.0004937564649303033} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 340799, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.474717378616333, "TIME_S_1KI": 0.03073576324641895, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1184.1861459732056, "W": 82.2, "J_1KI": 3.474734802546972, "W_1KI": 0.24119789083888157, "W_D": 65.00150000000001, "J_D": 936.4218463196755, "W_D_1KI": 0.19073266060052993, "J_D_1KI": 0.0005596632049992222} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output index 4acadea..d0dfbed 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,14 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02093958854675293} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.020232677459716797} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), - col_indices=tensor([ 145, 181, 299, ..., 1340, 4416, 166]), - values=tensor([0.2713, 0.7441, 0.5681, ..., 0.3863, 0.3329, 0.3299]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 2500, 2500, 2500]), + col_indices=tensor([2628, 3754, 592, ..., 2458, 3315, 4944]), + values=tensor([0.4123, 0.7806, 0.8053, ..., 0.5946, 0.1806, 0.3377]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([6.8436e-01, 7.7662e-01, 5.3826e-02, ..., 2.4557e-01, 1.4818e-01, - 6.5619e-04]) +tensor([0.7124, 0.1921, 0.3514, ..., 0.3599, 0.5299, 0.1827]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.02093958854675293 seconds +Time: 0.020232677459716797 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50144', '-ss', '5000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.5512330532073975} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '51896', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.6946630477905273} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), - col_indices=tensor([ 261, 2124, 2825, ..., 2342, 1684, 3815]), - values=tensor([0.1132, 0.9807, 0.3410, ..., 0.0783, 0.3569, 0.0713]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([4256, 1296, 4417, ..., 3148, 1518, 3293]), + values=tensor([0.6152, 0.7132, 0.3577, ..., 0.6179, 0.4799, 0.9734]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.8182, 0.0907, 0.1359, ..., 0.4059, 0.0754, 0.0727]) +tensor([0.2217, 0.2835, 0.9132, ..., 0.3426, 0.7119, 0.8690]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -35,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 1.5512330532073975 seconds +Time: 1.6946630477905273 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '339415', '-ss', '5000', '-sd', '0.0001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.661308526992798} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '321543', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.906718492507935} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), - col_indices=tensor([ 91, 2131, 2855, ..., 2446, 470, 1581]), - values=tensor([0.9229, 0.3729, 0.6792, ..., 0.1416, 0.2267, 0.3921]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2498, 2498, 2500]), + col_indices=tensor([ 523, 3699, 1839, ..., 2785, 659, 2568]), + values=tensor([0.0225, 0.6703, 0.1062, ..., 0.0367, 0.5194, 0.4406]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.5750, 0.0749, 0.6665, ..., 0.8045, 0.0578, 0.3106]) +tensor([0.2271, 0.1701, 0.4937, ..., 0.4807, 0.0704, 0.7192]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -54,15 +53,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.661308526992798 seconds +Time: 9.906718492507935 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '340799', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.474717378616333} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), - col_indices=tensor([ 91, 2131, 2855, ..., 2446, 470, 1581]), - values=tensor([0.9229, 0.3729, 0.6792, ..., 0.1416, 0.2267, 0.3921]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([3046, 2614, 1442, ..., 2131, 1160, 199]), + values=tensor([0.0087, 0.6144, 0.0411, ..., 0.0390, 0.8578, 0.8978]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.5750, 0.0749, 0.6665, ..., 0.8045, 0.0578, 0.3106]) +tensor([0.5522, 0.2768, 0.6866, ..., 0.7223, 0.2595, 0.0285]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -70,13 +72,29 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.661308526992798 seconds +Time: 10.474717378616333 seconds -[18.68, 17.92, 18.02, 18.26, 18.14, 18.26, 18.3, 17.84, 18.15, 17.87] -[73.63] -14.155422449111938 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 339415, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.661308526992798, 'TIME_S_1KI': 0.031410834898259646, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1042.263754928112, 'W': 73.63} -[18.68, 17.92, 18.02, 18.26, 18.14, 18.26, 18.3, 17.84, 18.15, 17.87, 18.33, 18.37, 18.02, 18.02, 22.34, 18.22, 18.25, 22.38, 17.9, 18.26] -334.9599999999999 -16.747999999999998 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 339415, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.661308526992798, 'TIME_S_1KI': 0.031410834898259646, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1042.263754928112, 'W': 73.63, 'J_1KI': 3.0707651545397576, 'W_1KI': 0.2169320743043177, 'W_D': 56.882, 'J_D': 805.1887397503853, 'W_D_1KI': 0.1675883505443189, 'J_D_1KI': 0.0004937564649303033} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([3046, 2614, 1442, ..., 2131, 1160, 199]), + values=tensor([0.0087, 0.6144, 0.0411, ..., 0.0390, 0.8578, 0.8978]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.5522, 0.2768, 0.6866, ..., 0.7223, 0.2595, 0.0285]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.474717378616333 seconds + +[19.26, 18.52, 18.76, 18.46, 18.71, 19.29, 18.61, 18.48, 18.54, 20.56] +[82.2] +14.406157493591309 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 340799, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.474717378616333, 'TIME_S_1KI': 0.03073576324641895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1184.1861459732056, 'W': 82.2} +[19.26, 18.52, 18.76, 18.46, 18.71, 19.29, 18.61, 18.48, 18.54, 20.56, 19.09, 18.94, 18.73, 18.56, 18.9, 23.81, 19.07, 18.44, 19.46, 18.47] +343.9699999999999 +17.198499999999996 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 340799, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.474717378616333, 'TIME_S_1KI': 0.03073576324641895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1184.1861459732056, 'W': 82.2, 'J_1KI': 3.474734802546972, 'W_1KI': 0.24119789083888157, 'W_D': 65.00150000000001, 'J_D': 936.4218463196755, 'W_D_1KI': 0.19073266060052993, 'J_D_1KI': 0.0005596632049992222} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json index 4f7c98c..25b8108 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 242735, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.6920804977417, "TIME_S_1KI": 0.044048367552028754, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1022.2163880014419, "W": 74.91, "J_1KI": 4.211244311703883, "W_1KI": 0.30860815292397054, "W_D": 58.500499999999995, "J_D": 798.2935496766567, "W_D_1KI": 0.241005623416483, "J_D_1KI": 0.000992875454369922} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 242501, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.588265895843506, "TIME_S_1KI": 0.04366277209513984, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1181.5985651540755, "W": 83.01999999999998, "J_1KI": 4.872551309702127, "W_1KI": 0.3423491037150362, "W_D": 66.15374999999997, "J_D": 941.5463271448012, "W_D_1KI": 0.2727978441326014, "J_D_1KI": 0.0011249349245265024} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output index 6818997..fe2e3ea 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.019707918167114258} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.021615982055664062} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 9, ..., 24992, 24998, 25000]), - col_indices=tensor([2443, 3271, 4233, ..., 3520, 3792, 4350]), - values=tensor([0.8426, 0.5824, 0.3389, ..., 0.9840, 0.1147, 0.8239]), +tensor(crow_indices=tensor([ 0, 5, 11, ..., 24992, 24995, 25000]), + col_indices=tensor([ 538, 1794, 1869, ..., 620, 754, 2185]), + values=tensor([0.7958, 0.9810, 0.8441, ..., 0.1848, 0.5789, 0.4718]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5471, 0.6226, 0.3278, ..., 0.2451, 0.7959, 0.6112]) +tensor([0.9458, 0.8492, 0.0516, ..., 0.5673, 0.4627, 0.7477]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.019707918167114258 seconds +Time: 0.021615982055664062 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53278', '-ss', '5000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.304640293121338} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '48575', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.1032345294952393} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 13, ..., 24990, 24994, 25000]), - col_indices=tensor([ 876, 897, 2274, ..., 2103, 3712, 4740]), - values=tensor([0.9508, 0.2626, 0.2379, ..., 0.2341, 0.9066, 0.6182]), +tensor(crow_indices=tensor([ 0, 2, 7, ..., 24994, 24997, 25000]), + col_indices=tensor([1341, 3194, 65, ..., 424, 2435, 3817]), + values=tensor([0.3518, 0.9410, 0.4009, ..., 0.7954, 0.6370, 0.0462]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8511, 0.0306, 0.7639, ..., 0.5025, 0.8599, 0.9690]) +tensor([0.6184, 0.6272, 0.7963, ..., 0.5192, 0.6796, 0.6431]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 2.304640293121338 seconds +Time: 2.1032345294952393 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '242735', '-ss', '5000', '-sd', '0.001', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.6920804977417} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '242501', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.588265895843506} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 10, ..., 24991, 24997, 25000]), - col_indices=tensor([1152, 1177, 2929, ..., 1264, 2609, 4571]), - values=tensor([0.4587, 0.9835, 0.0653, ..., 0.8655, 0.2110, 0.7343]), +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24992, 24996, 25000]), + col_indices=tensor([1959, 1972, 3833, ..., 874, 3224, 4993]), + values=tensor([0.1621, 0.3934, 0.1039, ..., 0.7772, 0.8981, 0.7652]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5612, 0.6517, 0.4101, ..., 0.1691, 0.2414, 0.3717]) +tensor([0.4878, 0.7321, 0.0138, ..., 0.8024, 0.2204, 0.3168]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.6920804977417 seconds +Time: 10.588265895843506 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 10, ..., 24991, 24997, 25000]), - col_indices=tensor([1152, 1177, 2929, ..., 1264, 2609, 4571]), - values=tensor([0.4587, 0.9835, 0.0653, ..., 0.8655, 0.2110, 0.7343]), +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24992, 24996, 25000]), + col_indices=tensor([1959, 1972, 3833, ..., 874, 3224, 4993]), + values=tensor([0.1621, 0.3934, 0.1039, ..., 0.7772, 0.8981, 0.7652]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5612, 0.6517, 0.4101, ..., 0.1691, 0.2414, 0.3717]) +tensor([0.4878, 0.7321, 0.0138, ..., 0.8024, 0.2204, 0.3168]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.6920804977417 seconds +Time: 10.588265895843506 seconds -[18.79, 17.83, 18.25, 17.86, 18.02, 18.39, 18.09, 17.82, 18.07, 17.75] -[74.91] -13.64592695236206 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 242735, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.6920804977417, 'TIME_S_1KI': 0.044048367552028754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1022.2163880014419, 'W': 74.91} -[18.79, 17.83, 18.25, 17.86, 18.02, 18.39, 18.09, 17.82, 18.07, 17.75, 18.54, 18.01, 18.05, 19.12, 19.51, 18.12, 18.41, 17.83, 18.29, 17.96] -328.19 -16.4095 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 242735, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.6920804977417, 'TIME_S_1KI': 0.044048367552028754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1022.2163880014419, 'W': 74.91, 'J_1KI': 4.211244311703883, 'W_1KI': 0.30860815292397054, 'W_D': 58.500499999999995, 'J_D': 798.2935496766567, 'W_D_1KI': 0.241005623416483, 'J_D_1KI': 0.000992875454369922} +[19.01, 18.78, 19.12, 18.48, 18.82, 18.63, 18.62, 18.49, 18.85, 18.55] +[83.02] +14.23269772529602 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 242501, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.588265895843506, 'TIME_S_1KI': 0.04366277209513984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1181.5985651540755, 'W': 83.01999999999998} +[19.01, 18.78, 19.12, 18.48, 18.82, 18.63, 18.62, 18.49, 18.85, 18.55, 19.01, 18.56, 19.17, 18.58, 18.61, 18.4, 18.91, 18.6, 19.13, 18.58] +337.325 +16.86625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 242501, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.588265895843506, 'TIME_S_1KI': 0.04366277209513984, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1181.5985651540755, 'W': 83.01999999999998, 'J_1KI': 4.872551309702127, 'W_1KI': 0.3423491037150362, 'W_D': 66.15374999999997, 'J_D': 941.5463271448012, 'W_D_1KI': 0.2727978441326014, 'J_D_1KI': 0.0011249349245265024} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json index 64ab37a..284b6b4 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 161950, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.493181943893433, "TIME_S_1KI": 0.06479272580360255, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1161.5640621185303, "W": 84.0, "J_1KI": 7.172362223640199, "W_1KI": 0.5186786045075641, "W_D": 67.18725, "J_D": 929.0749408639671, "W_D_1KI": 0.41486415560358136, "J_D_1KI": 0.0025616804915318393} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 160288, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.41415810585022, "TIME_S_1KI": 0.06497153939066069, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1300.6230839395523, "W": 90.31, "J_1KI": 8.114288555222801, "W_1KI": 0.5634233379916151, "W_D": 73.274, "J_D": 1055.2746744832993, "W_D_1KI": 0.45713964863246154, "J_D_1KI": 0.002851989223350853} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output index f4d043e..c1f07bf 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.022989749908447266} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.022222280502319336} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 53, 112, ..., 249913, 249957, +tensor(crow_indices=tensor([ 0, 65, 108, ..., 249912, 249955, 250000]), - col_indices=tensor([ 48, 54, 234, ..., 4368, 4853, 4864]), - values=tensor([0.3452, 0.1008, 0.0125, ..., 0.4983, 0.8936, 0.7126]), + col_indices=tensor([ 39, 109, 188, ..., 4800, 4890, 4910]), + values=tensor([0.0753, 0.9561, 0.7366, ..., 0.1284, 0.9854, 0.3908]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5619, 0.6285, 0.8433, ..., 0.7149, 0.5039, 0.6932]) +tensor([0.1414, 0.2099, 0.8694, ..., 0.2280, 0.9608, 0.7506]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.022989749908447266 seconds +Time: 0.022222280502319336 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '45672', '-ss', '5000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.961124897003174} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '47249', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.095125675201416} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 53, 98, ..., 249913, 249948, +tensor(crow_indices=tensor([ 0, 40, 94, ..., 249926, 249968, 250000]), - col_indices=tensor([ 202, 295, 369, ..., 4836, 4929, 4943]), - values=tensor([0.0950, 0.5576, 0.0327, ..., 0.3192, 0.9052, 0.1110]), + col_indices=tensor([ 87, 303, 372, ..., 4332, 4413, 4812]), + values=tensor([0.0586, 0.1707, 0.3614, ..., 0.6651, 0.1911, 0.3417]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.6070, 0.1874, 0.1819, ..., 0.4756, 0.1999, 0.3064]) +tensor([0.0815, 0.5844, 0.5335, ..., 0.2225, 0.7876, 0.4789]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 2.961124897003174 seconds +Time: 3.095125675201416 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '161950', '-ss', '5000', '-sd', '0.01', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.493181943893433} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '160288', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.41415810585022} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 111, ..., 249911, 249964, +tensor(crow_indices=tensor([ 0, 33, 67, ..., 249892, 249948, 250000]), - col_indices=tensor([ 19, 43, 144, ..., 4843, 4924, 4947]), - values=tensor([0.0652, 0.5238, 0.9360, ..., 0.5118, 0.2782, 0.1343]), + col_indices=tensor([ 195, 263, 291, ..., 4372, 4543, 4623]), + values=tensor([0.5403, 0.8262, 0.1866, ..., 0.1892, 0.9511, 0.1533]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4213, 0.5109, 0.7737, ..., 0.0670, 0.5810, 0.9899]) +tensor([0.0116, 0.2797, 0.4745, ..., 0.5306, 0.6019, 0.8794]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.493181943893433 seconds +Time: 10.41415810585022 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 111, ..., 249911, 249964, +tensor(crow_indices=tensor([ 0, 33, 67, ..., 249892, 249948, 250000]), - col_indices=tensor([ 19, 43, 144, ..., 4843, 4924, 4947]), - values=tensor([0.0652, 0.5238, 0.9360, ..., 0.5118, 0.2782, 0.1343]), + col_indices=tensor([ 195, 263, 291, ..., 4372, 4543, 4623]), + values=tensor([0.5403, 0.8262, 0.1866, ..., 0.1892, 0.9511, 0.1533]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4213, 0.5109, 0.7737, ..., 0.0670, 0.5810, 0.9899]) +tensor([0.0116, 0.2797, 0.4745, ..., 0.5306, 0.6019, 0.8794]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.493181943893433 seconds +Time: 10.41415810585022 seconds -[18.86, 18.02, 19.18, 18.0, 18.43, 21.89, 19.39, 17.96, 18.27, 17.81] -[84.0] -13.82814359664917 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 161950, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.493181943893433, 'TIME_S_1KI': 0.06479272580360255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1161.5640621185303, 'W': 84.0} -[18.86, 18.02, 19.18, 18.0, 18.43, 21.89, 19.39, 17.96, 18.27, 17.81, 18.1, 20.23, 20.02, 18.19, 17.98, 17.88, 18.2, 17.87, 18.32, 18.08] -336.255 -16.81275 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 161950, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.493181943893433, 'TIME_S_1KI': 0.06479272580360255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1161.5640621185303, 'W': 84.0, 'J_1KI': 7.172362223640199, 'W_1KI': 0.5186786045075641, 'W_D': 67.18725, 'J_D': 929.0749408639671, 'W_D_1KI': 0.41486415560358136, 'J_D_1KI': 0.0025616804915318393} +[18.89, 18.49, 18.95, 21.32, 19.61, 18.65, 19.28, 18.61, 18.59, 18.51] +[90.31] +14.401761531829834 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 160288, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.41415810585022, 'TIME_S_1KI': 0.06497153939066069, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.6230839395523, 'W': 90.31} +[18.89, 18.49, 18.95, 21.32, 19.61, 18.65, 19.28, 18.61, 18.59, 18.51, 19.15, 18.78, 18.93, 18.53, 18.69, 18.51, 18.5, 18.5, 18.83, 19.35] +340.71999999999997 +17.035999999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 160288, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.41415810585022, 'TIME_S_1KI': 0.06497153939066069, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.6230839395523, 'W': 90.31, 'J_1KI': 8.114288555222801, 'W_1KI': 0.5634233379916151, 'W_D': 73.274, 'J_D': 1055.2746744832993, 'W_D_1KI': 0.45713964863246154, 'J_D_1KI': 0.002851989223350853} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json index c0ca085..02d1c44 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 46969, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.723698854446411, "TIME_S_1KI": 0.22831439575989293, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1262.0827813720703, "W": 87.97, "J_1KI": 26.87054826315379, "W_1KI": 1.8729374693947072, "W_D": 71.6635, "J_D": 1028.1376537780761, "W_D_1KI": 1.5257616725925611, "J_D_1KI": 0.03248444021785776} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 43317, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.514539003372192, "TIME_S_1KI": 0.24273470008015774, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1282.2987394380568, "W": 89.82, "J_1KI": 29.602667300091348, "W_1KI": 2.073550799916892, "W_D": 72.69899999999998, "J_D": 1037.8739262793063, "W_D_1KI": 1.6783018214557792, "J_D_1KI": 0.03874464578469837} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output index 62668bd..21a66b8 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03811240196228027} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03870248794555664} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 252, 486, ..., 1249499, - 1249768, 1250000]), - col_indices=tensor([ 15, 65, 81, ..., 4947, 4948, 4952]), - values=tensor([0.2497, 0.0794, 0.3182, ..., 0.5399, 0.7483, 0.2341]), +tensor(crow_indices=tensor([ 0, 249, 456, ..., 1249509, + 1249760, 1250000]), + col_indices=tensor([ 11, 61, 68, ..., 4983, 4987, 4999]), + values=tensor([0.8543, 0.3334, 0.8458, ..., 0.8706, 0.3239, 0.8971]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.6929, 0.1024, 0.7145, ..., 0.7803, 0.6014, 0.5585]) +tensor([0.1640, 0.6869, 0.3358, ..., 0.6749, 0.0366, 0.9855]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 0.03811240196228027 seconds +Time: 0.03870248794555664 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27550', '-ss', '5000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 6.158770799636841} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27130', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 6.576195955276489} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 250, 496, ..., 1249504, - 1249751, 1250000]), - col_indices=tensor([ 13, 88, 93, ..., 4888, 4919, 4936]), - values=tensor([0.3293, 0.4307, 0.3782, ..., 0.2045, 0.2965, 0.3765]), +tensor(crow_indices=tensor([ 0, 251, 503, ..., 1249463, + 1249716, 1250000]), + col_indices=tensor([ 0, 17, 18, ..., 4944, 4963, 4970]), + values=tensor([0.0874, 0.9701, 0.9470, ..., 0.5996, 0.4415, 0.6823]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.2742, 0.5184, 0.9225, ..., 0.0830, 0.2762, 0.4744]) +tensor([0.2939, 0.4606, 0.6555, ..., 0.6332, 0.8096, 0.8939]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 6.158770799636841 seconds +Time: 6.576195955276489 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46969', '-ss', '5000', '-sd', '0.05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.723698854446411} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '43317', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.514539003372192} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 267, 518, ..., 1249533, - 1249757, 1250000]), - col_indices=tensor([ 4, 12, 26, ..., 4948, 4976, 4977]), - values=tensor([0.8262, 0.2304, 0.8718, ..., 0.9371, 0.9418, 0.0811]), +tensor(crow_indices=tensor([ 0, 225, 465, ..., 1249521, + 1249744, 1250000]), + col_indices=tensor([ 10, 14, 60, ..., 4938, 4947, 4964]), + values=tensor([0.1905, 0.3997, 0.9962, ..., 0.5332, 0.4508, 0.1955]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.8290, 0.5732, 0.0178, ..., 0.9134, 0.7238, 0.4621]) +tensor([0.0493, 0.7294, 0.8838, ..., 0.4629, 0.2283, 0.0408]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.723698854446411 seconds +Time: 10.514539003372192 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 267, 518, ..., 1249533, - 1249757, 1250000]), - col_indices=tensor([ 4, 12, 26, ..., 4948, 4976, 4977]), - values=tensor([0.8262, 0.2304, 0.8718, ..., 0.9371, 0.9418, 0.0811]), +tensor(crow_indices=tensor([ 0, 225, 465, ..., 1249521, + 1249744, 1250000]), + col_indices=tensor([ 10, 14, 60, ..., 4938, 4947, 4964]), + values=tensor([0.1905, 0.3997, 0.9962, ..., 0.5332, 0.4508, 0.1955]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.8290, 0.5732, 0.0178, ..., 0.9134, 0.7238, 0.4621]) +tensor([0.0493, 0.7294, 0.8838, ..., 0.4629, 0.2283, 0.0408]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.723698854446411 seconds +Time: 10.514539003372192 seconds -[18.22, 17.77, 18.1, 17.81, 18.08, 18.08, 18.29, 17.93, 18.14, 17.97] -[87.97] -14.34674072265625 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46969, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.723698854446411, 'TIME_S_1KI': 0.22831439575989293, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1262.0827813720703, 'W': 87.97} -[18.22, 17.77, 18.1, 17.81, 18.08, 18.08, 18.29, 17.93, 18.14, 17.97, 18.49, 18.0, 18.1, 18.25, 18.1, 17.98, 17.87, 18.07, 17.92, 20.6] -326.13 -16.3065 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46969, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.723698854446411, 'TIME_S_1KI': 0.22831439575989293, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1262.0827813720703, 'W': 87.97, 'J_1KI': 26.87054826315379, 'W_1KI': 1.8729374693947072, 'W_D': 71.6635, 'J_D': 1028.1376537780761, 'W_D_1KI': 1.5257616725925611, 'J_D_1KI': 0.03248444021785776} +[18.88, 18.95, 18.56, 18.93, 18.61, 18.37, 18.57, 18.8, 18.58, 18.47] +[89.82] +14.276316404342651 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 43317, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.514539003372192, 'TIME_S_1KI': 0.24273470008015774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.2987394380568, 'W': 89.82} +[18.88, 18.95, 18.56, 18.93, 18.61, 18.37, 18.57, 18.8, 18.58, 18.47, 19.07, 18.87, 18.54, 18.68, 18.92, 18.59, 23.9, 19.06, 18.7, 19.16] +342.42 +17.121000000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 43317, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.514539003372192, 'TIME_S_1KI': 0.24273470008015774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1282.2987394380568, 'W': 89.82, 'J_1KI': 29.602667300091348, 'W_1KI': 2.073550799916892, 'W_D': 72.69899999999998, 'J_D': 1037.8739262793063, 'W_D_1KI': 1.6783018214557792, 'J_D_1KI': 0.03874464578469837} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json index fe585f9..7a2dbef 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 19220, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.306657314300537, "TIME_S_1KI": 0.536246478371516, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1258.2980075454711, "W": 87.22999999999999, "J_1KI": 65.46815856115875, "W_1KI": 4.538501560874089, "W_D": 70.87349999999999, "J_D": 1022.354509202957, "W_D_1KI": 3.6874869927159204, "J_D_1KI": 0.19185676340873678} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 19195, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.674326419830322, "TIME_S_1KI": 0.5560993185637052, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1320.7079879951475, "W": 89.32, "J_1KI": 68.80479228940597, "W_1KI": 4.653295128939828, "W_D": 72.452, "J_D": 1071.2934969348908, "W_D_1KI": 3.7745246157853605, "J_D_1KI": 0.19664103234099298} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output index 2db63b3..3982142 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.06937050819396973} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.06820058822631836} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 517, 1013, ..., 2498965, - 2499518, 2500000]), - col_indices=tensor([ 16, 31, 39, ..., 4973, 4987, 4996]), - values=tensor([0.8092, 0.6907, 0.1859, ..., 0.6156, 0.1820, 0.0827]), +tensor(crow_indices=tensor([ 0, 527, 1022, ..., 2498961, + 2499470, 2500000]), + col_indices=tensor([ 3, 9, 18, ..., 4988, 4996, 4997]), + values=tensor([0.9931, 0.9342, 0.5454, ..., 0.8792, 0.3313, 0.4412]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0341, 0.1297, 0.9553, ..., 0.9560, 0.1422, 0.0438]) +tensor([0.4695, 0.7281, 0.8416, ..., 0.2227, 0.0195, 0.5717]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 0.06937050819396973 seconds +Time: 0.06820058822631836 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15136', '-ss', '5000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.268742084503174} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15395', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.421234846115112} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 498, 1015, ..., 2498995, - 2499510, 2500000]), - col_indices=tensor([ 1, 5, 12, ..., 4987, 4992, 4994]), - values=tensor([0.2706, 0.5291, 0.0606, ..., 0.9998, 0.6766, 0.8077]), +tensor(crow_indices=tensor([ 0, 525, 988, ..., 2498961, + 2499466, 2500000]), + col_indices=tensor([ 0, 4, 11, ..., 4982, 4991, 4997]), + values=tensor([0.8697, 0.9027, 0.4817, ..., 0.5631, 0.4015, 0.3568]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5238, 0.3873, 0.9372, ..., 0.7751, 0.3587, 0.1743]) +tensor([0.9397, 0.4161, 0.2766, ..., 0.4206, 0.4046, 0.6689]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 8.268742084503174 seconds +Time: 8.421234846115112 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '19220', '-ss', '5000', '-sd', '0.1', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.306657314300537} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '19195', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.674326419830322} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 478, 976, ..., 2498986, - 2499483, 2500000]), - col_indices=tensor([ 0, 15, 20, ..., 4988, 4995, 4997]), - values=tensor([0.9696, 0.2544, 0.3304, ..., 0.5139, 0.4686, 0.4850]), +tensor(crow_indices=tensor([ 0, 474, 982, ..., 2498973, + 2499464, 2500000]), + col_indices=tensor([ 0, 4, 10, ..., 4986, 4995, 4998]), + values=tensor([0.6694, 0.2603, 0.0355, ..., 0.0763, 0.4858, 0.2671]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8873, 0.6521, 0.3260, ..., 0.9177, 0.3863, 0.5956]) +tensor([0.9454, 0.3117, 0.1592, ..., 0.6761, 0.9412, 0.4124]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.306657314300537 seconds +Time: 10.674326419830322 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 478, 976, ..., 2498986, - 2499483, 2500000]), - col_indices=tensor([ 0, 15, 20, ..., 4988, 4995, 4997]), - values=tensor([0.9696, 0.2544, 0.3304, ..., 0.5139, 0.4686, 0.4850]), +tensor(crow_indices=tensor([ 0, 474, 982, ..., 2498973, + 2499464, 2500000]), + col_indices=tensor([ 0, 4, 10, ..., 4986, 4995, 4998]), + values=tensor([0.6694, 0.2603, 0.0355, ..., 0.0763, 0.4858, 0.2671]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8873, 0.6521, 0.3260, ..., 0.9177, 0.3863, 0.5956]) +tensor([0.9454, 0.3117, 0.1592, ..., 0.6761, 0.9412, 0.4124]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.306657314300537 seconds +Time: 10.674326419830322 seconds -[18.67, 18.15, 17.98, 18.09, 18.08, 18.12, 18.09, 17.98, 17.86, 18.03] -[87.23] -14.425060272216797 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19220, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.306657314300537, 'TIME_S_1KI': 0.536246478371516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1258.2980075454711, 'W': 87.22999999999999} -[18.67, 18.15, 17.98, 18.09, 18.08, 18.12, 18.09, 17.98, 17.86, 18.03, 18.76, 17.96, 18.22, 17.88, 18.31, 18.19, 18.22, 17.8, 18.27, 20.4] -327.13 -16.3565 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19220, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.306657314300537, 'TIME_S_1KI': 0.536246478371516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1258.2980075454711, 'W': 87.22999999999999, 'J_1KI': 65.46815856115875, 'W_1KI': 4.538501560874089, 'W_D': 70.87349999999999, 'J_D': 1022.354509202957, 'W_D_1KI': 3.6874869927159204, 'J_D_1KI': 0.19185676340873678} +[19.27, 18.44, 18.93, 18.6, 18.67, 18.58, 19.07, 19.64, 18.53, 18.58] +[89.32] +14.786251544952393 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.674326419830322, 'TIME_S_1KI': 0.5560993185637052, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1320.7079879951475, 'W': 89.32} +[19.27, 18.44, 18.93, 18.6, 18.67, 18.58, 19.07, 19.64, 18.53, 18.58, 19.02, 18.67, 18.78, 18.82, 18.69, 18.42, 18.7, 18.66, 18.5, 18.45] +337.36 +16.868000000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.674326419830322, 'TIME_S_1KI': 0.5560993185637052, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1320.7079879951475, 'W': 89.32, 'J_1KI': 68.80479228940597, 'W_1KI': 4.653295128939828, 'W_D': 72.452, 'J_D': 1071.2934969348908, 'W_D_1KI': 3.7745246157853605, 'J_D_1KI': 0.19664103234099298} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.json index 5da43df..4dac3eb 100644 --- 
a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 9074, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.487424612045288, "TIME_S_1KI": 1.1557664328901573, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1301.6191907930374, "W": 85.83, "J_1KI": 143.44491853571054, "W_1KI": 9.458893541988099, "W_D": 69.53375, "J_D": 1054.4851847583054, "W_D_1KI": 7.662965616045845, "J_D_1KI": 0.8444969821518454} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8923, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.545594930648804, "TIME_S_1KI": 1.1818441029529085, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1330.735052251816, "W": 87.79, "J_1KI": 149.13538633327533, "W_1KI": 9.838619298442229, "W_D": 70.932, "J_D": 1075.1987552833557, "W_D_1KI": 7.949344390899922, "J_D_1KI": 0.89088248244984} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.output index 4c63e35..4bc8723 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.12977051734924316} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.13272905349731445} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1037, 2033, ..., 4997947, - 4998969, 5000000]), - col_indices=tensor([ 0, 3, 11, ..., 4985, 4988, 4990]), - values=tensor([0.1539, 0.2882, 0.0917, ..., 0.8336, 0.9260, 0.3814]), +tensor(crow_indices=tensor([ 0, 997, 1995, ..., 4998002, + 4998986, 5000000]), + col_indices=tensor([ 7, 8, 17, ..., 4982, 4983, 4992]), + values=tensor([0.4184, 0.9767, 0.6771, ..., 0.6978, 0.0394, 0.2404]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.9778, 0.3097, 0.5480, ..., 0.9590, 0.3024, 0.0294]) +tensor([0.1273, 0.8365, 0.7708, ..., 0.6648, 0.0907, 0.2516]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 0.12977051734924316 seconds +Time: 0.13272905349731445 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8091', '-ss', '5000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 9.36232042312622} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7910', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 9.307441234588623} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 972, 1989, ..., 4997978, - 4998985, 5000000]), - col_indices=tensor([ 0, 9, 19, ..., 4989, 4992, 4995]), - values=tensor([0.2585, 0.0110, 0.4823, ..., 0.4314, 0.8099, 0.9487]), +tensor(crow_indices=tensor([ 0, 974, 1954, ..., 4998031, + 4999055, 5000000]), + col_indices=tensor([ 7, 10, 12, ..., 4985, 4990, 4995]), + values=tensor([0.9202, 0.4707, 0.8760, ..., 0.2235, 0.0017, 0.5563]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7513, 0.6352, 0.8184, ..., 0.0273, 0.2479, 0.5631]) +tensor([0.0317, 0.9028, 0.8670, ..., 0.2249, 0.4204, 0.9373]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 9.36232042312622 seconds +Time: 9.307441234588623 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9074', '-ss', '5000', '-sd', '0.2', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.487424612045288} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8923', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.545594930648804} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1061, 2118, ..., 4997993, - 4998997, 5000000]), - col_indices=tensor([ 3, 6, 10, ..., 4996, 4998, 4999]), - values=tensor([0.5066, 0.7039, 0.5374, ..., 0.1064, 0.9581, 0.5937]), +tensor(crow_indices=tensor([ 0, 983, 1975, ..., 4998032, + 4998999, 5000000]), + col_indices=tensor([ 6, 16, 20, ..., 4983, 4987, 4992]), + values=tensor([0.2859, 0.1301, 0.3895, ..., 0.6829, 0.0353, 0.2028]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1982, 0.1257, 0.6934, ..., 0.0401, 0.8872, 0.0311]) +tensor([0.4672, 0.7822, 0.1527, ..., 0.8581, 0.0623, 0.3252]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.487424612045288 seconds +Time: 10.545594930648804 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1061, 2118, ..., 4997993, - 4998997, 5000000]), - col_indices=tensor([ 3, 6, 10, ..., 4996, 4998, 4999]), - values=tensor([0.5066, 0.7039, 0.5374, ..., 0.1064, 0.9581, 0.5937]), +tensor(crow_indices=tensor([ 0, 983, 1975, ..., 4998032, + 4998999, 5000000]), + col_indices=tensor([ 6, 16, 20, ..., 4983, 4987, 4992]), + values=tensor([0.2859, 0.1301, 0.3895, ..., 0.6829, 0.0353, 0.2028]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1982, 0.1257, 0.6934, ..., 0.0401, 0.8872, 0.0311]) +tensor([0.4672, 0.7822, 0.1527, ..., 0.8581, 0.0623, 0.3252]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.487424612045288 seconds +Time: 10.545594930648804 seconds -[18.43, 18.11, 17.89, 18.52, 18.92, 18.0, 18.0, 17.79, 18.16, 17.76] -[85.83] -15.16508436203003 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9074, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.487424612045288, 'TIME_S_1KI': 1.1557664328901573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1301.6191907930374, 'W': 85.83} -[18.43, 18.11, 17.89, 18.52, 18.92, 18.0, 18.0, 17.79, 18.16, 17.76, 18.23, 17.97, 18.42, 17.87, 17.92, 18.08, 18.11, 17.82, 17.87, 18.53] -325.925 -16.29625 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9074, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.487424612045288, 'TIME_S_1KI': 1.1557664328901573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1301.6191907930374, 'W': 85.83, 'J_1KI': 143.44491853571054, 'W_1KI': 9.458893541988099, 'W_D': 69.53375, 'J_D': 1054.4851847583054, 'W_D_1KI': 7.662965616045845, 'J_D_1KI': 0.8444969821518454} +[19.05, 18.78, 18.53, 18.46, 18.67, 18.5, 18.62, 18.45, 18.84, 18.85] +[87.79] +15.158162117004395 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8923, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.545594930648804, 'TIME_S_1KI': 1.1818441029529085, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1330.735052251816, 'W': 87.79} +[19.05, 18.78, 18.53, 18.46, 18.67, 18.5, 18.62, 18.45, 18.84, 18.85, 18.85, 18.82, 18.67, 19.46, 18.69, 19.1, 18.66, 18.45, 18.69, 18.79] +337.16 +16.858 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8923, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.545594930648804, 'TIME_S_1KI': 1.1818441029529085, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1330.735052251816, 'W': 87.79, 'J_1KI': 149.13538633327533, 'W_1KI': 9.838619298442229, 'W_D': 70.932, 'J_D': 1075.1987552833557, 'W_D_1KI': 7.949344390899922, 'J_D_1KI': 0.89088248244984} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.json index c0e5e12..6906142 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.json +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 5607, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.746073484420776, "TIME_S_1KI": 1.9165460111326513, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1333.0484231472017, "W": 84.51, "J_1KI": 237.7471773046552, "W_1KI": 15.07223113964687, "W_D": 68.30000000000001, "J_D": 1077.3542456626894, "W_D_1KI": 12.18120206884252, "J_D_1KI": 2.1724990313612484} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 5529, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.533730745315552, "TIME_S_1KI": 1.905178286365627, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1373.9562348842621, "W": 86.44, "J_1KI": 248.49995204996603, "W_1KI": 15.633930186290467, "W_D": 69.4085, "J_D": 1103.2420329588651, "W_D_1KI": 12.553535901609694, "J_D_1KI": 2.270489401629534} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.output index 4501c06..5a9a550 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.20109891891479492} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.20404601097106934} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1526, 3035, ..., 7496941, - 7498453, 7500000]), - col_indices=tensor([ 2, 6, 10, ..., 4993, 4994, 4996]), - values=tensor([0.3579, 0.5981, 0.5931, ..., 0.3837, 0.5123, 0.1240]), +tensor(crow_indices=tensor([ 0, 1447, 2938, ..., 7497029, + 7498441, 7500000]), + col_indices=tensor([ 3, 5, 7, ..., 4987, 4996, 4997]), + values=tensor([0.2329, 0.7294, 0.8836, ..., 0.5208, 0.0518, 0.9364]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.0391, 0.3406, 0.5361, ..., 0.5039, 0.0205, 0.2852]) +tensor([0.5789, 0.0903, 0.7224, ..., 0.8391, 0.9209, 0.7019]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 0.20109891891479492 seconds +Time: 0.20404601097106934 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5221', '-ss', '5000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.77708387374878} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5145', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.769284009933472} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1505, 2964, ..., 7496939, - 7498442, 7500000]), - col_indices=tensor([ 2, 4, 5, ..., 4996, 4997, 4999]), - values=tensor([0.1729, 0.9195, 0.5163, ..., 0.0436, 0.1803, 0.9350]), +tensor(crow_indices=tensor([ 0, 1458, 3003, ..., 7496989, + 7498497, 7500000]), + col_indices=tensor([ 1, 4, 10, ..., 4989, 4990, 4994]), + values=tensor([0.1464, 0.4974, 0.0225, ..., 0.5683, 0.0440, 0.9269]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.3955, 0.7196, 0.4505, ..., 0.8904, 0.4770, 0.0844]) +tensor([0.7014, 0.8503, 0.5401, ..., 0.5553, 0.6399, 0.6983]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 9.77708387374878 seconds +Time: 9.769284009933472 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5607', '-ss', '5000', '-sd', '0.3', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.746073484420776} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5529', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.533730745315552} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1451, 2905, ..., 7497061, - 7498533, 7500000]), - col_indices=tensor([ 4, 9, 12, ..., 4994, 4998, 4999]), - values=tensor([0.3703, 0.3823, 0.6108, ..., 0.0984, 0.8524, 0.0373]), +tensor(crow_indices=tensor([ 0, 1537, 3056, ..., 7497003, + 7498499, 7500000]), + col_indices=tensor([ 6, 10, 11, ..., 4981, 4989, 4990]), + values=tensor([0.6097, 0.4352, 0.7201, ..., 0.3526, 0.9200, 0.7753]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.6688, 0.5149, 0.3458, ..., 0.1151, 0.9310, 0.8037]) +tensor([0.1460, 0.3019, 0.8185, ..., 0.1969, 0.4512, 0.0832]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.746073484420776 seconds +Time: 10.533730745315552 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1451, 2905, ..., 7497061, - 7498533, 7500000]), - col_indices=tensor([ 4, 9, 12, ..., 4994, 4998, 4999]), - values=tensor([0.3703, 0.3823, 0.6108, ..., 0.0984, 0.8524, 0.0373]), +tensor(crow_indices=tensor([ 0, 1537, 3056, ..., 7497003, + 7498499, 7500000]), + col_indices=tensor([ 6, 10, 11, ..., 4981, 4989, 4990]), + values=tensor([0.6097, 0.4352, 0.7201, ..., 0.3526, 0.9200, 0.7753]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.6688, 0.5149, 0.3458, ..., 0.1151, 0.9310, 0.8037]) +tensor([0.1460, 0.3019, 0.8185, ..., 0.1969, 0.4512, 0.0832]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.746073484420776 seconds +Time: 10.533730745315552 seconds -[18.36, 17.95, 18.08, 17.67, 18.17, 17.84, 18.12, 17.84, 18.09, 18.06] -[84.51] -15.77385425567627 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5607, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.746073484420776, 'TIME_S_1KI': 1.9165460111326513, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.0484231472017, 'W': 84.51} -[18.36, 17.95, 18.08, 17.67, 18.17, 17.84, 18.12, 17.84, 18.09, 18.06, 18.35, 17.89, 17.87, 18.07, 18.23, 17.83, 18.12, 17.92, 18.19, 17.87] -324.20000000000005 -16.21 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5607, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.746073484420776, 'TIME_S_1KI': 1.9165460111326513, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.0484231472017, 'W': 84.51, 'J_1KI': 237.7471773046552, 'W_1KI': 15.07223113964687, 'W_D': 68.30000000000001, 'J_D': 1077.3542456626894, 'W_D_1KI': 12.18120206884252, 'J_D_1KI': 2.1724990313612484} +[19.34, 18.74, 18.52, 18.62, 18.6, 18.73, 18.65, 19.79, 18.5, 18.66] +[86.44] +15.894912481307983 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5529, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.533730745315552, 'TIME_S_1KI': 1.905178286365627, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1373.9562348842621, 'W': 86.44} +[19.34, 18.74, 18.52, 18.62, 18.6, 18.73, 18.65, 19.79, 18.5, 18.66, 18.87, 18.59, 18.65, 18.49, 18.51, 18.61, 18.87, 18.52, 22.37, 18.87] +340.63 +17.0315 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5529, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.533730745315552, 'TIME_S_1KI': 1.905178286365627, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1373.9562348842621, 'W': 86.44, 'J_1KI': 248.49995204996603, 'W_1KI': 15.633930186290467, 'W_D': 69.4085, 'J_D': 1103.2420329588651, 'W_D_1KI': 12.553535901609694, 'J_D_1KI': 2.270489401629534} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.json index 363edc0..ead5e1d 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.json +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2843, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.510854721069336, "TIME_S_1KI": 3.6970997963662806, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1362.9070807647706, "W": 82.31, "J_1KI": 479.39046104986653, "W_1KI": 28.951811466760464, "W_D": 66.04650000000001, "J_D": 1093.6124712638857, "W_D_1KI": 23.23126978543792, "J_D_1KI": 8.171392819359099} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2846, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.30638337135315, "TIME_S_1KI": 3.621357474122681, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1425.0354087996482, "W": 85.47, "J_1KI": 500.715182290811, "W_1KI": 30.031623330990865, "W_D": 68.37174999999999, "J_D": 1139.957467083156, "W_D_1KI": 24.02380534082923, "J_D_1KI": 8.441252755034865} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.output index 26353c7..66e74c6 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.3692905902862549} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.39473772048950195} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1942, 3937, ..., 9996021, - 9998014, 10000000]), - col_indices=tensor([ 1, 6, 11, ..., 4995, 4996, 4997]), - values=tensor([0.9006, 0.6515, 0.7177, ..., 0.3221, 0.1090, 0.6573]), +tensor(crow_indices=tensor([ 0, 2002, 3985, ..., 9995971, + 9998009, 10000000]), + col_indices=tensor([ 0, 2, 5, ..., 4991, 4992, 4996]), + values=tensor([0.1620, 0.9199, 0.6443, ..., 0.6479, 0.2394, 0.7085]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3731, 0.3806, 0.5183, ..., 0.1665, 0.0476, 0.3514]) +tensor([0.6678, 0.3987, 0.1276, ..., 0.4805, 0.4392, 0.0605]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 0.3692905902862549 seconds +Time: 0.39473772048950195 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2843', '-ss', '5000', '-sd', '0.4', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.510854721069336} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2659', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 9.807755947113037} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2016, 4009, ..., 9996028, - 9998016, 10000000]), - col_indices=tensor([ 0, 3, 5, ..., 4991, 4992, 4997]), - values=tensor([0.1122, 0.1212, 0.4120, ..., 0.9869, 0.5095, 0.1756]), +tensor(crow_indices=tensor([ 0, 2021, 4024, ..., 9995985, + 9998035, 10000000]), + col_indices=tensor([ 0, 1, 3, ..., 4985, 4994, 4995]), + values=tensor([0.7018, 0.1471, 0.5279, ..., 0.3317, 0.2521, 0.8359]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1243, 0.0753, 0.2426, ..., 0.5390, 0.1485, 0.6469]) +tensor([0.1015, 0.1457, 0.5192, ..., 0.4135, 0.5810, 0.6590]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.510854721069336 seconds +Time: 9.807755947113037 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2846', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.30638337135315} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2016, 4009, ..., 9996028, - 9998016, 10000000]), - col_indices=tensor([ 0, 3, 5, ..., 4991, 4992, 4997]), - values=tensor([0.1122, 0.1212, 0.4120, ..., 0.9869, 0.5095, 0.1756]), +tensor(crow_indices=tensor([ 0, 1985, 3951, ..., 9995929, + 9997921, 10000000]), + col_indices=tensor([ 1, 3, 5, ..., 4995, 4997, 4998]), + values=tensor([0.2102, 0.3320, 0.2208, ..., 0.4653, 0.1673, 0.9633]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1243, 0.0753, 0.2426, ..., 0.5390, 0.1485, 0.6469]) +tensor([0.2836, 0.6428, 0.7336, ..., 0.3764, 0.7686, 0.5592]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +56,30 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.510854721069336 seconds +Time: 10.30638337135315 seconds -[18.34, 18.07, 17.87, 17.95, 18.1, 18.25, 18.13, 17.86, 18.35, 17.99] -[82.31] -16.55821990966797 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.510854721069336, 'TIME_S_1KI': 3.6970997963662806, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1362.9070807647706, 'W': 82.31} -[18.34, 18.07, 17.87, 17.95, 18.1, 18.25, 18.13, 17.86, 18.35, 17.99, 18.23, 18.01, 18.07, 17.94, 18.3, 17.93, 18.17, 17.9, 18.02, 18.14] -325.2699999999999 -16.263499999999997 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.510854721069336, 'TIME_S_1KI': 3.6970997963662806, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1362.9070807647706, 'W': 82.31, 'J_1KI': 479.39046104986653, 'W_1KI': 28.951811466760464, 'W_D': 66.04650000000001, 'J_D': 1093.6124712638857, 'W_D_1KI': 23.23126978543792, 'J_D_1KI': 8.171392819359099} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1985, 3951, ..., 9995929, + 9997921, 10000000]), + col_indices=tensor([ 1, 3, 5, ..., 4995, 4997, 4998]), + values=tensor([0.2102, 0.3320, 0.2208, ..., 0.4653, 0.1673, 0.9633]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2836, 0.6428, 0.7336, ..., 0.3764, 0.7686, 0.5592]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.30638337135315 seconds + +[19.22, 18.77, 18.64, 18.81, 18.58, 18.67, 18.62, 18.73, 18.7, 18.37] +[85.47] +16.67293095588684 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2846, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.30638337135315, 'TIME_S_1KI': 3.621357474122681, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1425.0354087996482, 'W': 85.47} +[19.22, 18.77, 18.64, 18.81, 18.58, 18.67, 18.62, 18.73, 18.7, 18.37, 18.99, 18.55, 18.63, 22.5, 19.62, 18.58, 19.47, 18.45, 19.02, 18.67] +341.96500000000003 +17.09825 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2846, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.30638337135315, 'TIME_S_1KI': 3.621357474122681, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1425.0354087996482, 'W': 85.47, 'J_1KI': 500.715182290811, 'W_1KI': 30.031623330990865, 'W_D': 68.37174999999999, 'J_D': 1139.957467083156, 'W_D_1KI': 24.02380534082923, 'J_D_1KI': 8.441252755034865} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.json index 5cc798d..94053c3 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2367, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.31517243385315, "TIME_S_1KI": 4.35790977349098, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1435.467025976181, "W": 77.42, "J_1KI": 606.4499476029494, "W_1KI": 32.708069286016055, "W_D": 60.97375, "J_D": 1130.5322600764036, "W_D_1KI": 25.7599281791297, "J_D_1KI": 10.882943886408832} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2370, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.843660354614258, "TIME_S_1KI": 4.575384115870995, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1460.4018950557709, "W": 84.26, "J_1KI": 616.2033312471608, "W_1KI": 35.552742616033754, "W_D": 67.05925, "J_D": 1162.2769496916533, "W_D_1KI": 28.29504219409283, "J_D_1KI": 11.93883636881554} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.output index 9c08cbf..1bfc58f 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.output +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.4716074466705322} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.4699594974517822} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2514, 4962, ..., 12495038, - 12497495, 12500000]), - col_indices=tensor([ 0, 2, 3, ..., 4994, 4998, 4999]), - values=tensor([0.6154, 0.9669, 0.2665, ..., 0.0694, 0.4098, 0.1560]), +tensor(crow_indices=tensor([ 0, 2530, 5011, ..., 12494917, + 12497466, 12500000]), + col_indices=tensor([ 0, 2, 4, ..., 4989, 4990, 4993]), + values=tensor([0.0612, 0.4688, 0.2670, ..., 0.2342, 0.9548, 0.3408]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.5763, 0.6030, 0.9940, ..., 0.2125, 0.0764, 0.1654]) +tensor([0.3044, 0.4364, 0.9300, ..., 0.3535, 0.8813, 0.4187]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 0.4716074466705322 seconds +Time: 0.4699594974517822 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2226', '-ss', '5000', '-sd', '0.5', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.872556447982788} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2234', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.893749475479126} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2494, 5060, ..., 12495015, - 12497540, 12500000]), - col_indices=tensor([ 0, 1, 3, ..., 4996, 4997, 4998]), - values=tensor([0.6641, 0.1400, 0.8724, ..., 0.1588, 0.2497, 0.0435]), +tensor(crow_indices=tensor([ 0, 2495, 4993, ..., 12494938, + 12497473, 12500000]), + col_indices=tensor([ 1, 2, 3, ..., 4995, 4996, 4998]), + values=tensor([0.3117, 0.8528, 0.6760, ..., 0.0359, 0.3985, 0.2346]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.4856, 0.9202, 0.9997, ..., 0.6539, 0.7245, 0.6538]) +tensor([0.9035, 0.2602, 0.8518, ..., 0.1682, 0.8170, 0.2713]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 9.872556447982788 seconds +Time: 9.893749475479126 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2367', '-ss', '5000', '-sd', '0.5', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.31517243385315} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2370', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.843660354614258} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2524, 5066, ..., 12495116, - 12497547, 12500000]), - col_indices=tensor([ 0, 3, 4, ..., 4997, 4998, 4999]), - values=tensor([0.7920, 0.5049, 0.7981, ..., 0.7837, 0.6861, 0.6359]), +tensor(crow_indices=tensor([ 0, 2500, 4943, ..., 12494930, + 12497475, 12500000]), + col_indices=tensor([ 2, 4, 5, ..., 4996, 4997, 4998]), + values=tensor([0.2671, 0.5264, 0.0833, ..., 0.7249, 0.3539, 0.6800]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9717, 0.2840, 0.2667, ..., 0.4259, 0.7903, 0.5279]) +tensor([0.6057, 0.0631, 0.1621, ..., 0.7635, 0.6231, 0.8684]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.31517243385315 seconds +Time: 10.843660354614258 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2524, 5066, ..., 12495116, - 12497547, 12500000]), - col_indices=tensor([ 0, 3, 4, ..., 4997, 4998, 4999]), - values=tensor([0.7920, 0.5049, 0.7981, ..., 0.7837, 0.6861, 0.6359]), +tensor(crow_indices=tensor([ 0, 2500, 4943, ..., 12494930, + 12497475, 12500000]), + col_indices=tensor([ 2, 4, 5, ..., 4996, 4997, 4998]), + values=tensor([0.2671, 0.5264, 0.0833, ..., 0.7249, 0.3539, 0.6800]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9717, 0.2840, 0.2667, ..., 0.4259, 0.7903, 0.5279]) +tensor([0.6057, 0.0631, 0.1621, ..., 0.7635, 0.6231, 0.8684]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.31517243385315 seconds +Time: 10.843660354614258 seconds -[18.22, 18.01, 18.1, 18.03, 17.97, 17.83, 18.06, 17.94, 17.8, 18.08] -[77.42] -18.54129457473755 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.31517243385315, 'TIME_S_1KI': 4.35790977349098, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1435.467025976181, 'W': 77.42} -[18.22, 18.01, 18.1, 18.03, 17.97, 17.83, 18.06, 17.94, 17.8, 18.08, 18.25, 18.11, 18.19, 18.18, 17.9, 17.94, 21.89, 18.55, 18.24, 17.82] -328.925 -16.44625 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.31517243385315, 'TIME_S_1KI': 4.35790977349098, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1435.467025976181, 'W': 77.42, 'J_1KI': 606.4499476029494, 'W_1KI': 32.708069286016055, 'W_D': 60.97375, 'J_D': 1130.5322600764036, 'W_D_1KI': 25.7599281791297, 'J_D_1KI': 10.882943886408832} +[19.66, 18.62, 19.14, 24.55, 19.01, 18.74, 19.62, 18.67, 18.57, 18.42] +[84.26] +17.33208990097046 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2370, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.843660354614258, 'TIME_S_1KI': 4.575384115870995, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1460.4018950557709, 'W': 84.26} +[19.66, 18.62, 19.14, 24.55, 19.01, 18.74, 19.62, 18.67, 18.57, 18.42, 19.01, 18.69, 18.69, 18.48, 18.8, 18.47, 18.78, 18.55, 18.86, 18.46] +344.015 +17.20075 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2370, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.843660354614258, 'TIME_S_1KI': 4.575384115870995, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1460.4018950557709, 'W': 84.26, 'J_1KI': 616.2033312471608, 'W_1KI': 35.552742616033754, 'W_D': 67.05925, 'J_D': 1162.2769496916533, 'W_D_1KI': 28.29504219409283, 'J_D_1KI': 11.93883636881554} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json index f03b074..3647620 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json +++ 
b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 352628, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.273355960845947, "TIME_S_1KI": 0.029133693186150694, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 997.6947084188461, "W": 73.02, "J_1KI": 2.8293122168938547, "W_1KI": 0.20707374343500798, "W_D": 56.76349999999999, "J_D": 775.5771512097119, "W_D_1KI": 0.16097275315630066, "J_D_1KI": 0.00045649453008921774} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 358898, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.994742631912231, "TIME_S_1KI": 0.030634728061767497, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1136.2339201164245, "W": 81.52, "J_1KI": 3.1658964945929613, "W_1KI": 0.22713974443992443, "W_D": 64.66225, "J_D": 901.2689131630063, "W_D_1KI": 0.18016887806563425, "J_D_1KI": 0.000502005801273995} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output index a48462c..1e05b99 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,102 +1,183 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03506755828857422} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0216214656829834} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([2913, 1648, 2644, 3381, 1372, 3923, 4637, 4740, 3605, - 3437, 243, 589, 1110, 3938, 3652, 4390, 556, 1399, - 147, 3692, 103, 2333, 953, 1384, 659, 1993, 868, - 614, 4322, 4844, 1783, 3862, 2228, 274, 4521, 1878, - 2924, 2109, 11, 387, 2654, 1288, 3264, 2715, 783, - 864, 4478, 4578, 483, 3494, 1607, 265, 2566, 3626, - 4895, 3427, 3874, 3738, 1296, 657, 640, 614, 787, - 1550, 2439, 4660, 269, 3982, 4338, 3845, 315, 2485, - 3306, 2923, 3930, 969, 1457, 1460, 4340, 2708, 47, - 565, 464, 4753, 1236, 577, 1132, 636, 2214, 288, - 1601, 2014, 3937, 1117, 3005, 646, 3874, 3732, 3404, - 1250, 1263, 2388, 4327, 3524, 3464, 3950, 1309, 1452, - 2645, 1166, 3908, 4608, 3126, 3808, 84, 3605, 3573, - 3190, 4563, 3165, 2986, 3344, 1519, 2294, 4187, 3643, - 4112, 3606, 2436, 2384, 625, 3600, 188, 4651, 2237, - 3672, 1930, 392, 4612, 4270, 771, 4369, 1789, 974, - 1168, 2614, 2307, 3695, 2412, 1459, 1895, 4730, 4451, - 4843, 4273, 1205, 3679, 3774, 564, 1427, 336, 3203, - 3958, 2398, 726, 4337, 2127, 409, 1972, 1019, 1933, - 580, 2862, 277, 4275, 3026, 969, 4172, 2441, 3256, - 2388, 1358, 3834, 3780, 4305, 2776, 2801, 3127, 4359, - 3593, 1929, 1465, 4820, 2245, 3253, 2958, 788, 2326, - 4311, 1014, 3630, 3362, 3298, 1282, 4253, 3492, 4064, - 3984, 424, 3900, 4395, 2947, 1349, 3432, 2872, 1837, - 3566, 2145, 4347, 1140, 404, 290, 1225, 228, 177, - 2040, 2532, 4933, 2387, 3999, 2398, 1862, 25, 1623, - 3988, 4385, 855, 1469, 4176, 2016, 3588, 1388, 4820, - 138, 2359, 3521, 918, 3462, 3885, 3534]), - values=tensor([8.7230e-01, 2.2609e-01, 1.4440e-01, 9.6081e-01, - 3.3014e-01, 2.6737e-01, 1.3690e-01, 4.9883e-01, - 1.9471e-01, 7.1750e-01, 8.2877e-01, 7.8140e-02, - 4.1936e-01, 2.9344e-01, 1.7249e-01, 6.6013e-03, - 2.1956e-01, 3.1653e-02, 5.7945e-01, 7.5527e-01, - 7.0478e-02, 1.5299e-01, 2.9192e-01, 9.5876e-01, - 1.5930e-01, 5.3970e-01, 2.0450e-01, 6.3456e-01, - 2.1730e-01, 5.0751e-01, 5.7632e-01, 6.8165e-01, - 4.1461e-01, 3.3339e-01, 2.7547e-01, 9.8206e-02, - 2.4585e-02, 5.1512e-01, 1.4352e-01, 1.0422e-01, - 9.9198e-01, 6.0170e-01, 1.7000e-01, 2.2028e-01, - 8.5709e-01, 1.0687e-01, 5.2938e-02, 2.0130e-01, - 4.0830e-01, 6.9702e-01, 7.4460e-02, 5.4586e-03, - 2.2453e-01, 8.8854e-02, 9.3819e-01, 3.1406e-01, - 4.2855e-01, 5.9846e-01, 4.2003e-01, 5.0858e-01, - 7.3609e-01, 6.1148e-01, 7.1703e-01, 6.0561e-01, - 4.5768e-01, 1.2397e-01, 3.5627e-01, 8.0143e-01, - 6.1472e-01, 2.3014e-01, 6.6661e-01, 2.9043e-01, - 6.4900e-01, 1.0069e-01, 1.8357e-01, 7.5440e-01, - 8.4145e-01, 9.4584e-01, 6.2814e-01, 7.3938e-01, - 4.9410e-01, 3.8555e-01, 8.5109e-01, 9.4921e-01, - 2.4875e-01, 7.9168e-02, 8.0965e-02, 6.7150e-01, - 4.1002e-01, 8.0019e-01, 7.4899e-01, 3.7086e-01, - 2.2021e-01, 2.3499e-01, 1.8010e-01, 6.8475e-01, - 7.1328e-01, 6.7819e-01, 9.7254e-01, 9.5271e-01, - 3.5404e-01, 5.3603e-01, 8.9419e-01, 6.8372e-02, - 1.6953e-01, 6.5824e-01, 7.6890e-01, 9.3812e-02, - 3.8563e-01, 4.0621e-01, 9.1471e-01, 1.9823e-01, - 4.3999e-01, 3.0395e-01, 7.1340e-01, 2.5131e-01, - 8.7557e-01, 1.5075e-01, 9.8650e-01, 5.3651e-01, - 5.0817e-01, 5.9476e-01, 1.9720e-01, 9.0115e-01, - 9.2163e-01, 9.8019e-01, 7.8150e-01, 5.5400e-01, - 1.5169e-01, 3.9261e-01, 9.8313e-01, 2.7392e-01, - 9.8061e-01, 8.9033e-01, 1.7201e-01, 1.4114e-01, - 8.8501e-01, 9.2742e-01, 1.7757e-01, 4.1985e-01, - 6.1529e-01, 7.8249e-01, 3.0982e-01, 1.9173e-01, - 4.3381e-01, 8.0061e-01, 4.6306e-01, 2.6376e-03, - 9.0015e-01, 8.6386e-01, 2.7373e-04, 8.2181e-01, - 
1.7497e-01, 8.6446e-01, 3.8278e-02, 9.3738e-01, - 1.4478e-01, 3.6074e-01, 4.9870e-01, 4.1068e-01, - 2.2159e-02, 9.1232e-01, 4.8533e-01, 3.4753e-01, - 1.7253e-01, 7.5020e-01, 9.1742e-01, 5.3659e-01, - 1.8035e-01, 2.3939e-01, 4.1854e-01, 7.0508e-01, - 2.9775e-01, 6.8891e-01, 5.7259e-01, 2.1233e-01, - 2.8848e-02, 5.9922e-01, 1.3774e-01, 9.7988e-02, - 2.9594e-02, 9.9852e-01, 6.2377e-01, 1.2506e-01, - 6.2903e-01, 2.4709e-01, 7.9481e-01, 2.8351e-01, - 7.5214e-01, 8.0716e-01, 4.7242e-01, 4.4288e-02, - 1.8578e-02, 9.9935e-01, 2.7602e-01, 1.0075e-01, - 6.3752e-01, 2.7320e-01, 1.0876e-01, 8.9150e-01, - 5.5601e-01, 4.9492e-01, 5.2617e-01, 3.1376e-01, - 6.2485e-02, 6.1369e-01, 8.9174e-01, 2.5796e-01, - 3.3213e-01, 2.3179e-01, 9.8658e-01, 2.3060e-01, - 9.3135e-01, 5.2215e-02, 9.7039e-01, 5.8413e-01, - 1.2826e-01, 4.7981e-01, 9.6357e-01, 3.2682e-02, - 1.6298e-02, 1.5945e-01, 9.8437e-01, 2.0032e-01, - 9.5029e-01, 6.2119e-01, 2.5447e-01, 3.9302e-01, - 7.6479e-01, 4.3998e-01, 4.2741e-02, 8.0712e-01, - 7.5958e-01, 9.4342e-01, 6.1220e-02, 8.5277e-01, - 8.0569e-01, 8.5113e-02, 3.3815e-01, 2.2293e-01, - 7.4069e-01, 9.6487e-01, 9.2541e-01, 4.0658e-01, - 7.8100e-01, 1.8005e-01, 7.5104e-01, 2.0211e-01, - 1.3360e-01, 5.3522e-01]), size=(5000, 5000), nnz=250, + col_indices=tensor([2613, 1817, 3946, 1992, 2616, 129, 4213, 2509, 3820, + 236, 537, 4893, 2589, 3697, 1676, 2508, 145, 948, + 40, 430, 349, 4228, 1986, 2398, 3557, 3646, 2218, + 2015, 2093, 2251, 416, 4965, 3391, 4588, 4174, 376, + 2789, 1796, 1459, 2115, 2600, 1710, 4448, 3115, 1800, + 2609, 4625, 1752, 3155, 1950, 1482, 2583, 514, 4047, + 602, 975, 387, 4596, 3016, 4887, 3675, 396, 627, + 887, 2774, 3856, 584, 2762, 807, 4233, 1371, 3179, + 2518, 1027, 1954, 3801, 2093, 2335, 417, 1975, 1009, + 4249, 1993, 2957, 3300, 805, 544, 3304, 735, 2351, + 3892, 1236, 4952, 2569, 206, 644, 4055, 2873, 1176, + 1247, 2549, 3684, 3193, 1911, 1573, 2876, 4734, 3283, + 2837, 1367, 2966, 3836, 2188, 2729, 1559, 308, 1855, + 2500, 2215, 3151, 158, 1510, 2974, 1250, 877, 2654, + 1001, 3746, 4023, 2832, 1493, 1455, 3845, 2541, 2111, + 2124, 1595, 4138, 2453, 4376, 768, 2965, 703, 2859, + 3810, 3000, 3578, 4469, 3224, 965, 182, 1227, 2929, + 2828, 2819, 3254, 1226, 2050, 4082, 2408, 2251, 2227, + 3107, 402, 3970, 4442, 1377, 351, 2701, 3671, 4762, + 1553, 585, 687, 617, 3556, 925, 1339, 1049, 2821, + 4879, 4294, 3820, 2604, 2258, 3660, 4401, 457, 3480, + 4829, 184, 1866, 2858, 2516, 3422, 3944, 1861, 2036, + 3465, 2931, 2105, 3542, 1187, 2590, 3604, 2943, 772, + 4518, 1966, 3015, 2199, 1749, 4430, 897, 1556, 2068, + 4470, 3141, 3271, 3879, 1406, 3174, 792, 4062, 1315, + 4513, 3922, 3174, 1013, 217, 3220, 3761, 2628, 1240, + 4090, 2766, 823, 2481, 2006, 1187, 4212, 4379, 4867, + 2385, 2594, 2056, 4894, 2138, 2754, 769]), + values=tensor([0.4773, 0.5667, 0.2393, 0.9093, 0.0398, 0.5200, 0.4508, + 0.5855, 0.1385, 0.7100, 0.2886, 0.5201, 0.8578, 0.3873, + 0.6284, 0.6030, 0.9393, 0.1647, 0.1931, 0.7859, 0.2167, + 0.0958, 0.0294, 0.1729, 0.6091, 0.9606, 0.9690, 0.4139, + 0.9596, 0.6887, 0.1955, 0.2455, 0.9244, 0.4446, 0.0135, + 0.4731, 0.8666, 0.3970, 0.8826, 0.9278, 0.8202, 0.5499, + 0.0386, 0.9539, 0.3474, 0.9359, 0.8489, 0.6483, 0.5570, + 0.8902, 0.4442, 0.2464, 0.3322, 0.8702, 0.0325, 0.7632, + 0.8839, 0.2862, 0.2367, 0.0177, 0.4382, 0.0162, 0.5936, + 0.9867, 0.3275, 0.7496, 0.6102, 0.4504, 0.7683, 0.5604, + 0.7182, 0.0552, 0.7256, 0.9202, 0.4957, 0.4986, 0.4128, + 0.7768, 0.9637, 0.5132, 0.0330, 0.1883, 0.0179, 0.5536, + 0.4168, 0.0414, 0.3720, 0.5946, 0.3061, 0.7445, 0.8260, 
+ 0.5733, 0.8961, 0.7693, 0.5040, 0.0390, 0.1875, 0.2155, + 0.5766, 0.2547, 0.5002, 0.2061, 0.7535, 0.4043, 0.4292, + 0.6330, 0.7764, 0.6365, 0.0274, 0.0300, 0.5879, 0.7878, + 0.9210, 0.5322, 0.2769, 0.6183, 0.8678, 0.0456, 0.3832, + 0.8826, 0.6217, 0.1552, 0.6501, 0.2458, 0.2183, 0.1728, + 0.1001, 0.6206, 0.0758, 0.0685, 0.0649, 0.2558, 0.3169, + 0.1911, 0.8620, 0.0213, 0.1460, 0.1986, 0.8879, 0.9890, + 0.1214, 0.0813, 0.6424, 0.3255, 0.8728, 0.3216, 0.9667, + 0.9794, 0.9481, 0.7212, 0.7223, 0.4492, 0.0038, 0.4353, + 0.6461, 0.8489, 0.4017, 0.6844, 0.5717, 0.5076, 0.1800, + 0.0143, 0.7874, 0.6780, 0.8072, 0.8644, 0.3539, 0.3668, + 0.1289, 0.5685, 0.9289, 0.9085, 0.0540, 0.5458, 0.5676, + 0.7523, 0.8657, 0.8307, 0.2174, 0.8795, 0.0789, 0.5120, + 0.7562, 0.4736, 0.5858, 0.2737, 0.0743, 0.0293, 0.6524, + 0.8033, 0.4071, 0.7736, 0.8376, 0.6554, 0.5091, 0.0839, + 0.4165, 0.7343, 0.5554, 0.4727, 0.6239, 0.5295, 0.0143, + 0.8893, 0.5034, 0.9847, 0.8587, 0.4137, 0.3554, 0.3394, + 0.6275, 0.1036, 0.3455, 0.1147, 0.9934, 0.9105, 0.5057, + 0.0068, 0.6274, 0.7320, 0.2306, 0.1168, 0.7498, 0.4082, + 0.3634, 0.0247, 0.1449, 0.2806, 0.3220, 0.4202, 0.2959, + 0.8904, 0.1465, 0.0324, 0.6961, 0.0028, 0.9391, 0.6624, + 0.7913, 0.7052, 0.0821, 0.0440, 0.5760, 0.8694, 0.4885, + 0.4788, 0.1794, 0.4535, 0.1456, 0.4424]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.5462, 0.5771, 0.7542, ..., 0.5076, 0.8334, 0.7435]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.0216214656829834 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '48562', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.420738697052002} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3521, 4577, 3700, 4060, 2070, 3153, 3572, 4721, 1322, + 1551, 493, 4958, 789, 1337, 3842, 3215, 3039, 2060, + 524, 2203, 219, 3669, 3661, 4875, 91, 1286, 4064, + 4928, 4194, 4278, 1828, 1977, 1930, 3593, 58, 3394, + 4375, 292, 2972, 3332, 1179, 1105, 2704, 3956, 2489, + 966, 2293, 3849, 2212, 66, 2948, 2131, 4704, 1804, + 313, 3992, 611, 4029, 1267, 2149, 941, 3245, 3798, + 1364, 2130, 2370, 311, 1607, 1231, 1522, 456, 4110, + 1024, 2271, 2672, 3016, 3385, 79, 4910, 3025, 779, + 3021, 992, 2666, 2740, 2293, 3945, 1361, 1628, 4430, + 2405, 4401, 3058, 4500, 1983, 2372, 4334, 2353, 3692, + 2782, 1652, 4493, 1732, 1444, 2388, 2875, 587, 2481, + 1741, 3110, 1365, 867, 4767, 965, 917, 1015, 274, + 1275, 1662, 1471, 3836, 4340, 3788, 1964, 3795, 4046, + 590, 2260, 334, 4924, 3787, 523, 4896, 1860, 4332, + 4705, 4818, 4188, 1574, 1976, 352, 4453, 851, 2453, + 2780, 3036, 4974, 830, 2264, 494, 3749, 749, 2747, + 3511, 3912, 4160, 4536, 3923, 1704, 3601, 2336, 2875, + 2250, 4918, 386, 1663, 4600, 1792, 2793, 4336, 4367, + 1540, 1091, 2281, 4513, 3895, 2398, 3767, 3021, 4118, + 3573, 2882, 1245, 782, 4513, 2275, 3878, 4402, 1392, + 2143, 4999, 1009, 4499, 1058, 3778, 1938, 2514, 529, + 3044, 3945, 3899, 617, 1093, 2827, 2403, 2869, 4726, + 2424, 11, 1438, 4947, 1981, 1346, 455, 404, 2283, + 1559, 3843, 2823, 2792, 1346, 4055, 136, 608, 870, + 589, 2351, 2817, 222, 3068, 1263, 3746, 2053, 348, + 3461, 573, 2582, 1833, 2362, 2158, 2391, 3010, 3705, + 3518, 2706, 1079, 4904, 4765, 1653, 1722]), + values=tensor([8.5649e-01, 7.4008e-01, 8.9356e-01, 1.6667e-01, + 4.3548e-01, 7.2692e-01, 7.1685e-01, 3.7507e-01, + 3.9033e-01, 5.8773e-01, 2.0444e-01, 2.3474e-01, + 7.9101e-01, 4.3790e-01, 8.2058e-01, 4.6105e-01, + 2.9171e-02, 3.6850e-01, 6.4378e-01, 9.8629e-01, + 1.7279e-02, 3.2479e-01, 3.3292e-01, 3.5157e-01, + 1.6861e-01, 7.9261e-01, 1.9250e-03, 4.7199e-01, + 5.8533e-01, 5.1223e-01, 7.5386e-01, 9.8537e-01, + 9.4312e-01, 5.5087e-02, 5.1480e-01, 1.4259e-01, + 5.8382e-01, 6.7530e-01, 5.8669e-02, 5.5221e-02, + 2.5814e-01, 5.0132e-01, 6.2725e-02, 6.2479e-01, + 9.1102e-01, 7.7790e-01, 5.4886e-01, 9.0865e-01, + 1.9430e-01, 4.4762e-01, 3.5263e-01, 5.8757e-01, + 6.6505e-01, 5.5276e-02, 3.3006e-01, 2.7181e-01, + 7.2701e-01, 8.2345e-01, 5.8578e-01, 9.1428e-01, + 2.4712e-01, 3.6151e-01, 1.6086e-01, 4.7388e-01, + 5.9849e-01, 3.7646e-01, 3.1637e-01, 5.1353e-01, + 3.7998e-01, 9.6791e-01, 2.8224e-01, 4.1180e-01, + 6.0743e-01, 7.4152e-01, 7.4723e-01, 7.2198e-01, + 5.9072e-02, 4.7897e-01, 4.3923e-01, 7.6361e-02, + 9.4134e-01, 5.7769e-01, 6.0419e-01, 6.4599e-01, + 1.7646e-02, 6.5630e-02, 3.2637e-01, 1.0918e-01, + 4.5670e-01, 5.8733e-01, 1.3241e-01, 6.0168e-01, + 2.7977e-01, 5.7707e-01, 3.3511e-01, 7.9853e-01, + 5.8489e-04, 8.2255e-01, 9.0539e-01, 4.6027e-02, + 3.3435e-02, 5.8266e-01, 9.0465e-01, 9.8159e-01, + 4.3210e-01, 2.1247e-01, 5.5212e-01, 6.4424e-01, + 5.4893e-01, 1.0990e-01, 2.4331e-03, 9.8139e-01, + 5.1101e-01, 7.4851e-01, 8.7510e-01, 7.1618e-01, + 2.1882e-01, 4.8602e-01, 9.5388e-01, 7.0380e-02, + 5.4068e-01, 9.7372e-01, 9.6886e-01, 6.7893e-01, + 8.9598e-01, 6.0333e-02, 6.6953e-01, 5.4894e-01, + 6.2993e-01, 5.2172e-01, 8.7821e-01, 4.2757e-01, + 6.0504e-01, 9.6947e-01, 2.6017e-01, 1.5998e-01, + 8.6316e-01, 9.6608e-01, 5.3507e-01, 3.6038e-01, + 6.2620e-01, 4.3271e-02, 7.9121e-01, 1.0766e-01, + 3.8610e-01, 9.4758e-01, 1.3797e-01, 6.9100e-01, + 5.1072e-02, 5.3062e-01, 3.4758e-01, 
3.4925e-01, + 5.1688e-02, 6.0623e-01, 4.8384e-01, 2.3975e-01, + 4.6347e-01, 3.8608e-01, 6.7906e-01, 1.9134e-01, + 6.0399e-01, 9.8768e-01, 4.3690e-01, 8.6622e-01, + 3.4014e-01, 5.4044e-01, 7.1001e-01, 3.0508e-01, + 3.0257e-01, 7.6167e-01, 9.7747e-01, 4.2672e-01, + 2.9577e-02, 7.7585e-01, 1.6705e-01, 9.4547e-01, + 9.8798e-01, 2.1103e-02, 5.1205e-02, 6.9018e-01, + 4.6041e-02, 4.3012e-01, 3.1301e-01, 4.6722e-01, + 3.0969e-01, 3.7583e-01, 6.1399e-01, 8.6336e-02, + 2.0974e-02, 3.7619e-01, 5.0581e-01, 2.2995e-01, + 1.9118e-01, 1.4488e-01, 5.2678e-01, 3.0863e-01, + 6.3200e-01, 1.1445e-01, 4.9489e-01, 4.2427e-01, + 5.5931e-01, 4.9060e-01, 1.1023e-02, 5.4224e-01, + 9.0445e-01, 4.8678e-01, 4.6426e-01, 7.0510e-01, + 3.1878e-01, 1.3130e-02, 8.0874e-01, 7.7766e-01, + 7.6596e-01, 7.4843e-01, 5.2859e-01, 9.2261e-02, + 3.1449e-01, 5.5807e-01, 2.4840e-01, 6.8463e-01, + 1.9623e-01, 2.1203e-01, 3.9765e-02, 1.6807e-01, + 9.9544e-01, 6.4099e-01, 5.3386e-01, 7.2927e-01, + 2.7552e-01, 9.8303e-01, 8.4882e-01, 7.9896e-01, + 1.1533e-01, 3.7806e-02, 6.9796e-02, 1.1726e-01, + 7.0794e-01, 5.8791e-01, 5.8728e-01, 8.7830e-01, + 3.8329e-01, 4.1515e-01, 5.6035e-01, 2.5407e-01, + 6.0859e-02, 7.3250e-01, 6.1293e-01, 8.3081e-01, + 6.0668e-02, 5.6773e-01]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.6988, 0.9907, 0.6239, ..., 0.8127, 0.8805, 0.8314]) +tensor([0.6306, 0.5318, 0.8619, ..., 0.0870, 0.9085, 0.1453]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -104,80 +185,107 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.03506755828857422 seconds +Time: 1.420738697052002 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '29942', '-ss', '5000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.8915646076202393} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '358898', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.994742631912231} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([3138, 4671, 1014, 2355, 4387, 3668, 1195, 4247, 491, - 1252, 2094, 1714, 1299, 3079, 802, 3268, 2381, 2379, - 4459, 4147, 1428, 4131, 184, 2357, 1540, 3877, 1899, - 1523, 3927, 3281, 842, 2521, 2709, 1317, 126, 4208, - 1524, 1551, 2327, 636, 2832, 4733, 2849, 2855, 4835, - 772, 892, 1005, 3131, 4981, 1907, 3534, 4774, 4049, - 1270, 2687, 3294, 2436, 4012, 573, 1486, 3183, 4324, - 4017, 2252, 3315, 416, 1701, 4054, 3869, 3901, 4397, - 823, 3663, 4776, 1010, 4393, 780, 1327, 4090, 2288, - 2171, 4207, 3745, 3322, 1764, 3400, 1657, 1112, 3388, - 2470, 1470, 3180, 2688, 2946, 2902, 1005, 2105, 1503, - 2575, 2457, 2786, 3986, 3537, 3241, 1529, 183, 137, - 1518, 3278, 1251, 4619, 3661, 1392, 2046, 4296, 1832, - 956, 2052, 4152, 1659, 837, 4363, 3579, 287, 2170, - 416, 3532, 4324, 1141, 3724, 3599, 1673, 3014, 4249, - 3374, 3935, 2108, 3990, 1261, 1792, 2644, 1713, 4760, - 4851, 2651, 534, 3407, 667, 503, 11, 3298, 4145, - 866, 1624, 2824, 3873, 374, 2059, 1354, 4922, 4948, - 4789, 3518, 4827, 1892, 755, 1448, 1250, 4481, 4595, - 1189, 4206, 1769, 3228, 4907, 1812, 3137, 28, 3706, - 4874, 758, 3184, 4721, 3431, 1443, 4896, 2475, 3238, - 2981, 3892, 1841, 3186, 3895, 4298, 4039, 2742, 396, - 3291, 3897, 2330, 4741, 2415, 80, 1815, 1491, 991, - 2013, 2616, 1000, 1578, 818, 4909, 1615, 1781, 539, - 3952, 4002, 1509, 2715, 4070, 3688, 3477, 3166, 2780, - 265, 1338, 4642, 4409, 4565, 4567, 435, 4855, 4082, - 1256, 4264, 2072, 151, 2943, 1612, 2224, 2929, 923, - 3069, 3516, 3474, 16, 4892, 1006, 3067]), - values=tensor([0.1423, 0.3764, 0.8726, 0.6883, 0.7846, 0.4459, 0.0452, - 0.1595, 0.8993, 0.6993, 0.8582, 0.8926, 0.6386, 0.2255, - 0.1989, 0.6924, 0.6856, 0.9311, 0.6401, 0.4844, 0.1827, - 0.6094, 0.0844, 0.8088, 0.7780, 0.4677, 0.5210, 0.2681, - 0.6746, 0.2234, 0.2579, 0.1990, 0.4883, 0.0424, 0.2782, - 0.3929, 0.1674, 0.3710, 0.6509, 0.3822, 0.8632, 0.4975, - 0.5252, 0.8601, 0.4531, 0.6836, 0.5476, 0.3247, 0.1133, - 0.1630, 0.8905, 0.4050, 0.0529, 0.3709, 0.6633, 0.0041, - 0.8263, 0.2824, 0.9484, 0.9316, 0.1253, 0.0387, 0.8159, - 0.2554, 0.3130, 0.0737, 0.4738, 0.5116, 0.7090, 0.2759, - 0.6768, 0.9020, 0.6712, 0.8917, 0.8115, 0.3531, 0.4688, - 0.4566, 0.9670, 0.6423, 0.7005, 0.5390, 0.9066, 0.6596, - 0.7123, 0.3209, 0.0601, 0.3019, 0.6328, 0.0158, 0.7210, - 0.6919, 0.8834, 0.4854, 0.1747, 0.7990, 0.5800, 0.5557, - 0.1228, 0.3669, 0.1142, 0.1249, 0.9221, 0.7233, 0.8693, - 0.8032, 0.3909, 0.5535, 0.3233, 0.2959, 0.5645, 0.9214, - 0.1205, 0.5140, 0.3231, 0.3354, 0.2668, 0.9663, 0.9554, - 0.5077, 0.0968, 0.7096, 0.1594, 0.4013, 0.3294, 0.5998, - 0.4436, 0.2240, 0.9058, 0.0648, 0.8462, 0.2153, 0.7426, - 0.6462, 0.4532, 0.1398, 0.7161, 0.9030, 0.7302, 0.9922, - 0.7361, 0.0549, 0.7258, 0.7856, 0.3469, 0.7982, 0.7709, - 0.2339, 0.9960, 0.4194, 0.7112, 0.5143, 0.2695, 0.8909, - 0.6861, 0.0216, 0.5087, 0.4296, 0.4732, 0.2124, 0.0993, - 0.1882, 0.5905, 0.6824, 0.3641, 0.2671, 0.8679, 0.5636, - 0.0946, 0.2765, 0.6901, 0.1089, 0.9019, 0.8860, 0.2216, - 0.8984, 0.5901, 0.3288, 0.4042, 0.3888, 0.6821, 0.5168, - 0.1585, 0.6704, 0.7681, 0.8172, 0.4528, 0.4017, 0.4631, - 0.8088, 0.1020, 0.1485, 0.7270, 0.4608, 0.5168, 0.6847, - 0.9585, 0.6296, 0.5947, 0.3092, 0.4016, 0.0159, 0.5160, - 0.0621, 0.9856, 0.0778, 0.2539, 0.3235, 0.9242, 0.1079, - 0.9852, 0.7752, 0.1954, 0.3552, 0.8036, 0.4824, 0.2198, - 0.6211, 0.1556, 0.7647, 0.6061, 0.7231, 0.7227, 0.4738, - 0.4499, 0.9377, 0.6610, 0.2220, 
0.5305, 0.8038, 0.7592, - 0.9215, 0.9933, 0.6030, 0.5785, 0.4115, 0.6221, 0.6776, - 0.4489, 0.6315, 0.2327, 0.4513, 0.7262, 0.7754, 0.6206, - 0.4823, 0.8933, 0.7206, 0.5757, 0.6875]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.4051, 0.4452, 0.8286, ..., 0.6416, 0.7748, 0.9825]) + col_indices=tensor([4562, 2658, 106, 2707, 1737, 31, 1236, 2601, 4534, + 3254, 3271, 4540, 3081, 2346, 2928, 2662, 4713, 4877, + 807, 2740, 4962, 2976, 66, 2583, 3811, 4806, 3845, + 2261, 3732, 3017, 4707, 140, 2265, 2383, 1022, 324, + 586, 4190, 1598, 4806, 880, 2096, 2650, 1734, 3886, + 4475, 1386, 572, 1771, 361, 474, 400, 251, 2254, + 164, 4775, 2715, 667, 530, 3685, 1812, 4570, 2585, + 1061, 132, 3970, 1892, 4376, 4938, 1177, 1540, 1094, + 2598, 1929, 2455, 3409, 2754, 3004, 2819, 4249, 1236, + 2450, 4024, 2612, 44, 2802, 1797, 3005, 2436, 4704, + 4446, 4605, 565, 151, 2903, 2651, 2863, 2056, 4578, + 4132, 3598, 1672, 2855, 1374, 2534, 3111, 2262, 3781, + 411, 4876, 1654, 2116, 1289, 1601, 2348, 3492, 1891, + 843, 2287, 4012, 1387, 4470, 2004, 1891, 2033, 1853, + 2464, 2578, 1338, 4816, 3074, 4285, 3913, 3541, 551, + 2748, 1498, 995, 2268, 4331, 446, 1905, 250, 1077, + 2526, 3827, 4511, 1434, 1869, 4116, 2998, 2893, 3357, + 3594, 4700, 621, 3200, 3851, 2027, 2593, 3334, 4466, + 2920, 1564, 1636, 2281, 754, 1381, 4482, 375, 1762, + 3406, 106, 4651, 2115, 2062, 2646, 1312, 2177, 314, + 2509, 1679, 2541, 3269, 2951, 2242, 3586, 2240, 610, + 1147, 4273, 4823, 556, 3140, 670, 1247, 2220, 3574, + 3114, 502, 1907, 3708, 59, 1444, 525, 3369, 4055, + 3616, 3354, 4261, 4934, 749, 2897, 283, 728, 4351, + 130, 1856, 4457, 4040, 1276, 3419, 1572, 40, 3855, + 3150, 1314, 2103, 3228, 3721, 36, 226, 4178, 3192, + 796, 648, 313, 3279, 4408, 2934, 509, 744, 2625, + 966, 4967, 1858, 4229, 1329, 2910, 1456]), + values=tensor([8.0445e-01, 1.6460e-01, 1.5819e-01, 2.5322e-01, + 1.2726e-02, 4.4524e-01, 4.2645e-01, 7.0378e-01, + 4.7825e-01, 6.5571e-01, 5.3803e-01, 7.0301e-01, + 6.0138e-01, 2.0470e-01, 6.5732e-01, 3.8060e-01, + 8.5678e-01, 6.0247e-01, 9.3278e-01, 4.1187e-01, + 1.3415e-01, 5.1322e-01, 8.4647e-01, 8.4494e-01, + 8.3918e-01, 9.7017e-01, 6.2745e-01, 6.2131e-02, + 4.9514e-01, 5.6931e-01, 4.2294e-01, 6.2142e-01, + 9.2804e-01, 1.4183e-01, 3.7922e-01, 3.6274e-01, + 9.7354e-01, 6.3392e-01, 2.3882e-01, 8.3557e-01, + 8.4776e-01, 5.9301e-01, 4.3111e-01, 7.4024e-01, + 6.2693e-01, 5.5763e-01, 7.0525e-01, 7.4129e-01, + 7.6963e-01, 6.8183e-01, 1.1384e-01, 1.5937e-01, + 1.3322e-01, 3.9472e-01, 5.3676e-01, 7.4470e-01, + 1.9547e-01, 1.8929e-01, 2.3424e-01, 4.7418e-02, + 9.1805e-01, 4.1401e-01, 6.5459e-01, 8.9676e-01, + 2.8504e-01, 8.2652e-01, 4.3184e-01, 6.9357e-01, + 9.3383e-02, 7.0300e-01, 4.1300e-01, 7.4788e-01, + 4.7790e-01, 4.5509e-01, 7.6673e-01, 7.4493e-02, + 6.4246e-01, 5.5179e-03, 3.0238e-01, 3.8676e-01, + 6.6123e-01, 7.0235e-02, 6.9899e-02, 9.0151e-02, + 8.2116e-01, 5.5872e-02, 4.5865e-01, 7.4454e-01, + 7.3370e-01, 1.5898e-01, 7.6046e-01, 9.7744e-01, + 5.4908e-01, 7.3208e-01, 9.4656e-04, 4.3773e-01, + 4.0173e-01, 8.6463e-02, 8.8687e-01, 6.2508e-01, + 8.8316e-01, 9.5433e-01, 2.3738e-01, 1.0758e-01, + 1.5327e-02, 9.2363e-01, 1.0454e-01, 7.1558e-01, + 3.6301e-02, 4.9718e-02, 3.0986e-01, 2.4098e-01, + 1.6240e-01, 7.0127e-01, 6.4991e-01, 1.2629e-01, + 8.5916e-01, 2.7549e-02, 8.3010e-01, 8.2890e-01, + 6.7815e-01, 3.1205e-01, 9.6655e-01, 3.3408e-01, + 1.2578e-02, 4.9775e-01, 2.1561e-01, 6.5682e-01, + 5.7125e-02, 1.8404e-01, 5.7052e-01, 8.8783e-01, + 3.9235e-01, 6.8313e-01, 4.1377e-01, 1.7511e-01, + 7.7060e-01, 
7.0855e-01, 1.0184e-01, 9.8457e-01, + 2.0869e-01, 6.3023e-01, 3.7898e-01, 9.3396e-01, + 8.1871e-01, 8.3452e-01, 3.0948e-03, 6.4409e-01, + 4.7573e-02, 7.7738e-01, 9.1017e-01, 9.2124e-01, + 4.5314e-01, 8.5341e-01, 1.2478e-01, 5.0038e-01, + 3.6159e-02, 1.3102e-02, 1.0628e-01, 4.3346e-01, + 6.9472e-01, 1.3019e-01, 9.8634e-01, 5.1172e-01, + 4.1841e-01, 3.0226e-01, 1.6686e-01, 3.7051e-01, + 9.7840e-03, 1.0244e-01, 7.6910e-02, 8.8517e-01, + 9.8793e-01, 2.0405e-01, 3.4306e-01, 4.3266e-01, + 7.6748e-01, 2.4746e-01, 8.3364e-01, 7.1551e-02, + 2.4507e-01, 8.7205e-01, 9.6253e-01, 5.5396e-02, + 8.2181e-01, 6.1903e-01, 2.9350e-01, 7.1402e-01, + 9.9218e-01, 8.7025e-01, 3.8981e-01, 7.1050e-02, + 5.7844e-02, 1.2463e-03, 5.3615e-01, 8.5665e-01, + 6.4317e-01, 5.8345e-01, 2.4747e-01, 9.5192e-01, + 8.2362e-03, 2.1185e-01, 8.4901e-01, 2.1411e-01, + 4.5617e-01, 9.7912e-01, 7.7893e-01, 2.6371e-01, + 4.3274e-01, 2.2486e-01, 6.5112e-01, 8.4142e-01, + 6.2713e-04, 2.6749e-01, 8.4131e-01, 1.7395e-01, + 1.9240e-01, 7.8649e-01, 4.2937e-01, 1.3871e-01, + 6.2229e-01, 4.8731e-01, 2.5164e-01, 7.5094e-01, + 9.0593e-01, 4.6680e-02, 3.7037e-01, 4.7620e-01, + 9.7336e-01, 9.0943e-01, 5.9848e-01, 9.7297e-01, + 4.5819e-01, 8.0542e-01, 9.9578e-01, 1.3114e-01, + 4.1456e-01, 1.6044e-01, 7.0167e-01, 7.8015e-01, + 5.4577e-02, 8.3172e-01, 6.3844e-01, 2.9679e-01, + 3.5497e-01, 9.5436e-01, 3.4236e-01, 7.1246e-01, + 9.0983e-01, 8.5134e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.6142, 0.8273, 0.9812, ..., 0.2618, 0.5624, 0.1273]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -185,80 +293,104 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.8915646076202393 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '352628', '-ss', '5000', '-sd', '1e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.273355960845947} +Time: 10.994742631912231 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1604, 4, 806, 370, 4659, 2266, 385, 4480, 1740, - 2477, 1011, 4368, 1436, 1511, 582, 2881, 3146, 679, - 1335, 340, 2368, 3531, 2793, 4894, 1704, 800, 4449, - 2819, 3830, 944, 715, 291, 1651, 2756, 3425, 4366, - 1387, 4461, 4123, 3059, 45, 97, 307, 3123, 2010, - 4861, 3262, 819, 2940, 4148, 3668, 1416, 2946, 250, - 2020, 1865, 1972, 4176, 4993, 3807, 275, 1852, 2676, - 3641, 2214, 2133, 3702, 1587, 40, 2796, 4522, 2611, - 2391, 408, 3928, 2051, 4768, 4967, 847, 3011, 916, - 2658, 2737, 3985, 653, 1850, 4982, 4426, 3558, 4287, - 1078, 1321, 2196, 3153, 3474, 1886, 4386, 4813, 4479, - 1247, 1097, 4368, 3496, 4390, 2441, 28, 3845, 3018, - 574, 3154, 4908, 4477, 1259, 4186, 1078, 3130, 3163, - 3314, 3378, 1369, 3074, 965, 3176, 3034, 4337, 2593, - 2468, 3576, 1728, 3670, 4022, 4287, 319, 2341, 1420, - 3431, 3473, 1919, 368, 2943, 1836, 2897, 4091, 4055, - 2042, 694, 518, 3464, 437, 2319, 2327, 4527, 3332, - 286, 2756, 1769, 821, 2234, 2362, 3901, 2835, 3532, - 56, 1262, 1926, 2816, 573, 4537, 612, 849, 3556, - 1060, 4100, 3259, 4604, 1644, 551, 216, 1429, 4706, - 4000, 2046, 67, 4772, 4808, 2103, 2457, 770, 19, - 3752, 4627, 3183, 2351, 2290, 676, 4693, 3832, 2391, - 2085, 4458, 4110, 3726, 941, 4345, 4377, 4491, 3791, - 4120, 2339, 4337, 3728, 1293, 1315, 3558, 3212, 1812, - 4592, 3120, 1244, 3477, 1623, 73, 4441, 1447, 3927, - 4954, 4072, 1985, 625, 3210, 3147, 4908, 2800, 1924, - 230, 2107, 2981, 3816, 1923, 3645, 3133, 4236, 4114, - 2851, 2177, 1793, 4717, 666, 1768, 4852]), - values=tensor([0.8340, 0.7919, 0.0199, 0.7955, 0.8390, 0.2112, 0.2062, - 0.2416, 0.0078, 0.1110, 0.8766, 0.2461, 0.5766, 0.2522, - 0.8938, 0.7197, 0.9623, 0.5758, 0.3379, 0.5611, 0.1986, - 0.0227, 0.4551, 0.8241, 0.0932, 0.4545, 0.5055, 0.7378, - 0.9811, 0.7838, 0.9261, 0.3312, 0.1662, 0.7114, 0.8864, - 0.9809, 0.1390, 0.6532, 0.2965, 0.0298, 0.8840, 0.9398, - 0.6219, 0.4181, 0.3747, 0.5146, 0.8402, 0.0806, 0.9003, - 0.4097, 0.4861, 0.8634, 0.8848, 0.4692, 0.4523, 0.5039, - 0.7094, 0.3166, 0.2806, 0.4769, 0.9739, 0.8634, 0.3699, - 0.8453, 0.0189, 0.8787, 0.8196, 0.8724, 0.2325, 0.0224, - 0.5326, 0.1429, 0.6605, 0.4303, 0.9331, 0.8262, 0.4714, - 0.3810, 0.9149, 0.4305, 0.2891, 0.7127, 0.3828, 0.4241, - 0.9483, 0.5644, 0.3167, 0.4464, 0.4110, 0.1906, 0.8227, - 0.3284, 0.6812, 0.4592, 0.8170, 0.4218, 0.2545, 0.2861, - 0.1807, 0.3784, 0.2316, 0.6484, 0.4370, 0.4606, 0.6060, - 0.1427, 0.6182, 0.8321, 0.4963, 0.9467, 0.6222, 0.0087, - 0.8644, 0.1970, 0.6141, 0.5044, 0.8825, 0.7629, 0.0116, - 0.7947, 0.1399, 0.5336, 0.5972, 0.1395, 0.9791, 0.9029, - 0.5148, 0.1269, 0.3422, 0.7435, 0.2942, 0.7550, 0.2954, - 0.5429, 0.3946, 0.1495, 0.9295, 0.4788, 0.3075, 0.4290, - 0.1023, 0.3547, 0.2906, 0.5885, 0.8529, 0.0126, 0.2314, - 0.8888, 0.5984, 0.0063, 0.0122, 0.5164, 0.6866, 0.4135, - 0.9434, 0.8529, 0.4727, 0.6175, 0.7220, 0.1600, 0.7729, - 0.7553, 0.8476, 0.2583, 0.1648, 0.3383, 0.1827, 0.5841, - 0.8183, 0.2678, 0.2397, 0.1691, 0.8089, 0.7103, 0.0096, - 0.5130, 0.0577, 0.3835, 0.4322, 0.8199, 0.2829, 0.8244, - 0.4148, 0.6484, 0.7719, 0.3598, 0.6003, 0.6391, 0.8970, - 0.2186, 0.5556, 0.9770, 0.6002, 0.0280, 0.6160, 0.1589, - 0.7241, 0.2905, 0.4033, 0.4301, 0.8521, 0.8618, 0.5604, - 0.1077, 0.2810, 0.1105, 0.5637, 0.8228, 0.0305, 0.7660, - 0.3373, 0.7652, 0.7287, 0.6077, 0.2858, 0.4001, 0.8614, - 0.8105, 0.5021, 0.3182, 0.2015, 0.3600, 0.7160, 0.9874, - 0.0572, 0.8754, 0.4725, 0.5233, 0.0364, 0.1500, 
0.2431, - 0.3915, 0.8270, 0.2064, 0.5104, 0.9129, 0.4413, 0.8801, - 0.9179, 0.9739, 0.2250, 0.5404, 0.9261, 0.5735, 0.2090, - 0.7470, 0.4131, 0.1494, 0.0532, 0.6628]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.3633, 0.6430, 0.8109, ..., 0.6589, 0.7112, 0.9999]) + col_indices=tensor([4562, 2658, 106, 2707, 1737, 31, 1236, 2601, 4534, + 3254, 3271, 4540, 3081, 2346, 2928, 2662, 4713, 4877, + 807, 2740, 4962, 2976, 66, 2583, 3811, 4806, 3845, + 2261, 3732, 3017, 4707, 140, 2265, 2383, 1022, 324, + 586, 4190, 1598, 4806, 880, 2096, 2650, 1734, 3886, + 4475, 1386, 572, 1771, 361, 474, 400, 251, 2254, + 164, 4775, 2715, 667, 530, 3685, 1812, 4570, 2585, + 1061, 132, 3970, 1892, 4376, 4938, 1177, 1540, 1094, + 2598, 1929, 2455, 3409, 2754, 3004, 2819, 4249, 1236, + 2450, 4024, 2612, 44, 2802, 1797, 3005, 2436, 4704, + 4446, 4605, 565, 151, 2903, 2651, 2863, 2056, 4578, + 4132, 3598, 1672, 2855, 1374, 2534, 3111, 2262, 3781, + 411, 4876, 1654, 2116, 1289, 1601, 2348, 3492, 1891, + 843, 2287, 4012, 1387, 4470, 2004, 1891, 2033, 1853, + 2464, 2578, 1338, 4816, 3074, 4285, 3913, 3541, 551, + 2748, 1498, 995, 2268, 4331, 446, 1905, 250, 1077, + 2526, 3827, 4511, 1434, 1869, 4116, 2998, 2893, 3357, + 3594, 4700, 621, 3200, 3851, 2027, 2593, 3334, 4466, + 2920, 1564, 1636, 2281, 754, 1381, 4482, 375, 1762, + 3406, 106, 4651, 2115, 2062, 2646, 1312, 2177, 314, + 2509, 1679, 2541, 3269, 2951, 2242, 3586, 2240, 610, + 1147, 4273, 4823, 556, 3140, 670, 1247, 2220, 3574, + 3114, 502, 1907, 3708, 59, 1444, 525, 3369, 4055, + 3616, 3354, 4261, 4934, 749, 2897, 283, 728, 4351, + 130, 1856, 4457, 4040, 1276, 3419, 1572, 40, 3855, + 3150, 1314, 2103, 3228, 3721, 36, 226, 4178, 3192, + 796, 648, 313, 3279, 4408, 2934, 509, 744, 2625, + 966, 4967, 1858, 4229, 1329, 2910, 1456]), + values=tensor([8.0445e-01, 1.6460e-01, 1.5819e-01, 2.5322e-01, + 1.2726e-02, 4.4524e-01, 4.2645e-01, 7.0378e-01, + 4.7825e-01, 6.5571e-01, 5.3803e-01, 7.0301e-01, + 6.0138e-01, 2.0470e-01, 6.5732e-01, 3.8060e-01, + 8.5678e-01, 6.0247e-01, 9.3278e-01, 4.1187e-01, + 1.3415e-01, 5.1322e-01, 8.4647e-01, 8.4494e-01, + 8.3918e-01, 9.7017e-01, 6.2745e-01, 6.2131e-02, + 4.9514e-01, 5.6931e-01, 4.2294e-01, 6.2142e-01, + 9.2804e-01, 1.4183e-01, 3.7922e-01, 3.6274e-01, + 9.7354e-01, 6.3392e-01, 2.3882e-01, 8.3557e-01, + 8.4776e-01, 5.9301e-01, 4.3111e-01, 7.4024e-01, + 6.2693e-01, 5.5763e-01, 7.0525e-01, 7.4129e-01, + 7.6963e-01, 6.8183e-01, 1.1384e-01, 1.5937e-01, + 1.3322e-01, 3.9472e-01, 5.3676e-01, 7.4470e-01, + 1.9547e-01, 1.8929e-01, 2.3424e-01, 4.7418e-02, + 9.1805e-01, 4.1401e-01, 6.5459e-01, 8.9676e-01, + 2.8504e-01, 8.2652e-01, 4.3184e-01, 6.9357e-01, + 9.3383e-02, 7.0300e-01, 4.1300e-01, 7.4788e-01, + 4.7790e-01, 4.5509e-01, 7.6673e-01, 7.4493e-02, + 6.4246e-01, 5.5179e-03, 3.0238e-01, 3.8676e-01, + 6.6123e-01, 7.0235e-02, 6.9899e-02, 9.0151e-02, + 8.2116e-01, 5.5872e-02, 4.5865e-01, 7.4454e-01, + 7.3370e-01, 1.5898e-01, 7.6046e-01, 9.7744e-01, + 5.4908e-01, 7.3208e-01, 9.4656e-04, 4.3773e-01, + 4.0173e-01, 8.6463e-02, 8.8687e-01, 6.2508e-01, + 8.8316e-01, 9.5433e-01, 2.3738e-01, 1.0758e-01, + 1.5327e-02, 9.2363e-01, 1.0454e-01, 7.1558e-01, + 3.6301e-02, 4.9718e-02, 3.0986e-01, 2.4098e-01, + 1.6240e-01, 7.0127e-01, 6.4991e-01, 1.2629e-01, + 8.5916e-01, 2.7549e-02, 8.3010e-01, 8.2890e-01, + 6.7815e-01, 3.1205e-01, 9.6655e-01, 3.3408e-01, + 1.2578e-02, 4.9775e-01, 2.1561e-01, 6.5682e-01, + 5.7125e-02, 1.8404e-01, 5.7052e-01, 8.8783e-01, + 3.9235e-01, 6.8313e-01, 4.1377e-01, 1.7511e-01, + 7.7060e-01, 7.0855e-01, 
1.0184e-01, 9.8457e-01, + 2.0869e-01, 6.3023e-01, 3.7898e-01, 9.3396e-01, + 8.1871e-01, 8.3452e-01, 3.0948e-03, 6.4409e-01, + 4.7573e-02, 7.7738e-01, 9.1017e-01, 9.2124e-01, + 4.5314e-01, 8.5341e-01, 1.2478e-01, 5.0038e-01, + 3.6159e-02, 1.3102e-02, 1.0628e-01, 4.3346e-01, + 6.9472e-01, 1.3019e-01, 9.8634e-01, 5.1172e-01, + 4.1841e-01, 3.0226e-01, 1.6686e-01, 3.7051e-01, + 9.7840e-03, 1.0244e-01, 7.6910e-02, 8.8517e-01, + 9.8793e-01, 2.0405e-01, 3.4306e-01, 4.3266e-01, + 7.6748e-01, 2.4746e-01, 8.3364e-01, 7.1551e-02, + 2.4507e-01, 8.7205e-01, 9.6253e-01, 5.5396e-02, + 8.2181e-01, 6.1903e-01, 2.9350e-01, 7.1402e-01, + 9.9218e-01, 8.7025e-01, 3.8981e-01, 7.1050e-02, + 5.7844e-02, 1.2463e-03, 5.3615e-01, 8.5665e-01, + 6.4317e-01, 5.8345e-01, 2.4747e-01, 9.5192e-01, + 8.2362e-03, 2.1185e-01, 8.4901e-01, 2.1411e-01, + 4.5617e-01, 9.7912e-01, 7.7893e-01, 2.6371e-01, + 4.3274e-01, 2.2486e-01, 6.5112e-01, 8.4142e-01, + 6.2713e-04, 2.6749e-01, 8.4131e-01, 1.7395e-01, + 1.9240e-01, 7.8649e-01, 4.2937e-01, 1.3871e-01, + 6.2229e-01, 4.8731e-01, 2.5164e-01, 7.5094e-01, + 9.0593e-01, 4.6680e-02, 3.7037e-01, 4.7620e-01, + 9.7336e-01, 9.0943e-01, 5.9848e-01, 9.7297e-01, + 4.5819e-01, 8.0542e-01, 9.9578e-01, 1.3114e-01, + 4.1456e-01, 1.6044e-01, 7.0167e-01, 7.8015e-01, + 5.4577e-02, 8.3172e-01, 6.3844e-01, 2.9679e-01, + 3.5497e-01, 9.5436e-01, 3.4236e-01, 7.1246e-01, + 9.0983e-01, 8.5134e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.6142, 0.8273, 0.9812, ..., 0.2618, 0.5624, 0.1273]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -266,91 +398,13 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.273355960845947 seconds +Time: 10.994742631912231 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1604, 4, 806, 370, 4659, 2266, 385, 4480, 1740, - 2477, 1011, 4368, 1436, 1511, 582, 2881, 3146, 679, - 1335, 340, 2368, 3531, 2793, 4894, 1704, 800, 4449, - 2819, 3830, 944, 715, 291, 1651, 2756, 3425, 4366, - 1387, 4461, 4123, 3059, 45, 97, 307, 3123, 2010, - 4861, 3262, 819, 2940, 4148, 3668, 1416, 2946, 250, - 2020, 1865, 1972, 4176, 4993, 3807, 275, 1852, 2676, - 3641, 2214, 2133, 3702, 1587, 40, 2796, 4522, 2611, - 2391, 408, 3928, 2051, 4768, 4967, 847, 3011, 916, - 2658, 2737, 3985, 653, 1850, 4982, 4426, 3558, 4287, - 1078, 1321, 2196, 3153, 3474, 1886, 4386, 4813, 4479, - 1247, 1097, 4368, 3496, 4390, 2441, 28, 3845, 3018, - 574, 3154, 4908, 4477, 1259, 4186, 1078, 3130, 3163, - 3314, 3378, 1369, 3074, 965, 3176, 3034, 4337, 2593, - 2468, 3576, 1728, 3670, 4022, 4287, 319, 2341, 1420, - 3431, 3473, 1919, 368, 2943, 1836, 2897, 4091, 4055, - 2042, 694, 518, 3464, 437, 2319, 2327, 4527, 3332, - 286, 2756, 1769, 821, 2234, 2362, 3901, 2835, 3532, - 56, 1262, 1926, 2816, 573, 4537, 612, 849, 3556, - 1060, 4100, 3259, 4604, 1644, 551, 216, 1429, 4706, - 4000, 2046, 67, 4772, 4808, 2103, 2457, 770, 19, - 3752, 4627, 3183, 2351, 2290, 676, 4693, 3832, 2391, - 2085, 4458, 4110, 3726, 941, 4345, 4377, 4491, 3791, - 4120, 2339, 4337, 3728, 1293, 1315, 3558, 3212, 1812, - 4592, 3120, 1244, 3477, 1623, 73, 4441, 1447, 3927, - 4954, 4072, 1985, 625, 3210, 3147, 4908, 2800, 1924, - 230, 2107, 2981, 3816, 1923, 3645, 3133, 4236, 4114, - 2851, 2177, 1793, 4717, 666, 1768, 4852]), - values=tensor([0.8340, 0.7919, 0.0199, 0.7955, 0.8390, 0.2112, 0.2062, - 0.2416, 0.0078, 0.1110, 0.8766, 0.2461, 0.5766, 0.2522, - 0.8938, 0.7197, 0.9623, 0.5758, 0.3379, 0.5611, 0.1986, - 0.0227, 0.4551, 0.8241, 0.0932, 0.4545, 0.5055, 0.7378, - 0.9811, 0.7838, 0.9261, 0.3312, 0.1662, 0.7114, 0.8864, - 0.9809, 0.1390, 0.6532, 0.2965, 0.0298, 0.8840, 0.9398, - 0.6219, 0.4181, 0.3747, 0.5146, 0.8402, 0.0806, 0.9003, - 0.4097, 0.4861, 0.8634, 0.8848, 0.4692, 0.4523, 0.5039, - 0.7094, 0.3166, 0.2806, 0.4769, 0.9739, 0.8634, 0.3699, - 0.8453, 0.0189, 0.8787, 0.8196, 0.8724, 0.2325, 0.0224, - 0.5326, 0.1429, 0.6605, 0.4303, 0.9331, 0.8262, 0.4714, - 0.3810, 0.9149, 0.4305, 0.2891, 0.7127, 0.3828, 0.4241, - 0.9483, 0.5644, 0.3167, 0.4464, 0.4110, 0.1906, 0.8227, - 0.3284, 0.6812, 0.4592, 0.8170, 0.4218, 0.2545, 0.2861, - 0.1807, 0.3784, 0.2316, 0.6484, 0.4370, 0.4606, 0.6060, - 0.1427, 0.6182, 0.8321, 0.4963, 0.9467, 0.6222, 0.0087, - 0.8644, 0.1970, 0.6141, 0.5044, 0.8825, 0.7629, 0.0116, - 0.7947, 0.1399, 0.5336, 0.5972, 0.1395, 0.9791, 0.9029, - 0.5148, 0.1269, 0.3422, 0.7435, 0.2942, 0.7550, 0.2954, - 0.5429, 0.3946, 0.1495, 0.9295, 0.4788, 0.3075, 0.4290, - 0.1023, 0.3547, 0.2906, 0.5885, 0.8529, 0.0126, 0.2314, - 0.8888, 0.5984, 0.0063, 0.0122, 0.5164, 0.6866, 0.4135, - 0.9434, 0.8529, 0.4727, 0.6175, 0.7220, 0.1600, 0.7729, - 0.7553, 0.8476, 0.2583, 0.1648, 0.3383, 0.1827, 0.5841, - 0.8183, 0.2678, 0.2397, 0.1691, 0.8089, 0.7103, 0.0096, - 0.5130, 0.0577, 0.3835, 0.4322, 0.8199, 0.2829, 0.8244, - 0.4148, 0.6484, 0.7719, 0.3598, 0.6003, 0.6391, 0.8970, - 0.2186, 0.5556, 0.9770, 0.6002, 0.0280, 0.6160, 0.1589, - 0.7241, 0.2905, 0.4033, 0.4301, 0.8521, 0.8618, 0.5604, - 0.1077, 0.2810, 0.1105, 0.5637, 0.8228, 0.0305, 0.7660, - 0.3373, 0.7652, 0.7287, 0.6077, 0.2858, 0.4001, 0.8614, - 0.8105, 0.5021, 0.3182, 0.2015, 0.3600, 0.7160, 0.9874, - 0.0572, 0.8754, 0.4725, 0.5233, 0.0364, 
0.1500, 0.2431, - 0.3915, 0.8270, 0.2064, 0.5104, 0.9129, 0.4413, 0.8801, - 0.9179, 0.9739, 0.2250, 0.5404, 0.9261, 0.5735, 0.2090, - 0.7470, 0.4131, 0.1494, 0.0532, 0.6628]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.3633, 0.6430, 0.8109, ..., 0.6589, 0.7112, 0.9999]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 250 -Density: 1e-05 -Time: 10.273355960845947 seconds - -[18.62, 17.97, 17.77, 17.77, 18.2, 18.07, 17.98, 17.9, 18.36, 17.75] -[73.02] -13.663307428359985 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 352628, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.273355960845947, 'TIME_S_1KI': 0.029133693186150694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 997.6947084188461, 'W': 73.02} -[18.62, 17.97, 17.77, 17.77, 18.2, 18.07, 17.98, 17.9, 18.36, 17.75, 18.68, 18.07, 18.34, 18.06, 18.18, 17.85, 18.03, 18.15, 17.94, 17.93] -325.13 -16.2565 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 352628, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.273355960845947, 'TIME_S_1KI': 0.029133693186150694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 997.6947084188461, 'W': 73.02, 'J_1KI': 2.8293122168938547, 'W_1KI': 0.20707374343500798, 'W_D': 56.76349999999999, 'J_D': 775.5771512097119, 'W_D_1KI': 0.16097275315630066, 'J_D_1KI': 0.00045649453008921774} +[19.38, 18.54, 18.64, 18.62, 18.88, 18.7, 19.3, 18.66, 18.88, 18.55] +[81.52] +13.938100099563599 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 358898, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.994742631912231, 'TIME_S_1KI': 0.030634728061767497, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1136.2339201164245, 'W': 81.52} +[19.38, 18.54, 18.64, 18.62, 18.88, 18.7, 19.3, 18.66, 18.88, 18.55, 18.99, 18.73, 19.19, 18.53, 18.66, 18.43, 18.64, 18.47, 18.57, 18.51] +337.155 +16.85775 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 358898, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.994742631912231, 'TIME_S_1KI': 0.030634728061767497, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1136.2339201164245, 'W': 81.52, 'J_1KI': 3.1658964945929613, 'W_1KI': 0.22713974443992443, 'W_D': 64.66225, 'J_D': 901.2689131630063, 'W_D_1KI': 0.18016887806563425, 'J_D_1KI': 0.000502005801273995} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.json index f8ae472..cfada83 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 348362, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.331048488616943, "TIME_S_1KI": 0.029656071812129176, "BASELINE_TIME_S": 
10, "BASELINE_DELAY_S": 10, "J": 1019.3632760500908, "W": 73.39, "J_1KI": 2.9261609361815895, "W_1KI": 0.2106716576434858, "W_D": 57.056, "J_D": 792.4893184127807, "W_D_1KI": 0.1637836503407375, "J_D_1KI": 0.0004701536055618509} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 345614, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.329712390899658, "TIME_S_1KI": 0.029888003353161788, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1114.3590168857575, "W": 80.92, "J_1KI": 3.224287838125069, "W_1KI": 0.23413403392223694, "W_D": 63.62, "J_D": 876.1186437749863, "W_D_1KI": 0.18407819127697372, "J_D_1KI": 0.0005326120795945005} diff --git a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.output index 7fb17e1..ac5f600 100644 --- a/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_16core/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01948690414428711} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01959514617919922} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), - col_indices=tensor([3543, 2601, 4811, ..., 3171, 1181, 2171]), - values=tensor([0.9467, 0.6961, 0.1720, ..., 0.4974, 0.2968, 0.3956]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1249, 1250]), + col_indices=tensor([4264, 4669, 451, ..., 1795, 620, 4857]), + values=tensor([0.1704, 0.3269, 0.7430, ..., 0.5652, 0.6816, 0.7253]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.9876, 0.5815, 0.6649, ..., 0.6796, 0.2344, 0.7286]) +tensor([0.6615, 0.0381, 0.0367, ..., 0.5298, 0.9396, 0.7509]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 0.01948690414428711 seconds +Time: 0.01959514617919922 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53882', '-ss', '5000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.6240589618682861} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53584', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.627917766571045} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), - col_indices=tensor([ 267, 783, 3915, ..., 3618, 4520, 1464]), - values=tensor([0.2837, 0.8920, 0.5250, ..., 0.9331, 0.1091, 0.1041]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 1250, 1250, 1250]), + col_indices=tensor([1581, 3868, 1736, ..., 4846, 4742, 3573]), + values=tensor([0.6556, 0.7489, 0.5131, ..., 0.6895, 0.9760, 0.2267]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.6941, 0.6065, 0.9857, ..., 0.4180, 0.3910, 0.2569]) +tensor([0.8272, 0.8622, 0.3196, ..., 0.2839, 0.4073, 0.4097]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 1.6240589618682861 seconds +Time: 1.627917766571045 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '348362', '-ss', '5000', '-sd', '5e-05', '-c', '16'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.331048488616943} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '345614', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.329712390899658} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([2866, 1356, 2436, ..., 421, 1796, 3666]), - values=tensor([0.0261, 0.5356, 0.3907, ..., 0.0828, 0.6288, 0.2100]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 1248, 1249, 1250]), + col_indices=tensor([ 42, 3037, 1550, ..., 1416, 889, 4366]), + values=tensor([0.7068, 0.8277, 0.9092, ..., 0.9350, 0.4485, 0.3337]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.2739, 0.9422, 0.5483, ..., 0.7719, 0.1377, 0.3851]) +tensor([0.4471, 0.7775, 0.4895, ..., 0.3897, 0.9890, 0.2278]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.331048488616943 seconds +Time: 10.329712390899658 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([2866, 1356, 2436, ..., 421, 1796, 3666]), - values=tensor([0.0261, 0.5356, 0.3907, ..., 0.0828, 0.6288, 0.2100]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 1248, 1249, 1250]), + col_indices=tensor([ 42, 3037, 1550, ..., 1416, 889, 4366]), + values=tensor([0.7068, 0.8277, 0.9092, ..., 0.9350, 0.4485, 0.3337]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.2739, 0.9422, 0.5483, ..., 0.7719, 0.1377, 0.3851]) +tensor([0.4471, 0.7775, 0.4895, ..., 0.3897, 0.9890, 0.2278]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.331048488616943 seconds +Time: 10.329712390899658 seconds -[18.79, 18.13, 18.25, 18.09, 18.59, 17.94, 18.05, 18.04, 18.23, 18.09] -[73.39] -13.889675378799438 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 348362, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.331048488616943, 'TIME_S_1KI': 0.029656071812129176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1019.3632760500908, 'W': 73.39} -[18.79, 18.13, 18.25, 18.09, 18.59, 17.94, 18.05, 18.04, 18.23, 18.09, 18.49, 18.19, 18.09, 17.88, 18.04, 18.12, 18.21, 18.0, 18.16, 17.97] -326.68 -16.334 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 348362, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.331048488616943, 'TIME_S_1KI': 0.029656071812129176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1019.3632760500908, 'W': 73.39, 'J_1KI': 2.9261609361815895, 'W_1KI': 0.2106716576434858, 'W_D': 57.056, 'J_D': 792.4893184127807, 'W_D_1KI': 0.1637836503407375, 'J_D_1KI': 0.0004701536055618509} +[19.68, 18.79, 19.94, 18.61, 18.88, 18.56, 18.74, 18.48, 18.79, 24.64] +[80.92] +13.771119832992554 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 345614, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.329712390899658, 'TIME_S_1KI': 0.029888003353161788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1114.3590168857575, 'W': 80.92} +[19.68, 18.79, 19.94, 18.61, 18.88, 18.56, 18.74, 18.48, 18.79, 24.64, 18.84, 18.56, 18.75, 18.85, 18.69, 22.73, 19.15, 18.71, 18.95, 18.48] +346.00000000000006 +17.300000000000004 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 345614, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.329712390899658, 'TIME_S_1KI': 0.029888003353161788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1114.3590168857575, 'W': 80.92, 'J_1KI': 3.224287838125069, 'W_1KI': 0.23413403392223694, 'W_D': 63.62, 'J_D': 876.1186437749863, 'W_D_1KI': 0.18407819127697372, 'J_D_1KI': 0.0005326120795945005} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..e9b3498 --- /dev/null +++ 
b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1748, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.143786191940308, "TIME_S_1KI": 5.8030813455036085, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 530.3414198303223, "W": 36.295146632325824, "J_1KI": 303.3989815962942, "W_1KI": 20.763813862886625, "W_D": 17.602146632325823, "J_D": 257.20098424220083, "W_D_1KI": 10.069877936113171, "J_D_1KI": 5.760799734618519} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..2da7ef7 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6005632877349854} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 17, ..., 999970, + 999989, 1000000]), + col_indices=tensor([27708, 32922, 35240, ..., 82805, 88487, 98517]), + values=tensor([0.0088, 0.7733, 0.0012, ..., 0.6420, 0.7382, 0.2177]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.1129, 0.5965, 0.7496, ..., 0.0902, 0.9107, 0.7724]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.6005632877349854 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1748 -ss 100000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.143786191940308} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 23, ..., 999980, + 999992, 1000000]), + col_indices=tensor([ 8017, 17251, 18992, ..., 72823, 91334, 91663]), + values=tensor([0.4596, 0.1797, 0.9797, ..., 0.0499, 0.7967, 0.0183]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8873, 0.5523, 0.3791, ..., 0.8812, 0.4027, 0.2259]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.143786191940308 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 23, ..., 999980, + 999992, 1000000]), + col_indices=tensor([ 8017, 17251, 18992, ..., 72823, 91334, 91663]), + values=tensor([0.4596, 0.1797, 0.9797, ..., 0.0499, 0.7967, 0.0183]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8873, 0.5523, 0.3791, ..., 0.8812, 0.4027, 0.2259]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.143786191940308 seconds + +[20.48, 20.6, 20.64, 20.68, 21.12, 21.12, 21.24, 21.12, 21.04, 21.04] +[20.76, 20.76, 21.04, 22.36, 24.68, 32.36, 38.56, 45.04, 50.12, 51.88, 51.52, 52.0, 52.0, 51.84] +14.611910104751587 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.143786191940308, 'TIME_S_1KI': 5.8030813455036085, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 530.3414198303223, 'W': 36.295146632325824} +[20.48, 20.6, 20.64, 20.68, 21.12, 21.12, 21.24, 21.12, 21.04, 21.04, 20.36, 20.64, 20.56, 20.72, 20.68, 20.52, 20.72, 20.76, 20.52, 20.48] +373.86 +18.693 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.143786191940308, 'TIME_S_1KI': 5.8030813455036085, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 530.3414198303223, 'W': 36.295146632325824, 'J_1KI': 303.3989815962942, 'W_1KI': 20.763813862886625, 'W_D': 17.602146632325823, 'J_D': 257.20098424220083, 'W_D_1KI': 10.069877936113171, 'J_D_1KI': 5.760799734618519} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..fdbe85f --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 175, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.183927774429321, "TIME_S_1KI": 63.90815871102469, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 
628.162894821167, "W": 35.42748603907242, "J_1KI": 3589.5022561209544, "W_1KI": 202.44277736612813, "W_D": 16.71548603907242, "J_D": 296.38140530395515, "W_D_1KI": 95.51706308041383, "J_D_1KI": 545.8117890309362} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..887a602 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.9738054275512695} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 100, 229, ..., 9999825, + 9999913, 10000000]), + col_indices=tensor([ 2839, 3131, 5153, ..., 92533, 94576, 98932]), + values=tensor([0.4697, 0.9996, 0.7875, ..., 0.5192, 0.5202, 0.9540]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.9598, 0.0952, 0.8851, ..., 0.3844, 0.8104, 0.5939]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 5.9738054275512695 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 175 -ss 100000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.183927774429321} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 95, 193, ..., 9999801, + 9999901, 10000000]), + col_indices=tensor([ 2869, 4015, 6080, ..., 94953, 95635, 98117]), + values=tensor([0.0857, 0.9758, 0.7363, ..., 0.8151, 0.8595, 0.7723]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7586, 0.5970, 0.0221, ..., 0.6721, 0.2659, 0.4588]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 11.183927774429321 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 95, 193, ..., 9999801, + 9999901, 10000000]), + col_indices=tensor([ 2869, 4015, 6080, ..., 94953, 95635, 98117]), + values=tensor([0.0857, 0.9758, 0.7363, ..., 0.8151, 0.8595, 0.7723]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7586, 0.5970, 0.0221, ..., 0.6721, 0.2659, 0.4588]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 11.183927774429321 seconds + +[20.52, 20.84, 20.72, 20.68, 20.8, 20.68, 20.68, 20.72, 20.64, 20.64] +[20.64, 20.76, 20.84, 24.4, 27.0, 28.6, 31.96, 32.96, 34.0, 38.76, 44.2, 47.96, 51.6, 50.84, 50.88, 50.6, 50.76] +17.730947494506836 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.183927774429321, 'TIME_S_1KI': 63.90815871102469, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 628.162894821167, 'W': 35.42748603907242} +[20.52, 20.84, 20.72, 20.68, 20.8, 20.68, 20.68, 20.72, 20.64, 20.64, 20.64, 20.64, 20.64, 20.72, 20.76, 20.76, 21.0, 20.92, 21.36, 21.56] +374.24 +18.712 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.183927774429321, 'TIME_S_1KI': 63.90815871102469, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 628.162894821167, 'W': 35.42748603907242, 'J_1KI': 3589.5022561209544, 'W_1KI': 202.44277736612813, 'W_D': 16.71548603907242, 'J_D': 296.38140530395515, 'W_D_1KI': 95.51706308041383, 'J_D_1KI': 545.8117890309362} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..eff1e86 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 11597, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.436057329177856, "TIME_S_1KI": 0.8998928454926151, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 483.6025880336762, "W": 33.00177532885384, "J_1KI": 41.700662932971994, "W_1KI": 2.845716592985586, "W_D": 14.225775328853839, "J_D": 208.462172027588, "W_D_1KI": 1.2266771862424626, "J_D_1KI": 0.10577538900081596} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..cdfe200 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', 
'-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.12187767028808594} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 99998, 99999, + 100000]), + col_indices=tensor([99237, 81965, 52149, ..., 94819, 50598, 82628]), + values=tensor([0.3300, 0.8237, 0.5005, ..., 0.6469, 0.1010, 0.4687]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.0038, 0.2456, 0.3182, ..., 0.7163, 0.7510, 0.9775]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.12187767028808594 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 8615 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.799410104751587} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99999, + 100000]), + col_indices=tensor([88588, 42232, 90125, ..., 27244, 80106, 39636]), + values=tensor([0.8018, 0.8315, 0.5597, ..., 0.5532, 0.0030, 0.5793]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.1929, 0.1411, 0.4568, ..., 0.6294, 0.2188, 0.4350]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 7.799410104751587 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 11597 -ss 100000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.436057329177856} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99998, + 100000]), + col_indices=tensor([24172, 83350, 29274, ..., 76990, 53592, 71081]), + values=tensor([0.7302, 0.8346, 0.3553, ..., 0.4222, 0.8183, 0.0288]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8537, 0.9431, 0.0277, ..., 0.1357, 0.7019, 0.9196]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.436057329177856 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99998, + 100000]), + col_indices=tensor([24172, 83350, 29274, ..., 76990, 53592, 71081]), + values=tensor([0.7302, 0.8346, 0.3553, ..., 0.4222, 0.8183, 0.0288]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8537, 0.9431, 0.0277, ..., 0.1357, 0.7019, 0.9196]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.436057329177856 seconds + +[21.08, 20.64, 20.76, 20.76, 20.92, 21.32, 21.32, 21.36, 21.0, 20.8] +[20.64, 20.36, 20.72, 23.08, 24.72, 29.44, 35.88, 40.08, 43.56, 45.4, 45.96, 45.6, 45.36, 46.04] +14.653835535049438 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11597, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.436057329177856, 'TIME_S_1KI': 0.8998928454926151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 483.6025880336762, 'W': 33.00177532885384} +[21.08, 20.64, 20.76, 20.76, 20.92, 21.32, 21.32, 21.36, 21.0, 20.8, 20.36, 20.32, 20.32, 20.44, 20.64, 20.8, 21.0, 21.16, 21.12, 21.04] +375.52 +18.776 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 11597, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.436057329177856, 'TIME_S_1KI': 0.8998928454926151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 483.6025880336762, 'W': 33.00177532885384, 'J_1KI': 41.700662932971994, 'W_1KI': 2.845716592985586, 'W_D': 14.225775328853839, 'J_D': 208.462172027588, 'W_D_1KI': 1.2266771862424626, 'J_D_1KI': 0.10577538900081596} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..67cd5e0 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3297, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.891435861587524, "TIME_S_1KI": 3.3034382352403777, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 528.672490940094, "W": 
36.05478479295784, "J_1KI": 160.34955745832394, "W_1KI": 10.935633846817664, "W_D": 17.45578479295784, "J_D": 255.95474444794658, "W_D_1KI": 5.2944448871573675, "J_D_1KI": 1.605837090432929} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..3c3d8e5 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.4573814868927002} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 16, ..., 499990, 499995, + 500000]), + col_indices=tensor([ 5164, 6869, 8448, ..., 29154, 68140, 97893]), + values=tensor([0.8386, 0.0921, 0.7067, ..., 0.9232, 0.1449, 0.6848]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.0246, 0.8160, 0.3295, ..., 0.0588, 0.6998, 0.9868]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.4573814868927002 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2295 -ss 100000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.306779146194458} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 8, ..., 499989, 499995, + 500000]), + col_indices=tensor([ 2059, 19971, 54406, ..., 65065, 65922, 83323]), + values=tensor([0.5530, 0.6181, 0.7781, ..., 0.5380, 0.6243, 0.8378]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.4055, 0.5945, 0.9428, ..., 0.6446, 0.1456, 0.3700]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 7.306779146194458 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3297 -ss 100000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.891435861587524} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 11, ..., 499995, 499995, + 500000]), + col_indices=tensor([ 8913, 22689, 49331, ..., 65321, 72756, 72788]), + values=tensor([0.7511, 0.0782, 0.7533, ..., 0.6341, 0.1803, 0.2288]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.5601, 0.4293, 0.2285, ..., 0.5137, 0.5400, 0.5797]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.891435861587524 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 11, ..., 499995, 499995, + 500000]), + col_indices=tensor([ 8913, 22689, 49331, ..., 65321, 72756, 72788]), + values=tensor([0.7511, 0.0782, 0.7533, ..., 0.6341, 0.1803, 0.2288]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.5601, 0.4293, 0.2285, ..., 0.5137, 0.5400, 0.5797]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.891435861587524 seconds + +[20.84, 20.48, 20.44, 20.64, 20.64, 20.68, 20.72, 20.64, 20.64, 20.92] +[20.6, 20.48, 20.72, 25.12, 26.88, 32.4, 38.84, 42.28, 46.96, 50.12, 51.2, 51.8, 51.56, 51.6] +14.66303277015686 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.891435861587524, 'TIME_S_1KI': 3.3034382352403777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 528.672490940094, 'W': 36.05478479295784} +[20.84, 20.48, 20.44, 20.64, 20.64, 20.68, 20.72, 20.64, 20.64, 20.92, 20.84, 21.04, 20.84, 20.84, 20.76, 20.8, 20.68, 20.44, 20.28, 20.24] +371.98 +18.599 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.891435861587524, 'TIME_S_1KI': 3.3034382352403777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 528.672490940094, 'W': 36.05478479295784, 'J_1KI': 160.34955745832394, 'W_1KI': 10.935633846817664, 'W_D': 17.45578479295784, 'J_D': 255.95474444794658, 'W_D_1KI': 5.2944448871573675, 'J_D_1KI': 1.605837090432929} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..6fa3845 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 32636, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.47010850906372, "TIME_S_1KI": 0.32081469877018387, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 319.63640854835506, "W": 22.48260624860275, "J_1KI": 9.793982367580435, "W_1KI": 0.6888897612637195, "W_D": 3.984606248602752, "J_D": 56.64935891771315, "W_D_1KI": 0.12209235962136145, "J_D_1KI": 0.003741033203252894} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..21cdb17 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, 
"TIME_S": 0.050879478454589844} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9998, 9999, 10000]), + col_indices=tensor([5382, 2827, 5658, ..., 9195, 8647, 1137]), + values=tensor([0.6423, 0.5656, 0.8194, ..., 0.3825, 0.7281, 0.0248]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.6609, 0.7541, 0.4159, ..., 0.2180, 0.3481, 0.0053]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.050879478454589844 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 20637 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.639445781707764} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9998, 10000, 10000]), + col_indices=tensor([1538, 6690, 5733, ..., 9607, 7438, 7782]), + values=tensor([0.7222, 0.1089, 0.5631, ..., 0.3116, 0.0243, 0.6999]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.2878, 0.8940, 0.0961, ..., 0.0631, 0.2895, 0.2219]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 6.639445781707764 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32636 -ss 10000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.47010850906372} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9999, 10000, 10000]), + col_indices=tensor([7014, 8766, 3433, ..., 9466, 1431, 7728]), + values=tensor([0.0370, 0.3747, 0.2051, ..., 0.2901, 0.3737, 0.7201]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.8451, 0.4833, 0.4298, ..., 0.9015, 0.0937, 0.6764]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.47010850906372 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9999, 10000, 10000]), + col_indices=tensor([7014, 8766, 3433, ..., 9466, 1431, 7728]), + values=tensor([0.0370, 0.3747, 0.2051, ..., 0.2901, 0.3737, 0.7201]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.8451, 0.4833, 0.4298, ..., 0.9015, 0.0937, 0.6764]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.47010850906372 seconds + +[20.64, 20.64, 20.48, 20.48, 20.48, 20.4, 20.32, 20.16, 20.24, 20.4] +[20.72, 20.64, 21.24, 23.32, 25.32, 26.04, 26.72, 26.72, 26.48, 25.08, 23.72, 23.6, 23.56, 23.48] +14.217053174972534 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 32636, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.47010850906372, 'TIME_S_1KI': 0.32081469877018387, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 319.63640854835506, 'W': 22.48260624860275} +[20.64, 20.64, 20.48, 20.48, 20.48, 20.4, 20.32, 20.16, 20.24, 20.4, 20.64, 20.56, 20.48, 20.52, 20.68, 20.84, 20.88, 20.88, 20.84, 20.48] +369.96 +18.497999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 32636, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.47010850906372, 'TIME_S_1KI': 0.32081469877018387, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 319.63640854835506, 'W': 22.48260624860275, 'J_1KI': 9.793982367580435, 'W_1KI': 0.6888897612637195, 'W_D': 3.984606248602752, 'J_D': 56.64935891771315, 'W_D_1KI': 0.12209235962136145, 'J_D_1KI': 0.003741033203252894} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..c0d7ab3 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 4519, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.300970077514648, "TIME_S_1KI": 2.279479990598506, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 332.2094431686402, "W": 23.392743273719635, "J_1KI": 73.51392856132777, 
"W_1KI": 5.176530930232272, "W_D": 4.8837432737196345, "J_D": 69.35593720483783, "W_D_1KI": 1.0807132714582062, "J_D_1KI": 0.2391487655362262} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..f0ce63e --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2727935314178467} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 18, ..., 99975, 99993, + 100000]), + col_indices=tensor([2872, 4034, 5620, ..., 6357, 6556, 9590]), + values=tensor([0.7995, 0.0045, 0.2448, ..., 0.5761, 0.7842, 0.1546]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8077, 0.7130, 0.7281, ..., 0.3829, 0.9486, 0.9162]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.2727935314178467 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3849 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.942286252975464} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 12, 21, ..., 99991, 99998, + 100000]), + col_indices=tensor([ 425, 574, 695, ..., 9570, 6024, 9715]), + values=tensor([0.7410, 0.8879, 0.5840, ..., 0.6995, 0.9280, 0.9465]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2929, 0.5164, 0.5482, ..., 0.5103, 0.5008, 0.9557]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 8.942286252975464 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4519 -ss 10000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.300970077514648} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 16, ..., 99974, 99990, + 100000]), + col_indices=tensor([ 582, 1691, 2515, ..., 7345, 7996, 8295]), + values=tensor([0.8177, 0.9283, 0.6030, ..., 0.2647, 0.3717, 0.8633]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2209, 0.7260, 0.0429, ..., 0.4887, 0.7834, 0.0043]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.300970077514648 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 16, ..., 99974, 99990, + 100000]), + col_indices=tensor([ 582, 1691, 2515, ..., 7345, 7996, 8295]), + values=tensor([0.8177, 0.9283, 0.6030, ..., 0.2647, 0.3717, 0.8633]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2209, 0.7260, 0.0429, ..., 0.4887, 0.7834, 0.0043]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.300970077514648 seconds + +[20.48, 20.64, 20.44, 20.28, 20.28, 20.6, 20.72, 20.68, 20.68, 20.48] +[20.4, 20.36, 23.16, 24.08, 27.0, 27.6, 28.72, 28.72, 26.24, 26.28, 24.44, 24.28, 24.24, 24.08] +14.201388835906982 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.300970077514648, 'TIME_S_1KI': 2.279479990598506, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.2094431686402, 'W': 23.392743273719635} +[20.48, 20.64, 20.44, 20.28, 20.28, 20.6, 20.72, 20.68, 20.68, 20.48, 20.52, 20.6, 20.68, 20.64, 20.64, 20.52, 20.52, 20.48, 20.68, 20.72] +370.18 +18.509 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 4519, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.300970077514648, 'TIME_S_1KI': 2.279479990598506, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.2094431686402, 'W': 23.392743273719635, 'J_1KI': 73.51392856132777, 'W_1KI': 5.176530930232272, 'W_D': 4.8837432737196345, 'J_D': 69.35593720483783, 'W_D_1KI': 1.0807132714582062, 'J_D_1KI': 0.2391487655362262} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..6d63757 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 490, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.49430251121521, "TIME_S_1KI": 21.416943900439204, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 322.6961988353729, "W": 22.678695903983957, "J_1KI": 658.5636710925977, "W_1KI": 46.283052865273376, "W_D": 4.3196959039839555, "J_D": 61.46515012335775, "W_D_1KI": 8.815705926497868, "J_D_1KI": 17.991236584689528} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..8697ed9 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.140977382659912} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 103, 212, ..., 999798, + 999901, 1000000]), + col_indices=tensor([ 63, 140, 146, ..., 9691, 9771, 9918]), + values=tensor([0.8748, 0.2571, 0.8906, ..., 0.1504, 0.2890, 0.7825]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.5882, 0.3416, 0.1892, ..., 0.3016, 0.5220, 0.0626]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 2.140977382659912 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 490 -ss 10000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.49430251121521} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 113, 202, ..., 999820, + 999916, 1000000]), + col_indices=tensor([ 2, 35, 39, ..., 9519, 9605, 9656]), + values=tensor([0.0992, 0.7724, 0.9238, ..., 0.3639, 0.9758, 0.0697]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2199, 0.1288, 0.7757, ..., 0.1449, 0.2950, 0.2928]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.49430251121521 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 113, 202, ..., 999820, + 999916, 1000000]), + col_indices=tensor([ 2, 35, 39, ..., 9519, 9605, 9656]), + values=tensor([0.0992, 0.7724, 0.9238, ..., 0.3639, 0.9758, 0.0697]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2199, 0.1288, 0.7757, ..., 0.1449, 0.2950, 0.2928]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.49430251121521 seconds + +[20.68, 20.72, 20.56, 20.52, 20.48, 20.2, 19.92, 19.96, 19.92, 19.92] +[19.96, 20.36, 20.52, 22.04, 24.08, 26.92, 27.96, 27.96, 26.64, 25.04, 24.52, 24.4, 24.4, 24.52] +14.229045629501343 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 490, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.49430251121521, 'TIME_S_1KI': 21.416943900439204, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.6961988353729, 'W': 22.678695903983957} +[20.68, 20.72, 20.56, 20.52, 20.48, 20.2, 19.92, 19.96, 19.92, 19.92, 20.44, 20.36, 20.4, 20.52, 20.4, 20.64, 20.76, 20.52, 20.52, 20.52] +367.18 +18.359 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 490, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.49430251121521, 'TIME_S_1KI': 21.416943900439204, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.6961988353729, 'W': 22.678695903983957, 'J_1KI': 658.5636710925977, 'W_1KI': 46.283052865273376, 'W_D': 4.3196959039839555, 'J_D': 61.46515012335775, 'W_D_1KI': 8.815705926497868, 'J_D_1KI': 17.991236584689528} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..d53721b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.575754880905151, "TIME_S_1KI": 105.75754880905151, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 357.48722974777223, "W": 23.357767084816274, "J_1KI": 3574.8722974777224, "W_1KI": 233.57767084816274, "W_D": 4.891767084816273, "J_D": 74.86778412389756, "W_D_1KI": 48.91767084816273, "J_D_1KI": 489.1767084816273} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..5bc40f5 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.575754880905151} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 505, 1011, ..., 4998999, + 4999505, 5000000]), + col_indices=tensor([ 17, 34, 93, ..., 9927, 9945, 9977]), + values=tensor([0.3942, 0.9668, 0.2842, ..., 0.3748, 0.5474, 0.6270]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.3102, 0.7326, 0.1847, ..., 0.2267, 0.2009, 0.0941]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.575754880905151 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 505, 1011, ..., 4998999, + 4999505, 5000000]), + col_indices=tensor([ 17, 34, 93, ..., 9927, 9945, 9977]), + values=tensor([0.3942, 0.9668, 0.2842, ..., 0.3748, 0.5474, 0.6270]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.3102, 0.7326, 0.1847, ..., 0.2267, 0.2009, 0.0941]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.575754880905151 seconds + +[20.44, 20.44, 20.44, 20.52, 20.44, 20.4, 20.32, 20.2, 20.08, 20.32] +[20.32, 20.16, 21.08, 22.56, 24.44, 27.6, 27.6, 29.28, 28.88, 28.12, 25.16, 24.16, 24.12, 24.4, 24.56] +15.30485463142395 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.575754880905151, 'TIME_S_1KI': 105.75754880905151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.48722974777223, 'W': 23.357767084816274} +[20.44, 20.44, 20.44, 20.52, 20.44, 20.4, 20.32, 20.2, 20.08, 20.32, 20.52, 20.56, 20.76, 20.76, 20.84, 20.84, 20.72, 20.56, 20.56, 20.48] +369.32 +18.466 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.575754880905151, 'TIME_S_1KI': 105.75754880905151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.48722974777223, 'W': 23.357767084816274, 'J_1KI': 3574.8722974777224, 'W_1KI': 233.57767084816274, 'W_D': 4.891767084816273, 'J_D': 74.86778412389756, 'W_D_1KI': 48.91767084816273, 'J_D_1KI': 489.1767084816273} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..d4908f3 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, 
"ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.58586049079895, "TIME_S_1KI": 215.8586049079895, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 695.2596657943726, "W": 24.42843676698333, "J_1KI": 6952.596657943726, "W_1KI": 244.2843676698333, "W_D": 6.1854367669833294, "J_D": 176.04420374608034, "W_D_1KI": 61.854367669833294, "J_D_1KI": 618.543676698333} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..b9e17f1 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.58586049079895} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 983, 1945, ..., 9997995, + 9998975, 10000000]), + col_indices=tensor([ 7, 29, 32, ..., 9986, 9994, 9999]), + values=tensor([0.5805, 0.0545, 0.7779, ..., 0.8799, 0.6314, 0.5149]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1246, 0.2739, 0.0084, ..., 0.7975, 0.3318, 0.0977]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 21.58586049079895 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 983, 1945, ..., 9997995, + 9998975, 10000000]), + col_indices=tensor([ 7, 29, 32, ..., 9986, 9994, 9999]), + values=tensor([0.5805, 0.0545, 0.7779, ..., 0.8799, 0.6314, 0.5149]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1246, 0.2739, 0.0084, ..., 0.7975, 0.3318, 0.0977]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 21.58586049079895 seconds + +[20.44, 20.24, 20.32, 20.24, 20.16, 20.52, 20.56, 20.68, 20.88, 21.04] +[20.8, 20.6, 23.72, 23.72, 26.04, 27.56, 30.88, 32.68, 29.8, 29.08, 27.52, 26.28, 24.44, 24.48, 24.48, 24.2, 24.2, 24.2, 24.12, 24.12, 24.28, 24.28, 24.4, 24.32, 24.16, 24.0, 24.08, 24.16] +28.461078882217407 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.58586049079895, 'TIME_S_1KI': 215.8586049079895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 695.2596657943726, 'W': 24.42843676698333} +[20.44, 20.24, 20.32, 20.24, 20.16, 20.52, 20.56, 20.68, 20.88, 21.04, 19.8, 19.8, 19.8, 19.96, 20.0, 20.08, 20.12, 20.36, 20.32, 20.36] +364.86 +18.243000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.58586049079895, 'TIME_S_1KI': 215.8586049079895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 695.2596657943726, 'W': 24.42843676698333, 'J_1KI': 6952.596657943726, 'W_1KI': 244.2843676698333, 'W_D': 6.1854367669833294, 'J_D': 176.04420374608034, 'W_D_1KI': 61.854367669833294, 'J_D_1KI': 618.543676698333} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..00303c3 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.006776571273804, "TIME_S_1KI": 420.06776571273804, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1323.3857918548583, "W": 24.65839535462019, "J_1KI": 13233.857918548583, "W_1KI": 246.58395354620188, "W_D": 6.256395354620192, "J_D": 335.77305422592167, "W_D_1KI": 62.56395354620192, "J_D_1KI": 625.6395354620192} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..7c8bead --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.2 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, 
"MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.006776571273804} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1950, 3929, ..., 19995954, + 19997973, 20000000]), + col_indices=tensor([ 0, 10, 17, ..., 9977, 9980, 9990]), + values=tensor([0.1470, 0.8510, 0.9446, ..., 0.3735, 0.6466, 0.3885]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.1499, 0.7404, 0.4886, ..., 0.1182, 0.4158, 0.3615]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 42.006776571273804 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1950, 3929, ..., 19995954, + 19997973, 20000000]), + col_indices=tensor([ 0, 10, 17, ..., 9977, 9980, 9990]), + values=tensor([0.1470, 0.8510, 0.9446, ..., 0.3735, 0.6466, 0.3885]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.1499, 0.7404, 0.4886, ..., 0.1182, 0.4158, 0.3615]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 42.006776571273804 seconds + +[20.44, 20.28, 20.36, 20.28, 20.32, 20.16, 20.32, 20.32, 20.52, 20.56] +[20.6, 20.56, 20.76, 24.64, 26.0, 26.92, 30.76, 29.04, 28.68, 29.52, 30.24, 30.24, 27.76, 27.48, 26.4, 24.64, 24.48, 24.32, 24.28, 24.28, 24.28, 24.08, 24.28, 24.28, 24.36, 24.2, 24.24, 24.6, 24.64, 24.52, 24.72, 24.48, 24.64, 24.4, 24.52, 24.52, 24.36, 24.4, 24.4, 24.4, 24.48, 24.68, 24.76, 24.56, 24.36, 24.16, 24.24, 24.4, 24.4, 24.76, 24.88, 24.96, 24.92] +53.668771743774414 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.006776571273804, 'TIME_S_1KI': 420.06776571273804, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1323.3857918548583, 'W': 24.65839535462019} +[20.44, 20.28, 20.36, 20.28, 20.32, 20.16, 20.32, 20.32, 20.52, 20.56, 20.12, 19.92, 20.0, 20.28, 20.44, 20.72, 20.96, 21.04, 21.04, 21.04] +368.03999999999996 +18.401999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.006776571273804, 'TIME_S_1KI': 420.06776571273804, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1323.3857918548583, 'W': 24.65839535462019, 'J_1KI': 13233.857918548583, 'W_1KI': 246.58395354620188, 'W_D': 6.256395354620192, 'J_D': 335.77305422592167, 'W_D_1KI': 62.56395354620192, 'J_D_1KI': 625.6395354620192} diff --git 
a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..efd7bc8 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 67.20304369926453, "TIME_S_1KI": 672.0304369926453, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1846.4787199783327, "W": 24.29422238106021, "J_1KI": 18464.787199783328, "W_1KI": 242.9422238106021, "W_D": 5.818222381060206, "J_D": 442.2131174325942, "W_D_1KI": 58.18222381060206, "J_D_1KI": 581.8222381060207} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..f1c9a57 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.3 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 67.20304369926453} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2926, 5920, ..., 29993999, + 29997022, 30000000]), + col_indices=tensor([ 1, 4, 6, ..., 9978, 9982, 9992]), + values=tensor([0.3929, 0.6592, 0.7367, ..., 0.3321, 0.3012, 0.1502]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.6782, 0.5388, 0.0901, ..., 0.7339, 0.4235, 0.1483]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 67.20304369926453 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2926, 5920, ..., 29993999, + 29997022, 30000000]), + col_indices=tensor([ 1, 4, 6, ..., 9978, 9982, 9992]), + values=tensor([0.3929, 0.6592, 0.7367, ..., 0.3321, 0.3012, 0.1502]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.6782, 0.5388, 0.0901, ..., 0.7339, 0.4235, 0.1483]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 67.20304369926453 seconds + +[20.6, 20.8, 20.76, 20.64, 20.32, 20.48, 20.48, 20.52, 20.8, 20.6] +[20.64, 20.6, 20.6, 21.32, 22.6, 24.24, 25.68, 28.84, 30.4, 30.64, 31.4, 31.32, 28.4, 28.16, 27.72, 26.8, 26.8, 25.96, 24.6, 24.4, 24.12, 24.12, 24.12, 24.08, 24.36, 24.56, 24.8, 24.88, 24.88, 24.92, 24.92, 24.72, 24.64, 24.44, 24.28, 24.52, 24.68, 24.64, 24.68, 24.64, 24.64, 24.52, 24.76, 24.84, 24.68, 24.72, 24.68, 24.68, 24.76, 24.68, 24.52, 24.2, 24.12, 24.12, 24.24, 24.48, 24.64, 24.76, 24.76, 24.52, 24.28, 24.32, 24.12, 24.04, 24.32, 24.32, 24.6, 24.52, 24.76, 24.72, 24.36, 24.12, 24.16, 24.36, 24.48] +76.0048496723175 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 67.20304369926453, 'TIME_S_1KI': 672.0304369926453, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1846.4787199783327, 'W': 24.29422238106021} +[20.6, 20.8, 20.76, 20.64, 20.32, 20.48, 20.48, 20.52, 20.8, 20.6, 20.04, 20.16, 20.28, 20.16, 20.16, 20.64, 20.72, 20.72, 20.92, 20.68] +369.52000000000004 +18.476000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 67.20304369926453, 'TIME_S_1KI': 672.0304369926453, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1846.4787199783327, 'W': 24.29422238106021, 'J_1KI': 18464.787199783328, 'W_1KI': 242.9422238106021, 'W_D': 5.818222381060206, 'J_D': 442.2131174325942, 'W_D_1KI': 58.18222381060206, 'J_D_1KI': 581.8222381060207} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..e0e6919 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 145476, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.560847282409668, "TIME_S_1KI": 0.07259511728676668, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 314.8679765701293, "W": 22.125839021878733, "J_1KI": 2.1643980902013342, "W_1KI": 0.15209270960075016, "W_D": 3.6298390218787304, "J_D": 51.65544533538807, "W_D_1KI": 0.024951462934633413, "J_D_1KI": 0.00017151600906426773} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..29cdb63 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_1e-05.output @@ 
-0,0 +1,1307 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.015525579452514648} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([3898, 1482, 5519, 8868, 7883, 9892, 3663, 8094, 3265, + 5214, 2679, 8390, 8300, 1254, 5771, 2330, 2277, 5567, + 3590, 9876, 9425, 1312, 3675, 250, 2676, 6405, 193, + 4817, 1118, 5220, 848, 1258, 8124, 2361, 9824, 8605, + 9110, 7791, 9707, 7161, 2008, 5741, 2334, 3730, 757, + 2296, 932, 6682, 8936, 2942, 9026, 5748, 1994, 1085, + 8416, 9758, 9650, 6359, 2155, 2494, 3974, 6463, 3581, + 5365, 2085, 839, 8439, 7015, 1395, 6847, 8399, 8014, + 2130, 4017, 54, 2801, 9388, 5676, 8884, 4034, 7935, + 8947, 2234, 5545, 9207, 3191, 9648, 2875, 4561, 7931, + 3725, 5272, 5784, 4597, 9489, 9250, 7919, 1669, 9885, + 3310, 3149, 6923, 5545, 6508, 9800, 2657, 407, 655, + 406, 5436, 7564, 5016, 8520, 4483, 4450, 4851, 2131, + 3975, 4798, 7045, 7501, 7521, 8639, 1296, 7684, 6036, + 1810, 3941, 2660, 7466, 7025, 675, 3142, 6789, 8851, + 6431, 2897, 4149, 5159, 680, 7589, 1161, 4000, 1900, + 6580, 7108, 3052, 1112, 5611, 1198, 1887, 1274, 7907, + 3980, 3920, 7284, 8685, 2676, 8814, 2020, 2087, 7588, + 3473, 6720, 3346, 2385, 331, 4730, 8085, 823, 9752, + 6054, 4004, 3518, 6972, 6393, 1022, 1826, 136, 9104, + 644, 7087, 4535, 6495, 835, 3627, 1719, 5375, 3188, + 2721, 5370, 4060, 7632, 6334, 3349, 38, 8498, 3165, + 8955, 2406, 4105, 3127, 5298, 6106, 8762, 5107, 5923, + 5348, 1666, 7355, 5280, 5787, 8949, 6219, 1327, 3885, + 4849, 3598, 3254, 3228, 5330, 4493, 6213, 7289, 3249, + 8132, 1290, 6830, 5688, 301, 690, 2742, 8864, 5921, + 5003, 9181, 3079, 5148, 1551, 1694, 5480, 2170, 996, + 4985, 1075, 5138, 124, 940, 948, 790, 7486, 9030, + 1477, 9142, 6659, 1811, 9169, 4503, 6175, 4785, 257, + 6391, 3383, 3331, 6911, 1486, 8788, 912, 7174, 9477, + 2748, 6272, 6520, 2719, 8184, 7316, 355, 1823, 8556, + 1459, 3956, 2370, 4381, 4423, 3578, 3880, 2683, 6911, + 2226, 9500, 796, 7668, 4666, 352, 6328, 9286, 6457, + 6272, 7478, 7423, 9055, 4170, 4251, 4175, 8805, 3517, + 7463, 5717, 5192, 4123, 6402, 6096, 3259, 4246, 3250, + 9200, 9472, 913, 8843, 8722, 570, 5337, 2453, 5040, + 5362, 1911, 6859, 1882, 1285, 6680, 1244, 5808, 2588, + 6330, 9064, 794, 6569, 9800, 7433, 8463, 8846, 1304, + 7393, 935, 8025, 9550, 5525, 2976, 4949, 8851, 1570, + 3653, 6681, 4564, 728, 7281, 2542, 4708, 3948, 9376, + 6780, 9074, 2736, 9846, 1380, 937, 7606, 2545, 5198, + 5963, 9747, 3442, 3023, 85, 8214, 770, 1399, 3822, + 9430, 1851, 2996, 5209, 2392, 9546, 4627, 1175, 617, + 8182, 6735, 5491, 7308, 8708, 2805, 8080, 7395, 4480, + 7631, 718, 6417, 689, 5328, 6587, 329, 9209, 5321, + 1461, 6105, 9464, 1295, 7201, 6830, 6066, 6587, 4948, + 3548, 7991, 675, 5203, 1178, 3798, 316, 2004, 1726, + 4002, 1942, 3929, 
5033, 5856, 6830, 2915, 4600, 8654, + 5488, 5734, 2078, 667, 6513, 5791, 3009, 2836, 6123, + 6550, 1123, 1084, 663, 7685, 49, 494, 2703, 2647, + 7554, 6681, 5448, 7756, 1560, 7102, 8537, 9152, 84, + 3412, 2379, 6668, 3950, 3737, 3834, 9623, 5269, 4317, + 8632, 3929, 7262, 8925, 8077, 1607, 7516, 928, 9628, + 3158, 8912, 7204, 843, 5299, 6571, 6817, 7581, 3627, + 5903, 1128, 6080, 6794, 135, 8922, 789, 4054, 7312, + 4581, 2854, 8527, 6140, 4291, 3168, 7090, 1112, 9120, + 9629, 458, 4871, 6869, 7933, 6321, 81, 9105, 20, + 4065, 9879, 6324, 5948, 4258, 6176, 8280, 5550, 3648, + 7828, 4940, 925, 9064, 4130, 997, 220, 3952, 3265, + 6751, 778, 1658, 2790, 9143, 4446, 5474, 5369, 9890, + 175, 5804, 2578, 9295, 68, 9554, 875, 4714, 6630, + 6903, 8256, 9485, 6873, 788, 4576, 257, 4058, 9901, + 1081, 6164, 1099, 9686, 1784, 8594, 5780, 5851, 1800, + 5807, 2784, 1548, 5394, 1010, 5864, 5995, 2197, 3622, + 6360, 3236, 1985, 7074, 4983, 423, 3106, 5792, 3257, + 3522, 3832, 8509, 3852, 7499, 7269, 9610, 1857, 3828, + 1095, 6707, 489, 8713, 7327, 839, 4913, 6149, 7538, + 8425, 3515, 9652, 3014, 3558, 6510, 3484, 3152, 9750, + 2084, 2579, 5450, 9665, 6024, 8898, 7948, 302, 5315, + 4180, 7959, 8289, 7761, 8450, 7959, 3206, 7670, 9154, + 9147, 6208, 5499, 7616, 9052, 4253, 8243, 2955, 7028, + 8394, 3989, 1599, 2737, 7385, 5626, 8082, 6230, 7546, + 57, 2410, 2942, 700, 3122, 9243, 2814, 7397, 4819, + 1084, 7234, 7860, 4622, 6516, 8828, 7448, 8210, 8805, + 9395, 6187, 2031, 3767, 7179, 2583, 6085, 53, 4677, + 4729, 9923, 5613, 1409, 9440, 8359, 7777, 2897, 714, + 7282, 512, 8652, 6139, 3021, 3847, 1702, 2455, 7034, + 1994, 4895, 247, 8803, 6199, 492, 8970, 5329, 1576, + 3945, 8058, 9716, 5849, 5153, 3106, 3211, 3905, 6116, + 1826, 7235, 9824, 1073, 4092, 8941, 2805, 3916, 9074, + 3940, 6464, 5740, 9399, 1287, 3979, 6746, 3637, 4616, + 8686, 7537, 7730, 8467, 9287, 3649, 9249, 9531, 8797, + 8683, 9126, 8105, 4413, 4839, 9084, 7493, 1505, 4616, + 1315, 9867, 8108, 9105, 3235, 2976, 2185, 8767, 9318, + 1590, 5862, 8416, 8494, 8249, 1922, 2173, 387, 689, + 5537, 319, 2548, 8781, 3876, 2904, 3074, 5424, 2666, + 1126, 9321, 8588, 3445, 8617, 334, 9161, 2384, 5147, + 6895, 9089, 6921, 7186, 4121, 7073, 3155, 3080, 9515, + 7841, 4184, 6560, 3776, 3166, 1037, 8649, 3341, 5486, + 8702, 8561, 2034, 4122, 6170, 611, 3580, 3403, 2813, + 9678, 1682, 4710, 1831, 6141, 5603, 6459, 1617, 1704, + 3743, 8018, 4183, 2372, 4280, 5580, 5554, 685, 4906, + 6177, 8204, 7285, 3595, 6551, 4218, 1732, 9617, 2156, + 3300, 434, 1438, 2176, 7789, 6779, 4580, 226, 6725, + 8186, 2641, 2667, 1600, 4234, 2261, 5072, 8276, 7022, + 8494, 775, 6, 6308, 1899, 4053, 5784, 3072, 8025, + 9379, 1116, 5148, 7628, 5060, 7829, 5341, 3570, 8529, + 1061, 3503, 8581, 9989, 8482, 7388, 2356, 4100, 322, + 4769, 9793, 1301, 2512, 7579, 4298, 1600, 8953, 8920, + 1692, 4787, 9285, 898, 2698, 1475, 3001, 3611, 3866, + 9122, 3753, 1211, 3092, 2312, 1040, 1123, 4411, 7130, + 9430, 8051, 8329, 3156, 6985, 5781, 3991, 8161, 6751, + 1449, 9718, 369, 9063, 273, 371, 8003, 9858, 1365, + 1729, 6226, 4643, 9125, 2272, 8996, 3896, 1943, 4185, + 4415, 9955, 2412, 9510, 3232, 4806, 2200, 1990, 6139, + 6256, 358, 9382, 2583, 7365, 9846, 8260, 733, 2064, + 118, 7040, 8862, 4366, 6192, 8535, 9244, 9993, 8572, + 3490, 6291, 7873, 3279, 5895, 2996, 7172, 761, 8005, + 1773, 7176, 2507, 2938, 3630, 9271, 4295, 6683, 5600, + 2223, 796, 5209, 6697, 614, 4824, 298, 6536, 5113, + 5778, 5914, 2119, 5281, 1677, 5222, 2043, 1439, 5141, + 1903]), + values=tensor([8.8707e-01, 8.2479e-01, 
8.1424e-01, 6.0134e-01, + 2.1717e-01, 8.7330e-01, 7.2034e-01, 8.8373e-01, + 6.7753e-01, 7.2863e-01, 2.3933e-01, 7.4252e-01, + 7.9896e-02, 1.7017e-01, 7.4504e-01, 4.4774e-01, + 7.3251e-01, 7.9618e-01, 1.2655e-01, 1.0721e-01, + 6.0589e-01, 3.9688e-01, 1.0220e-01, 4.9557e-01, + 7.9026e-02, 8.0845e-01, 2.4031e-01, 1.3799e-02, + 3.6592e-01, 9.1889e-01, 4.4397e-01, 9.5919e-02, + 5.2901e-01, 6.3995e-01, 7.8787e-01, 7.3005e-01, + 5.9923e-01, 1.5700e-01, 6.1980e-01, 8.2992e-02, + 8.1762e-01, 6.8552e-01, 5.1150e-01, 6.6341e-01, + 9.1871e-01, 4.9767e-02, 8.7331e-01, 3.8092e-01, + 3.0824e-01, 3.9823e-01, 4.3039e-01, 9.3772e-01, + 1.4926e-01, 7.9505e-01, 8.0083e-01, 6.1216e-01, + 2.0931e-01, 6.1824e-01, 6.4771e-01, 5.6077e-01, + 7.8449e-01, 7.6306e-01, 7.5572e-01, 9.7267e-01, + 3.5240e-01, 5.5727e-01, 1.8393e-02, 9.6845e-01, + 6.0478e-02, 8.9628e-01, 4.4149e-01, 3.1202e-02, + 1.7476e-01, 9.2967e-01, 8.8825e-01, 9.2412e-01, + 9.8151e-01, 9.7789e-01, 3.3578e-01, 3.7791e-01, + 5.2634e-01, 5.3450e-01, 4.7320e-01, 3.5529e-02, + 8.2385e-01, 6.8358e-01, 9.8549e-01, 5.6940e-01, + 7.9720e-01, 2.6003e-01, 6.6890e-01, 1.7961e-01, + 6.2016e-01, 2.3928e-01, 1.8421e-02, 3.3260e-01, + 1.0002e-01, 8.5414e-02, 3.1758e-01, 3.3376e-01, + 6.7478e-01, 9.7524e-02, 4.6108e-01, 1.0332e-02, + 4.0865e-01, 3.7106e-02, 6.9802e-01, 8.7269e-01, + 8.2593e-01, 9.5227e-01, 3.3687e-01, 9.4058e-01, + 7.4500e-01, 3.6415e-01, 4.5557e-01, 9.7232e-01, + 5.1097e-01, 5.5568e-01, 2.7656e-01, 3.3562e-01, + 7.3346e-01, 7.7270e-01, 9.0672e-01, 2.6724e-01, + 2.5245e-01, 4.3405e-01, 6.8147e-01, 5.7156e-01, + 8.0443e-01, 2.9648e-01, 5.6969e-01, 8.6520e-01, + 4.0162e-01, 7.0064e-01, 9.6519e-01, 7.6895e-01, + 3.9290e-01, 8.0828e-01, 1.5562e-02, 5.5538e-01, + 9.3270e-01, 4.4461e-01, 7.5449e-01, 9.9752e-01, + 3.2640e-01, 5.5045e-01, 9.9590e-01, 5.7782e-01, + 3.0729e-01, 9.4262e-02, 9.6363e-01, 7.8404e-01, + 4.7225e-01, 2.8178e-01, 4.0082e-01, 4.3801e-02, + 9.9491e-01, 7.4034e-01, 6.3304e-01, 7.9732e-01, + 3.8854e-02, 3.3586e-01, 1.2839e-01, 6.2910e-01, + 1.3753e-01, 5.4593e-01, 1.8915e-01, 8.0045e-01, + 1.5614e-02, 5.9397e-01, 6.1183e-01, 5.9631e-02, + 2.4885e-01, 1.7688e-01, 3.3617e-01, 8.6754e-01, + 6.9855e-01, 9.5329e-01, 3.7958e-01, 4.4295e-01, + 3.9831e-01, 5.8230e-02, 5.4947e-01, 1.1816e-01, + 2.4831e-01, 2.1298e-01, 9.6579e-01, 8.9969e-01, + 3.8227e-01, 9.3422e-01, 1.6318e-01, 1.9859e-01, + 9.2078e-01, 9.3321e-02, 8.2575e-01, 7.2248e-01, + 8.1688e-01, 7.9359e-01, 5.1287e-01, 6.0873e-01, + 3.4040e-01, 4.0884e-01, 3.0602e-01, 7.1454e-01, + 9.3968e-01, 1.3258e-01, 3.7262e-01, 2.4053e-01, + 2.6331e-01, 9.9278e-01, 5.5712e-01, 1.1806e-01, + 3.2942e-01, 6.4174e-01, 6.8423e-01, 2.5605e-01, + 9.5326e-01, 6.1876e-01, 9.1393e-02, 9.7102e-01, + 6.2910e-01, 9.9254e-01, 6.9186e-01, 7.1631e-01, + 3.8637e-01, 4.1415e-01, 9.1739e-01, 9.1011e-01, + 7.0735e-01, 2.6752e-01, 6.7070e-02, 5.6268e-01, + 8.9992e-01, 7.8722e-02, 9.8479e-01, 8.4552e-02, + 4.0442e-01, 5.1450e-01, 8.6027e-01, 3.6511e-01, + 3.6834e-02, 9.6351e-01, 8.2693e-01, 8.1645e-01, + 7.5576e-01, 5.1891e-01, 3.3959e-01, 1.2834e-01, + 1.4061e-02, 2.5165e-01, 4.1608e-02, 4.2840e-01, + 9.2551e-01, 6.1411e-01, 6.3363e-01, 4.0275e-01, + 5.9319e-01, 2.9263e-01, 2.1565e-01, 4.4295e-01, + 5.1955e-01, 4.5938e-01, 8.5177e-01, 9.2589e-01, + 6.1009e-01, 7.1321e-01, 3.9839e-01, 6.6454e-01, + 4.9760e-01, 7.4274e-01, 6.1422e-01, 9.7794e-01, + 2.5439e-01, 7.2161e-01, 8.4072e-01, 9.1789e-01, + 2.0604e-01, 4.9255e-01, 3.0544e-01, 7.3972e-01, + 5.1823e-01, 2.7708e-02, 1.0035e-01, 3.6838e-01, + 6.9918e-01, 9.7863e-01, 
4.6654e-01, 7.0863e-01, + 5.2875e-01, 1.3497e-01, 5.8760e-01, 1.5144e-01, + 8.5865e-01, 5.7095e-01, 4.1667e-01, 3.1601e-02, + 2.1484e-01, 8.1898e-02, 8.0481e-02, 3.2171e-01, + 4.1080e-01, 3.7800e-01, 2.5945e-01, 7.1999e-01, + 8.7656e-02, 6.2152e-01, 7.8688e-01, 5.0297e-01, + 7.9838e-01, 1.5162e-01, 6.3852e-01, 8.1854e-02, + 5.8775e-01, 2.1661e-01, 1.9911e-01, 6.6725e-01, + 6.6917e-01, 6.4379e-01, 5.7853e-01, 5.7889e-01, + 3.4456e-01, 3.7407e-01, 8.9088e-01, 9.6166e-01, + 3.6272e-01, 3.1812e-01, 5.3217e-02, 6.0500e-01, + 8.7168e-01, 9.1038e-01, 4.5582e-01, 6.3918e-02, + 6.9142e-01, 3.2649e-01, 9.8192e-01, 6.0464e-01, + 9.6815e-01, 1.0503e-01, 7.1600e-01, 4.3663e-01, + 8.7706e-01, 4.5971e-02, 6.6596e-01, 9.3079e-01, + 1.5823e-01, 3.2003e-01, 1.2870e-01, 6.3500e-01, + 5.3860e-01, 4.0744e-01, 2.8252e-01, 9.0788e-01, + 9.4387e-01, 3.8651e-01, 4.5116e-01, 7.6362e-01, + 9.0434e-01, 3.7603e-01, 8.1415e-01, 3.1082e-01, + 2.3815e-01, 3.2440e-01, 2.3116e-01, 3.3219e-01, + 4.3396e-01, 2.5441e-01, 3.3591e-01, 3.7212e-01, + 9.9808e-01, 7.3589e-01, 5.1922e-01, 3.1994e-01, + 5.7040e-01, 9.8825e-01, 6.5887e-01, 9.4511e-01, + 1.4550e-01, 6.0308e-01, 6.0994e-01, 3.0875e-01, + 9.5170e-01, 7.2979e-01, 1.0265e-03, 4.4914e-01, + 4.0148e-01, 5.7954e-01, 3.6786e-01, 1.5804e-01, + 7.6112e-01, 2.7800e-01, 3.6618e-01, 3.8852e-01, + 6.2086e-01, 7.5561e-01, 3.3979e-01, 9.2805e-01, + 2.8089e-01, 2.6715e-01, 4.1424e-03, 5.2598e-01, + 6.1266e-01, 2.0902e-01, 5.0368e-01, 8.0383e-01, + 7.4025e-01, 4.4304e-01, 7.4164e-01, 1.8111e-01, + 9.0030e-01, 2.8654e-01, 5.1264e-02, 6.6938e-01, + 4.7722e-03, 4.2012e-01, 6.0558e-01, 6.9264e-01, + 1.6392e-01, 8.8908e-01, 5.7035e-01, 1.4618e-01, + 6.2172e-01, 4.4803e-01, 4.2737e-01, 2.5387e-01, + 7.3742e-01, 4.9420e-01, 5.4518e-01, 4.1535e-01, + 9.8074e-01, 2.9175e-01, 7.5267e-01, 5.0346e-01, + 3.7893e-01, 3.1575e-01, 1.8698e-01, 4.7304e-01, + 9.0176e-02, 6.7209e-01, 3.8501e-01, 3.2009e-01, + 6.0189e-02, 6.3261e-01, 5.9643e-01, 5.9368e-01, + 8.3045e-01, 7.8057e-01, 7.7127e-02, 6.9980e-01, + 7.4648e-01, 4.1912e-01, 6.6123e-01, 8.0898e-01, + 9.4798e-01, 6.1651e-01, 7.6225e-01, 4.9304e-02, + 3.8411e-01, 5.1162e-01, 4.2531e-01, 4.4692e-01, + 9.0702e-01, 9.2927e-01, 7.5881e-01, 6.8857e-01, + 7.8800e-01, 5.1040e-01, 1.1781e-02, 2.1146e-01, + 2.3732e-01, 8.1244e-01, 4.9067e-01, 8.1642e-01, + 1.0624e-01, 6.6891e-01, 8.1822e-01, 5.6763e-01, + 1.5228e-01, 7.8509e-02, 9.4350e-02, 9.0172e-01, + 3.1038e-01, 4.9468e-01, 6.6537e-01, 5.0303e-01, + 2.8804e-01, 8.0134e-01, 3.1829e-02, 8.0496e-01, + 4.3146e-01, 2.1680e-01, 8.5241e-01, 4.0167e-01, + 6.2134e-01, 5.0197e-01, 6.2379e-01, 4.6456e-01, + 8.5710e-01, 3.8621e-01, 9.3479e-02, 8.4115e-01, + 1.3886e-01, 4.5770e-01, 4.5754e-01, 8.0833e-01, + 4.9763e-01, 2.5510e-01, 9.6840e-01, 1.9707e-01, + 4.0702e-01, 7.7955e-01, 5.7241e-01, 2.2345e-01, + 4.6512e-02, 8.3993e-01, 5.3641e-01, 6.2885e-01, + 3.4946e-01, 3.5017e-01, 4.0024e-01, 1.4394e-01, + 5.9394e-01, 6.1531e-01, 3.5865e-01, 2.2163e-03, + 4.5091e-01, 8.5622e-01, 9.8299e-01, 2.6351e-01, + 1.5344e-01, 8.8509e-01, 9.4732e-01, 2.3528e-01, + 4.9871e-04, 9.2326e-01, 6.8197e-01, 7.7192e-01, + 3.7885e-01, 1.9122e-01, 4.2807e-01, 4.4857e-01, + 1.8060e-01, 5.1258e-01, 7.8550e-01, 1.0826e-02, + 4.0016e-01, 5.1045e-01, 9.8409e-03, 9.4114e-01, + 7.7132e-01, 5.9286e-01, 4.3541e-01, 7.4579e-01, + 9.4221e-02, 2.5992e-01, 8.5052e-01, 6.8925e-01, + 3.1627e-01, 2.0341e-01, 2.6395e-01, 9.5170e-01, + 2.6815e-02, 5.5151e-01, 9.4061e-01, 7.8800e-01, + 7.9643e-01, 1.9611e-01, 3.9293e-01, 8.1546e-01, + 6.0137e-01, 7.1575e-01, 
6.3150e-01, 4.2849e-01, + 7.8772e-01, 3.2756e-01, 8.6487e-01, 2.6993e-01, + 8.7612e-01, 3.3995e-01, 1.0473e-01, 9.2012e-01, + 3.5880e-01, 3.0548e-01, 7.9299e-02, 7.5157e-01, + 4.4155e-02, 4.3668e-01, 6.7371e-01, 3.6075e-01, + 8.1871e-01, 4.1564e-01, 7.8281e-01, 7.8903e-01, + 1.2638e-01, 8.9728e-02, 9.2093e-01, 6.7936e-01, + 2.5980e-01, 1.6090e-01, 1.8550e-01, 1.4940e-01, + 7.0870e-01, 9.5589e-02, 5.0303e-01, 6.9889e-01, + 1.5938e-01, 2.2200e-01, 6.2784e-01, 1.5446e-01, + 1.2124e-02, 8.7384e-01, 6.0268e-02, 8.1065e-02, + 9.6171e-01, 5.4637e-01, 6.6976e-01, 6.1643e-01, + 1.7711e-01, 6.3124e-01, 7.1647e-01, 1.8549e-01, + 7.9711e-01, 8.6392e-01, 6.8736e-01, 7.6796e-01, + 3.6441e-01, 7.4881e-01, 7.4269e-01, 3.0689e-01, + 7.3449e-01, 5.6269e-01, 8.7710e-01, 8.8517e-01, + 6.5795e-01, 4.5779e-01, 9.4681e-01, 5.7892e-01, + 5.7829e-01, 7.6546e-02, 8.1582e-01, 9.6362e-01, + 9.3306e-01, 5.3107e-01, 9.4367e-01, 9.9397e-01, + 2.1635e-02, 3.3178e-01, 6.0266e-01, 3.8334e-01, + 9.5467e-01, 6.0713e-01, 9.6563e-01, 1.5662e-01, + 4.2960e-02, 4.6865e-01, 4.7849e-01, 1.1095e-01, + 4.4027e-01, 7.9249e-01, 1.3336e-01, 1.8189e-01, + 6.7993e-01, 1.6154e-01, 3.7795e-01, 9.8899e-01, + 6.0252e-01, 8.8063e-01, 5.0927e-01, 8.2521e-01, + 6.4850e-01, 9.2767e-01, 1.6998e-01, 5.2612e-01, + 5.6923e-01, 1.4796e-01, 8.0528e-01, 4.2194e-01, + 6.5700e-01, 6.3868e-01, 1.8030e-01, 5.5921e-01, + 2.3978e-01, 2.7126e-01, 1.5009e-01, 4.5821e-01, + 5.9677e-01, 6.0861e-01, 2.3955e-01, 7.1788e-01, + 3.6544e-01, 3.7107e-01, 7.6629e-01, 1.4653e-01, + 9.6017e-01, 2.9226e-01, 9.6527e-01, 5.7781e-01, + 3.1803e-01, 3.4564e-01, 8.5270e-01, 8.0335e-01, + 7.2978e-01, 7.4313e-01, 2.9239e-01, 9.3231e-02, + 3.7290e-01, 9.6242e-01, 9.3227e-01, 5.2356e-02, + 4.5231e-01, 7.0243e-01, 2.0260e-01, 6.0563e-01, + 6.3537e-01, 7.0126e-03, 2.0549e-01, 6.4348e-01, + 1.1234e-01, 9.1736e-01, 9.5159e-01, 2.5712e-01, + 3.6692e-02, 2.6733e-01, 6.4886e-01, 7.0188e-01, + 7.3541e-01, 7.5349e-02, 1.9926e-01, 6.8626e-01, + 9.8980e-01, 5.8998e-01, 9.5392e-01, 2.7051e-02, + 6.8310e-01, 1.5713e-01, 5.7251e-01, 9.5125e-02, + 6.3484e-01, 8.0309e-01, 3.5265e-01, 3.2764e-01, + 4.8770e-01, 9.2443e-01, 3.0410e-01, 5.5153e-02, + 4.4606e-01, 1.5785e-01, 6.6155e-01, 1.6335e-01, + 1.6467e-01, 8.6871e-02, 9.3798e-01, 5.7792e-01, + 5.6320e-01, 7.8288e-01, 4.2018e-01, 9.8989e-01, + 1.5890e-02, 5.4217e-01, 7.9690e-01, 6.7578e-01, + 1.7863e-01, 2.4870e-01, 2.4165e-01, 9.3261e-01, + 4.3334e-01, 4.5406e-01, 1.2943e-01, 2.4438e-01, + 6.1877e-01, 1.5403e-01, 2.3696e-01, 6.0019e-01, + 7.6954e-02, 4.8879e-01, 3.9922e-01, 6.1722e-02, + 9.8407e-01, 1.3447e-01, 3.0239e-01, 9.1036e-01, + 9.3287e-01, 4.3368e-01, 2.4395e-01, 4.6772e-01, + 1.3810e-01, 4.4311e-01, 1.8397e-01, 1.7834e-01, + 5.5151e-01, 6.6086e-01, 8.9171e-01, 6.7263e-01, + 8.5315e-01, 7.7935e-01, 9.3442e-01, 4.3695e-01, + 1.5108e-01, 3.6121e-01, 3.5574e-01, 7.4972e-01, + 8.1697e-01, 8.9608e-01, 4.1783e-01, 5.6045e-02, + 2.1298e-01, 7.5669e-01, 2.7640e-01, 8.8664e-01, + 7.8179e-01, 2.7053e-02, 1.3976e-01, 6.5301e-01, + 2.0818e-01, 9.5708e-02, 7.3530e-01, 5.6532e-01, + 1.9201e-01, 7.5872e-01, 8.5304e-01, 7.6696e-01, + 2.3298e-01, 7.3064e-02, 4.5750e-01, 3.0491e-01, + 8.4562e-01, 5.0911e-01, 8.4650e-01, 1.7022e-01, + 1.6887e-01, 2.5773e-01, 4.1362e-01, 2.3742e-01, + 9.0183e-01, 9.8016e-01, 4.6076e-02, 3.1594e-01, + 6.9052e-01, 9.5460e-01, 9.5283e-01, 7.5063e-01, + 2.9345e-01, 3.4141e-01, 4.1893e-01, 5.2709e-01, + 7.8712e-01, 7.9375e-01, 9.2839e-01, 8.4541e-01, + 6.4293e-01, 9.0380e-01, 6.3515e-01, 8.8752e-01, + 4.0009e-03, 5.8117e-02, 
3.0656e-02, 8.5350e-01, + 7.0642e-01, 1.4772e-02, 7.6152e-01, 9.4227e-01, + 1.6103e-01, 5.6090e-01, 8.9968e-02, 4.7046e-01, + 5.8490e-01, 8.4874e-01, 9.9450e-01, 7.0178e-01, + 8.8232e-01, 9.9210e-01, 3.7964e-01, 5.7953e-01, + 7.0927e-01, 5.9254e-01, 3.0037e-01, 1.9808e-01, + 1.3903e-01, 6.7066e-01, 4.3748e-01, 7.7025e-01, + 8.2460e-01, 5.1989e-01, 6.9893e-01, 3.1880e-02, + 6.5270e-01, 7.5305e-01, 2.6387e-01, 2.3470e-01, + 7.7775e-01, 5.4475e-02, 8.4215e-01, 7.9833e-01, + 6.8635e-01, 5.8313e-01, 6.2967e-01, 9.3181e-02, + 2.5176e-01, 2.8259e-01, 8.5278e-01, 2.0479e-01, + 3.2888e-01, 4.0002e-01, 4.2985e-01, 1.6745e-02, + 6.4703e-01, 3.8685e-01, 2.7396e-01, 4.0083e-02, + 6.9795e-01, 3.2896e-01, 9.4557e-01, 6.2325e-01, + 6.2040e-01, 7.2851e-01, 1.4586e-01, 2.2619e-01, + 8.3001e-01, 6.9165e-01, 9.3075e-01, 8.4699e-02, + 9.1120e-01, 6.1131e-01, 6.7315e-01, 6.2863e-01, + 6.1502e-02, 6.6647e-01, 3.9426e-01, 3.4242e-01, + 2.4155e-01, 7.0349e-01, 6.4289e-01, 2.0874e-01, + 3.7193e-03, 1.9972e-01, 6.8698e-01, 1.8595e-01, + 5.9335e-01, 5.0666e-01, 6.8596e-01, 7.5661e-01, + 4.4919e-01, 9.2522e-01, 3.4907e-01, 4.9716e-01, + 5.3208e-01, 1.8718e-01, 8.8527e-01, 3.9827e-01, + 3.4328e-01, 2.1804e-01, 3.2701e-01, 7.1900e-01, + 8.8112e-01, 5.6475e-01, 2.6203e-01, 5.6843e-01, + 3.6244e-01, 7.4864e-01, 4.2898e-02, 1.0206e-01, + 5.8229e-01, 3.6364e-01, 9.0089e-01, 6.3906e-01, + 9.4625e-01, 1.5743e-01, 7.0745e-01, 9.7285e-01, + 3.2107e-01, 4.6844e-01, 5.0980e-01, 3.0922e-01, + 6.4165e-01, 5.8791e-01, 5.8697e-01, 5.0368e-01, + 1.3440e-01, 7.0304e-01, 9.5832e-01, 4.9678e-01, + 7.5464e-01, 5.7994e-01, 2.8987e-01, 9.1487e-03, + 6.1330e-01, 3.2294e-01, 3.1984e-01, 5.8267e-01, + 6.6203e-01, 7.6829e-01, 9.8125e-01, 6.4370e-01, + 6.1405e-01, 2.6304e-01, 8.6038e-01, 6.6028e-01, + 1.6081e-02, 9.8894e-01, 5.8987e-01, 3.5565e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.3198, 0.6605, 0.6944, ..., 0.6134, 0.5235, 0.6507]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.015525579452514648 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 67630 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.8812994956970215} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), + col_indices=tensor([8647, 4654, 5499, 4449, 7451, 3553, 6737, 3727, 9375, + 328, 3652, 7464, 1792, 3337, 3219, 9816, 8774, 722, + 7286, 8975, 5239, 5207, 7520, 2047, 9786, 1775, 7824, + 1686, 6523, 1807, 7651, 3617, 6569, 7853, 4440, 4649, + 2754, 2264, 1878, 5699, 1779, 6715, 4641, 6933, 4479, + 8379, 2007, 678, 8640, 712, 2714, 3092, 8404, 958, + 6892, 5229, 232, 1407, 5081, 5811, 8597, 2045, 959, + 9609, 9725, 137, 6909, 8887, 5531, 9907, 8259, 3598, + 3106, 4242, 1459, 975, 9373, 279, 5289, 239, 4982, + 7449, 2338, 3106, 7326, 5651, 7345, 5951, 2276, 9406, + 1555, 187, 5936, 3172, 5886, 8072, 5078, 8086, 5802, + 9928, 7066, 2942, 5468, 8880, 3550, 6036, 7923, 2059, + 5727, 8966, 3271, 5191, 8019, 143, 8926, 3410, 1927, + 5129, 6995, 4214, 8413, 2923, 5122, 289, 6094, 9084, + 3943, 7811, 5472, 4461, 9629, 2075, 1933, 9084, 666, + 1579, 5138, 7074, 4251, 2116, 8273, 7982, 7533, 4948, + 9847, 5061, 4404, 5976, 6566, 4338, 4776, 2382, 9615, + 4466, 7669, 7291, 5642, 7421, 9834, 953, 7266, 4045, + 4744, 4897, 9364, 1730, 4368, 9844, 1957, 9401, 1180, + 2799, 2434, 2079, 3790, 944, 1503, 5578, 6431, 8745, + 9367, 9630, 9634, 3838, 2042, 1290, 752, 5314, 9275, + 1425, 3780, 3456, 7397, 6659, 2673, 3050, 213, 3865, + 108, 374, 8126, 9474, 7399, 1622, 8557, 3681, 4079, + 3800, 9932, 1087, 1720, 2470, 9941, 7514, 803, 6483, + 6709, 7368, 2720, 2999, 6038, 6911, 1766, 360, 1533, + 1807, 5357, 5158, 2293, 7675, 8708, 6003, 955, 3712, + 7798, 1523, 5030, 4107, 8054, 1811, 47, 8376, 4369, + 3092, 3036, 7834, 3657, 7158, 1452, 2555, 7073, 7909, + 7008, 2655, 9224, 6660, 9253, 1818, 4261, 3550, 579, + 5989, 8739, 5752, 2907, 2571, 1252, 8072, 119, 3969, + 1752, 2664, 8669, 8282, 3748, 1807, 2214, 2082, 1936, + 5224, 9356, 8771, 9920, 2445, 9815, 3980, 583, 9973, + 8012, 4204, 3618, 3650, 5388, 358, 1532, 1955, 4875, + 158, 6910, 686, 8612, 6716, 2830, 2503, 2259, 8773, + 9299, 2216, 6023, 9677, 1449, 3531, 9724, 643, 8636, + 3334, 5334, 3817, 8516, 9990, 2373, 5938, 4650, 9304, + 4529, 4507, 9739, 8866, 5608, 724, 4488, 9831, 257, + 2286, 7193, 1443, 6012, 8866, 327, 2031, 3505, 4806, + 6614, 384, 3462, 494, 3168, 3506, 6739, 5696, 7354, + 9211, 6058, 9379, 1835, 4352, 2144, 4085, 70, 1475, + 9132, 9185, 6667, 9380, 4485, 7941, 1673, 3774, 5096, + 1961, 1035, 737, 664, 885, 9541, 1359, 1408, 8312, + 5862, 2042, 7184, 2999, 9384, 5279, 8734, 1236, 2120, + 7519, 5331, 9850, 3362, 7044, 5123, 9555, 7957, 1966, + 4504, 9159, 7008, 2116, 6625, 9822, 5379, 5010, 96, + 6277, 7752, 9818, 4290, 1742, 8951, 1186, 3862, 2517, + 9435, 8552, 3364, 3956, 4806, 2389, 9824, 8423, 1737, + 3271, 5071, 836, 5580, 9804, 5970, 4791, 6708, 506, + 5852, 8007, 4051, 4343, 4073, 9299, 5039, 3444, 1200, + 3576, 7481, 5426, 7249, 286, 6046, 6879, 1392, 3912, + 5483, 3682, 6743, 235, 4404, 9434, 5444, 7481, 6774, + 1351, 3311, 6546, 8886, 4270, 2502, 694, 1728, 6058, + 5335, 9753, 5031, 6419, 2685, 4780, 4575, 2462, 1331, + 6584, 7477, 346, 1448, 1524, 8493, 3416, 5801, 5122, + 9403, 20, 3995, 7626, 7649, 4382, 6018, 4365, 415, + 1738, 1419, 3860, 5817, 1889, 5409, 6227, 459, 3324, + 462, 8734, 6319, 6608, 2415, 1202, 8065, 3675, 3079, + 7072, 1983, 4067, 8193, 238, 8863, 1444, 1643, 9833, + 9735, 3625, 4690, 2588, 2956, 8114, 2900, 2471, 2097, + 9641, 705, 9577, 9145, 5785, 4597, 41, 1057, 2855, + 1577, 3417, 2854, 6268, 5170, 6633, 6168, 6675, 610, + 79, 3397, 4486, 8438, 1953, 8993, 2, 9913, 3737, + 8115, 
1773, 5791, 7003, 8850, 7715, 7068, 2545, 22, + 2997, 7784, 2193, 8115, 4276, 9097, 8603, 2402, 6620, + 4490, 3390, 8009, 9364, 1928, 6058, 1117, 8459, 7279, + 9802, 937, 4414, 269, 5996, 3388, 2213, 8303, 1471, + 6791, 5503, 786, 3105, 6092, 3993, 5432, 5514, 1482, + 284, 4826, 3661, 1299, 3045, 947, 2784, 6465, 53, + 3237, 3325, 8755, 4946, 5582, 9687, 4216, 8261, 8643, + 4128, 9567, 5015, 2626, 7520, 7301, 1665, 4611, 1178, + 4466, 8878, 105, 9091, 656, 38, 6417, 9503, 8593, + 85, 2340, 2943, 6566, 86, 5872, 5085, 7636, 6075, + 8279, 3927, 2818, 3066, 5835, 9800, 3701, 8166, 5053, + 2914, 5582, 6646, 4031, 152, 1816, 2513, 6513, 9514, + 7368, 2044, 9261, 8730, 2154, 9077, 900, 5045, 1209, + 7769, 5333, 2182, 9868, 7515, 6495, 8081, 9467, 485, + 4842, 7595, 346, 3896, 4589, 7304, 102, 9131, 1881, + 7363, 4667, 3124, 1494, 7640, 2085, 5969, 327, 8422, + 4445, 7075, 7495, 4090, 7290, 414, 7042, 2615, 5534, + 9631, 6285, 5895, 6803, 3363, 109, 1042, 5715, 699, + 6263, 3089, 8671, 1903, 1932, 1891, 6086, 2033, 4653, + 236, 8358, 3454, 2498, 1012, 1022, 7187, 8474, 6396, + 2842, 3209, 5149, 2014, 5465, 5988, 5452, 2136, 2496, + 5828, 8399, 834, 1934, 4582, 2435, 4518, 8149, 1215, + 2360, 572, 7934, 7805, 7177, 4755, 7951, 6277, 3279, + 3301, 4324, 7617, 3138, 4131, 8647, 4535, 537, 9550, + 7765, 7501, 7201, 79, 676, 5103, 21, 5905, 5104, + 8383, 1494, 9361, 748, 2928, 9945, 8214, 6916, 8639, + 9369, 2781, 3513, 6667, 4267, 1563, 9150, 329, 6830, + 2855, 9810, 1621, 8822, 1933, 9318, 1258, 8146, 8930, + 8826, 5989, 2187, 6638, 9013, 7520, 9266, 3394, 1048, + 8429, 6417, 8432, 9384, 9431, 9345, 7911, 313, 7347, + 7640, 9284, 302, 4482, 517, 9019, 2279, 7907, 7949, + 94, 3374, 6951, 3507, 2953, 7996, 9649, 4108, 9885, + 1601, 9214, 4791, 7898, 3000, 8842, 4620, 6241, 6616, + 2733, 147, 5293, 9372, 2648, 2261, 1230, 6095, 4677, + 2716, 6197, 8800, 7896, 4094, 6693, 1892, 1616, 5199, + 6890, 5117, 7860, 6533, 6255, 6453, 8306, 6190, 616, + 6010, 482, 4524, 4452, 5701, 2687, 6600, 6294, 7700, + 8616, 665, 690, 6448, 6296, 6304, 8118, 8907, 31, + 6487, 9750, 5850, 4854, 162, 383, 206, 7720, 7300, + 1965, 2943, 9050, 8357, 4610, 1564, 3450, 7501, 5942, + 805, 2704, 8681, 8214, 7783, 2722, 8026, 3060, 4714, + 1241, 6614, 1767, 9333, 6280, 2376, 4705, 5029, 507, + 2510, 3816, 3176, 4093, 4640, 1394, 4466, 1634, 5625, + 9616, 9856, 5608, 847, 5783, 8919, 4444, 2893, 8647, + 657, 2075, 373, 2227, 3120, 4500, 9463, 5337, 7565, + 9168, 5069, 8132, 5707, 2138, 1858, 3288, 6897, 4099, + 3082, 484, 3044, 2456, 7707, 5443, 5260, 805, 9070, + 2483, 8225, 9487, 1239, 1687, 3954, 6303, 8989, 5541, + 7977, 7424, 7522, 2231, 1796, 5265, 9895, 4183, 3338, + 1377]), + values=tensor([2.6711e-01, 1.5256e-01, 4.7895e-01, 9.7177e-01, + 3.3217e-01, 4.8662e-01, 9.6837e-02, 4.9364e-02, + 4.1631e-01, 7.9638e-01, 4.9818e-01, 1.0329e-01, + 8.5050e-01, 3.5207e-01, 8.8170e-01, 1.3600e-01, + 8.9499e-01, 8.9928e-01, 9.5856e-02, 2.0110e-01, + 6.7254e-01, 8.1814e-01, 1.4681e-01, 1.3722e-01, + 8.1086e-01, 1.9533e-01, 8.2412e-01, 8.9212e-01, + 9.7206e-01, 6.8201e-02, 3.7707e-01, 2.6646e-01, + 7.2796e-01, 8.8582e-01, 2.5316e-01, 6.6138e-01, + 7.9265e-01, 3.3510e-01, 6.5242e-02, 9.4971e-01, + 4.5732e-01, 4.8267e-02, 1.9899e-01, 8.6938e-01, + 2.0791e-01, 4.0034e-01, 7.8684e-01, 4.0125e-01, + 2.0749e-02, 7.8661e-01, 1.5651e-02, 2.8427e-01, + 1.1615e-01, 8.5568e-01, 1.5852e-01, 5.8611e-02, + 3.7139e-01, 9.1537e-01, 7.3291e-01, 6.1366e-02, + 9.4998e-01, 3.3764e-01, 5.4722e-01, 2.2605e-01, + 9.0416e-01, 2.5427e-01, 8.4136e-01, 5.3180e-01, + 
9.3963e-01, 8.3464e-01, 7.9459e-02, 2.1388e-01, + 7.0909e-01, 1.5803e-01, 4.7915e-01, 5.6413e-01, + 7.7630e-01, 9.0240e-01, 1.3854e-01, 7.6111e-01, + 5.9490e-01, 7.5820e-01, 4.9648e-01, 3.5839e-01, + 9.7634e-01, 6.3633e-01, 6.7076e-01, 1.2479e-01, + 6.1116e-01, 2.5072e-01, 3.9865e-01, 3.1353e-01, + 8.0245e-01, 8.9919e-01, 7.6141e-01, 5.2578e-02, + 6.3377e-01, 3.9030e-01, 5.8898e-01, 9.7807e-01, + 6.7515e-02, 9.7033e-01, 5.3258e-01, 2.4756e-01, + 6.1442e-01, 5.7467e-01, 9.1736e-01, 3.8045e-01, + 6.4755e-01, 8.4549e-01, 3.4941e-02, 2.2137e-01, + 2.6165e-01, 1.5812e-01, 4.4836e-01, 4.0372e-01, + 6.8160e-01, 3.4621e-01, 3.1729e-01, 7.9594e-01, + 8.5949e-03, 4.1101e-01, 2.9323e-01, 3.0476e-01, + 2.1348e-01, 1.9134e-01, 9.2995e-01, 5.6460e-01, + 1.6420e-01, 4.6461e-01, 1.6407e-01, 9.0001e-01, + 2.1794e-01, 4.7626e-01, 9.9720e-01, 1.9139e-01, + 7.5819e-01, 6.4924e-01, 2.8439e-01, 8.1343e-01, + 9.9954e-01, 2.0023e-01, 1.9576e-01, 2.2422e-01, + 3.9389e-01, 4.0590e-01, 9.4587e-01, 9.5859e-01, + 9.1196e-01, 3.3142e-01, 6.5074e-02, 8.1017e-01, + 2.6550e-01, 5.0111e-01, 4.9418e-01, 3.1742e-01, + 4.8848e-01, 7.9520e-01, 2.2326e-01, 9.0457e-01, + 2.3438e-01, 3.5241e-01, 8.5370e-01, 2.5089e-01, + 9.9178e-01, 5.9049e-01, 1.4631e-01, 3.6536e-02, + 5.5173e-01, 4.9073e-01, 4.1231e-01, 2.7039e-01, + 3.3169e-01, 2.5824e-01, 9.4158e-03, 2.0982e-01, + 3.0498e-01, 8.5326e-01, 4.7060e-01, 7.8361e-01, + 1.3420e-01, 2.0663e-01, 7.7739e-01, 5.1495e-01, + 1.6129e-01, 6.9651e-01, 1.0713e-02, 6.5069e-01, + 5.6758e-01, 4.3940e-01, 8.3930e-01, 6.0393e-01, + 3.0148e-01, 9.9091e-02, 5.5146e-02, 3.0113e-01, + 7.6004e-01, 5.5121e-01, 4.1376e-01, 3.0646e-01, + 8.9187e-01, 3.8609e-01, 4.1549e-01, 2.1979e-01, + 5.1501e-01, 2.2452e-01, 1.4563e-01, 1.4372e-02, + 4.8200e-02, 5.2964e-01, 1.1575e-03, 3.5902e-01, + 7.5079e-01, 3.0657e-01, 9.7819e-01, 6.8395e-01, + 5.8067e-01, 8.4327e-01, 5.8382e-01, 7.1746e-01, + 2.3417e-03, 4.3092e-01, 9.2003e-01, 9.2056e-01, + 3.9954e-01, 7.7219e-01, 1.6823e-01, 6.3088e-01, + 9.6458e-01, 8.5932e-02, 6.4564e-02, 8.0078e-01, + 2.8911e-01, 6.4202e-01, 9.9214e-01, 3.9409e-01, + 4.4199e-01, 5.1642e-01, 7.1285e-01, 4.9413e-01, + 7.5052e-01, 2.8058e-01, 2.2623e-01, 2.1988e-01, + 6.1212e-01, 6.0671e-01, 8.2609e-01, 9.1290e-01, + 7.1068e-01, 9.9832e-01, 2.6371e-01, 6.6080e-01, + 5.4266e-01, 4.3958e-01, 1.5237e-01, 3.0533e-01, + 9.5470e-01, 7.7750e-01, 7.7943e-01, 5.6432e-01, + 8.5319e-01, 3.9678e-02, 8.6073e-01, 5.0306e-01, + 8.2644e-01, 8.2269e-01, 3.4132e-01, 9.1906e-01, + 7.9166e-01, 3.2085e-01, 4.6701e-01, 9.9773e-01, + 6.9791e-01, 3.6137e-01, 1.1040e-01, 1.7501e-01, + 9.8849e-01, 4.4263e-01, 1.9965e-01, 3.4535e-01, + 5.6003e-01, 3.6558e-01, 9.4761e-01, 8.2034e-01, + 2.4737e-01, 4.2745e-01, 1.7134e-01, 5.8162e-01, + 1.6103e-01, 6.0025e-01, 2.7172e-01, 7.9051e-01, + 9.2993e-01, 3.7228e-02, 4.5206e-01, 6.1660e-01, + 3.6437e-01, 4.8195e-01, 9.5990e-01, 6.9781e-01, + 7.3194e-01, 9.0573e-01, 9.1063e-01, 8.5010e-01, + 8.7019e-01, 3.7969e-01, 1.0579e-01, 3.2359e-01, + 2.3793e-01, 3.0734e-01, 4.3956e-01, 7.3710e-01, + 3.4507e-01, 4.9102e-02, 8.9148e-01, 6.3684e-01, + 3.3010e-01, 4.9130e-01, 1.4864e-01, 8.4955e-01, + 7.1091e-01, 1.8830e-01, 7.9391e-01, 3.4619e-01, + 2.0274e-01, 7.6287e-01, 9.2535e-01, 8.5751e-01, + 2.0318e-01, 3.7269e-01, 8.1414e-01, 6.3746e-01, + 2.4272e-02, 9.0444e-01, 2.7878e-01, 6.2372e-01, + 8.5718e-01, 5.8344e-01, 7.8253e-01, 8.7141e-02, + 1.8131e-01, 7.4047e-01, 3.6256e-01, 4.1791e-01, + 9.5543e-01, 5.4189e-01, 8.9479e-01, 9.2614e-01, + 3.3140e-01, 3.7395e-02, 1.7005e-02, 6.2850e-01, + 
6.3872e-01, 7.6598e-02, 5.2965e-01, 5.2871e-02, + 3.4428e-01, 3.5255e-01, 3.5881e-01, 4.4021e-01, + 9.4895e-01, 5.3642e-02, 1.2858e-01, 2.7157e-01, + 4.5236e-01, 4.0391e-01, 4.7756e-01, 8.1354e-01, + 8.3760e-02, 5.6691e-01, 6.4345e-02, 7.4975e-01, + 4.5835e-02, 2.0528e-01, 2.5545e-01, 5.2956e-01, + 8.8487e-02, 3.1582e-03, 9.6494e-01, 6.3755e-01, + 9.1466e-01, 4.2722e-01, 3.3045e-01, 7.7722e-01, + 2.3250e-01, 9.4868e-01, 5.6161e-01, 3.1958e-01, + 8.5872e-01, 7.0911e-01, 3.9428e-01, 7.6624e-01, + 3.2459e-01, 4.3472e-01, 1.1225e-01, 5.3608e-01, + 3.8279e-01, 1.1010e-01, 5.9535e-01, 8.1914e-01, + 3.5874e-01, 2.9956e-01, 7.5475e-01, 4.1724e-01, + 6.6390e-01, 4.5093e-02, 1.4779e-01, 3.3404e-01, + 6.0949e-01, 3.4111e-01, 5.3452e-01, 4.6772e-01, + 6.5799e-01, 1.4563e-01, 8.9795e-02, 7.0677e-01, + 9.6720e-01, 3.5216e-01, 6.8202e-01, 6.4105e-01, + 5.2730e-02, 9.0455e-01, 5.0695e-01, 2.2618e-01, + 4.1787e-01, 4.9977e-01, 1.7190e-01, 8.1795e-01, + 2.5523e-01, 1.6846e-01, 3.0340e-01, 1.2116e-01, + 5.7507e-01, 3.2181e-02, 4.0972e-01, 5.3684e-01, + 7.7024e-02, 2.5776e-01, 3.8207e-02, 5.0350e-01, + 6.8774e-01, 3.7681e-01, 7.8401e-01, 3.5398e-01, + 1.7284e-01, 8.2055e-01, 8.0188e-01, 7.5652e-01, + 8.4424e-01, 7.5359e-02, 5.7660e-01, 6.7798e-01, + 5.1769e-01, 2.5610e-01, 6.8145e-01, 1.9421e-01, + 2.4913e-01, 3.1308e-01, 7.1412e-01, 5.3230e-01, + 3.8665e-01, 2.4735e-01, 3.3487e-01, 3.6920e-01, + 9.0259e-01, 4.9511e-01, 5.0551e-01, 4.5521e-01, + 2.8694e-01, 8.0323e-01, 7.3814e-01, 3.2435e-02, + 9.1875e-01, 1.7711e-01, 1.7888e-01, 4.0248e-01, + 7.6070e-01, 1.6275e-01, 2.8341e-01, 7.5435e-01, + 8.6267e-01, 4.2804e-01, 4.0729e-01, 4.6713e-01, + 6.8111e-01, 6.2087e-01, 5.1119e-01, 5.5762e-01, + 2.1222e-01, 3.0056e-01, 9.3891e-01, 7.6294e-01, + 9.2317e-01, 6.5980e-01, 6.9628e-01, 3.8871e-01, + 2.1686e-02, 8.4704e-01, 1.3727e-01, 2.5067e-01, + 3.1685e-01, 2.0661e-01, 4.0914e-01, 5.7463e-01, + 9.9025e-01, 9.5729e-01, 9.5966e-01, 7.1123e-01, + 4.3624e-01, 8.1709e-01, 6.3299e-01, 7.6594e-01, + 2.4838e-01, 3.6857e-01, 9.2991e-01, 1.9568e-01, + 8.4934e-01, 9.8161e-01, 4.7543e-01, 3.4680e-01, + 8.5980e-01, 5.5619e-01, 8.9898e-01, 1.1013e-01, + 5.1630e-01, 6.2091e-01, 8.6039e-01, 4.2882e-01, + 6.1202e-01, 8.7425e-01, 4.3832e-01, 5.2528e-01, + 9.2188e-01, 8.4098e-01, 6.1872e-01, 7.1332e-01, + 7.0086e-01, 9.5349e-01, 2.9772e-01, 1.4673e-01, + 4.6976e-01, 1.7743e-01, 7.4055e-01, 9.7266e-01, + 8.9465e-01, 8.7893e-01, 5.5357e-01, 9.5969e-01, + 9.4823e-01, 8.3593e-01, 7.5239e-01, 8.5419e-01, + 8.2583e-01, 6.3540e-02, 9.2498e-01, 9.5993e-01, + 7.7555e-01, 2.7650e-01, 4.8435e-01, 3.2763e-01, + 6.3364e-01, 3.2754e-01, 5.6219e-01, 2.8495e-01, + 5.3835e-01, 3.8651e-01, 1.3108e-02, 7.7623e-01, + 3.0955e-01, 6.0302e-01, 8.1158e-01, 2.7890e-01, + 2.2545e-01, 5.6962e-02, 4.3985e-01, 6.0311e-01, + 1.9924e-01, 1.1954e-01, 6.5392e-02, 7.3132e-01, + 4.1029e-01, 3.1499e-01, 4.5299e-01, 6.2903e-01, + 3.9752e-01, 3.2276e-01, 7.1679e-01, 2.7750e-01, + 1.6683e-01, 8.9769e-01, 2.6029e-02, 2.7055e-01, + 8.4552e-02, 3.8641e-01, 1.2100e-01, 5.4977e-01, + 5.6060e-01, 9.5285e-01, 7.9471e-01, 8.9880e-01, + 7.2027e-01, 6.6256e-01, 4.0963e-02, 7.9062e-01, + 1.1688e-01, 6.4308e-01, 7.9788e-01, 2.4992e-01, + 2.6054e-01, 2.4177e-01, 6.5576e-01, 5.0046e-02, + 2.6875e-01, 7.4788e-01, 9.9256e-01, 7.4616e-01, + 9.4601e-01, 8.6212e-01, 3.7690e-01, 6.9520e-01, + 5.7234e-01, 8.1622e-01, 5.1617e-01, 4.5916e-01, + 1.9811e-01, 8.1565e-01, 6.0003e-01, 8.9139e-01, + 5.3729e-01, 7.2110e-01, 8.6277e-01, 9.4231e-01, + 8.4618e-01, 4.9181e-01, 6.9875e-01, 2.7656e-01, + 
2.0255e-01, 5.9269e-02, 1.4714e-01, 6.9136e-01, + 3.8930e-01, 5.4488e-01, 5.5210e-01, 9.9922e-01, + 2.7776e-01, 6.7954e-01, 2.6406e-01, 9.1547e-01, + 4.4960e-01, 3.0461e-01, 2.7204e-02, 3.8050e-01, + 7.6526e-01, 9.9671e-01, 9.6274e-02, 4.6527e-01, + 6.3199e-01, 1.3465e-01, 1.2424e-01, 4.2475e-01, + 1.5085e-02, 5.3115e-01, 2.5265e-01, 5.4656e-01, + 8.3119e-01, 7.9993e-01, 1.3612e-01, 6.2849e-01, + 9.6348e-01, 8.4367e-01, 1.0647e-01, 3.5132e-01, + 5.4878e-02, 3.0414e-01, 3.8242e-01, 3.4146e-01, + 5.8380e-01, 4.3874e-01, 6.8347e-01, 3.3214e-01, + 9.5222e-01, 2.4075e-01, 5.4615e-01, 5.0775e-01, + 6.2950e-01, 6.5791e-01, 5.3492e-01, 4.6581e-01, + 2.9513e-01, 5.5712e-01, 9.6987e-01, 9.2809e-01, + 6.6533e-01, 1.5652e-01, 3.0325e-01, 8.3782e-01, + 3.8865e-02, 6.0800e-01, 5.9107e-03, 7.1721e-01, + 1.8278e-01, 2.9456e-01, 7.1928e-01, 4.3675e-01, + 4.2686e-04, 3.2199e-01, 3.1531e-01, 9.5563e-01, + 5.9256e-01, 6.6577e-01, 9.4022e-01, 5.4210e-01, + 2.2494e-01, 8.8292e-01, 5.0262e-01, 1.7811e-01, + 7.3303e-01, 3.7569e-01, 1.6461e-01, 4.7748e-01, + 6.5068e-01, 7.6588e-01, 6.9365e-01, 1.6987e-01, + 9.3394e-01, 5.6170e-01, 7.6574e-01, 4.8042e-01, + 5.2666e-01, 4.4468e-02, 7.1181e-01, 5.8269e-01, + 5.8374e-01, 3.1109e-01, 4.0255e-01, 4.9434e-01, + 6.6836e-01, 2.9763e-01, 7.7282e-01, 7.7640e-01, + 5.5950e-01, 8.2989e-01, 3.9255e-01, 3.2097e-01, + 3.5498e-01, 1.4379e-01, 6.9156e-01, 6.0956e-01, + 5.7389e-01, 2.1995e-01, 5.8540e-02, 1.3269e-01, + 2.2999e-01, 4.0583e-01, 7.6687e-01, 3.5515e-02, + 3.8287e-01, 5.1448e-01, 8.5014e-01, 2.6067e-01, + 6.8776e-01, 4.4364e-01, 5.2084e-01, 5.8748e-01, + 7.7529e-01, 5.3467e-03, 3.3276e-01, 2.7419e-01, + 2.1121e-01, 5.0536e-01, 6.9922e-01, 3.7431e-02, + 8.7829e-01, 5.9974e-01, 7.9058e-03, 3.0586e-01, + 3.7018e-01, 7.6306e-01, 7.8815e-01, 6.7061e-01, + 9.4150e-01, 5.2596e-01, 9.8341e-01, 7.4131e-01, + 4.6374e-01, 3.6025e-02, 8.4405e-01, 5.0067e-01, + 1.3499e-01, 1.0402e-01, 2.6472e-02, 9.9025e-01, + 4.3637e-01, 2.0747e-01, 8.8258e-01, 7.6534e-01, + 2.0794e-01, 6.9392e-01, 3.5335e-01, 3.5247e-02, + 2.4280e-01, 3.4927e-01, 3.3197e-01, 6.3293e-01, + 5.4021e-01, 3.0541e-01, 1.3719e-01, 4.9790e-01, + 6.1273e-01, 5.6672e-01, 1.8746e-01, 9.1112e-01, + 5.0549e-01, 2.7840e-01, 8.5873e-01, 3.6234e-01, + 7.5441e-01, 8.7309e-01, 2.3727e-01, 5.8749e-01, + 8.4630e-02, 7.2921e-01, 4.7617e-01, 2.0277e-01, + 7.1199e-01, 6.3057e-01, 7.4494e-01, 5.9390e-01, + 6.3551e-01, 7.0786e-01, 7.5953e-01, 9.4655e-01, + 6.3295e-01, 2.9744e-01, 5.7620e-02, 8.4973e-01, + 2.3404e-01, 9.0329e-01, 9.1647e-01, 8.5317e-01, + 3.9654e-03, 6.7276e-01, 7.7647e-01, 2.8644e-03, + 1.2519e-01, 2.2041e-01, 9.9379e-01, 7.0161e-01, + 9.8575e-01, 1.5756e-01, 5.1589e-01, 6.4543e-01, + 9.7651e-01, 6.5372e-01, 8.7253e-01, 3.7374e-01, + 2.1858e-01, 2.3323e-01, 3.9934e-02, 5.3707e-01, + 7.2272e-01, 3.0457e-01, 8.7516e-01, 9.0816e-01, + 6.7837e-01, 4.3251e-01, 3.3498e-01, 9.0997e-01, + 6.9389e-02, 7.7021e-01, 8.7092e-01, 3.8024e-01, + 5.4805e-01, 1.9497e-01, 5.6864e-01, 5.6939e-01, + 1.8313e-02, 1.8784e-01, 3.8429e-01, 5.5461e-02, + 1.5808e-01, 4.2510e-01, 4.4210e-01, 2.2740e-01, + 6.7933e-01, 4.3438e-01, 9.9588e-01, 8.9235e-01, + 4.4846e-01, 9.2256e-01, 4.5922e-01, 6.8803e-01, + 1.9931e-01, 2.3433e-01, 1.4563e-01, 8.9319e-01, + 8.7190e-01, 7.9103e-01, 2.3878e-01, 3.1898e-01, + 8.8999e-01, 6.2143e-01, 1.3975e-01, 3.9679e-01, + 3.7014e-01, 4.5515e-01, 2.9703e-01, 9.1947e-01, + 1.2286e-01, 4.4267e-01, 7.6303e-01, 5.3320e-01, + 5.6167e-01, 9.1226e-02, 2.2957e-01, 3.7713e-01, + 8.1719e-01, 2.0355e-02, 4.1507e-01, 6.6296e-01, + 
3.8091e-01, 6.7962e-01, 5.0347e-01, 4.7727e-01, + 7.4304e-01, 5.9352e-01, 6.8517e-01, 1.9044e-02, + 9.1414e-01, 1.4885e-01, 1.5007e-01, 3.7674e-02, + 6.0095e-01, 2.4963e-01, 1.8637e-01, 6.8931e-01, + 2.8481e-01, 6.1241e-01, 5.2515e-01, 9.8569e-01, + 8.9238e-01, 7.5556e-01, 4.9874e-01, 7.9759e-01, + 9.6169e-01, 7.9671e-01, 8.1893e-01, 4.5951e-01, + 7.5276e-01, 4.0520e-01, 4.0465e-02, 1.9412e-01, + 3.5283e-01, 6.9192e-01, 3.7641e-01, 4.3958e-02, + 8.6658e-01, 1.6543e-01, 8.0442e-01, 2.8446e-01, + 7.9453e-01, 5.9950e-01, 8.1424e-02, 7.0566e-01, + 8.2535e-01, 1.1568e-01, 2.6458e-01, 4.1053e-02, + 9.9795e-01, 1.0083e-01, 8.4276e-01, 9.7061e-01, + 6.5811e-01, 8.8023e-01, 9.4782e-01, 1.7553e-01, + 2.3410e-02, 7.5416e-01, 9.5642e-01, 5.3947e-01, + 1.7772e-01, 2.1459e-01, 8.0957e-01, 7.8863e-01, + 7.3928e-01, 5.6401e-01, 5.4479e-01, 1.4586e-01, + 5.2477e-01, 8.2925e-01, 7.6176e-01, 7.7261e-01, + 9.2809e-01, 7.2949e-01, 6.4136e-01, 4.5900e-01, + 9.7133e-01, 8.6138e-01, 7.7439e-01, 9.1759e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.0493, 0.7033, 0.8450, ..., 0.3708, 0.9702, 0.0106]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 4.8812994956970215 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 145476 -ss 10000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.560847282409668} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5150, 3949, 6725, 9833, 6214, 5081, 1554, 3283, 4592, + 1367, 5501, 6306, 1908, 8847, 9684, 2365, 4516, 2030, + 6246, 1037, 3129, 6994, 6859, 6054, 2697, 3073, 4745, + 5149, 9683, 6168, 7171, 2986, 8282, 5962, 9044, 9859, + 4416, 2729, 3201, 9334, 70, 3704, 1992, 4539, 3854, + 9774, 4984, 5696, 4186, 7809, 2303, 2349, 6788, 8167, + 7467, 8573, 1950, 1383, 4872, 1688, 3467, 531, 2507, + 77, 4045, 9094, 8112, 2615, 9777, 5813, 1750, 6905, + 7986, 8272, 5559, 7416, 2650, 6301, 7023, 6146, 5728, + 154, 3305, 852, 1556, 1672, 8260, 3100, 4027, 2524, + 7136, 1881, 3609, 4077, 5415, 1342, 7463, 0, 7237, + 949, 4271, 7500, 4398, 202, 6370, 6523, 8149, 1606, + 644, 1179, 4251, 732, 4140, 9430, 7869, 4357, 3082, + 7706, 929, 1350, 2066, 3153, 7727, 3490, 4529, 2830, + 604, 9531, 3037, 674, 1118, 7439, 1800, 9605, 7647, + 7273, 8451, 1727, 5065, 8403, 4478, 8920, 7608, 949, + 4223, 6471, 1464, 4183, 9344, 7358, 8169, 5124, 7436, + 7379, 4302, 4579, 6954, 7352, 2475, 6524, 6023, 4419, + 4933, 4411, 5203, 2328, 5122, 1046, 4112, 415, 8434, + 5247, 5282, 7252, 5516, 6132, 9262, 1383, 4445, 3838, + 3480, 5499, 6857, 5666, 2223, 3948, 5622, 6586, 8728, + 1267, 1798, 4209, 2945, 7759, 2936, 2435, 508, 7969, + 658, 334, 4257, 5246, 6393, 357, 3505, 4514, 1464, + 8590, 9903, 4716, 9051, 4470, 9570, 4777, 3595, 8441, + 9110, 9670, 2187, 8548, 3616, 2595, 1352, 5502, 4886, + 8337, 7807, 4921, 643, 1759, 191, 6520, 5823, 2075, + 4740, 3014, 8077, 3584, 7499, 5306, 1223, 1385, 7517, + 8020, 7952, 2205, 3204, 4927, 5397, 2795, 9112, 9159, + 2689, 6163, 2719, 3134, 7968, 7593, 4653, 54, 597, + 3621, 6399, 2434, 6710, 967, 6700, 1213, 1026, 551, + 5577, 3471, 9061, 2300, 9239, 9461, 23, 3996, 596, + 2494, 5453, 3959, 6826, 5001, 7382, 416, 1281, 7317, + 7289, 1990, 4004, 9139, 5821, 8865, 9134, 9753, 7856, + 3678, 597, 9576, 3434, 7138, 8525, 4380, 4708, 1098, + 1864, 2641, 5053, 725, 7343, 1604, 5046, 9253, 1076, + 1733, 7452, 5764, 4569, 2060, 2335, 2788, 666, 560, + 4714, 8624, 8928, 6013, 1750, 3137, 5802, 2485, 8605, + 4692, 8176, 851, 670, 6436, 4116, 3254, 4703, 3344, + 9079, 2177, 3623, 7381, 508, 1878, 8203, 179, 3099, + 5174, 8679, 6987, 5356, 2553, 3507, 667, 8739, 4484, + 5254, 6753, 2060, 7479, 4766, 9854, 7766, 9804, 802, + 2515, 3329, 2210, 7753, 2069, 70, 3487, 1593, 4737, + 6304, 4273, 1474, 5728, 8160, 440, 2963, 5961, 9940, + 3963, 6482, 6596, 1104, 8908, 2324, 4677, 901, 5681, + 7684, 1987, 3941, 1028, 3945, 522, 155, 9582, 618, + 9191, 2285, 2363, 1005, 6492, 2446, 5028, 8066, 9102, + 7602, 8504, 3215, 2742, 946, 538, 5191, 4078, 3141, + 982, 1552, 5688, 9154, 3693, 2376, 5721, 4025, 5733, + 4065, 1715, 8170, 7847, 2860, 4001, 475, 7239, 5611, + 5117, 3773, 7630, 4037, 8971, 3180, 5691, 7586, 7012, + 1221, 6168, 6276, 2660, 6301, 4688, 6558, 7433, 4113, + 3119, 4374, 617, 7732, 5185, 9146, 7702, 4380, 1370, + 4149, 205, 3424, 954, 5456, 1934, 9488, 5247, 9946, + 2494, 4258, 7045, 5497, 588, 1288, 5965, 6814, 3539, + 1764, 4809, 3064, 9477, 5056, 3833, 5491, 8813, 623, + 9737, 8723, 3926, 2417, 6250, 1832, 4224, 3868, 2666, + 5475, 2503, 4400, 2951, 8789, 8901, 2576, 1586, 2358, + 8683, 701, 2218, 324, 2750, 4642, 6563, 7866, 2142, + 2049, 8029, 6801, 8630, 4530, 1913, 7492, 4784, 1756, + 2725, 811, 3186, 5499, 565, 5265, 4256, 9478, 1700, + 8194, 7843, 9531, 2519, 7331, 6421, 4938, 2047, 9115, + 5085, 6558, 2790, 5305, 7400, 6481, 8276, 6051, 376, + 2232, 
3828, 5087, 901, 859, 311, 5986, 646, 2534, + 3646, 6330, 6624, 44, 9526, 1720, 7671, 786, 2103, + 7291, 6456, 6706, 9840, 151, 5594, 5539, 8478, 8848, + 9399, 6447, 7659, 8019, 3178, 3373, 8082, 4366, 3291, + 1095, 8101, 138, 8637, 5651, 2986, 4186, 1262, 8903, + 7692, 5148, 5226, 6772, 7717, 1398, 9840, 6283, 4653, + 916, 1943, 7857, 14, 2435, 9045, 6355, 6437, 5876, + 2079, 6222, 9106, 7167, 8674, 3568, 5387, 6404, 4327, + 9378, 7023, 3428, 7208, 3991, 3503, 1112, 5994, 7543, + 8077, 3450, 7732, 95, 4124, 4136, 4748, 216, 5127, + 2129, 134, 2135, 8151, 9959, 5515, 3277, 4587, 1429, + 2979, 932, 7056, 783, 3334, 7562, 460, 5737, 5306, + 1839, 8429, 174, 7235, 4614, 34, 4009, 7389, 701, + 5758, 8329, 9563, 8743, 4338, 3477, 1180, 1540, 5860, + 9475, 5545, 4339, 9525, 5428, 2826, 7521, 4441, 8311, + 1115, 7387, 6593, 2389, 3900, 8596, 9443, 4909, 2179, + 2149, 5400, 6357, 6980, 7081, 5466, 6799, 5303, 6495, + 572, 6717, 3685, 9544, 9986, 5977, 7724, 1134, 4967, + 2145, 3442, 1984, 9728, 2275, 9320, 1257, 102, 4987, + 8007, 1513, 959, 4257, 799, 4709, 204, 6285, 7273, + 8278, 2651, 8788, 6857, 3198, 3750, 7764, 9530, 2525, + 5355, 2828, 6803, 3742, 9808, 2702, 3428, 5248, 7386, + 9259, 3003, 8487, 8936, 9681, 4024, 4691, 9077, 8624, + 670, 3644, 2024, 5179, 8050, 1290, 8269, 8524, 4786, + 2503, 35, 5675, 3549, 6350, 1714, 678, 5292, 9599, + 9848, 2823, 3918, 8633, 2596, 7468, 1783, 4984, 5190, + 2747, 7283, 6599, 5946, 9563, 8503, 9940, 9756, 3292, + 4953, 1009, 8896, 653, 1878, 8055, 8508, 2037, 2440, + 37, 3449, 5269, 3294, 7638, 3773, 5688, 5609, 7128, + 1885, 2653, 15, 148, 8228, 7481, 7965, 3146, 3889, + 6682, 8636, 1153, 8978, 9471, 4557, 8139, 4145, 2086, + 2623, 2951, 2381, 6771, 7405, 7405, 7866, 6651, 961, + 6157, 9117, 8248, 4454, 4081, 1801, 8411, 624, 6352, + 5499, 2637, 2893, 2441, 8243, 5608, 5218, 6178, 2460, + 7724, 4117, 2164, 8643, 7095, 2933, 8474, 6756, 7154, + 4556, 7730, 8140, 7610, 525, 1161, 9855, 6850, 3574, + 5534, 4716, 9318, 179, 5902, 7492, 3084, 3893, 1120, + 3911, 5634, 9144, 2088, 5803, 5300, 9679, 7556, 7644, + 1213, 9980, 8515, 3166, 262, 4231, 480, 4900, 7635, + 9607, 6653, 3153, 9282, 6882, 6866, 9053, 6078, 1605, + 2679, 9434, 1838, 5200, 8526, 145, 8529, 81, 7768, + 2792, 1368, 1146, 6845, 2082, 779, 3727, 3189, 5616, + 2883, 6077, 3380, 5032, 8816, 3861, 2679, 2096, 1826, + 4481, 3081, 2880, 9558, 48, 8852, 7596, 4096, 1993, + 3536, 4469, 1068, 8771, 110, 3938, 7946, 5524, 2146, + 2369, 7248, 6850, 2766, 3813, 4176, 2118, 2240, 2650, + 661, 3663, 7071, 4825, 3012, 5874, 7763, 2, 9014, + 386, 6880, 6261, 4740, 4948, 6569, 5412, 7485, 5084, + 2240, 7660, 5562, 8271, 9281, 7660, 7758, 8339, 2685, + 4281]), + values=tensor([0.7305, 0.2440, 0.3019, 0.7007, 0.9449, 0.8426, 0.6302, + 0.6840, 0.3760, 0.2330, 0.8640, 0.1425, 0.7949, 0.3165, + 0.9440, 0.0634, 0.1824, 0.8939, 0.0831, 0.8632, 0.5557, + 0.9222, 0.7152, 0.4959, 0.3642, 0.8742, 0.0533, 0.8773, + 0.3271, 0.1850, 0.8267, 0.0692, 0.8764, 0.6532, 0.0689, + 0.0059, 0.9796, 0.6870, 0.9154, 0.4784, 0.5447, 0.7909, + 0.5242, 0.2989, 0.3478, 0.3160, 0.9938, 0.4624, 0.7891, + 0.5624, 0.6799, 0.1505, 0.3167, 0.1174, 0.4734, 0.9357, + 0.4414, 0.2681, 0.3600, 0.9851, 0.0984, 0.9817, 0.6071, + 0.2437, 0.6382, 0.9904, 0.2276, 0.5007, 0.7924, 0.8288, + 0.4341, 0.5700, 0.0922, 0.1698, 0.6893, 0.1819, 0.6179, + 0.0688, 0.2326, 0.7694, 0.2736, 0.3807, 0.6497, 0.0934, + 0.2985, 0.3575, 0.2669, 0.2111, 0.0895, 0.1728, 0.5018, + 0.9851, 0.2093, 0.7627, 0.8317, 0.2321, 0.2733, 0.9575, + 0.1373, 0.4112, 0.4346, 0.2690, 
0.0832, 0.9308, 0.0856, + 0.4248, 0.1271, 0.2415, 0.1779, 0.9677, 0.0600, 0.9191, + 0.1112, 0.1565, 0.1580, 0.1619, 0.6745, 0.6379, 0.9366, + 0.4143, 0.1271, 0.0864, 0.7266, 0.3906, 0.9727, 0.7339, + 0.0407, 0.0059, 0.6237, 0.7622, 0.9907, 0.2749, 0.4392, + 0.2002, 0.6558, 0.9415, 0.5261, 0.2561, 0.7660, 0.3448, + 0.1122, 0.7076, 0.6037, 0.4897, 0.4670, 0.8339, 0.5757, + 0.4177, 0.7623, 0.5865, 0.8495, 0.6497, 0.9778, 0.6297, + 0.3389, 0.2747, 0.0702, 0.1398, 0.1005, 0.4201, 0.1831, + 0.2403, 0.3332, 0.4306, 0.5321, 0.7778, 0.0204, 0.4136, + 0.1536, 0.7541, 0.0838, 0.5081, 0.7545, 0.0833, 0.4845, + 0.3873, 0.2490, 0.4675, 0.2018, 0.9437, 0.0574, 0.7574, + 0.4217, 0.4481, 0.8897, 0.5683, 0.3881, 0.1927, 0.8680, + 0.3664, 0.8069, 0.2192, 0.4607, 0.7321, 0.3086, 0.6446, + 0.3613, 0.6584, 0.1132, 0.8934, 0.0915, 0.5183, 0.8634, + 0.8577, 0.6923, 0.8618, 0.6853, 0.8256, 0.8384, 0.2295, + 0.3689, 0.5760, 0.2086, 0.3564, 0.8483, 0.6783, 0.7902, + 0.6082, 0.2549, 0.6415, 0.4994, 0.2078, 0.5032, 0.3790, + 0.4926, 0.0907, 0.6911, 0.2975, 0.8561, 0.2283, 0.4530, + 0.2236, 0.5933, 0.7581, 0.3982, 0.1913, 0.0682, 0.2359, + 0.4488, 0.3220, 0.4875, 0.7597, 0.7930, 0.2062, 0.4708, + 0.1870, 0.0373, 0.0367, 0.8306, 0.1947, 0.2315, 0.0207, + 0.8163, 0.7626, 0.9301, 0.8099, 0.3633, 0.5641, 0.8120, + 0.6141, 0.5594, 0.4658, 0.2150, 0.1654, 0.3051, 0.0343, + 0.3400, 0.5978, 0.5354, 0.0263, 0.3151, 0.8046, 0.5327, + 0.3066, 0.2487, 0.3111, 0.0893, 0.1612, 0.5907, 0.3107, + 0.8584, 0.8229, 0.4034, 0.2252, 0.3659, 0.2053, 0.7999, + 0.7701, 0.7502, 0.7518, 0.5690, 0.1322, 0.1586, 0.1160, + 0.9796, 0.3007, 0.1296, 0.7414, 0.1813, 0.4708, 0.0361, + 0.8196, 0.1266, 0.8025, 0.2789, 0.7870, 0.4347, 0.9531, + 0.2674, 0.9698, 0.4764, 0.0519, 0.6509, 0.3234, 0.4769, + 0.3125, 0.1997, 0.6900, 0.7719, 0.1567, 0.8227, 0.8361, + 0.3658, 0.9962, 0.8049, 0.2839, 0.0239, 0.5230, 0.1679, + 0.0012, 0.1941, 0.1825, 0.6328, 0.8021, 0.7872, 0.5540, + 0.0997, 0.9139, 0.1493, 0.1197, 0.6714, 0.2685, 0.5289, + 0.4741, 0.4375, 0.4510, 0.6505, 0.0060, 0.0804, 0.3052, + 0.4065, 0.1432, 0.8715, 0.0973, 0.2004, 0.0400, 0.2303, + 0.6891, 0.4089, 0.8692, 0.1440, 0.3744, 0.8894, 0.4839, + 0.5594, 0.2863, 0.2636, 0.1806, 0.4900, 0.9233, 0.0287, + 0.1519, 0.0238, 0.9718, 0.0696, 0.4915, 0.4842, 0.1594, + 0.7936, 0.6860, 0.5880, 0.4352, 0.0092, 0.6365, 0.1016, + 0.1299, 0.9522, 0.9906, 0.4238, 0.2435, 0.5165, 0.1731, + 0.6409, 0.2362, 0.6101, 0.1717, 0.7603, 0.4938, 0.2723, + 0.7599, 0.9112, 0.0268, 0.9393, 0.8969, 0.3631, 0.2998, + 0.3500, 0.7422, 0.9345, 0.9975, 0.0517, 0.1544, 0.6201, + 0.5972, 0.3434, 0.9546, 0.0842, 0.2284, 0.4080, 0.3233, + 0.9813, 0.3166, 0.9934, 0.5541, 0.2131, 0.8230, 0.1841, + 0.3426, 0.7598, 0.6218, 0.4928, 0.7872, 0.4823, 0.5321, + 0.2430, 0.9717, 0.6158, 0.5835, 0.2198, 0.3750, 0.8817, + 0.8977, 0.6208, 0.4204, 0.5681, 0.7034, 0.0297, 0.1272, + 0.9109, 0.2850, 0.6202, 0.3491, 0.4077, 0.0265, 0.9469, + 0.5186, 0.0530, 0.5136, 0.4687, 0.4898, 0.2296, 0.6555, + 0.6814, 0.2474, 0.5224, 0.6744, 0.7214, 0.5305, 0.5181, + 0.3443, 0.7930, 0.3254, 0.8409, 0.0232, 0.8876, 0.4606, + 0.8758, 0.4816, 0.3285, 0.3194, 0.0473, 0.5240, 0.4558, + 0.2643, 0.3823, 0.3375, 0.5496, 0.8842, 0.2643, 0.8295, + 0.0774, 0.7973, 0.2342, 0.2846, 0.4365, 0.5561, 0.8911, + 0.9974, 0.3818, 0.8914, 0.6534, 0.3445, 0.0570, 0.5409, + 0.8934, 0.8748, 0.8762, 0.1700, 0.1286, 0.5886, 0.2604, + 0.5122, 0.3910, 0.6718, 0.8386, 0.7880, 0.7735, 0.8382, + 0.7683, 0.8908, 0.5226, 0.8048, 0.4884, 0.3660, 0.1809, + 0.2335, 0.6855, 0.8170, 0.5295, 0.7842, 0.7798, 
0.2910, + 0.3853, 0.8829, 0.7835, 0.1690, 0.4663, 0.2045, 0.1035, + 0.4997, 0.5297, 0.1289, 0.5179, 0.0606, 0.8583, 0.3048, + 0.7708, 0.1893, 0.6309, 0.7686, 0.9828, 0.9048, 0.3841, + 0.2185, 0.6435, 0.4478, 0.9468, 0.8737, 0.2111, 0.1349, + 0.9495, 0.4091, 0.3367, 0.4818, 0.6928, 0.9558, 0.4398, + 0.5439, 0.7708, 0.4337, 0.4611, 0.2583, 0.1955, 0.4439, + 0.4445, 0.7809, 0.8863, 0.3069, 0.4805, 0.8381, 0.8548, + 0.5700, 0.1066, 0.1066, 0.8937, 0.5723, 0.6920, 0.0327, + 0.2123, 0.4301, 0.6855, 0.9118, 0.1103, 0.8642, 0.3839, + 0.6499, 0.7405, 0.8262, 0.1273, 0.8596, 0.1214, 0.5538, + 0.2683, 0.7685, 0.0380, 0.9668, 0.3679, 0.4231, 0.3206, + 0.2939, 0.2000, 0.2628, 0.6644, 0.0046, 0.4634, 0.6009, + 0.9618, 0.0916, 0.1533, 0.8964, 0.0750, 0.6530, 0.7420, + 0.0171, 0.1412, 0.9378, 0.1278, 0.5952, 0.5957, 0.1602, + 0.4569, 0.6997, 0.3923, 0.9702, 0.9204, 0.6140, 0.5178, + 0.9051, 0.2480, 0.1154, 0.8068, 0.0751, 0.2951, 0.2063, + 0.5678, 0.8248, 0.3371, 0.6619, 0.2068, 0.7570, 0.0168, + 0.6561, 0.1487, 0.6018, 0.3884, 0.9519, 0.9836, 0.1373, + 0.8836, 0.8831, 0.2552, 0.0736, 0.2226, 0.3008, 0.4150, + 0.1279, 0.9067, 0.1991, 0.2146, 0.2746, 0.7514, 0.1652, + 0.6727, 0.7846, 0.1471, 0.6132, 0.2493, 0.3325, 0.1885, + 0.6417, 0.6590, 0.4757, 0.4061, 0.4359, 0.7236, 0.4441, + 0.8643, 0.8479, 0.1003, 0.0185, 0.7836, 0.3159, 0.9798, + 0.5805, 0.8284, 0.5046, 0.3086, 0.3844, 0.2360, 0.1980, + 0.4207, 0.9796, 0.1878, 0.5333, 0.3240, 0.4450, 0.8072, + 0.9168, 0.9993, 0.1170, 0.7410, 0.7954, 0.0307, 0.0989, + 0.1791, 0.2914, 0.1820, 0.6230, 0.6850, 0.2813, 0.1157, + 0.9867, 0.0737, 0.8029, 0.1015, 0.4682, 0.7620, 0.7696, + 0.1064, 0.5765, 0.9709, 0.1174, 0.5832, 0.0978, 0.3568, + 0.0730, 0.0563, 0.7057, 0.7628, 0.0959, 0.4747, 0.3801, + 0.4508, 0.6394, 0.2715, 0.6105, 0.9926, 0.0896, 0.9574, + 0.6793, 0.6180, 0.4648, 0.2921, 0.3386, 0.6820, 0.5212, + 0.2154, 0.4717, 0.5014, 0.2617, 0.2977, 0.8006, 0.3353, + 0.8470, 0.7611, 0.1607, 0.8669, 0.7715, 0.5232, 0.4841, + 0.9115, 0.0228, 0.4693, 0.3305, 0.8215, 0.0869, 0.1824, + 0.6579, 0.0833, 0.9613, 0.4715, 0.3285, 0.5471, 0.6905, + 0.3957, 0.1840, 0.6202, 0.6851, 0.4733, 0.5934, 0.1386, + 0.7851, 0.7342, 0.6370, 0.2348, 0.9459, 0.2391, 0.7316, + 0.0941, 0.4717, 0.1253, 0.8566, 0.1216, 0.7571, 0.4196, + 0.8179, 0.8132, 0.2885, 0.9707, 0.1275, 0.3342, 0.3134, + 0.7312, 0.8352, 0.1365, 0.9373, 0.3091, 0.3749, 0.0663, + 0.0702, 0.0409, 0.7695, 0.6795, 0.1004, 0.2734, 0.4594, + 0.7910, 0.2158, 0.2145, 0.3182, 0.4406, 0.5618, 0.1494, + 0.2911, 0.5915, 0.9878, 0.1365, 0.7549, 0.8342, 0.9059, + 0.4562, 0.0558, 0.1508, 0.1673, 0.9872, 0.7096, 0.7520, + 0.1691, 0.3662, 0.9410, 0.2625, 0.1475, 0.8645, 0.3039, + 0.2029, 0.3516, 0.6215, 0.5740, 0.4584, 0.2225, 0.4006, + 0.3128, 0.0380, 0.6050, 0.2938, 0.0878, 0.3289, 0.8456, + 0.2525, 0.9209, 0.0593, 0.6809, 0.3115, 0.4275, 0.2499, + 0.9295, 0.5425, 0.4391, 0.3831, 0.4593, 0.7149, 0.1921, + 0.1184, 0.2786, 0.9624, 0.1167, 0.0044, 0.4051, 0.0108, + 0.4520, 0.6056, 0.9254, 0.0610, 0.3916, 0.2002, 0.0181, + 0.2959, 0.8354, 0.8965, 0.7722, 0.6322, 0.5425, 0.0976, + 0.8386, 0.2720, 0.5773, 0.9188, 0.0410, 0.3565, 0.1415, + 0.0175, 0.2161, 0.8758, 0.7699, 0.2833, 0.0538, 0.3260, + 0.5205, 0.1162, 0.9185, 0.1645, 0.4861, 0.6898, 0.0238, + 0.7657, 0.0799, 0.4505, 0.6300, 0.6548, 0.1225, 0.0206, + 0.5211, 0.1829, 0.9455, 0.5775, 0.0847, 0.2663, 0.6607, + 0.9023, 0.5472, 0.7809, 0.3315, 0.1532, 0.1912, 0.3343, + 0.7726, 0.2157, 0.2423, 0.7378, 0.9800, 0.4469, 0.4539, + 0.9687, 0.0064, 0.0441, 0.0697, 0.5833, 0.6814, 0.6849, + 
0.0435, 0.1250, 0.2613, 0.0854, 0.3080, 0.5157, 0.4405, + 0.5866, 0.4456, 0.1962, 0.6798, 0.4460, 0.0218, 0.8899, + 0.8373, 0.1209, 0.8163, 0.3718, 0.6930, 0.1628, 0.4197, + 0.2782, 0.5692, 0.9005, 0.5938, 0.2539, 0.8654, 0.7168, + 0.9464, 0.8460, 0.4902, 0.5805, 0.0640, 0.5710, 0.7328, + 0.9874, 0.0901, 0.6221, 0.7762, 0.9765, 0.8525]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4976, 0.0481, 0.1913, ..., 0.0301, 0.3766, 0.0826]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.560847282409668 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5150, 3949, 6725, 9833, 6214, 5081, 1554, 3283, 4592, + 1367, 5501, 6306, 1908, 8847, 9684, 2365, 4516, 2030, + 6246, 1037, 3129, 6994, 6859, 6054, 2697, 3073, 4745, + 5149, 9683, 6168, 7171, 2986, 8282, 5962, 9044, 9859, + 4416, 2729, 3201, 9334, 70, 3704, 1992, 4539, 3854, + 9774, 4984, 5696, 4186, 7809, 2303, 2349, 6788, 8167, + 7467, 8573, 1950, 1383, 4872, 1688, 3467, 531, 2507, + 77, 4045, 9094, 8112, 2615, 9777, 5813, 1750, 6905, + 7986, 8272, 5559, 7416, 2650, 6301, 7023, 6146, 5728, + 154, 3305, 852, 1556, 1672, 8260, 3100, 4027, 2524, + 7136, 1881, 3609, 4077, 5415, 1342, 7463, 0, 7237, + 949, 4271, 7500, 4398, 202, 6370, 6523, 8149, 1606, + 644, 1179, 4251, 732, 4140, 9430, 7869, 4357, 3082, + 7706, 929, 1350, 2066, 3153, 7727, 3490, 4529, 2830, + 604, 9531, 3037, 674, 1118, 7439, 1800, 9605, 7647, + 7273, 8451, 1727, 5065, 8403, 4478, 8920, 7608, 949, + 4223, 6471, 1464, 4183, 9344, 7358, 8169, 5124, 7436, + 7379, 4302, 4579, 6954, 7352, 2475, 6524, 6023, 4419, + 4933, 4411, 5203, 2328, 5122, 1046, 4112, 415, 8434, + 5247, 5282, 7252, 5516, 6132, 9262, 1383, 4445, 3838, + 3480, 5499, 6857, 5666, 2223, 3948, 5622, 6586, 8728, + 1267, 1798, 4209, 2945, 7759, 2936, 2435, 508, 7969, + 658, 334, 4257, 5246, 6393, 357, 3505, 4514, 1464, + 8590, 9903, 4716, 9051, 4470, 9570, 4777, 3595, 8441, + 9110, 9670, 2187, 8548, 3616, 2595, 1352, 5502, 4886, + 8337, 7807, 4921, 643, 1759, 191, 6520, 5823, 2075, + 4740, 3014, 8077, 3584, 7499, 5306, 1223, 1385, 7517, + 8020, 7952, 2205, 3204, 4927, 5397, 2795, 9112, 9159, + 2689, 6163, 2719, 3134, 7968, 7593, 4653, 54, 597, + 3621, 6399, 2434, 6710, 967, 6700, 1213, 1026, 551, + 5577, 3471, 9061, 2300, 9239, 9461, 23, 3996, 596, + 2494, 5453, 3959, 6826, 5001, 7382, 416, 1281, 7317, + 7289, 1990, 4004, 9139, 5821, 8865, 9134, 9753, 7856, + 3678, 597, 9576, 3434, 7138, 8525, 4380, 4708, 1098, + 1864, 2641, 5053, 725, 7343, 1604, 5046, 9253, 1076, + 1733, 7452, 5764, 4569, 2060, 2335, 2788, 666, 560, + 4714, 8624, 8928, 6013, 1750, 3137, 5802, 2485, 8605, + 4692, 8176, 851, 670, 6436, 4116, 3254, 4703, 3344, + 9079, 2177, 3623, 7381, 508, 1878, 8203, 179, 3099, + 5174, 8679, 6987, 5356, 2553, 3507, 667, 8739, 4484, + 5254, 6753, 2060, 7479, 4766, 9854, 7766, 9804, 802, + 2515, 3329, 2210, 7753, 2069, 70, 3487, 1593, 4737, + 6304, 4273, 1474, 5728, 8160, 440, 2963, 5961, 9940, + 3963, 6482, 6596, 1104, 8908, 2324, 4677, 901, 
5681, + 7684, 1987, 3941, 1028, 3945, 522, 155, 9582, 618, + 9191, 2285, 2363, 1005, 6492, 2446, 5028, 8066, 9102, + 7602, 8504, 3215, 2742, 946, 538, 5191, 4078, 3141, + 982, 1552, 5688, 9154, 3693, 2376, 5721, 4025, 5733, + 4065, 1715, 8170, 7847, 2860, 4001, 475, 7239, 5611, + 5117, 3773, 7630, 4037, 8971, 3180, 5691, 7586, 7012, + 1221, 6168, 6276, 2660, 6301, 4688, 6558, 7433, 4113, + 3119, 4374, 617, 7732, 5185, 9146, 7702, 4380, 1370, + 4149, 205, 3424, 954, 5456, 1934, 9488, 5247, 9946, + 2494, 4258, 7045, 5497, 588, 1288, 5965, 6814, 3539, + 1764, 4809, 3064, 9477, 5056, 3833, 5491, 8813, 623, + 9737, 8723, 3926, 2417, 6250, 1832, 4224, 3868, 2666, + 5475, 2503, 4400, 2951, 8789, 8901, 2576, 1586, 2358, + 8683, 701, 2218, 324, 2750, 4642, 6563, 7866, 2142, + 2049, 8029, 6801, 8630, 4530, 1913, 7492, 4784, 1756, + 2725, 811, 3186, 5499, 565, 5265, 4256, 9478, 1700, + 8194, 7843, 9531, 2519, 7331, 6421, 4938, 2047, 9115, + 5085, 6558, 2790, 5305, 7400, 6481, 8276, 6051, 376, + 2232, 3828, 5087, 901, 859, 311, 5986, 646, 2534, + 3646, 6330, 6624, 44, 9526, 1720, 7671, 786, 2103, + 7291, 6456, 6706, 9840, 151, 5594, 5539, 8478, 8848, + 9399, 6447, 7659, 8019, 3178, 3373, 8082, 4366, 3291, + 1095, 8101, 138, 8637, 5651, 2986, 4186, 1262, 8903, + 7692, 5148, 5226, 6772, 7717, 1398, 9840, 6283, 4653, + 916, 1943, 7857, 14, 2435, 9045, 6355, 6437, 5876, + 2079, 6222, 9106, 7167, 8674, 3568, 5387, 6404, 4327, + 9378, 7023, 3428, 7208, 3991, 3503, 1112, 5994, 7543, + 8077, 3450, 7732, 95, 4124, 4136, 4748, 216, 5127, + 2129, 134, 2135, 8151, 9959, 5515, 3277, 4587, 1429, + 2979, 932, 7056, 783, 3334, 7562, 460, 5737, 5306, + 1839, 8429, 174, 7235, 4614, 34, 4009, 7389, 701, + 5758, 8329, 9563, 8743, 4338, 3477, 1180, 1540, 5860, + 9475, 5545, 4339, 9525, 5428, 2826, 7521, 4441, 8311, + 1115, 7387, 6593, 2389, 3900, 8596, 9443, 4909, 2179, + 2149, 5400, 6357, 6980, 7081, 5466, 6799, 5303, 6495, + 572, 6717, 3685, 9544, 9986, 5977, 7724, 1134, 4967, + 2145, 3442, 1984, 9728, 2275, 9320, 1257, 102, 4987, + 8007, 1513, 959, 4257, 799, 4709, 204, 6285, 7273, + 8278, 2651, 8788, 6857, 3198, 3750, 7764, 9530, 2525, + 5355, 2828, 6803, 3742, 9808, 2702, 3428, 5248, 7386, + 9259, 3003, 8487, 8936, 9681, 4024, 4691, 9077, 8624, + 670, 3644, 2024, 5179, 8050, 1290, 8269, 8524, 4786, + 2503, 35, 5675, 3549, 6350, 1714, 678, 5292, 9599, + 9848, 2823, 3918, 8633, 2596, 7468, 1783, 4984, 5190, + 2747, 7283, 6599, 5946, 9563, 8503, 9940, 9756, 3292, + 4953, 1009, 8896, 653, 1878, 8055, 8508, 2037, 2440, + 37, 3449, 5269, 3294, 7638, 3773, 5688, 5609, 7128, + 1885, 2653, 15, 148, 8228, 7481, 7965, 3146, 3889, + 6682, 8636, 1153, 8978, 9471, 4557, 8139, 4145, 2086, + 2623, 2951, 2381, 6771, 7405, 7405, 7866, 6651, 961, + 6157, 9117, 8248, 4454, 4081, 1801, 8411, 624, 6352, + 5499, 2637, 2893, 2441, 8243, 5608, 5218, 6178, 2460, + 7724, 4117, 2164, 8643, 7095, 2933, 8474, 6756, 7154, + 4556, 7730, 8140, 7610, 525, 1161, 9855, 6850, 3574, + 5534, 4716, 9318, 179, 5902, 7492, 3084, 3893, 1120, + 3911, 5634, 9144, 2088, 5803, 5300, 9679, 7556, 7644, + 1213, 9980, 8515, 3166, 262, 4231, 480, 4900, 7635, + 9607, 6653, 3153, 9282, 6882, 6866, 9053, 6078, 1605, + 2679, 9434, 1838, 5200, 8526, 145, 8529, 81, 7768, + 2792, 1368, 1146, 6845, 2082, 779, 3727, 3189, 5616, + 2883, 6077, 3380, 5032, 8816, 3861, 2679, 2096, 1826, + 4481, 3081, 2880, 9558, 48, 8852, 7596, 4096, 1993, + 3536, 4469, 1068, 8771, 110, 3938, 7946, 5524, 2146, + 2369, 7248, 6850, 2766, 3813, 4176, 2118, 2240, 2650, + 661, 3663, 7071, 4825, 3012, 
5874, 7763, 2, 9014, + 386, 6880, 6261, 4740, 4948, 6569, 5412, 7485, 5084, + 2240, 7660, 5562, 8271, 9281, 7660, 7758, 8339, 2685, + 4281]), + values=tensor([0.7305, 0.2440, 0.3019, 0.7007, 0.9449, 0.8426, 0.6302, + 0.6840, 0.3760, 0.2330, 0.8640, 0.1425, 0.7949, 0.3165, + 0.9440, 0.0634, 0.1824, 0.8939, 0.0831, 0.8632, 0.5557, + 0.9222, 0.7152, 0.4959, 0.3642, 0.8742, 0.0533, 0.8773, + 0.3271, 0.1850, 0.8267, 0.0692, 0.8764, 0.6532, 0.0689, + 0.0059, 0.9796, 0.6870, 0.9154, 0.4784, 0.5447, 0.7909, + 0.5242, 0.2989, 0.3478, 0.3160, 0.9938, 0.4624, 0.7891, + 0.5624, 0.6799, 0.1505, 0.3167, 0.1174, 0.4734, 0.9357, + 0.4414, 0.2681, 0.3600, 0.9851, 0.0984, 0.9817, 0.6071, + 0.2437, 0.6382, 0.9904, 0.2276, 0.5007, 0.7924, 0.8288, + 0.4341, 0.5700, 0.0922, 0.1698, 0.6893, 0.1819, 0.6179, + 0.0688, 0.2326, 0.7694, 0.2736, 0.3807, 0.6497, 0.0934, + 0.2985, 0.3575, 0.2669, 0.2111, 0.0895, 0.1728, 0.5018, + 0.9851, 0.2093, 0.7627, 0.8317, 0.2321, 0.2733, 0.9575, + 0.1373, 0.4112, 0.4346, 0.2690, 0.0832, 0.9308, 0.0856, + 0.4248, 0.1271, 0.2415, 0.1779, 0.9677, 0.0600, 0.9191, + 0.1112, 0.1565, 0.1580, 0.1619, 0.6745, 0.6379, 0.9366, + 0.4143, 0.1271, 0.0864, 0.7266, 0.3906, 0.9727, 0.7339, + 0.0407, 0.0059, 0.6237, 0.7622, 0.9907, 0.2749, 0.4392, + 0.2002, 0.6558, 0.9415, 0.5261, 0.2561, 0.7660, 0.3448, + 0.1122, 0.7076, 0.6037, 0.4897, 0.4670, 0.8339, 0.5757, + 0.4177, 0.7623, 0.5865, 0.8495, 0.6497, 0.9778, 0.6297, + 0.3389, 0.2747, 0.0702, 0.1398, 0.1005, 0.4201, 0.1831, + 0.2403, 0.3332, 0.4306, 0.5321, 0.7778, 0.0204, 0.4136, + 0.1536, 0.7541, 0.0838, 0.5081, 0.7545, 0.0833, 0.4845, + 0.3873, 0.2490, 0.4675, 0.2018, 0.9437, 0.0574, 0.7574, + 0.4217, 0.4481, 0.8897, 0.5683, 0.3881, 0.1927, 0.8680, + 0.3664, 0.8069, 0.2192, 0.4607, 0.7321, 0.3086, 0.6446, + 0.3613, 0.6584, 0.1132, 0.8934, 0.0915, 0.5183, 0.8634, + 0.8577, 0.6923, 0.8618, 0.6853, 0.8256, 0.8384, 0.2295, + 0.3689, 0.5760, 0.2086, 0.3564, 0.8483, 0.6783, 0.7902, + 0.6082, 0.2549, 0.6415, 0.4994, 0.2078, 0.5032, 0.3790, + 0.4926, 0.0907, 0.6911, 0.2975, 0.8561, 0.2283, 0.4530, + 0.2236, 0.5933, 0.7581, 0.3982, 0.1913, 0.0682, 0.2359, + 0.4488, 0.3220, 0.4875, 0.7597, 0.7930, 0.2062, 0.4708, + 0.1870, 0.0373, 0.0367, 0.8306, 0.1947, 0.2315, 0.0207, + 0.8163, 0.7626, 0.9301, 0.8099, 0.3633, 0.5641, 0.8120, + 0.6141, 0.5594, 0.4658, 0.2150, 0.1654, 0.3051, 0.0343, + 0.3400, 0.5978, 0.5354, 0.0263, 0.3151, 0.8046, 0.5327, + 0.3066, 0.2487, 0.3111, 0.0893, 0.1612, 0.5907, 0.3107, + 0.8584, 0.8229, 0.4034, 0.2252, 0.3659, 0.2053, 0.7999, + 0.7701, 0.7502, 0.7518, 0.5690, 0.1322, 0.1586, 0.1160, + 0.9796, 0.3007, 0.1296, 0.7414, 0.1813, 0.4708, 0.0361, + 0.8196, 0.1266, 0.8025, 0.2789, 0.7870, 0.4347, 0.9531, + 0.2674, 0.9698, 0.4764, 0.0519, 0.6509, 0.3234, 0.4769, + 0.3125, 0.1997, 0.6900, 0.7719, 0.1567, 0.8227, 0.8361, + 0.3658, 0.9962, 0.8049, 0.2839, 0.0239, 0.5230, 0.1679, + 0.0012, 0.1941, 0.1825, 0.6328, 0.8021, 0.7872, 0.5540, + 0.0997, 0.9139, 0.1493, 0.1197, 0.6714, 0.2685, 0.5289, + 0.4741, 0.4375, 0.4510, 0.6505, 0.0060, 0.0804, 0.3052, + 0.4065, 0.1432, 0.8715, 0.0973, 0.2004, 0.0400, 0.2303, + 0.6891, 0.4089, 0.8692, 0.1440, 0.3744, 0.8894, 0.4839, + 0.5594, 0.2863, 0.2636, 0.1806, 0.4900, 0.9233, 0.0287, + 0.1519, 0.0238, 0.9718, 0.0696, 0.4915, 0.4842, 0.1594, + 0.7936, 0.6860, 0.5880, 0.4352, 0.0092, 0.6365, 0.1016, + 0.1299, 0.9522, 0.9906, 0.4238, 0.2435, 0.5165, 0.1731, + 0.6409, 0.2362, 0.6101, 0.1717, 0.7603, 0.4938, 0.2723, + 0.7599, 0.9112, 0.0268, 0.9393, 0.8969, 0.3631, 0.2998, + 0.3500, 0.7422, 0.9345, 0.9975, 
0.0517, 0.1544, 0.6201, + 0.5972, 0.3434, 0.9546, 0.0842, 0.2284, 0.4080, 0.3233, + 0.9813, 0.3166, 0.9934, 0.5541, 0.2131, 0.8230, 0.1841, + 0.3426, 0.7598, 0.6218, 0.4928, 0.7872, 0.4823, 0.5321, + 0.2430, 0.9717, 0.6158, 0.5835, 0.2198, 0.3750, 0.8817, + 0.8977, 0.6208, 0.4204, 0.5681, 0.7034, 0.0297, 0.1272, + 0.9109, 0.2850, 0.6202, 0.3491, 0.4077, 0.0265, 0.9469, + 0.5186, 0.0530, 0.5136, 0.4687, 0.4898, 0.2296, 0.6555, + 0.6814, 0.2474, 0.5224, 0.6744, 0.7214, 0.5305, 0.5181, + 0.3443, 0.7930, 0.3254, 0.8409, 0.0232, 0.8876, 0.4606, + 0.8758, 0.4816, 0.3285, 0.3194, 0.0473, 0.5240, 0.4558, + 0.2643, 0.3823, 0.3375, 0.5496, 0.8842, 0.2643, 0.8295, + 0.0774, 0.7973, 0.2342, 0.2846, 0.4365, 0.5561, 0.8911, + 0.9974, 0.3818, 0.8914, 0.6534, 0.3445, 0.0570, 0.5409, + 0.8934, 0.8748, 0.8762, 0.1700, 0.1286, 0.5886, 0.2604, + 0.5122, 0.3910, 0.6718, 0.8386, 0.7880, 0.7735, 0.8382, + 0.7683, 0.8908, 0.5226, 0.8048, 0.4884, 0.3660, 0.1809, + 0.2335, 0.6855, 0.8170, 0.5295, 0.7842, 0.7798, 0.2910, + 0.3853, 0.8829, 0.7835, 0.1690, 0.4663, 0.2045, 0.1035, + 0.4997, 0.5297, 0.1289, 0.5179, 0.0606, 0.8583, 0.3048, + 0.7708, 0.1893, 0.6309, 0.7686, 0.9828, 0.9048, 0.3841, + 0.2185, 0.6435, 0.4478, 0.9468, 0.8737, 0.2111, 0.1349, + 0.9495, 0.4091, 0.3367, 0.4818, 0.6928, 0.9558, 0.4398, + 0.5439, 0.7708, 0.4337, 0.4611, 0.2583, 0.1955, 0.4439, + 0.4445, 0.7809, 0.8863, 0.3069, 0.4805, 0.8381, 0.8548, + 0.5700, 0.1066, 0.1066, 0.8937, 0.5723, 0.6920, 0.0327, + 0.2123, 0.4301, 0.6855, 0.9118, 0.1103, 0.8642, 0.3839, + 0.6499, 0.7405, 0.8262, 0.1273, 0.8596, 0.1214, 0.5538, + 0.2683, 0.7685, 0.0380, 0.9668, 0.3679, 0.4231, 0.3206, + 0.2939, 0.2000, 0.2628, 0.6644, 0.0046, 0.4634, 0.6009, + 0.9618, 0.0916, 0.1533, 0.8964, 0.0750, 0.6530, 0.7420, + 0.0171, 0.1412, 0.9378, 0.1278, 0.5952, 0.5957, 0.1602, + 0.4569, 0.6997, 0.3923, 0.9702, 0.9204, 0.6140, 0.5178, + 0.9051, 0.2480, 0.1154, 0.8068, 0.0751, 0.2951, 0.2063, + 0.5678, 0.8248, 0.3371, 0.6619, 0.2068, 0.7570, 0.0168, + 0.6561, 0.1487, 0.6018, 0.3884, 0.9519, 0.9836, 0.1373, + 0.8836, 0.8831, 0.2552, 0.0736, 0.2226, 0.3008, 0.4150, + 0.1279, 0.9067, 0.1991, 0.2146, 0.2746, 0.7514, 0.1652, + 0.6727, 0.7846, 0.1471, 0.6132, 0.2493, 0.3325, 0.1885, + 0.6417, 0.6590, 0.4757, 0.4061, 0.4359, 0.7236, 0.4441, + 0.8643, 0.8479, 0.1003, 0.0185, 0.7836, 0.3159, 0.9798, + 0.5805, 0.8284, 0.5046, 0.3086, 0.3844, 0.2360, 0.1980, + 0.4207, 0.9796, 0.1878, 0.5333, 0.3240, 0.4450, 0.8072, + 0.9168, 0.9993, 0.1170, 0.7410, 0.7954, 0.0307, 0.0989, + 0.1791, 0.2914, 0.1820, 0.6230, 0.6850, 0.2813, 0.1157, + 0.9867, 0.0737, 0.8029, 0.1015, 0.4682, 0.7620, 0.7696, + 0.1064, 0.5765, 0.9709, 0.1174, 0.5832, 0.0978, 0.3568, + 0.0730, 0.0563, 0.7057, 0.7628, 0.0959, 0.4747, 0.3801, + 0.4508, 0.6394, 0.2715, 0.6105, 0.9926, 0.0896, 0.9574, + 0.6793, 0.6180, 0.4648, 0.2921, 0.3386, 0.6820, 0.5212, + 0.2154, 0.4717, 0.5014, 0.2617, 0.2977, 0.8006, 0.3353, + 0.8470, 0.7611, 0.1607, 0.8669, 0.7715, 0.5232, 0.4841, + 0.9115, 0.0228, 0.4693, 0.3305, 0.8215, 0.0869, 0.1824, + 0.6579, 0.0833, 0.9613, 0.4715, 0.3285, 0.5471, 0.6905, + 0.3957, 0.1840, 0.6202, 0.6851, 0.4733, 0.5934, 0.1386, + 0.7851, 0.7342, 0.6370, 0.2348, 0.9459, 0.2391, 0.7316, + 0.0941, 0.4717, 0.1253, 0.8566, 0.1216, 0.7571, 0.4196, + 0.8179, 0.8132, 0.2885, 0.9707, 0.1275, 0.3342, 0.3134, + 0.7312, 0.8352, 0.1365, 0.9373, 0.3091, 0.3749, 0.0663, + 0.0702, 0.0409, 0.7695, 0.6795, 0.1004, 0.2734, 0.4594, + 0.7910, 0.2158, 0.2145, 0.3182, 0.4406, 0.5618, 0.1494, + 0.2911, 0.5915, 0.9878, 0.1365, 0.7549, 0.8342, 
0.9059, + 0.4562, 0.0558, 0.1508, 0.1673, 0.9872, 0.7096, 0.7520, + 0.1691, 0.3662, 0.9410, 0.2625, 0.1475, 0.8645, 0.3039, + 0.2029, 0.3516, 0.6215, 0.5740, 0.4584, 0.2225, 0.4006, + 0.3128, 0.0380, 0.6050, 0.2938, 0.0878, 0.3289, 0.8456, + 0.2525, 0.9209, 0.0593, 0.6809, 0.3115, 0.4275, 0.2499, + 0.9295, 0.5425, 0.4391, 0.3831, 0.4593, 0.7149, 0.1921, + 0.1184, 0.2786, 0.9624, 0.1167, 0.0044, 0.4051, 0.0108, + 0.4520, 0.6056, 0.9254, 0.0610, 0.3916, 0.2002, 0.0181, + 0.2959, 0.8354, 0.8965, 0.7722, 0.6322, 0.5425, 0.0976, + 0.8386, 0.2720, 0.5773, 0.9188, 0.0410, 0.3565, 0.1415, + 0.0175, 0.2161, 0.8758, 0.7699, 0.2833, 0.0538, 0.3260, + 0.5205, 0.1162, 0.9185, 0.1645, 0.4861, 0.6898, 0.0238, + 0.7657, 0.0799, 0.4505, 0.6300, 0.6548, 0.1225, 0.0206, + 0.5211, 0.1829, 0.9455, 0.5775, 0.0847, 0.2663, 0.6607, + 0.9023, 0.5472, 0.7809, 0.3315, 0.1532, 0.1912, 0.3343, + 0.7726, 0.2157, 0.2423, 0.7378, 0.9800, 0.4469, 0.4539, + 0.9687, 0.0064, 0.0441, 0.0697, 0.5833, 0.6814, 0.6849, + 0.0435, 0.1250, 0.2613, 0.0854, 0.3080, 0.5157, 0.4405, + 0.5866, 0.4456, 0.1962, 0.6798, 0.4460, 0.0218, 0.8899, + 0.8373, 0.1209, 0.8163, 0.3718, 0.6930, 0.1628, 0.4197, + 0.2782, 0.5692, 0.9005, 0.5938, 0.2539, 0.8654, 0.7168, + 0.9464, 0.8460, 0.4902, 0.5805, 0.0640, 0.5710, 0.7328, + 0.9874, 0.0901, 0.6221, 0.7762, 0.9765, 0.8525]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4976, 0.0481, 0.1913, ..., 0.0301, 0.3766, 0.0826]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.560847282409668 seconds + +[20.4, 20.16, 20.52, 20.6, 20.92, 20.92, 20.96, 20.76, 20.68, 20.44] +[20.4, 20.52, 20.92, 23.12, 24.52, 26.12, 26.6, 26.16, 24.72, 24.72, 23.36, 23.32, 23.4, 23.52] +14.230781316757202 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 145476, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.560847282409668, 'TIME_S_1KI': 0.07259511728676668, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.8679765701293, 'W': 22.125839021878733} +[20.4, 20.16, 20.52, 20.6, 20.92, 20.92, 20.96, 20.76, 20.68, 20.44, 20.2, 20.44, 20.48, 20.56, 20.52, 20.48, 20.4, 20.4, 20.4, 20.4] +369.9200000000001 +18.496000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 145476, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.560847282409668, 'TIME_S_1KI': 0.07259511728676668, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.8679765701293, 'W': 22.125839021878733, 'J_1KI': 2.1643980902013342, 'W_1KI': 0.15209270960075016, 'W_D': 3.6298390218787304, 'J_D': 51.65544533538807, 'W_D_1KI': 0.024951462934633413, 'J_D_1KI': 0.00017151600906426773} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..2bcb239 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 52342, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 
10.44128155708313, "TIME_S_1KI": 0.19948189899283808, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 334.7413140869141, "W": 23.567352524316387, "J_1KI": 6.395271752835469, "W_1KI": 0.4502570120422679, "W_D": 3.4093525243163825, "J_D": 48.42508902931206, "W_D_1KI": 0.06513607665577133, "J_D_1KI": 0.0012444323231013588} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..0a68cee --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.027875900268554688} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), + col_indices=tensor([4151, 7566, 61, ..., 1923, 6890, 8738]), + values=tensor([0.6199, 0.1524, 0.8589, ..., 0.4429, 0.5764, 0.1533]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.5335, 0.6247, 0.4039, ..., 0.6064, 0.4993, 0.4017]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.027875900268554688 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 37666 -ss 10000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.555848598480225} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 5000, 5000]), + col_indices=tensor([4832, 7617, 3198, ..., 2337, 8239, 2535]), + values=tensor([0.0012, 0.2497, 0.5477, ..., 0.0331, 0.3343, 0.4565]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.1414, 0.6293, 0.2915, ..., 0.6179, 0.0556, 0.9688]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 7.555848598480225 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 52342 -ss 10000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.44128155708313} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4999, 5000, 5000]), + col_indices=tensor([6929, 6481, 6208, ..., 5185, 5914, 4436]), + values=tensor([0.2292, 0.3731, 0.2148, ..., 0.4978, 0.6385, 0.1071]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.7171, 0.2412, 0.9457, ..., 0.4356, 0.0163, 0.8101]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.44128155708313 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4999, 5000, 5000]), + col_indices=tensor([6929, 6481, 6208, ..., 5185, 5914, 4436]), + values=tensor([0.2292, 0.3731, 0.2148, ..., 0.4978, 0.6385, 0.1071]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.7171, 0.2412, 0.9457, ..., 0.4356, 0.0163, 0.8101]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.44128155708313 seconds + +[25.48, 25.4, 25.4, 24.8, 24.68, 24.36, 24.4, 24.16, 24.24, 23.68] +[23.36, 22.52, 25.48, 26.0, 27.84, 27.84, 27.8, 28.32, 24.88, 24.76, 23.72, 23.52, 23.72, 23.68] +14.20360279083252 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 52342, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.44128155708313, 'TIME_S_1KI': 0.19948189899283808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 334.7413140869141, 'W': 23.567352524316387} +[25.48, 25.4, 25.4, 24.8, 24.68, 24.36, 24.4, 24.16, 24.24, 23.68, 20.0, 19.92, 19.76, 20.2, 20.12, 20.12, 20.36, 20.24, 20.24, 20.36] +403.1600000000001 +20.158000000000005 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 52342, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.44128155708313, 'TIME_S_1KI': 0.19948189899283808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 334.7413140869141, 'W': 23.567352524316387, 'J_1KI': 6.395271752835469, 'W_1KI': 0.4502570120422679, 'W_D': 3.4093525243163825, 'J_D': 48.42508902931206, 'W_D_1KI': 0.06513607665577133, 'J_D_1KI': 0.0012444323231013588} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..ece3918 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 137, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.396055936813354, "TIME_S_1KI": 75.88361997673981, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 753.5857475948335, "W": 33.1694657622459, "J_1KI": 5500.625894852799, "W_1KI": 242.11288877551752, "W_D": 14.2684657622459, "J_D": 324.16899673771866, "W_D_1KI": 104.14938512588249, "J_D_1KI": 760.2144899699452} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..5419742 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, 
"TIME_S": 7.629654169082642} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 44, 92, ..., 24999905, + 24999955, 25000000]), + col_indices=tensor([ 2191, 6192, 41052, ..., 471066, 488040, + 493296]), + values=tensor([0.3986, 0.5227, 0.3241, ..., 0.9261, 0.7192, 0.3287]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.0957, 0.3468, 0.1431, ..., 0.5849, 0.2942, 0.3782]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 7.629654169082642 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 137 -ss 500000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.396055936813354} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 94, ..., 24999919, + 24999951, 25000000]), + col_indices=tensor([ 2485, 12624, 22152, ..., 462150, 467889, + 476331]), + values=tensor([0.9572, 0.0985, 0.5455, ..., 0.5648, 0.8530, 0.8208]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3242, 0.9080, 0.9457, ..., 0.2147, 0.3332, 0.4113]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.396055936813354 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 94, ..., 24999919, + 24999951, 25000000]), + col_indices=tensor([ 2485, 12624, 22152, ..., 462150, 467889, + 476331]), + values=tensor([0.9572, 0.0985, 0.5455, ..., 0.5648, 0.8530, 0.8208]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3242, 0.9080, 0.9457, ..., 0.2147, 0.3332, 0.4113]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.396055936813354 seconds + +[20.64, 20.72, 20.84, 20.96, 21.08, 21.36, 21.56, 21.56, 21.36, 21.36] +[21.36, 21.04, 20.8, 24.16, 25.16, 26.72, 28.8, 27.16, 30.52, 29.84, 29.76, 29.92, 29.92, 30.28, 32.88, 38.36, 44.52, 49.04, 53.12, 53.28, 53.2, 52.84] +22.719260931015015 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 137, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.396055936813354, 'TIME_S_1KI': 75.88361997673981, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.5857475948335, 'W': 33.1694657622459} +[20.64, 20.72, 20.84, 20.96, 21.08, 21.36, 21.56, 21.56, 21.36, 21.36, 20.8, 20.84, 20.88, 21.0, 21.04, 20.84, 20.96, 20.68, 20.68, 20.52] +378.02 +18.901 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 137, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.396055936813354, 'TIME_S_1KI': 75.88361997673981, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.5857475948335, 'W': 33.1694657622459, 'J_1KI': 5500.625894852799, 'W_1KI': 242.11288877551752, 'W_D': 14.2684657622459, 'J_D': 324.16899673771866, 'W_D_1KI': 104.14938512588249, 'J_D_1KI': 760.2144899699452} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..28ed47d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1548, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.061279058456421, "TIME_S_1KI": 7.145529107529987, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 576.3272948646545, "W": 36.84956710355697, "J_1KI": 372.3044540469344, "W_1KI": 23.80462991185851, "W_D": 18.03556710355697, "J_D": 282.0763014917373, "W_D_1KI": 11.65088314183267, "J_D_1KI": 7.526410298341518} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..ddb3af3 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, 
"MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.6782102584838867} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 13, ..., 2499990, + 2499997, 2500000]), + col_indices=tensor([ 40175, 122073, 147940, ..., 245767, 297950, + 495791]), + values=tensor([0.1248, 0.8645, 0.7112, ..., 0.2227, 0.8085, 0.2637]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3055, 0.1588, 0.3916, ..., 0.3608, 0.8122, 0.4114]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 0.6782102584838867 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1548 -ss 500000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.061279058456421} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 4, ..., 2499989, + 2499994, 2500000]), + col_indices=tensor([ 33871, 87157, 252512, ..., 380315, 410804, + 497208]), + values=tensor([0.0607, 0.8545, 0.0688, ..., 0.9965, 0.6178, 0.6113]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3094, 0.9384, 0.5289, ..., 0.5205, 0.7717, 0.7334]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.061279058456421 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 4, ..., 2499989, + 2499994, 2500000]), + col_indices=tensor([ 33871, 87157, 252512, ..., 380315, 410804, + 497208]), + values=tensor([0.0607, 0.8545, 0.0688, ..., 0.9965, 0.6178, 0.6113]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3094, 0.9384, 0.5289, ..., 0.5205, 0.7717, 0.7334]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.061279058456421 seconds + +[20.72, 20.76, 21.12, 20.92, 20.92, 20.92, 20.88, 20.44, 20.48, 20.48] +[20.56, 20.56, 21.24, 22.4, 24.48, 28.6, 36.72, 41.88, 48.52, 48.52, 52.36, 52.92, 53.48, 53.4, 53.48] +15.640001773834229 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1548, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.061279058456421, 'TIME_S_1KI': 7.145529107529987, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 576.3272948646545, 'W': 36.84956710355697} +[20.72, 20.76, 21.12, 20.92, 20.92, 20.92, 20.88, 20.44, 20.48, 20.48, 20.6, 20.68, 20.88, 21.24, 21.48, 21.32, 21.48, 20.96, 20.64, 20.52] +376.28 +18.814 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1548, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.061279058456421, 'TIME_S_1KI': 7.145529107529987, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 576.3272948646545, 'W': 36.84956710355697, 'J_1KI': 372.3044540469344, 'W_1KI': 23.80462991185851, 'W_D': 18.03556710355697, 'J_D': 282.0763014917373, 'W_D_1KI': 11.65088314183267, 'J_D_1KI': 7.526410298341518} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..9620135 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 285, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.060697317123413, "TIME_S_1KI": 38.809464270608466, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 641.8192787170411, "W": 34.323302144261945, "J_1KI": 2251.9974691826005, "W_1KI": 120.43263910267349, "W_D": 15.818302144261942, "J_D": 295.7900504469872, "W_D_1KI": 55.50281454126997, "J_D_1KI": 194.74671768866656} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..0f40f5a --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 
12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.6834404468536377} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 36, 64, ..., 12499962, + 12499977, 12500000]), + col_indices=tensor([ 6540, 37225, 45963, ..., 476281, 491551, + 491729]), + values=tensor([0.4995, 0.3434, 0.2289, ..., 0.9980, 0.3953, 0.2839]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.6494, 0.0196, 0.1697, ..., 0.1655, 0.3294, 0.8926]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 3.6834404468536377 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 285 -ss 500000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.060697317123413} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 30, 51, ..., 12499947, + 12499979, 12500000]), + col_indices=tensor([ 32854, 40713, 51141, ..., 464012, 471829, + 496055]), + values=tensor([0.7704, 0.8573, 0.2864, ..., 0.9432, 0.9508, 0.7094]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9627, 0.2273, 0.6691, ..., 0.1238, 0.9472, 0.0057]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 11.060697317123413 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 30, 51, ..., 12499947, + 12499979, 12500000]), + col_indices=tensor([ 32854, 40713, 51141, ..., 464012, 471829, + 496055]), + values=tensor([0.7704, 0.8573, 0.2864, ..., 0.9432, 0.9508, 0.7094]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9627, 0.2273, 0.6691, ..., 0.1238, 0.9472, 0.0057]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 11.060697317123413 seconds + +[20.52, 20.84, 20.92, 20.88, 20.88, 20.72, 20.52, 20.32, 20.36, 20.2] +[20.44, 20.6, 21.48, 21.48, 23.84, 25.24, 27.32, 30.28, 29.76, 32.76, 37.84, 41.44, 46.92, 51.96, 52.12, 52.64, 52.6, 52.8] +18.699228763580322 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 285, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.060697317123413, 'TIME_S_1KI': 38.809464270608466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 641.8192787170411, 'W': 34.323302144261945} +[20.52, 20.84, 20.92, 20.88, 20.88, 20.72, 20.52, 20.32, 20.36, 20.2, 20.52, 20.44, 20.6, 20.44, 20.44, 20.48, 20.48, 20.36, 20.44, 20.72] +370.1 +18.505000000000003 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 285, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.060697317123413, 'TIME_S_1KI': 38.809464270608466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 641.8192787170411, 'W': 34.323302144261945, 'J_1KI': 2251.9974691826005, 'W_1KI': 120.43263910267349, 'W_D': 15.818302144261942, 'J_D': 295.7900504469872, 'W_D_1KI': 55.50281454126997, 'J_D_1KI': 194.74671768866656} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..29a7f3d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 3424, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.731184720993042, "TIME_S_1KI": 3.1341076872059115, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 463.79336466789243, "W": 31.765422510706816, "J_1KI": 135.4536695875854, "W_1KI": 9.27728461177185, "W_D": 13.349422510706813, "J_D": 194.90921553230277, "W_D_1KI": 3.8987799388746534, "J_D_1KI": 1.1386623653255412} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..192c47b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 
2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.4611988067626953} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 9, ..., 249990, 249996, + 250000]), + col_indices=tensor([ 1266, 4071, 18947, ..., 33754, 36171, 46993]), + values=tensor([0.2894, 0.3028, 0.5808, ..., 0.9499, 0.5530, 0.4490]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9097, 0.0887, 0.0049, ..., 0.6179, 0.8641, 0.1772]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.4611988067626953 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2276 -ss 50000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.978086233139038} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 9, ..., 249990, 249995, + 250000]), + col_indices=tensor([ 2233, 6887, 19755, ..., 38632, 41476, 48223]), + values=tensor([0.3109, 0.9167, 0.4160, ..., 0.6671, 0.8506, 0.5777]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7812, 0.4224, 0.5960, ..., 0.2514, 0.6292, 0.3012]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 6.978086233139038 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3424 -ss 50000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.731184720993042} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 7, ..., 249990, 249996, + 250000]), + col_indices=tensor([12104, 14436, 24112, ..., 12878, 32819, 38734]), + values=tensor([0.5759, 0.9600, 0.3696, ..., 0.0040, 0.7766, 0.9665]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4011, 0.3434, 0.3941, ..., 0.7256, 0.6030, 0.5117]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.731184720993042 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 7, ..., 249990, 249996, + 250000]), + col_indices=tensor([12104, 14436, 24112, ..., 12878, 32819, 38734]), + values=tensor([0.5759, 0.9600, 0.3696, ..., 0.0040, 0.7766, 0.9665]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4011, 0.3434, 0.3941, ..., 0.7256, 0.6030, 0.5117]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.731184720993042 seconds + +[20.72, 20.68, 20.48, 20.6, 20.64, 20.32, 20.32, 20.36, 20.32, 20.4] +[20.52, 20.76, 22.24, 22.24, 24.16, 27.64, 32.84, 38.08, 39.84, 44.4, 44.52, 43.96, 44.12, 44.28] +14.60057282447815 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3424, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.731184720993042, 'TIME_S_1KI': 3.1341076872059115, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 463.79336466789243, 'W': 31.765422510706816} +[20.72, 20.68, 20.48, 20.6, 20.64, 20.32, 20.32, 20.36, 20.32, 20.4, 20.68, 20.44, 20.48, 20.48, 20.2, 20.32, 20.44, 20.44, 20.6, 20.6] +368.32000000000005 +18.416000000000004 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 3424, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.731184720993042, 'TIME_S_1KI': 3.1341076872059115, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 463.79336466789243, 'W': 31.765422510706816, 'J_1KI': 135.4536695875854, 'W_1KI': 9.27728461177185, 'W_D': 13.349422510706813, 'J_D': 194.90921553230277, 'W_D_1KI': 3.8987799388746534, 'J_D_1KI': 1.1386623653255412} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..e699ae2 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 385, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.064192295074463, "TIME_S_1KI": 28.738161805388216, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 468.0283072185517, "W": 
32.037671646494225, "J_1KI": 1215.657940827407, "W_1KI": 83.21473154933565, "W_D": 13.341671646494227, "J_D": 194.90430094528205, "W_D_1KI": 34.65369258829669, "J_D_1KI": 90.00959113843297} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..c964dd6 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.9453940391540527} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 37, 86, ..., 2499902, + 2499952, 2500000]), + col_indices=tensor([ 541, 1139, 1813, ..., 42919, 43072, 44933]), + values=tensor([0.0452, 0.1724, 0.8861, ..., 0.4157, 0.9772, 0.2120]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1045, 0.1557, 0.1178, ..., 0.5894, 0.9079, 0.5773]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 2.9453940391540527 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 356 -ss 50000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.708060026168823} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 46, 99, ..., 2499902, + 2499948, 2500000]), + col_indices=tensor([ 4031, 7226, 7309, ..., 44877, 48582, 49711]), + values=tensor([0.9329, 0.4420, 0.5313, ..., 0.9423, 0.2849, 0.2389]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0088, 0.8123, 0.3302, ..., 0.9483, 0.6171, 0.9552]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 9.708060026168823 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 385 -ss 50000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.064192295074463} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 67, 125, ..., 2499902, + 2499956, 2500000]), + col_indices=tensor([ 1129, 2884, 2891, ..., 49010, 49022, 49816]), + values=tensor([0.8127, 0.7656, 0.2912, ..., 0.8978, 0.1718, 0.1428]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7569, 0.5985, 0.1427, ..., 0.6714, 0.1732, 0.3064]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 11.064192295074463 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 67, 125, ..., 2499902, + 2499956, 2500000]), + col_indices=tensor([ 1129, 2884, 2891, ..., 49010, 49022, 49816]), + values=tensor([0.8127, 0.7656, 0.2912, ..., 0.8978, 0.1718, 0.1428]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7569, 0.5985, 0.1427, ..., 0.6714, 0.1732, 0.3064]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 11.064192295074463 seconds + +[20.72, 20.52, 20.56, 20.48, 20.48, 20.52, 20.56, 20.6, 20.36, 20.6] +[20.48, 20.52, 21.48, 23.4, 25.44, 29.64, 35.04, 38.48, 42.0, 43.12, 43.12, 43.84, 43.68, 43.32] +14.608686685562134 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 385, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.064192295074463, 'TIME_S_1KI': 28.738161805388216, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 468.0283072185517, 'W': 32.037671646494225} +[20.72, 20.52, 20.56, 20.48, 20.48, 20.52, 20.56, 20.6, 20.36, 20.6, 20.84, 20.8, 21.0, 21.24, 21.16, 21.08, 21.28, 20.92, 20.84, 20.88] +373.91999999999996 +18.695999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 385, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.064192295074463, 'TIME_S_1KI': 28.738161805388216, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 468.0283072185517, 'W': 32.037671646494225, 'J_1KI': 1215.657940827407, 'W_1KI': 83.21473154933565, 'W_D': 13.341671646494227, 'J_D': 194.90430094528205, 'W_D_1KI': 34.65369258829669, 'J_D_1KI': 90.00959113843297} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..3c1f90f --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 32.80737018585205, "TIME_S_1KI": 328.0737018585205, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1398.267660713196, "W": 32.405416577679354, "J_1KI": 13982.676607131958, "W_1KI": 324.0541657767935, "W_D": 13.697416577679352, "J_D": 591.0325080986024, "W_D_1KI": 136.97416577679354, "J_D_1KI": 1369.7416577679353} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..0d92e6b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, 
"TIME_S": 32.80737018585205} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 489, 963, ..., 24999055, + 24999529, 25000000]), + col_indices=tensor([ 18, 157, 241, ..., 49747, 49771, 49960]), + values=tensor([0.7706, 0.7949, 0.9210, ..., 0.0962, 0.6322, 0.0053]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2729, 0.2896, 0.6966, ..., 0.6831, 0.4086, 0.6520]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 32.80737018585205 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 489, 963, ..., 24999055, + 24999529, 25000000]), + col_indices=tensor([ 18, 157, 241, ..., 49747, 49771, 49960]), + values=tensor([0.7706, 0.7949, 0.9210, ..., 0.0962, 0.6322, 0.0053]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2729, 0.2896, 0.6966, ..., 0.6831, 0.4086, 0.6520]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 32.80737018585205 seconds + +[20.96, 20.68, 20.68, 20.68, 20.72, 20.64, 20.64, 20.76, 20.84, 20.92] +[20.92, 20.84, 21.32, 22.6, 25.12, 26.44, 26.44, 28.96, 28.92, 32.68, 31.76, 31.16, 31.4, 31.52, 32.12, 34.6, 36.28, 37.8, 37.28, 37.56, 37.96, 37.96, 38.28, 38.36, 37.96, 37.32, 36.48, 36.68, 36.84, 37.44, 37.44, 37.76, 38.0, 39.0, 37.68, 36.84, 37.4, 37.4, 36.8, 37.32, 37.6, 37.72] +43.14919567108154 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 32.80737018585205, 'TIME_S_1KI': 328.0737018585205, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1398.267660713196, 'W': 32.405416577679354} +[20.96, 20.68, 20.68, 20.68, 20.72, 20.64, 20.64, 20.76, 20.84, 20.92, 21.04, 20.84, 20.72, 20.68, 20.72, 20.6, 20.92, 21.2, 20.96, 20.84] +374.16 +18.708000000000002 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 32.80737018585205, 'TIME_S_1KI': 328.0737018585205, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1398.267660713196, 'W': 32.405416577679354, 'J_1KI': 13982.676607131958, 'W_1KI': 324.0541657767935, 'W_D': 13.697416577679352, 'J_D': 591.0325080986024, 'W_D_1KI': 136.97416577679354, 'J_D_1KI': 1369.7416577679353} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_1e-05.json 
b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..a4de095 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 19951, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.462696075439453, "TIME_S_1KI": 0.5244196318700542, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 488.71538955688476, "W": 33.3802333465878, "J_1KI": 24.49578414900931, "W_1KI": 1.6731107887618566, "W_D": 15.011233346587801, "J_D": 219.77739569807053, "W_D_1KI": 0.752405059725718, "J_D_1KI": 0.03771264897627778} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..06a37e6 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05949115753173828} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24999, 25000]), + col_indices=tensor([35821, 49411, 3789, ..., 32092, 27347, 39445]), + values=tensor([0.1439, 0.1701, 0.0383, ..., 0.6521, 0.3755, 0.5678]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8709, 0.6173, 0.3475, ..., 0.7020, 0.1451, 0.7453]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.05949115753173828 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17649 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.288093090057373} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24999, 25000, 25000]), + col_indices=tensor([10903, 22613, 1325, ..., 4616, 25772, 38217]), + values=tensor([0.1548, 0.5404, 0.0562, ..., 0.6796, 0.5534, 0.6437]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8066, 0.3465, 0.3699, ..., 0.5654, 0.2544, 0.1290]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.288093090057373 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 19951 -ss 50000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.462696075439453} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24997, 24998, 25000]), + col_indices=tensor([36526, 27522, 9271, ..., 28337, 20494, 41611]), + values=tensor([0.2838, 0.5711, 0.3512, ..., 0.1758, 0.7475, 0.3339]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9803, 0.0496, 0.4924, ..., 0.5397, 0.0486, 0.3592]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.462696075439453 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24997, 24998, 25000]), + col_indices=tensor([36526, 27522, 9271, ..., 28337, 20494, 41611]), + values=tensor([0.2838, 0.5711, 0.3512, ..., 0.1758, 0.7475, 0.3339]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9803, 0.0496, 0.4924, ..., 0.5397, 0.0486, 0.3592]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.462696075439453 seconds + +[20.44, 20.56, 20.6, 20.64, 20.64, 20.44, 20.44, 20.44, 20.4, 20.56] +[20.44, 20.64, 23.28, 24.2, 26.6, 32.52, 37.24, 40.44, 44.28, 44.96, 44.96, 44.92, 44.44, 43.84] +14.640861988067627 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 19951, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.462696075439453, 'TIME_S_1KI': 0.5244196318700542, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 488.71538955688476, 'W': 33.3802333465878} +[20.44, 20.56, 20.6, 20.64, 20.64, 20.44, 20.44, 20.44, 20.4, 20.56, 20.32, 20.32, 20.12, 20.24, 20.32, 20.56, 20.52, 20.28, 20.2, 20.0] +367.38 +18.369 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 19951, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.462696075439453, 'TIME_S_1KI': 0.5244196318700542, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 488.71538955688476, 'W': 33.3802333465878, 'J_1KI': 24.49578414900931, 'W_1KI': 1.6731107887618566, 'W_D': 15.011233346587801, 'J_D': 219.77739569807053, 'W_D_1KI': 0.752405059725718, 'J_D_1KI': 0.03771264897627778} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..cff7542 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 6322, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.657772779464722, "TIME_S_1KI": 1.68582296416715, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 425.8721401214599, "W": 31.344652680689325, "J_1KI": 67.3635147297469, "W_1KI": 4.958027946961298, "W_D": 12.573652680689325, "J_D": 170.83514789009087, "W_D_1KI": 1.9888726163697126, "J_D_1KI": 0.31459547870447846} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..57f8461 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.3423471450805664} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 124998, 124999, + 125000]), + col_indices=tensor([ 303, 26221, 28347, ..., 8622, 14261, 4291]), + values=tensor([0.9240, 0.5223, 0.0365, ..., 0.6044, 0.0072, 0.5479]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.1523, 0.9417, 0.1754, ..., 0.6908, 0.2427, 0.5501]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.3423471450805664 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3067 -ss 50000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 5.0935986042022705} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 124997, 125000, + 125000]), + col_indices=tensor([ 1194, 5034, 6320, ..., 11179, 21504, 33093]), + values=tensor([0.7209, 0.3055, 0.4482, ..., 0.3076, 0.8643, 0.0918]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.9680, 0.6265, 0.9723, ..., 0.1304, 0.1284, 0.7215]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 5.0935986042022705 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 6322 -ss 50000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.657772779464722} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 124992, 124996, + 125000]), + col_indices=tensor([41720, 5446, 23409, ..., 23991, 37197, 42632]), + values=tensor([0.7857, 0.2010, 0.0929, ..., 0.8446, 0.3352, 0.3559]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.5851, 0.1828, 0.1733, ..., 0.7326, 0.4663, 0.8685]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.657772779464722 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 124992, 124996, + 125000]), + col_indices=tensor([41720, 5446, 23409, ..., 23991, 37197, 42632]), + values=tensor([0.7857, 0.2010, 0.0929, ..., 0.8446, 0.3352, 0.3559]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.5851, 0.1828, 0.1733, ..., 0.7326, 0.4663, 0.8685]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.657772779464722 seconds + +[20.44, 20.32, 20.44, 20.56, 20.76, 20.84, 20.88, 21.04, 21.08, 21.08] +[21.08, 20.88, 20.92, 24.6, 25.64, 31.32, 35.8, 37.92, 41.28, 43.2, 43.12, 43.4, 43.48] +13.586755752563477 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 6322, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.657772779464722, 'TIME_S_1KI': 1.68582296416715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 425.8721401214599, 'W': 31.344652680689325} +[20.44, 20.32, 20.44, 20.56, 20.76, 20.84, 20.88, 21.04, 21.08, 21.08, 20.84, 20.88, 20.84, 20.72, 20.96, 20.96, 21.08, 21.16, 21.2, 21.04] +375.42 +18.771 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 6322, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.657772779464722, 'TIME_S_1KI': 1.68582296416715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 425.8721401214599, 'W': 31.344652680689325, 'J_1KI': 67.3635147297469, 'W_1KI': 4.958027946961298, 'W_D': 12.573652680689325, 'J_D': 170.83514789009087, 'W_D_1KI': 1.9888726163697126, 'J_D_1KI': 0.31459547870447846} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..3609346 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 98325, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.519834756851196, "TIME_S_1KI": 0.10699043739487614, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 316.21768825531, "W": 22.200046075478067, "J_1KI": 3.216045647142741, 
"W_1KI": 0.22578231452304162, "W_D": 3.72304607547807, "J_D": 53.03110719919203, "W_D_1KI": 0.03786469438574187, "J_D_1KI": 0.0003850973240350051} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..555507d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,82 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.018892765045166016} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), + col_indices=tensor([3456, 1605, 749, ..., 2516, 837, 4620]), + values=tensor([0.8429, 0.4221, 0.2092, ..., 0.3256, 0.3578, 0.9398]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.1595, 0.2560, 0.8545, ..., 0.4673, 0.4412, 0.6412]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.018892765045166016 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 55576 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.934850692749023} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 2499, 2500, 2500]), + col_indices=tensor([2304, 3497, 2599, ..., 3517, 2336, 3180]), + values=tensor([7.9793e-01, 1.3489e-04, 7.1193e-01, ..., + 7.4115e-01, 8.0632e-01, 9.8789e-03]), size=(5000, 5000), + nnz=2500, layout=torch.sparse_csr) +tensor([0.4232, 0.5545, 0.0889, ..., 0.2237, 0.6245, 0.5041]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 5.934850692749023 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 98325 -ss 5000 -sd 0.0001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.519834756851196} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([ 417, 1523, 4116, ..., 1599, 2107, 3220]), + values=tensor([0.7284, 0.4903, 0.1270, ..., 0.3684, 0.2323, 0.2388]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8570, 0.2399, 0.2271, ..., 0.1785, 0.2270, 0.3588]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.519834756851196 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([ 417, 1523, 4116, ..., 1599, 2107, 3220]), + values=tensor([0.7284, 0.4903, 0.1270, ..., 0.3684, 0.2323, 0.2388]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8570, 0.2399, 0.2271, ..., 0.1785, 0.2270, 0.3588]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.519834756851196 seconds + +[20.24, 20.24, 20.16, 20.08, 20.32, 20.6, 20.6, 20.56, 20.52, 20.6] +[20.68, 20.8, 21.12, 22.88, 24.6, 25.72, 26.32, 26.12, 25.12, 25.12, 23.36, 23.52, 23.52, 23.76] +14.24401044845581 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 98325, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.519834756851196, 'TIME_S_1KI': 0.10699043739487614, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.21768825531, 'W': 22.200046075478067} +[20.24, 20.24, 20.16, 20.08, 20.32, 20.6, 20.6, 20.56, 20.52, 20.6, 20.68, 20.68, 20.72, 20.52, 20.44, 20.56, 20.8, 20.84, 20.76, 20.76] +369.53999999999996 +18.476999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 98325, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.519834756851196, 'TIME_S_1KI': 0.10699043739487614, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.21768825531, 'W': 22.200046075478067, 'J_1KI': 3.216045647142741, 'W_1KI': 0.22578231452304162, 'W_D': 3.72304607547807, 'J_D': 53.03110719919203, 'W_D_1KI': 0.03786469438574187, 'J_D_1KI': 0.0003850973240350051} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..2fa1f21 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 17780, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.605318784713745, "TIME_S_1KI": 0.5964746223123591, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 313.57909806251524, "W": 22.019197605482734, "J_1KI": 17.636619688555413, "W_1KI": 1.2384250621756319, "W_D": 3.4661976054827335, "J_D": 49.36270332407948, "W_D_1KI": 0.19494924665257218, "J_D_1KI": 0.010964524558637355} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..8d3b43b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06766819953918457} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 15, ..., 24992, 24996, 25000]), + col_indices=tensor([ 734, 800, 1880, ..., 3125, 3280, 3794]), + values=tensor([0.0540, 0.4911, 0.3592, ..., 0.2590, 0.5736, 0.3057]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9823, 0.9343, 0.9377, ..., 0.0786, 0.0908, 0.1511]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.06766819953918457 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 15516 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.162637948989868} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 11, ..., 24988, 24995, 25000]), + col_indices=tensor([ 62, 227, 575, ..., 2337, 2631, 3700]), + values=tensor([0.5265, 0.4146, 0.5026, ..., 0.0706, 0.1241, 0.5991]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6610, 0.4053, 0.0257, ..., 0.7779, 0.2973, 0.6422]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.162637948989868 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17780 -ss 5000 -sd 0.001 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.605318784713745} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 8, ..., 24994, 24997, 25000]), + col_indices=tensor([ 423, 1662, 2124, ..., 288, 1379, 2658]), + values=tensor([0.1096, 0.1453, 0.3978, ..., 0.4089, 0.5724, 0.6122]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.2174, 0.6127, 0.5782, ..., 0.6057, 0.7055, 0.7233]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.605318784713745 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 8, ..., 24994, 24997, 25000]), + col_indices=tensor([ 423, 1662, 2124, ..., 288, 1379, 2658]), + values=tensor([0.1096, 0.1453, 0.3978, ..., 0.4089, 0.5724, 0.6122]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.2174, 0.6127, 0.5782, ..., 0.6057, 0.7055, 0.7233]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.605318784713745 seconds + +[20.64, 20.56, 20.28, 20.4, 20.28, 20.28, 20.36, 20.44, 20.4, 20.4] +[20.64, 20.56, 20.76, 22.16, 23.52, 25.36, 26.08, 26.2, 25.48, 23.84, 23.92, 23.84, 23.84, 23.88] +14.24116826057434 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 17780, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.605318784713745, 'TIME_S_1KI': 0.5964746223123591, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 313.57909806251524, 'W': 22.019197605482734} +[20.64, 20.56, 20.28, 20.4, 20.28, 20.28, 20.36, 20.44, 20.4, 20.4, 20.84, 20.68, 20.92, 20.96, 20.88, 20.88, 21.0, 20.8, 20.68, 20.64] +371.06 +18.553 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 17780, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.605318784713745, 'TIME_S_1KI': 0.5964746223123591, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 313.57909806251524, 'W': 22.019197605482734, 'J_1KI': 17.636619688555413, 'W_1KI': 1.2384250621756319, 'W_D': 3.4661976054827335, 'J_D': 49.36270332407948, 'W_D_1KI': 0.19494924665257218, 'J_D_1KI': 0.010964524558637355} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..84660f7 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 1921, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.388477563858032, "TIME_S_1KI": 5.407848809920892, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 312.91259566307076, "W": 21.966357313024826, "J_1KI": 162.89047145396708, "W_1KI": 11.434855446655297, "W_D": 
3.429357313024827, "J_D": 48.85148151707659, "W_D_1KI": 1.785193812089967, "J_D_1KI": 0.9293044310723411} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..1a4afe9 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.593717098236084} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 39, 88, ..., 249897, 249951, + 250000]), + col_indices=tensor([ 1, 41, 120, ..., 4868, 4902, 4963]), + values=tensor([0.6487, 0.6379, 0.3189, ..., 0.3941, 0.1960, 0.9453]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9493, 0.7713, 0.4212, ..., 0.5345, 0.1694, 0.1229]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.593717098236084 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1768 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.65909719467163} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 105, ..., 249907, 249948, + 250000]), + col_indices=tensor([ 103, 261, 471, ..., 4857, 4933, 4959]), + values=tensor([0.8889, 0.3073, 0.1638, ..., 0.6109, 0.3049, 0.0052]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5335, 0.0728, 0.9615, ..., 0.8926, 0.1348, 0.8188]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 9.65909719467163 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1921 -ss 5000 -sd 0.01 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.388477563858032} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 65, 98, ..., 249897, 249948, + 250000]), + col_indices=tensor([ 141, 179, 219, ..., 4719, 4923, 4985]), + values=tensor([0.6589, 0.9882, 0.9555, ..., 0.3007, 0.0365, 0.3378]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9859, 0.3282, 0.7924, ..., 0.6550, 0.5905, 0.4141]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.388477563858032 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 65, 98, ..., 249897, 249948, + 250000]), + col_indices=tensor([ 141, 179, 219, ..., 4719, 4923, 4985]), + values=tensor([0.6589, 0.9882, 0.9555, ..., 0.3007, 0.0365, 0.3378]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9859, 0.3282, 0.7924, ..., 0.6550, 0.5905, 0.4141]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.388477563858032 seconds + +[21.12, 20.84, 20.6, 20.44, 20.44, 20.44, 20.52, 20.52, 20.6, 20.96] +[20.8, 20.8, 20.76, 21.68, 22.28, 24.84, 25.84, 26.2, 25.88, 24.96, 23.84, 23.92, 23.72, 23.88] +14.245083570480347 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1921, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.388477563858032, 'TIME_S_1KI': 5.407848809920892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.91259566307076, 'W': 21.966357313024826} +[21.12, 20.84, 20.6, 20.44, 20.44, 20.44, 20.52, 20.52, 20.6, 20.96, 20.44, 20.44, 20.44, 20.48, 20.56, 20.68, 20.68, 20.56, 20.84, 20.8] +370.74 +18.537 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 1921, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.388477563858032, 'TIME_S_1KI': 5.407848809920892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.91259566307076, 'W': 21.966357313024826, 'J_1KI': 162.89047145396708, 'W_1KI': 11.434855446655297, 'W_D': 3.429357313024827, 'J_D': 48.85148151707659, 'W_D_1KI': 1.785193812089967, 'J_D_1KI': 0.9293044310723411} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..50afdc0 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 396, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.481398582458496, "TIME_S_1KI": 26.468178238531554, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 323.34643746376037, "W": 22.685982650996596, "J_1KI": 816.5314077367685, "W_1KI": 57.28783497726413, "W_D": 4.214982650996596, "J_D": 60.07672866272925, "W_D_1KI": 10.643895583324738, "J_D_1KI": 26.878524200314995} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..07e41de --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.6507530212402344} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 267, 531, ..., 1249531, + 1249748, 1250000]), + col_indices=tensor([ 12, 24, 45, ..., 4958, 4983, 4986]), + values=tensor([0.7384, 0.2434, 0.0755, ..., 0.4736, 0.1384, 0.4678]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2921, 0.5624, 0.4015, ..., 0.8005, 0.9400, 0.6114]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 2.6507530212402344 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 396 -ss 5000 -sd 0.05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.481398582458496} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 247, 505, ..., 1249488, + 1249757, 1250000]), + col_indices=tensor([ 27, 35, 41, ..., 4930, 4938, 4952]), + values=tensor([0.8294, 0.9821, 0.6691, ..., 0.3905, 0.4873, 0.1672]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.8352, 0.4457, 0.1150, ..., 0.9988, 0.2164, 0.9018]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.481398582458496 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 247, 505, ..., 1249488, + 1249757, 1250000]), + col_indices=tensor([ 27, 35, 41, ..., 4930, 4938, 4952]), + values=tensor([0.8294, 0.9821, 0.6691, ..., 0.3905, 0.4873, 0.1672]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.8352, 0.4457, 0.1150, ..., 0.9988, 0.2164, 0.9018]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.481398582458496 seconds + +[20.72, 20.84, 20.92, 20.68, 20.6, 20.44, 20.48, 20.24, 20.44, 20.24] +[20.24, 20.52, 20.72, 22.36, 24.28, 26.72, 27.0, 27.56, 26.28, 25.88, 24.68, 24.52, 24.36, 24.36] +14.253137826919556 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 396, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.481398582458496, 'TIME_S_1KI': 26.468178238531554, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 323.34643746376037, 'W': 22.685982650996596} +[20.72, 20.84, 20.92, 20.68, 20.6, 20.44, 20.48, 20.24, 20.44, 20.24, 20.24, 20.24, 20.36, 20.48, 20.48, 20.6, 20.68, 20.64, 20.4, 20.6] +369.42 +18.471 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 396, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.481398582458496, 'TIME_S_1KI': 26.468178238531554, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 323.34643746376037, 'W': 22.685982650996596, 'J_1KI': 816.5314077367685, 'W_1KI': 57.28783497726413, 'W_D': 4.214982650996596, 'J_D': 60.07672866272925, 'W_D_1KI': 10.643895583324738, 'J_D_1KI': 26.878524200314995} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..721e6aa --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 199, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.477930784225464, "TIME_S_1KI": 52.6529185136958, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 366.7794834327698, "W": 23.985504753820422, "J_1KI": 1843.11298207422, "W_1KI": 120.53017464231368, "W_D": 5.225504753820424, "J_D": 79.90692520141607, "W_D_1KI": 26.25881785839409, "J_D_1KI": 131.95385858489493} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..a6a4a54 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.267488241195679} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 489, 972, ..., 2499002, + 2499515, 2500000]), + col_indices=tensor([ 0, 4, 21, ..., 4965, 4988, 4998]), + values=tensor([0.4985, 0.2439, 0.0801, ..., 0.3726, 0.6532, 0.2308]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7620, 0.1310, 0.6898, ..., 0.4324, 0.6267, 0.4614]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 5.267488241195679 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 199 -ss 5000 -sd 0.1 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.477930784225464} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 479, 980, ..., 2498983, + 2499479, 2500000]), + col_indices=tensor([ 7, 13, 23, ..., 4987, 4988, 4998]), + values=tensor([0.4519, 0.3203, 0.6830, ..., 0.2361, 0.6866, 0.7928]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4502, 0.7188, 0.8112, ..., 0.2797, 0.2285, 0.9848]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.477930784225464 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 479, 980, ..., 2498983, + 2499479, 2500000]), + col_indices=tensor([ 7, 13, 23, ..., 4987, 4988, 4998]), + values=tensor([0.4519, 0.3203, 0.6830, ..., 0.2361, 0.6866, 0.7928]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4502, 0.7188, 0.8112, ..., 0.2797, 0.2285, 0.9848]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.477930784225464 seconds + +[20.56, 20.84, 21.36, 21.88, 21.64, 21.6, 21.4, 20.76, 20.76, 20.72] +[21.16, 21.2, 21.4, 25.96, 27.32, 30.6, 31.24, 28.68, 27.4, 26.08, 24.2, 24.2, 24.4, 24.36, 24.2] +15.291714191436768 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 199, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.477930784225464, 'TIME_S_1KI': 52.6529185136958, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 366.7794834327698, 'W': 23.985504753820422} +[20.56, 20.84, 21.36, 21.88, 21.64, 21.6, 21.4, 20.76, 20.76, 20.72, 20.44, 20.32, 20.44, 20.64, 20.64, 20.44, 20.4, 20.36, 20.44, 20.84] +375.2 +18.759999999999998 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 199, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.477930784225464, 'TIME_S_1KI': 52.6529185136958, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 366.7794834327698, 'W': 23.985504753820422, 'J_1KI': 1843.11298207422, 'W_1KI': 120.53017464231368, 'W_D': 5.225504753820424, 'J_D': 79.90692520141607, 'W_D_1KI': 26.25881785839409, 'J_D_1KI': 131.95385858489493} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..328683d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.774195671081543, "TIME_S_1KI": 107.74195671081543, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 357.5530287361145, "W": 23.422029760012947, "J_1KI": 3575.5302873611454, "W_1KI": 234.22029760012947, "W_D": 5.2480297600129475, "J_D": 80.11470204830175, "W_D_1KI": 52.480297600129475, "J_D_1KI": 524.8029760012947} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..4419f1e --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.2 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.774195671081543} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1044, 1986, ..., 4998023, + 4998990, 5000000]), + col_indices=tensor([ 2, 11, 17, ..., 4984, 4985, 4991]), + values=tensor([0.4872, 0.8747, 0.2341, ..., 0.7866, 0.4499, 0.5164]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.5529, 0.0016, 0.5040, ..., 0.3915, 0.6771, 0.4202]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.774195671081543 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1044, 1986, ..., 4998023, + 4998990, 5000000]), + col_indices=tensor([ 2, 11, 17, ..., 4984, 4985, 4991]), + values=tensor([0.4872, 0.8747, 0.2341, ..., 0.7866, 0.4499, 0.5164]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.5529, 0.0016, 0.5040, ..., 0.3915, 0.6771, 0.4202]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.774195671081543 seconds + +[20.36, 20.12, 20.12, 20.04, 20.28, 20.12, 20.08, 20.4, 20.4, 20.72] +[20.8, 21.08, 22.4, 23.32, 25.48, 25.48, 27.96, 29.08, 28.16, 27.04, 25.64, 24.2, 24.2, 24.4, 24.44] +15.265672206878662 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.774195671081543, 'TIME_S_1KI': 107.74195671081543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.5530287361145, 'W': 23.422029760012947} +[20.36, 20.12, 20.12, 20.04, 20.28, 20.12, 20.08, 20.4, 20.4, 20.72, 20.32, 20.16, 20.04, 20.08, 20.08, 20.0, 20.16, 20.28, 20.28, 20.28] +363.48 +18.174 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.774195671081543, 'TIME_S_1KI': 107.74195671081543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.5530287361145, 'W': 23.422029760012947, 'J_1KI': 3575.5302873611454, 'W_1KI': 234.22029760012947, 'W_D': 5.2480297600129475, 'J_D': 80.11470204830175, 'W_D_1KI': 52.480297600129475, 'J_D_1KI': 524.8029760012947} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..1787dbd --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.919427633285522, "TIME_S_1KI": 159.19427633285522, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 506.52752479553226, "W": 23.776826253573663, "J_1KI": 5065.275247955323, "W_1KI": 237.76826253573662, "W_D": 5.445826253573667, "J_D": 116.01468014574058, "W_D_1KI": 54.45826253573667, "J_D_1KI": 544.5826253573666} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..2f8bfb8 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.3 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.919427633285522} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1464, 2929, ..., 7497018, + 7498512, 7500000]), + col_indices=tensor([ 1, 9, 13, ..., 4985, 4989, 4990]), + values=tensor([0.4014, 0.1905, 0.8906, ..., 0.4332, 0.9731, 0.1283]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.5776, 0.8031, 0.5959, ..., 0.3626, 0.0858, 0.0842]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 15.919427633285522 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1464, 2929, ..., 7497018, + 7498512, 7500000]), + col_indices=tensor([ 1, 9, 13, ..., 4985, 4989, 4990]), + values=tensor([0.4014, 0.1905, 0.8906, ..., 0.4332, 0.9731, 0.1283]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.5776, 0.8031, 0.5959, ..., 0.3626, 0.0858, 0.0842]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 15.919427633285522 seconds + +[20.2, 20.32, 20.32, 20.32, 20.32, 20.24, 20.52, 20.56, 20.64, 20.76] +[20.76, 20.64, 21.8, 21.8, 22.88, 24.6, 28.28, 30.28, 29.56, 29.28, 26.08, 25.32, 24.68, 24.72, 24.64, 24.44, 24.44, 24.48, 24.24, 24.32, 24.48] +21.303411960601807 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.919427633285522, 'TIME_S_1KI': 159.19427633285522, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 506.52752479553226, 'W': 23.776826253573663} +[20.2, 20.32, 20.32, 20.32, 20.32, 20.24, 20.52, 20.56, 20.64, 20.76, 20.12, 20.12, 19.96, 20.2, 20.04, 20.4, 20.6, 20.56, 20.64, 20.64] +366.61999999999995 +18.330999999999996 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.919427633285522, 'TIME_S_1KI': 159.19427633285522, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 506.52752479553226, 'W': 23.776826253573663, 'J_1KI': 5065.275247955323, 'W_1KI': 237.76826253573662, 'W_D': 5.445826253573667, 'J_D': 116.01468014574058, 'W_D_1KI': 54.45826253573667, 'J_D_1KI': 544.5826253573666} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..d18f4ce --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.475390195846558, "TIME_S_1KI": 214.75390195846558, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 659.3497357940673, "W": 24.06026504427972, "J_1KI": 6593.497357940673, "W_1KI": 240.60265044279723, "W_D": 5.657265044279722, "J_D": 155.03221620368953, "W_D_1KI": 56.57265044279722, "J_D_1KI": 565.7265044279723} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..6dfec86 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.4 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 
21.475390195846558} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2112, 4108, ..., 9995977, + 9998003, 10000000]), + col_indices=tensor([ 0, 2, 5, ..., 4993, 4997, 4998]), + values=tensor([0.6521, 0.2294, 0.7060, ..., 0.9592, 0.5713, 0.6385]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2067, 0.4320, 0.3905, ..., 0.7782, 0.8244, 0.2696]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 21.475390195846558 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2112, 4108, ..., 9995977, + 9998003, 10000000]), + col_indices=tensor([ 0, 2, 5, ..., 4993, 4997, 4998]), + values=tensor([0.6521, 0.2294, 0.7060, ..., 0.9592, 0.5713, 0.6385]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2067, 0.4320, 0.3905, ..., 0.7782, 0.8244, 0.2696]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 21.475390195846558 seconds + +[20.36, 20.32, 20.12, 20.16, 20.12, 20.28, 20.68, 20.96, 21.08, 21.04] +[20.56, 20.44, 20.44, 23.96, 25.32, 27.12, 30.0, 32.28, 29.04, 28.16, 26.48, 25.72, 24.44, 24.36, 24.24, 24.24, 24.2, 24.16, 24.28, 24.32, 24.36, 24.48, 24.04, 23.92, 23.8, 23.68, 23.88] +27.40409278869629 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.475390195846558, 'TIME_S_1KI': 214.75390195846558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 659.3497357940673, 'W': 24.06026504427972} +[20.36, 20.32, 20.12, 20.16, 20.12, 20.28, 20.68, 20.96, 21.08, 21.04, 20.4, 20.4, 20.36, 20.56, 20.6, 20.4, 20.24, 20.36, 20.28, 20.48] +368.06 +18.403 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.475390195846558, 'TIME_S_1KI': 214.75390195846558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 659.3497357940673, 'W': 24.06026504427972, 'J_1KI': 6593.497357940673, 'W_1KI': 240.60265044279723, 'W_D': 5.657265044279722, 'J_D': 155.03221620368953, 'W_D_1KI': 56.57265044279722, 'J_D_1KI': 565.7265044279723} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..dc66501 --- /dev/null +++ 
b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.60726523399353, "TIME_S_1KI": 266.0726523399353, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 821.7415859985354, "W": 24.511735271206813, "J_1KI": 8217.415859985353, "W_1KI": 245.11735271206814, "W_D": 6.057735271206813, "J_D": 203.08203129005446, "W_D_1KI": 60.57735271206813, "J_D_1KI": 605.7735271206813} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..ad4e507 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.5 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.60726523399353} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2547, 5140, ..., 12494977, + 12497506, 12500000]), + col_indices=tensor([ 3, 4, 6, ..., 4994, 4995, 4998]), + values=tensor([0.6176, 0.1216, 0.2065, ..., 0.5783, 0.0575, 0.3833]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.3583, 0.1424, 0.2491, ..., 0.0607, 0.2583, 0.4693]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 26.60726523399353 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2547, 5140, ..., 12494977, + 12497506, 12500000]), + col_indices=tensor([ 3, 4, 6, ..., 4994, 4995, 4998]), + values=tensor([0.6176, 0.1216, 0.2065, ..., 0.5783, 0.0575, 0.3833]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.3583, 0.1424, 0.2491, ..., 0.0607, 0.2583, 0.4693]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 26.60726523399353 seconds + +[20.28, 20.52, 20.68, 20.56, 20.44, 20.52, 20.44, 20.44, 20.72, 21.0] +[20.92, 20.68, 20.68, 23.84, 26.2, 27.84, 31.04, 29.96, 30.76, 29.48, 26.6, 26.6, 26.28, 24.68, 24.6, 24.48, 24.4, 24.24, 24.32, 24.32, 24.48, 24.52, 24.44, 24.52, 24.52, 24.56, 24.4, 24.56, 24.72, 24.68, 24.84, 24.8, 24.64] +33.524415016174316 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.60726523399353, 'TIME_S_1KI': 266.0726523399353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 821.7415859985354, 'W': 24.511735271206813} +[20.28, 20.52, 20.68, 20.56, 20.44, 20.52, 20.44, 20.44, 20.72, 21.0, 20.24, 20.2, 20.4, 20.6, 20.6, 20.6, 20.44, 20.56, 20.52, 20.16] +369.08000000000004 +18.454 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.60726523399353, 'TIME_S_1KI': 266.0726523399353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 821.7415859985354, 'W': 24.511735271206813, 'J_1KI': 8217.415859985353, 'W_1KI': 245.11735271206814, 'W_D': 6.057735271206813, 'J_D': 203.08203129005446, 'W_D_1KI': 60.57735271206813, 'J_D_1KI': 605.7735271206813} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..b24de3c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 284909, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.16942024230957, "TIME_S_1KI": 0.035693573184102885, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 316.9542731094361, "W": 22.304497332617817, "J_1KI": 1.1124754679895548, "W_1KI": 0.07828639085679223, "W_D": 3.6764973326178207, "J_D": 52.24424125194558, "W_D_1KI": 0.012904110900736098, "J_D_1KI": 4.529204377796454e-05} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..ece41ca --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 
5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.012842655181884766} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1287, 2037, 612, 4005, 465, 4495, 4486, 1954, 4095, + 1514, 3786, 3287, 3358, 3432, 3673, 489, 2823, 505, + 4424, 1572, 4277, 474, 3301, 30, 2842, 4780, 2739, + 564, 2900, 4485, 4784, 2295, 755, 3717, 1261, 1856, + 2818, 3372, 3761, 1939, 4279, 1416, 4196, 1024, 159, + 3430, 1464, 630, 4128, 1057, 4758, 4930, 4819, 4211, + 3868, 1700, 2760, 4521, 1355, 4737, 4580, 1838, 4056, + 1953, 4561, 1726, 3125, 4174, 510, 4743, 502, 2822, + 338, 1706, 4412, 4712, 3417, 4607, 4478, 4287, 4365, + 4223, 3755, 467, 2870, 999, 1516, 3711, 3345, 4540, + 4303, 4477, 4047, 3188, 522, 451, 4048, 1301, 3760, + 3807, 142, 526, 3797, 3415, 942, 3041, 1022, 555, + 2433, 3440, 4291, 2481, 2516, 1226, 4664, 1242, 2239, + 3542, 3300, 3985, 1261, 628, 3797, 3571, 1648, 545, + 3417, 523, 297, 1814, 2339, 1387, 4149, 2499, 1698, + 4107, 3910, 907, 1601, 3072, 2976, 1955, 76, 3173, + 63, 633, 2089, 1360, 1226, 4574, 730, 2472, 4618, + 425, 3915, 1299, 1950, 4945, 1796, 628, 1797, 3210, + 2055, 2428, 876, 1161, 1529, 1660, 2886, 4614, 2062, + 2433, 3539, 1521, 33, 1294, 4198, 863, 2582, 1498, + 77, 507, 2697, 2034, 2514, 1935, 4132, 2925, 876, + 2808, 4770, 271, 3697, 1635, 2519, 4995, 3590, 3245, + 130, 480, 3111, 3121, 3132, 1937, 3910, 1943, 2562, + 426, 3962, 1910, 1189, 1897, 1056, 462, 1607, 1444, + 118, 191, 2005, 615, 1379, 633, 2360, 3526, 4732, + 2267, 3397, 1029, 3432, 2182, 2675, 4099, 3777, 2171, + 2640, 3913, 4300, 2946, 3758, 3305, 1103, 4800, 3668, + 4286, 3562, 281, 919, 4442, 2167, 2728]), + values=tensor([0.8347, 0.3655, 0.0811, 0.8356, 0.0205, 0.3330, 0.9286, + 0.0736, 0.7654, 0.8451, 0.0234, 0.4126, 0.2439, 0.1012, + 0.1525, 0.4404, 0.8423, 0.5434, 0.2968, 0.3607, 0.9939, + 0.0443, 0.6432, 0.5086, 0.6326, 0.2329, 0.7870, 0.7820, + 0.9646, 0.4656, 0.9109, 0.0130, 0.3562, 0.2378, 0.0761, + 0.1724, 0.0722, 0.8084, 0.1566, 0.8788, 0.9593, 0.2473, + 0.2746, 0.1767, 0.8469, 0.1106, 0.8653, 0.5297, 0.8543, + 0.5387, 0.4683, 0.0500, 0.6408, 0.2485, 0.5053, 0.9278, + 0.6730, 0.1223, 0.9361, 0.1415, 0.0908, 0.6368, 0.4532, + 0.7711, 0.1924, 0.7435, 0.0645, 0.3989, 0.7433, 0.7022, + 0.6974, 0.8264, 0.3293, 0.6363, 0.9947, 0.1723, 0.3099, + 0.5498, 0.6041, 0.9256, 0.6505, 0.2218, 0.5727, 0.8460, + 0.3386, 0.9152, 0.1985, 0.3213, 0.2437, 0.8619, 0.4265, + 0.8019, 0.3028, 0.4559, 0.9203, 0.9762, 0.2222, 0.3112, + 0.4047, 0.0709, 0.2379, 0.3209, 0.9982, 0.9963, 0.6946, + 0.0267, 0.7677, 0.2026, 0.6034, 0.5006, 0.8273, 0.2191, + 0.6497, 0.2706, 0.0892, 0.8677, 0.9857, 0.5541, 0.2974, + 0.1559, 0.1745, 0.4744, 0.1426, 0.1224, 0.3669, 0.1827, + 0.5044, 0.5810, 0.3220, 0.7231, 0.9240, 0.0412, 0.3152, + 0.9088, 0.3617, 0.9935, 0.3508, 0.0434, 0.0453, 0.5299, + 0.2529, 0.0232, 0.7419, 0.0564, 0.5519, 0.6136, 0.5013, + 0.9801, 0.4708, 0.5636, 0.5144, 0.1368, 0.7207, 0.1775, + 0.9552, 0.2262, 0.7144, 0.1124, 0.8514, 0.1783, 0.8401, + 0.1256, 0.7454, 0.1258, 0.2191, 0.5753, 0.9252, 0.8693, + 0.6514, 0.3440, 0.7780, 
0.4771, 0.0787, 0.5042, 0.0634, + 0.8013, 0.8286, 0.4280, 0.3433, 0.9749, 0.0712, 0.9286, + 0.0320, 0.8979, 0.5094, 0.4000, 0.4693, 0.8308, 0.6000, + 0.3933, 0.7591, 0.2335, 0.5450, 0.3018, 0.3121, 0.4779, + 0.9302, 0.5324, 0.1295, 0.6438, 0.5030, 0.3371, 0.9613, + 0.8059, 0.9687, 0.2898, 0.7067, 0.8974, 0.1763, 0.0222, + 0.0300, 0.9494, 0.3209, 0.6515, 0.7028, 0.8063, 0.2794, + 0.7392, 0.1814, 0.3171, 0.4591, 0.7578, 0.6336, 0.8392, + 0.6142, 0.8521, 0.4206, 0.9799, 0.4517, 0.1512, 0.3696, + 0.0957, 0.3165, 0.3328, 0.9242, 0.5247, 0.8176, 0.9760, + 0.3689, 0.9384, 0.3805, 0.7826, 0.4113, 0.3311, 0.7250, + 0.9146, 0.3319, 0.6199, 0.8288, 0.1278]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.0363, 0.1704, 0.8959, ..., 0.1381, 0.6314, 0.8045]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.012842655181884766 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 81758 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.01309871673584} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3901, 3663, 229, 3286, 2751, 3483, 2128, 1446, 4503, + 3960, 1594, 4088, 4861, 755, 2490, 2051, 2278, 4729, + 1103, 1042, 1554, 2825, 4031, 3744, 2602, 531, 4883, + 202, 3192, 4676, 1858, 1406, 3540, 1868, 4258, 1296, + 40, 2235, 3617, 4799, 680, 649, 3963, 1933, 1725, + 575, 4509, 1266, 2706, 2278, 2363, 503, 1376, 3736, + 4576, 1431, 3018, 4449, 379, 171, 699, 4269, 423, + 15, 756, 1388, 2467, 4283, 3498, 330, 1828, 2936, + 727, 2300, 2833, 2863, 109, 1560, 2849, 1474, 443, + 230, 4619, 4963, 1262, 1748, 3663, 2453, 3127, 315, + 2799, 4404, 3122, 4451, 3711, 4611, 77, 2376, 1019, + 1276, 1245, 139, 3620, 243, 636, 3728, 523, 3275, + 1319, 407, 4648, 783, 1431, 764, 2964, 886, 1325, + 4241, 4394, 4361, 2591, 57, 2666, 3482, 767, 690, + 1227, 174, 1857, 1992, 525, 1315, 1715, 350, 2942, + 4827, 2788, 3414, 2521, 2538, 2474, 540, 1078, 638, + 3541, 2163, 4818, 623, 1593, 2829, 4885, 1974, 2036, + 2239, 4847, 1633, 3181, 2785, 2293, 3813, 4130, 3145, + 586, 2131, 1377, 3635, 1416, 96, 1290, 4122, 4111, + 3371, 3627, 1730, 4198, 1682, 3780, 3696, 967, 3018, + 2372, 3866, 4983, 4085, 4264, 3039, 2899, 4144, 4646, + 2984, 172, 4147, 1120, 2707, 124, 2849, 2323, 1520, + 3250, 2548, 2660, 4708, 2235, 593, 199, 448, 1975, + 4340, 1665, 3502, 4596, 1936, 3151, 759, 4834, 1142, + 2548, 2586, 1859, 3767, 1222, 4845, 1226, 3446, 4199, + 4381, 2057, 3436, 396, 3566, 1282, 173, 1462, 3915, + 4165, 1821, 3059, 730, 1341, 2913, 4066, 2638, 3070, + 430, 826, 3316, 2761, 4646, 4961, 2657]), + values=tensor([0.2777, 0.7994, 0.6055, 0.8858, 0.5401, 0.6289, 0.2315, + 0.7108, 0.2919, 0.9440, 0.4352, 0.9935, 0.7547, 0.9487, + 0.1198, 0.7138, 0.2381, 0.9158, 0.3679, 0.3919, 0.7781, + 
0.8318, 0.6054, 0.4010, 0.3469, 0.3425, 0.7408, 0.3350, + 0.1090, 0.6705, 0.8875, 0.7480, 0.9795, 0.0427, 0.1593, + 0.2017, 0.8682, 0.9704, 0.5078, 0.7021, 0.2351, 0.1990, + 0.1799, 0.1614, 0.9052, 0.7818, 0.9469, 0.9144, 0.6416, + 0.0978, 0.2154, 0.3794, 0.0722, 0.4288, 0.4423, 0.0392, + 0.2361, 0.1773, 0.5238, 0.6266, 0.9495, 0.4070, 0.9779, + 0.1080, 0.9756, 0.0212, 0.7575, 0.6901, 0.5589, 0.8829, + 0.5870, 0.3829, 0.0909, 0.7414, 0.5878, 0.3480, 0.1168, + 0.3972, 0.2804, 0.8860, 0.5903, 0.9778, 0.2522, 0.2229, + 0.0973, 0.3159, 0.6835, 0.0134, 0.3067, 0.7266, 0.6764, + 0.3082, 0.0327, 0.3921, 0.8622, 0.8074, 0.6252, 0.9606, + 0.3313, 0.3455, 0.4533, 0.6697, 0.2711, 0.3754, 0.8727, + 0.6651, 0.0380, 0.1210, 0.0259, 0.0087, 0.3017, 0.7186, + 0.9688, 0.5810, 0.6939, 0.8057, 0.2727, 0.5144, 0.0126, + 0.0636, 0.8543, 0.9756, 0.4583, 0.3014, 0.5014, 0.4285, + 0.3361, 0.3583, 0.8660, 0.8752, 0.5050, 0.1837, 0.7102, + 0.1957, 0.9064, 0.7982, 0.5015, 0.4099, 0.5809, 0.8801, + 0.0073, 0.5658, 0.8433, 0.7251, 0.8971, 0.9752, 0.6676, + 0.2814, 0.9394, 0.9811, 0.1778, 0.5627, 0.3569, 0.2951, + 0.4362, 0.7414, 0.7224, 0.6917, 0.2922, 0.7465, 0.6523, + 0.5621, 0.0779, 0.8744, 0.6553, 0.5271, 0.0990, 0.8629, + 0.6483, 0.0044, 0.2027, 0.6359, 0.0842, 0.9816, 0.4377, + 0.7291, 0.7757, 0.4150, 0.9512, 0.9053, 0.6628, 0.9162, + 0.6353, 0.3725, 0.8919, 0.1505, 0.1975, 0.7728, 0.1846, + 0.5340, 0.4217, 0.7643, 0.3438, 0.6005, 0.7795, 0.2067, + 0.6674, 0.9142, 0.4620, 0.8140, 0.1036, 0.3590, 0.3372, + 0.0756, 0.4219, 0.7019, 0.2017, 0.1876, 0.8857, 0.9443, + 0.7034, 0.3858, 0.6463, 0.0872, 0.7101, 0.2546, 0.8101, + 0.3637, 0.4495, 0.8137, 0.4469, 0.4204, 0.1055, 0.8379, + 0.1725, 0.3312, 0.1791, 0.6141, 0.0562, 0.4774, 0.5212, + 0.7724, 0.9039, 0.5626, 0.1051, 0.2569, 0.5243, 0.3982, + 0.0444, 0.0991, 0.8125, 0.2081, 0.2559, 0.6572, 0.3238, + 0.3534, 0.8270, 0.9704, 0.5262, 0.1397]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.2610, 0.0051, 0.8611, ..., 0.6706, 0.7457, 0.2823]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 3.01309871673584 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 284909 -ss 5000 -sd 1e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.16942024230957} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1669, 2388, 3410, 214, 4888, 2047, 1859, 3824, 1130, + 3331, 4650, 808, 1845, 4600, 2980, 4756, 2639, 4242, + 120, 4542, 2175, 1322, 104, 704, 854, 2110, 1063, + 1256, 2794, 2665, 1239, 4623, 2397, 2905, 1669, 3634, + 691, 1001, 4550, 1274, 2606, 2628, 4848, 3423, 4205, + 4849, 3844, 4805, 2751, 822, 856, 3866, 2362, 4396, + 3513, 3731, 4108, 1129, 2401, 2429, 238, 3568, 2538, + 4839, 3438, 2131, 3982, 1035, 620, 3061, 2659, 870, + 31, 582, 3725, 2164, 3897, 4881, 3537, 2824, 936, + 4420, 341, 4499, 2690, 351, 3823, 4169, 4790, 4554, + 2495, 1376, 3626, 221, 4721, 2833, 4128, 83, 287, + 4091, 4135, 4551, 3973, 764, 392, 4740, 2858, 4378, + 2517, 4820, 3243, 3784, 1749, 2694, 3058, 661, 4273, + 2427, 4542, 135, 3704, 3578, 4193, 3743, 3465, 2179, + 4188, 2714, 3316, 1323, 3063, 3972, 3355, 1842, 1656, + 2481, 1669, 1106, 4204, 1040, 565, 3967, 2999, 776, + 1132, 4335, 252, 3480, 3592, 4417, 2743, 508, 1998, + 2250, 4747, 3247, 3778, 2520, 4340, 4333, 889, 3347, + 1306, 252, 3840, 4251, 3753, 922, 1530, 732, 4724, + 4652, 2305, 676, 3763, 2577, 479, 3149, 3237, 682, + 2204, 1170, 4037, 1115, 902, 2463, 2133, 49, 3338, + 846, 2596, 1254, 611, 336, 2556, 4596, 3162, 2347, + 1052, 1946, 3013, 1910, 3262, 793, 681, 3061, 4097, + 649, 4096, 3982, 4856, 2244, 770, 1157, 3683, 1150, + 4034, 4307, 4867, 947, 1680, 3888, 190, 677, 2841, + 816, 454, 4546, 1683, 1115, 4528, 4055, 324, 2442, + 1530, 1512, 2880, 1124, 741, 2337, 2820, 1096, 969, + 4662, 1861, 4067, 2109, 3996, 1635, 499]), + values=tensor([0.8132, 0.1702, 0.5583, 0.1261, 0.6291, 0.5508, 0.1330, + 0.9627, 0.2059, 0.3644, 0.3622, 0.4731, 0.3091, 0.8919, + 0.7060, 0.5289, 0.7945, 0.3422, 0.4040, 0.9747, 0.1778, + 0.1060, 0.3373, 0.1041, 0.0936, 0.4036, 0.4021, 0.5444, + 0.4938, 0.5992, 0.1894, 0.3036, 0.6677, 0.4744, 0.8443, + 0.2067, 0.1390, 0.7860, 0.2069, 0.5019, 0.5539, 0.4807, + 0.6194, 0.5176, 0.2767, 0.7631, 0.4453, 0.0999, 0.7181, + 0.2470, 0.2255, 0.5250, 0.2866, 0.8997, 0.0544, 0.1824, + 0.2628, 0.9339, 0.2590, 0.5943, 0.2439, 0.4256, 0.8224, + 0.2204, 0.5000, 0.2703, 0.2122, 0.2501, 0.5794, 0.6155, + 0.5183, 0.5021, 0.6112, 0.1537, 0.1024, 0.3154, 0.0744, + 0.5354, 0.3979, 0.6342, 0.7319, 0.0847, 0.3194, 0.5800, + 0.2467, 0.5775, 0.6339, 0.2050, 0.2286, 0.7874, 0.1733, + 0.7255, 0.0573, 0.6716, 0.4231, 0.6554, 0.3477, 0.4703, + 0.1981, 0.3923, 0.3520, 0.4289, 0.4033, 0.1353, 0.5197, + 0.9189, 0.6985, 0.9291, 0.8051, 0.5530, 0.0423, 0.9594, + 0.8487, 0.2554, 0.0395, 0.4103, 0.1345, 0.0607, 0.2812, + 0.7571, 0.9906, 0.2249, 0.3326, 0.1389, 0.8069, 0.2156, + 0.3462, 0.2324, 0.0457, 0.8244, 0.5205, 0.0833, 0.1781, + 0.3837, 0.9227, 0.2976, 0.9031, 0.2499, 0.3484, 0.3298, + 0.6568, 0.3816, 0.5687, 0.3523, 0.3593, 0.7242, 0.1034, + 0.3478, 0.4454, 0.7734, 0.2847, 0.4512, 0.5866, 0.1633, + 0.7139, 0.4511, 0.5642, 0.2230, 0.1384, 0.2467, 0.5114, + 0.5149, 0.4901, 0.7340, 0.5840, 0.0495, 0.1493, 0.4501, + 0.5299, 0.1752, 0.0737, 0.0887, 0.7004, 0.7171, 0.6451, + 0.1099, 0.6191, 0.3209, 0.2667, 0.2735, 0.3592, 0.7035, + 0.1766, 0.2292, 0.6138, 0.2492, 0.8422, 0.5205, 0.0949, + 0.6311, 0.1200, 0.6842, 0.3167, 0.3418, 0.7978, 0.1885, + 0.9433, 0.6390, 0.5217, 0.8313, 0.4066, 0.8623, 0.9330, + 0.7999, 0.0688, 0.3315, 0.2496, 0.2006, 0.0199, 0.1239, + 0.0030, 0.9251, 0.8374, 0.2492, 0.6001, 0.0171, 0.3645, + 0.9564, 0.7314, 0.8427, 0.8917, 0.1465, 0.2355, 0.6975, + 0.9025, 0.0358, 0.2860, 0.4051, 0.9734, 
0.8626, 0.4028, + 0.9642, 0.0743, 0.8714, 0.6919, 0.3640, 0.9239, 0.1573, + 0.9549, 0.3068, 0.2789, 0.0169, 0.6253, 0.7318, 0.1857, + 0.1394, 0.2220, 0.2355, 0.9726, 0.9750]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.9060, 0.0911, 0.6185, ..., 0.7353, 0.0547, 0.2301]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.16942024230957 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1669, 2388, 3410, 214, 4888, 2047, 1859, 3824, 1130, + 3331, 4650, 808, 1845, 4600, 2980, 4756, 2639, 4242, + 120, 4542, 2175, 1322, 104, 704, 854, 2110, 1063, + 1256, 2794, 2665, 1239, 4623, 2397, 2905, 1669, 3634, + 691, 1001, 4550, 1274, 2606, 2628, 4848, 3423, 4205, + 4849, 3844, 4805, 2751, 822, 856, 3866, 2362, 4396, + 3513, 3731, 4108, 1129, 2401, 2429, 238, 3568, 2538, + 4839, 3438, 2131, 3982, 1035, 620, 3061, 2659, 870, + 31, 582, 3725, 2164, 3897, 4881, 3537, 2824, 936, + 4420, 341, 4499, 2690, 351, 3823, 4169, 4790, 4554, + 2495, 1376, 3626, 221, 4721, 2833, 4128, 83, 287, + 4091, 4135, 4551, 3973, 764, 392, 4740, 2858, 4378, + 2517, 4820, 3243, 3784, 1749, 2694, 3058, 661, 4273, + 2427, 4542, 135, 3704, 3578, 4193, 3743, 3465, 2179, + 4188, 2714, 3316, 1323, 3063, 3972, 3355, 1842, 1656, + 2481, 1669, 1106, 4204, 1040, 565, 3967, 2999, 776, + 1132, 4335, 252, 3480, 3592, 4417, 2743, 508, 1998, + 2250, 4747, 3247, 3778, 2520, 4340, 4333, 889, 3347, + 1306, 252, 3840, 4251, 3753, 922, 1530, 732, 4724, + 4652, 2305, 676, 3763, 2577, 479, 3149, 3237, 682, + 2204, 1170, 4037, 1115, 902, 2463, 2133, 49, 3338, + 846, 2596, 1254, 611, 336, 2556, 4596, 3162, 2347, + 1052, 1946, 3013, 1910, 3262, 793, 681, 3061, 4097, + 649, 4096, 3982, 4856, 2244, 770, 1157, 3683, 1150, + 4034, 4307, 4867, 947, 1680, 3888, 190, 677, 2841, + 816, 454, 4546, 1683, 1115, 4528, 4055, 324, 2442, + 1530, 1512, 2880, 1124, 741, 2337, 2820, 1096, 969, + 4662, 1861, 4067, 2109, 3996, 1635, 499]), + values=tensor([0.8132, 0.1702, 0.5583, 0.1261, 0.6291, 0.5508, 0.1330, + 0.9627, 0.2059, 0.3644, 0.3622, 0.4731, 0.3091, 0.8919, + 0.7060, 0.5289, 0.7945, 0.3422, 0.4040, 0.9747, 0.1778, + 0.1060, 0.3373, 0.1041, 0.0936, 0.4036, 0.4021, 0.5444, + 0.4938, 0.5992, 0.1894, 0.3036, 0.6677, 0.4744, 0.8443, + 0.2067, 0.1390, 0.7860, 0.2069, 0.5019, 0.5539, 0.4807, + 0.6194, 0.5176, 0.2767, 0.7631, 0.4453, 0.0999, 0.7181, + 0.2470, 0.2255, 0.5250, 0.2866, 0.8997, 0.0544, 0.1824, + 0.2628, 0.9339, 0.2590, 0.5943, 0.2439, 0.4256, 0.8224, + 0.2204, 0.5000, 0.2703, 0.2122, 0.2501, 0.5794, 0.6155, + 0.5183, 0.5021, 0.6112, 0.1537, 0.1024, 0.3154, 0.0744, + 0.5354, 0.3979, 0.6342, 0.7319, 0.0847, 0.3194, 0.5800, + 0.2467, 0.5775, 0.6339, 0.2050, 0.2286, 0.7874, 0.1733, + 0.7255, 0.0573, 0.6716, 0.4231, 0.6554, 0.3477, 0.4703, + 0.1981, 0.3923, 0.3520, 0.4289, 0.4033, 0.1353, 0.5197, + 0.9189, 0.6985, 0.9291, 0.8051, 0.5530, 0.0423, 0.9594, + 0.8487, 0.2554, 0.0395, 0.4103, 0.1345, 0.0607, 0.2812, + 0.7571, 0.9906, 0.2249, 0.3326, 0.1389, 0.8069, 0.2156, + 0.3462, 0.2324, 0.0457, 
0.8244, 0.5205, 0.0833, 0.1781, + 0.3837, 0.9227, 0.2976, 0.9031, 0.2499, 0.3484, 0.3298, + 0.6568, 0.3816, 0.5687, 0.3523, 0.3593, 0.7242, 0.1034, + 0.3478, 0.4454, 0.7734, 0.2847, 0.4512, 0.5866, 0.1633, + 0.7139, 0.4511, 0.5642, 0.2230, 0.1384, 0.2467, 0.5114, + 0.5149, 0.4901, 0.7340, 0.5840, 0.0495, 0.1493, 0.4501, + 0.5299, 0.1752, 0.0737, 0.0887, 0.7004, 0.7171, 0.6451, + 0.1099, 0.6191, 0.3209, 0.2667, 0.2735, 0.3592, 0.7035, + 0.1766, 0.2292, 0.6138, 0.2492, 0.8422, 0.5205, 0.0949, + 0.6311, 0.1200, 0.6842, 0.3167, 0.3418, 0.7978, 0.1885, + 0.9433, 0.6390, 0.5217, 0.8313, 0.4066, 0.8623, 0.9330, + 0.7999, 0.0688, 0.3315, 0.2496, 0.2006, 0.0199, 0.1239, + 0.0030, 0.9251, 0.8374, 0.2492, 0.6001, 0.0171, 0.3645, + 0.9564, 0.7314, 0.8427, 0.8917, 0.1465, 0.2355, 0.6975, + 0.9025, 0.0358, 0.2860, 0.4051, 0.9734, 0.8626, 0.4028, + 0.9642, 0.0743, 0.8714, 0.6919, 0.3640, 0.9239, 0.1573, + 0.9549, 0.3068, 0.2789, 0.0169, 0.6253, 0.7318, 0.1857, + 0.1394, 0.2220, 0.2355, 0.9726, 0.9750]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.9060, 0.0911, 0.6185, ..., 0.7353, 0.0547, 0.2301]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.16942024230957 seconds + +[20.28, 20.4, 20.4, 20.48, 20.76, 21.28, 21.16, 20.96, 20.76, 20.4] +[20.12, 20.44, 21.32, 22.44, 25.12, 25.12, 25.76, 26.28, 25.96, 25.44, 23.88, 24.04, 24.16, 23.84] +14.210330247879028 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 284909, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.16942024230957, 'TIME_S_1KI': 0.035693573184102885, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.9542731094361, 'W': 22.304497332617817} +[20.28, 20.4, 20.4, 20.48, 20.76, 21.28, 21.16, 20.96, 20.76, 20.4, 20.8, 20.88, 20.72, 20.76, 20.84, 20.84, 20.44, 20.52, 20.32, 20.6] +372.55999999999995 +18.627999999999997 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 284909, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.16942024230957, 'TIME_S_1KI': 0.035693573184102885, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.9542731094361, 'W': 22.304497332617817, 'J_1KI': 1.1124754679895548, 'W_1KI': 0.07828639085679223, 'W_D': 3.6764973326178207, 'J_D': 52.24424125194558, 'W_D_1KI': 0.012904110900736098, 'J_D_1KI': 4.529204377796454e-05} diff --git a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..758834a --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 16, "ITERATIONS": 154432, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.621034145355225, "TIME_S_1KI": 0.06877482740206191, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 316.4244461250305, "W": 22.262246477486542, "J_1KI": 2.0489564735613763, "W_1KI": 0.144155657360434, "W_D": 3.7392464774865424, "J_D": 53.14778078484536, "W_D_1KI": 0.024212899382812774, "J_D_1KI": 0.00015678680184685024} diff --git 
a/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..8f01bb5 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/altra_16_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014879941940307617} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1248, 1250, 1250]), + col_indices=tensor([1397, 3608, 621, ..., 1983, 2722, 4972]), + values=tensor([0.7898, 0.8890, 0.9853, ..., 0.2806, 0.4332, 0.7785]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.8515, 0.1205, 0.1290, ..., 0.0596, 0.1294, 0.2178]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.014879941940307617 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 70564 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.797720670700073} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 1249, 1250, 1250]), + col_indices=tensor([4236, 1927, 389, ..., 3900, 4084, 4178]), + values=tensor([0.5819, 0.5926, 0.4032, ..., 0.1422, 0.8129, 0.9187]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.4782, 0.7587, 0.6755, ..., 0.4641, 0.3230, 0.1517]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 4.797720670700073 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 154432 -ss 5000 -sd 5e-05 -c 16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.621034145355225} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([ 91, 2944, 3974, ..., 4430, 70, 3263]), + values=tensor([0.2553, 0.0855, 0.4739, ..., 0.3797, 0.6721, 0.4378]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.8009, 0.9874, 0.1682, ..., 0.8612, 0.3697, 0.0752]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.621034145355225 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([ 91, 2944, 3974, ..., 4430, 70, 3263]), + values=tensor([0.2553, 0.0855, 0.4739, ..., 0.3797, 0.6721, 0.4378]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.8009, 0.9874, 0.1682, ..., 0.8612, 0.3697, 0.0752]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.621034145355225 seconds + +[20.4, 20.68, 20.44, 20.16, 20.16, 20.48, 20.44, 20.68, 20.64, 20.88] +[20.92, 20.96, 21.52, 22.88, 25.2, 25.52, 26.2, 26.2, 25.6, 24.92, 23.32, 23.36, 23.56, 23.44] +14.213500261306763 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 154432, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.621034145355225, 'TIME_S_1KI': 0.06877482740206191, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.4244461250305, 'W': 22.262246477486542} +[20.4, 20.68, 20.44, 20.16, 20.16, 20.48, 20.44, 20.68, 20.64, 20.88, 20.72, 20.64, 20.68, 20.52, 20.64, 20.8, 20.76, 20.76, 20.68, 20.6] +370.46 +18.523 +{'CPU': 'Altra', 'CORES': 16, 'ITERATIONS': 154432, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.621034145355225, 'TIME_S_1KI': 0.06877482740206191, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.4244461250305, 'W': 22.262246477486542, 'J_1KI': 2.0489564735613763, 'W_1KI': 0.144155657360434, 'W_D': 3.7392464774865424, 'J_D': 53.14778078484536, 'W_D_1KI': 0.024212899382812774, 'J_D_1KI': 0.00015678680184685024} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..a438e21 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 65446, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, 
"MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.748796463012695, "TIME_S_1KI": 0.16423916607604278, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1866.1015530323982, "W": 143.13, "J_1KI": 28.51360744785622, "W_1KI": 2.1869938575314, "W_D": 106.99974999999999, "J_D": 1395.0422668139338, "W_D_1KI": 1.634931852214039, "J_D_1KI": 0.024981386978792274} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..7f0e014 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.060246944427490234} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 16, 25, ..., 999980, + 999989, 1000000]), + col_indices=tensor([ 4573, 4595, 4948, ..., 71788, 92544, 99741]), + values=tensor([0.3512, 0.1040, 0.2729, ..., 0.2513, 0.9554, 0.9408]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.1257, 0.5794, 0.5612, ..., 0.8235, 0.1474, 0.3975]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.060246944427490234 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '17428', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.7960927486419678} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 17, ..., 999980, + 999989, 1000000]), + col_indices=tensor([11836, 34889, 39226, ..., 79566, 86668, 94364]), + values=tensor([0.7886, 0.3777, 0.4340, ..., 0.5250, 0.8836, 0.4934]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.9435, 0.7532, 0.3829, ..., 0.0561, 0.6547, 0.0145]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 2.7960927486419678 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65446', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.748796463012695} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 17, ..., 999978, + 999990, 1000000]), + col_indices=tensor([ 6624, 6694, 37331, ..., 71444, 97628, 99166]), + values=tensor([0.8094, 0.0427, 0.0622, ..., 0.4502, 0.4633, 0.1157]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2357, 0.1643, 0.3206, ..., 0.7759, 0.8620, 0.1771]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.748796463012695 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 17, ..., 999978, + 999990, 1000000]), + col_indices=tensor([ 6624, 6694, 37331, ..., 71444, 97628, 99166]), + values=tensor([0.8094, 0.0427, 0.0622, ..., 0.4502, 0.4633, 0.1157]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2357, 0.1643, 0.3206, ..., 0.7759, 0.8620, 0.1771]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.748796463012695 seconds + +[40.51, 40.31, 40.08, 39.71, 39.8, 39.59, 39.58, 41.48, 39.56, 40.06] +[143.13] +13.037808656692505 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 65446, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.748796463012695, 'TIME_S_1KI': 0.16423916607604278, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1866.1015530323982, 'W': 143.13} +[40.51, 40.31, 40.08, 39.71, 39.8, 39.59, 39.58, 41.48, 39.56, 40.06, 41.48, 39.67, 40.17, 39.98, 41.44, 39.98, 40.5, 40.23, 39.59, 39.82] +722.605 +36.130250000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 65446, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.748796463012695, 'TIME_S_1KI': 0.16423916607604278, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1866.1015530323982, 'W': 143.13, 'J_1KI': 28.51360744785622, 'W_1KI': 2.1869938575314, 'W_D': 106.99974999999999, 'J_D': 1395.0422668139338, 'W_D_1KI': 1.634931852214039, 'J_D_1KI': 0.024981386978792274} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..d0be3f6 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4693, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 12.009309530258179, "TIME_S_1KI": 2.558983492490556, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2204.305100774765, "W": 128.95, "J_1KI": 469.7006394150362, "W_1KI": 27.477093543575535, "W_D": 92.7465, "J_D": 1585.4329820008277, "W_D_1KI": 19.76273172810569, "J_D_1KI": 4.211108401471487} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..561971c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.27472805976867676} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 103, 224, ..., 9999788, + 9999890, 10000000]), + col_indices=tensor([ 311, 3365, 5161, ..., 98602, 99530, 99576]), + values=tensor([0.9917, 0.0583, 0.3712, ..., 0.9136, 0.4986, 0.7909]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4323, 0.4083, 0.9080, ..., 0.7530, 0.1922, 0.7136]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 0.27472805976867676 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3821', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.548935651779175} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 86, 193, ..., 9999790, + 9999889, 10000000]), + col_indices=tensor([ 598, 3163, 6325, ..., 93333, 94869, 95502]), + values=tensor([0.3479, 0.2007, 0.7107, ..., 0.5121, 0.1193, 0.0296]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.9967, 0.6546, 0.0107, ..., 0.1473, 0.4856, 0.1261]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 8.548935651779175 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4693', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 12.009309530258179} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 80, 177, ..., 9999782, + 9999890, 10000000]), + col_indices=tensor([ 1894, 3295, 3747, ..., 98404, 98823, 99018]), + values=tensor([0.1540, 0.7163, 0.3077, ..., 0.3211, 0.5255, 0.5012]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8104, 0.7178, 0.6885, ..., 0.8661, 0.7147, 0.1559]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 12.009309530258179 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 80, 177, ..., 9999782, + 9999890, 10000000]), + col_indices=tensor([ 1894, 3295, 3747, ..., 98404, 98823, 99018]), + values=tensor([0.1540, 0.7163, 0.3077, ..., 0.3211, 0.5255, 0.5012]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8104, 0.7178, 0.6885, ..., 0.8661, 0.7147, 0.1559]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 12.009309530258179 seconds + +[41.32, 39.94, 39.97, 39.81, 39.83, 40.24, 40.5, 40.22, 40.21, 41.33] +[128.95] +17.09426212310791 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 12.009309530258179, 'TIME_S_1KI': 2.558983492490556, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2204.305100774765, 'W': 128.95} +[41.32, 39.94, 39.97, 39.81, 39.83, 40.24, 40.5, 40.22, 40.21, 41.33, 40.63, 40.93, 40.28, 39.66, 39.87, 41.7, 39.67, 40.03, 39.68, 39.78] +724.0699999999999 +36.2035 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 12.009309530258179, 'TIME_S_1KI': 2.558983492490556, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2204.305100774765, 'W': 128.95, 'J_1KI': 469.7006394150362, 'W_1KI': 27.477093543575535, 'W_D': 92.7465, 'J_D': 1585.4329820008277, 'W_D_1KI': 19.76273172810569, 'J_D_1KI': 4.211108401471487} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..5f6b7ac --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 99857, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.432250738143921, "TIME_S_1KI": 0.10447190220158749, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1536.9044136524199, "W": 115.09999999999998, "J_1KI": 15.391053342804408, "W_1KI": 1.152648287050482, "W_D": 79.15799999999997, "J_D": 
1056.9789711198803, "W_D_1KI": 0.7927135804200004, "J_D_1KI": 0.007938487841813797} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..5aa8a41 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.043670654296875} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99998, + 100000]), + col_indices=tensor([ 6609, 19255, 81333, ..., 81128, 51531, 76130]), + values=tensor([0.9876, 0.0139, 0.8085, ..., 0.3685, 0.4758, 0.0266]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.1735, 0.8240, 0.8190, ..., 0.4288, 0.7745, 0.1715]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.043670654296875 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '24043', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.5281074047088623} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99997, 99999, + 100000]), + col_indices=tensor([69039, 75318, 84133, ..., 16483, 23976, 47642]), + values=tensor([0.3961, 0.2517, 0.3876, ..., 0.3761, 0.7912, 0.1675]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.9918, 0.3750, 0.7737, ..., 0.5214, 0.0832, 0.2225]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 2.5281074047088623 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '99857', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.432250738143921} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 100000, + 100000]), + col_indices=tensor([18969, 38131, 43029, ..., 81495, 1519, 27704]), + values=tensor([0.3850, 0.3770, 0.8820, ..., 0.3865, 0.0804, 0.8829]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.4374, 0.1348, 0.8967, ..., 0.5157, 0.0353, 0.0014]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.432250738143921 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 100000, + 100000]), + col_indices=tensor([18969, 38131, 43029, ..., 81495, 1519, 27704]), + values=tensor([0.3850, 0.3770, 0.8820, ..., 0.3865, 0.0804, 0.8829]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.4374, 0.1348, 0.8967, ..., 0.5157, 0.0353, 0.0014]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.432250738143921 seconds + +[40.36, 39.67, 39.74, 39.69, 39.75, 39.62, 40.16, 41.41, 40.17, 40.09] +[115.1] +13.35277509689331 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 99857, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.432250738143921, 'TIME_S_1KI': 0.10447190220158749, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.9044136524199, 'W': 115.09999999999998} +[40.36, 39.67, 39.74, 39.69, 39.75, 39.62, 40.16, 41.41, 40.17, 40.09, 40.33, 39.61, 39.56, 41.58, 39.51, 39.51, 39.84, 39.34, 39.38, 39.82] +718.8400000000001 +35.94200000000001 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 99857, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.432250738143921, 'TIME_S_1KI': 0.10447190220158749, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.9044136524199, 'W': 115.09999999999998, 'J_1KI': 15.391053342804408, 'W_1KI': 1.152648287050482, 'W_D': 79.15799999999997, 'J_D': 1056.9789711198803, 'W_D_1KI': 0.7927135804200004, 'J_D_1KI': 0.007938487841813797} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..80ab5da --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 81276, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.078009605407715, "TIME_S_1KI": 0.12399736214144047, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1722.4750410318375, "W": 132.75, "J_1KI": 21.192911081153568, "W_1KI": 1.6333234903292484, "W_D": 96.411, "J_D": 1250.9645286698342, "W_D_1KI": 1.1862173335301935, "J_D_1KI": 0.014594927574317062} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..0f8dc83 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.04085874557495117} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 9, ..., 499987, 499993, + 500000]), + col_indices=tensor([ 4658, 51132, 55767, ..., 77897, 84680, 91168]), + values=tensor([0.8716, 0.7460, 0.9968, ..., 0.7762, 0.8585, 0.9878]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.7678, 0.5187, 0.4774, ..., 0.8664, 0.3724, 0.0254]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.04085874557495117 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25698', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.3198819160461426} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 499992, 499998, + 500000]), + col_indices=tensor([33478, 35089, 63624, ..., 93258, 3464, 77760]), + values=tensor([0.8303, 0.5286, 0.9064, ..., 0.8655, 0.5788, 0.5903]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.0892, 0.6340, 0.1475, ..., 0.5230, 0.0009, 0.8265]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 3.3198819160461426 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '81276', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.078009605407715} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 9, ..., 499997, 500000, + 500000]), + col_indices=tensor([38450, 44227, 69625, ..., 8507, 39094, 82179]), + values=tensor([0.2677, 0.9845, 0.1042, ..., 0.9974, 0.0756, 0.3422]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.8400, 0.1962, 0.3075, ..., 0.6034, 0.5737, 0.0994]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.078009605407715 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 9, ..., 499997, 500000, + 500000]), + col_indices=tensor([38450, 44227, 69625, ..., 8507, 39094, 82179]), + values=tensor([0.2677, 0.9845, 0.1042, ..., 0.9974, 0.0756, 0.3422]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.8400, 0.1962, 0.3075, ..., 0.6034, 0.5737, 0.0994]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.078009605407715 seconds + +[41.19, 39.74, 39.62, 40.23, 39.69, 39.5, 44.76, 39.57, 39.56, 39.98] +[132.75] +12.975329875946045 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 81276, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.078009605407715, 'TIME_S_1KI': 0.12399736214144047, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1722.4750410318375, 'W': 132.75} +[41.19, 39.74, 39.62, 40.23, 39.69, 39.5, 44.76, 39.57, 39.56, 39.98, 40.29, 39.57, 39.62, 45.06, 39.69, 39.48, 40.04, 39.95, 40.05, 39.84] +726.78 +36.339 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 81276, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.078009605407715, 'TIME_S_1KI': 0.12399736214144047, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1722.4750410318375, 'W': 132.75, 'J_1KI': 21.192911081153568, 'W_1KI': 1.6333234903292484, 'W_D': 96.411, 'J_D': 1250.9645286698342, 'W_D_1KI': 1.1862173335301935, 'J_D_1KI': 0.014594927574317062} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..0d37837 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 280711, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.449347734451294, "TIME_S_1KI": 0.03722457521953644, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1289.5745413303375, "W": 98.5, "J_1KI": 4.593957989998032, "W_1KI": 0.35089469240606885, "W_D": 62.83475, "J_D": 822.6405473183394, "W_D_1KI": 0.22384142409809377, 
"J_D_1KI": 0.0007974088086968226} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..d302c45 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.019724130630493164} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9999, 10000]), + col_indices=tensor([ 730, 4220, 7544, ..., 4458, 7562, 5619]), + values=tensor([0.0181, 0.7832, 0.5914, ..., 0.2469, 0.2734, 0.2796]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.6994, 0.7339, 0.7582, ..., 0.9456, 0.1186, 0.3856]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.019724130630493164 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '53234', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.991217851638794} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 5, ..., 9999, 9999, 10000]), + col_indices=tensor([2031, 5960, 7493, ..., 3747, 8534, 6060]), + values=tensor([0.1847, 0.1000, 0.1920, ..., 0.9911, 0.4392, 0.2330]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.7239, 0.0636, 0.4781, ..., 0.2276, 0.2279, 0.8613]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 1.991217851638794 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '280711', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.449347734451294} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9997, 9999, 10000]), + col_indices=tensor([8732, 42, 2512, ..., 1373, 9550, 9690]), + values=tensor([0.4706, 0.1126, 0.6045, ..., 0.0102, 0.1178, 0.6557]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.4976, 0.6299, 0.3127, ..., 0.9623, 0.9434, 0.7070]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.449347734451294 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9997, 9999, 10000]), + col_indices=tensor([8732, 42, 2512, ..., 1373, 9550, 9690]), + values=tensor([0.4706, 0.1126, 0.6045, ..., 0.0102, 0.1178, 0.6557]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.4976, 0.6299, 0.3127, ..., 0.9623, 0.9434, 0.7070]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.449347734451294 seconds + +[40.09, 39.2, 39.41, 39.39, 39.38, 42.02, 40.51, 39.25, 39.25, 39.51] +[98.5] +13.092127323150635 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 280711, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.449347734451294, 'TIME_S_1KI': 0.03722457521953644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.5745413303375, 'W': 98.5} +[40.09, 39.2, 39.41, 39.39, 39.38, 42.02, 40.51, 39.25, 39.25, 39.51, 40.09, 39.17, 39.95, 40.04, 39.24, 39.23, 39.51, 39.16, 39.2, 39.1] +713.3050000000001 +35.66525 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 280711, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.449347734451294, 'TIME_S_1KI': 0.03722457521953644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.5745413303375, 'W': 98.5, 'J_1KI': 4.593957989998032, 'W_1KI': 0.35089469240606885, 'W_D': 62.83475, 'J_D': 822.6405473183394, 'W_D_1KI': 0.22384142409809377, 'J_D_1KI': 0.0007974088086968226} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..9665b3b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 193546, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.657205820083618, "TIME_S_1KI": 0.05506290917964524, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 
1424.268603067398, "W": 108.67, "J_1KI": 7.358811874528009, "W_1KI": 0.5614685914459612, "W_D": 72.91375, "J_D": 955.6341663467883, "W_D_1KI": 0.3767256879501513, "J_D_1KI": 0.001946440060503195} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..77b5a62 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,105 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.025844097137451172} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 12, 28, ..., 99969, 99983, + 100000]), + col_indices=tensor([1079, 2122, 3254, ..., 9373, 9823, 9958]), + values=tensor([0.1589, 0.8596, 0.7837, ..., 0.1493, 0.1272, 0.2084]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.0719, 0.4122, 0.7875, ..., 0.0407, 0.8322, 0.6511]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.025844097137451172 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '40628', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.4707634449005127} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 12, 18, ..., 99974, 99987, + 100000]), + col_indices=tensor([ 792, 1032, 1238, ..., 8561, 8731, 9370]), + values=tensor([0.4488, 0.9659, 0.1268, ..., 0.7863, 0.6709, 0.3638]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8213, 0.7389, 0.9585, ..., 0.8858, 0.0787, 0.3979]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 2.4707634449005127 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '172656', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.366683721542358} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 24, ..., 99973, 99986, + 100000]), + col_indices=tensor([ 684, 3301, 3344, ..., 8499, 8709, 9229]), + values=tensor([0.0104, 0.6771, 0.5927, ..., 0.6883, 0.2524, 0.4550]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.4786, 0.6837, 0.1379, ..., 0.3005, 0.2266, 0.1673]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 9.366683721542358 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '193546', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.657205820083618} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 13, 24, ..., 99982, 99990, + 100000]), + col_indices=tensor([ 667, 823, 2535, ..., 7218, 8112, 8309]), + values=tensor([0.9044, 0.9079, 0.6825, ..., 0.1587, 0.6143, 0.0618]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.5914, 0.6686, 0.5823, ..., 0.5362, 0.3609, 0.2297]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.657205820083618 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 13, 24, ..., 99982, 99990, + 100000]), + col_indices=tensor([ 667, 823, 2535, ..., 7218, 8112, 8309]), + values=tensor([0.9044, 0.9079, 0.6825, ..., 0.1587, 0.6143, 0.0618]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.5914, 0.6686, 0.5823, ..., 0.5362, 0.3609, 0.2297]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.657205820083618 seconds + +[40.9, 39.63, 39.53, 39.38, 39.44, 39.45, 39.92, 39.64, 39.85, 39.99] +[108.67] +13.106364250183105 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 193546, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.657205820083618, 'TIME_S_1KI': 0.05506290917964524, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1424.268603067398, 'W': 108.67} +[40.9, 39.63, 39.53, 39.38, 39.44, 39.45, 39.92, 39.64, 39.85, 39.99, 39.96, 39.48, 39.45, 39.41, 39.63, 39.38, 40.94, 39.79, 39.91, 39.74] +715.125 +35.75625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 193546, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.657205820083618, 'TIME_S_1KI': 0.05506290917964524, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1424.268603067398, 'W': 108.67, 'J_1KI': 7.358811874528009, 'W_1KI': 0.5614685914459612, 'W_D': 72.91375, 'J_D': 955.6341663467883, 'W_D_1KI': 0.3767256879501513, 'J_D_1KI': 0.001946440060503195} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..4c6ec73 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 102691, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.217256307601929, "TIME_S_1KI": 0.09949514862648069, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1713.9247414016722, "W": 132.92, "J_1KI": 16.690116382172462, "W_1KI": 1.294368542520766, "W_D": 96.75025, "J_D": 1247.5372194688318, "W_D_1KI": 0.9421492633239523, "J_D_1KI": 0.009174604038561825} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..7818ee2 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.027860403060913086} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 86, 184, ..., 999787, + 999901, 1000000]), + col_indices=tensor([ 81, 93, 211, ..., 9891, 9936, 9983]), + values=tensor([0.0273, 0.9948, 0.2764, ..., 0.0318, 0.5538, 0.8532]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8459, 0.7440, 0.9932, ..., 0.5464, 0.7654, 0.2266]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.027860403060913086 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '37687', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.853411912918091} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 90, 178, ..., 999807, + 999899, 1000000]), + col_indices=tensor([ 9, 87, 435, ..., 9776, 9821, 9947]), + values=tensor([0.6051, 0.3509, 0.6551, ..., 0.3060, 0.1178, 0.2325]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.6802, 0.0969, 0.8232, ..., 0.8757, 0.6573, 0.4893]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 3.853411912918091 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '102691', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.217256307601929} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 98, 191, ..., 999800, + 999904, 1000000]), + col_indices=tensor([ 18, 19, 89, ..., 9675, 9719, 9959]), + values=tensor([0.5811, 0.2000, 0.4195, ..., 0.8918, 0.7545, 0.5786]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.3032, 0.6522, 0.8844, ..., 0.7793, 0.6874, 0.5546]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.217256307601929 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 98, 191, ..., 999800, + 999904, 1000000]), + col_indices=tensor([ 18, 19, 89, ..., 9675, 9719, 9959]), + values=tensor([0.5811, 0.2000, 0.4195, ..., 0.8918, 0.7545, 0.5786]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.3032, 0.6522, 0.8844, ..., 0.7793, 0.6874, 0.5546]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.217256307601929 seconds + +[40.93, 39.91, 40.43, 39.4, 39.51, 40.07, 39.51, 39.35, 39.39, 44.7] +[132.92] +12.894408226013184 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102691, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.217256307601929, 'TIME_S_1KI': 0.09949514862648069, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1713.9247414016722, 'W': 132.92} +[40.93, 39.91, 40.43, 39.4, 39.51, 40.07, 39.51, 39.35, 39.39, 44.7, 40.04, 39.9, 39.44, 39.42, 39.81, 39.87, 45.14, 39.82, 39.78, 39.62] +723.395 +36.16975 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102691, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.217256307601929, 'TIME_S_1KI': 0.09949514862648069, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1713.9247414016722, 'W': 132.92, 'J_1KI': 16.690116382172462, 'W_1KI': 1.294368542520766, 'W_D': 96.75025, 'J_D': 1247.5372194688318, 'W_D_1KI': 0.9421492633239523, 'J_D_1KI': 0.009174604038561825} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..52cad91 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 27775, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.346900463104248, "TIME_S_1KI": 0.3725256692386768, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2138.1740951538086, "W": 151.25, "J_1KI": 76.98196562209931, "W_1KI": 5.445544554455445, "W_D": 115.164, "J_D": 1628.037563598633, "W_D_1KI": 4.146318631863187, "J_D_1KI": 0.1492823989869734} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..19caf4c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, 
"MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.07912850379943848} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 485, 973, ..., 4998984, + 4999512, 5000000]), + col_indices=tensor([ 23, 33, 35, ..., 9878, 9920, 9946]), + values=tensor([0.8956, 0.5440, 0.5650, ..., 0.6571, 0.0981, 0.4530]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.3320, 0.0557, 0.6993, ..., 0.8374, 0.3528, 0.6849]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 0.07912850379943848 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '13269', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 5.016182899475098} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 490, 975, ..., 4999017, + 4999511, 5000000]), + col_indices=tensor([ 5, 7, 17, ..., 9925, 9927, 9956]), + values=tensor([0.3061, 0.0982, 0.7519, ..., 0.4711, 0.1343, 0.2753]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.4300, 0.5593, 0.7816, ..., 0.7590, 0.1985, 0.5681]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 5.016182899475098 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27775', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.346900463104248} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 516, 985, ..., 4998986, + 4999503, 5000000]), + col_indices=tensor([ 0, 38, 62, ..., 9969, 9984, 9993]), + values=tensor([0.4538, 0.1922, 0.3497, ..., 0.8541, 0.7038, 0.0561]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.3516, 0.9610, 0.6827, ..., 0.5287, 0.4040, 0.0575]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.346900463104248 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 516, 985, ..., 4998986, + 4999503, 5000000]), + col_indices=tensor([ 0, 38, 62, ..., 9969, 9984, 9993]), + values=tensor([0.4538, 0.1922, 0.3497, ..., 0.8541, 0.7038, 0.0561]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.3516, 0.9610, 0.6827, ..., 0.5287, 0.4040, 0.0575]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.346900463104248 seconds + +[41.25, 39.91, 39.68, 39.83, 39.58, 41.43, 40.59, 39.78, 40.26, 39.69] +[151.25] +14.136688232421875 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.346900463104248, 'TIME_S_1KI': 0.3725256692386768, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2138.1740951538086, 'W': 151.25} +[41.25, 39.91, 39.68, 39.83, 39.58, 41.43, 40.59, 39.78, 40.26, 39.69, 40.82, 40.72, 40.11, 39.71, 39.6, 39.85, 39.65, 39.99, 39.77, 40.76] +721.72 +36.086 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.346900463104248, 'TIME_S_1KI': 0.3725256692386768, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2138.1740951538086, 'W': 151.25, 'J_1KI': 76.98196562209931, 'W_1KI': 5.445544554455445, 'W_D': 115.164, 'J_D': 1628.037563598633, 'W_D_1KI': 4.146318631863187, 'J_D_1KI': 0.1492823989869734} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..6336d4d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4427, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.78234076499939, "TIME_S_1KI": 2.4355863485428935, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1855.6078462219239, "W": 121.94, "J_1KI": 419.15695645401485, "W_1KI": 27.544612604472555, "W_D": 85.48649999999999, "J_D": 1300.8809262428283, "W_D_1KI": 19.310255251863563, "J_D_1KI": 4.361927999065633} diff --git 
a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..68c803b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.23713254928588867} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 967, 1927, ..., 9997983, + 9998974, 10000000]), + col_indices=tensor([ 2, 7, 17, ..., 9977, 9981, 9986]), + values=tensor([0.0113, 0.4578, 0.3712, ..., 0.8300, 0.4518, 0.5288]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6464, 0.5946, 0.9135, ..., 0.7384, 0.8851, 0.3138]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 0.23713254928588867 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4427', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.78234076499939} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1000, 1968, ..., 9997976, + 9998957, 10000000]), + col_indices=tensor([ 18, 35, 37, ..., 9972, 9974, 9993]), + values=tensor([0.5495, 0.5155, 0.6909, ..., 0.5748, 0.2988, 0.6189]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2327, 0.3005, 0.5005, ..., 0.5867, 0.2890, 0.0524]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.78234076499939 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1000, 1968, ..., 9997976, + 9998957, 10000000]), + col_indices=tensor([ 18, 35, 37, ..., 9972, 9974, 9993]), + values=tensor([0.5495, 0.5155, 0.6909, ..., 0.5748, 0.2988, 0.6189]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2327, 0.3005, 0.5005, ..., 0.5867, 0.2890, 0.0524]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.78234076499939 seconds + +[40.15, 39.73, 39.55, 39.93, 39.94, 39.96, 44.96, 39.82, 40.15, 39.43] +[121.94] +15.217384338378906 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4427, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.78234076499939, 'TIME_S_1KI': 2.4355863485428935, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1855.6078462219239, 'W': 121.94} +[40.15, 39.73, 39.55, 39.93, 39.94, 39.96, 44.96, 39.82, 40.15, 39.43, 40.99, 45.07, 39.68, 40.13, 40.01, 39.56, 39.72, 40.75, 40.01, 39.63] +729.07 +36.453500000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4427, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.78234076499939, 'TIME_S_1KI': 2.4355863485428935, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1855.6078462219239, 'W': 121.94, 'J_1KI': 419.15695645401485, 'W_1KI': 27.544612604472555, 'W_D': 85.48649999999999, 'J_D': 1300.8809262428283, 'W_D_1KI': 19.310255251863563, 'J_D_1KI': 4.361927999065633} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..3673122 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2210, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.475984573364258, "TIME_S_1KI": 4.740264512834506, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2065.6901198005676, "W": 119.44, "J_1KI": 934.701411674465, "W_1KI": 54.04524886877828, "W_D": 83.048, "J_D": 1436.2979995746614, "W_D_1KI": 37.57828054298643, "J_D_1KI": 17.00374685203006} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..5e9953a --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.5103092193603516} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2001, 3993, ..., 19996027, + 19997998, 20000000]), + col_indices=tensor([ 4, 8, 12, ..., 9988, 9991, 9998]), + values=tensor([0.1397, 0.5991, 0.8904, ..., 0.1163, 0.3047, 0.7503]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.7325, 0.0863, 0.4494, ..., 0.5445, 0.3494, 0.7015]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 0.5103092193603516 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2057', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 9.769307136535645} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1965, 3996, ..., 19995929, + 19997992, 20000000]), + col_indices=tensor([ 4, 9, 15, ..., 9975, 9986, 9992]), + values=tensor([0.0708, 0.7889, 0.9973, ..., 0.4384, 0.2830, 0.3299]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.8359, 0.1884, 0.2769, ..., 0.8252, 0.8191, 0.5472]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 9.769307136535645 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2210', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.475984573364258} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2066, 4070, ..., 19995990, + 19998002, 20000000]), + col_indices=tensor([ 1, 2, 8, ..., 9986, 9990, 9993]), + values=tensor([0.6258, 0.8376, 0.0180, ..., 0.7990, 0.4511, 0.0511]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.7373, 0.4078, 0.5568, ..., 0.6016, 0.2858, 0.4434]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.475984573364258 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2066, 4070, ..., 19995990, + 19998002, 20000000]), + col_indices=tensor([ 1, 2, 8, ..., 9986, 9990, 9993]), + values=tensor([0.6258, 0.8376, 0.0180, ..., 0.7990, 0.4511, 0.0511]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.7373, 0.4078, 0.5568, ..., 0.6016, 0.2858, 0.4434]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.475984573364258 seconds + +[40.41, 45.21, 40.37, 40.41, 40.26, 39.65, 40.47, 41.87, 39.92, 39.65] +[119.44] +17.294793367385864 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2210, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.475984573364258, 'TIME_S_1KI': 4.740264512834506, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2065.6901198005676, 'W': 119.44} +[40.41, 45.21, 40.37, 40.41, 40.26, 39.65, 40.47, 41.87, 39.92, 39.65, 40.39, 39.85, 39.69, 40.06, 40.15, 39.58, 40.59, 39.58, 39.95, 40.01] +727.8399999999999 +36.391999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2210, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.475984573364258, 'TIME_S_1KI': 4.740264512834506, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2065.6901198005676, 'W': 119.44, 'J_1KI': 934.701411674465, 'W_1KI': 54.04524886877828, 'W_D': 83.048, 'J_D': 1436.2979995746614, 'W_D_1KI': 37.57828054298643, 'J_D_1KI': 17.00374685203006} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..21b0219 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1434, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.430570602416992, "TIME_S_1KI": 7.273759136971403, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2171.006755914688, "W": 115.95999999999998, "J_1KI": 1513.9517126322787, "W_1KI": 80.8647140864714, "W_D": 79.27299999999998, "J_D": 1484.1515915973182, "W_D_1KI": 55.281032078103195, "J_D_1KI": 38.55023157468842} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..29bb118 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 
10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.7320935726165771} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2976, 5887, ..., 29993981, + 29996974, 30000000]), + col_indices=tensor([ 2, 12, 13, ..., 9995, 9997, 9999]), + values=tensor([0.2872, 0.6919, 0.0045, ..., 0.7234, 0.8152, 0.1470]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3759, 0.5048, 0.7452, ..., 0.9323, 0.0206, 0.6020]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 0.7320935726165771 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1434', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.430570602416992} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2946, 5856, ..., 29993956, + 29997054, 30000000]), + col_indices=tensor([ 1, 3, 10, ..., 9992, 9994, 9995]), + values=tensor([0.6658, 0.8893, 0.2640, ..., 0.2436, 0.9944, 0.7745]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.7478, 0.4417, 0.0487, ..., 0.7713, 0.8445, 0.5646]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.430570602416992 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2946, 5856, ..., 29993956, + 29997054, 30000000]), + col_indices=tensor([ 1, 3, 10, ..., 9992, 9994, 9995]), + values=tensor([0.6658, 0.8893, 0.2640, ..., 0.2436, 0.9944, 0.7745]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.7478, 0.4417, 0.0487, ..., 0.7713, 0.8445, 0.5646]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.430570602416992 seconds + +[40.39, 40.27, 40.31, 40.01, 39.78, 40.12, 45.36, 39.94, 40.02, 52.21] +[115.96] +18.722031354904175 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1434, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.430570602416992, 'TIME_S_1KI': 7.273759136971403, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2171.006755914688, 'W': 115.95999999999998} +[40.39, 40.27, 40.31, 40.01, 39.78, 40.12, 45.36, 39.94, 40.02, 52.21, 41.63, 40.16, 40.0, 40.35, 41.93, 39.55, 39.78, 39.55, 39.63, 39.73] +733.74 +36.687 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1434, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.430570602416992, 'TIME_S_1KI': 7.273759136971403, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2171.006755914688, 'W': 115.95999999999998, 'J_1KI': 1513.9517126322787, 'W_1KI': 80.8647140864714, 'W_D': 79.27299999999998, 'J_D': 1484.1515915973182, 'W_D_1KI': 55.281032078103195, 'J_D_1KI': 38.55023157468842} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..7eea71b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 349456, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.173964023590088, "TIME_S_1KI": 0.03197531026392475, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1252.9686733055114, "W": 97.63, "J_1KI": 3.585483360724988, "W_1KI": 0.2793770889611282, "W_D": 61.798, "J_D": 793.1061976127625, "W_D_1KI": 0.17684057506524428, "J_D_1KI": 0.0005060453249200021} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..518e71c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1307 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.018783092498779297} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([7710, 770, 4407, 1870, 7174, 5318, 1450, 1184, 2850, + 4403, 2123, 255, 810, 7400, 565, 3550, 9613, 7496, + 6015, 975, 9271, 7248, 4063, 4805, 7196, 7869, 4589, + 3665, 14, 7810, 5136, 5919, 2466, 4173, 9881, 3733, + 1870, 1230, 6453, 5008, 3988, 9434, 1211, 7162, 7902, + 9745, 4411, 5286, 1509, 4833, 7496, 1798, 4, 3586, + 7060, 2811, 8112, 4764, 9435, 3836, 5882, 3125, 9850, + 2118, 4178, 9802, 8785, 5451, 5614, 8254, 6937, 7688, + 7331, 3332, 6913, 3221, 6350, 4032, 1334, 7216, 129, + 9945, 7932, 9294, 1550, 7754, 4771, 5013, 294, 9524, + 9420, 8530, 7837, 2039, 9838, 1792, 5287, 5887, 573, + 7513, 5802, 9931, 5757, 4848, 2498, 9852, 172, 7356, + 7917, 1181, 539, 4514, 1534, 8369, 4234, 9382, 7442, + 6827, 9408, 9891, 2006, 5444, 9700, 4515, 5848, 7202, + 8465, 8202, 7227, 5428, 6226, 7163, 1915, 9424, 7937, + 3286, 7876, 401, 9372, 5269, 534, 8486, 5070, 2186, + 2780, 8211, 5164, 9491, 8801, 2669, 4834, 6041, 906, + 6304, 6419, 5278, 8990, 5734, 1221, 935, 3464, 2356, + 7735, 7209, 5212, 1318, 8268, 9084, 3729, 231, 1156, + 1335, 6818, 8619, 6611, 8933, 2141, 101, 2488, 1604, + 1138, 1432, 2355, 1314, 1098, 1619, 4109, 4763, 1605, + 2774, 7361, 843, 6381, 1534, 9790, 4775, 7164, 7105, + 6239, 3980, 382, 411, 7505, 7659, 3367, 1766, 3195, + 9470, 2528, 9322, 6959, 2216, 491, 8725, 2371, 4382, + 5718, 5217, 7058, 4457, 5435, 1249, 9086, 4957, 5280, + 7131, 4958, 8168, 4939, 4037, 3075, 3367, 7266, 6066, + 5464, 2058, 4013, 2658, 7499, 9138, 8214, 6222, 4674, + 6822, 9675, 5801, 8254, 8831, 8871, 6911, 8523, 885, + 5318, 2866, 8999, 8361, 5772, 5641, 171, 8207, 6823, + 1799, 2385, 3933, 8099, 9437, 4173, 4273, 4399, 7947, + 538, 8128, 6522, 1001, 6458, 9133, 842, 3215, 3937, + 8256, 4826, 8778, 6085, 7131, 1758, 1134, 7002, 8167, + 700, 4064, 7323, 8384, 4717, 7001, 6775, 3204, 701, + 2756, 9559, 5146, 8344, 1565, 2384, 9287, 6270, 6517, + 9569, 3605, 1309, 1171, 4748, 2704, 3906, 6066, 1315, + 4722, 9012, 2854, 9405, 9514, 7715, 2699, 2312, 9518, + 1435, 2191, 85, 1881, 1234, 2331, 4237, 5757, 7932, + 9712, 3332, 7705, 1404, 8567, 6763, 9879, 1736, 3894, + 5687, 8039, 1164, 621, 2677, 2713, 7254, 2093, 2442, + 8214, 7080, 8260, 2130, 5394, 902, 5406, 5871, 8131, + 8425, 1840, 8258, 10, 3515, 7795, 4351, 23, 2317, + 4596, 1004, 626, 9911, 3655, 1067, 5724, 9150, 3004, + 1248, 7595, 4785, 5244, 8336, 4341, 9387, 1506, 8501, + 2147, 1566, 6495, 4554, 7326, 6889, 8027, 651, 2541, + 5072, 7045, 8106, 4631, 6832, 1063, 4955, 3837, 5979, + 5449, 2272, 9439, 522, 2753, 9349, 9909, 7912, 2140, + 3823, 565, 5366, 4263, 9476, 8456, 9778, 4716, 8861, + 3068, 7529, 8311, 2167, 8097, 9726, 5963, 8961, 8951, + 8141, 2407, 5529, 1677, 2350, 6893, 7197, 3753, 5887, + 8876, 4491, 4643, 8990, 596, 8161, 7846, 2289, 9180, + 7037, 5854, 6062, 8432, 824, 4812, 3129, 8178, 4110, + 9628, 3692, 997, 3675, 9501, 8580, 8343, 4389, 3215, + 2282, 3336, 4379, 5429, 3859, 5531, 1391, 1842, 9054, + 3059, 3924, 4109, 4585, 1402, 5590, 2428, 5822, 4347, + 9652, 4937, 8518, 2936, 4541, 4664, 7316, 3628, 1576, + 100, 6717, 6869, 5782, 4769, 6381, 420, 7325, 9706, + 5457, 6444, 6589, 843, 7648, 9948, 200, 1616, 3739, + 1287, 4261, 1655, 
8236, 7002, 480, 4835, 8836, 2589, + 7842, 2577, 3714, 9973, 9407, 6133, 4479, 8904, 166, + 7918, 5703, 6259, 4920, 9996, 9898, 9260, 9581, 5155, + 2417, 2267, 2847, 3259, 4611, 7016, 3338, 8196, 6441, + 9820, 9477, 466, 5776, 9899, 731, 9644, 9431, 476, + 9327, 5566, 7989, 3039, 9320, 1202, 381, 1915, 2369, + 3818, 8016, 6121, 6692, 1702, 1859, 3194, 8553, 3794, + 1815, 8135, 7504, 9443, 5902, 8225, 1819, 3757, 9067, + 7582, 7642, 3549, 7259, 1959, 3004, 6866, 1165, 2517, + 9690, 7905, 3279, 6036, 7950, 6116, 3751, 5294, 4615, + 9314, 6307, 3673, 1490, 4111, 1240, 8263, 4110, 4571, + 2905, 2774, 1985, 3126, 6084, 5700, 1229, 5879, 8154, + 2148, 4874, 2092, 9170, 3482, 664, 5554, 1113, 9484, + 3978, 7225, 3749, 8894, 8534, 5256, 5887, 8419, 5698, + 8427, 8697, 7604, 2812, 2806, 1845, 6003, 3130, 760, + 2497, 2541, 9127, 1210, 1651, 530, 1994, 1040, 8645, + 8666, 9304, 4335, 8289, 1101, 6332, 1007, 8169, 9969, + 943, 4525, 8690, 4159, 1817, 6073, 4957, 596, 6283, + 3051, 4485, 7766, 8516, 526, 7053, 8582, 7056, 943, + 6579, 9174, 2254, 8501, 1773, 956, 877, 7705, 4613, + 5648, 4661, 1809, 2688, 8514, 8802, 4722, 545, 4747, + 6633, 3814, 8935, 7588, 1678, 8488, 5607, 673, 2681, + 8017, 6281, 3685, 3530, 8546, 4879, 7593, 4273, 2876, + 3735, 2848, 836, 940, 4597, 1074, 1122, 8693, 963, + 3887, 7554, 930, 4424, 9656, 8086, 135, 4899, 5218, + 8994, 418, 969, 6615, 8605, 1505, 6523, 1169, 6709, + 5427, 5511, 3558, 5322, 8458, 850, 2892, 4597, 5142, + 8980, 9053, 5609, 1077, 5309, 1757, 306, 460, 2458, + 4048, 8475, 4050, 6980, 4499, 6977, 1259, 1464, 3611, + 8751, 6728, 7071, 9544, 9297, 512, 4042, 6462, 2944, + 5752, 9601, 679, 9648, 7279, 2893, 973, 4996, 1919, + 5484, 6026, 3293, 7948, 7029, 8673, 3172, 2794, 8323, + 821, 1937, 6356, 4283, 5725, 2893, 1377, 9981, 7003, + 5977, 8646, 3216, 6885, 4725, 8141, 3585, 6384, 9261, + 9676, 1311, 3308, 6435, 8132, 8525, 2917, 7023, 2344, + 7600, 4383, 4886, 6233, 7280, 8202, 1726, 1918, 3206, + 8062, 434, 361, 124, 6241, 4326, 7134, 295, 2286, + 4397, 3630, 485, 8219, 5832, 5393, 8505, 8369, 461, + 8188, 4054, 8534, 5850, 8902, 4999, 6423, 3476, 6192, + 6572, 6196, 5040, 4242, 7067, 7952, 3299, 5653, 1326, + 858, 1204, 3430, 462, 9954, 4911, 2910, 3563, 4861, + 4090, 3934, 3405, 8602, 1698, 9993, 6680, 3885, 6072, + 4399, 7396, 8831, 6459, 8768, 6290, 6186, 7949, 3125, + 1261, 9770, 5246, 3605, 357, 7933, 8926, 5531, 3614, + 3729, 5340, 883, 1173, 7534, 8589, 6681, 4108, 8234, + 2703, 7749, 2006, 4313, 509, 3164, 3554, 5906, 4437, + 7829, 1278, 3055, 4732, 6488, 1268, 6449, 6632, 7614, + 45, 1360, 9281, 7504, 2281, 4706, 4960, 4576, 206, + 6864, 5459, 1834, 4347, 1803, 929, 7593, 5081, 9952, + 4949, 323, 8662, 2381, 5486, 2263, 306, 3850, 8532, + 8650, 6620, 5629, 3893, 7949, 275, 3546, 8768, 8253, + 9465, 120, 747, 2625, 603, 4903, 2906, 4444, 8192, + 5259, 2313, 293, 2063, 856, 8668, 8628, 7481, 5244, + 9114, 8455, 3952, 4502, 7495, 3074, 8209, 6343, 4118, + 8429]), + values=tensor([1.0187e-01, 9.3060e-02, 7.1844e-01, 6.0591e-01, + 1.7420e-01, 4.7341e-01, 4.6521e-01, 2.2759e-01, + 8.5069e-01, 3.6280e-01, 9.4888e-01, 6.2180e-01, + 8.8663e-01, 8.0888e-01, 1.3511e-01, 8.9212e-01, + 4.9632e-01, 7.1062e-02, 8.8913e-01, 3.6555e-01, + 5.7802e-01, 9.5902e-01, 6.7906e-01, 5.7533e-03, + 9.6566e-01, 6.2483e-01, 5.0879e-01, 6.7668e-01, + 9.1177e-01, 4.3629e-01, 2.3313e-01, 8.4588e-01, + 4.5185e-01, 2.4600e-01, 8.2141e-01, 2.8976e-01, + 9.1767e-01, 3.5967e-01, 1.4506e-01, 7.2241e-01, + 6.2974e-01, 6.3780e-01, 8.8090e-01, 2.9092e-01, + 1.6965e-01, 3.6359e-01, 
7.3928e-01, 8.5548e-01, + 5.7369e-01, 3.7085e-01, 5.3463e-01, 1.7885e-02, + 8.9381e-01, 5.2509e-01, 7.4178e-01, 4.7149e-01, + 8.9602e-01, 8.5585e-01, 4.7822e-01, 9.9631e-01, + 1.3915e-01, 5.7821e-01, 7.0603e-02, 7.6275e-01, + 3.7581e-01, 4.5089e-01, 1.0040e-01, 2.8821e-01, + 4.0836e-02, 8.7711e-01, 2.5635e-01, 4.7419e-01, + 3.1472e-01, 4.7404e-01, 6.0210e-01, 1.7740e-01, + 3.7222e-01, 1.8530e-02, 6.6757e-01, 7.5811e-02, + 1.3391e-01, 9.8196e-01, 3.7396e-01, 5.2924e-02, + 5.7431e-01, 4.5457e-03, 6.3555e-01, 6.9843e-01, + 8.9610e-01, 8.6400e-01, 8.8516e-01, 3.9290e-01, + 7.0320e-01, 6.1583e-01, 8.4624e-01, 7.7953e-01, + 9.0684e-01, 4.3665e-01, 8.3774e-01, 1.8001e-01, + 4.1709e-01, 3.5144e-01, 3.1082e-02, 5.4723e-01, + 8.2229e-01, 4.0371e-01, 4.8804e-01, 8.0640e-01, + 7.9319e-01, 3.0761e-01, 3.5246e-01, 6.6259e-01, + 7.5795e-01, 3.3689e-03, 2.9805e-01, 4.4254e-01, + 7.3029e-01, 1.1907e-01, 4.1236e-01, 3.9221e-01, + 2.1364e-01, 6.5107e-01, 1.3201e-01, 6.9985e-01, + 2.8995e-01, 6.0845e-01, 7.6109e-01, 5.2480e-01, + 4.7204e-01, 1.9708e-01, 7.3808e-02, 8.5301e-01, + 5.7930e-02, 5.6419e-03, 6.5604e-01, 1.1150e-01, + 2.7579e-01, 5.0248e-01, 8.6125e-01, 5.2749e-01, + 4.8346e-01, 5.3141e-01, 9.3706e-01, 4.5452e-01, + 9.6538e-02, 5.6358e-01, 1.0907e-01, 2.9151e-02, + 2.9892e-01, 8.9018e-01, 6.1924e-02, 4.5372e-02, + 4.1819e-01, 4.4428e-01, 3.8712e-01, 2.9124e-01, + 6.8060e-02, 4.4495e-01, 8.6639e-01, 1.2161e-01, + 1.9770e-01, 8.9914e-01, 7.8519e-01, 7.4160e-01, + 6.3062e-01, 9.6251e-01, 6.3046e-01, 5.4702e-01, + 3.9281e-02, 6.5735e-01, 1.2776e-01, 4.1871e-01, + 9.6472e-01, 8.5551e-01, 3.7850e-01, 2.9965e-01, + 8.3225e-01, 2.3553e-02, 8.0975e-01, 9.3845e-02, + 9.4994e-01, 4.7182e-01, 6.9838e-01, 2.9612e-01, + 5.5496e-01, 6.5603e-01, 4.6633e-01, 4.0017e-01, + 5.0322e-01, 7.8427e-01, 4.3211e-01, 2.4713e-01, + 6.8107e-01, 7.3829e-01, 7.4132e-01, 8.6413e-01, + 9.8284e-01, 4.1509e-01, 4.1459e-01, 5.9436e-01, + 7.1965e-01, 3.9605e-01, 7.9076e-01, 1.5670e-01, + 7.7054e-01, 8.0907e-01, 5.7976e-01, 8.4957e-01, + 7.5938e-01, 2.6316e-01, 3.6742e-01, 6.7139e-01, + 6.2663e-01, 2.8545e-01, 8.4638e-01, 3.3650e-01, + 1.9393e-01, 8.2539e-01, 8.7496e-01, 4.5566e-01, + 3.5352e-01, 6.8968e-01, 9.7918e-01, 8.4556e-01, + 9.0911e-01, 5.3111e-01, 4.9952e-01, 3.4829e-01, + 9.9674e-01, 9.1414e-01, 9.0159e-01, 7.1385e-01, + 4.0399e-01, 1.9201e-02, 6.1073e-02, 9.3500e-01, + 7.7952e-01, 6.8003e-01, 7.1526e-03, 2.5124e-01, + 8.2126e-02, 2.2497e-01, 8.0977e-01, 1.9978e-01, + 9.0065e-01, 6.1026e-01, 1.6002e-01, 9.5304e-01, + 6.4606e-01, 8.9434e-01, 6.6189e-01, 8.8742e-01, + 7.8087e-01, 9.0584e-01, 7.2847e-01, 5.8028e-01, + 6.0780e-01, 4.9432e-01, 9.2651e-01, 9.0691e-01, + 4.3390e-01, 1.2290e-01, 8.1558e-02, 4.9520e-01, + 2.3303e-01, 6.8030e-01, 8.5078e-02, 5.9635e-01, + 4.9854e-01, 3.5607e-01, 2.3071e-02, 2.0342e-01, + 3.3403e-01, 4.7020e-01, 1.1812e-01, 1.9543e-01, + 2.3376e-01, 3.8392e-01, 8.7559e-02, 3.5012e-01, + 3.1852e-01, 4.5980e-01, 3.7863e-01, 4.7870e-01, + 8.9047e-01, 9.6861e-01, 1.7266e-01, 8.3507e-01, + 4.3536e-02, 5.5165e-01, 3.4361e-01, 8.9113e-01, + 2.0902e-01, 8.3518e-01, 6.6187e-01, 3.8314e-01, + 8.0300e-01, 6.5042e-01, 6.9120e-01, 6.0188e-01, + 4.8010e-01, 2.7651e-01, 9.0978e-01, 7.1078e-01, + 5.9103e-01, 6.3327e-01, 5.3087e-01, 2.9399e-01, + 7.3804e-01, 4.2389e-01, 1.6557e-01, 8.5186e-01, + 2.3146e-01, 6.6881e-01, 2.2386e-01, 1.5332e-01, + 2.7304e-01, 1.0240e-01, 4.3476e-01, 7.8105e-01, + 5.8972e-02, 7.5568e-01, 7.1605e-01, 8.5101e-01, + 8.7931e-01, 4.0044e-01, 2.4097e-01, 8.2776e-01, + 6.5361e-01, 4.0509e-01, 
5.9794e-01, 7.8265e-01, + 3.4318e-01, 5.8254e-01, 2.8341e-01, 6.9728e-01, + 6.3303e-01, 7.0794e-01, 6.7661e-01, 8.4756e-01, + 9.3971e-01, 2.7594e-01, 3.4066e-01, 8.1946e-01, + 5.6985e-01, 2.8557e-01, 3.6155e-01, 9.1018e-01, + 4.4262e-01, 9.1349e-01, 6.9133e-01, 1.1543e-01, + 8.8510e-01, 2.8478e-01, 5.3501e-01, 9.5937e-01, + 3.1066e-01, 2.5479e-01, 6.5123e-01, 2.6004e-01, + 3.6510e-01, 3.3774e-01, 1.0742e-01, 1.6244e-01, + 9.6209e-01, 7.8917e-01, 5.9193e-01, 1.4050e-01, + 4.6784e-01, 1.3862e-01, 6.8783e-01, 3.8758e-01, + 3.8488e-01, 4.5045e-01, 9.8813e-01, 3.1351e-02, + 9.9879e-01, 3.6004e-01, 2.5033e-01, 4.8943e-01, + 9.7100e-01, 8.4453e-01, 6.6768e-01, 8.8569e-02, + 7.1994e-01, 9.2108e-01, 3.8162e-01, 2.4625e-01, + 3.0837e-01, 1.7831e-01, 1.5312e-01, 6.7478e-01, + 3.7105e-01, 2.7460e-01, 5.6371e-01, 7.5959e-01, + 3.7224e-01, 2.1087e-01, 6.5640e-01, 1.1956e-01, + 1.8089e-01, 3.5428e-01, 1.1136e-02, 7.2999e-01, + 4.7290e-02, 3.2416e-01, 9.4583e-01, 2.1888e-01, + 6.7653e-01, 6.1755e-01, 7.2680e-01, 9.9049e-01, + 2.7798e-02, 1.3499e-01, 2.4043e-01, 8.1927e-01, + 6.0770e-01, 9.3564e-01, 4.0376e-01, 6.0613e-01, + 4.2311e-01, 2.4050e-01, 4.2569e-01, 3.2700e-01, + 6.6681e-01, 9.1598e-01, 2.4610e-02, 3.1564e-01, + 7.7390e-01, 5.8683e-01, 2.3132e-01, 2.3585e-01, + 4.6167e-01, 2.3949e-01, 2.4135e-01, 3.5397e-01, + 9.1516e-01, 4.1043e-01, 7.7809e-01, 7.3179e-02, + 5.8349e-01, 2.8828e-01, 6.1494e-01, 2.4378e-01, + 8.2912e-01, 7.3234e-01, 6.8131e-01, 3.3134e-02, + 7.6679e-02, 5.6862e-01, 5.4287e-01, 7.6513e-01, + 6.2055e-01, 5.7089e-01, 7.2783e-01, 2.7541e-01, + 9.3864e-01, 5.8064e-01, 9.2294e-01, 5.6055e-01, + 7.2024e-01, 2.5839e-01, 8.3608e-02, 7.3547e-01, + 7.6127e-02, 3.2870e-01, 7.7548e-02, 6.2544e-01, + 3.8702e-01, 7.9350e-01, 4.0659e-01, 2.4674e-01, + 2.7607e-01, 5.0202e-01, 9.5886e-01, 9.5818e-01, + 3.3974e-02, 9.7595e-01, 7.9134e-01, 9.5878e-01, + 1.0231e-01, 7.7522e-01, 7.3580e-01, 9.2291e-01, + 9.1663e-01, 4.4586e-01, 4.6182e-01, 7.2430e-01, + 9.2178e-04, 8.5347e-01, 5.4921e-01, 4.5417e-01, + 2.2747e-02, 2.5915e-01, 4.5402e-01, 3.1192e-01, + 8.2311e-01, 9.7335e-01, 2.7230e-01, 5.4272e-01, + 8.7519e-01, 9.1728e-01, 2.9016e-01, 1.6495e-01, + 4.2395e-01, 8.5072e-01, 1.9714e-03, 2.7308e-01, + 9.7770e-01, 9.1682e-01, 9.9182e-01, 3.7958e-01, + 7.9282e-01, 1.7761e-01, 6.2555e-01, 6.8101e-01, + 6.7394e-01, 8.8343e-01, 3.4308e-01, 8.5129e-01, + 4.1297e-01, 7.1079e-01, 5.1222e-02, 5.7159e-01, + 6.0001e-01, 5.4284e-01, 5.7793e-01, 3.1507e-01, + 9.8278e-01, 8.2539e-01, 4.6261e-01, 7.3225e-02, + 8.8612e-01, 6.5413e-01, 3.0173e-01, 2.1194e-02, + 3.2441e-01, 3.9085e-01, 8.6803e-01, 7.0461e-01, + 7.6602e-01, 1.9908e-01, 2.9866e-01, 9.3469e-01, + 9.5369e-02, 2.5631e-01, 9.0341e-01, 5.0032e-01, + 6.7851e-01, 9.1421e-01, 5.2777e-01, 2.0974e-01, + 6.0356e-01, 7.4990e-01, 2.5120e-01, 7.3056e-01, + 5.7654e-01, 8.6192e-01, 4.5282e-01, 4.8927e-01, + 1.0209e-01, 1.4365e-01, 1.0649e-01, 1.5559e-01, + 3.7894e-02, 6.8080e-01, 3.9419e-01, 1.0516e-02, + 2.1907e-01, 7.3503e-01, 4.9516e-01, 9.0746e-01, + 4.9746e-01, 9.1123e-01, 4.1414e-01, 8.3851e-02, + 1.0745e-01, 9.4031e-01, 6.3535e-01, 8.9329e-02, + 7.6318e-01, 5.4906e-01, 3.5425e-01, 2.6120e-01, + 8.3019e-01, 8.3127e-01, 2.5971e-02, 7.0373e-01, + 4.5245e-01, 1.0757e-02, 1.2942e-01, 8.2025e-01, + 7.7421e-01, 1.9773e-01, 2.3885e-01, 9.0482e-01, + 2.8218e-01, 7.5291e-01, 7.6466e-01, 5.2751e-01, + 3.9001e-02, 3.3145e-01, 7.2533e-01, 3.6741e-01, + 9.0607e-01, 7.0040e-01, 4.2489e-01, 4.9042e-01, + 2.3157e-01, 2.5353e-01, 4.2850e-01, 1.3589e-01, + 2.8043e-01, 5.4906e-01, 
9.4282e-01, 8.9276e-02, + 1.1162e-01, 8.5177e-01, 7.7036e-01, 7.0436e-02, + 5.3515e-01, 9.3477e-01, 5.8106e-01, 9.6481e-01, + 9.4024e-01, 7.2166e-01, 2.4153e-01, 4.2523e-01, + 3.5659e-01, 6.0512e-01, 2.9269e-01, 8.0189e-01, + 4.4130e-01, 3.9543e-01, 6.0493e-01, 6.3652e-01, + 6.5235e-02, 2.5942e-01, 6.6071e-01, 2.2841e-01, + 3.8493e-02, 3.4606e-01, 6.8945e-01, 3.6160e-01, + 4.8673e-01, 8.7753e-01, 6.6910e-01, 8.8066e-01, + 7.5074e-01, 7.3308e-01, 2.4525e-01, 1.5114e-01, + 2.3688e-01, 6.3304e-01, 1.9778e-01, 6.5651e-01, + 2.9999e-01, 6.9139e-01, 9.5397e-01, 9.8093e-01, + 3.1673e-01, 1.1094e-02, 8.6636e-01, 7.6271e-01, + 4.9755e-02, 3.7028e-01, 7.5132e-01, 1.0659e-01, + 7.1971e-02, 4.3799e-01, 3.8666e-01, 8.3342e-01, + 6.1201e-01, 8.8591e-01, 9.8481e-01, 6.1100e-01, + 2.3665e-01, 8.8731e-01, 3.7244e-01, 9.9093e-01, + 1.0523e-01, 4.3914e-01, 6.7271e-01, 7.1795e-02, + 4.5039e-01, 8.5481e-01, 6.8785e-01, 1.2909e-01, + 5.7161e-01, 4.2390e-01, 7.6292e-01, 7.7430e-01, + 3.4998e-01, 8.0385e-01, 2.4192e-01, 1.8999e-01, + 3.6927e-02, 6.7924e-01, 3.3984e-01, 4.8839e-01, + 5.0435e-01, 9.1106e-01, 8.7183e-01, 9.3404e-01, + 7.4605e-01, 1.5313e-01, 9.5520e-01, 2.0581e-01, + 8.7214e-01, 1.4978e-01, 9.4661e-02, 7.5151e-01, + 5.6273e-01, 4.2754e-01, 2.4315e-03, 1.1371e-02, + 8.0237e-01, 5.4754e-01, 2.6562e-01, 6.4585e-01, + 2.4939e-01, 5.2355e-01, 7.6659e-01, 1.1318e-01, + 6.1209e-01, 3.7856e-01, 7.3262e-01, 5.2861e-01, + 6.9979e-01, 3.6198e-01, 8.5596e-01, 9.1891e-01, + 7.8492e-01, 6.1844e-01, 8.1427e-01, 3.6322e-01, + 7.6290e-02, 3.5808e-01, 5.2445e-02, 9.3998e-01, + 8.7392e-01, 3.5312e-01, 5.3949e-01, 6.0424e-01, + 1.2667e-01, 5.7768e-01, 8.0920e-02, 5.9784e-01, + 1.6667e-01, 5.3329e-01, 7.7051e-01, 2.4444e-01, + 5.4399e-01, 1.7330e-01, 9.2604e-01, 7.7836e-01, + 7.0428e-01, 9.4967e-01, 6.7980e-01, 6.7804e-01, + 3.5864e-02, 2.5274e-01, 9.2254e-01, 7.9618e-01, + 4.4765e-01, 7.0517e-01, 9.5345e-01, 8.4152e-01, + 1.7730e-01, 8.2868e-01, 9.0518e-01, 6.7487e-02, + 9.0151e-01, 1.0078e-02, 1.8471e-01, 7.8913e-01, + 4.7998e-01, 7.7939e-01, 1.8432e-01, 6.7453e-01, + 4.6409e-01, 3.5097e-01, 5.1731e-01, 6.5674e-01, + 5.7423e-01, 9.0521e-01, 7.9832e-01, 5.1312e-01, + 7.4221e-01, 6.1487e-01, 7.2648e-01, 5.1655e-01, + 8.6038e-01, 4.0014e-01, 7.3141e-01, 1.9754e-01, + 6.2998e-02, 1.4990e-01, 6.3575e-01, 4.4178e-01, + 9.7002e-01, 4.0844e-01, 8.9150e-01, 9.0679e-01, + 1.1578e-01, 6.5042e-02, 6.7489e-01, 6.4330e-01, + 4.7278e-01, 4.3936e-02, 1.5943e-01, 1.6540e-01, + 6.8697e-01, 8.3274e-01, 6.2265e-01, 6.5555e-01, + 7.5682e-04, 9.0246e-01, 7.4646e-02, 1.0355e-01, + 4.0761e-01, 7.4609e-01, 2.0902e-01, 3.1042e-01, + 1.7561e-01, 6.6688e-02, 6.0252e-01, 1.8240e-01, + 2.0381e-02, 6.2593e-02, 6.0495e-01, 1.1569e-01, + 7.6802e-02, 7.6070e-01, 8.1046e-01, 6.4437e-01, + 3.0459e-01, 8.7910e-01, 3.7994e-01, 2.7927e-01, + 6.6902e-02, 4.4550e-01, 8.7868e-01, 5.1148e-02, + 3.7950e-01, 7.1735e-01, 7.3888e-01, 9.8846e-01, + 9.4200e-01, 3.9824e-01, 2.8120e-01, 6.0547e-01, + 7.3369e-01, 3.7128e-01, 2.6315e-01, 2.1216e-01, + 5.2662e-01, 1.2747e-02, 6.0734e-02, 9.7093e-01, + 5.7339e-01, 8.1099e-01, 2.1891e-01, 9.8452e-01, + 8.1244e-01, 5.1478e-01, 8.6700e-01, 4.3527e-01, + 1.5690e-01, 5.7105e-01, 2.3991e-01, 2.5589e-01, + 1.2496e-01, 2.7035e-01, 9.8064e-02, 4.6583e-01, + 6.9609e-01, 6.3492e-01, 6.7879e-01, 2.9228e-01, + 3.7713e-01, 8.8354e-01, 7.9828e-01, 2.7429e-01, + 3.2010e-01, 3.4870e-01, 7.3549e-01, 9.9698e-01, + 8.0784e-01, 2.8592e-01, 4.7241e-01, 7.1215e-01, + 9.8854e-01, 9.2548e-01, 3.7415e-01, 6.5025e-01, + 5.6132e-01, 5.7550e-01, 
1.9049e-01, 9.3661e-02, + 7.0370e-01, 2.4399e-01, 8.8730e-01, 3.4681e-01, + 8.1782e-01, 7.0824e-01, 4.1793e-01, 4.9273e-01, + 1.5299e-01, 9.9841e-01, 4.9900e-01, 4.2334e-01, + 7.8859e-01, 3.3689e-01, 1.3827e-01, 5.2274e-01, + 8.2524e-01, 4.7324e-01, 1.6809e-01, 8.3103e-01, + 2.3078e-01, 2.4739e-02, 3.6660e-01, 2.9660e-01, + 8.3582e-01, 6.6770e-01, 5.1626e-01, 1.7800e-02, + 4.9796e-01, 5.8092e-01, 3.3015e-01, 7.0134e-01, + 3.7040e-01, 3.8461e-01, 6.1818e-01, 3.8031e-01, + 7.7129e-01, 1.1370e-02, 4.1709e-01, 3.1538e-01, + 4.6468e-01, 2.7931e-01, 7.6062e-01, 9.2389e-01, + 1.6115e-01, 5.9450e-01, 7.3925e-01, 5.6612e-01, + 8.8551e-01, 3.5357e-01, 5.2400e-02, 2.6849e-01, + 9.4801e-01, 5.3116e-01, 3.0980e-01, 4.0228e-01, + 6.9226e-01, 4.7104e-01, 1.8029e-01, 6.9745e-01, + 4.2109e-01, 5.1660e-01, 8.5430e-01, 5.8448e-01, + 2.6279e-01, 3.2066e-01, 1.2135e-01, 1.3921e-01, + 8.6237e-01, 7.7077e-01, 5.2734e-01, 6.8075e-01, + 4.5462e-01, 9.2154e-02, 4.7049e-01, 6.0273e-01, + 8.9532e-01, 7.8371e-01, 1.1158e-01, 4.5607e-01, + 4.3482e-01, 2.9216e-01, 5.6974e-01, 5.3652e-01, + 4.7961e-01, 5.8789e-01, 7.9824e-02, 8.6520e-01, + 5.4582e-01, 4.2012e-01, 6.6184e-01, 5.0529e-01, + 4.4942e-01, 7.7487e-01, 1.9653e-01, 1.0956e-01, + 5.8909e-01, 6.1073e-01, 5.5245e-01, 6.0942e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4323, 0.3291, 0.3785, ..., 0.2185, 0.3372, 0.4003]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.018783092498779297 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '55901', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.679638147354126} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([7474, 402, 4271, 3233, 8082, 7229, 4634, 7787, 4139, + 3184, 8224, 1171, 3944, 602, 9794, 2437, 4751, 9537, + 8646, 3618, 186, 1642, 8812, 1242, 6520, 4714, 2375, + 2593, 5999, 3487, 2542, 2298, 7454, 1919, 5592, 9881, + 6340, 6606, 1382, 7859, 8424, 8722, 1593, 4453, 3038, + 9025, 3357, 7978, 9937, 4294, 2449, 9575, 6564, 6276, + 883, 9180, 2863, 9885, 8632, 9035, 8425, 5850, 610, + 1914, 1347, 7858, 6976, 4743, 6956, 1699, 1661, 6170, + 336, 195, 1588, 176, 1410, 3810, 8376, 7753, 8420, + 5111, 9143, 2252, 3445, 968, 2649, 4802, 4386, 4390, + 3856, 5946, 9251, 5561, 7673, 4626, 6108, 802, 2218, + 8978, 5078, 9764, 5448, 4147, 233, 4734, 7018, 8424, + 5226, 2598, 3429, 28, 4519, 7944, 9855, 3672, 3809, + 5753, 8466, 4146, 6093, 810, 8061, 4052, 9601, 4604, + 818, 6420, 4798, 7209, 3129, 9918, 6820, 1159, 4560, + 3413, 3996, 900, 8498, 4167, 8888, 5552, 1503, 8624, + 8190, 8742, 253, 6403, 2361, 9538, 7803, 9212, 3306, + 561, 4066, 1545, 8624, 9070, 817, 9505, 5304, 9902, + 9588, 4190, 4564, 1941, 5789, 4881, 7808, 2742, 9896, + 9814, 8308, 9966, 306, 3204, 483, 7727, 7743, 781, + 1084, 1996, 9435, 2002, 1910, 9365, 9283, 1276, 9833, + 2109, 8937, 8736, 1298, 9466, 2942, 4000, 6727, 6715, + 5783, 3492, 4474, 2726, 7300, 8062, 8665, 182, 4401, + 2159, 8816, 1208, 122, 1779, 9734, 1941, 8593, 4497, + 8591, 2043, 7547, 200, 3582, 5291, 5588, 8296, 7784, + 5573, 53, 5585, 6863, 7705, 8912, 113, 6393, 7106, + 1899, 4638, 1767, 509, 5809, 3099, 2012, 4241, 3965, + 6050, 4378, 8158, 2543, 8935, 2526, 8040, 7916, 1251, + 8490, 1561, 7903, 7690, 3879, 5358, 4429, 5514, 1218, + 3181, 2273, 5271, 1374, 6798, 3010, 1069, 7396, 4595, + 4108, 3810, 5708, 259, 495, 452, 9294, 2754, 9327, + 5207, 2007, 7593, 8569, 4568, 4691, 8679, 4288, 2957, + 7944, 5890, 6915, 2899, 6454, 4798, 8719, 7914, 697, + 5029, 6407, 1105, 1480, 7252, 9894, 4850, 778, 1134, + 7817, 1564, 3689, 9871, 7592, 7381, 1205, 2768, 1398, + 2224, 3669, 4966, 2550, 6171, 4076, 569, 837, 1810, + 1565, 4292, 9588, 8580, 4720, 6023, 3245, 902, 3324, + 2882, 6953, 3218, 1304, 2699, 534, 9799, 9291, 5836, + 2609, 4360, 3972, 550, 3300, 9664, 5239, 7619, 5234, + 3913, 9667, 3284, 8888, 6535, 1525, 8741, 9815, 3428, + 6435, 7905, 5354, 8689, 2951, 4126, 6289, 8090, 8264, + 7102, 591, 6806, 9220, 9527, 6407, 2999, 4388, 5788, + 7840, 8653, 7644, 5096, 2704, 5155, 7094, 8131, 964, + 2988, 6101, 3237, 6695, 2278, 2679, 1832, 9266, 822, + 413, 20, 1159, 2063, 1047, 13, 4951, 4358, 2145, + 3079, 4853, 5128, 3539, 379, 4698, 535, 2243, 9366, + 2957, 8548, 2180, 957, 8760, 3114, 6528, 8576, 740, + 2977, 9372, 7207, 5125, 7766, 9898, 1447, 7912, 2688, + 8526, 7536, 9880, 7461, 314, 3118, 4813, 9877, 2664, + 6841, 1277, 4285, 3049, 3052, 6605, 7912, 1719, 1547, + 1830, 754, 4077, 8603, 2931, 1461, 7307, 7420, 5528, + 7680, 3465, 9981, 3600, 2391, 5835, 5280, 2395, 7655, + 3823, 4572, 4401, 9732, 8071, 8770, 9859, 6200, 7651, + 7986, 4568, 3522, 7352, 4261, 14, 2749, 8122, 4595, + 4871, 4534, 8033, 6163, 9675, 4652, 9505, 5908, 2354, + 4770, 696, 8663, 201, 9616, 2877, 726, 9392, 4651, + 1284, 7500, 1237, 7136, 9476, 1417, 4564, 5733, 1714, + 2018, 4023, 8645, 1744, 8472, 4682, 3125, 1339, 5985, + 8156, 7481, 3542, 4239, 5035, 4828, 295, 9246, 919, + 1946, 7126, 8957, 1335, 1854, 4461, 2038, 4656, 9164, + 6688, 4854, 2201, 2533, 6427, 5659, 6924, 9837, 9889, + 896, 1352, 2294, 2913, 9291, 8315, 7856, 6541, 1652, + 
5978, 6806, 4867, 7270, 6309, 878, 6658, 3440, 7761, + 8300, 363, 3538, 3015, 6228, 677, 6171, 2029, 7473, + 8711, 6599, 5750, 7300, 1942, 5038, 4232, 8403, 9898, + 2781, 8752, 6564, 9145, 7463, 7228, 6378, 7276, 760, + 6688, 4993, 7962, 1680, 6992, 5243, 8266, 7625, 3992, + 1426, 3102, 8691, 6168, 4229, 3244, 1791, 752, 2816, + 7848, 8662, 1165, 8574, 678, 5737, 5869, 4196, 8060, + 2226, 3670, 2019, 1405, 2650, 1175, 5521, 886, 8428, + 8384, 9876, 6357, 7281, 7347, 2948, 3477, 9159, 4891, + 9619, 7188, 1356, 4593, 9718, 829, 6537, 9136, 1319, + 9881, 1493, 3880, 4611, 904, 1159, 6772, 947, 9108, + 3713, 6279, 5568, 6957, 9320, 8603, 3315, 7261, 1083, + 7955, 6619, 1533, 4035, 1644, 6380, 3159, 7664, 6635, + 7010, 4484, 7555, 4741, 2029, 9507, 9463, 7724, 3939, + 6630, 3148, 5513, 6818, 5349, 9426, 2527, 9088, 5175, + 3398, 3562, 3698, 5546, 9677, 9645, 1089, 4987, 1588, + 5400, 997, 8204, 7080, 1581, 6450, 5721, 2570, 8985, + 9655, 7207, 384, 487, 9982, 884, 3779, 2760, 5096, + 1349, 1888, 4225, 3395, 3970, 4646, 7583, 9619, 3024, + 226, 9037, 2697, 4047, 9011, 9620, 6892, 9658, 8405, + 9974, 4082, 2185, 8401, 6453, 4955, 9910, 2090, 7872, + 2973, 4384, 4551, 5019, 2072, 3985, 5566, 5818, 2739, + 7315, 3918, 7030, 2407, 1085, 759, 7595, 1724, 8345, + 4101, 915, 233, 8369, 8491, 6816, 6359, 2000, 6105, + 5328, 7003, 1174, 3746, 9600, 4454, 8912, 6274, 812, + 5016, 7930, 3968, 4977, 2233, 9247, 632, 534, 6275, + 4526, 9112, 1607, 6533, 7979, 5590, 6773, 2615, 6613, + 2449, 8635, 4735, 3037, 3616, 5594, 7556, 815, 9178, + 233, 966, 8555, 5895, 3211, 3489, 4949, 4399, 2015, + 4444, 9117, 1201, 1746, 5340, 7041, 9002, 4285, 8899, + 8799, 4926, 9759, 4966, 2564, 7672, 3383, 5379, 1321, + 488, 8135, 4402, 2898, 8060, 6845, 910, 9794, 2420, + 2773, 5897, 4123, 2035, 1978, 7074, 626, 2443, 5219, + 4190, 1817, 5925, 2603, 27, 5888, 5141, 5613, 7485, + 3221, 2555, 2781, 2440, 1444, 2932, 9889, 3263, 8142, + 1522, 8477, 736, 6138, 5110, 1155, 1226, 4897, 782, + 1576, 5524, 7332, 4653, 896, 1859, 1494, 8755, 1068, + 2329, 8628, 6988, 8155, 2169, 8370, 9504, 6113, 7726, + 1798, 1231, 4601, 1532, 8271, 5057, 5133, 1550, 8115, + 4603, 9258, 4758, 4981, 6773, 6250, 2595, 6963, 7362, + 8398, 4742, 9289, 5866, 7042, 2952, 9132, 5059, 5200, + 1236, 5265, 6298, 8382, 9486, 7607, 2, 1189, 6527, + 9288, 783, 3711, 4547, 589, 3935, 7821, 5344, 7939, + 2620, 4710, 7997, 3225, 3819, 3494, 762, 1601, 6882, + 1087, 4501, 3360, 7246, 4102, 4591, 331, 2833, 4244, + 7334, 7150, 2705, 1786, 1120, 7881, 1701, 7739, 6367, + 3566, 8207, 3982, 7649, 4077, 4547, 3041, 7716, 2492, + 1787, 4571, 2117, 777, 4970, 7293, 9061, 999, 9637, + 6539, 4905, 3402, 2942, 3746, 2946, 453, 9138, 9033, + 52]), + values=tensor([6.5468e-01, 1.4127e-01, 9.2428e-02, 5.9432e-01, + 6.5125e-01, 1.7186e-02, 9.5240e-01, 7.4522e-01, + 6.1665e-01, 9.7634e-01, 8.0519e-01, 2.5245e-01, + 5.9150e-01, 2.5213e-02, 1.2295e-01, 5.0980e-01, + 9.3599e-01, 9.5879e-02, 6.2288e-01, 7.3794e-01, + 4.6483e-01, 2.7344e-01, 7.8522e-01, 4.5768e-01, + 5.0021e-01, 2.1527e-01, 4.8710e-01, 7.8890e-01, + 9.6876e-01, 2.1589e-01, 6.7212e-01, 5.0782e-02, + 6.5297e-01, 9.2223e-01, 6.4293e-01, 7.3252e-01, + 5.7216e-01, 8.1965e-01, 6.4715e-01, 5.7331e-01, + 3.4892e-01, 7.1783e-01, 5.9625e-01, 5.2807e-01, + 3.2411e-01, 4.1204e-03, 4.2174e-01, 2.6368e-01, + 9.3583e-01, 3.2853e-01, 7.7884e-01, 9.3648e-01, + 2.4406e-01, 8.3021e-01, 8.9553e-01, 9.5575e-01, + 8.2181e-01, 8.3678e-01, 6.6541e-01, 7.9095e-01, + 2.7910e-01, 1.3023e-02, 8.3080e-01, 1.8069e-01, + 8.4296e-02, 9.0477e-01, 
9.2088e-01, 7.8766e-01, + 6.7970e-01, 6.3044e-01, 6.4393e-01, 2.5343e-02, + 9.4176e-01, 7.7297e-03, 2.9408e-03, 1.1966e-02, + 4.5537e-01, 8.5939e-01, 2.1291e-01, 7.3170e-01, + 1.3142e-01, 2.3822e-01, 3.6079e-01, 2.1476e-01, + 2.2288e-01, 5.3629e-01, 1.9466e-01, 2.2744e-01, + 4.9497e-01, 4.2304e-01, 2.2479e-01, 2.5827e-01, + 6.0850e-01, 7.9026e-01, 9.2549e-01, 4.4427e-01, + 7.3695e-01, 1.4928e-01, 8.5298e-01, 6.8729e-01, + 8.7128e-01, 1.9788e-01, 1.6850e-01, 2.3224e-01, + 7.3516e-01, 7.9172e-01, 5.9172e-01, 4.2299e-01, + 1.9668e-01, 2.9149e-01, 9.2247e-01, 3.6969e-01, + 3.5744e-01, 8.0301e-01, 4.5058e-01, 4.3867e-01, + 7.2633e-01, 2.0703e-01, 8.1141e-01, 5.6927e-01, + 4.2970e-01, 8.9740e-01, 4.4980e-03, 1.4946e-01, + 6.6063e-01, 9.5086e-01, 5.4202e-01, 4.5991e-01, + 6.1559e-01, 1.1459e-01, 1.1966e-01, 1.5409e-01, + 9.1550e-02, 8.1800e-01, 6.5368e-01, 6.6150e-01, + 8.3864e-01, 8.0492e-01, 3.1258e-01, 7.2768e-01, + 2.5795e-01, 9.0517e-01, 3.9278e-02, 1.2073e-01, + 7.4348e-01, 2.4786e-01, 7.9648e-01, 9.1521e-01, + 9.3120e-01, 6.7637e-01, 8.2965e-01, 5.5535e-01, + 4.5810e-01, 8.7278e-01, 8.9360e-01, 7.2430e-01, + 8.7103e-01, 8.9370e-01, 4.6244e-01, 6.2316e-01, + 1.7015e-02, 7.8933e-01, 8.3526e-01, 2.4711e-01, + 4.4930e-02, 1.8375e-01, 1.0750e-02, 5.6333e-01, + 2.6142e-01, 7.2245e-01, 7.2771e-01, 5.8421e-01, + 8.8772e-01, 9.9297e-01, 5.0362e-01, 5.6718e-01, + 3.7669e-01, 7.6483e-01, 1.1379e-03, 5.6667e-02, + 3.8279e-01, 8.1423e-01, 7.2754e-01, 7.7699e-01, + 8.6811e-01, 5.1382e-01, 6.3474e-01, 4.6463e-01, + 2.1111e-01, 4.6935e-01, 3.3339e-01, 5.6516e-01, + 5.4652e-01, 7.4076e-01, 6.7244e-01, 7.7735e-01, + 3.9087e-01, 4.5901e-01, 5.5385e-01, 6.8343e-01, + 3.5511e-01, 9.7234e-01, 2.6306e-01, 3.0582e-01, + 4.3505e-01, 7.9083e-01, 1.3138e-01, 9.8500e-01, + 6.1727e-01, 5.7070e-01, 2.6766e-01, 9.8175e-01, + 7.4367e-01, 6.4888e-01, 7.9271e-01, 4.5871e-01, + 6.5195e-01, 5.1494e-01, 2.4723e-01, 2.2316e-01, + 1.6363e-02, 3.2662e-01, 8.4102e-01, 7.5499e-01, + 5.1587e-01, 2.0677e-01, 8.1494e-02, 3.6295e-01, + 9.9312e-01, 7.7645e-02, 4.9895e-01, 7.0330e-01, + 2.0760e-02, 2.9267e-01, 7.2543e-01, 4.0653e-01, + 9.5142e-01, 4.4437e-01, 6.7606e-01, 2.5093e-01, + 8.6923e-01, 4.3498e-01, 6.2468e-01, 8.8420e-01, + 8.7101e-01, 2.9757e-01, 8.0408e-01, 3.1026e-01, + 9.8217e-01, 2.5237e-01, 9.8783e-02, 1.7549e-02, + 3.4025e-01, 5.4986e-01, 9.8266e-02, 9.7562e-01, + 8.5991e-01, 3.8755e-01, 6.8829e-01, 3.8024e-01, + 6.3418e-02, 4.0307e-01, 8.2416e-01, 6.7926e-01, + 1.4744e-01, 3.7698e-01, 1.5581e-01, 1.0774e-02, + 1.7693e-01, 8.6081e-01, 7.4972e-01, 3.3748e-01, + 8.1871e-01, 8.4121e-01, 8.1049e-02, 9.2583e-02, + 7.8722e-01, 6.8745e-01, 2.7982e-01, 9.7242e-01, + 7.2433e-01, 8.4648e-01, 3.6121e-01, 5.7503e-01, + 6.4006e-01, 3.0470e-01, 6.4213e-01, 6.6883e-01, + 4.9966e-01, 8.0032e-01, 2.8840e-01, 4.9434e-01, + 9.0461e-01, 4.8427e-01, 9.6448e-01, 8.4693e-02, + 5.4306e-01, 4.0319e-01, 7.6967e-02, 6.6348e-01, + 6.9692e-01, 3.7373e-01, 2.0746e-01, 5.5117e-01, + 5.0787e-01, 9.7123e-03, 1.9341e-01, 7.6638e-01, + 9.0915e-01, 4.8971e-01, 1.5754e-01, 4.0329e-01, + 7.0863e-01, 5.7428e-01, 3.2445e-01, 3.7045e-02, + 7.6333e-01, 3.2380e-01, 4.3436e-01, 4.3863e-01, + 6.6006e-01, 8.8965e-01, 6.4759e-01, 6.8022e-01, + 5.0229e-01, 5.7813e-01, 1.6286e-02, 6.7272e-01, + 7.8948e-01, 7.6149e-01, 3.7736e-01, 6.5737e-01, + 2.7235e-01, 5.4383e-01, 3.2413e-01, 1.7815e-02, + 8.4094e-01, 7.8613e-01, 7.6281e-01, 8.9462e-01, + 8.3942e-01, 4.8247e-01, 6.2688e-01, 6.4393e-01, + 4.4888e-01, 3.6597e-01, 7.3200e-01, 5.3547e-01, + 1.1008e-01, 3.6434e-01, 
2.5069e-01, 7.4258e-01, + 4.9019e-01, 8.4201e-01, 8.0305e-01, 7.7847e-01, + 7.8059e-01, 3.7616e-01, 8.1493e-01, 5.1010e-01, + 4.5501e-01, 5.5581e-01, 4.4741e-02, 9.9913e-02, + 3.7803e-01, 3.9358e-01, 1.5395e-01, 3.4878e-01, + 9.0430e-01, 6.0733e-02, 5.7344e-01, 2.5730e-01, + 7.7168e-01, 3.6176e-02, 9.4183e-01, 9.1086e-01, + 6.4821e-01, 4.9507e-01, 7.2036e-01, 1.4739e-01, + 6.3503e-01, 1.9288e-01, 4.2898e-02, 5.6821e-01, + 8.8470e-01, 4.5399e-01, 8.8041e-03, 1.5915e-01, + 7.0359e-01, 5.7698e-01, 8.8443e-01, 3.4394e-01, + 3.0485e-01, 4.4242e-01, 3.4913e-01, 7.9540e-01, + 5.5193e-02, 4.3887e-02, 6.1585e-01, 7.0753e-01, + 5.1805e-01, 6.1761e-01, 6.8989e-01, 2.1920e-02, + 3.8471e-01, 4.0989e-01, 5.5155e-01, 6.9767e-01, + 5.2420e-01, 3.7373e-01, 3.2347e-01, 9.1508e-01, + 3.6032e-01, 7.9389e-01, 6.6820e-01, 8.6269e-01, + 6.7740e-01, 6.3416e-01, 4.8768e-01, 4.0602e-01, + 4.9812e-01, 8.4111e-01, 8.4279e-01, 5.5415e-02, + 8.1731e-01, 1.4413e-01, 6.5361e-01, 1.7266e-01, + 4.1468e-01, 2.0062e-01, 7.2803e-01, 6.2329e-01, + 1.0537e-01, 5.5486e-01, 3.2931e-01, 7.7181e-01, + 3.4008e-01, 7.8573e-02, 8.1483e-02, 3.2319e-01, + 5.5933e-01, 4.0264e-01, 2.3848e-01, 7.8816e-01, + 9.0911e-01, 6.3020e-01, 3.6047e-01, 3.6079e-01, + 2.2380e-01, 4.8150e-01, 4.9177e-01, 8.1853e-01, + 4.0528e-01, 2.9267e-01, 8.7137e-01, 4.4712e-01, + 7.8846e-01, 4.5490e-01, 3.4711e-01, 4.8177e-01, + 3.4496e-01, 6.1416e-01, 7.6675e-01, 6.6943e-01, + 2.9519e-01, 6.5316e-01, 8.7975e-01, 7.8729e-01, + 9.1207e-01, 8.4710e-01, 2.7400e-01, 3.1100e-01, + 8.2089e-02, 9.4351e-02, 2.4426e-01, 5.9016e-02, + 4.9262e-01, 6.9916e-02, 2.2964e-02, 5.0292e-01, + 9.1043e-01, 9.4623e-01, 4.6797e-01, 3.9579e-01, + 8.6736e-01, 7.6480e-01, 7.9036e-01, 3.2983e-01, + 4.2598e-01, 5.3954e-01, 9.9338e-01, 1.5591e-03, + 5.1260e-01, 7.1729e-01, 7.0432e-01, 6.4879e-01, + 2.0475e-01, 1.3935e-01, 4.2309e-01, 3.2121e-01, + 5.5416e-01, 5.8721e-01, 2.1480e-01, 6.1175e-01, + 3.9361e-01, 3.6750e-01, 8.0177e-01, 4.1339e-01, + 6.9380e-01, 7.5490e-01, 8.6979e-01, 9.3450e-01, + 4.8427e-01, 6.5258e-01, 2.2409e-01, 2.6382e-01, + 7.1099e-01, 2.0483e-01, 5.7935e-01, 2.2667e-01, + 4.0693e-01, 3.4867e-01, 6.5637e-01, 6.6115e-01, + 9.7780e-01, 1.8873e-01, 2.5250e-01, 9.0736e-01, + 8.1793e-01, 5.3495e-01, 8.8298e-01, 3.0053e-02, + 7.0905e-01, 3.5784e-01, 4.0520e-01, 8.0837e-01, + 7.7093e-01, 7.3130e-01, 5.7167e-01, 2.3679e-01, + 9.2100e-01, 3.3596e-01, 9.3138e-02, 4.5639e-01, + 1.1073e-01, 7.1548e-01, 5.4077e-01, 4.3521e-01, + 6.9792e-01, 7.3280e-02, 9.4501e-01, 5.6171e-01, + 9.2932e-01, 6.2774e-01, 8.4495e-01, 6.6811e-01, + 3.4443e-01, 2.0575e-01, 1.2763e-01, 2.8694e-01, + 4.9355e-01, 4.7717e-01, 2.1047e-01, 6.0261e-01, + 3.7883e-01, 7.5454e-01, 8.2122e-01, 5.7723e-01, + 7.7142e-01, 5.5063e-02, 6.0631e-01, 8.2983e-01, + 7.4182e-02, 3.3580e-01, 8.5823e-01, 5.3831e-01, + 4.5795e-01, 8.3477e-01, 5.1350e-01, 9.3417e-01, + 5.4356e-01, 2.5336e-01, 6.1465e-01, 3.4845e-01, + 1.8660e-01, 6.6369e-01, 1.3363e-01, 2.2671e-02, + 5.3456e-02, 1.7256e-01, 6.1136e-01, 2.5285e-01, + 7.5915e-01, 9.3478e-01, 4.1491e-01, 2.6218e-02, + 3.5023e-01, 2.1390e-01, 6.4510e-01, 9.6921e-01, + 5.8877e-01, 4.9975e-01, 2.1645e-01, 3.8917e-01, + 7.1814e-01, 4.7198e-01, 7.4306e-01, 4.1652e-01, + 8.0271e-01, 4.2433e-01, 8.8599e-01, 7.2558e-01, + 3.0764e-01, 9.3710e-01, 5.6792e-01, 6.9030e-01, + 9.3938e-01, 8.8332e-01, 4.5213e-01, 6.4834e-01, + 1.9065e-01, 2.9216e-01, 7.9948e-01, 9.8870e-01, + 2.6744e-01, 8.7429e-02, 1.6926e-01, 9.8162e-01, + 3.1604e-01, 2.8905e-01, 6.9247e-01, 7.0212e-01, + 6.2858e-01, 2.5725e-01, 
4.1328e-01, 3.9903e-01, + 1.7692e-01, 6.0405e-01, 9.0258e-01, 1.6863e-01, + 2.1314e-01, 7.8599e-01, 7.8953e-01, 9.3110e-01, + 3.4957e-01, 8.0046e-01, 1.6543e-01, 3.2844e-01, + 5.9632e-03, 6.5679e-01, 9.2040e-02, 3.3725e-01, + 6.7726e-01, 7.1442e-01, 1.8081e-01, 4.4483e-01, + 7.0590e-01, 1.0540e-01, 8.2332e-01, 1.9922e-01, + 1.3106e-01, 1.4727e-01, 1.7056e-01, 2.2487e-01, + 8.1177e-01, 4.5516e-01, 3.6043e-01, 8.3065e-01, + 8.9321e-02, 6.4483e-01, 9.4118e-01, 7.2658e-01, + 6.0324e-01, 3.2634e-02, 4.9464e-01, 2.9326e-01, + 1.4061e-01, 5.4972e-01, 3.8987e-01, 1.5617e-02, + 7.2013e-01, 2.5322e-01, 6.2362e-01, 8.6245e-01, + 7.2103e-01, 7.2221e-02, 8.1671e-01, 5.6625e-01, + 6.9137e-01, 9.5519e-01, 7.0531e-01, 7.9257e-02, + 4.0152e-02, 6.3328e-01, 8.7228e-01, 4.2235e-02, + 5.5240e-01, 1.8645e-01, 4.4119e-01, 5.1872e-01, + 3.0241e-01, 4.6970e-02, 3.3567e-01, 7.3336e-01, + 4.3809e-01, 6.0532e-01, 9.1692e-01, 2.7482e-01, + 3.3255e-01, 5.6474e-01, 5.1644e-01, 1.5826e-01, + 1.7806e-01, 4.3779e-01, 8.8205e-01, 4.6870e-01, + 9.3173e-01, 7.9373e-01, 4.0371e-01, 7.6367e-01, + 7.7558e-01, 8.4337e-01, 7.3356e-01, 2.5059e-01, + 4.3406e-01, 7.9969e-01, 3.2800e-01, 8.2351e-01, + 9.8393e-01, 4.2720e-01, 6.4308e-01, 4.1650e-01, + 4.5876e-01, 6.0922e-01, 6.6100e-01, 2.6225e-01, + 7.5511e-01, 7.4482e-01, 8.5879e-01, 1.5262e-01, + 5.5190e-01, 9.3653e-02, 2.4859e-01, 1.2244e-01, + 7.2058e-01, 6.8568e-01, 2.9328e-01, 3.0763e-02, + 7.3235e-01, 1.0132e-01, 1.3963e-02, 1.3632e-01, + 3.2207e-01, 2.5190e-01, 3.2095e-01, 9.5220e-01, + 2.2414e-01, 9.5574e-01, 6.6512e-01, 2.1393e-01, + 4.3569e-01, 6.5079e-01, 6.1620e-01, 4.8482e-01, + 2.7463e-01, 4.2786e-01, 3.6777e-01, 8.0895e-01, + 5.0708e-01, 5.4724e-02, 8.9217e-01, 5.0493e-01, + 7.4006e-01, 4.3982e-01, 1.3634e-01, 4.0648e-01, + 3.1583e-01, 2.9091e-01, 6.8608e-01, 8.8614e-01, + 5.0014e-01, 1.9714e-01, 9.0919e-01, 1.0143e-01, + 3.3742e-01, 7.9946e-01, 1.6835e-01, 9.3829e-01, + 3.9281e-01, 1.0649e-01, 2.5096e-01, 9.3147e-01, + 9.6402e-01, 6.3148e-01, 7.3458e-01, 1.6974e-01, + 4.7950e-01, 5.1505e-01, 8.8022e-01, 8.7978e-01, + 1.9932e-01, 5.8288e-01, 2.8453e-01, 7.0267e-01, + 1.3926e-01, 5.5163e-01, 9.0355e-01, 1.9504e-01, + 6.1798e-01, 1.4056e-01, 5.6601e-03, 9.6328e-01, + 9.0382e-01, 5.4381e-01, 9.7316e-01, 1.1029e-01, + 4.5424e-01, 5.9510e-01, 6.7831e-01, 7.2744e-01, + 8.0666e-01, 8.5186e-01, 3.0594e-01, 1.2956e-01, + 5.1455e-01, 5.0766e-01, 4.8751e-01, 9.1844e-01, + 4.6636e-01, 5.6813e-02, 3.7275e-01, 9.0117e-01, + 3.4902e-01, 2.3409e-01, 2.4325e-02, 6.8071e-01, + 6.3166e-01, 7.4586e-01, 3.5234e-01, 4.3537e-03, + 7.3571e-01, 3.9115e-01, 9.1061e-01, 5.0277e-02, + 6.2925e-01, 4.1784e-01, 7.3323e-02, 7.3164e-01, + 4.8462e-01, 7.6654e-01, 4.4826e-01, 3.9398e-02, + 6.9417e-01, 7.4395e-01, 6.1366e-01, 4.9977e-01, + 4.1028e-01, 6.6273e-01, 8.3655e-01, 6.0125e-01, + 4.6044e-01, 9.2920e-01, 2.1166e-01, 7.5961e-01, + 9.6397e-01, 6.7286e-02, 2.6344e-01, 2.0692e-01, + 1.1673e-01, 4.3703e-02, 5.2607e-01, 1.0118e-01, + 2.2023e-01, 9.8295e-01, 7.7590e-01, 6.2933e-01, + 7.0037e-01, 1.6284e-01, 3.1877e-01, 7.7901e-01, + 2.1463e-01, 8.4394e-01, 7.3672e-02, 5.4399e-01, + 6.9698e-01, 2.5617e-01, 4.5407e-01, 5.6064e-01, + 8.1694e-01, 3.4660e-01, 7.0410e-01, 6.7050e-01, + 2.3489e-01, 1.8168e-01, 7.9661e-01, 8.3635e-01, + 5.8756e-01, 8.8388e-01, 4.7640e-01, 9.5453e-01, + 5.4838e-01, 1.0417e-01, 3.4849e-01, 1.8089e-01, + 7.5269e-01, 6.1014e-01, 4.0905e-01, 6.2377e-01, + 4.3600e-01, 5.8630e-01, 7.4917e-01, 4.4090e-01, + 8.8014e-01, 8.0938e-01, 7.9802e-01, 2.1206e-01, + 2.7673e-01, 1.0645e-01, 
6.0725e-01, 3.0038e-01, + 6.4655e-01, 8.6462e-01, 9.6500e-01, 1.8784e-01, + 1.7909e-01, 5.6496e-01, 5.1067e-01, 3.3174e-01, + 1.8409e-01, 3.6191e-02, 8.8537e-01, 3.4596e-02, + 5.4171e-01, 8.8436e-01, 3.4009e-02, 7.4543e-02, + 7.6922e-01, 6.0467e-02, 5.3161e-02, 5.1383e-02, + 1.3021e-01, 6.6623e-01, 3.3683e-01, 8.7086e-01, + 3.4739e-01, 9.4711e-03, 4.9679e-01, 9.8853e-01, + 5.0873e-01, 2.8727e-01, 8.4395e-01, 5.7766e-02, + 4.5070e-01, 4.5636e-01, 8.9634e-01, 3.9843e-01, + 7.8137e-01, 9.3941e-01, 9.5578e-01, 9.2556e-02, + 8.1074e-02, 1.1447e-01, 6.3234e-01, 9.5197e-01, + 4.1311e-01, 1.8941e-01, 8.9359e-01, 6.9563e-01, + 8.1728e-01, 1.2234e-02, 2.1731e-01, 7.6004e-01, + 6.1252e-01, 5.0718e-01, 1.6782e-01, 4.0585e-01, + 7.8919e-01, 5.1338e-01, 8.9386e-01, 7.3014e-02, + 3.4906e-01, 4.2267e-02, 9.2989e-01, 9.1272e-04, + 7.8198e-01, 3.0679e-01, 4.8281e-02, 2.7044e-01, + 1.2871e-01, 4.4472e-02, 4.7767e-01, 6.8016e-01, + 8.0017e-01, 2.1361e-02, 3.1301e-01, 4.8972e-01, + 4.0420e-01, 7.3745e-01, 2.3749e-01, 2.0239e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.3431, 0.4493, 0.0337, ..., 0.1419, 0.4440, 0.1516]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 1.679638147354126 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '349456', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.173964023590088} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([1074, 7022, 4087, 4354, 1115, 9252, 2574, 3979, 915, + 3835, 8518, 2769, 112, 3623, 1291, 615, 191, 66, + 1214, 5633, 6476, 7907, 7161, 7265, 5563, 2980, 7771, + 8906, 5431, 4607, 4770, 626, 9524, 3730, 3866, 8666, + 6423, 5296, 979, 1727, 6975, 9285, 7075, 7134, 6667, + 7325, 5220, 9414, 9490, 8118, 9426, 4227, 7390, 7605, + 9168, 8384, 6401, 6035, 7830, 9972, 5870, 7420, 9935, + 3091, 834, 8862, 5740, 3062, 5112, 2908, 639, 6091, + 436, 2916, 4734, 7108, 513, 2302, 7905, 4519, 3497, + 5454, 9753, 2744, 5180, 8794, 3393, 8207, 4827, 280, + 5568, 3468, 6511, 9773, 2254, 47, 1155, 342, 995, + 9068, 3240, 1324, 1365, 6342, 2285, 1870, 8660, 4106, + 1186, 2521, 7988, 2968, 4045, 8137, 1538, 2198, 760, + 50, 2598, 195, 7444, 4957, 6809, 4374, 6456, 7197, + 9552, 1863, 5023, 3277, 4323, 9903, 7260, 7125, 6873, + 513, 9772, 5373, 7137, 4122, 7749, 5169, 1971, 6272, + 9111, 6015, 736, 3176, 6128, 5202, 6847, 7163, 5150, + 4610, 4581, 9646, 6385, 4193, 8418, 6616, 2414, 1755, + 8625, 575, 1748, 6885, 2435, 2666, 6742, 5614, 244, + 879, 6562, 1359, 3194, 5727, 3551, 7083, 1842, 9514, + 4863, 8479, 941, 2601, 7999, 6494, 5141, 4403, 7993, + 1185, 1573, 5845, 2439, 1922, 1733, 8393, 7228, 6830, + 5908, 6228, 9466, 2785, 4515, 6730, 3151, 3419, 3419, + 776, 3574, 2419, 4836, 3293, 8124, 1875, 7572, 2958, + 4651, 9262, 660, 8285, 6455, 653, 672, 0, 5228, + 9450, 3945, 8974, 8121, 9650, 2145, 4773, 7437, 4493, + 2675, 1190, 9285, 6100, 4192, 7922, 232, 671, 1102, + 7639, 9274, 9775, 7093, 5205, 6899, 4017, 9239, 5474, + 4012, 2403, 7759, 1283, 4106, 9062, 6550, 1753, 6450, + 8863, 2663, 1355, 6905, 1092, 2895, 2727, 2867, 1269, + 6117, 7865, 2347, 1781, 2367, 3514, 5957, 7481, 9656, + 4374, 661, 4235, 5018, 7284, 6968, 9032, 5003, 1196, + 3640, 4689, 1986, 9599, 2163, 9857, 2986, 6217, 1124, + 3869, 6932, 9440, 839, 1387, 3226, 4043, 7157, 7379, + 8428, 8483, 4369, 6851, 8809, 7897, 703, 2976, 3699, + 3438, 2355, 6736, 7238, 176, 7739, 889, 2831, 6969, + 8789, 5800, 3110, 6498, 4705, 6469, 9448, 7282, 2134, + 3678, 5467, 1622, 3274, 5066, 5127, 8447, 100, 2983, + 4463, 3099, 8883, 2393, 5221, 5531, 6706, 7577, 5800, + 8450, 3440, 6855, 944, 6894, 3303, 1082, 5849, 737, + 1687, 3315, 15, 7406, 3990, 2259, 7989, 8348, 7160, + 9351, 3002, 3148, 9126, 6367, 8090, 2998, 8051, 9049, + 2364, 8211, 5174, 5618, 94, 3866, 7923, 9652, 3819, + 3703, 4529, 1291, 2502, 976, 2602, 9342, 2433, 3989, + 7743, 4041, 7702, 1609, 486, 6429, 3042, 7286, 1452, + 35, 1305, 4754, 7796, 3723, 7653, 6695, 5085, 8985, + 3829, 2905, 9438, 2209, 6382, 4736, 9664, 3410, 9780, + 7415, 5568, 4279, 7830, 7106, 4004, 4722, 8650, 6975, + 2172, 2395, 8553, 3340, 3045, 7146, 5817, 3873, 8869, + 6247, 1175, 2122, 3744, 5725, 6888, 1097, 33, 3388, + 835, 6001, 1599, 2732, 8805, 8439, 7209, 2690, 5523, + 678, 764, 9297, 4921, 6303, 5169, 995, 8534, 8061, + 778, 3083, 4497, 1526, 7166, 9518, 9740, 5623, 1874, + 4112, 6650, 4724, 3864, 1864, 8807, 4615, 8323, 2087, + 3311, 7066, 3549, 9226, 6333, 9414, 1630, 6645, 4844, + 115, 7748, 2922, 9690, 9811, 2934, 356, 3565, 7527, + 5670, 3614, 2851, 2629, 3790, 7609, 2460, 7042, 9397, + 5659, 2806, 8290, 2345, 2448, 7257, 2879, 1633, 6514, + 6562, 6696, 7140, 2633, 5425, 7796, 6662, 1393, 2046, + 8374, 7261, 3390, 3981, 7552, 254, 5767, 2429, 5613, + 4296, 4489, 6937, 299, 9296, 2149, 2092, 8555, 8603, + 3237, 1398, 9762, 2121, 3314, 6752, 4047, 3531, 2282, + 9126, 
155, 6181, 9517, 318, 9471, 241, 3861, 4693, + 7187, 8904, 9238, 3275, 8438, 9069, 1864, 9231, 9667, + 4009, 9288, 4307, 8620, 7353, 7065, 3457, 9361, 7063, + 4553, 4913, 406, 2735, 5848, 6549, 3893, 4278, 1320, + 8124, 6472, 1508, 6416, 9674, 982, 6780, 7269, 2117, + 4160, 2842, 7527, 8109, 2175, 6435, 756, 2829, 4551, + 1101, 5613, 2799, 6159, 3030, 9674, 3371, 4668, 2685, + 9264, 9477, 5536, 5293, 9200, 7997, 9530, 5953, 890, + 5904, 2629, 8659, 5653, 9317, 4267, 9976, 2511, 6842, + 90, 512, 2068, 6073, 7386, 8238, 8424, 3409, 3193, + 1239, 1431, 3902, 4600, 1601, 3567, 9765, 5150, 8916, + 2840, 3297, 5983, 970, 249, 369, 3172, 7109, 7253, + 6112, 1285, 9881, 3414, 9391, 3724, 571, 8346, 8358, + 4162, 7136, 6172, 8809, 6689, 815, 5048, 3642, 959, + 6179, 9525, 7319, 8094, 4231, 3522, 6081, 9527, 8667, + 4007, 6492, 5206, 2671, 131, 2593, 293, 1464, 8045, + 2173, 608, 8313, 546, 7335, 3665, 2688, 8988, 3584, + 6303, 4955, 3386, 1723, 7105, 4572, 2844, 5600, 6702, + 5410, 4316, 2976, 4686, 3464, 2186, 1556, 2021, 8745, + 13, 3322, 1670, 708, 3608, 2776, 8220, 2918, 692, + 5296, 5873, 4095, 1083, 1521, 7223, 7591, 5664, 4154, + 6765, 3312, 7008, 4212, 7168, 7463, 2544, 4892, 8746, + 7430, 8621, 2945, 8225, 1113, 3741, 6687, 6930, 3774, + 7211, 1927, 7164, 9820, 2083, 2916, 9934, 6793, 5105, + 4835, 3112, 3094, 1326, 7804, 7578, 4182, 739, 1123, + 1767, 8724, 9857, 2143, 6971, 6801, 376, 4107, 954, + 5018, 8170, 7753, 3368, 2333, 1, 2240, 2793, 1283, + 7038, 8646, 9694, 359, 2258, 5039, 1080, 5544, 9098, + 4490, 5886, 352, 1011, 1449, 531, 9355, 9342, 2004, + 9274, 7734, 8205, 2292, 9551, 5966, 9837, 1814, 4079, + 8382, 4410, 2600, 7625, 9363, 1842, 2879, 1253, 809, + 7014, 5812, 209, 5239, 2093, 1770, 314, 8424, 2510, + 8571, 9426, 7724, 9614, 5503, 3753, 2866, 1758, 9962, + 6, 7364, 8977, 2787, 766, 2450, 2844, 7089, 3871, + 3747, 9673, 87, 5211, 3728, 3843, 1425, 9293, 1646, + 8032, 5484, 3610, 2890, 6677, 6468, 1558, 3598, 3915, + 1993, 1609, 8274, 966, 8311, 8117, 5411, 9792, 5362, + 4456, 6051, 3532, 1282, 4790, 4302, 1110, 1051, 5344, + 197, 7166, 3635, 796, 2322, 8834, 7592, 9685, 6175, + 1580, 7579, 7995, 5470, 7573, 5255, 5134, 8199, 4635, + 6559, 7004, 535, 234, 482, 8638, 109, 9961, 4999, + 7801, 2361, 4476, 9026, 8063, 7010, 7559, 3651, 6179, + 1451, 928, 8960, 630, 76, 1209, 5158, 5630, 9966, + 5033, 8228, 9559, 9213, 1023, 7495, 1852, 4508, 8270, + 1627, 1030, 4994, 541, 3130, 827, 3601, 8830, 6755, + 4725, 6005, 7586, 3820, 5409, 4677, 5963, 549, 7869, + 8701, 7230, 9014, 1899, 4054, 7192, 5440, 4348, 1719, + 2600, 6431, 2356, 4202, 427, 9630, 7862, 4930, 8419, + 7485, 2384, 6304, 9889, 7541, 9723, 7193, 9094, 6906, + 3017]), + values=tensor([0.9304, 0.0325, 0.3106, 0.4781, 0.2864, 0.4721, 0.3382, + 0.8821, 0.2738, 0.6632, 0.0397, 0.5410, 0.8129, 0.5302, + 0.8584, 0.6728, 0.7839, 0.2842, 0.6220, 0.2198, 0.9853, + 0.4195, 0.8925, 0.9780, 0.4936, 0.9314, 0.4002, 0.0589, + 0.0362, 0.6841, 0.4069, 0.9644, 0.1471, 0.1097, 0.7122, + 0.6469, 0.7726, 0.7037, 0.8236, 0.5724, 0.6757, 0.0210, + 0.7908, 0.1342, 0.5146, 0.6874, 0.0836, 0.6105, 0.6489, + 0.7550, 0.2489, 0.2644, 0.8196, 0.5567, 0.9361, 0.0192, + 0.7166, 0.4988, 0.6757, 0.6719, 0.0246, 0.9112, 0.3677, + 0.3643, 0.2678, 0.8468, 0.3772, 0.5807, 0.7059, 0.0144, + 0.4017, 0.2251, 0.1445, 0.5897, 0.5839, 0.8811, 0.5419, + 0.1899, 0.6807, 0.8358, 0.3278, 0.1587, 0.4733, 0.8342, + 0.8985, 0.6485, 0.7474, 0.6612, 0.0844, 0.3828, 0.0157, + 0.7714, 0.6608, 0.1525, 0.9463, 0.3394, 0.3867, 0.0432, + 0.8090, 0.3213, 0.9897, 0.9084, 
0.9424, 0.2045, 0.4579, + 0.7852, 0.4647, 0.4513, 0.6407, 0.5200, 0.9790, 0.1694, + 0.8201, 0.7817, 0.9650, 0.1348, 0.2654, 0.0726, 0.6476, + 0.9696, 0.1144, 0.2269, 0.5974, 0.7825, 0.6712, 0.1593, + 0.9034, 0.9204, 0.3969, 0.4522, 0.3790, 0.7055, 0.0019, + 0.1878, 0.4210, 0.9245, 0.3068, 0.7871, 0.0815, 0.1037, + 0.9467, 0.2547, 0.9280, 0.3139, 0.5680, 0.6516, 0.6068, + 0.5981, 0.3913, 0.0407, 0.0947, 0.2105, 0.0303, 0.5718, + 0.2321, 0.5689, 0.9476, 0.1321, 0.0247, 0.4729, 0.7661, + 0.8935, 0.9971, 0.2980, 0.9100, 0.1945, 0.7887, 0.3662, + 0.6700, 0.4029, 0.0956, 0.0108, 0.3242, 0.1912, 0.0388, + 0.8159, 0.1239, 0.6482, 0.0548, 0.7241, 0.4628, 0.1188, + 0.3048, 0.7968, 0.6538, 0.0308, 0.2637, 0.2733, 0.6327, + 0.6480, 0.2813, 0.5175, 0.6726, 0.0450, 0.6176, 0.2589, + 0.7109, 0.0980, 0.7310, 0.5738, 0.9989, 0.1733, 0.0559, + 0.0624, 0.6747, 0.0930, 0.2298, 0.6306, 0.1193, 0.8276, + 0.2679, 0.3372, 0.0565, 0.5821, 0.6562, 0.4518, 0.8406, + 0.7838, 0.3267, 0.0377, 0.0535, 0.4407, 0.8150, 0.3303, + 0.6133, 0.4116, 0.9870, 0.8925, 0.8368, 0.9237, 0.9331, + 0.5461, 0.4353, 0.9658, 0.0052, 0.5013, 0.8249, 0.8077, + 0.8369, 0.9914, 0.7366, 0.6685, 0.5210, 0.7940, 0.8071, + 0.1086, 0.3441, 0.1479, 0.3668, 0.5370, 0.5706, 0.8571, + 0.3522, 0.2921, 0.1107, 0.4358, 0.9740, 0.6370, 0.7121, + 0.3797, 0.6431, 0.6432, 0.8570, 0.3341, 0.8876, 0.4912, + 0.3012, 0.2244, 0.8304, 0.8708, 0.6993, 0.6415, 0.1354, + 0.4973, 0.6766, 0.5583, 0.8158, 0.1846, 0.7576, 0.3301, + 0.2284, 0.7380, 0.2497, 0.8663, 0.4244, 0.8319, 0.8766, + 0.3127, 0.9083, 0.6717, 0.2607, 0.9060, 0.3022, 0.6929, + 0.7216, 0.1864, 0.9103, 0.0908, 0.8280, 0.6772, 0.9170, + 0.1666, 0.0432, 0.9895, 0.8882, 0.4445, 0.2577, 0.6991, + 0.0134, 0.7908, 0.1595, 0.2586, 0.1384, 0.8815, 0.9964, + 0.6719, 0.3849, 0.3745, 0.5578, 0.6641, 0.8020, 0.8256, + 0.8774, 0.1735, 0.0160, 0.3321, 0.9095, 0.6863, 0.8819, + 0.2760, 0.0176, 0.9191, 0.2224, 0.5883, 0.6735, 0.2168, + 0.8084, 0.2051, 0.7731, 0.7349, 0.4183, 0.4549, 0.7852, + 0.1645, 0.2619, 0.7500, 0.6211, 0.9320, 0.1410, 0.8013, + 0.3936, 0.8135, 0.1711, 0.7508, 0.1565, 0.5072, 0.4150, + 0.0222, 0.7654, 0.2057, 0.7224, 0.5103, 0.0219, 0.2565, + 0.1947, 0.2598, 0.2876, 0.9465, 0.1945, 0.3773, 0.5036, + 0.9181, 0.1480, 0.8127, 0.9489, 0.5086, 0.2695, 0.5627, + 0.6161, 0.4583, 0.0870, 0.7396, 0.9559, 0.7672, 0.7594, + 0.2165, 0.4330, 0.5886, 0.0477, 0.3072, 0.0691, 0.3499, + 0.5368, 0.0098, 0.1604, 0.5787, 0.5961, 0.6748, 0.5240, + 0.6174, 0.6377, 0.0557, 0.8169, 0.1661, 0.8698, 0.1999, + 0.5072, 0.0521, 0.7366, 0.9190, 0.1330, 0.7979, 0.2571, + 0.8104, 0.6892, 0.6507, 0.8704, 0.6904, 0.5395, 0.5915, + 0.1731, 0.6768, 0.9334, 0.5447, 0.3147, 0.7565, 0.2840, + 0.8455, 0.7829, 0.1389, 0.9161, 0.9734, 0.2521, 0.5519, + 0.3612, 0.4687, 0.1449, 0.2398, 0.6605, 0.8039, 0.8419, + 0.6954, 0.8245, 0.4611, 0.7124, 0.8750, 0.0816, 0.7331, + 0.2216, 0.8005, 0.9334, 0.8519, 0.5866, 0.8732, 0.9519, + 0.4971, 0.6068, 0.1175, 0.8603, 0.6101, 0.3972, 0.0193, + 0.1596, 0.3517, 0.7516, 0.7870, 0.6434, 0.0362, 0.2214, + 0.6042, 0.3964, 0.6328, 0.2889, 0.2855, 0.8849, 0.5080, + 0.7797, 0.4322, 0.0814, 0.3647, 0.2061, 0.6314, 0.3193, + 0.1895, 0.3658, 0.6642, 0.1466, 0.1805, 0.6190, 0.9850, + 0.3325, 0.8191, 0.3665, 0.4316, 0.4284, 0.3112, 0.9226, + 0.5944, 0.0376, 0.1741, 0.8903, 0.0662, 0.9770, 0.9188, + 0.9310, 0.5606, 0.4272, 0.1108, 0.4718, 0.4460, 0.6248, + 0.5358, 0.2156, 0.0885, 0.3174, 0.8396, 0.1886, 0.2096, + 0.3417, 0.8097, 0.5694, 0.1045, 0.8763, 0.1113, 0.1353, + 0.0123, 0.9512, 0.5017, 0.3234, 0.1403, 0.0730, 
0.8981, + 0.2740, 0.4134, 0.2135, 0.9805, 0.0445, 0.9458, 0.7869, + 0.3360, 0.7234, 0.2980, 0.1314, 0.3499, 0.6698, 0.4526, + 0.6499, 0.4686, 0.7291, 0.1916, 0.4110, 0.7064, 0.0622, + 0.1843, 0.1217, 0.1311, 0.8602, 0.9506, 0.1258, 0.9113, + 0.9310, 0.4848, 0.0104, 0.0267, 0.5186, 0.0305, 0.5081, + 0.5501, 0.3248, 0.8113, 0.3173, 0.7019, 0.0515, 0.0562, + 0.1638, 0.4617, 0.6547, 0.3705, 0.4788, 0.0628, 0.4462, + 0.3249, 0.8781, 0.7038, 0.4954, 0.3617, 0.8045, 0.1896, + 0.8468, 0.7628, 0.4651, 0.3750, 0.9370, 0.8226, 0.5039, + 0.7669, 0.5888, 0.7467, 0.1323, 0.9814, 0.9275, 0.4832, + 0.2850, 0.4635, 0.1488, 0.7094, 0.2071, 0.4950, 0.1863, + 0.2851, 0.7798, 0.3730, 0.3994, 0.2529, 0.5052, 0.4832, + 0.3839, 0.7730, 0.1994, 0.8801, 0.9634, 0.1279, 0.5202, + 0.4480, 0.2752, 0.8425, 0.2605, 0.6678, 0.0019, 0.1146, + 0.5118, 0.8404, 0.4252, 0.8911, 0.6844, 0.6707, 0.4919, + 0.4044, 0.9689, 0.9549, 0.5260, 0.2040, 0.3758, 0.8436, + 0.0719, 0.9238, 0.4595, 0.7501, 0.7038, 0.0842, 0.8019, + 0.5135, 0.4312, 0.0042, 0.3774, 0.0300, 0.3045, 0.1310, + 0.9498, 0.2047, 0.0065, 0.4750, 0.7205, 0.8268, 0.2414, + 0.5067, 0.3174, 0.7835, 0.1750, 0.2497, 0.8182, 0.4410, + 0.4937, 0.0668, 0.7211, 0.3732, 0.1165, 0.5206, 0.1429, + 0.5890, 0.1326, 0.1841, 0.2299, 0.6975, 0.0815, 0.8063, + 0.7601, 0.5190, 0.4973, 0.7820, 0.2718, 0.3645, 0.2388, + 0.6349, 0.4919, 0.5469, 0.5835, 0.1693, 0.2555, 0.6997, + 0.0080, 0.8024, 0.8773, 0.4755, 0.9793, 0.3227, 0.7826, + 0.4776, 0.5918, 0.9313, 0.3366, 0.3665, 0.6137, 0.6442, + 0.7228, 0.1110, 0.6812, 0.4938, 0.1352, 0.2780, 0.6843, + 0.3593, 0.6866, 0.5996, 0.6977, 0.2843, 0.3178, 0.7528, + 0.8342, 0.5386, 0.6190, 0.0664, 0.9083, 0.4500, 0.1762, + 0.6839, 0.7444, 0.6654, 0.5698, 0.6462, 0.6689, 0.6885, + 0.6494, 0.1899, 0.7462, 0.4931, 0.7785, 0.4078, 0.0875, + 0.6143, 0.8570, 0.9888, 0.3955, 0.6640, 0.6587, 0.7544, + 0.9891, 0.6031, 0.0927, 0.6392, 0.0282, 0.2897, 0.0799, + 0.3039, 0.7537, 0.0064, 0.0830, 0.2575, 0.1555, 0.1809, + 0.6300, 0.6627, 0.5442, 0.5686, 0.4545, 0.6247, 0.0230, + 0.1211, 0.9727, 0.2738, 0.3745, 0.1564, 0.8676, 0.3949, + 0.4342, 0.6528, 0.7121, 0.9790, 0.6180, 0.8497, 0.6934, + 0.8114, 0.4502, 0.2751, 0.7561, 0.6667, 0.0282, 0.9822, + 0.8863, 0.6138, 0.1747, 0.4926, 0.0927, 0.1322, 0.4515, + 0.6778, 0.3334, 0.7750, 0.2319, 0.6137, 0.2868, 0.8848, + 0.0267, 0.5097, 0.9541, 0.4569, 0.9397, 0.3945, 0.3763, + 0.9350, 0.7996, 0.5528, 0.9459, 0.6150, 0.5344, 0.9224, + 0.4343, 0.2583, 0.5743, 0.1810, 0.4732, 0.1012, 0.2292, + 0.5431, 0.6259, 0.8319, 0.9927, 0.5847, 0.8053, 0.0391, + 0.3854, 0.8999, 0.7842, 0.4838, 0.5113, 0.9715, 0.1757, + 0.7568, 0.4008, 0.5341, 0.1516, 0.0878, 0.5977, 0.5872, + 0.7439, 0.8081, 0.9535, 0.6902, 0.8931, 0.9586, 0.7881, + 0.0686, 0.4547, 0.0160, 0.3146, 0.6264, 0.2480, 0.7559, + 0.5560, 0.9085, 0.3908, 0.0424, 0.3481, 0.0393, 0.1234, + 0.5520, 0.5796, 0.8048, 0.0202, 0.8271, 0.9243, 0.6015, + 0.0508, 0.8893, 0.9673, 0.3880, 0.3853, 0.1352, 0.4800, + 0.1939, 0.7035, 0.2054, 0.1146, 0.2407, 0.4234, 0.9640, + 0.4558, 0.8502, 0.7625, 0.3075, 0.9902, 0.1845, 0.0707, + 0.8518, 0.7267, 0.6494, 0.4761, 0.1632, 0.1248, 0.8200, + 0.2043, 0.6022, 0.7800, 0.0537, 0.1505, 0.0646, 0.9228, + 0.0966, 0.8036, 0.2431, 0.5992, 0.1858, 0.5672, 0.8294, + 0.0135, 0.2238, 0.0068, 0.8473, 0.0323, 0.2138, 0.3134, + 0.2674, 0.4860, 0.8071, 0.7400, 0.9168, 0.0701, 0.7855, + 0.7080, 0.5714, 0.5288, 0.1187, 0.1954, 0.0067, 0.7680, + 0.5930, 0.8250, 0.6028, 0.2144, 0.0255, 0.2917, 0.4790, + 0.3892, 0.3563, 0.0423, 0.3253, 0.0092, 0.8956, 0.2515, + 
0.1414, 0.9761, 0.8159, 0.7089, 0.4956, 0.0026, 0.1488, + 0.2902, 0.9089, 0.4432, 0.7989, 0.9160, 0.2680, 0.3317, + 0.6128, 0.6111, 0.3647, 0.4016, 0.8650, 0.7226, 0.2642, + 0.4868, 0.9208, 0.7252, 0.5230, 0.1652, 0.0793, 0.9874, + 0.5129, 0.3412, 0.3833, 0.8354, 0.9507, 0.1921, 0.2168, + 0.6983, 0.4500, 0.7444, 0.9235, 0.5009, 0.2575]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.5348, 0.7986, 0.2200, ..., 0.0453, 0.2085, 0.0080]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 11.173964023590088 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([1074, 7022, 4087, 4354, 1115, 9252, 2574, 3979, 915, + 3835, 8518, 2769, 112, 3623, 1291, 615, 191, 66, + 1214, 5633, 6476, 7907, 7161, 7265, 5563, 2980, 7771, + 8906, 5431, 4607, 4770, 626, 9524, 3730, 3866, 8666, + 6423, 5296, 979, 1727, 6975, 9285, 7075, 7134, 6667, + 7325, 5220, 9414, 9490, 8118, 9426, 4227, 7390, 7605, + 9168, 8384, 6401, 6035, 7830, 9972, 5870, 7420, 9935, + 3091, 834, 8862, 5740, 3062, 5112, 2908, 639, 6091, + 436, 2916, 4734, 7108, 513, 2302, 7905, 4519, 3497, + 5454, 9753, 2744, 5180, 8794, 3393, 8207, 4827, 280, + 5568, 3468, 6511, 9773, 2254, 47, 1155, 342, 995, + 9068, 3240, 1324, 1365, 6342, 2285, 1870, 8660, 4106, + 1186, 2521, 7988, 2968, 4045, 8137, 1538, 2198, 760, + 50, 2598, 195, 7444, 4957, 6809, 4374, 6456, 7197, + 9552, 1863, 5023, 3277, 4323, 9903, 7260, 7125, 6873, + 513, 9772, 5373, 7137, 4122, 7749, 5169, 1971, 6272, + 9111, 6015, 736, 3176, 6128, 5202, 6847, 7163, 5150, + 4610, 4581, 9646, 6385, 4193, 8418, 6616, 2414, 1755, + 8625, 575, 1748, 6885, 2435, 2666, 6742, 5614, 244, + 879, 6562, 1359, 3194, 5727, 3551, 7083, 1842, 9514, + 4863, 8479, 941, 2601, 7999, 6494, 5141, 4403, 7993, + 1185, 1573, 5845, 2439, 1922, 1733, 8393, 7228, 6830, + 5908, 6228, 9466, 2785, 4515, 6730, 3151, 3419, 3419, + 776, 3574, 2419, 4836, 3293, 8124, 1875, 7572, 2958, + 4651, 9262, 660, 8285, 6455, 653, 672, 0, 5228, + 9450, 3945, 8974, 8121, 9650, 2145, 4773, 7437, 4493, + 2675, 1190, 9285, 6100, 4192, 7922, 232, 671, 1102, + 7639, 9274, 9775, 7093, 5205, 6899, 4017, 9239, 5474, + 4012, 2403, 7759, 1283, 4106, 9062, 6550, 1753, 6450, + 8863, 2663, 1355, 6905, 1092, 2895, 2727, 2867, 1269, + 6117, 7865, 2347, 1781, 2367, 3514, 5957, 7481, 9656, + 4374, 661, 4235, 5018, 7284, 6968, 9032, 5003, 1196, + 3640, 4689, 1986, 9599, 2163, 9857, 2986, 6217, 1124, + 3869, 6932, 9440, 839, 1387, 3226, 4043, 7157, 7379, + 8428, 8483, 4369, 6851, 8809, 7897, 703, 2976, 3699, + 3438, 2355, 6736, 7238, 176, 7739, 889, 2831, 6969, + 8789, 5800, 3110, 6498, 4705, 6469, 9448, 7282, 2134, + 3678, 5467, 1622, 3274, 5066, 5127, 8447, 100, 2983, + 4463, 3099, 8883, 2393, 5221, 5531, 6706, 7577, 5800, + 8450, 3440, 6855, 944, 6894, 3303, 1082, 5849, 737, + 1687, 3315, 15, 7406, 3990, 2259, 7989, 8348, 7160, + 9351, 3002, 3148, 9126, 6367, 8090, 2998, 8051, 9049, + 2364, 8211, 5174, 5618, 94, 3866, 7923, 9652, 3819, + 3703, 4529, 1291, 2502, 976, 2602, 9342, 2433, 3989, + 7743, 4041, 7702, 1609, 486, 6429, 3042, 7286, 
1452, + 35, 1305, 4754, 7796, 3723, 7653, 6695, 5085, 8985, + 3829, 2905, 9438, 2209, 6382, 4736, 9664, 3410, 9780, + 7415, 5568, 4279, 7830, 7106, 4004, 4722, 8650, 6975, + 2172, 2395, 8553, 3340, 3045, 7146, 5817, 3873, 8869, + 6247, 1175, 2122, 3744, 5725, 6888, 1097, 33, 3388, + 835, 6001, 1599, 2732, 8805, 8439, 7209, 2690, 5523, + 678, 764, 9297, 4921, 6303, 5169, 995, 8534, 8061, + 778, 3083, 4497, 1526, 7166, 9518, 9740, 5623, 1874, + 4112, 6650, 4724, 3864, 1864, 8807, 4615, 8323, 2087, + 3311, 7066, 3549, 9226, 6333, 9414, 1630, 6645, 4844, + 115, 7748, 2922, 9690, 9811, 2934, 356, 3565, 7527, + 5670, 3614, 2851, 2629, 3790, 7609, 2460, 7042, 9397, + 5659, 2806, 8290, 2345, 2448, 7257, 2879, 1633, 6514, + 6562, 6696, 7140, 2633, 5425, 7796, 6662, 1393, 2046, + 8374, 7261, 3390, 3981, 7552, 254, 5767, 2429, 5613, + 4296, 4489, 6937, 299, 9296, 2149, 2092, 8555, 8603, + 3237, 1398, 9762, 2121, 3314, 6752, 4047, 3531, 2282, + 9126, 155, 6181, 9517, 318, 9471, 241, 3861, 4693, + 7187, 8904, 9238, 3275, 8438, 9069, 1864, 9231, 9667, + 4009, 9288, 4307, 8620, 7353, 7065, 3457, 9361, 7063, + 4553, 4913, 406, 2735, 5848, 6549, 3893, 4278, 1320, + 8124, 6472, 1508, 6416, 9674, 982, 6780, 7269, 2117, + 4160, 2842, 7527, 8109, 2175, 6435, 756, 2829, 4551, + 1101, 5613, 2799, 6159, 3030, 9674, 3371, 4668, 2685, + 9264, 9477, 5536, 5293, 9200, 7997, 9530, 5953, 890, + 5904, 2629, 8659, 5653, 9317, 4267, 9976, 2511, 6842, + 90, 512, 2068, 6073, 7386, 8238, 8424, 3409, 3193, + 1239, 1431, 3902, 4600, 1601, 3567, 9765, 5150, 8916, + 2840, 3297, 5983, 970, 249, 369, 3172, 7109, 7253, + 6112, 1285, 9881, 3414, 9391, 3724, 571, 8346, 8358, + 4162, 7136, 6172, 8809, 6689, 815, 5048, 3642, 959, + 6179, 9525, 7319, 8094, 4231, 3522, 6081, 9527, 8667, + 4007, 6492, 5206, 2671, 131, 2593, 293, 1464, 8045, + 2173, 608, 8313, 546, 7335, 3665, 2688, 8988, 3584, + 6303, 4955, 3386, 1723, 7105, 4572, 2844, 5600, 6702, + 5410, 4316, 2976, 4686, 3464, 2186, 1556, 2021, 8745, + 13, 3322, 1670, 708, 3608, 2776, 8220, 2918, 692, + 5296, 5873, 4095, 1083, 1521, 7223, 7591, 5664, 4154, + 6765, 3312, 7008, 4212, 7168, 7463, 2544, 4892, 8746, + 7430, 8621, 2945, 8225, 1113, 3741, 6687, 6930, 3774, + 7211, 1927, 7164, 9820, 2083, 2916, 9934, 6793, 5105, + 4835, 3112, 3094, 1326, 7804, 7578, 4182, 739, 1123, + 1767, 8724, 9857, 2143, 6971, 6801, 376, 4107, 954, + 5018, 8170, 7753, 3368, 2333, 1, 2240, 2793, 1283, + 7038, 8646, 9694, 359, 2258, 5039, 1080, 5544, 9098, + 4490, 5886, 352, 1011, 1449, 531, 9355, 9342, 2004, + 9274, 7734, 8205, 2292, 9551, 5966, 9837, 1814, 4079, + 8382, 4410, 2600, 7625, 9363, 1842, 2879, 1253, 809, + 7014, 5812, 209, 5239, 2093, 1770, 314, 8424, 2510, + 8571, 9426, 7724, 9614, 5503, 3753, 2866, 1758, 9962, + 6, 7364, 8977, 2787, 766, 2450, 2844, 7089, 3871, + 3747, 9673, 87, 5211, 3728, 3843, 1425, 9293, 1646, + 8032, 5484, 3610, 2890, 6677, 6468, 1558, 3598, 3915, + 1993, 1609, 8274, 966, 8311, 8117, 5411, 9792, 5362, + 4456, 6051, 3532, 1282, 4790, 4302, 1110, 1051, 5344, + 197, 7166, 3635, 796, 2322, 8834, 7592, 9685, 6175, + 1580, 7579, 7995, 5470, 7573, 5255, 5134, 8199, 4635, + 6559, 7004, 535, 234, 482, 8638, 109, 9961, 4999, + 7801, 2361, 4476, 9026, 8063, 7010, 7559, 3651, 6179, + 1451, 928, 8960, 630, 76, 1209, 5158, 5630, 9966, + 5033, 8228, 9559, 9213, 1023, 7495, 1852, 4508, 8270, + 1627, 1030, 4994, 541, 3130, 827, 3601, 8830, 6755, + 4725, 6005, 7586, 3820, 5409, 4677, 5963, 549, 7869, + 8701, 7230, 9014, 1899, 4054, 7192, 5440, 4348, 1719, + 2600, 6431, 2356, 4202, 427, 
9630, 7862, 4930, 8419, + 7485, 2384, 6304, 9889, 7541, 9723, 7193, 9094, 6906, + 3017]), + values=tensor([0.9304, 0.0325, 0.3106, 0.4781, 0.2864, 0.4721, 0.3382, + 0.8821, 0.2738, 0.6632, 0.0397, 0.5410, 0.8129, 0.5302, + 0.8584, 0.6728, 0.7839, 0.2842, 0.6220, 0.2198, 0.9853, + 0.4195, 0.8925, 0.9780, 0.4936, 0.9314, 0.4002, 0.0589, + 0.0362, 0.6841, 0.4069, 0.9644, 0.1471, 0.1097, 0.7122, + 0.6469, 0.7726, 0.7037, 0.8236, 0.5724, 0.6757, 0.0210, + 0.7908, 0.1342, 0.5146, 0.6874, 0.0836, 0.6105, 0.6489, + 0.7550, 0.2489, 0.2644, 0.8196, 0.5567, 0.9361, 0.0192, + 0.7166, 0.4988, 0.6757, 0.6719, 0.0246, 0.9112, 0.3677, + 0.3643, 0.2678, 0.8468, 0.3772, 0.5807, 0.7059, 0.0144, + 0.4017, 0.2251, 0.1445, 0.5897, 0.5839, 0.8811, 0.5419, + 0.1899, 0.6807, 0.8358, 0.3278, 0.1587, 0.4733, 0.8342, + 0.8985, 0.6485, 0.7474, 0.6612, 0.0844, 0.3828, 0.0157, + 0.7714, 0.6608, 0.1525, 0.9463, 0.3394, 0.3867, 0.0432, + 0.8090, 0.3213, 0.9897, 0.9084, 0.9424, 0.2045, 0.4579, + 0.7852, 0.4647, 0.4513, 0.6407, 0.5200, 0.9790, 0.1694, + 0.8201, 0.7817, 0.9650, 0.1348, 0.2654, 0.0726, 0.6476, + 0.9696, 0.1144, 0.2269, 0.5974, 0.7825, 0.6712, 0.1593, + 0.9034, 0.9204, 0.3969, 0.4522, 0.3790, 0.7055, 0.0019, + 0.1878, 0.4210, 0.9245, 0.3068, 0.7871, 0.0815, 0.1037, + 0.9467, 0.2547, 0.9280, 0.3139, 0.5680, 0.6516, 0.6068, + 0.5981, 0.3913, 0.0407, 0.0947, 0.2105, 0.0303, 0.5718, + 0.2321, 0.5689, 0.9476, 0.1321, 0.0247, 0.4729, 0.7661, + 0.8935, 0.9971, 0.2980, 0.9100, 0.1945, 0.7887, 0.3662, + 0.6700, 0.4029, 0.0956, 0.0108, 0.3242, 0.1912, 0.0388, + 0.8159, 0.1239, 0.6482, 0.0548, 0.7241, 0.4628, 0.1188, + 0.3048, 0.7968, 0.6538, 0.0308, 0.2637, 0.2733, 0.6327, + 0.6480, 0.2813, 0.5175, 0.6726, 0.0450, 0.6176, 0.2589, + 0.7109, 0.0980, 0.7310, 0.5738, 0.9989, 0.1733, 0.0559, + 0.0624, 0.6747, 0.0930, 0.2298, 0.6306, 0.1193, 0.8276, + 0.2679, 0.3372, 0.0565, 0.5821, 0.6562, 0.4518, 0.8406, + 0.7838, 0.3267, 0.0377, 0.0535, 0.4407, 0.8150, 0.3303, + 0.6133, 0.4116, 0.9870, 0.8925, 0.8368, 0.9237, 0.9331, + 0.5461, 0.4353, 0.9658, 0.0052, 0.5013, 0.8249, 0.8077, + 0.8369, 0.9914, 0.7366, 0.6685, 0.5210, 0.7940, 0.8071, + 0.1086, 0.3441, 0.1479, 0.3668, 0.5370, 0.5706, 0.8571, + 0.3522, 0.2921, 0.1107, 0.4358, 0.9740, 0.6370, 0.7121, + 0.3797, 0.6431, 0.6432, 0.8570, 0.3341, 0.8876, 0.4912, + 0.3012, 0.2244, 0.8304, 0.8708, 0.6993, 0.6415, 0.1354, + 0.4973, 0.6766, 0.5583, 0.8158, 0.1846, 0.7576, 0.3301, + 0.2284, 0.7380, 0.2497, 0.8663, 0.4244, 0.8319, 0.8766, + 0.3127, 0.9083, 0.6717, 0.2607, 0.9060, 0.3022, 0.6929, + 0.7216, 0.1864, 0.9103, 0.0908, 0.8280, 0.6772, 0.9170, + 0.1666, 0.0432, 0.9895, 0.8882, 0.4445, 0.2577, 0.6991, + 0.0134, 0.7908, 0.1595, 0.2586, 0.1384, 0.8815, 0.9964, + 0.6719, 0.3849, 0.3745, 0.5578, 0.6641, 0.8020, 0.8256, + 0.8774, 0.1735, 0.0160, 0.3321, 0.9095, 0.6863, 0.8819, + 0.2760, 0.0176, 0.9191, 0.2224, 0.5883, 0.6735, 0.2168, + 0.8084, 0.2051, 0.7731, 0.7349, 0.4183, 0.4549, 0.7852, + 0.1645, 0.2619, 0.7500, 0.6211, 0.9320, 0.1410, 0.8013, + 0.3936, 0.8135, 0.1711, 0.7508, 0.1565, 0.5072, 0.4150, + 0.0222, 0.7654, 0.2057, 0.7224, 0.5103, 0.0219, 0.2565, + 0.1947, 0.2598, 0.2876, 0.9465, 0.1945, 0.3773, 0.5036, + 0.9181, 0.1480, 0.8127, 0.9489, 0.5086, 0.2695, 0.5627, + 0.6161, 0.4583, 0.0870, 0.7396, 0.9559, 0.7672, 0.7594, + 0.2165, 0.4330, 0.5886, 0.0477, 0.3072, 0.0691, 0.3499, + 0.5368, 0.0098, 0.1604, 0.5787, 0.5961, 0.6748, 0.5240, + 0.6174, 0.6377, 0.0557, 0.8169, 0.1661, 0.8698, 0.1999, + 0.5072, 0.0521, 0.7366, 0.9190, 0.1330, 0.7979, 0.2571, + 0.8104, 0.6892, 0.6507, 
0.8704, 0.6904, 0.5395, 0.5915, + 0.1731, 0.6768, 0.9334, 0.5447, 0.3147, 0.7565, 0.2840, + 0.8455, 0.7829, 0.1389, 0.9161, 0.9734, 0.2521, 0.5519, + 0.3612, 0.4687, 0.1449, 0.2398, 0.6605, 0.8039, 0.8419, + 0.6954, 0.8245, 0.4611, 0.7124, 0.8750, 0.0816, 0.7331, + 0.2216, 0.8005, 0.9334, 0.8519, 0.5866, 0.8732, 0.9519, + 0.4971, 0.6068, 0.1175, 0.8603, 0.6101, 0.3972, 0.0193, + 0.1596, 0.3517, 0.7516, 0.7870, 0.6434, 0.0362, 0.2214, + 0.6042, 0.3964, 0.6328, 0.2889, 0.2855, 0.8849, 0.5080, + 0.7797, 0.4322, 0.0814, 0.3647, 0.2061, 0.6314, 0.3193, + 0.1895, 0.3658, 0.6642, 0.1466, 0.1805, 0.6190, 0.9850, + 0.3325, 0.8191, 0.3665, 0.4316, 0.4284, 0.3112, 0.9226, + 0.5944, 0.0376, 0.1741, 0.8903, 0.0662, 0.9770, 0.9188, + 0.9310, 0.5606, 0.4272, 0.1108, 0.4718, 0.4460, 0.6248, + 0.5358, 0.2156, 0.0885, 0.3174, 0.8396, 0.1886, 0.2096, + 0.3417, 0.8097, 0.5694, 0.1045, 0.8763, 0.1113, 0.1353, + 0.0123, 0.9512, 0.5017, 0.3234, 0.1403, 0.0730, 0.8981, + 0.2740, 0.4134, 0.2135, 0.9805, 0.0445, 0.9458, 0.7869, + 0.3360, 0.7234, 0.2980, 0.1314, 0.3499, 0.6698, 0.4526, + 0.6499, 0.4686, 0.7291, 0.1916, 0.4110, 0.7064, 0.0622, + 0.1843, 0.1217, 0.1311, 0.8602, 0.9506, 0.1258, 0.9113, + 0.9310, 0.4848, 0.0104, 0.0267, 0.5186, 0.0305, 0.5081, + 0.5501, 0.3248, 0.8113, 0.3173, 0.7019, 0.0515, 0.0562, + 0.1638, 0.4617, 0.6547, 0.3705, 0.4788, 0.0628, 0.4462, + 0.3249, 0.8781, 0.7038, 0.4954, 0.3617, 0.8045, 0.1896, + 0.8468, 0.7628, 0.4651, 0.3750, 0.9370, 0.8226, 0.5039, + 0.7669, 0.5888, 0.7467, 0.1323, 0.9814, 0.9275, 0.4832, + 0.2850, 0.4635, 0.1488, 0.7094, 0.2071, 0.4950, 0.1863, + 0.2851, 0.7798, 0.3730, 0.3994, 0.2529, 0.5052, 0.4832, + 0.3839, 0.7730, 0.1994, 0.8801, 0.9634, 0.1279, 0.5202, + 0.4480, 0.2752, 0.8425, 0.2605, 0.6678, 0.0019, 0.1146, + 0.5118, 0.8404, 0.4252, 0.8911, 0.6844, 0.6707, 0.4919, + 0.4044, 0.9689, 0.9549, 0.5260, 0.2040, 0.3758, 0.8436, + 0.0719, 0.9238, 0.4595, 0.7501, 0.7038, 0.0842, 0.8019, + 0.5135, 0.4312, 0.0042, 0.3774, 0.0300, 0.3045, 0.1310, + 0.9498, 0.2047, 0.0065, 0.4750, 0.7205, 0.8268, 0.2414, + 0.5067, 0.3174, 0.7835, 0.1750, 0.2497, 0.8182, 0.4410, + 0.4937, 0.0668, 0.7211, 0.3732, 0.1165, 0.5206, 0.1429, + 0.5890, 0.1326, 0.1841, 0.2299, 0.6975, 0.0815, 0.8063, + 0.7601, 0.5190, 0.4973, 0.7820, 0.2718, 0.3645, 0.2388, + 0.6349, 0.4919, 0.5469, 0.5835, 0.1693, 0.2555, 0.6997, + 0.0080, 0.8024, 0.8773, 0.4755, 0.9793, 0.3227, 0.7826, + 0.4776, 0.5918, 0.9313, 0.3366, 0.3665, 0.6137, 0.6442, + 0.7228, 0.1110, 0.6812, 0.4938, 0.1352, 0.2780, 0.6843, + 0.3593, 0.6866, 0.5996, 0.6977, 0.2843, 0.3178, 0.7528, + 0.8342, 0.5386, 0.6190, 0.0664, 0.9083, 0.4500, 0.1762, + 0.6839, 0.7444, 0.6654, 0.5698, 0.6462, 0.6689, 0.6885, + 0.6494, 0.1899, 0.7462, 0.4931, 0.7785, 0.4078, 0.0875, + 0.6143, 0.8570, 0.9888, 0.3955, 0.6640, 0.6587, 0.7544, + 0.9891, 0.6031, 0.0927, 0.6392, 0.0282, 0.2897, 0.0799, + 0.3039, 0.7537, 0.0064, 0.0830, 0.2575, 0.1555, 0.1809, + 0.6300, 0.6627, 0.5442, 0.5686, 0.4545, 0.6247, 0.0230, + 0.1211, 0.9727, 0.2738, 0.3745, 0.1564, 0.8676, 0.3949, + 0.4342, 0.6528, 0.7121, 0.9790, 0.6180, 0.8497, 0.6934, + 0.8114, 0.4502, 0.2751, 0.7561, 0.6667, 0.0282, 0.9822, + 0.8863, 0.6138, 0.1747, 0.4926, 0.0927, 0.1322, 0.4515, + 0.6778, 0.3334, 0.7750, 0.2319, 0.6137, 0.2868, 0.8848, + 0.0267, 0.5097, 0.9541, 0.4569, 0.9397, 0.3945, 0.3763, + 0.9350, 0.7996, 0.5528, 0.9459, 0.6150, 0.5344, 0.9224, + 0.4343, 0.2583, 0.5743, 0.1810, 0.4732, 0.1012, 0.2292, + 0.5431, 0.6259, 0.8319, 0.9927, 0.5847, 0.8053, 0.0391, + 0.3854, 0.8999, 0.7842, 0.4838, 0.5113, 
0.9715, 0.1757, + 0.7568, 0.4008, 0.5341, 0.1516, 0.0878, 0.5977, 0.5872, + 0.7439, 0.8081, 0.9535, 0.6902, 0.8931, 0.9586, 0.7881, + 0.0686, 0.4547, 0.0160, 0.3146, 0.6264, 0.2480, 0.7559, + 0.5560, 0.9085, 0.3908, 0.0424, 0.3481, 0.0393, 0.1234, + 0.5520, 0.5796, 0.8048, 0.0202, 0.8271, 0.9243, 0.6015, + 0.0508, 0.8893, 0.9673, 0.3880, 0.3853, 0.1352, 0.4800, + 0.1939, 0.7035, 0.2054, 0.1146, 0.2407, 0.4234, 0.9640, + 0.4558, 0.8502, 0.7625, 0.3075, 0.9902, 0.1845, 0.0707, + 0.8518, 0.7267, 0.6494, 0.4761, 0.1632, 0.1248, 0.8200, + 0.2043, 0.6022, 0.7800, 0.0537, 0.1505, 0.0646, 0.9228, + 0.0966, 0.8036, 0.2431, 0.5992, 0.1858, 0.5672, 0.8294, + 0.0135, 0.2238, 0.0068, 0.8473, 0.0323, 0.2138, 0.3134, + 0.2674, 0.4860, 0.8071, 0.7400, 0.9168, 0.0701, 0.7855, + 0.7080, 0.5714, 0.5288, 0.1187, 0.1954, 0.0067, 0.7680, + 0.5930, 0.8250, 0.6028, 0.2144, 0.0255, 0.2917, 0.4790, + 0.3892, 0.3563, 0.0423, 0.3253, 0.0092, 0.8956, 0.2515, + 0.1414, 0.9761, 0.8159, 0.7089, 0.4956, 0.0026, 0.1488, + 0.2902, 0.9089, 0.4432, 0.7989, 0.9160, 0.2680, 0.3317, + 0.6128, 0.6111, 0.3647, 0.4016, 0.8650, 0.7226, 0.2642, + 0.4868, 0.9208, 0.7252, 0.5230, 0.1652, 0.0793, 0.9874, + 0.5129, 0.3412, 0.3833, 0.8354, 0.9507, 0.1921, 0.2168, + 0.6983, 0.4500, 0.7444, 0.9235, 0.5009, 0.2575]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.5348, 0.7986, 0.2200, ..., 0.0453, 0.2085, 0.0080]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 11.173964023590088 seconds + +[40.48, 41.2, 39.7, 39.62, 39.96, 39.55, 39.84, 39.41, 40.04, 39.52] +[97.63] +12.83384895324707 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 349456, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.173964023590088, 'TIME_S_1KI': 0.03197531026392475, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1252.9686733055114, 'W': 97.63} +[40.48, 41.2, 39.7, 39.62, 39.96, 39.55, 39.84, 39.41, 40.04, 39.52, 40.24, 39.6, 39.87, 39.86, 39.87, 39.92, 39.37, 39.31, 39.74, 39.32] +716.6399999999999 +35.831999999999994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 349456, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.173964023590088, 'TIME_S_1KI': 0.03197531026392475, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1252.9686733055114, 'W': 97.63, 'J_1KI': 3.585483360724988, 'W_1KI': 0.2793770889611282, 'W_D': 61.798, 'J_D': 793.1061976127625, 'W_D_1KI': 0.17684057506524428, 'J_D_1KI': 0.0005060453249200021} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..6ded063 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 308023, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.28289532661438, "TIME_S_1KI": 0.03338353086170311, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1226.4741826629638, "W": 98.08, "J_1KI": 
3.9817616952726382, "W_1KI": 0.3184177804904179, "W_D": 61.967, "J_D": 774.8870888772011, "W_D_1KI": 0.20117653551845155, "J_D_1KI": 0.0006531217977828004} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..df70699 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.019933462142944336} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 5, ..., 4999, 5000, 5000]), + col_indices=tensor([4080, 6557, 3158, ..., 4357, 6307, 2550]), + values=tensor([0.9910, 0.3414, 0.4855, ..., 0.2598, 0.6108, 0.2815]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.2787, 0.7388, 0.8319, ..., 0.5413, 0.0496, 0.2437]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.019933462142944336 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52675', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.7956035137176514} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), + col_indices=tensor([ 408, 476, 3837, ..., 3097, 8388, 8856]), + values=tensor([0.3698, 0.9808, 0.6496, ..., 0.7839, 0.4021, 0.3346]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.1775, 0.8809, 0.7204, ..., 0.4994, 0.6943, 0.3851]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 1.7956035137176514 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '308023', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.28289532661438} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 5000, 5000]), + col_indices=tensor([2483, 6584, 3017, ..., 870, 3138, 2052]), + values=tensor([0.7385, 0.7043, 0.9061, ..., 0.4377, 0.8515, 0.3180]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.1916, 0.9837, 0.2990, ..., 0.4110, 0.2807, 0.4933]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.28289532661438 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 5000, 5000]), + col_indices=tensor([2483, 6584, 3017, ..., 870, 3138, 2052]), + values=tensor([0.7385, 0.7043, 0.9061, ..., 0.4377, 0.8515, 0.3180]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.1916, 0.9837, 0.2990, ..., 0.4110, 0.2807, 0.4933]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.28289532661438 seconds + +[40.25, 39.88, 39.44, 44.89, 40.01, 39.55, 39.45, 39.48, 39.91, 41.29] +[98.08] +12.504834651947021 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 308023, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.28289532661438, 'TIME_S_1KI': 0.03338353086170311, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1226.4741826629638, 'W': 98.08} +[40.25, 39.88, 39.44, 44.89, 40.01, 39.55, 39.45, 39.48, 39.91, 41.29, 44.96, 39.78, 39.79, 40.18, 39.36, 39.46, 39.39, 39.37, 39.43, 39.28] +722.26 +36.113 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 308023, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.28289532661438, 'TIME_S_1KI': 0.03338353086170311, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1226.4741826629638, 'W': 98.08, 'J_1KI': 3.9817616952726382, 'W_1KI': 0.3184177804904179, 'W_D': 61.967, 'J_D': 774.8870888772011, 'W_D_1KI': 0.20117653551845155, 'J_D_1KI': 0.0006531217977828004} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..f68b6b9 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1275, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.370937824249268, "TIME_S_1KI": 8.134068881764131, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2317.954887828827, "W": 118.94, "J_1KI": 1818.0038335912368, "W_1KI": 93.28627450980392, "W_D": 82.976, "J_D": 1617.072681793213, "W_D_1KI": 65.07921568627451, "J_D_1KI": 51.04252210688197} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..8fe4739 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.8229217529296875} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support 
is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 48, 92, ..., 24999906, + 24999954, 25000000]), + col_indices=tensor([ 13687, 16103, 22085, ..., 466250, 497468, + 498839]), + values=tensor([0.1763, 0.0612, 0.1831, ..., 0.7206, 0.9735, 0.4201]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.0392, 0.3068, 0.8540, ..., 0.0771, 0.2433, 0.8939]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 0.8229217529296875 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1275', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.370937824249268} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 89, ..., 24999893, + 24999957, 25000000]), + col_indices=tensor([ 25264, 35882, 38786, ..., 487781, 491680, + 492236]), + values=tensor([0.0901, 0.4292, 0.0295, ..., 0.7641, 0.5758, 0.3435]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.7878, 0.6485, 0.9023, ..., 0.5055, 0.2764, 0.4227]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.370937824249268 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 89, ..., 24999893, + 24999957, 25000000]), + col_indices=tensor([ 25264, 35882, 38786, ..., 487781, 491680, + 492236]), + values=tensor([0.0901, 0.4292, 0.0295, ..., 0.7641, 0.5758, 0.3435]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.7878, 0.6485, 0.9023, ..., 0.5055, 0.2764, 0.4227]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.370937824249268 seconds + +[40.75, 40.02, 39.59, 39.62, 39.74, 39.84, 39.97, 39.74, 40.73, 39.5] +[118.94] +19.488438606262207 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1275, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.370937824249268, 'TIME_S_1KI': 8.134068881764131, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2317.954887828827, 'W': 118.94} +[40.75, 40.02, 39.59, 39.62, 39.74, 39.84, 39.97, 39.74, 40.73, 39.5, 41.45, 40.23, 40.12, 39.57, 39.77, 39.94, 39.75, 39.69, 40.29, 39.64] +719.28 +35.964 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1275, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.370937824249268, 'TIME_S_1KI': 8.134068881764131, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2317.954887828827, 'W': 118.94, 'J_1KI': 1818.0038335912368, 'W_1KI': 93.28627450980392, 'W_D': 82.976, 'J_D': 1617.072681793213, 'W_D_1KI': 65.07921568627451, 'J_D_1KI': 51.04252210688197} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..24823a1 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 20602, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.051029920578003, "TIME_S_1KI": 0.4878667081146492, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2012.3513662075998, "W": 152.11, "J_1KI": 97.67747627451702, "W_1KI": 7.383263760799923, "W_D": 115.75800000000001, "J_D": 1531.429685421467, "W_D_1KI": 5.61877487622561, "J_D_1KI": 0.272729583352374} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..60d167b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,89 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08110809326171875} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 8, ..., 2499988, + 2499994, 2500000]), + col_indices=tensor([ 61750, 191731, 192878, ..., 292292, 347392, + 413452]), + values=tensor([0.4333, 0.7749, 0.6975, ..., 0.5571, 0.2303, 0.6423]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7573, 0.7811, 0.2609, ..., 0.7028, 0.0683, 0.1077]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 0.08110809326171875 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12945', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.597289562225342} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 9, ..., 2499996, + 2499997, 2500000]), + col_indices=tensor([304373, 374974, 396567, ..., 161828, 243938, + 306700]), + values=tensor([0.0234, 0.0111, 0.7752, ..., 0.4123, 0.0911, 0.7333]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.2563, 0.2400, 0.1997, ..., 0.9331, 0.1838, 0.9541]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 6.597289562225342 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20602', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.051029920578003} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 2499995, + 2499998, 2500000]), + col_indices=tensor([ 84683, 221772, 250792, ..., 457280, 123381, + 490345]), + values=tensor([0.6671, 0.6498, 0.8275, ..., 0.5282, 0.6912, 0.3058]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8099, 0.6830, 0.6662, ..., 0.4435, 0.6731, 0.4595]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.051029920578003 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 2499995, + 2499998, 2500000]), + col_indices=tensor([ 84683, 221772, 250792, ..., 457280, 123381, + 490345]), + values=tensor([0.6671, 0.6498, 0.8275, ..., 0.5282, 0.6912, 0.3058]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8099, 0.6830, 0.6662, ..., 0.4435, 0.6731, 0.4595]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.051029920578003 seconds + +[40.55, 45.24, 39.91, 39.85, 39.84, 39.77, 40.43, 40.43, 42.0, 39.73] +[152.11] +13.22957968711853 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 20602, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.051029920578003, 'TIME_S_1KI': 0.4878667081146492, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2012.3513662075998, 'W': 152.11} +[40.55, 45.24, 39.91, 39.85, 39.84, 39.77, 40.43, 40.43, 42.0, 39.73, 40.67, 40.21, 39.79, 39.83, 39.69, 40.04, 39.86, 39.69, 40.08, 39.81] +727.04 +36.352 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 20602, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.051029920578003, 'TIME_S_1KI': 0.4878667081146492, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2012.3513662075998, 'W': 152.11, 'J_1KI': 97.67747627451702, 'W_1KI': 7.383263760799923, 'W_D': 115.75800000000001, 'J_D': 1531.429685421467, 'W_D_1KI': 5.61877487622561, 'J_D_1KI': 0.272729583352374} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..ac74c89 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2268, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.338330507278442, "TIME_S_1KI": 4.558346784514304, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1929.4526886749268, "W": 124.72, "J_1KI": 850.7286987102852, "W_1KI": 54.99118165784832, "W_D": 88.6155, "J_D": 
1370.9061476368904, "W_D_1KI": 39.07208994708994, "J_D_1KI": 17.227552886723963} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..4f74cbe --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.46280455589294434} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 21, 39, ..., 12499960, + 12499981, 12500000]), + col_indices=tensor([ 5530, 18658, 36900, ..., 388989, 426254, + 497258]), + values=tensor([0.8053, 0.3880, 0.4779, ..., 0.4773, 0.4279, 0.6817]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.5886, 0.4606, 0.7255, ..., 0.1606, 0.2608, 0.5232]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 0.46280455589294434 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2268', '-ss', '500000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.338330507278442} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 30, 63, ..., 12499957, + 12499979, 12500000]), + col_indices=tensor([ 14790, 16334, 55074, ..., 466420, 486794, + 499923]), + values=tensor([0.8543, 0.1686, 0.8292, ..., 0.6567, 0.2357, 0.6950]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.8639, 0.3423, 0.4800, ..., 0.1443, 0.7816, 0.0060]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.338330507278442 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 30, 63, ..., 12499957, + 12499979, 12500000]), + col_indices=tensor([ 14790, 16334, 55074, ..., 466420, 486794, + 499923]), + values=tensor([0.8543, 0.1686, 0.8292, ..., 0.6567, 0.2357, 0.6950]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.8639, 0.3423, 0.4800, ..., 0.1443, 0.7816, 0.0060]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.338330507278442 seconds + +[40.4, 42.56, 40.55, 40.15, 39.72, 40.38, 39.75, 39.66, 39.85, 39.57] +[124.72] +15.470274925231934 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2268, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.338330507278442, 'TIME_S_1KI': 4.558346784514304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1929.4526886749268, 'W': 124.72} +[40.4, 42.56, 40.55, 40.15, 39.72, 40.38, 39.75, 39.66, 39.85, 39.57, 40.6, 40.35, 40.27, 40.13, 39.75, 39.68, 39.73, 39.6, 39.89, 39.57] +722.09 +36.1045 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2268, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.338330507278442, 'TIME_S_1KI': 4.558346784514304, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1929.4526886749268, 'W': 124.72, 'J_1KI': 850.7286987102852, 'W_1KI': 54.99118165784832, 'W_D': 88.6155, 'J_D': 1370.9061476368904, 'W_D_1KI': 39.07208994708994, 'J_D_1KI': 17.227552886723963} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..366bd80 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 89538, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.289806604385376, "TIME_S_1KI": 0.11492111287258344, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1521.4007213592529, "W": 116.96, "J_1KI": 16.991676398392332, "W_1KI": 1.3062610288369183, "W_D": 80.71074999999999, "J_D": 1049.8751134699583, "W_D_1KI": 0.9014133663919228, "J_D_1KI": 0.010067383305322017} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..aedb6df --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,105 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.049301862716674805} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 13, ..., 249993, 249996, + 250000]), + col_indices=tensor([ 1709, 19790, 28830, ..., 3831, 22257, 48856]), + values=tensor([0.9244, 0.7522, 0.6687, ..., 0.7540, 0.7318, 0.7260]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0785, 0.8938, 0.5541, ..., 0.5935, 0.2052, 0.2232]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.049301862716674805 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21297', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.7824935913085938} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 8, ..., 249989, 249993, + 250000]), + col_indices=tensor([16415, 16632, 32449, ..., 45169, 45288, 48610]), + values=tensor([0.0101, 0.6954, 0.6241, ..., 0.3711, 0.7246, 0.3748]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6515, 0.7514, 0.0204, ..., 0.8861, 0.6124, 0.4798]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 2.7824935913085938 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '80366', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.42440915107727} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 10, ..., 249987, 249993, + 250000]), + col_indices=tensor([ 2445, 24855, 26173, ..., 23560, 26333, 46130]), + values=tensor([0.2012, 0.2713, 0.8391, ..., 0.5844, 0.7972, 0.4463]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5580, 0.1767, 0.6905, ..., 0.9860, 0.6709, 0.2165]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 9.42440915107727 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '89538', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.289806604385376} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 7, ..., 249988, 249996, + 250000]), + col_indices=tensor([ 2244, 34732, 7243, ..., 9132, 13610, 19520]), + values=tensor([0.6983, 0.0446, 0.9216, ..., 0.0232, 0.0374, 0.6300]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8539, 0.6321, 0.4259, ..., 0.2899, 0.6274, 0.3350]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.289806604385376 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 7, ..., 249988, 249996, + 250000]), + col_indices=tensor([ 2244, 34732, 7243, ..., 9132, 13610, 19520]), + values=tensor([0.6983, 0.0446, 0.9216, ..., 0.0232, 0.0374, 0.6300]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8539, 0.6321, 0.4259, ..., 0.2899, 0.6274, 0.3350]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.289806604385376 seconds + +[40.27, 39.54, 39.5, 39.65, 39.93, 39.44, 40.01, 39.51, 45.05, 39.37] +[116.96] +13.007872104644775 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 89538, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.289806604385376, 'TIME_S_1KI': 0.11492111287258344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1521.4007213592529, 'W': 116.96} +[40.27, 39.54, 39.5, 39.65, 39.93, 39.44, 40.01, 39.51, 45.05, 39.37, 40.45, 39.41, 39.86, 39.36, 39.91, 44.54, 39.53, 39.97, 39.84, 39.78] +724.985 +36.24925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 89538, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.289806604385376, 'TIME_S_1KI': 0.11492111287258344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1521.4007213592529, 'W': 116.96, 'J_1KI': 16.991676398392332, 'W_1KI': 1.3062610288369183, 'W_D': 80.71074999999999, 'J_D': 1049.8751134699583, 'W_D_1KI': 0.9014133663919228, 'J_D_1KI': 0.010067383305322017} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..16ebf8c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 45908, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.364075660705566, "TIME_S_1KI": 0.22575750763931268, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1982.6904290771486, "W": 146.58, "J_1KI": 43.18834253457238, "W_1KI": 3.192907554238913, "W_D": 110.64150000000001, "J_D": 1496.5741786651613, "W_D_1KI": 2.410070140280561, "J_D_1KI": 0.05249782478610615} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..f9b44d5 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.0605926513671875} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 42, 99, ..., 2499902, + 2499955, 2500000]), + col_indices=tensor([ 1009, 1628, 5292, ..., 43455, 47256, 47946]), + values=tensor([0.2339, 0.7843, 0.8407, ..., 0.0388, 0.2390, 0.6904]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3494, 0.3893, 0.8826, ..., 0.0693, 0.0070, 0.7582]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 0.0605926513671875 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '17328', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.963207721710205} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 85, ..., 2499905, + 2499952, 2500000]), + col_indices=tensor([ 2138, 2192, 2629, ..., 48532, 49646, 49876]), + values=tensor([0.7824, 0.0061, 0.7967, ..., 0.1635, 0.4732, 0.5157]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8165, 0.7580, 0.0903, ..., 0.6290, 0.7559, 0.6116]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 3.963207721710205 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '45908', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.364075660705566} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 48, 99, ..., 2499901, + 2499955, 2500000]), + col_indices=tensor([ 2242, 2630, 4307, ..., 47333, 48170, 49131]), + values=tensor([0.3970, 0.2919, 0.1690, ..., 0.5693, 0.6652, 0.4283]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7545, 0.7866, 0.4331, ..., 0.1722, 0.5406, 0.9467]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.364075660705566 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 48, 99, ..., 2499901, + 2499955, 2500000]), + col_indices=tensor([ 2242, 2630, 4307, ..., 47333, 48170, 49131]), + values=tensor([0.3970, 0.2919, 0.1690, ..., 0.5693, 0.6652, 0.4283]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7545, 0.7866, 0.4331, ..., 0.1722, 0.5406, 0.9467]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.364075660705566 seconds + +[40.59, 39.91, 39.52, 39.54, 39.7, 39.41, 39.47, 40.95, 40.05, 39.39] +[146.58] +13.526336669921875 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 45908, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.364075660705566, 'TIME_S_1KI': 0.22575750763931268, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1982.6904290771486, 'W': 146.58} +[40.59, 39.91, 39.52, 39.54, 39.7, 39.41, 39.47, 40.95, 40.05, 39.39, 40.78, 39.79, 39.69, 41.93, 40.24, 39.57, 39.58, 39.7, 39.52, 39.64] +718.77 +35.9385 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 45908, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.364075660705566, 'TIME_S_1KI': 0.22575750763931268, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1982.6904290771486, 'W': 146.58, 'J_1KI': 43.18834253457238, 'W_1KI': 3.192907554238913, 'W_D': 110.64150000000001, 'J_D': 1496.5741786651613, 'W_D_1KI': 2.410070140280561, 'J_D_1KI': 0.05249782478610615} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..20ca6ca --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1726, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.692668676376343, "TIME_S_1KI": 6.1950571705540805, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2256.2120828294755, "W": 117.97000000000001, "J_1KI": 1307.1912415002755, "W_1KI": 68.34878331402086, "W_D": 81.57950000000002, "J_D": 1560.2327168872362, "W_D_1KI": 47.26506373117035, "J_D_1KI": 27.384162069044237} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..939a7fb --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.6629180908203125} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 514, 996, ..., 24998956, + 24999472, 25000000]), + col_indices=tensor([ 1, 94, 348, ..., 49850, 49922, 49959]), + values=tensor([0.7408, 0.6252, 0.3689, ..., 0.4667, 0.9642, 0.0582]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.9247, 0.2733, 0.8266, ..., 0.5422, 0.1520, 0.6812]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 0.6629180908203125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1583', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.627561807632446} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 562, 1043, ..., 24999032, + 24999504, 25000000]), + col_indices=tensor([ 4, 50, 78, ..., 49916, 49920, 49965]), + values=tensor([0.0759, 0.2514, 0.9400, ..., 0.2240, 0.9432, 0.3438]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3732, 0.6785, 0.5695, ..., 0.6003, 0.8169, 0.4003]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 9.627561807632446 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1726', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.692668676376343} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 457, 970, ..., 24999002, + 24999486, 25000000]), + col_indices=tensor([ 100, 360, 480, ..., 49859, 49889, 49953]), + values=tensor([0.3856, 0.6378, 0.2660, ..., 0.6784, 0.6537, 0.7029]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.4800, 0.0280, 0.3242, ..., 0.3544, 0.6298, 0.7207]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.692668676376343 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 457, 970, ..., 24999002, + 24999486, 25000000]), + col_indices=tensor([ 100, 360, 480, ..., 49859, 49889, 49953]), + values=tensor([0.3856, 0.6378, 0.2660, ..., 0.6784, 0.6537, 0.7029]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.4800, 0.0280, 0.3242, ..., 0.3544, 0.6298, 0.7207]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.692668676376343 seconds + +[41.47, 39.7, 39.74, 40.1, 40.28, 40.05, 40.37, 39.58, 39.73, 40.01] +[117.97] +19.125303745269775 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1726, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.692668676376343, 'TIME_S_1KI': 6.1950571705540805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2256.2120828294755, 'W': 117.97000000000001} +[41.47, 39.7, 39.74, 40.1, 40.28, 40.05, 40.37, 39.58, 39.73, 40.01, 41.18, 40.13, 45.73, 39.77, 39.88, 40.17, 39.76, 40.02, 41.27, 40.4] +727.81 +36.390499999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1726, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.692668676376343, 'TIME_S_1KI': 6.1950571705540805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2256.2120828294755, 'W': 117.97000000000001, 'J_1KI': 1307.1912415002755, 'W_1KI': 68.34878331402086, 'W_D': 81.57950000000002, 'J_D': 1560.2327168872362, 'W_D_1KI': 47.26506373117035, 'J_D_1KI': 27.384162069044237} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..53cb9a4 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 125418, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.786596775054932, "TIME_S_1KI": 0.0860051729022543, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1353.9629184293747, "W": 103.38, "J_1KI": 10.795602851499583, "W_1KI": 0.8242835956561259, "W_D": 67.61375, "J_D": 885.5340518084168, 
"W_D_1KI": 0.5391072254381349, "J_D_1KI": 0.004298483674098893} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..b030af9 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05780529975891113} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), + col_indices=tensor([26335, 27290, 38418, ..., 19756, 20120, 4010]), + values=tensor([0.3389, 0.0656, 0.4529, ..., 0.8287, 0.8944, 0.8355]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6330, 0.8862, 0.5805, ..., 0.8180, 0.2124, 0.8337]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.05780529975891113 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18164', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.5206849575042725} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([47070, 16594, 28343, ..., 43440, 28747, 28655]), + values=tensor([0.5955, 0.4100, 0.8378, ..., 0.8449, 0.3361, 0.6219]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0973, 0.1697, 0.0749, ..., 0.0145, 0.6554, 0.2719]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 1.5206849575042725 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '125418', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.786596775054932} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([27120, 12941, 15664, ..., 3161, 41560, 29450]), + values=tensor([0.4509, 0.2974, 0.8733, ..., 0.8770, 0.0483, 0.7990]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0817, 0.8974, 0.0414, ..., 0.9825, 0.3309, 0.2047]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.786596775054932 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([27120, 12941, 15664, ..., 3161, 41560, 29450]), + values=tensor([0.4509, 0.2974, 0.8733, ..., 0.8770, 0.0483, 0.7990]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0817, 0.8974, 0.0414, ..., 0.9825, 0.3309, 0.2047]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.786596775054932 seconds + +[40.88, 40.06, 39.87, 40.02, 39.61, 39.65, 39.87, 39.87, 39.51, 39.91] +[103.38] +13.096952199935913 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 125418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.786596775054932, 'TIME_S_1KI': 0.0860051729022543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1353.9629184293747, 'W': 103.38} +[40.88, 40.06, 39.87, 40.02, 39.61, 39.65, 39.87, 39.87, 39.51, 39.91, 40.56, 39.51, 39.82, 39.66, 39.34, 39.38, 39.55, 39.32, 39.66, 39.9] +715.325 +35.76625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 125418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.786596775054932, 'TIME_S_1KI': 0.0860051729022543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1353.9629184293747, 'W': 103.38, 'J_1KI': 10.795602851499583, 'W_1KI': 0.8242835956561259, 'W_D': 67.61375, 'J_D': 885.5340518084168, 'W_D_1KI': 0.5391072254381349, 'J_D_1KI': 0.004298483674098893} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..d831091 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 106823, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.303539752960205, "TIME_S_1KI": 0.09645431932224527, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, 
"J": 1429.2441740632057, "W": 111.25, "J_1KI": 13.379554721953191, "W_1KI": 1.0414423860030144, "W_D": 75.065, "J_D": 964.37046225667, "W_D_1KI": 0.7027044737556518, "J_D_1KI": 0.006578213247668122} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..a27a2d5 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.05483698844909668} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 7, ..., 124996, 124998, + 125000]), + col_indices=tensor([28119, 29640, 21715, ..., 29199, 13516, 45728]), + values=tensor([0.3782, 0.4368, 0.3959, ..., 0.8630, 0.5532, 0.4165]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.8607, 0.2103, 0.4385, ..., 0.0263, 0.3906, 0.3161]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.05483698844909668 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19147', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.8820090293884277} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 6, ..., 124991, 124992, + 125000]), + col_indices=tensor([32530, 36762, 311, ..., 24158, 32618, 44758]), + values=tensor([0.9615, 0.3318, 0.5732, ..., 0.8773, 0.1422, 0.4683]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.1372, 0.3779, 0.3457, ..., 0.7036, 0.6193, 0.2501]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 1.8820090293884277 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '106823', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.303539752960205} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 5, ..., 124996, 124998, + 125000]), + col_indices=tensor([16502, 37527, 11294, ..., 28497, 8084, 35661]), + values=tensor([0.2823, 0.8232, 0.0849, ..., 0.6885, 0.2665, 0.0851]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.8734, 0.5898, 0.3749, ..., 0.2817, 0.4056, 0.3872]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.303539752960205 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 5, ..., 124996, 124998, + 125000]), + col_indices=tensor([16502, 37527, 11294, ..., 28497, 8084, 35661]), + values=tensor([0.2823, 0.8232, 0.0849, ..., 0.6885, 0.2665, 0.0851]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.8734, 0.5898, 0.3749, ..., 0.2817, 0.4056, 0.3872]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.303539752960205 seconds + +[40.0, 39.65, 39.5, 39.28, 44.73, 39.57, 39.57, 39.75, 39.32, 39.43] +[111.25] +12.84713864326477 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 106823, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.303539752960205, 'TIME_S_1KI': 0.09645431932224527, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1429.2441740632057, 'W': 111.25} +[40.0, 39.65, 39.5, 39.28, 44.73, 39.57, 39.57, 39.75, 39.32, 39.43, 41.22, 44.93, 39.95, 39.46, 39.81, 39.42, 39.55, 39.68, 39.52, 39.37] +723.7 +36.185 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 106823, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.303539752960205, 'TIME_S_1KI': 0.09645431932224527, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1429.2441740632057, 'W': 111.25, 'J_1KI': 13.379554721953191, 'W_1KI': 1.0414423860030144, 'W_D': 75.065, 'J_D': 964.37046225667, 'W_D_1KI': 0.7027044737556518, 'J_D_1KI': 0.006578213247668122} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..59bfbf5 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 423606, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.169308185577393, "TIME_S_1KI": 0.024006525369275677, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1199.050221145153, "W": 95.75, "J_1KI": 2.8305789369016328, "W_1KI": 0.2260355141334164, "W_D": 60.24975, "J_D": 754.4906116077303, "W_D_1KI": 0.14223063412699538, "J_D_1KI": 0.0003357616136858198} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..ae86751 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.018892288208007812} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 4, ..., 2500, 2500, 2500]), + col_indices=tensor([1108, 1116, 4456, ..., 2396, 548, 1385]), + values=tensor([0.8638, 0.8794, 0.8595, ..., 0.2787, 0.2270, 0.2436]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.1028, 0.3454, 0.0668, ..., 0.0203, 0.1099, 0.2752]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.018892288208007812 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '55578', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.3776216506958008} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2500, 2500]), + col_indices=tensor([1067, 4726, 2617, ..., 4515, 4937, 207]), + values=tensor([0.7749, 0.8447, 0.6931, ..., 0.5698, 0.5658, 0.7624]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8259, 0.6183, 0.2744, ..., 0.6644, 0.1716, 0.4385]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 1.3776216506958008 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '423606', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.169308185577393} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), + col_indices=tensor([ 723, 3357, 4021, ..., 1038, 2195, 2669]), + values=tensor([0.5380, 0.6250, 0.5522, ..., 0.2239, 0.7354, 0.7870]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8647, 0.0404, 0.2214, ..., 0.2716, 0.6887, 0.8481]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.169308185577393 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), + col_indices=tensor([ 723, 3357, 4021, ..., 1038, 2195, 2669]), + values=tensor([0.5380, 0.6250, 0.5522, ..., 0.2239, 0.7354, 0.7870]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8647, 0.0404, 0.2214, ..., 0.2716, 0.6887, 0.8481]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.169308185577393 seconds + +[40.09, 39.04, 39.2, 39.28, 39.5, 39.12, 39.02, 39.36, 39.0, 39.1] +[95.75] +12.522717714309692 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 423606, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.169308185577393, 'TIME_S_1KI': 0.024006525369275677, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1199.050221145153, 'W': 95.75} +[40.09, 39.04, 39.2, 39.28, 39.5, 39.12, 39.02, 39.36, 39.0, 39.1, 39.76, 39.19, 39.37, 38.99, 39.74, 39.45, 39.52, 39.05, 39.58, 44.24] +710.005 +35.50025 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 423606, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.169308185577393, 'TIME_S_1KI': 0.024006525369275677, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1199.050221145153, 'W': 95.75, 'J_1KI': 2.8305789369016328, 'W_1KI': 0.2260355141334164, 'W_D': 60.24975, 'J_D': 754.4906116077303, 'W_D_1KI': 0.14223063412699538, 'J_D_1KI': 0.0003357616136858198} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..1eeb5de --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 247437, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.32590627670288, "TIME_S_1KI": 0.04173145599365851, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1233.848487944603, "W": 98.03, "J_1KI": 4.986515710846005, "W_1KI": 0.39618165432008956, "W_D": 62.5795, "J_D": 787.6529781835079, "W_D_1KI": 0.2529108419516887, "J_D_1KI": 0.001022122164234487} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..f512a45 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.016702651977539062} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 10, ..., 24993, 24996, 25000]), + col_indices=tensor([ 203, 3164, 3874, ..., 1660, 2575, 4898]), + values=tensor([0.2509, 0.0733, 0.7857, ..., 0.9782, 0.1584, 0.7182]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6754, 0.2035, 0.5445, ..., 0.8964, 0.5875, 0.7630]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.016702651977539062 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '62864', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.6676299571990967} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 24992, 24998, 25000]), + col_indices=tensor([1528, 1565, 2407, ..., 4843, 196, 1526]), + values=tensor([0.9615, 0.4377, 0.6921, ..., 0.0433, 0.3280, 0.0962]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1897, 0.1898, 0.6419, ..., 0.9248, 0.4513, 0.5147]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 2.6676299571990967 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '247437', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.32590627670288} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 13, ..., 24992, 24996, 25000]), + col_indices=tensor([ 429, 548, 735, ..., 2923, 3331, 3611]), + values=tensor([0.1470, 0.7094, 0.7244, ..., 0.3013, 0.3840, 0.1701]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0349, 0.9280, 0.8549, ..., 0.2131, 0.1223, 0.0130]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.32590627670288 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 13, ..., 24992, 24996, 25000]), + col_indices=tensor([ 429, 548, 735, ..., 2923, 3331, 3611]), + values=tensor([0.1470, 0.7094, 0.7244, ..., 0.3013, 0.3840, 0.1701]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0349, 0.9280, 0.8549, ..., 0.2131, 0.1223, 0.0130]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.32590627670288 seconds + +[40.5, 39.52, 39.15, 39.13, 39.57, 40.5, 39.17, 39.06, 39.52, 39.1] +[98.03] +12.586437702178955 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 247437, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.32590627670288, 'TIME_S_1KI': 0.04173145599365851, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1233.848487944603, 'W': 98.03} +[40.5, 39.52, 39.15, 39.13, 39.57, 40.5, 39.17, 39.06, 39.52, 39.1, 40.17, 39.6, 39.6, 39.15, 39.15, 39.05, 39.05, 39.08, 39.25, 39.15] +709.01 +35.4505 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 247437, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.32590627670288, 'TIME_S_1KI': 0.04173145599365851, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1233.848487944603, 'W': 98.03, 'J_1KI': 4.986515710846005, 'W_1KI': 0.39618165432008956, 'W_D': 62.5795, 'J_D': 787.6529781835079, 'W_D_1KI': 0.2529108419516887, 'J_D_1KI': 0.001022122164234487} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..685c3bf --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 163068, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.72166895866394, "TIME_S_1KI": 0.07188209187985345, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1665.769057817459, "W": 116.82, "J_1KI": 10.215180524796153, "W_1KI": 0.7163882552064169, "W_D": 81.24949999999998, "J_D": 1158.5593482549189, "W_D_1KI": 0.4982553290651751, "J_D_1KI": 0.0030555064700933054} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..bc4d2ed --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 
250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.02829742431640625} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 58, 108, ..., 249919, 249959, + 250000]), + col_indices=tensor([ 73, 104, 551, ..., 4719, 4888, 4958]), + values=tensor([0.4939, 0.4915, 0.0888, ..., 0.3493, 0.1552, 0.1459]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.1377, 0.0837, 0.3150, ..., 0.5794, 0.8670, 0.3865]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.02829742431640625 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '37105', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.3891899585723877} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 55, 99, ..., 249890, 249937, + 250000]), + col_indices=tensor([ 6, 32, 44, ..., 4844, 4921, 4988]), + values=tensor([0.1281, 0.2469, 0.7745, ..., 0.0638, 0.9042, 0.9189]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6302, 0.1474, 0.6987, ..., 0.1092, 0.0062, 0.2645]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 2.3891899585723877 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '163068', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.72166895866394} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 94, ..., 249900, 249951, + 250000]), + col_indices=tensor([ 17, 114, 188, ..., 4806, 4921, 4968]), + values=tensor([0.1229, 0.8785, 0.6808, ..., 0.9268, 0.7326, 0.7148]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4545, 0.7872, 0.8321, ..., 0.0206, 0.6423, 0.1627]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 11.72166895866394 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 94, ..., 249900, 249951, + 250000]), + col_indices=tensor([ 17, 114, 188, ..., 4806, 4921, 4968]), + values=tensor([0.1229, 0.8785, 0.6808, ..., 0.9268, 0.7326, 0.7148]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4545, 0.7872, 0.8321, ..., 0.0206, 0.6423, 0.1627]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 11.72166895866394 seconds + +[40.87, 39.29, 39.18, 39.72, 39.08, 39.54, 39.6, 39.54, 39.86, 39.27] +[116.82] +14.259279727935791 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 163068, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.72166895866394, 'TIME_S_1KI': 0.07188209187985345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1665.769057817459, 'W': 116.82} +[40.87, 39.29, 39.18, 39.72, 39.08, 39.54, 39.6, 39.54, 39.86, 39.27, 40.71, 39.19, 39.43, 39.27, 39.46, 39.83, 39.54, 39.53, 39.37, 39.11] +711.4100000000001 +35.5705 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 163068, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.72166895866394, 'TIME_S_1KI': 0.07188209187985345, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1665.769057817459, 'W': 116.82, 'J_1KI': 10.215180524796153, 'W_1KI': 0.7163882552064169, 'W_D': 81.24949999999998, 'J_D': 1158.5593482549189, 'W_D_1KI': 0.4982553290651751, 'J_D_1KI': 0.0030555064700933054} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..022fbf8 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 90395, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.401180028915405, "TIME_S_1KI": 0.11506366534559882, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1713.355565071106, "W": 132.0, "J_1KI": 18.954096632237466, "W_1KI": 1.4602577576193374, "W_D": 96.25175, "J_D": 1249.344481138885, "W_D_1KI": 1.0647906410752808, "J_D_1KI": 0.011779309044474592} diff --git 
a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..6ede2ab --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03801298141479492} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 279, 546, ..., 1249528, + 1249760, 1250000]), + col_indices=tensor([ 17, 21, 26, ..., 4944, 4980, 4991]), + values=tensor([0.9138, 0.1459, 0.7159, ..., 0.0773, 0.4834, 0.3377]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.0568, 0.1587, 0.8688, ..., 0.8476, 0.8640, 0.6593]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.03801298141479492 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27622', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 3.2084648609161377} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 251, 530, ..., 1249473, + 1249739, 1250000]), + col_indices=tensor([ 53, 63, 72, ..., 4941, 4984, 4995]), + values=tensor([0.4190, 0.6332, 0.1682, ..., 0.1102, 0.0295, 0.1696]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4081, 0.8253, 0.9060, ..., 0.4379, 0.8960, 0.7193]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 3.2084648609161377 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '90395', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.401180028915405} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 225, 477, ..., 1249532, + 1249754, 1250000]), + col_indices=tensor([ 0, 4, 16, ..., 4911, 4963, 4980]), + values=tensor([0.5027, 0.8615, 0.8405, ..., 0.5051, 0.1395, 0.2376]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.7929, 0.2058, 0.1103, ..., 0.0989, 0.8674, 0.8642]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.401180028915405 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 225, 477, ..., 1249532, + 1249754, 1250000]), + col_indices=tensor([ 0, 4, 16, ..., 4911, 4963, 4980]), + values=tensor([0.5027, 0.8615, 0.8405, ..., 0.5051, 0.1395, 0.2376]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.7929, 0.2058, 0.1103, ..., 0.0989, 0.8674, 0.8642]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.401180028915405 seconds + +[40.46, 40.0, 39.58, 39.65, 39.39, 39.96, 39.38, 39.29, 39.31, 39.34] +[132.0] +12.979966402053833 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 90395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.401180028915405, 'TIME_S_1KI': 0.11506366534559882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1713.355565071106, 'W': 132.0} +[40.46, 40.0, 39.58, 39.65, 39.39, 39.96, 39.38, 39.29, 39.31, 39.34, 40.66, 39.72, 41.06, 39.66, 39.37, 39.71, 39.69, 39.36, 39.44, 40.33] +714.9649999999999 +35.74825 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 90395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.401180028915405, 'TIME_S_1KI': 0.11506366534559882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1713.355565071106, 'W': 132.0, 'J_1KI': 18.954096632237466, 'W_1KI': 1.4602577576193374, 'W_D': 96.25175, 'J_D': 1249.344481138885, 'W_D_1KI': 1.0647906410752808, 'J_D_1KI': 0.011779309044474592} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..28b4bfb --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 52843, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.472357749938965, "TIME_S_1KI": 0.19817871335728413, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1834.1895242333412, 
"W": 138.65, "J_1KI": 34.710170206713116, "W_1KI": 2.623810154608936, "W_D": 102.62450000000001, "J_D": 1357.611127513051, "W_D_1KI": 1.9420642279961398, "J_D_1KI": 0.036751589198117815} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..05a194a --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.05003714561462402} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 498, 977, ..., 2499006, + 2499489, 2500000]), + col_indices=tensor([ 2, 20, 22, ..., 4935, 4945, 4946]), + values=tensor([0.7520, 0.3359, 0.1395, ..., 0.8155, 0.8337, 0.5892]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3287, 0.7670, 0.4633, ..., 0.8662, 0.8996, 0.4236]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.05003714561462402 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20984', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 4.169527769088745} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 527, 1017, ..., 2498957, + 2499469, 2500000]), + col_indices=tensor([ 0, 2, 46, ..., 4971, 4981, 4983]), + values=tensor([0.4264, 0.7891, 0.1289, ..., 0.9402, 0.9265, 0.8274]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1592, 0.0229, 0.7345, ..., 0.9022, 0.9396, 0.4003]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 4.169527769088745 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52843', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.472357749938965} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 536, 1043, ..., 2499033, + 2499531, 2500000]), + col_indices=tensor([ 2, 4, 9, ..., 4990, 4992, 4998]), + values=tensor([0.3950, 0.1857, 0.2386, ..., 0.4312, 0.4990, 0.5416]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5233, 0.3838, 0.2090, ..., 0.9440, 0.1891, 0.8384]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.472357749938965 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 536, 1043, ..., 2499033, + 2499531, 2500000]), + col_indices=tensor([ 2, 4, 9, ..., 4990, 4992, 4998]), + values=tensor([0.3950, 0.1857, 0.2386, ..., 0.4312, 0.4990, 0.5416]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5233, 0.3838, 0.2090, ..., 0.9440, 0.1891, 0.8384]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.472357749938965 seconds + +[40.4, 39.54, 40.0, 39.38, 39.54, 39.86, 39.47, 40.22, 39.37, 39.4] +[138.65] +13.228918313980103 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.472357749938965, 'TIME_S_1KI': 0.19817871335728413, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1834.1895242333412, 'W': 138.65} +[40.4, 39.54, 40.0, 39.38, 39.54, 39.86, 39.47, 40.22, 39.37, 39.4, 41.49, 40.09, 39.59, 39.6, 39.65, 39.39, 39.42, 39.69, 45.2, 39.71] +720.51 +36.0255 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.472357749938965, 'TIME_S_1KI': 0.19817871335728413, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1834.1895242333412, 'W': 138.65, 'J_1KI': 34.710170206713116, 'W_1KI': 2.623810154608936, 'W_D': 102.62450000000001, 'J_D': 1357.611127513051, 'W_D_1KI': 1.9420642279961398, 'J_D_1KI': 0.036751589198117815} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..3a471cf --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28798, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.444172620773315, "TIME_S_1KI": 0.36267006808713503, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1937.146224603653, "W": 139.06, "J_1KI": 67.26669298575086, "W_1KI": 4.8288075560802834, "W_D": 102.9745, "J_D": 1434.4647195847035, "W_D_1KI": 3.5757517883186334, "J_D_1KI": 0.12416667089098664} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..fd0984f --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.06763482093811035} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 956, 1961, ..., 4997943, + 4998975, 5000000]), + col_indices=tensor([ 6, 18, 19, ..., 4986, 4993, 4998]), + values=tensor([0.4638, 0.8169, 0.7421, ..., 0.5926, 0.1207, 0.0279]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.1902, 0.8341, 0.1608, ..., 0.1172, 0.3175, 0.0262]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 0.06763482093811035 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15524', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.659992933273315} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 987, 2000, ..., 4998018, + 4999001, 5000000]), + col_indices=tensor([ 4, 16, 17, ..., 4983, 4988, 4998]), + values=tensor([0.3168, 0.3066, 0.1113, ..., 0.0328, 0.5136, 0.1275]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.9704, 0.5285, 0.3815, ..., 0.6149, 0.3291, 0.1983]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 5.659992933273315 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28798', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.444172620773315} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1005, 2014, ..., 4998103, + 4999056, 5000000]), + col_indices=tensor([ 4, 9, 14, ..., 4980, 4983, 4993]), + values=tensor([0.7293, 0.3445, 0.3834, ..., 0.7374, 0.4715, 0.7945]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.9531, 0.9906, 0.0327, ..., 0.2819, 0.9884, 0.0185]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.444172620773315 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1005, 2014, ..., 4998103, + 4999056, 5000000]), + col_indices=tensor([ 4, 9, 14, ..., 4980, 4983, 4993]), + values=tensor([0.7293, 0.3445, 0.3834, ..., 0.7374, 0.4715, 0.7945]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.9531, 0.9906, 0.0327, ..., 0.2819, 0.9884, 0.0185]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.444172620773315 seconds + +[40.89, 39.68, 39.94, 39.87, 39.8, 39.94, 40.85, 40.96, 39.65, 40.01] +[139.06] +13.930290699005127 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28798, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.444172620773315, 'TIME_S_1KI': 0.36267006808713503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1937.146224603653, 'W': 139.06} +[40.89, 39.68, 39.94, 39.87, 39.8, 39.94, 40.85, 40.96, 39.65, 40.01, 41.32, 40.08, 40.09, 40.7, 40.16, 39.61, 39.76, 39.88, 39.81, 39.64] +721.71 +36.0855 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28798, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.444172620773315, 'TIME_S_1KI': 0.36267006808713503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1937.146224603653, 'W': 139.06, 'J_1KI': 67.26669298575086, 'W_1KI': 4.8288075560802834, 'W_D': 102.9745, 'J_D': 1434.4647195847035, 'W_D_1KI': 3.5757517883186334, 'J_D_1KI': 0.12416667089098664} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..6f0dd43 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 18401, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.148674011230469, "TIME_S_1KI": 0.5515283958062317, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1912.232966468334, "W": 137.63, "J_1KI": 103.92005687018825, "W_1KI": 7.479484810608119, "W_D": 101.70774999999999, "J_D": 1413.1287691296934, "W_D_1KI": 5.5272947122439, "J_D_1KI": 0.3003801267454975} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..60a702d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, 
"MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.093505859375} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1476, 2946, ..., 7497092, + 7498499, 7500000]), + col_indices=tensor([ 3, 4, 9, ..., 4993, 4995, 4998]), + values=tensor([0.7692, 0.9577, 0.6421, ..., 0.4974, 0.8037, 0.4799]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.8552, 0.1634, 0.3191, ..., 0.0243, 0.9305, 0.7580]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 0.093505859375 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '11229', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.407301664352417} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1491, 2991, ..., 7496987, + 7498491, 7500000]), + col_indices=tensor([ 6, 7, 10, ..., 4987, 4988, 4999]), + values=tensor([0.9932, 0.6823, 0.0941, ..., 0.3170, 0.1700, 0.5277]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.7857, 0.3541, 0.7153, ..., 0.0858, 0.7918, 0.2952]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 6.407301664352417 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18401', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.148674011230469} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1524, 2999, ..., 7496978, + 7498500, 7500000]), + col_indices=tensor([ 0, 1, 3, ..., 4975, 4979, 4999]), + values=tensor([0.7847, 0.2112, 0.1435, ..., 0.9949, 0.2225, 0.8434]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.9436, 0.7820, 0.2976, ..., 0.7279, 0.8012, 0.5089]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.148674011230469 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1524, 2999, ..., 7496978, + 7498500, 7500000]), + col_indices=tensor([ 0, 1, 3, ..., 4975, 4979, 4999]), + values=tensor([0.7847, 0.2112, 0.1435, ..., 0.9949, 0.2225, 0.8434]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.9436, 0.7820, 0.2976, ..., 0.7279, 0.8012, 0.5089]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.148674011230469 seconds + +[40.88, 40.15, 40.48, 39.7, 39.9, 39.72, 39.73, 39.77, 39.68, 39.5] +[137.63] +13.894012689590454 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 18401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.148674011230469, 'TIME_S_1KI': 0.5515283958062317, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1912.232966468334, 'W': 137.63} +[40.88, 40.15, 40.48, 39.7, 39.9, 39.72, 39.73, 39.77, 39.68, 39.5, 40.73, 39.74, 41.44, 39.63, 39.52, 39.59, 39.98, 39.43, 39.67, 39.52] +718.445 +35.922250000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 18401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.148674011230469, 'TIME_S_1KI': 0.5515283958062317, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1912.232966468334, 'W': 137.63, 'J_1KI': 103.92005687018825, 'W_1KI': 7.479484810608119, 'W_D': 101.70774999999999, 'J_D': 1413.1287691296934, 'W_D_1KI': 5.5272947122439, 'J_D_1KI': 0.3003801267454975} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..b33500c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4623, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.869210720062256, "TIME_S_1KI": 2.3511163140952314, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1715.391318473816, "W": 123.94000000000001, "J_1KI": 371.0558768059303, "W_1KI": 26.809431105342853, "W_D": 87.73350000000002, "J_D": 1214.2753287019732, "W_D_1KI": 18.977611940298512, "J_D_1KI": 4.105042600107833} diff --git 
a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..915d5a7 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.2556135654449463} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2026, 4021, ..., 9995990, + 9997978, 10000000]), + col_indices=tensor([ 1, 6, 9, ..., 4991, 4993, 4997]), + values=tensor([0.3323, 0.6585, 0.3485, ..., 0.6316, 0.2886, 0.7495]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3914, 0.7431, 0.0627, ..., 0.4218, 0.6007, 0.5832]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 0.2556135654449463 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4107', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 9.327423810958862} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2032, 4046, ..., 9995943, + 9997941, 10000000]), + col_indices=tensor([ 1, 2, 4, ..., 4988, 4990, 4991]), + values=tensor([0.7887, 0.1218, 0.0752, ..., 0.7697, 0.6176, 0.4928]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3655, 0.9637, 0.0803, ..., 0.0942, 0.4831, 0.3974]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 9.327423810958862 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4623', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.869210720062256} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2080, 4074, ..., 9996120, + 9998055, 10000000]), + col_indices=tensor([ 0, 1, 3, ..., 4988, 4989, 4998]), + values=tensor([0.8721, 0.2802, 0.5674, ..., 0.7807, 0.4474, 0.7441]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.9197, 0.0161, 0.2580, ..., 0.5344, 0.2373, 0.6957]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.869210720062256 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2080, 4074, ..., 9996120, + 9998055, 10000000]), + col_indices=tensor([ 0, 1, 3, ..., 4988, 4989, 4998]), + values=tensor([0.8721, 0.2802, 0.5674, ..., 0.7807, 0.4474, 0.7441]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.9197, 0.0161, 0.2580, ..., 0.5344, 0.2373, 0.6957]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.869210720062256 seconds + +[40.45, 40.3, 45.54, 40.13, 39.83, 39.68, 39.66, 40.04, 39.63, 41.25] +[123.94] +13.840497970581055 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4623, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.869210720062256, 'TIME_S_1KI': 2.3511163140952314, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1715.391318473816, 'W': 123.94000000000001} +[40.45, 40.3, 45.54, 40.13, 39.83, 39.68, 39.66, 40.04, 39.63, 41.25, 41.52, 40.34, 39.67, 39.9, 39.74, 39.58, 39.63, 39.49, 39.57, 39.58] +724.1299999999999 +36.20649999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4623, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.869210720062256, 'TIME_S_1KI': 2.3511163140952314, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1715.391318473816, 'W': 123.94000000000001, 'J_1KI': 371.0558768059303, 'W_1KI': 26.809431105342853, 'W_D': 87.73350000000002, 'J_D': 1214.2753287019732, 'W_D_1KI': 18.977611940298512, 'J_D_1KI': 4.105042600107833} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..ef8b561 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3775, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 11.108771324157715, "TIME_S_1KI": 2.94272088057158, "BASELINE_TIME_S": 10, 
"BASELINE_DELAY_S": 10, "J": 1948.3000312638283, "W": 122.57, "J_1KI": 516.1059685467094, "W_1KI": 32.46887417218543, "W_D": 86.1335, "J_D": 1369.1270355132817, "W_D_1KI": 22.816821192052977, "J_D_1KI": 6.044191044252445} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..05d0f73 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.2961087226867676} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2490, 4975, ..., 12494955, + 12497491, 12500000]), + col_indices=tensor([ 0, 1, 2, ..., 4994, 4996, 4998]), + values=tensor([0.9230, 0.7404, 0.0716, ..., 0.8209, 0.3183, 0.8676]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4086, 0.9880, 0.1016, ..., 0.8907, 0.8066, 0.6446]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 0.2961087226867676 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3545', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.858964443206787} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2500, 5017, ..., 12494975, + 12497542, 12500000]), + col_indices=tensor([ 0, 1, 2, ..., 4994, 4997, 4999]), + values=tensor([0.5138, 0.3202, 0.6371, ..., 0.0572, 0.7854, 0.0609]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.5537, 0.0044, 0.4461, ..., 0.4637, 0.2205, 0.0434]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 9.858964443206787 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3775', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 11.108771324157715} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2457, 4970, ..., 12495118, + 12497586, 12500000]), + col_indices=tensor([ 0, 5, 6, ..., 4996, 4997, 4999]), + values=tensor([0.5982, 0.0244, 0.5821, ..., 0.2791, 0.2569, 0.9852]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.1130, 0.4034, 0.5297, ..., 0.0598, 0.7079, 0.8853]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 11.108771324157715 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2457, 4970, ..., 12495118, + 12497586, 12500000]), + col_indices=tensor([ 0, 5, 6, ..., 4996, 4997, 4999]), + values=tensor([0.5982, 0.0244, 0.5821, ..., 0.2791, 0.2569, 0.9852]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.1130, 0.4034, 0.5297, ..., 0.0598, 0.7079, 0.8853]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 11.108771324157715 seconds + +[40.6, 40.02, 45.54, 40.47, 40.31, 40.18, 40.34, 40.25, 40.23, 39.68] +[122.57] +15.89540696144104 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 11.108771324157715, 'TIME_S_1KI': 2.94272088057158, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1948.3000312638283, 'W': 122.57} +[40.6, 40.02, 45.54, 40.47, 40.31, 40.18, 40.34, 40.25, 40.23, 39.68, 40.3, 40.16, 40.89, 40.3, 40.11, 40.08, 40.03, 39.96, 39.79, 39.56] +728.73 +36.4365 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3775, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 11.108771324157715, 'TIME_S_1KI': 2.94272088057158, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1948.3000312638283, 'W': 122.57, 'J_1KI': 516.1059685467094, 'W_1KI': 32.46887417218543, 'W_D': 86.1335, 'J_D': 1369.1270355132817, 'W_D_1KI': 22.816821192052977, 'J_D_1KI': 6.044191044252445} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..aad5720 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 475914, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.308423519134521, "TIME_S_1KI": 0.021660265340239036, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1189.9414702892302, "W": 94.85, "J_1KI": 2.5003287784961783, "W_1KI": 0.19930071399454524, "W_D": 59.248999999999995, "J_D": 743.3088262853622, "W_D_1KI": 0.12449518190261265, "J_D_1KI": 0.000261591762172604} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..c0883dc --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04127812385559082} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 980, 1760, 2093, 785, 4671, 1948, 723, 1459, 1601, + 3059, 3354, 3009, 3506, 1670, 2673, 2868, 3157, 4447, + 1531, 659, 4281, 2627, 1377, 2950, 2583, 1810, 1775, + 4655, 1859, 3752, 2371, 1562, 678, 1349, 716, 1427, + 4744, 619, 1269, 399, 1864, 2208, 1480, 456, 208, + 1221, 4437, 899, 3205, 761, 3187, 2953, 2815, 148, + 2514, 1056, 1872, 3736, 1283, 2888, 2704, 2532, 4106, + 3966, 441, 2333, 946, 4406, 4968, 4149, 1833, 4307, + 4337, 2109, 4989, 3863, 2823, 4489, 3013, 4755, 4523, + 3606, 244, 568, 1109, 1740, 4991, 4457, 3089, 4428, + 4324, 4720, 4209, 3750, 3625, 450, 2141, 1794, 3260, + 4714, 3864, 2541, 1784, 2782, 3268, 3685, 3828, 1583, + 1640, 2421, 3816, 1055, 144, 4272, 260, 1691, 1506, + 1638, 1666, 89, 0, 4364, 3266, 7, 888, 1591, + 2436, 524, 4077, 4995, 1729, 2451, 3127, 2410, 3894, + 4663, 2351, 1158, 1336, 2596, 2075, 768, 1048, 2564, + 3613, 2403, 1235, 967, 1065, 2176, 1047, 2886, 1645, + 3823, 3916, 3782, 98, 2899, 2287, 3273, 4032, 4889, + 1097, 3448, 4328, 2582, 4575, 4148, 3284, 2665, 249, + 1314, 3187, 3430, 4704, 97, 1632, 931, 382, 3849, + 3930, 2162, 1791, 4253, 2474, 3110, 4678, 4692, 474, + 3715, 235, 3222, 4417, 4989, 3151, 1089, 1264, 3742, + 4431, 906, 341, 4151, 340, 1004, 4160, 1654, 1155, + 188, 4818, 2576, 447, 1886, 1055, 2211, 1261, 3706, + 4104, 4081, 2022, 1659, 4405, 105, 257, 1519, 1573, + 2647, 334, 3953, 2722, 2554, 628, 1350, 4587, 1239, + 3896, 49, 2034, 3369, 750, 1198, 743, 3025, 4827, + 4181, 3975, 2556, 3695, 4820, 3552, 3237]), + values=tensor([0.9726, 0.4932, 0.3912, 0.9738, 0.2382, 0.2194, 0.1583, + 0.5409, 0.3403, 0.0032, 0.6572, 0.0849, 0.3926, 0.9886, + 0.6424, 0.9153, 0.1836, 0.7579, 0.8401, 0.5061, 0.5178, + 0.6259, 0.6574, 0.6207, 0.6672, 0.0690, 0.9817, 0.7086, + 0.9781, 0.5592, 0.4518, 0.3448, 0.6644, 0.1469, 0.4823, + 0.3431, 0.4389, 0.7337, 0.3527, 0.9829, 0.5078, 0.8332, + 0.7767, 0.9645, 0.7365, 0.4985, 0.9411, 0.0544, 0.1772, + 0.3375, 0.9957, 0.2067, 0.3235, 0.4169, 0.2185, 0.9745, + 0.4024, 0.1951, 0.5401, 0.1889, 0.4891, 0.1494, 0.3864, + 0.9968, 0.5170, 0.5194, 0.2311, 0.7378, 0.0181, 0.8480, + 0.8709, 0.1149, 0.7166, 0.5193, 0.8973, 0.0220, 0.9337, + 0.7205, 0.2663, 0.4873, 0.6865, 0.7454, 0.1670, 0.6733, + 0.5028, 0.8469, 0.5015, 0.0183, 0.9101, 0.5044, 0.6164, + 0.0686, 0.2021, 0.0101, 0.2356, 0.6960, 0.5089, 0.6651, + 0.9123, 0.3294, 0.9018, 0.9354, 0.4913, 0.5484, 0.4661, + 0.9948, 0.4189, 0.4723, 0.9512, 0.6341, 0.6798, 0.5802, + 0.6623, 0.0188, 0.0129, 0.1565, 0.2382, 0.5939, 0.9749, + 0.6008, 0.1917, 0.4414, 0.2563, 0.1692, 0.9585, 0.0472, + 0.8804, 0.5964, 0.2177, 0.3908, 0.2373, 0.0395, 0.7888, + 0.2774, 0.1837, 0.8164, 0.9307, 0.6092, 0.7632, 0.0856, + 0.4941, 0.2709, 0.6765, 0.8735, 0.4097, 0.3394, 0.4724, + 0.7144, 0.5758, 0.2577, 0.2371, 0.8221, 0.9059, 0.5592, + 0.3289, 0.0171, 0.1330, 0.9826, 0.1215, 0.7990, 0.5552, + 0.1342, 0.7031, 0.1802, 0.8204, 0.3147, 0.1663, 0.8508, + 0.4793, 0.5702, 0.1389, 0.1455, 0.9150, 0.8457, 0.2787, + 0.6364, 0.0265, 0.9823, 0.1357, 0.7315, 0.5366, 0.2059, + 0.1967, 0.4234, 0.3059, 0.0470, 0.8504, 0.4796, 0.2864, + 0.4369, 0.4481, 0.7929, 0.8452, 0.0958, 0.5059, 0.6459, + 0.8388, 0.7761, 0.1938, 0.8212, 0.1919, 0.8336, 0.3582, + 0.0664, 
0.5404, 0.5179, 0.4249, 0.9245, 0.0841, 0.7587, + 0.7759, 0.0382, 0.0647, 0.6744, 0.6055, 0.1518, 0.6047, + 0.4888, 0.3468, 0.0096, 0.8517, 0.8972, 0.6357, 0.5150, + 0.0879, 0.9902, 0.5975, 0.0921, 0.1553, 0.8301, 0.3740, + 0.3374, 0.9082, 0.3028, 0.9956, 0.7728, 0.1467, 0.8331, + 0.1315, 0.6195, 0.4346, 0.3339, 0.9836, 0.5643, 0.7400, + 0.8152, 0.3695, 0.3467, 0.0410, 0.0075]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4444, 0.7904, 0.6664, ..., 0.7984, 0.5487, 0.5407]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.04127812385559082 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25437', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5612115859985352} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4513, 4874, 3027, 172, 3763, 1079, 239, 4529, 4955, + 608, 1066, 2737, 4445, 4271, 3658, 687, 3210, 2517, + 2807, 1524, 3953, 1461, 3621, 1960, 4173, 4521, 4104, + 3884, 561, 4646, 4065, 4845, 1534, 1821, 1059, 2352, + 1896, 3528, 4564, 4230, 603, 1565, 2747, 4968, 3463, + 2434, 4028, 3145, 1082, 2478, 4336, 3976, 44, 4019, + 3984, 3758, 2982, 3832, 4392, 3709, 4379, 1452, 4965, + 778, 3505, 1776, 1029, 1733, 4085, 16, 3544, 611, + 487, 4244, 3327, 1553, 300, 454, 615, 1019, 1638, + 2606, 4708, 2838, 1644, 2517, 3419, 4512, 1409, 4514, + 98, 975, 2304, 4532, 578, 3866, 4888, 1913, 1427, + 2350, 3190, 471, 3972, 3754, 1263, 2752, 1137, 2063, + 1340, 4788, 4731, 1452, 253, 4150, 2425, 3321, 2705, + 2043, 4482, 3426, 39, 1965, 4794, 2137, 4312, 4929, + 4149, 4158, 2375, 1520, 2947, 1537, 114, 3706, 3423, + 2525, 3424, 2121, 1841, 2281, 281, 582, 587, 2819, + 1328, 2949, 349, 1435, 37, 2197, 94, 1382, 1590, + 4694, 734, 27, 2763, 3346, 997, 4500, 432, 3423, + 1345, 2417, 4661, 2561, 4893, 2705, 322, 3467, 246, + 371, 460, 4416, 3517, 2533, 3314, 4727, 4068, 2071, + 89, 3259, 446, 2255, 4308, 1260, 2389, 3030, 3160, + 928, 1241, 1770, 729, 2255, 2572, 3942, 459, 250, + 4351, 2164, 642, 3643, 4211, 2537, 3823, 703, 3549, + 2055, 653, 1366, 4832, 1570, 1682, 1477, 4511, 1980, + 2756, 1220, 4509, 2420, 623, 993, 3225, 1492, 4751, + 3238, 2111, 484, 1439, 2447, 2514, 4082, 4164, 1688, + 2786, 558, 866, 946, 3251, 3488, 4976, 3700, 3363, + 3272, 1906, 4483, 1350, 885, 3495, 1900]), + values=tensor([0.7997, 0.6693, 0.9766, 0.8132, 0.4494, 0.7325, 0.7457, + 0.7556, 0.9866, 0.9047, 0.1758, 0.6478, 0.3725, 0.9918, + 0.9691, 0.3899, 0.4189, 0.2206, 0.4455, 0.8056, 0.8850, + 0.7612, 0.3869, 0.5377, 0.9465, 0.2694, 0.7714, 0.3970, + 0.0074, 0.7396, 0.6772, 0.0796, 0.1408, 0.9831, 0.1256, + 0.1621, 0.7601, 0.7241, 0.4318, 0.4425, 0.2980, 0.3108, + 0.6562, 0.1074, 0.6925, 0.0813, 0.3905, 0.9992, 0.5983, + 0.9779, 0.9273, 0.2532, 0.6041, 0.4773, 0.3669, 0.1258, + 0.1311, 0.3773, 0.8821, 0.3058, 0.0597, 0.9674, 
0.6666, + 0.7396, 0.6305, 0.7148, 0.8106, 0.4078, 0.2370, 0.3958, + 0.1089, 0.2905, 0.3401, 0.3586, 0.4906, 0.7927, 0.7147, + 0.3090, 0.2058, 0.2236, 0.1502, 0.7530, 0.1376, 0.7384, + 0.1472, 0.7286, 0.2760, 0.2427, 0.0512, 0.9512, 0.6082, + 0.0272, 0.0487, 0.6103, 0.0226, 0.4108, 0.7461, 0.2695, + 0.7017, 0.7638, 0.0407, 0.0211, 0.7975, 0.8394, 0.7173, + 0.8557, 0.1529, 0.3846, 0.8527, 0.7100, 0.7245, 0.3654, + 0.7311, 0.7142, 0.9375, 0.5321, 0.1856, 0.8364, 0.4430, + 0.4779, 0.6050, 0.4883, 0.1682, 0.5943, 0.1945, 0.2069, + 0.1270, 0.4728, 0.3645, 0.8206, 0.5488, 0.4640, 0.7881, + 0.3157, 0.0026, 0.7089, 0.5313, 0.9513, 0.9856, 0.8065, + 0.3755, 0.7881, 0.5548, 0.8650, 0.7960, 0.2190, 0.4494, + 0.4403, 0.9214, 0.6868, 0.3061, 0.1264, 0.0552, 0.0018, + 0.0148, 0.0640, 0.2837, 0.7551, 0.1142, 0.0916, 0.0958, + 0.1039, 0.5333, 0.1734, 0.0796, 0.6961, 0.6442, 0.5756, + 0.9631, 0.0278, 0.9861, 0.3260, 0.4440, 0.2476, 0.8157, + 0.9861, 0.8239, 0.5108, 0.1860, 0.3033, 0.0741, 0.8427, + 0.8545, 0.2806, 0.0718, 0.7219, 0.2946, 0.3868, 0.6450, + 0.1470, 0.4273, 0.0373, 0.4562, 0.7872, 0.0251, 0.8634, + 0.3919, 0.4240, 0.0414, 0.7931, 0.4445, 0.1790, 0.3828, + 0.7421, 0.3011, 0.1605, 0.1136, 0.9314, 0.3920, 0.9924, + 0.0352, 0.6870, 0.4156, 0.4859, 0.8722, 0.6951, 0.2675, + 0.8061, 0.5063, 0.5828, 0.5303, 0.7965, 0.2479, 0.8340, + 0.3931, 0.9858, 0.1292, 0.6472, 0.7465, 0.0833, 0.0197, + 0.8484, 0.0914, 0.1498, 0.8894, 0.1548, 0.5990, 0.0393, + 0.7324, 0.7542, 0.7672, 0.8989, 0.1970, 0.1932, 0.9622, + 0.5932, 0.9630, 0.7336, 0.7453, 0.9290]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.8285, 0.4420, 0.7749, ..., 0.7287, 0.4578, 0.9435]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.5612115859985352 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '475914', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.308423519134521} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 698, 3054, 4592, 1629, 4601, 3886, 4804, 318, 1415, + 433, 1872, 1429, 1550, 3511, 4304, 3637, 1101, 2710, + 4079, 541, 1194, 97, 2807, 2811, 3206, 3991, 2286, + 2681, 4835, 38, 1361, 1702, 1987, 1831, 485, 140, + 4362, 3450, 2222, 295, 3370, 591, 1718, 4950, 1639, + 3575, 1461, 4389, 3994, 2356, 1105, 1104, 1761, 4007, + 4669, 3008, 4553, 4279, 1484, 3371, 4533, 863, 587, + 1360, 4727, 3879, 832, 240, 2132, 2582, 1372, 4190, + 3588, 2592, 4310, 2614, 1567, 1660, 604, 4488, 1313, + 3610, 3188, 2899, 3261, 1055, 1112, 4180, 1426, 3909, + 3409, 4510, 3025, 703, 1794, 225, 659, 2212, 1407, + 4739, 1542, 2238, 2858, 4535, 4405, 3841, 3716, 4156, + 4416, 2641, 4372, 1051, 980, 3180, 2782, 497, 394, + 605, 971, 455, 3831, 523, 3209, 733, 4726, 2765, + 226, 3470, 1720, 2299, 2372, 1447, 3202, 1153, 3498, + 4698, 2998, 1466, 363, 4324, 2506, 2090, 2285, 2204, + 303, 4406, 1500, 1826, 4080, 1378, 1816, 980, 66, + 4392, 4266, 2875, 275, 1828, 3108, 989, 3152, 2508, + 1550, 2008, 1066, 529, 18, 1, 3369, 4893, 2556, + 1068, 1671, 1086, 2667, 3511, 2891, 1749, 4140, 3150, + 43, 1921, 1837, 980, 1293, 1802, 2390, 249, 768, + 4105, 1721, 1435, 4658, 2745, 4338, 2320, 3879, 2641, + 3097, 1585, 3887, 4913, 4556, 2794, 2705, 299, 61, + 2384, 1270, 740, 2129, 3392, 3774, 4644, 4192, 4506, + 149, 4748, 3571, 1159, 4587, 1920, 982, 2347, 1650, + 1882, 1955, 3910, 197, 4484, 3655, 4387, 968, 3452, + 181, 2166, 1855, 2452, 189, 3074, 2522, 3426, 135, + 1267, 666, 4928, 2908, 4053, 4593, 448]), + values=tensor([0.8620, 0.7365, 0.7531, 0.5477, 0.5295, 0.4268, 0.2903, + 0.3296, 0.4852, 0.0295, 0.6605, 0.4770, 0.1707, 0.4638, + 0.1229, 0.6813, 0.2237, 0.9317, 0.9546, 0.4741, 0.7915, + 0.3909, 0.2549, 0.7853, 0.8207, 0.9924, 0.8328, 0.0293, + 0.3281, 0.4028, 0.9335, 0.8141, 0.3687, 0.4243, 0.5386, + 0.2123, 0.0695, 0.2792, 0.2453, 0.4935, 0.1675, 0.4387, + 0.5777, 0.6384, 0.5870, 0.2050, 0.9519, 0.0161, 0.0462, + 0.8312, 0.5114, 0.5703, 0.5170, 0.0110, 0.6229, 0.7339, + 0.2337, 0.7709, 0.7844, 0.2062, 0.2004, 0.9990, 0.4625, + 0.4209, 0.7064, 0.0680, 0.6043, 0.0073, 0.1383, 0.5359, + 0.1641, 0.0316, 0.0479, 0.9788, 0.0764, 0.0936, 0.1603, + 0.1581, 0.8855, 0.4285, 0.9101, 0.6054, 0.5164, 0.1839, + 0.2783, 0.0513, 0.4451, 0.7375, 0.3333, 0.1348, 0.3539, + 0.0102, 0.1620, 0.4960, 0.1201, 0.8615, 0.2151, 0.0085, + 0.8133, 0.8439, 0.5713, 0.6595, 0.6728, 0.2738, 0.1487, + 0.3205, 0.6933, 0.0963, 0.6731, 0.6903, 0.0043, 0.7900, + 0.7911, 0.9496, 0.4295, 0.5758, 0.2659, 0.3025, 0.6145, + 0.1511, 0.7265, 0.9480, 0.6751, 0.8138, 0.6361, 0.4149, + 0.8899, 0.3218, 0.3413, 0.2054, 0.1555, 0.4398, 0.9946, + 0.6820, 0.1566, 0.7238, 0.9562, 0.1023, 0.0696, 0.9724, + 0.8182, 0.0031, 0.0289, 0.5187, 0.0063, 0.5262, 0.9232, + 0.3694, 0.0136, 0.3019, 0.9633, 0.8770, 0.0826, 0.1792, + 0.6372, 0.5719, 0.7979, 0.1369, 0.9923, 0.7514, 0.0627, + 0.3337, 0.0132, 0.9026, 0.1169, 0.4065, 0.7302, 0.3087, + 0.4276, 0.6874, 0.0705, 0.4727, 0.3286, 0.7188, 0.3727, + 0.5310, 0.1979, 0.9773, 0.3076, 0.3372, 0.2546, 0.3340, + 0.4532, 0.0609, 0.2279, 0.8651, 0.8162, 0.8251, 0.1216, + 0.3049, 0.0805, 0.1284, 0.1859, 0.3690, 0.3435, 0.7762, + 0.7083, 0.6529, 0.8556, 0.1421, 0.4528, 0.4045, 0.9221, + 0.6914, 0.6437, 0.8815, 0.0609, 0.9680, 0.2115, 0.5295, + 0.5418, 0.8646, 0.6735, 0.1927, 0.2578, 0.4564, 0.0603, + 0.1414, 0.3382, 0.0772, 0.6503, 0.3586, 0.8775, 0.8840, + 0.9215, 0.4825, 0.2733, 0.0423, 0.6825, 0.8144, 
0.0837, + 0.2758, 0.6188, 0.3276, 0.0762, 0.7932, 0.5621, 0.9067, + 0.7339, 0.1976, 0.8462, 0.5736, 0.2659, 0.7486, 0.3053, + 0.7429, 0.4272, 0.8072, 0.5183, 0.4677]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4130, 0.3379, 0.7498, ..., 0.0848, 0.9618, 0.5893]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.308423519134521 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 698, 3054, 4592, 1629, 4601, 3886, 4804, 318, 1415, + 433, 1872, 1429, 1550, 3511, 4304, 3637, 1101, 2710, + 4079, 541, 1194, 97, 2807, 2811, 3206, 3991, 2286, + 2681, 4835, 38, 1361, 1702, 1987, 1831, 485, 140, + 4362, 3450, 2222, 295, 3370, 591, 1718, 4950, 1639, + 3575, 1461, 4389, 3994, 2356, 1105, 1104, 1761, 4007, + 4669, 3008, 4553, 4279, 1484, 3371, 4533, 863, 587, + 1360, 4727, 3879, 832, 240, 2132, 2582, 1372, 4190, + 3588, 2592, 4310, 2614, 1567, 1660, 604, 4488, 1313, + 3610, 3188, 2899, 3261, 1055, 1112, 4180, 1426, 3909, + 3409, 4510, 3025, 703, 1794, 225, 659, 2212, 1407, + 4739, 1542, 2238, 2858, 4535, 4405, 3841, 3716, 4156, + 4416, 2641, 4372, 1051, 980, 3180, 2782, 497, 394, + 605, 971, 455, 3831, 523, 3209, 733, 4726, 2765, + 226, 3470, 1720, 2299, 2372, 1447, 3202, 1153, 3498, + 4698, 2998, 1466, 363, 4324, 2506, 2090, 2285, 2204, + 303, 4406, 1500, 1826, 4080, 1378, 1816, 980, 66, + 4392, 4266, 2875, 275, 1828, 3108, 989, 3152, 2508, + 1550, 2008, 1066, 529, 18, 1, 3369, 4893, 2556, + 1068, 1671, 1086, 2667, 3511, 2891, 1749, 4140, 3150, + 43, 1921, 1837, 980, 1293, 1802, 2390, 249, 768, + 4105, 1721, 1435, 4658, 2745, 4338, 2320, 3879, 2641, + 3097, 1585, 3887, 4913, 4556, 2794, 2705, 299, 61, + 2384, 1270, 740, 2129, 3392, 3774, 4644, 4192, 4506, + 149, 4748, 3571, 1159, 4587, 1920, 982, 2347, 1650, + 1882, 1955, 3910, 197, 4484, 3655, 4387, 968, 3452, + 181, 2166, 1855, 2452, 189, 3074, 2522, 3426, 135, + 1267, 666, 4928, 2908, 4053, 4593, 448]), + values=tensor([0.8620, 0.7365, 0.7531, 0.5477, 0.5295, 0.4268, 0.2903, + 0.3296, 0.4852, 0.0295, 0.6605, 0.4770, 0.1707, 0.4638, + 0.1229, 0.6813, 0.2237, 0.9317, 0.9546, 0.4741, 0.7915, + 0.3909, 0.2549, 0.7853, 0.8207, 0.9924, 0.8328, 0.0293, + 0.3281, 0.4028, 0.9335, 0.8141, 0.3687, 0.4243, 0.5386, + 0.2123, 0.0695, 0.2792, 0.2453, 0.4935, 0.1675, 0.4387, + 0.5777, 0.6384, 0.5870, 0.2050, 0.9519, 0.0161, 0.0462, + 0.8312, 0.5114, 0.5703, 0.5170, 0.0110, 0.6229, 0.7339, + 0.2337, 0.7709, 0.7844, 0.2062, 0.2004, 0.9990, 0.4625, + 0.4209, 0.7064, 0.0680, 0.6043, 0.0073, 0.1383, 0.5359, + 0.1641, 0.0316, 0.0479, 0.9788, 0.0764, 0.0936, 0.1603, + 0.1581, 0.8855, 0.4285, 0.9101, 0.6054, 0.5164, 0.1839, + 0.2783, 0.0513, 0.4451, 0.7375, 0.3333, 0.1348, 0.3539, + 0.0102, 0.1620, 0.4960, 0.1201, 0.8615, 0.2151, 0.0085, + 0.8133, 0.8439, 0.5713, 0.6595, 0.6728, 0.2738, 0.1487, + 0.3205, 0.6933, 0.0963, 0.6731, 0.6903, 0.0043, 0.7900, + 0.7911, 0.9496, 0.4295, 0.5758, 0.2659, 0.3025, 0.6145, + 0.1511, 0.7265, 0.9480, 0.6751, 0.8138, 0.6361, 0.4149, + 0.8899, 0.3218, 0.3413, 0.2054, 0.1555, 0.4398, 0.9946, + 0.6820, 0.1566, 0.7238, 0.9562, 
0.1023, 0.0696, 0.9724, + 0.8182, 0.0031, 0.0289, 0.5187, 0.0063, 0.5262, 0.9232, + 0.3694, 0.0136, 0.3019, 0.9633, 0.8770, 0.0826, 0.1792, + 0.6372, 0.5719, 0.7979, 0.1369, 0.9923, 0.7514, 0.0627, + 0.3337, 0.0132, 0.9026, 0.1169, 0.4065, 0.7302, 0.3087, + 0.4276, 0.6874, 0.0705, 0.4727, 0.3286, 0.7188, 0.3727, + 0.5310, 0.1979, 0.9773, 0.3076, 0.3372, 0.2546, 0.3340, + 0.4532, 0.0609, 0.2279, 0.8651, 0.8162, 0.8251, 0.1216, + 0.3049, 0.0805, 0.1284, 0.1859, 0.3690, 0.3435, 0.7762, + 0.7083, 0.6529, 0.8556, 0.1421, 0.4528, 0.4045, 0.9221, + 0.6914, 0.6437, 0.8815, 0.0609, 0.9680, 0.2115, 0.5295, + 0.5418, 0.8646, 0.6735, 0.1927, 0.2578, 0.4564, 0.0603, + 0.1414, 0.3382, 0.0772, 0.6503, 0.3586, 0.8775, 0.8840, + 0.9215, 0.4825, 0.2733, 0.0423, 0.6825, 0.8144, 0.0837, + 0.2758, 0.6188, 0.3276, 0.0762, 0.7932, 0.5621, 0.9067, + 0.7339, 0.1976, 0.8462, 0.5736, 0.2659, 0.7486, 0.3053, + 0.7429, 0.4272, 0.8072, 0.5183, 0.4677]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4130, 0.3379, 0.7498, ..., 0.0848, 0.9618, 0.5893]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.308423519134521 seconds + +[39.69, 38.64, 38.76, 38.66, 38.74, 43.79, 39.13, 39.04, 39.27, 38.98] +[94.85] +12.54550838470459 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 475914, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.308423519134521, 'TIME_S_1KI': 0.021660265340239036, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1189.9414702892302, 'W': 94.85} +[39.69, 38.64, 38.76, 38.66, 38.74, 43.79, 39.13, 39.04, 39.27, 38.98, 39.59, 39.06, 44.18, 38.71, 38.85, 39.28, 38.92, 38.81, 38.79, 40.52] +712.02 +35.601 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 475914, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.308423519134521, 'TIME_S_1KI': 0.021660265340239036, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1189.9414702892302, 'W': 94.85, 'J_1KI': 2.5003287784961783, 'W_1KI': 0.19930071399454524, 'W_D': 59.248999999999995, 'J_D': 743.3088262853622, 'W_D_1KI': 0.12449518190261265, 'J_D_1KI': 0.000261591762172604} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..ae25247 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 463602, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.329545974731445, "TIME_S_1KI": 0.02228106430673605, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1198.2176897239685, "W": 95.11, "J_1KI": 2.5845826586683587, "W_1KI": 0.20515442124926123, "W_D": 59.724, "J_D": 752.4167101364135, "W_D_1KI": 0.12882601886963388, "J_D_1KI": 0.00027788063655815524} diff --git a/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 
0000000..3c48d35 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/epyc_7313p_16_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01933002471923828} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([ 393, 4092, 1605, ..., 4543, 205, 1898]), + values=tensor([0.0363, 0.1593, 0.8850, ..., 0.0884, 0.4054, 0.0261]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.7170, 0.2316, 0.8921, ..., 0.0306, 0.1187, 0.4918]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.01933002471923828 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '54319', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.230255126953125} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([4735, 1903, 2985, ..., 3889, 4420, 4686]), + values=tensor([0.8501, 0.7899, 0.6223, ..., 0.9437, 0.2014, 0.9727]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.5741, 0.3449, 0.6519, ..., 0.7953, 0.3519, 0.0286]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 1.230255126953125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '463602', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.329545974731445} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1248, 1249, 1250]), + col_indices=tensor([1727, 4803, 1040, ..., 3710, 1053, 4648]), + values=tensor([0.0640, 0.8338, 0.2393, ..., 0.0278, 0.9877, 0.3687]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.8593, 0.3881, 0.7226, ..., 0.2122, 0.1433, 0.4534]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.329545974731445 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1248, 1249, 1250]), + col_indices=tensor([1727, 4803, 1040, ..., 3710, 1053, 4648]), + values=tensor([0.0640, 0.8338, 0.2393, ..., 0.0278, 0.9877, 0.3687]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.8593, 0.3881, 0.7226, ..., 0.2122, 0.1433, 0.4534]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.329545974731445 seconds + +[40.79, 39.42, 39.16, 38.83, 39.21, 39.28, 39.68, 38.8, 39.35, 39.35] +[95.11] +12.598230361938477 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 463602, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.329545974731445, 'TIME_S_1KI': 0.02228106430673605, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1198.2176897239685, 'W': 95.11} +[40.79, 39.42, 39.16, 38.83, 39.21, 39.28, 39.68, 38.8, 39.35, 39.35, 40.35, 38.9, 38.98, 39.04, 38.98, 40.01, 39.46, 39.09, 39.7, 39.17] +707.72 +35.386 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 463602, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.329545974731445, 'TIME_S_1KI': 0.02228106430673605, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1198.2176897239685, 'W': 95.11, 'J_1KI': 2.5845826586683587, 'W_1KI': 0.20515442124926123, 'W_D': 59.724, 'J_D': 752.4167101364135, 'W_D_1KI': 0.12882601886963388, 'J_D_1KI': 0.00027788063655815524} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..c7b5a2b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 33105, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.270436525344849, "TIME_S_1KI": 0.31023822761953934, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1243.494291753769, "W": 88.28, "J_1KI": 37.56212933858236, "W_1KI": 2.6666666666666665, "W_D": 71.58725, "J_D": 1008.3635788100362, "W_D_1KI": 2.1624301465035494, "J_D_1KI": 0.0653203487842788} diff --git 
a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..31fde2c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.047393798828125} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 25, ..., 999973, + 999987, 1000000]), + col_indices=tensor([ 2538, 10020, 11588, ..., 84720, 92719, 95287]), + values=tensor([0.8172, 0.5815, 0.2513, ..., 0.2819, 0.8178, 0.1271]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.9323, 0.3660, 0.2073, ..., 0.7127, 0.8566, 0.0523]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.047393798828125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22154', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.0265889167785645} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 20, ..., 999976, + 999985, 1000000]), + col_indices=tensor([10428, 14843, 15503, ..., 86013, 91025, 96391]), + values=tensor([0.0148, 0.3731, 0.6426, ..., 0.4125, 0.5086, 0.3848]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0982, 0.0147, 0.0440, ..., 0.9267, 0.0489, 0.6248]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 7.0265889167785645 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33105', '-ss', '100000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.270436525344849} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 12, 18, ..., 999974, + 999988, 1000000]), + col_indices=tensor([ 697, 33076, 59577, ..., 88840, 91058, 94574]), + values=tensor([0.8969, 0.3012, 0.5025, ..., 0.5812, 0.6517, 0.5598]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0089, 0.5150, 0.8606, ..., 0.9603, 0.9290, 0.1786]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.270436525344849 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 12, 18, ..., 999974, + 999988, 1000000]), + col_indices=tensor([ 697, 33076, 59577, ..., 88840, 91058, 94574]), + values=tensor([0.8969, 0.3012, 0.5025, ..., 0.5812, 0.6517, 0.5598]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0089, 0.5150, 0.8606, ..., 0.9603, 0.9290, 0.1786]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.270436525344849 seconds + +[18.44, 17.49, 17.8, 17.92, 17.7, 20.94, 18.06, 17.75, 21.44, 18.07] +[88.28] +14.085798501968384 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33105, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.270436525344849, 'TIME_S_1KI': 0.31023822761953934, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1243.494291753769, 'W': 88.28} +[18.44, 17.49, 17.8, 17.92, 17.7, 20.94, 18.06, 17.75, 21.44, 18.07, 18.18, 18.15, 17.95, 18.02, 21.88, 18.26, 18.15, 17.92, 17.92, 18.32] +333.855 +16.69275 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33105, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.270436525344849, 'TIME_S_1KI': 0.31023822761953934, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1243.494291753769, 'W': 88.28, 'J_1KI': 37.56212933858236, 'W_1KI': 2.6666666666666665, 'W_D': 71.58725, 'J_D': 1008.3635788100362, 'W_D_1KI': 2.1624301465035494, 'J_D_1KI': 0.0653203487842788} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..8d42109 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2748, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.477163553237915, "TIME_S_1KI": 3.8126504924446563, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1424.6626942634582, "W": 81.1, "J_1KI": 518.4362060638495, "W_1KI": 29.512372634643373, "W_D": 64.67649999999999, "J_D": 1136.1553236193656, "W_D_1KI": 23.535844250363898, "J_D_1KI": 8.564717703916994} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..280d994 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.4098339080810547} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 105, 217, ..., 9999774, + 9999878, 10000000]), + col_indices=tensor([ 2925, 3045, 3251, ..., 98848, 99298, 99703]), + values=tensor([0.4813, 0.4380, 0.0490, ..., 0.5758, 0.0326, 0.9259]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.0648, 0.1204, 0.6207, ..., 0.1724, 0.6764, 0.4459]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 0.4098339080810547 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2562', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.787212610244751} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 90, 201, ..., 9999780, + 9999895, 10000000]), + col_indices=tensor([ 1242, 4056, 4707, ..., 96589, 97728, 98727]), + values=tensor([0.8897, 0.2716, 0.4760, ..., 0.6356, 0.3047, 0.7796]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4134, 0.7359, 0.5031, ..., 0.9568, 0.9528, 0.4063]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 9.787212610244751 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2748', '-ss', '100000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.477163553237915} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 82, 171, ..., 9999801, + 9999897, 10000000]), + col_indices=tensor([ 1661, 2279, 2856, ..., 99449, 99691, 99739]), + values=tensor([0.1663, 0.0376, 0.1009, ..., 0.7118, 0.9261, 0.1836]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7724, 0.0559, 0.5235, ..., 0.7708, 0.2517, 0.0642]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.477163553237915 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 82, 171, ..., 9999801, + 9999897, 10000000]), + col_indices=tensor([ 1661, 2279, 2856, ..., 99449, 99691, 99739]), + values=tensor([0.1663, 0.0376, 0.1009, ..., 0.7118, 0.9261, 0.1836]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7724, 0.0559, 0.5235, ..., 0.7708, 0.2517, 0.0642]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.477163553237915 seconds + +[18.37, 17.8, 17.96, 17.86, 22.53, 18.03, 17.75, 18.1, 18.23, 17.82] +[81.1] +17.56674098968506 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.477163553237915, 'TIME_S_1KI': 3.8126504924446563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1424.6626942634582, 'W': 81.1} +[18.37, 17.8, 17.96, 17.86, 22.53, 18.03, 17.75, 18.1, 18.23, 17.82, 18.37, 17.83, 17.94, 18.39, 17.8, 17.92, 17.99, 17.94, 18.19, 17.86] +328.47 +16.4235 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2748, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.477163553237915, 'TIME_S_1KI': 3.8126504924446563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1424.6626942634582, 'W': 81.1, 'J_1KI': 518.4362060638495, 'W_1KI': 29.512372634643373, 'W_D': 64.67649999999999, 'J_D': 1136.1553236193656, 'W_D_1KI': 23.535844250363898, 'J_D_1KI': 8.564717703916994} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..d39bedd --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 65044, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.857811450958252, "TIME_S_1KI": 0.1669302541503944, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1160.4457891416548, "W": 82.65999999999998, "J_1KI": 17.840935199890147, "W_1KI": 1.2708320521493142, "W_D": 66.37149999999998, "J_D": 931.7750749336478, 
"W_D_1KI": 1.0204092614230365, "J_D_1KI": 0.015687984463179334} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..a139e8c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03411436080932617} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99997, 99998, + 100000]), + col_indices=tensor([61814, 31861, 93735, ..., 37976, 26709, 88923]), + values=tensor([0.4964, 0.9275, 0.0463, ..., 0.6388, 0.5613, 0.1901]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.9466, 0.9805, 0.4146, ..., 0.4981, 0.9805, 0.0095]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.03411436080932617 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '30778', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.968464136123657} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 99993, 99997, + 100000]), + col_indices=tensor([86302, 87189, 44148, ..., 4090, 44893, 91495]), + values=tensor([0.5947, 0.5243, 0.4325, ..., 0.8552, 0.8488, 0.7980]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2800, 0.7786, 0.6115, ..., 0.5946, 0.9897, 0.5537]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 4.968464136123657 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '65044', '-ss', '100000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.857811450958252} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 99999, 99999, + 100000]), + col_indices=tensor([27172, 43192, 23755, ..., 52370, 88374, 3897]), + values=tensor([0.0211, 0.7600, 0.3262, ..., 0.1220, 0.7210, 0.9662]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8254, 0.5550, 0.3634, ..., 0.5298, 0.8710, 0.0274]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.857811450958252 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 99999, 99999, + 100000]), + col_indices=tensor([27172, 43192, 23755, ..., 52370, 88374, 3897]), + values=tensor([0.0211, 0.7600, 0.3262, ..., 0.1220, 0.7210, 0.9662]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8254, 0.5550, 0.3634, ..., 0.5298, 0.8710, 0.0274]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.857811450958252 seconds + +[18.23, 17.9, 18.0, 17.99, 18.15, 18.02, 18.22, 19.12, 18.05, 18.53] +[82.66] +14.038782835006714 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 65044, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.857811450958252, 'TIME_S_1KI': 0.1669302541503944, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1160.4457891416548, 'W': 82.65999999999998} +[18.23, 17.9, 18.0, 17.99, 18.15, 18.02, 18.22, 19.12, 18.05, 18.53, 18.28, 17.9, 18.17, 17.79, 18.21, 17.83, 17.84, 17.79, 18.22, 18.1] +325.77 +16.2885 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 65044, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.857811450958252, 'TIME_S_1KI': 0.1669302541503944, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1160.4457891416548, 'W': 82.65999999999998, 'J_1KI': 17.840935199890147, 'W_1KI': 1.2708320521493142, 'W_D': 66.37149999999998, 'J_D': 931.7750749336478, 'W_D_1KI': 1.0204092614230365, 'J_D_1KI': 0.015687984463179334} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..4c80d3b --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 45510, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.42077898979187, "TIME_S_1KI": 0.22897778487787013, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1214.5695349788666, "W": 87.39, "J_1KI": 26.687970445591443, "W_1KI": 1.920237310481213, "W_D": 71.04925, "J_D": 987.4614318926334, "W_D_1KI": 1.561178861788618, "J_D_1KI": 0.03430408397689778} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..d883b1c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.039800167083740234} 
+ +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 10, ..., 499993, 499997, + 500000]), + col_indices=tensor([ 4828, 48889, 52503, ..., 31911, 36084, 76746]), + values=tensor([0.4793, 0.0828, 0.1169, ..., 0.5530, 0.3033, 0.0718]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.5059, 0.6298, 0.1664, ..., 0.1879, 0.5431, 0.8952]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.039800167083740234 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '26381', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.086489200592041} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 499989, 499994, + 500000]), + col_indices=tensor([14519, 22983, 80951, ..., 84187, 95762, 97051]), + values=tensor([0.1542, 0.8524, 0.3039, ..., 0.4189, 0.6409, 0.4295]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.0352, 0.2725, 0.4170, ..., 0.1491, 0.4370, 0.8032]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 6.086489200592041 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '45510', '-ss', '100000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.42077898979187} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 19, ..., 499990, 499995, + 500000]), + col_indices=tensor([14786, 31808, 59751, ..., 39791, 89593, 95677]), + values=tensor([0.6756, 0.3891, 0.0863, ..., 0.6881, 0.4209, 0.2818]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.4948, 0.2234, 0.2049, ..., 0.0447, 0.7948, 0.3022]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.42077898979187 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 19, ..., 499990, 499995, + 500000]), + col_indices=tensor([14786, 31808, 59751, ..., 39791, 89593, 95677]), + values=tensor([0.6756, 0.3891, 0.0863, ..., 0.6881, 0.4209, 0.2818]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.4948, 0.2234, 0.2049, ..., 0.0447, 0.7948, 0.3022]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.42077898979187 seconds + +[19.52, 19.27, 18.28, 17.93, 17.92, 18.18, 17.97, 18.17, 18.27, 18.17] +[87.39] +13.898266792297363 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 45510, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.42077898979187, 'TIME_S_1KI': 0.22897778487787013, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1214.5695349788666, 'W': 87.39} +[19.52, 19.27, 18.28, 17.93, 17.92, 18.18, 17.97, 18.17, 18.27, 18.17, 18.18, 17.95, 17.9, 18.1, 18.19, 18.03, 17.88, 17.96, 18.05, 17.66] +326.815 +16.34075 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 45510, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.42077898979187, 'TIME_S_1KI': 0.22897778487787013, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1214.5695349788666, 'W': 87.39, 'J_1KI': 26.687970445591443, 'W_1KI': 1.920237310481213, 'W_D': 71.04925, 'J_D': 987.4614318926334, 'W_D_1KI': 1.561178861788618, 'J_D_1KI': 0.03430408397689778} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..df907c2 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 251263, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.831458568572998, "TIME_S_1KI": 0.04310805239359952, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1035.0438996100424, "W": 74.32999999999998, "J_1KI": 4.119364568639403, "W_1KI": 0.2958254896264073, "W_D": 58.06174999999998, "J_D": 808.5088139134047, "W_D_1KI": 
0.2310795859318721, "J_D_1KI": 0.0009196721599752933} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..bb6d734 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.019912242889404297} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 9999, 9999, 10000]), + col_indices=tensor([1836, 4826, 4334, ..., 9720, 1658, 3253]), + values=tensor([0.6220, 0.1290, 0.9015, ..., 0.3260, 0.3650, 0.7979]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.5720, 0.7293, 0.6280, ..., 0.0388, 0.6575, 0.1842]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.019912242889404297 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52731', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.203561544418335} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 5, ..., 9999, 9999, 10000]), + col_indices=tensor([1066, 3027, 5018, ..., 516, 4404, 8191]), + values=tensor([0.9408, 0.5840, 0.0232, ..., 0.8231, 0.8506, 0.7636]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.2181, 0.5672, 0.6634, ..., 0.2110, 0.3174, 0.6218]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 2.203561544418335 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '251263', '-ss', '10000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.831458568572998} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9999, 10000]), + col_indices=tensor([6180, 2035, 3071, ..., 490, 6496, 2315]), + values=tensor([0.2073, 0.5439, 0.2551, ..., 0.7953, 0.4550, 0.0057]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.4967, 0.8117, 0.4603, ..., 0.8210, 0.9832, 0.0501]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.831458568572998 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9999, 10000]), + col_indices=tensor([6180, 2035, 3071, ..., 490, 6496, 2315]), + values=tensor([0.2073, 0.5439, 0.2551, ..., 0.7953, 0.4550, 0.0057]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.4967, 0.8117, 0.4603, ..., 0.8210, 0.9832, 0.0501]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.831458568572998 seconds + +[18.25, 18.21, 18.06, 17.61, 18.2, 17.92, 18.19, 17.85, 18.22, 17.78] +[74.33] +13.924981832504272 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 251263, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.831458568572998, 'TIME_S_1KI': 0.04310805239359952, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1035.0438996100424, 'W': 74.32999999999998} +[18.25, 18.21, 18.06, 17.61, 18.2, 17.92, 18.19, 17.85, 18.22, 17.78, 19.64, 17.79, 18.1, 18.15, 18.23, 17.78, 18.02, 18.14, 18.11, 17.9] +325.365 +16.268250000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 251263, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.831458568572998, 'TIME_S_1KI': 0.04310805239359952, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1035.0438996100424, 'W': 74.32999999999998, 'J_1KI': 4.119364568639403, 'W_1KI': 0.2958254896264073, 'W_D': 58.06174999999998, 'J_D': 808.5088139134047, 'W_D_1KI': 0.2310795859318721, 'J_D_1KI': 0.0009196721599752933} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..610ea48 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 195195, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.530660390853882, "TIME_S_1KI": 0.05394943718258092, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1115.5897302937508, "W": 79.81, "J_1KI": 5.7152577181472415, "W_1KI": 0.40887317810394735, "W_D": 63.323, "J_D": 885.1332977244854, "W_D_1KI": 0.3244089244089244, "J_D_1KI": 0.0016619735362531027} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..d29559f --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02070331573486328} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 12, 18, ..., 99984, 99988, + 100000]), + col_indices=tensor([ 729, 732, 881, ..., 6002, 8211, 9107]), + values=tensor([0.1473, 0.0535, 0.1985, ..., 0.7529, 0.2592, 0.5040]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.7970, 0.9066, 0.2901, ..., 0.5249, 0.8444, 0.0204]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.02070331573486328 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50716', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.728132724761963} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 15, 22, ..., 99985, 99992, + 100000]), + col_indices=tensor([ 992, 2241, 2699, ..., 7485, 9702, 9755]), + values=tensor([0.1587, 0.3354, 0.8907, ..., 0.7458, 0.3952, 0.4445]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.7531, 0.7793, 0.1410, ..., 0.7186, 0.3031, 0.2892]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 2.728132724761963 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '195195', '-ss', '10000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.530660390853882} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 99979, 99991, + 100000]), + col_indices=tensor([ 74, 913, 1678, ..., 8042, 8094, 8596]), + values=tensor([0.3009, 0.6152, 0.9919, ..., 0.0065, 0.7111, 0.2350]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.1186, 0.1112, 0.0471, ..., 0.5653, 0.6270, 0.7376]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.530660390853882 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 99979, 99991, + 100000]), + col_indices=tensor([ 74, 913, 1678, ..., 8042, 8094, 8596]), + values=tensor([0.3009, 0.6152, 0.9919, ..., 0.0065, 0.7111, 0.2350]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.1186, 0.1112, 0.0471, ..., 0.5653, 0.6270, 0.7376]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.530660390853882 seconds + +[18.85, 21.66, 18.61, 18.04, 18.26, 18.19, 18.11, 17.83, 18.59, 17.86] +[79.81] +13.978069543838501 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 195195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.530660390853882, 'TIME_S_1KI': 0.05394943718258092, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1115.5897302937508, 'W': 79.81} +[18.85, 21.66, 18.61, 18.04, 18.26, 18.19, 18.11, 17.83, 18.59, 17.86, 18.27, 17.86, 18.11, 17.94, 18.2, 17.78, 18.07, 17.82, 17.85, 18.66] +329.74 +16.487000000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 195195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.530660390853882, 'TIME_S_1KI': 0.05394943718258092, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1115.5897302937508, 'W': 79.81, 'J_1KI': 5.7152577181472415, 'W_1KI': 0.40887317810394735, 'W_D': 63.323, 'J_D': 885.1332977244854, 'W_D_1KI': 0.3244089244089244, 'J_D_1KI': 0.0016619735362531027} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..9a2b891 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 58200, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.524534702301025, "TIME_S_1KI": 0.18083392959280112, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1230.7857535743713, "W": 87.63, "J_1KI": 21.147521539078543, "W_1KI": 1.5056701030927835, "W_D": 71.18625, "J_D": 999.8290807986259, "W_D_1KI": 1.223131443298969, "J_D_1KI": 0.02101600418039466} diff --git 
a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..b0a27e2 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.032953500747680664} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 87, 187, ..., 999811, + 999895, 1000000]), + col_indices=tensor([ 60, 162, 170, ..., 9440, 9828, 9931]), + values=tensor([0.9691, 0.2545, 0.9233, ..., 0.5616, 0.3084, 0.5234]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0498, 0.1923, 0.6628, ..., 0.9993, 0.6267, 0.7810]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.032953500747680664 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '31863', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.748403549194336} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 110, 213, ..., 999801, + 999901, 1000000]), + col_indices=tensor([ 77, 119, 129, ..., 9737, 9950, 9990]), + values=tensor([0.4475, 0.9311, 0.1906, ..., 0.1630, 0.9417, 0.6731]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2219, 0.3377, 0.8817, ..., 0.6372, 0.3631, 0.6898]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 5.748403549194336 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '58200', '-ss', '10000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.524534702301025} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 116, 213, ..., 999804, + 999907, 1000000]), + col_indices=tensor([ 96, 100, 135, ..., 9713, 9783, 9969]), + values=tensor([0.2374, 0.5111, 0.2281, ..., 0.8006, 0.1634, 0.0785]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.4187, 0.6286, 0.6485, ..., 0.1996, 0.5955, 0.8769]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.524534702301025 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 116, 213, ..., 999804, + 999907, 1000000]), + col_indices=tensor([ 96, 100, 135, ..., 9713, 9783, 9969]), + values=tensor([0.2374, 0.5111, 0.2281, ..., 0.8006, 0.1634, 0.0785]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.4187, 0.6286, 0.6485, ..., 0.1996, 0.5955, 0.8769]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.524534702301025 seconds + +[18.15, 17.92, 18.14, 18.3, 18.24, 17.98, 17.94, 18.28, 17.96, 17.99] +[87.63] +14.045255661010742 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 58200, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.524534702301025, 'TIME_S_1KI': 0.18083392959280112, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.7857535743713, 'W': 87.63} +[18.15, 17.92, 18.14, 18.3, 18.24, 17.98, 17.94, 18.28, 17.96, 17.99, 18.28, 18.07, 17.88, 17.88, 17.9, 18.02, 21.74, 18.02, 18.41, 17.97] +328.875 +16.44375 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 58200, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.524534702301025, 'TIME_S_1KI': 0.18083392959280112, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.7857535743713, 'W': 87.63, 'J_1KI': 21.147521539078543, 'W_1KI': 1.5056701030927835, 'W_D': 71.18625, 'J_D': 999.8290807986259, 'W_D_1KI': 1.223131443298969, 'J_D_1KI': 0.02101600418039466} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..6f3f4c5 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8756, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.44620156288147, "TIME_S_1KI": 1.193033527053617, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1333.577045211792, "W": 83.47, "J_1KI": 152.30436788622566, "W_1KI": 9.532891731384193, "W_D": 66.97425, "J_D": 1070.0290214481354, "W_D_1KI": 7.648955002284148, "J_D_1KI": 0.8735672684198433} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..e9fd20d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.14447855949401855} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse 
CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 507, 1042, ..., 4998981, + 4999514, 5000000]), + col_indices=tensor([ 8, 38, 72, ..., 9951, 9971, 9980]), + values=tensor([0.6058, 0.5976, 0.8000, ..., 0.1658, 0.6430, 0.8003]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.9071, 0.4156, 0.9536, ..., 0.8291, 0.1377, 0.0392]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 0.14447855949401855 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7267', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 8.714413404464722} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 473, 945, ..., 4999048, + 4999511, 5000000]), + col_indices=tensor([ 4, 25, 47, ..., 9937, 9967, 9993]), + values=tensor([0.2180, 0.8351, 0.6646, ..., 0.1409, 0.2302, 0.7325]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.9797, 0.6380, 0.6196, ..., 0.0914, 0.5364, 0.9534]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 8.714413404464722 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8756', '-ss', '10000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.44620156288147} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 484, 1014, ..., 4998952, + 4999472, 5000000]), + col_indices=tensor([ 28, 62, 89, ..., 9928, 9935, 9940]), + values=tensor([0.3908, 0.2484, 0.4500, ..., 0.3668, 0.3711, 0.8718]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.8601, 0.7620, 0.0732, ..., 0.0545, 0.3750, 0.1934]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.44620156288147 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 484, 1014, ..., 4998952, + 4999472, 5000000]), + col_indices=tensor([ 28, 62, 89, ..., 9928, 9935, 9940]), + values=tensor([0.3908, 0.2484, 0.4500, ..., 0.3668, 0.3711, 0.8718]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.8601, 0.7620, 0.0732, ..., 0.0545, 0.3750, 0.1934]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.44620156288147 seconds + +[18.13, 18.04, 18.48, 17.86, 18.0, 18.3, 18.04, 17.84, 17.94, 18.14] +[83.47] +15.976722717285156 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8756, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.44620156288147, 'TIME_S_1KI': 1.193033527053617, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.577045211792, 'W': 83.47} +[18.13, 18.04, 18.48, 17.86, 18.0, 18.3, 18.04, 17.84, 17.94, 18.14, 18.29, 18.21, 17.99, 18.15, 21.9, 18.49, 18.31, 17.94, 18.08, 18.13] +329.91499999999996 +16.495749999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8756, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.44620156288147, 'TIME_S_1KI': 1.193033527053617, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.577045211792, 'W': 83.47, 'J_1KI': 152.30436788622566, 'W_1KI': 9.532891731384193, 'W_D': 66.97425, 'J_D': 1070.0290214481354, 'W_D_1KI': 7.648955002284148, 'J_D_1KI': 0.8735672684198433} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..646ac12 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2958, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.525683641433716, "TIME_S_1KI": 3.558378512993143, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1502.5020074129104, "W": 79.35, "J_1KI": 507.94523577177495, "W_1KI": 26.825557809330626, "W_D": 62.803999999999995, "J_D": 1189.2014628047941, "W_D_1KI": 21.231913455037187, "J_D_1KI": 7.177793595347258} diff 
--git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..cd10b80 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.38048672676086426} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 978, 2001, ..., 9997917, + 9998993, 10000000]), + col_indices=tensor([ 10, 12, 31, ..., 9968, 9976, 9993]), + values=tensor([0.1521, 0.7718, 0.5784, ..., 0.3138, 0.0420, 0.0283]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8445, 0.5776, 0.6277, ..., 0.5230, 0.9454, 0.0151]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 0.38048672676086426 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2759', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.790999174118042} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 961, 1957, ..., 9997943, + 9998964, 10000000]), + col_indices=tensor([ 0, 2, 15, ..., 9987, 9990, 9997]), + values=tensor([0.8878, 0.8149, 0.0468, ..., 0.0944, 0.2051, 0.2941]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8720, 0.1434, 0.3774, ..., 0.9472, 0.6076, 0.2537]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 9.790999174118042 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2958', '-ss', '10000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.525683641433716} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1002, 2060, ..., 9998101, + 9999084, 10000000]), + col_indices=tensor([ 6, 12, 22, ..., 9993, 9996, 9999]), + values=tensor([0.4647, 0.6377, 0.7581, ..., 0.1422, 0.4549, 0.7257]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2726, 0.9874, 0.6365, ..., 0.6635, 0.0461, 0.2273]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.525683641433716 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1002, 2060, ..., 9998101, + 9999084, 10000000]), + col_indices=tensor([ 6, 12, 22, ..., 9993, 9996, 9999]), + values=tensor([0.4647, 0.6377, 0.7581, ..., 0.1422, 0.4549, 0.7257]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2726, 0.9874, 0.6365, ..., 0.6635, 0.0461, 0.2273]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.525683641433716 seconds + +[18.46, 22.29, 18.08, 17.85, 18.08, 18.14, 17.95, 17.97, 18.17, 18.1] +[79.35] +18.935122966766357 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2958, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.525683641433716, 'TIME_S_1KI': 3.558378512993143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.5020074129104, 'W': 79.35} +[18.46, 22.29, 18.08, 17.85, 18.08, 18.14, 17.95, 17.97, 18.17, 18.1, 18.4, 17.86, 18.25, 18.05, 17.97, 17.82, 20.01, 18.07, 17.96, 17.84] +330.91999999999996 +16.546 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2958, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.525683641433716, 'TIME_S_1KI': 3.558378512993143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.5020074129104, 'W': 79.35, 'J_1KI': 507.94523577177495, 'W_1KI': 26.825557809330626, 'W_D': 62.803999999999995, 'J_D': 1189.2014628047941, 'W_D_1KI': 21.231913455037187, 'J_D_1KI': 7.177793595347258} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..600b3de --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1431, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.125213623046875, "TIME_S_1KI": 7.07562098046602, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1986.3526240086555, "W": 63.01, "J_1KI": 1388.087088755175, "W_1KI": 44.032145352900066, "W_D": 46.759, "J_D": 1474.0495531823635, "W_D_1KI": 32.67575122292103, "J_D_1KI": 22.83420770294971} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..b3d4b08 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.7335808277130127} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1938, 3929, ..., 19996007, + 19998018, 20000000]), + col_indices=tensor([ 10, 23, 25, ..., 9992, 9994, 9995]), + values=tensor([0.0730, 0.5628, 0.7699, ..., 0.2806, 0.9097, 0.3889]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.2309, 0.6404, 0.8370, ..., 0.6670, 0.0943, 0.6898]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 0.7335808277130127 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1431', '-ss', '10000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.125213623046875} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2012, 4019, ..., 19995967, + 19998000, 20000000]), + col_indices=tensor([ 4, 7, 8, ..., 9979, 9980, 9988]), + values=tensor([0.3444, 0.9651, 0.7506, ..., 0.6074, 0.5252, 0.1862]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.7493, 0.9404, 0.6976, ..., 0.3307, 0.3774, 0.7329]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.125213623046875 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2012, 4019, ..., 19995967, + 19998000, 20000000]), + col_indices=tensor([ 4, 7, 8, ..., 9979, 9980, 9988]), + values=tensor([0.3444, 0.9651, 0.7506, ..., 0.6074, 0.5252, 0.1862]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.7493, 0.9404, 0.6976, ..., 0.3307, 0.3774, 0.7329]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.125213623046875 seconds + +[18.03, 17.87, 18.44, 17.79, 18.02, 17.71, 17.95, 17.85, 17.83, 17.74] +[63.01] +31.524402856826782 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1431, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.125213623046875, 'TIME_S_1KI': 7.07562098046602, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1986.3526240086555, 'W': 63.01} +[18.03, 17.87, 18.44, 17.79, 18.02, 17.71, 17.95, 17.85, 17.83, 17.74, 18.68, 17.68, 18.59, 19.66, 17.91, 17.67, 18.09, 18.04, 17.79, 17.81] +325.02 +16.250999999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1431, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.125213623046875, 'TIME_S_1KI': 7.07562098046602, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1986.3526240086555, 'W': 63.01, 'J_1KI': 1388.087088755175, 'W_1KI': 44.032145352900066, 'W_D': 46.759, 'J_D': 1474.0495531823635, 'W_D_1KI': 32.67575122292103, 'J_D_1KI': 22.83420770294971} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..913ab31 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 887, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.198672533035278, "TIME_S_1KI": 11.497939721573031, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3521.5318365383146, "W": 51.94, "J_1KI": 3970.1599059056534, "W_1KI": 58.55693348365276, "W_D": 35.649249999999995, "J_D": 2417.019037807345, "W_D_1KI": 40.19081172491544, "J_D_1KI": 45.31094895706363} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..697edf6 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,105 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.9959039688110352} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2964, 5964, ..., 29994054, + 29997049, 30000000]), + col_indices=tensor([ 0, 6, 7, ..., 9989, 9993, 9996]), + values=tensor([0.3352, 0.3012, 0.1376, ..., 0.4634, 0.9038, 0.2157]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3407, 0.2089, 0.1462, ..., 0.7488, 0.0030, 0.5159]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 1.9959039688110352 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '526', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 7.3105573654174805} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2980, 5935, ..., 29993957, + 29996987, 30000000]), + col_indices=tensor([ 2, 4, 5, ..., 9985, 9987, 9990]), + values=tensor([0.7275, 0.2529, 0.2202, ..., 0.8048, 0.1786, 0.5578]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3722, 0.8340, 0.1775, ..., 0.2787, 0.3419, 0.3614]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 7.3105573654174805 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '755', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 8.937042713165283} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3082, 6089, ..., 29994069, + 29996991, 30000000]), + col_indices=tensor([ 3, 5, 7, ..., 9989, 9990, 9999]), + values=tensor([0.1400, 0.4959, 0.3565, ..., 0.5786, 0.8662, 0.8079]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3795, 0.0549, 0.6386, ..., 0.9156, 0.7490, 0.5099]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 8.937042713165283 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '887', '-ss', '10000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.198672533035278} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3035, 5994, ..., 29994113, + 29997092, 30000000]), + col_indices=tensor([ 5, 10, 14, ..., 9987, 9996, 9999]), + values=tensor([0.8098, 0.4554, 0.6671, ..., 0.4349, 0.8044, 0.2223]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3423, 0.6279, 0.0055, ..., 0.7400, 0.5417, 0.1422]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.198672533035278 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3035, 5994, ..., 29994113, + 29997092, 30000000]), + col_indices=tensor([ 5, 10, 14, ..., 9987, 9996, 9999]), + values=tensor([0.8098, 0.4554, 0.6671, ..., 0.4349, 0.8044, 0.2223]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3423, 0.6279, 0.0055, ..., 0.7400, 0.5417, 0.1422]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.198672533035278 seconds + +[18.62, 17.81, 18.39, 18.03, 17.98, 18.26, 18.01, 18.0, 17.9, 17.68] +[51.94] +67.79999685287476 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 887, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.198672533035278, 'TIME_S_1KI': 11.497939721573031, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3521.5318365383146, 'W': 51.94} +[18.62, 17.81, 18.39, 18.03, 17.98, 18.26, 18.01, 18.0, 17.9, 17.68, 18.52, 17.91, 18.3, 18.31, 18.44, 18.04, 17.96, 18.17, 17.99, 17.81] +325.815 +16.29075 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 887, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.198672533035278, 'TIME_S_1KI': 11.497939721573031, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3521.5318365383146, 'W': 51.94, 'J_1KI': 3970.1599059056534, 'W_1KI': 58.55693348365276, 'W_D': 35.649249999999995, 'J_D': 2417.019037807345, 'W_D_1KI': 40.19081172491544, 'J_D_1KI': 45.31094895706363} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..4a5c029 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 285101, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.263301372528076, "TIME_S_1KI": 0.03599882628446788, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1023.6102543663977, "W": 73.21, "J_1KI": 3.5903425605886956, "W_1KI": 0.2567861915601839, "W_D": 56.916999999999994, "J_D": 795.8041913368701, "W_D_1KI": 0.1996380230164047, "J_D_1KI": 0.0007002361374264022} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..aea26a1 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1307 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019815444946289062} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([ 147, 628, 1125, 5287, 8823, 6934, 2121, 9045, 1741, + 1008, 777, 1781, 8765, 5338, 5590, 4011, 9135, 3712, + 8048, 6348, 8740, 9035, 822, 3133, 9984, 8122, 9554, + 3712, 3272, 5515, 8959, 6829, 83, 3899, 7199, 7801, + 5285, 8079, 5441, 7100, 7667, 5056, 6862, 2526, 7349, + 1728, 3499, 3354, 7526, 5044, 3630, 6886, 4310, 5869, + 2649, 6497, 3797, 8787, 1590, 3717, 600, 9128, 9514, + 219, 9480, 7496, 7514, 5942, 3564, 2833, 1156, 3271, + 9892, 8323, 4817, 8727, 4029, 1225, 635, 3756, 3854, + 1580, 9309, 2348, 2287, 6728, 2416, 8735, 1996, 9982, + 2260, 9743, 6241, 6259, 3363, 2128, 9721, 8593, 9287, + 6303, 6167, 7208, 4450, 1275, 2370, 2881, 3104, 7161, + 9133, 226, 5026, 8446, 4133, 9547, 9259, 5653, 4323, + 9414, 5933, 1536, 7990, 6794, 7612, 5173, 6568, 8498, + 2655, 2146, 8320, 4457, 6510, 276, 9447, 6969, 2617, + 7175, 1400, 1929, 524, 929, 472, 5246, 4656, 4620, + 489, 4946, 342, 261, 5266, 9154, 4961, 841, 6798, + 6887, 9276, 203, 2231, 9861, 5260, 5410, 4989, 3777, + 5518, 2156, 807, 5251, 7532, 8301, 8516, 2536, 7883, + 4202, 2101, 5422, 6588, 505, 5736, 1011, 9457, 514, + 9520, 284, 2054, 2592, 5274, 6220, 4951, 8010, 3966, + 4007, 1997, 1966, 7251, 3207, 5838, 9132, 4982, 4877, + 8655, 8284, 876, 5569, 757, 2680, 1245, 1459, 64, + 365, 4927, 7024, 7351, 643, 3222, 4840, 6222, 7194, + 1143, 9570, 6110, 514, 6176, 3515, 236, 14, 9751, + 3159, 5832, 1147, 6202, 9999, 7112, 2023, 1940, 2795, + 6251, 8906, 5208, 3243, 7604, 6854, 2354, 2890, 2740, + 454, 4997, 9488, 8203, 5802, 49, 4850, 8249, 7823, + 1150, 8721, 9200, 1431, 3413, 5962, 300, 7898, 4190, + 1359, 832, 9165, 8870, 2860, 5248, 4896, 4884, 9315, + 6156, 4153, 5251, 9843, 5642, 7880, 3504, 3324, 7744, + 7101, 8647, 7753, 6224, 4352, 1703, 8312, 455, 3521, + 536, 6524, 6799, 7876, 7371, 8323, 7238, 5827, 4495, + 6175, 5872, 9038, 7184, 3438, 9862, 9699, 746, 3390, + 8840, 1555, 6503, 5323, 281, 1915, 41, 9952, 630, + 2524, 81, 7205, 5517, 7630, 9980, 8564, 2454, 5345, + 6893, 2025, 7310, 6297, 7720, 9244, 9072, 6121, 526, + 8438, 6441, 784, 8517, 5423, 2370, 1040, 9954, 5590, + 9371, 3627, 9962, 8967, 738, 9590, 2685, 5587, 8151, + 98, 9310, 2936, 529, 1987, 5122, 8678, 5271, 6797, + 8491, 7804, 9346, 6958, 1197, 3159, 3495, 4161, 7372, + 2305, 7596, 9495, 2623, 4621, 1213, 574, 4322, 5020, + 8442, 5191, 8488, 8804, 3945, 8995, 9875, 8614, 9285, + 5845, 4077, 4304, 1222, 7152, 3293, 357, 9313, 2693, + 5120, 942, 8204, 3155, 4829, 2464, 4701, 3688, 607, + 5172, 151, 3028, 4014, 1336, 976, 4925, 5627, 1156, + 9625, 5767, 8183, 5688, 8528, 9294, 4747, 8880, 7735, + 5253, 9274, 6989, 3528, 8002, 6326, 509, 3423, 8496, + 9738, 8213, 8077, 7550, 7738, 8069, 93, 3909, 5783, + 9543, 120, 9437, 6763, 7953, 9131, 7685, 7049, 6280, + 5195, 6701, 8174, 8541, 1693, 1873, 1258, 8665, 484, + 4711, 8704, 5880, 6139, 5452, 3962, 584, 5287, 6792, + 2514, 4003, 9542, 4836, 1519, 4148, 4206, 2568, 4631, + 2566, 3432, 2366, 2787, 6206, 7242, 5947, 1135, 3254, + 2113, 6329, 5453, 9815, 5777, 9459, 1217, 265, 3194, + 4738, 1004, 6119, 7621, 718, 5091, 7111, 8995, 5329, + 9627, 9467, 2122, 4961, 1162, 
6369, 2607, 1279, 7314, + 4043, 9227, 6168, 4214, 1156, 6289, 4153, 2703, 9977, + 4203, 4485, 2363, 2942, 2937, 7682, 1361, 3609, 9757, + 8109, 4417, 1482, 5111, 1389, 2296, 737, 5089, 1163, + 4190, 4070, 610, 4301, 95, 453, 2406, 4010, 8161, + 762, 2233, 5838, 5351, 8383, 1707, 4709, 6703, 5945, + 4949, 1124, 3676, 9285, 7158, 3499, 7227, 1568, 2973, + 1428, 2316, 6991, 9226, 8007, 735, 3310, 6820, 9505, + 7011, 4908, 3243, 2472, 1968, 5864, 7326, 9622, 2967, + 5217, 8990, 3943, 387, 5835, 3583, 1763, 7032, 3775, + 4192, 6405, 7533, 1351, 805, 4502, 8291, 5499, 4398, + 7000, 8794, 1522, 2359, 7891, 6226, 7994, 1689, 6323, + 3184, 7685, 4860, 6403, 9802, 6255, 1821, 7732, 4494, + 8136, 7965, 2804, 7520, 2384, 6618, 7928, 6910, 9527, + 5014, 9950, 9409, 3928, 7865, 282, 2164, 9709, 3423, + 4150, 8746, 2581, 6098, 4292, 1480, 5919, 9694, 5887, + 6173, 9603, 4877, 6138, 1293, 3703, 2957, 2971, 131, + 2304, 7972, 1723, 1558, 5610, 5607, 4064, 6099, 1287, + 6409, 1303, 2598, 7669, 21, 1272, 118, 8324, 1938, + 2731, 4676, 7652, 6310, 6662, 2345, 232, 4946, 4044, + 5562, 4317, 383, 7955, 1674, 2856, 3908, 692, 9659, + 2014, 9423, 4192, 7578, 2859, 1638, 9429, 4145, 8698, + 9380, 7375, 7178, 5811, 1590, 199, 2988, 8205, 9471, + 7617, 1811, 5995, 2386, 2786, 4693, 6295, 485, 8968, + 3066, 5805, 3154, 3848, 1759, 3221, 5904, 1069, 3533, + 309, 7347, 7846, 6274, 7652, 6373, 3481, 6353, 9650, + 1516, 8787, 7982, 5148, 4709, 8309, 4232, 9115, 1418, + 5274, 395, 5709, 3174, 68, 3804, 7295, 9536, 3414, + 9104, 924, 3572, 6817, 8563, 561, 9344, 3220, 2063, + 7444, 7018, 707, 7026, 2578, 179, 5410, 8966, 9805, + 9112, 5656, 3110, 3676, 1235, 9544, 1495, 5008, 7289, + 727, 5468, 5698, 8696, 3288, 2439, 2448, 7548, 1409, + 3012, 7997, 4416, 7301, 2138, 3939, 8274, 9381, 88, + 583, 8683, 9292, 6899, 6134, 1702, 4060, 3625, 8216, + 7762, 6579, 8766, 6500, 9863, 474, 5904, 1723, 5347, + 8855, 8569, 5341, 7635, 8364, 1259, 7057, 2725, 6056, + 7618, 8691, 7207, 2875, 7923, 4624, 1246, 7177, 9717, + 3416, 2221, 2213, 5331, 7348, 4898, 7801, 7311, 6564, + 8018, 3122, 3971, 2426, 7210, 2126, 5178, 5966, 4092, + 1656, 7574, 6717, 1713, 5351, 7205, 1072, 1567, 4916, + 3143, 3039, 494, 5212, 2734, 8298, 5666, 1616, 991, + 5392, 219, 4095, 7599, 4336, 3201, 4103, 4232, 7641, + 6901, 3161, 911, 2271, 3833, 2362, 450, 702, 4414, + 1985, 9874, 7045, 303, 2861, 6503, 8031, 6998, 981, + 6383, 3804, 2132, 7446, 3522, 8554, 7230, 7247, 6842, + 8740, 6185, 4789, 9401, 7169, 4828, 5223, 2972, 315, + 9277, 5159, 6582, 3209, 3738, 2614, 6186, 7932, 8653, + 8758, 7882, 7681, 6252, 7053, 4207, 5682, 8498, 8562, + 9350, 9645, 4139, 5790, 9077, 5393, 6059, 4141, 5770, + 4443, 1642, 3655, 1526, 4326, 4199, 2823, 9283, 4707, + 4922, 7330, 7654, 5307, 5122, 5645, 5858, 9038, 8521, + 4752, 9830, 517, 6062, 7448, 3954, 8600, 4313, 3222, + 3628, 5629, 9654, 6445, 4277, 9969, 3299, 6212, 4242, + 5088, 3231, 2968, 3697, 4789, 7868, 6625, 5588, 531, + 6783, 3545, 9416, 3488, 5893, 600, 2516, 5328, 2390, + 6963]), + values=tensor([5.2560e-01, 4.5182e-01, 2.3298e-01, 1.2745e-01, + 6.7432e-01, 1.6305e-01, 6.7850e-01, 4.8297e-02, + 4.2986e-02, 4.6503e-01, 5.1950e-01, 9.2514e-01, + 3.0211e-02, 5.8682e-01, 1.6128e-01, 9.9806e-01, + 6.5284e-01, 4.7123e-01, 9.0892e-01, 7.2129e-01, + 1.4353e-01, 7.5709e-01, 6.9051e-01, 2.2834e-01, + 3.7105e-01, 5.2802e-01, 6.8136e-01, 5.9089e-01, + 3.2433e-01, 6.7476e-01, 6.9094e-01, 4.2556e-01, + 1.5749e-01, 7.8965e-01, 5.0067e-01, 9.5478e-01, + 7.8984e-01, 3.4707e-01, 4.1861e-02, 9.5964e-01, + 7.4705e-02, 6.3404e-01, 
6.7261e-01, 1.7905e-01, + 8.5368e-01, 1.8088e-01, 7.0364e-01, 1.3954e-01, + 6.4433e-01, 2.6339e-01, 5.1852e-01, 1.7690e-01, + 3.6218e-01, 8.9644e-01, 9.8381e-01, 4.4953e-01, + 5.3938e-02, 8.2339e-01, 4.5796e-01, 9.4213e-02, + 3.3168e-02, 3.7108e-01, 8.3725e-01, 1.4729e-01, + 5.8007e-01, 6.0673e-01, 9.9643e-01, 7.2226e-01, + 7.1339e-01, 8.4565e-01, 4.7404e-01, 6.2064e-01, + 5.8324e-02, 2.8401e-01, 5.6260e-01, 8.1231e-01, + 5.0034e-01, 4.1114e-01, 4.3338e-01, 9.7312e-01, + 6.0643e-01, 4.1709e-01, 6.0714e-02, 7.0392e-01, + 3.0608e-01, 9.7941e-01, 3.7985e-01, 5.9718e-01, + 7.0541e-01, 6.7003e-01, 8.0980e-01, 1.1730e-02, + 5.2983e-01, 1.1390e-01, 9.7596e-01, 4.4723e-01, + 1.4768e-01, 6.1947e-01, 9.3278e-01, 6.5142e-02, + 3.7915e-01, 8.5646e-01, 4.6630e-01, 1.1336e-01, + 5.5405e-01, 6.7251e-01, 2.2618e-01, 5.7297e-02, + 8.3920e-01, 6.4694e-01, 2.9986e-01, 7.2391e-01, + 3.8923e-01, 8.9915e-02, 1.7642e-01, 1.2665e-01, + 1.1954e-01, 1.2088e-01, 1.4362e-01, 9.5926e-01, + 9.6244e-01, 8.2548e-01, 6.8499e-01, 7.3075e-01, + 5.3887e-01, 9.8974e-01, 3.7081e-01, 1.6977e-01, + 7.0021e-02, 8.2089e-01, 2.9793e-01, 4.7021e-02, + 7.1563e-01, 1.4888e-01, 3.8166e-01, 8.0958e-01, + 5.9825e-01, 4.8717e-02, 9.7864e-02, 4.0284e-01, + 9.2565e-01, 5.9953e-01, 5.3686e-01, 3.4093e-01, + 2.5458e-01, 6.5661e-01, 1.5050e-01, 1.0070e-01, + 9.2754e-01, 2.6429e-02, 8.6963e-01, 3.9150e-01, + 3.0459e-01, 9.0345e-01, 8.7288e-01, 1.4733e-01, + 5.5407e-01, 4.2790e-01, 3.4163e-01, 8.7761e-01, + 8.6974e-01, 9.4480e-01, 8.5739e-01, 3.5148e-01, + 7.1780e-01, 5.6444e-01, 3.1054e-01, 2.7980e-01, + 7.6360e-01, 2.0659e-01, 8.7043e-01, 8.6826e-01, + 6.1911e-01, 2.4894e-01, 1.7003e-01, 9.8128e-01, + 1.0649e-01, 4.0739e-01, 1.6335e-01, 5.4390e-01, + 5.5668e-01, 7.5228e-03, 5.5173e-01, 4.2472e-01, + 6.3659e-01, 1.2333e-02, 1.6682e-01, 7.0490e-01, + 1.7784e-01, 2.4643e-01, 9.9365e-01, 5.7578e-01, + 1.4710e-01, 7.3059e-01, 1.0914e-01, 1.5979e-02, + 8.4679e-01, 8.8970e-02, 4.2280e-01, 3.2340e-01, + 9.5658e-01, 1.8028e-01, 7.6493e-02, 9.1788e-01, + 3.5415e-01, 8.1832e-01, 9.8353e-01, 6.2593e-01, + 5.0861e-01, 6.5047e-01, 9.9291e-01, 2.6001e-01, + 5.3761e-01, 3.3600e-02, 1.8506e-01, 2.9837e-01, + 4.7576e-01, 3.3503e-01, 1.7338e-02, 1.0681e-02, + 4.8168e-01, 3.3971e-01, 3.0402e-01, 3.5616e-01, + 6.9091e-02, 4.0202e-01, 7.9475e-01, 9.6864e-01, + 5.8040e-01, 5.7299e-01, 8.5674e-01, 6.6006e-01, + 2.7936e-01, 1.3390e-01, 1.8119e-01, 7.3696e-01, + 2.1656e-02, 5.0719e-01, 7.9270e-01, 7.9608e-01, + 4.6938e-02, 6.4955e-01, 1.1024e-01, 3.3072e-01, + 7.0495e-01, 7.9866e-01, 2.9836e-01, 5.3080e-02, + 4.5780e-01, 3.2462e-01, 2.3343e-01, 3.4916e-01, + 4.9296e-01, 2.0091e-01, 7.6547e-01, 1.3016e-01, + 6.0404e-02, 8.2176e-01, 8.6472e-01, 1.0995e-01, + 3.7648e-01, 7.3952e-01, 9.6968e-01, 6.3923e-01, + 7.2826e-01, 7.1776e-01, 6.8745e-01, 2.7331e-01, + 1.2680e-01, 3.7805e-02, 4.0262e-01, 8.2898e-01, + 3.9871e-01, 5.1570e-01, 9.1900e-01, 4.6037e-01, + 6.9803e-01, 3.7678e-01, 6.0374e-01, 8.0866e-01, + 1.2782e-01, 1.3656e-01, 6.7064e-01, 9.7129e-01, + 9.1569e-01, 8.7380e-01, 6.8149e-01, 7.9829e-01, + 9.4074e-01, 4.9967e-01, 7.6367e-01, 4.1375e-01, + 3.3762e-01, 8.4138e-01, 5.7388e-01, 1.0199e-02, + 6.1160e-01, 2.7149e-01, 7.9430e-01, 6.0606e-01, + 3.8213e-01, 6.9915e-01, 7.9717e-01, 6.4264e-01, + 1.2570e-01, 4.2091e-01, 7.7556e-03, 4.2787e-01, + 2.4645e-01, 7.0922e-01, 9.7475e-01, 1.7749e-01, + 7.7920e-01, 8.8611e-01, 7.3655e-01, 2.6627e-02, + 4.3960e-01, 7.0921e-01, 1.2495e-01, 7.9556e-01, + 2.4612e-01, 2.1105e-01, 1.3608e-01, 2.6228e-01, + 5.4161e-01, 9.6676e-01, 
7.1936e-01, 5.0925e-01, + 3.3548e-01, 4.0907e-01, 2.5859e-01, 7.8072e-01, + 5.3413e-01, 6.4004e-01, 4.0338e-01, 4.6080e-01, + 2.2626e-01, 6.6418e-02, 4.6412e-01, 3.7270e-01, + 4.9191e-01, 1.9996e-01, 7.9189e-01, 1.1419e-01, + 3.0279e-01, 5.9447e-01, 1.0009e-01, 3.9196e-01, + 8.7322e-02, 3.6585e-01, 5.0558e-01, 5.9756e-01, + 5.1223e-01, 7.7059e-01, 5.6411e-01, 8.6785e-01, + 4.9763e-01, 2.7060e-01, 4.6230e-01, 2.8485e-01, + 5.6333e-01, 6.4385e-01, 2.4189e-01, 4.3952e-01, + 9.1360e-01, 6.8688e-01, 2.3180e-01, 3.6477e-01, + 1.8153e-01, 7.8891e-01, 9.7111e-01, 5.2860e-01, + 7.2300e-01, 8.4822e-02, 6.4799e-01, 7.4738e-01, + 9.3333e-01, 3.0742e-01, 2.0477e-01, 7.5682e-01, + 3.8206e-01, 7.5470e-01, 1.5177e-01, 4.5879e-01, + 2.0623e-01, 2.9029e-01, 6.1429e-01, 8.4124e-01, + 3.1046e-01, 6.1053e-01, 5.9478e-01, 7.4185e-01, + 8.5712e-01, 1.5446e-01, 6.2325e-01, 1.5679e-01, + 8.9475e-02, 5.3521e-01, 5.0307e-02, 4.9942e-01, + 1.6993e-01, 9.1214e-01, 1.0981e-01, 5.4953e-01, + 1.3202e-04, 2.0668e-02, 2.6934e-02, 2.8142e-02, + 5.9963e-01, 6.2907e-01, 1.9020e-01, 8.4416e-01, + 4.7828e-01, 6.2037e-02, 8.4804e-01, 2.8713e-01, + 8.7914e-01, 5.2289e-02, 8.8096e-01, 7.7464e-02, + 1.8152e-01, 4.5361e-01, 9.2307e-02, 7.7685e-01, + 4.8882e-01, 3.8849e-02, 2.4650e-01, 9.4929e-01, + 4.8155e-01, 2.3940e-01, 6.0404e-01, 2.8524e-01, + 3.1995e-01, 1.7781e-01, 3.6728e-01, 9.4366e-01, + 6.2074e-01, 6.7714e-01, 9.4887e-01, 9.6136e-01, + 6.5217e-01, 4.4854e-01, 3.6396e-01, 2.0775e-01, + 9.0861e-01, 1.7247e-01, 2.0122e-01, 3.1052e-01, + 9.7944e-01, 4.6799e-01, 5.9893e-01, 1.8430e-01, + 6.1849e-01, 6.7723e-01, 8.1238e-01, 8.5110e-01, + 7.7606e-01, 4.8711e-01, 3.1304e-01, 7.0698e-01, + 6.6410e-01, 1.2702e-01, 6.6704e-01, 9.5507e-01, + 8.2579e-01, 9.9875e-01, 2.2652e-01, 7.2346e-01, + 5.4255e-01, 7.3418e-01, 5.4150e-02, 7.6473e-01, + 8.4496e-01, 2.7827e-02, 3.1883e-01, 6.2547e-01, + 3.3673e-01, 7.0332e-01, 7.4524e-01, 1.7202e-01, + 4.7220e-01, 2.5484e-01, 6.3754e-01, 2.6972e-01, + 5.7555e-01, 2.5729e-02, 8.9240e-01, 9.3243e-02, + 2.0048e-01, 3.9184e-01, 7.9079e-01, 7.6417e-01, + 3.6297e-01, 1.0701e-01, 7.3829e-01, 5.7379e-01, + 7.8135e-01, 5.9905e-01, 1.7596e-02, 2.0084e-01, + 2.6002e-01, 5.7793e-01, 1.1292e-01, 5.5716e-01, + 1.2785e-01, 3.4844e-01, 2.6200e-01, 9.5494e-01, + 8.4409e-01, 8.8543e-01, 8.3785e-01, 2.5559e-01, + 3.7511e-01, 8.7394e-02, 3.0524e-01, 1.7522e-01, + 7.9539e-01, 3.3646e-02, 5.2387e-01, 3.7442e-01, + 1.3978e-02, 1.8117e-01, 5.3870e-02, 9.4073e-01, + 9.3325e-01, 1.3931e-01, 9.5806e-03, 2.1513e-01, + 7.5372e-01, 8.8366e-01, 7.3370e-02, 3.8591e-01, + 4.2813e-01, 6.9476e-01, 8.6124e-01, 4.2190e-01, + 5.7470e-01, 3.1136e-01, 8.5208e-01, 7.0394e-01, + 1.8636e-01, 5.9006e-01, 5.1801e-01, 5.2659e-01, + 5.0269e-01, 7.7233e-01, 7.8972e-01, 8.3201e-01, + 4.3437e-01, 6.4845e-01, 5.5414e-01, 4.4734e-01, + 5.1213e-01, 1.6050e-01, 7.1213e-01, 1.6252e-01, + 8.9777e-01, 6.6440e-01, 4.4346e-02, 7.4956e-01, + 5.2652e-01, 8.8268e-02, 2.4019e-01, 1.9801e-01, + 8.9331e-01, 7.6786e-01, 1.5987e-01, 2.2736e-01, + 7.3131e-01, 6.1032e-01, 3.1343e-01, 4.4597e-02, + 5.9683e-01, 1.9413e-01, 1.9830e-01, 1.4040e-01, + 1.3606e-01, 6.0248e-02, 7.0841e-02, 8.1665e-01, + 1.6396e-01, 1.9495e-01, 7.9996e-01, 3.2143e-01, + 8.0826e-01, 7.2033e-01, 6.2158e-01, 2.8202e-01, + 8.5930e-01, 8.9293e-01, 4.3542e-01, 5.5467e-01, + 6.4518e-01, 6.6044e-01, 2.4172e-01, 6.8673e-01, + 2.6429e-01, 6.5201e-01, 7.9924e-01, 9.2253e-01, + 9.3220e-01, 3.3890e-01, 4.3856e-01, 4.7886e-01, + 2.4920e-01, 1.1320e-01, 8.1552e-01, 4.8103e-01, + 9.9413e-01, 1.2151e-01, 
7.3907e-01, 5.9053e-01, + 3.2489e-01, 7.0052e-01, 1.4705e-01, 2.5163e-01, + 6.2341e-01, 2.4832e-01, 1.0102e-01, 6.1087e-01, + 4.9169e-01, 6.3088e-01, 6.5857e-02, 1.2103e-01, + 5.3802e-01, 4.3476e-01, 8.5162e-02, 9.5909e-02, + 2.7539e-01, 6.3053e-01, 8.2462e-01, 6.8849e-01, + 6.8116e-01, 4.4265e-01, 2.5334e-01, 5.7535e-01, + 7.3906e-01, 2.6868e-01, 8.2246e-01, 7.5326e-01, + 2.3140e-01, 6.7734e-01, 1.5208e-01, 4.3675e-01, + 8.5422e-01, 1.3327e-01, 2.9050e-01, 4.6933e-02, + 2.8711e-01, 6.4456e-01, 6.6952e-01, 9.7136e-01, + 8.7064e-01, 4.4199e-01, 9.0821e-01, 2.2048e-01, + 6.2945e-02, 6.9894e-01, 4.6657e-01, 4.9677e-01, + 5.3803e-01, 1.6106e-01, 1.9190e-01, 9.6721e-01, + 8.6755e-01, 6.7380e-01, 6.3082e-01, 6.2585e-01, + 9.8280e-01, 3.2073e-01, 7.9868e-01, 5.3679e-01, + 2.3558e-01, 7.8221e-01, 9.6604e-01, 1.5512e-01, + 2.7020e-01, 5.4598e-01, 7.2565e-01, 9.0695e-01, + 7.1888e-01, 5.5173e-01, 2.1659e-01, 7.2627e-01, + 1.6656e-02, 2.4187e-01, 2.0943e-01, 7.3096e-01, + 5.3434e-01, 5.8941e-01, 7.8362e-01, 3.0880e-01, + 5.8657e-01, 5.3713e-01, 1.5416e-01, 7.7206e-01, + 7.7405e-01, 2.2208e-01, 2.7616e-02, 4.1381e-02, + 3.6096e-01, 2.9990e-01, 7.2871e-01, 4.6144e-01, + 2.6081e-01, 1.1160e-01, 4.6879e-02, 9.6260e-01, + 6.3027e-02, 9.2736e-01, 5.1303e-01, 7.8810e-01, + 9.4326e-01, 8.3384e-01, 5.2389e-01, 3.7333e-01, + 7.0704e-01, 2.0490e-02, 9.7841e-01, 4.6449e-01, + 2.0052e-01, 1.6703e-01, 2.3432e-02, 8.7757e-01, + 4.3288e-01, 9.0692e-01, 8.0465e-01, 6.8254e-01, + 7.1185e-01, 6.0360e-01, 3.0568e-01, 3.3586e-01, + 8.0849e-01, 9.2920e-01, 1.8771e-01, 1.6081e-01, + 6.2577e-01, 8.0494e-01, 2.5995e-01, 6.9688e-01, + 8.1340e-01, 7.6095e-01, 4.3346e-01, 9.4688e-01, + 7.6554e-02, 3.0534e-01, 1.9986e-02, 6.3233e-01, + 7.6106e-01, 4.6982e-01, 6.3199e-01, 3.8163e-01, + 4.8463e-01, 8.4737e-01, 9.7787e-01, 6.8558e-02, + 8.4902e-01, 8.4066e-02, 9.5446e-01, 9.5276e-01, + 1.1721e-01, 6.6180e-01, 1.7366e-01, 5.8689e-01, + 3.5482e-01, 9.7331e-01, 3.4294e-01, 2.8538e-01, + 5.1595e-01, 3.1709e-01, 5.5250e-01, 5.6823e-01, + 8.1852e-01, 7.0949e-01, 8.0023e-01, 1.9327e-01, + 6.3159e-01, 8.3741e-01, 4.2915e-01, 4.5280e-01, + 3.6497e-02, 4.6729e-01, 1.7655e-01, 9.0173e-01, + 8.8293e-01, 3.8356e-01, 3.7646e-01, 7.9430e-01, + 6.1564e-01, 8.3823e-01, 6.4739e-01, 2.5501e-01, + 1.7745e-02, 2.0888e-01, 1.6213e-01, 1.4289e-01, + 4.3243e-01, 8.9590e-02, 6.2675e-01, 1.6139e-01, + 4.9212e-02, 7.8304e-01, 3.4728e-01, 1.1716e-01, + 6.6748e-01, 6.8148e-01, 6.0697e-01, 9.6199e-01, + 2.0651e-01, 7.4950e-01, 1.2579e-01, 8.0246e-01, + 1.6417e-01, 7.2462e-01, 2.5278e-02, 4.6134e-02, + 6.2831e-01, 5.8610e-01, 2.5098e-01, 3.0610e-01, + 5.9318e-01, 7.4726e-01, 6.9463e-01, 9.1050e-01, + 5.1888e-01, 7.0514e-01, 7.0205e-01, 7.2207e-01, + 8.0443e-01, 8.7596e-02, 4.7888e-01, 8.3782e-01, + 2.4008e-01, 8.0627e-01, 8.5026e-01, 9.1172e-01, + 8.6693e-01, 5.8864e-01, 7.3220e-01, 9.5311e-01, + 2.3545e-01, 5.9781e-01, 4.2475e-01, 5.7120e-01, + 7.9825e-01, 1.1148e-01, 9.6872e-01, 1.8660e-01, + 1.2651e-01, 6.4452e-01, 6.0136e-01, 4.0304e-01, + 3.2066e-01, 9.6919e-01, 8.9438e-01, 1.9197e-01, + 6.2802e-01, 1.2447e-01, 9.2235e-01, 5.3612e-01, + 7.6692e-01, 6.0437e-01, 3.9696e-01, 2.4947e-01, + 4.7984e-01, 2.4304e-01, 8.7832e-01, 4.4963e-01, + 1.4427e-01, 5.7088e-01, 7.6046e-01, 6.8213e-01, + 8.0719e-01, 4.0864e-01, 5.2984e-01, 6.0712e-01, + 9.0793e-01, 7.5984e-01, 6.8009e-01, 9.3405e-03, + 9.6069e-02, 1.2806e-01, 3.5111e-01, 6.9311e-01, + 8.2085e-01, 8.2835e-01, 9.1010e-01, 6.9786e-01, + 8.9484e-01, 6.1428e-01, 5.8339e-01, 7.7197e-01, + 9.0580e-01, 9.3461e-01, 
5.1191e-01, 6.0450e-01, + 1.1201e-01, 9.4214e-01, 1.2487e-01, 9.0601e-01, + 2.9108e-01, 6.7647e-01, 9.4078e-01, 2.8280e-01, + 1.9018e-01, 8.2373e-01, 1.9392e-01, 2.5619e-01, + 4.7560e-01, 9.5113e-02, 7.0953e-01, 1.8132e-01, + 7.8736e-01, 2.9743e-01, 6.6220e-01, 7.0073e-01, + 4.2373e-01, 8.5630e-02, 1.5746e-01, 2.2218e-01, + 7.1860e-01, 7.1300e-01, 2.1873e-01, 5.3912e-01, + 3.1159e-01, 4.0946e-01, 3.6675e-01, 1.7526e-01, + 3.9314e-01, 6.8523e-01, 8.9266e-01, 2.6076e-01, + 1.9742e-01, 7.1643e-01, 5.7968e-02, 2.9522e-01, + 2.6531e-01, 9.4045e-01, 2.8546e-01, 5.0832e-01, + 2.7122e-01, 9.1348e-02, 4.6177e-01, 5.4269e-01, + 9.6392e-01, 5.7908e-01, 4.4952e-02, 4.4291e-01, + 1.9518e-01, 5.7031e-01, 4.6612e-01, 6.5682e-01, + 7.5169e-01, 7.1827e-01, 6.9783e-01, 8.7240e-01, + 9.7588e-01, 4.6460e-01, 4.9550e-01, 1.1087e-01, + 4.9333e-01, 4.5081e-01, 5.3065e-02, 1.0936e-01, + 2.3607e-01, 3.1568e-01, 4.5569e-02, 3.3065e-01, + 6.3115e-01, 3.4755e-01, 4.1582e-01, 6.8376e-01, + 7.2990e-02, 4.2965e-01, 9.9497e-01, 2.6721e-01, + 7.7370e-02, 9.9769e-01, 8.7087e-01, 8.0430e-01, + 2.4436e-01, 8.4694e-02, 2.4607e-01, 3.7822e-01, + 4.9074e-01, 2.9051e-01, 4.6776e-01, 9.0178e-01, + 1.2981e-01, 3.4897e-01, 4.9289e-01, 4.9671e-01, + 4.4983e-01, 5.6051e-01, 9.3221e-01, 5.3076e-01, + 3.0281e-01, 7.1212e-01, 7.5584e-01, 8.5415e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.7030, 0.4856, 0.8302, ..., 0.1030, 0.0064, 0.5187]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.019815444946289062 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52988', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.9514954090118408} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([1913, 7654, 2578, 7438, 161, 1784, 4399, 9342, 5973, + 9317, 2386, 5390, 6826, 8246, 3533, 5774, 7850, 3210, + 5098, 9081, 1419, 9854, 4633, 4094, 2460, 1982, 5224, + 7488, 9470, 9542, 7636, 1813, 3516, 6772, 9387, 3527, + 2143, 2579, 3758, 6675, 2400, 6857, 5478, 3695, 3795, + 3756, 3719, 3404, 9547, 8983, 7175, 4037, 9581, 7185, + 929, 5259, 781, 9068, 9465, 4965, 5893, 2696, 2105, + 4213, 4999, 2582, 8142, 1368, 9056, 5530, 8794, 3411, + 8012, 2760, 7505, 2582, 7967, 1442, 5100, 4547, 8139, + 4597, 3901, 8732, 616, 6824, 2406, 5427, 99, 1530, + 6480, 9734, 2031, 8017, 9192, 5440, 6935, 2243, 5699, + 2761, 4735, 1992, 3936, 8939, 2912, 3975, 1293, 2526, + 1242, 1543, 6007, 2170, 8914, 4313, 7065, 1733, 3488, + 4650, 2435, 7570, 6849, 1021, 3506, 2267, 4325, 8183, + 4797, 2128, 7956, 4452, 1357, 2467, 9292, 7529, 2665, + 6939, 4926, 53, 3719, 2716, 4371, 322, 8247, 6141, + 3643, 4161, 1218, 4631, 45, 5094, 2416, 6978, 9777, + 8969, 1493, 6000, 920, 357, 7359, 4419, 5789, 2824, + 9510, 2214, 2666, 6941, 5654, 5280, 2914, 7629, 6757, + 5329, 6096, 4785, 7842, 272, 553, 1821, 2930, 6978, + 9552, 3585, 2668, 4929, 8592, 9008, 3436, 6246, 2869, + 9508, 6305, 674, 5548, 2178, 4623, 2766, 9426, 6636, + 2734, 5700, 6697, 4397, 1950, 4820, 2898, 9874, 4309, + 7607, 72, 4311, 993, 3186, 2078, 2317, 7450, 1239, + 2314, 5387, 5556, 1929, 9624, 7824, 8662, 8410, 6503, + 1443, 483, 7970, 4732, 6257, 7787, 3330, 448, 9261, + 4951, 1319, 3950, 7599, 1261, 2621, 8652, 8132, 3731, + 4079, 251, 4113, 1307, 6290, 2770, 662, 6841, 6792, + 6294, 900, 5110, 4306, 6618, 9974, 8342, 9695, 3143, + 349, 8326, 9528, 3464, 1504, 8313, 3204, 7429, 1054, + 9404, 1518, 4565, 6436, 3850, 3090, 1967, 9212, 1515, + 5737, 4332, 1307, 7812, 5169, 7957, 9263, 7549, 8305, + 2285, 6467, 7071, 9999, 316, 9584, 950, 4240, 3058, + 1871, 7709, 7377, 8353, 8543, 8976, 4692, 7226, 6989, + 5913, 9913, 4304, 5467, 6510, 8272, 6971, 8286, 4630, + 2781, 8362, 8692, 4774, 6297, 7725, 1668, 6756, 7100, + 1790, 4362, 6628, 6041, 4431, 2206, 3651, 7414, 1881, + 4217, 723, 6273, 1887, 8487, 8200, 432, 2818, 2742, + 7506, 5950, 5296, 9718, 1865, 3321, 5712, 5309, 379, + 7907, 5022, 6883, 7634, 8109, 8581, 2524, 5363, 4385, + 9481, 7050, 3722, 6905, 4123, 1193, 980, 1309, 149, + 1486, 4890, 4713, 1439, 1191, 9691, 6210, 5838, 8904, + 3708, 9438, 9896, 5287, 3948, 1147, 1698, 4269, 5066, + 6607, 1091, 9583, 1892, 4545, 7581, 5502, 5593, 3633, + 8334, 7742, 989, 5454, 5419, 8285, 7032, 5460, 8123, + 7047, 9497, 2742, 7941, 4354, 2115, 2169, 3579, 2975, + 9962, 9131, 790, 9536, 934, 6531, 4130, 6559, 8852, + 1912, 4091, 5108, 4143, 2911, 8718, 7157, 9474, 440, + 4893, 2348, 2435, 3348, 2896, 5362, 2471, 3557, 3045, + 5520, 967, 2083, 6596, 2972, 3187, 3743, 3692, 5305, + 3194, 8270, 1499, 3112, 1629, 7988, 1569, 6475, 4212, + 9528, 5004, 1871, 4699, 4105, 275, 5746, 4518, 8991, + 612, 6367, 1921, 7216, 7507, 1384, 2081, 8222, 1925, + 7535, 1383, 9622, 5576, 8183, 8495, 5340, 2634, 2966, + 1601, 3637, 3327, 1226, 7790, 4507, 184, 2680, 4609, + 3145, 7485, 1595, 8527, 372, 8745, 5290, 9948, 6519, + 4807, 6324, 2180, 9833, 1872, 4100, 7766, 6247, 620, + 5081, 2716, 5159, 2362, 801, 9878, 6460, 9085, 7887, + 3582, 5814, 6271, 8162, 1564, 2970, 6301, 6686, 5968, + 1206, 5937, 9353, 9579, 7815, 8300, 2535, 8236, 8452, + 3825, 4328, 576, 4303, 7075, 155, 3636, 8510, 2678, + 5217, 3095, 9634, 4084, 3672, 3215, 5812, 
8839, 9347, + 309, 7836, 94, 6329, 4746, 3609, 9296, 317, 2851, + 46, 1020, 3989, 3035, 3996, 5929, 1518, 5537, 6352, + 4863, 4746, 9733, 302, 4341, 4989, 4874, 7046, 5778, + 5478, 7701, 7095, 9882, 3225, 2841, 7350, 9700, 4660, + 1226, 2708, 7505, 6041, 8700, 5831, 1161, 7332, 1204, + 9006, 5168, 5623, 3352, 6653, 1866, 2464, 4026, 7957, + 3439, 5787, 6667, 6239, 4086, 1384, 5108, 9156, 1458, + 2719, 6125, 3779, 1062, 9282, 1403, 7331, 1773, 6329, + 9804, 5498, 6916, 3918, 4475, 3365, 6047, 5222, 3317, + 6537, 509, 4180, 521, 4580, 4289, 5484, 7623, 3868, + 4419, 5070, 6414, 6858, 2146, 7503, 7930, 3541, 7363, + 8451, 3603, 8358, 1634, 6410, 6234, 8188, 7703, 3620, + 2497, 3571, 3652, 8113, 179, 7177, 3202, 9610, 1271, + 7092, 4329, 9137, 5792, 7968, 9987, 4895, 24, 7376, + 4933, 4134, 7890, 2095, 8514, 3695, 7529, 3931, 9993, + 6758, 9985, 2616, 3644, 5578, 7035, 179, 108, 2456, + 5883, 9468, 4254, 1026, 9434, 5692, 8912, 5618, 8685, + 7416, 7309, 9289, 2276, 7230, 1611, 6179, 8849, 4226, + 9492, 2313, 5622, 5323, 5301, 5407, 5574, 1863, 9491, + 8356, 8284, 7695, 2173, 8063, 7848, 2350, 8642, 9823, + 4277, 9963, 1573, 8018, 1052, 830, 8774, 1018, 172, + 1853, 1423, 6245, 8795, 9938, 6596, 9610, 862, 6125, + 2315, 4519, 6858, 5217, 8716, 1961, 2531, 1268, 7387, + 6952, 7916, 7660, 6607, 736, 3473, 3680, 805, 4228, + 6961, 6256, 8564, 3586, 5472, 6691, 3409, 2877, 3014, + 1535, 4145, 4790, 81, 5553, 8859, 301, 6125, 1877, + 154, 4300, 2111, 1019, 1624, 3701, 9220, 8623, 2362, + 829, 739, 7388, 915, 693, 7458, 9852, 8907, 5923, + 5956, 4414, 5133, 4520, 6933, 1176, 2370, 2636, 3929, + 4820, 9022, 540, 5654, 5463, 6385, 2215, 1288, 212, + 1094, 7155, 4886, 3573, 3936, 9921, 639, 1616, 3331, + 4763, 9667, 7478, 1120, 5091, 8732, 148, 6718, 9795, + 7275, 5297, 2632, 6832, 1603, 2702, 4697, 9374, 3919, + 6930, 8975, 1858, 1630, 4399, 4416, 2167, 8305, 3364, + 9978, 4771, 8200, 8150, 408, 9331, 1152, 4518, 4395, + 6258, 7579, 7384, 2197, 8376, 1280, 5621, 5003, 9089, + 7213, 8905, 4272, 872, 6014, 5018, 3312, 2473, 8110, + 2480, 4059, 1406, 2485, 4649, 4024, 1652, 5673, 1179, + 7991, 1865, 5736, 2314, 6696, 5611, 1890, 165, 7210, + 251, 3129, 8459, 6624, 5937, 9617, 3100, 1031, 2156, + 7215, 1201, 3895, 8947, 3151, 6187, 5537, 5189, 1415, + 2673, 3741, 3405, 3568, 1498, 8041, 8172, 9828, 2910, + 6307, 2704, 3112, 4789, 5911, 386, 6854, 7793, 6204, + 9467, 5350, 6683, 8303, 7420, 9997, 6967, 7155, 6481, + 8863, 6132, 9098, 3951, 6636, 6217, 281, 2164, 3250, + 1107, 2597, 8420, 140, 9581, 4141, 400, 4846, 3527, + 491, 8715, 153, 7242, 9297, 5207, 161, 4132, 8004, + 200, 9190, 3169, 6815, 3771, 6417, 8492, 4422, 9154, + 9220, 4572, 4257, 9842, 7045, 319, 3874, 3294, 9153, + 8782]), + values=tensor([3.8610e-01, 8.5377e-01, 1.8511e-02, 7.1095e-01, + 8.1683e-01, 1.2439e-01, 3.5277e-01, 4.2816e-01, + 7.4779e-01, 1.4578e-01, 3.9159e-01, 7.8942e-02, + 5.3719e-01, 6.3787e-01, 5.1539e-01, 9.1352e-01, + 9.8974e-01, 5.5580e-01, 9.2020e-01, 5.8107e-01, + 8.2675e-01, 8.9227e-01, 2.6957e-01, 4.1450e-01, + 6.4995e-01, 1.4668e-01, 8.5365e-01, 5.6586e-02, + 6.0310e-01, 1.6855e-01, 7.1945e-01, 4.5680e-01, + 6.4410e-01, 8.7497e-01, 2.0513e-01, 8.1387e-01, + 4.2941e-01, 9.2609e-02, 5.4848e-01, 6.0317e-01, + 4.9057e-02, 7.6647e-01, 8.1898e-01, 3.6237e-01, + 2.6095e-01, 8.1706e-01, 1.8677e-01, 6.6214e-01, + 3.4065e-02, 6.2301e-03, 8.1049e-01, 6.6202e-01, + 9.2002e-01, 9.4613e-01, 4.2801e-01, 2.5149e-01, + 3.7474e-01, 4.6613e-02, 7.6286e-01, 9.5591e-01, + 5.6819e-01, 7.2473e-01, 4.5420e-01, 1.5093e-02, + 7.5841e-01, 
8.6923e-01, 8.2963e-01, 9.1618e-01, + 6.3268e-01, 3.7006e-01, 2.3403e-01, 3.7409e-01, + 9.4862e-01, 3.9135e-01, 3.1036e-01, 4.5561e-01, + 7.5923e-01, 4.9378e-01, 2.4860e-01, 2.0726e-02, + 9.3079e-01, 5.3296e-01, 8.8197e-01, 7.4151e-01, + 1.9571e-01, 6.5860e-01, 4.6394e-01, 1.0891e-02, + 7.8440e-01, 2.8027e-01, 7.0002e-01, 4.8993e-01, + 6.0351e-01, 3.4450e-01, 2.4156e-01, 9.3632e-01, + 4.7883e-01, 5.3698e-01, 6.6794e-01, 6.3277e-01, + 1.6690e-01, 9.6142e-01, 8.1668e-01, 7.2409e-01, + 9.5039e-01, 4.1018e-01, 7.6718e-02, 1.1277e-01, + 7.6653e-01, 5.7939e-02, 8.8132e-01, 2.3071e-01, + 1.1627e-01, 4.4793e-01, 1.8370e-01, 5.1300e-01, + 9.5201e-01, 8.6851e-01, 4.2478e-01, 4.8308e-01, + 3.7728e-01, 8.5856e-01, 4.4730e-01, 3.2080e-01, + 5.4767e-02, 9.7707e-01, 2.2574e-01, 5.5285e-01, + 5.6905e-01, 3.6943e-01, 7.5952e-03, 3.3257e-01, + 7.2686e-01, 4.2285e-01, 6.9822e-01, 4.3587e-01, + 7.2988e-01, 6.7748e-01, 1.8964e-01, 8.7304e-01, + 6.2990e-01, 5.3509e-01, 1.3803e-01, 8.4206e-01, + 4.7388e-01, 9.0321e-02, 3.6124e-01, 6.9940e-01, + 3.5166e-01, 6.4487e-02, 6.4762e-01, 8.7857e-01, + 8.1881e-01, 5.3164e-02, 2.4895e-01, 3.6164e-01, + 1.3464e-01, 2.9827e-01, 7.3571e-01, 3.0674e-01, + 6.0528e-01, 6.7903e-01, 3.1282e-01, 7.3350e-01, + 2.3174e-01, 3.6415e-01, 9.6921e-01, 5.4876e-01, + 5.4685e-01, 2.5853e-01, 9.7955e-01, 9.8906e-01, + 2.3474e-01, 7.2967e-01, 9.9754e-01, 4.1724e-01, + 6.2470e-01, 1.6830e-01, 9.6734e-01, 3.9468e-01, + 8.9653e-01, 4.4589e-01, 3.6177e-01, 3.5755e-01, + 4.1151e-01, 5.2403e-04, 8.0776e-01, 2.9056e-01, + 2.7543e-01, 9.3152e-01, 7.3236e-01, 6.5313e-01, + 1.0061e-01, 6.2182e-01, 6.9045e-01, 5.5055e-01, + 1.5520e-02, 1.7346e-02, 3.4910e-03, 6.4128e-01, + 7.5031e-01, 7.5949e-01, 9.3468e-02, 7.6496e-01, + 4.8529e-01, 6.2334e-01, 9.4296e-01, 3.5853e-01, + 7.2756e-01, 8.7206e-01, 5.2816e-01, 4.4438e-02, + 5.6329e-01, 2.2828e-01, 8.4849e-01, 9.4579e-01, + 8.4626e-01, 6.7086e-01, 2.8491e-01, 6.4624e-01, + 8.7513e-01, 2.6528e-02, 4.4953e-01, 9.1746e-01, + 7.8027e-01, 2.2730e-01, 7.9321e-01, 9.8797e-01, + 2.2814e-02, 8.7637e-01, 3.8994e-01, 3.2441e-01, + 6.8336e-01, 7.9926e-01, 3.8718e-01, 3.4786e-03, + 1.1534e-01, 7.4916e-01, 2.7534e-01, 9.8439e-02, + 9.1131e-02, 9.1836e-01, 6.1330e-01, 1.3102e-01, + 5.4847e-03, 8.6561e-01, 5.7958e-01, 8.0518e-01, + 2.8465e-01, 7.7917e-01, 4.3560e-01, 5.6084e-01, + 7.5286e-01, 6.3866e-01, 7.2910e-01, 3.0899e-01, + 9.1412e-01, 2.6555e-03, 2.3826e-01, 5.6212e-01, + 9.9504e-01, 5.0756e-01, 9.6585e-01, 8.8996e-01, + 4.2407e-01, 7.6316e-01, 5.6013e-01, 6.6055e-01, + 8.3748e-01, 6.2385e-01, 1.5983e-01, 9.3548e-01, + 6.6605e-01, 9.8112e-01, 5.8093e-01, 5.0434e-02, + 4.9205e-01, 5.1871e-01, 1.2512e-01, 5.0351e-01, + 1.8898e-01, 9.4631e-01, 4.2293e-02, 3.1088e-01, + 3.4520e-01, 5.0879e-01, 3.5372e-01, 7.2930e-01, + 5.2299e-01, 7.0660e-01, 7.7671e-01, 8.5673e-01, + 5.0209e-01, 7.6491e-01, 1.9923e-01, 1.5535e-01, + 8.8992e-01, 6.8837e-01, 2.4943e-02, 3.0610e-01, + 4.3221e-01, 4.6599e-01, 1.3047e-01, 4.3773e-01, + 3.6251e-01, 4.6205e-01, 1.0793e-01, 3.0838e-01, + 6.4493e-02, 5.2499e-01, 2.5719e-01, 7.0242e-01, + 2.3217e-01, 2.9568e-01, 8.8686e-01, 6.9929e-01, + 5.0218e-01, 6.2510e-01, 5.9748e-01, 8.6062e-01, + 9.3881e-01, 5.5598e-01, 8.9438e-01, 2.0280e-01, + 8.2777e-01, 9.5075e-01, 4.8198e-01, 6.2637e-01, + 6.3249e-01, 8.9193e-01, 9.1039e-01, 6.4972e-02, + 8.9820e-01, 5.2690e-01, 5.8321e-01, 7.9388e-01, + 7.0610e-01, 2.6038e-01, 2.6840e-01, 7.2624e-01, + 7.5041e-01, 4.5002e-01, 8.1630e-01, 5.7319e-02, + 9.1830e-01, 5.0848e-01, 8.3542e-01, 7.1332e-01, + 7.1294e-01, 
9.2191e-01, 5.1896e-01, 1.2938e-01, + 9.5730e-01, 3.2242e-02, 6.5653e-01, 8.2194e-02, + 8.7798e-01, 8.3940e-01, 2.2781e-01, 5.9478e-01, + 7.2513e-01, 5.8600e-01, 5.5875e-01, 5.7178e-01, + 3.3916e-01, 9.8096e-01, 7.7440e-01, 8.1663e-01, + 4.5610e-01, 6.9842e-01, 8.7732e-01, 6.5963e-01, + 4.5872e-01, 1.5583e-01, 6.3053e-01, 8.6648e-01, + 3.9767e-01, 7.5734e-01, 7.1577e-01, 1.4568e-01, + 4.0268e-02, 7.2023e-01, 4.8936e-01, 9.0829e-01, + 5.9611e-01, 6.9061e-01, 8.1805e-01, 8.9041e-01, + 3.1423e-01, 1.5868e-01, 6.1366e-01, 9.2687e-01, + 8.9448e-01, 1.4864e-01, 7.1271e-01, 4.6104e-01, + 7.6188e-03, 2.2407e-01, 9.2907e-01, 1.1651e-01, + 6.1987e-01, 2.4907e-01, 9.6456e-01, 8.0986e-01, + 6.9956e-01, 9.0349e-01, 1.2001e-01, 3.6328e-01, + 1.6739e-01, 7.2792e-01, 8.4913e-01, 1.3978e-01, + 3.6502e-01, 8.3758e-03, 8.5686e-01, 1.3986e-01, + 8.7100e-01, 4.5934e-01, 7.1594e-01, 6.7867e-01, + 8.6897e-01, 6.6908e-01, 8.4732e-01, 6.6558e-01, + 5.1856e-01, 4.0954e-01, 6.9826e-01, 4.7976e-01, + 5.2070e-01, 1.3197e-01, 1.4248e-01, 7.8979e-01, + 3.8558e-01, 1.1534e-01, 7.6786e-01, 4.1531e-01, + 9.6341e-01, 3.9001e-01, 8.2289e-02, 8.0837e-01, + 7.1545e-01, 2.5727e-01, 7.6374e-01, 8.4542e-01, + 8.5587e-02, 5.2770e-01, 6.5768e-01, 8.0251e-01, + 2.2113e-01, 9.9737e-01, 6.1830e-01, 3.1632e-01, + 1.2613e-01, 4.1147e-01, 2.1405e-01, 1.2762e-01, + 1.1111e-01, 5.4527e-01, 1.6500e-01, 4.2444e-01, + 5.8801e-01, 1.6257e-01, 6.6900e-01, 7.6794e-01, + 4.0036e-01, 5.3006e-02, 3.5051e-02, 9.5344e-01, + 4.8371e-01, 4.9565e-01, 9.0943e-01, 2.3101e-01, + 2.0090e-01, 4.8225e-01, 3.0712e-02, 7.5834e-01, + 6.7334e-01, 1.1353e-01, 3.9036e-02, 6.9230e-01, + 9.0893e-02, 7.5872e-01, 8.7894e-01, 5.9393e-01, + 2.6585e-01, 1.6676e-01, 4.8532e-01, 5.9465e-01, + 4.4222e-01, 2.0170e-01, 9.2328e-01, 6.8154e-01, + 8.4402e-01, 7.0562e-01, 2.6520e-01, 8.8657e-01, + 5.4459e-01, 7.8087e-01, 2.0055e-01, 8.1105e-01, + 3.4705e-01, 7.8759e-01, 8.9155e-01, 6.5331e-01, + 6.7577e-01, 1.4852e-01, 9.2796e-01, 9.0692e-01, + 6.3434e-01, 3.1111e-01, 5.7140e-01, 9.0780e-01, + 4.6895e-01, 2.0287e-01, 3.3299e-01, 7.6187e-01, + 5.7104e-01, 2.4415e-01, 7.6380e-01, 1.7908e-01, + 8.0681e-01, 1.2670e-02, 1.6363e-01, 2.2970e-01, + 6.1572e-01, 5.6717e-01, 4.6578e-01, 8.6430e-01, + 2.4153e-01, 9.1603e-01, 5.3253e-01, 5.0666e-03, + 9.1421e-02, 5.1296e-02, 1.0304e-01, 9.9405e-01, + 9.5872e-01, 1.3353e-01, 3.5694e-01, 5.4543e-01, + 6.2537e-01, 2.8650e-01, 7.9959e-01, 8.2562e-01, + 2.7264e-01, 5.2583e-01, 6.3161e-01, 1.9397e-02, + 6.9587e-01, 3.3762e-01, 4.4200e-01, 1.8451e-01, + 3.6916e-01, 2.8108e-02, 7.7783e-02, 2.3773e-01, + 9.0051e-01, 2.8460e-01, 8.1089e-01, 9.4479e-01, + 9.7155e-01, 4.0098e-01, 6.8592e-01, 7.5205e-01, + 2.9211e-02, 5.6538e-02, 9.3352e-01, 7.7910e-03, + 8.0618e-01, 4.0845e-01, 2.0409e-02, 3.5837e-01, + 1.8328e-01, 3.9909e-01, 4.1918e-01, 4.8170e-01, + 9.9960e-01, 5.9804e-01, 2.0861e-01, 1.0751e-02, + 8.0381e-01, 5.3288e-01, 1.3326e-01, 8.6459e-01, + 5.5114e-01, 2.9667e-01, 7.3363e-01, 3.0714e-01, + 9.1825e-01, 7.6928e-02, 2.7699e-01, 6.2910e-01, + 3.6845e-01, 3.7640e-01, 6.0170e-01, 9.6457e-01, + 4.3097e-02, 6.8313e-01, 1.2273e-01, 5.5638e-01, + 9.3322e-01, 2.6998e-02, 6.8256e-01, 2.8860e-01, + 3.2874e-01, 6.0891e-02, 4.4244e-02, 5.2327e-01, + 7.6976e-02, 6.1998e-02, 3.1748e-01, 4.9074e-01, + 3.2159e-01, 1.1840e-01, 3.3449e-01, 9.8168e-01, + 2.0415e-01, 5.4059e-01, 6.0272e-01, 5.1307e-01, + 1.8527e-01, 4.1767e-01, 7.1985e-01, 5.0042e-01, + 8.5363e-01, 4.6183e-01, 7.7202e-01, 7.8648e-01, + 6.9129e-01, 9.0911e-01, 3.2770e-01, 5.1269e-01, + 6.9536e-01, 
3.6706e-01, 9.1723e-01, 3.9701e-01, + 6.7993e-01, 3.2314e-01, 6.8715e-02, 8.2246e-02, + 2.8073e-01, 1.1831e-01, 6.2561e-01, 3.6529e-01, + 3.3884e-01, 3.6135e-01, 5.4802e-01, 6.7573e-01, + 5.0165e-01, 8.1397e-01, 8.5096e-01, 1.1027e-01, + 6.5757e-01, 6.0696e-01, 7.2836e-01, 5.8805e-01, + 9.2758e-01, 2.4110e-01, 3.3509e-01, 7.7407e-01, + 9.2691e-01, 5.3329e-01, 8.2629e-01, 7.0255e-01, + 7.7681e-01, 6.8983e-01, 2.5614e-02, 6.3758e-01, + 7.2682e-01, 8.5240e-01, 5.2949e-01, 7.1705e-01, + 8.5069e-01, 4.2186e-01, 3.0347e-01, 9.0121e-01, + 8.2273e-01, 4.1961e-01, 5.9519e-01, 7.2062e-01, + 5.8577e-01, 8.5471e-01, 4.2713e-01, 6.4293e-02, + 6.3663e-01, 6.6423e-01, 2.2498e-01, 5.1988e-01, + 6.5365e-01, 9.3245e-01, 1.7852e-01, 8.1156e-01, + 6.7345e-02, 2.4846e-01, 9.9602e-01, 9.1934e-02, + 4.1853e-01, 3.7241e-01, 9.4301e-01, 1.7991e-01, + 5.9621e-01, 6.9136e-01, 5.2793e-01, 1.9118e-01, + 1.9793e-01, 6.0317e-01, 9.6882e-01, 1.9873e-01, + 8.3014e-01, 7.9369e-01, 2.1870e-01, 1.9086e-01, + 6.9388e-01, 7.9719e-01, 4.1397e-01, 5.7192e-01, + 4.9351e-01, 2.4139e-01, 5.9855e-01, 7.0341e-01, + 1.4905e-02, 2.4436e-01, 9.2685e-01, 4.5334e-01, + 7.4665e-01, 5.1727e-01, 8.2996e-01, 2.0033e-01, + 9.2888e-01, 5.0175e-01, 5.4601e-01, 6.1251e-01, + 5.4993e-01, 9.6161e-01, 1.2794e-01, 3.2116e-01, + 8.4421e-01, 7.0886e-01, 3.5791e-01, 6.6184e-01, + 1.8923e-02, 2.5903e-01, 8.1313e-01, 4.1493e-01, + 8.8826e-01, 5.0309e-01, 5.5726e-01, 7.3682e-01, + 1.2897e-01, 2.6581e-01, 4.6832e-01, 7.7950e-01, + 2.3775e-01, 6.2503e-01, 2.5789e-01, 4.4719e-01, + 3.2605e-01, 1.6738e-01, 1.3471e-01, 5.8312e-01, + 2.6249e-01, 9.6756e-01, 4.5744e-01, 2.1122e-01, + 5.6749e-01, 1.6200e-01, 2.4114e-01, 3.4166e-01, + 3.6612e-01, 1.0509e-02, 5.1611e-01, 9.0580e-01, + 3.3763e-01, 9.2852e-01, 1.7939e-02, 8.7145e-01, + 8.8206e-01, 5.6772e-01, 3.3696e-01, 3.5279e-01, + 5.1465e-01, 6.8700e-01, 3.4390e-01, 6.7643e-01, + 6.6427e-01, 3.5840e-01, 1.5537e-02, 1.8473e-01, + 4.9930e-01, 3.1875e-01, 4.9058e-01, 9.2293e-01, + 5.6647e-01, 8.1066e-01, 5.2922e-01, 6.1174e-01, + 9.4076e-01, 4.0053e-01, 5.6874e-01, 9.8550e-01, + 7.6898e-01, 4.4491e-01, 9.4084e-01, 7.9641e-01, + 1.9048e-01, 6.7339e-01, 2.7576e-01, 3.5103e-01, + 8.8099e-01, 2.7326e-01, 8.0563e-01, 9.3555e-01, + 3.1474e-01, 4.7528e-01, 2.1165e-01, 5.4853e-01, + 6.8904e-01, 6.7265e-01, 1.7309e-02, 1.6399e-01, + 7.6108e-01, 2.3543e-01, 2.6175e-02, 9.0095e-01, + 6.2691e-01, 2.8392e-01, 7.7971e-01, 8.3351e-01, + 4.8417e-01, 5.2438e-01, 8.9569e-01, 3.2099e-01, + 1.8007e-01, 6.4459e-01, 3.2528e-02, 8.0234e-01, + 4.2430e-01, 2.4034e-01, 6.8400e-01, 6.6685e-01, + 8.1383e-01, 3.3149e-01, 3.7153e-01, 4.3221e-01, + 5.1683e-01, 4.8905e-01, 5.5635e-01, 4.2207e-01, + 8.0436e-01, 1.6030e-01, 4.4301e-01, 8.0845e-01, + 8.7237e-01, 2.8558e-01, 6.1831e-01, 7.4397e-01, + 6.6461e-01, 7.2410e-01, 9.6853e-01, 3.9492e-01, + 3.8188e-01, 5.0831e-01, 2.1636e-01, 3.5060e-01, + 2.3646e-01, 5.6253e-01, 6.9733e-01, 2.7720e-01, + 5.3757e-01, 8.8048e-01, 7.1650e-01, 9.6798e-01, + 7.7762e-01, 5.5153e-01, 6.1393e-01, 4.9133e-01, + 6.8381e-01, 6.6995e-02, 2.2135e-01, 5.2027e-01, + 2.3479e-01, 9.4917e-01, 4.1574e-01, 8.1023e-01, + 2.7657e-01, 4.8275e-01, 6.6333e-01, 6.5800e-01, + 9.8084e-01, 7.7307e-01, 2.9222e-02, 7.2481e-01, + 1.9828e-02, 2.7554e-01, 9.6906e-01, 1.0409e-01, + 6.4309e-01, 9.2824e-01, 7.2999e-01, 5.6517e-01, + 5.0434e-01, 4.7459e-01, 7.8320e-01, 3.3435e-01, + 2.4506e-01, 9.0578e-01, 5.1393e-01, 6.3417e-01, + 9.1206e-01, 1.7725e-01, 7.9767e-01, 8.8333e-01, + 7.5073e-01, 5.1574e-01, 5.4984e-01, 7.6118e-01, + 2.1850e-01, 
1.6803e-01, 4.0066e-01, 8.0536e-02, + 8.9116e-01, 5.0132e-01, 9.0927e-01, 8.7983e-01, + 1.8210e-01, 9.5577e-01, 5.7504e-01, 6.1325e-01, + 5.4429e-01, 2.3767e-01, 3.5357e-01, 7.5911e-01, + 8.6730e-01, 4.9418e-02, 1.7969e-01, 8.5584e-01, + 3.1254e-01, 9.5390e-01, 3.9608e-01, 7.2622e-01, + 3.6906e-01, 5.6365e-01, 4.7089e-01, 9.1569e-02, + 7.5723e-01, 6.6632e-01, 1.8609e-01, 1.0110e-01, + 5.6893e-01, 9.3812e-01, 9.0428e-01, 1.8241e-01, + 1.4446e-01, 5.7043e-01, 8.7504e-03, 9.7478e-01, + 5.0913e-01, 2.0822e-01, 1.6866e-01, 8.7459e-01, + 7.2484e-02, 2.6479e-01, 6.7042e-01, 7.2394e-01, + 1.3267e-01, 5.8664e-01, 6.4844e-01, 1.5520e-01, + 4.8434e-01, 9.0714e-02, 9.9916e-02, 4.3195e-02, + 4.7733e-01, 6.8749e-01, 8.3543e-01, 4.4062e-01, + 5.9982e-01, 2.5620e-01, 3.7227e-01, 6.7200e-01, + 7.5098e-01, 9.3886e-01, 8.9364e-01, 7.4407e-02, + 1.5111e-01, 3.7773e-01, 3.3716e-01, 8.3074e-01, + 6.6617e-01, 1.1146e-01, 5.2723e-02, 8.9229e-01, + 9.9407e-01, 7.6735e-01, 1.3311e-01, 5.2952e-01, + 7.5053e-02, 7.9242e-01, 2.9142e-01, 5.4645e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.6534, 0.1395, 0.5723, ..., 0.2846, 0.6527, 0.4839]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 1.9514954090118408 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '285101', '-ss', '10000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.263301372528076} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([7666, 3284, 1699, 7007, 1865, 642, 2301, 7889, 3112, + 4841, 4554, 2643, 3511, 9405, 9949, 4868, 7053, 8979, + 7155, 3080, 3345, 5376, 5682, 4591, 5457, 4012, 9907, + 4608, 8275, 6785, 214, 8256, 6706, 6096, 396, 213, + 9694, 8210, 4776, 4951, 9163, 2885, 6391, 5210, 5362, + 2964, 8727, 2087, 6653, 557, 2807, 404, 7235, 7348, + 5591, 9937, 5151, 9696, 3619, 4512, 6156, 4095, 5236, + 6999, 8669, 5309, 6087, 4118, 2455, 1780, 9742, 3315, + 7998, 8891, 4943, 9849, 3316, 2894, 3974, 2248, 8409, + 9978, 2602, 229, 3004, 5173, 479, 6004, 6080, 4963, + 6862, 9389, 8433, 2783, 2848, 968, 8951, 3760, 8407, + 7534, 2609, 3709, 9648, 2412, 7425, 8619, 7347, 9064, + 4963, 1354, 5818, 7386, 7369, 2381, 5800, 6012, 3532, + 214, 9362, 4731, 8238, 4276, 2221, 3400, 540, 4177, + 2164, 1191, 3257, 9047, 364, 2899, 2221, 8232, 4361, + 1480, 3950, 9412, 570, 6740, 5525, 7995, 856, 1549, + 3242, 5640, 6435, 1568, 3375, 4060, 6793, 1686, 9865, + 9598, 3307, 6572, 3171, 2008, 7637, 4922, 386, 6366, + 7528, 2850, 6318, 2345, 368, 5786, 9710, 7772, 1761, + 6140, 301, 570, 8828, 8741, 9119, 4206, 3780, 4780, + 707, 3068, 8924, 3477, 4065, 1036, 560, 3678, 9660, + 4487, 8282, 6249, 7109, 3781, 7458, 2555, 3159, 5856, + 7260, 2615, 6118, 4364, 9114, 9280, 8926, 3395, 7988, + 2698, 8725, 6447, 7235, 3203, 5300, 4394, 3058, 7417, + 5517, 1817, 6921, 9211, 8306, 7080, 8460, 7810, 7848, + 4752, 270, 8368, 8576, 7374, 8967, 2824, 9004, 8383, + 4024, 4694, 919, 6859, 4607, 3536, 5606, 5137, 6430, + 1226, 8149, 7263, 7513, 1278, 5694, 380, 1738, 433, + 9251, 916, 5723, 4516, 8117, 800, 5078, 9205, 3076, + 2156, 1929, 248, 8896, 6899, 3847, 2048, 8775, 3158, + 9717, 4304, 9305, 9027, 8444, 7211, 3882, 2571, 7542, + 1042, 2864, 4697, 4933, 9597, 2106, 1874, 1663, 8974, + 5845, 9264, 40, 4452, 2566, 3363, 7670, 7586, 7479, + 5981, 1928, 2498, 8107, 6502, 5725, 1280, 4382, 7811, + 4150, 2407, 5646, 7384, 6368, 9330, 4277, 4360, 9293, + 8848, 3141, 3719, 4630, 418, 68, 3077, 304, 5534, + 3069, 4046, 9645, 797, 7332, 2456, 818, 6040, 233, + 1356, 6755, 4249, 1643, 5939, 6192, 5023, 2182, 3038, + 615, 962, 6584, 6465, 4700, 5188, 378, 2580, 7736, + 8469, 7964, 4215, 5350, 5035, 723, 2366, 9255, 3296, + 2487, 9749, 9356, 2120, 2719, 1534, 5352, 4142, 2840, + 3874, 3309, 2282, 4165, 8221, 3151, 7532, 9093, 1914, + 875, 3267, 7484, 5334, 246, 69, 1760, 3158, 9485, + 2968, 8655, 8484, 1925, 6269, 8467, 4053, 8242, 2491, + 9174, 9141, 8207, 3765, 7176, 8536, 4547, 5155, 9600, + 59, 7491, 3224, 3860, 3405, 5155, 5872, 5087, 1030, + 5823, 67, 9402, 2052, 3137, 8203, 5381, 5014, 2445, + 2598, 6064, 583, 6264, 8514, 2132, 8834, 5724, 5040, + 3049, 7546, 5472, 9227, 9155, 4856, 8701, 9805, 8423, + 5471, 9572, 1489, 1146, 2738, 6131, 4467, 3775, 7623, + 3128, 1626, 4245, 5851, 8311, 5249, 9163, 5646, 9700, + 5912, 1064, 3427, 9742, 4822, 1609, 1247, 3225, 1349, + 1112, 7136, 4620, 9398, 180, 852, 8483, 99, 5880, + 6936, 3985, 7199, 43, 619, 1433, 378, 5613, 8778, + 9714, 6327, 2214, 4270, 2616, 7836, 6036, 4038, 1349, + 3609, 500, 8725, 5242, 3213, 7450, 9547, 6518, 3900, + 9820, 6085, 3210, 7810, 5855, 7609, 1890, 2354, 6091, + 8514, 7897, 3931, 2880, 6281, 3385, 5921, 9952, 5094, + 8855, 172, 3302, 3010, 4642, 2734, 9166, 8421, 2234, + 8565, 528, 8176, 6482, 994, 3099, 9724, 6379, 405, + 8321, 239, 6594, 996, 3440, 8976, 435, 2375, 6431, + 3253, 1630, 7853, 2956, 8895, 986, 6476, 1010, 8489, + 8021, 
872, 1473, 1100, 5352, 9179, 2417, 9244, 9167, + 7160, 3376, 6327, 3920, 5098, 7118, 4103, 8332, 3090, + 9060, 6774, 7505, 4316, 2580, 8806, 3234, 5576, 1591, + 9446, 2876, 3861, 9007, 4047, 9387, 1828, 9480, 300, + 5810, 1681, 7603, 9696, 3967, 205, 4720, 6724, 1706, + 1179, 7731, 9912, 9119, 3539, 3641, 2691, 1738, 2069, + 64, 5901, 6907, 7329, 5038, 8087, 2346, 1417, 9802, + 2245, 9442, 3124, 6856, 7605, 3350, 9096, 9607, 6580, + 7026, 4159, 2283, 388, 6926, 8970, 9000, 5902, 3616, + 8251, 2022, 1387, 2040, 3499, 7352, 9723, 4720, 4072, + 3030, 8642, 1561, 3550, 728, 3483, 9957, 2852, 9769, + 530, 8950, 8791, 1868, 3201, 4039, 7668, 6271, 5458, + 8517, 9781, 4046, 3225, 9954, 206, 6894, 7597, 7497, + 6306, 3298, 6872, 2066, 7591, 1890, 8097, 1280, 7096, + 8226, 1359, 216, 3321, 8096, 7997, 7290, 8655, 8538, + 5466, 3483, 6990, 5527, 3778, 5009, 8586, 5007, 7530, + 2615, 6501, 9092, 3603, 3220, 7154, 9702, 5045, 8352, + 2617, 771, 5086, 2751, 4678, 9150, 7949, 9854, 737, + 4795, 2071, 9389, 7313, 9799, 3943, 6232, 1410, 8989, + 7328, 6562, 7366, 5222, 441, 8345, 6712, 3926, 515, + 5956, 5254, 6129, 5268, 1272, 9590, 5597, 1532, 744, + 7243, 1823, 6966, 9821, 5101, 1854, 836, 8195, 8533, + 9772, 6888, 999, 42, 8452, 7406, 756, 2186, 7921, + 7527, 6586, 9178, 9190, 6661, 1985, 2124, 5708, 2885, + 5344, 8420, 6388, 4916, 5751, 5447, 1612, 7115, 4012, + 4211, 4491, 2685, 8469, 3264, 3933, 8307, 2069, 5721, + 6785, 7552, 3662, 9294, 3272, 4972, 4038, 8502, 7978, + 730, 6090, 2443, 3573, 9783, 6945, 8456, 7714, 2278, + 1207, 9072, 1346, 555, 8623, 6821, 2442, 455, 1202, + 8844, 3296, 1101, 8043, 8852, 2122, 3173, 1731, 9938, + 5593, 93, 8956, 6237, 8545, 5229, 9239, 8804, 3713, + 4281, 5382, 8333, 8801, 93, 2212, 5217, 7239, 9312, + 2195, 1969, 8659, 674, 2456, 1219, 684, 5633, 7633, + 1822, 5479, 5138, 2448, 4428, 1533, 6135, 585, 2626, + 1535, 2390, 2292, 3199, 9797, 3083, 7782, 3749, 5972, + 5041, 873, 1762, 6907, 2439, 176, 7886, 8698, 5610, + 4063, 7434, 7641, 5504, 8492, 8226, 4607, 3891, 971, + 446, 5185, 8037, 5578, 2205, 6200, 5210, 7918, 8694, + 8369, 5656, 8140, 720, 6980, 4361, 6048, 5637, 1525, + 3287, 4786, 6431, 419, 584, 7972, 6800, 6611, 3925, + 1361, 345, 5934, 221, 3976, 5263, 273, 1124, 6638, + 9587, 1234, 7547, 9811, 1651, 2418, 9523, 5719, 2357, + 9754, 4924, 9608, 7139, 1769, 2001, 6705, 554, 4744, + 2302, 8692, 6947, 8654, 8683, 9773, 4469, 8661, 3479, + 222, 2813, 8116, 1105, 4667, 9744, 3524, 7923, 7688, + 5748, 4643, 3806, 589, 8275, 7300, 9905, 8566, 3005, + 8277, 3362, 9117, 4507, 4746, 196, 1702, 5992, 1090, + 7587, 5382, 7431, 8949, 1611, 5830, 5183, 8125, 3201, + 6255, 9408, 1748, 1554, 1906, 729, 8466, 4725, 4398, + 3941]), + values=tensor([0.6959, 0.7960, 0.0781, 0.6007, 0.3460, 0.7293, 0.2445, + 0.3377, 0.4176, 0.3625, 0.7235, 0.1442, 0.0335, 0.4983, + 0.5500, 0.1056, 0.9940, 0.8727, 0.8069, 0.4130, 0.1455, + 0.6420, 0.2494, 0.3495, 0.7133, 0.4395, 0.2694, 0.2841, + 0.5549, 0.0490, 0.7986, 0.5242, 0.3692, 0.2460, 0.8859, + 0.0105, 0.6123, 0.7972, 0.2878, 0.9495, 0.0416, 0.7368, + 0.5235, 0.2640, 0.4232, 0.7400, 0.4660, 0.1817, 0.7149, + 0.8459, 0.3355, 0.5095, 0.8904, 0.2582, 0.0507, 0.5007, + 0.7876, 0.1015, 0.4881, 0.3695, 0.9348, 0.8654, 0.8541, + 0.1701, 0.5411, 0.8559, 0.0858, 0.6724, 0.4755, 0.9937, + 0.3688, 0.4551, 0.1705, 0.5343, 0.7652, 0.5023, 0.4761, + 0.8553, 0.5042, 0.1378, 0.3402, 0.0214, 0.3317, 0.7760, + 0.6136, 0.0754, 0.7266, 0.3108, 0.1557, 0.5342, 0.2368, + 0.6255, 0.6338, 0.2103, 0.3547, 0.7898, 0.3532, 0.6879, + 0.5756, 0.5446, 0.6549, 
0.1833, 0.3545, 0.3075, 0.8999, + 0.0866, 0.3178, 0.1771, 0.9904, 0.5584, 0.7474, 0.3985, + 0.7736, 0.7195, 0.1836, 0.1731, 0.6012, 0.6546, 0.5119, + 0.7668, 0.7804, 0.2026, 0.4119, 0.1043, 0.5762, 0.3249, + 0.0350, 0.9469, 0.9597, 0.3663, 0.4396, 0.3526, 0.6112, + 0.2028, 0.6398, 0.7385, 0.7867, 0.3703, 0.9189, 0.5053, + 0.2346, 0.3574, 0.9106, 0.6490, 0.0571, 0.7581, 0.0037, + 0.2000, 0.0078, 0.3345, 0.5987, 0.1644, 0.3401, 0.1628, + 0.6190, 0.0170, 0.3230, 0.7145, 0.0065, 0.2218, 0.4568, + 0.5352, 0.7506, 0.8970, 0.0769, 0.7770, 0.0375, 0.5961, + 0.1851, 0.1482, 0.0403, 0.1972, 0.9194, 0.1678, 0.8901, + 0.8766, 0.0092, 0.2714, 0.5465, 0.2373, 0.1403, 0.0755, + 0.7137, 0.4544, 0.3006, 0.0421, 0.8286, 0.0333, 0.4192, + 0.1514, 0.5585, 0.9974, 0.8611, 0.3646, 0.5144, 0.1233, + 0.0881, 0.9866, 0.2420, 0.9365, 0.3756, 0.2095, 0.4280, + 0.2119, 0.0313, 0.7899, 0.6265, 0.8099, 0.3894, 0.1829, + 0.4932, 0.0535, 0.2941, 0.7543, 0.3779, 0.6784, 0.2573, + 0.6186, 0.1475, 0.2682, 0.6713, 0.2567, 0.7592, 0.7252, + 0.8305, 0.0652, 0.2366, 0.0143, 0.5795, 0.7446, 0.7182, + 0.2430, 0.3788, 0.2324, 0.6863, 0.2975, 0.5231, 0.1994, + 0.6772, 0.0117, 0.2661, 0.4983, 0.2255, 0.1217, 0.5970, + 0.4181, 0.1888, 0.5662, 0.8459, 0.4818, 0.5664, 0.7551, + 0.3425, 0.7495, 0.5635, 0.3037, 0.9253, 0.1746, 0.0949, + 0.1444, 0.1382, 0.6207, 0.2484, 0.6872, 0.6011, 0.8909, + 0.5037, 0.7646, 0.8169, 0.6714, 0.6011, 0.0228, 0.7584, + 0.5712, 0.9573, 0.5293, 0.0187, 0.0769, 0.3750, 0.5318, + 0.5469, 0.0475, 0.6832, 0.1012, 0.3177, 0.3205, 0.9912, + 0.7967, 0.3297, 0.8600, 0.8649, 0.1408, 0.1008, 0.6301, + 0.4827, 0.2363, 0.4056, 0.0155, 0.7202, 0.0029, 0.5561, + 0.7290, 0.5676, 0.5965, 0.5363, 0.0921, 0.1850, 0.8448, + 0.7451, 0.5778, 0.2613, 0.9353, 0.9780, 0.5328, 0.8762, + 0.7506, 0.6058, 0.6343, 0.8670, 0.6461, 0.0050, 0.0322, + 0.7176, 0.9509, 0.0141, 0.2849, 0.4851, 0.7572, 0.2430, + 0.0132, 0.9420, 0.2551, 0.1494, 0.1934, 0.9914, 0.0389, + 0.5433, 0.7196, 0.7758, 0.0818, 0.2780, 0.1470, 0.9575, + 0.0039, 0.7218, 0.1816, 0.4767, 0.9657, 0.5165, 0.5374, + 0.9692, 0.8055, 0.2454, 0.3490, 0.9352, 0.0487, 0.6894, + 0.9618, 0.0152, 0.3404, 0.6613, 0.6038, 0.4468, 0.2839, + 0.1984, 0.0949, 0.4107, 0.3440, 0.6650, 0.9782, 0.7647, + 0.3472, 0.8008, 0.6515, 0.2253, 0.1516, 0.0721, 0.0532, + 0.1489, 0.2701, 0.4286, 0.6556, 0.2800, 0.4837, 0.1700, + 0.4031, 0.7582, 0.2764, 0.1452, 0.4317, 0.8870, 0.8904, + 0.2127, 0.1224, 0.0877, 0.6062, 0.7479, 0.8289, 0.8979, + 0.5176, 0.3928, 0.1454, 0.7837, 0.1713, 0.5346, 0.7913, + 0.1056, 0.5996, 0.7813, 0.5215, 0.9208, 0.9514, 0.2525, + 0.4305, 0.2552, 0.7349, 0.3762, 0.7076, 0.1290, 0.1830, + 0.8907, 0.4787, 0.2419, 0.4726, 0.6765, 0.2643, 0.8126, + 0.8757, 0.7952, 0.1205, 0.1298, 0.3806, 0.3136, 0.8469, + 0.0054, 0.5516, 0.9247, 0.4707, 0.9515, 0.8050, 0.4300, + 0.7760, 0.9682, 0.9549, 0.3683, 0.9720, 0.3750, 0.1016, + 0.1056, 0.6301, 0.1820, 0.6373, 0.4706, 0.8360, 0.8820, + 0.8684, 0.5608, 0.4950, 0.7219, 0.7477, 0.3138, 0.4091, + 0.3672, 0.6820, 0.1598, 0.4262, 0.8790, 0.5580, 0.7601, + 0.6733, 0.7879, 0.3866, 0.8117, 0.6549, 0.4184, 0.9377, + 0.0728, 0.7220, 0.5621, 0.8045, 0.9104, 0.1723, 0.5263, + 0.7555, 0.7751, 0.2045, 0.2067, 0.3395, 0.0619, 0.6870, + 0.6295, 0.5145, 0.4071, 0.2642, 0.1755, 0.1193, 0.9114, + 0.5765, 0.1704, 0.3481, 0.6444, 0.1809, 0.0182, 0.0117, + 0.5466, 0.1851, 0.8523, 0.0951, 0.8918, 0.4279, 0.3555, + 0.1480, 0.9159, 0.1610, 0.2426, 0.6025, 0.7344, 0.3367, + 0.2722, 0.6946, 0.3251, 0.3742, 0.1985, 0.3410, 0.7549, + 0.7152, 0.6307, 0.2442, 0.6188, 0.4939, 
0.3315, 0.6606, + 0.3168, 0.5301, 0.3465, 0.9986, 0.1887, 0.1622, 0.9650, + 0.5986, 0.8953, 0.8288, 0.8866, 0.2675, 0.7017, 0.2466, + 0.8798, 0.3267, 0.8028, 0.3919, 0.3126, 0.5504, 0.3778, + 0.7414, 0.0821, 0.0301, 0.4972, 0.4197, 0.6222, 0.4811, + 0.0587, 0.6802, 0.1914, 0.3226, 0.5795, 0.4956, 0.3432, + 0.0689, 0.7911, 0.3203, 0.5275, 0.7748, 0.2775, 0.7469, + 0.8369, 0.2113, 0.0507, 0.9763, 0.3517, 0.6939, 0.7093, + 0.5765, 0.2753, 0.5658, 0.3861, 0.2320, 0.5725, 0.7333, + 0.9639, 0.9626, 0.9957, 0.2482, 0.2599, 0.2589, 0.0011, + 0.5195, 0.4786, 0.6863, 0.9629, 0.0159, 0.8498, 0.9892, + 0.4985, 0.5712, 0.8487, 0.3862, 0.6221, 0.2142, 0.4490, + 0.8922, 0.7035, 0.5839, 0.3975, 0.4180, 0.6609, 0.9340, + 0.0330, 0.6173, 0.9389, 0.6227, 0.3648, 0.5424, 0.3871, + 0.5239, 0.6250, 0.8573, 0.0643, 0.3775, 0.5397, 0.5294, + 0.8842, 0.4971, 0.4772, 0.0587, 0.4167, 0.3990, 0.8149, + 0.3392, 0.7700, 0.3194, 0.6137, 0.4234, 0.6017, 0.1367, + 0.5706, 0.9656, 0.6379, 0.8556, 0.0899, 0.6243, 0.0430, + 0.0444, 0.2529, 0.6988, 0.6018, 0.0937, 0.9108, 0.0874, + 0.8000, 0.6934, 0.6987, 0.3747, 0.5361, 0.5595, 0.4715, + 0.8963, 0.2831, 0.7115, 0.8126, 0.3770, 0.9565, 0.0805, + 0.5965, 0.4760, 0.3946, 0.5740, 0.5718, 0.8089, 0.3195, + 0.6564, 0.2940, 0.1074, 0.1914, 0.3616, 0.2781, 0.5799, + 0.2506, 0.9786, 0.8031, 0.2290, 0.7132, 0.8597, 0.1502, + 0.5501, 0.8473, 0.0961, 0.3091, 0.2671, 0.6716, 0.2749, + 0.8922, 0.9331, 0.7831, 0.3662, 0.4411, 0.0261, 0.1996, + 0.1425, 0.1518, 0.1002, 0.5879, 0.8248, 0.4447, 0.4024, + 0.2709, 0.9656, 0.2968, 0.8687, 0.7429, 0.2038, 0.6212, + 0.5590, 0.3980, 0.6056, 0.7683, 0.9384, 0.6645, 0.3018, + 0.9914, 0.4717, 0.9312, 0.2458, 0.6373, 0.3662, 0.7338, + 0.8539, 0.2373, 0.8468, 0.9313, 0.9985, 0.9108, 0.2928, + 0.5675, 0.4918, 0.6788, 0.6532, 0.8578, 0.9756, 0.7977, + 0.7426, 0.3820, 0.0559, 0.7421, 0.6276, 0.5143, 0.1295, + 0.1380, 0.3699, 0.7837, 0.8925, 0.0521, 0.7792, 0.8213, + 0.9725, 0.3183, 0.1008, 0.4133, 0.0178, 0.2000, 0.5122, + 0.9307, 0.7232, 0.2629, 0.4049, 0.1523, 0.9357, 0.6589, + 0.3894, 0.5017, 0.4405, 0.3895, 0.3989, 0.4152, 0.3842, + 0.2309, 0.0256, 0.1505, 0.6988, 0.2384, 0.4692, 0.2583, + 0.3660, 0.9101, 0.3800, 0.5037, 0.1464, 0.3283, 0.7269, + 0.8936, 0.2059, 0.3661, 0.4488, 0.0114, 0.3810, 0.0566, + 0.9065, 0.6607, 0.4611, 0.6537, 0.1070, 0.8951, 0.1591, + 0.3640, 0.0681, 0.1975, 0.8229, 0.3451, 0.9813, 0.1129, + 0.2130, 0.1791, 0.6985, 0.3325, 0.8236, 0.5894, 0.8602, + 0.7245, 0.8692, 0.0849, 0.3757, 0.4767, 0.2866, 0.8776, + 0.1222, 0.4554, 0.1437, 0.8286, 0.4194, 0.9171, 0.1793, + 0.4714, 0.7967, 0.3271, 0.9508, 0.9596, 0.9553, 0.2671, + 0.5226, 0.2545, 0.2904, 0.3931, 0.1604, 0.9966, 0.9778, + 0.1750, 0.2027, 0.1076, 0.4041, 0.9413, 0.1702, 0.9720, + 0.8515, 0.2862, 0.5026, 0.1329, 0.7104, 0.6485, 0.6173, + 0.1418, 0.0456, 0.2708, 0.2491, 0.4879, 0.6711, 0.3460, + 0.2430, 0.0081, 0.6377, 0.4017, 0.6538, 0.0909, 0.0154, + 0.6563, 0.2168, 0.7672, 0.2347, 0.2597, 0.5759, 0.0490, + 0.0102, 0.2013, 0.8005, 0.6998, 0.2945, 0.4318, 0.5007, + 0.1644, 0.7497, 0.5000, 0.5912, 0.9818, 0.5953, 0.9114, + 0.1285, 0.8707, 0.3809, 0.7743, 0.4282, 0.7606, 0.5688, + 0.3152, 0.8989, 0.1583, 0.8766, 0.6326, 0.8886, 0.2620, + 0.3086, 0.3602, 0.7866, 0.9547, 0.2244, 0.2724, 0.9583, + 0.7470, 0.9623, 0.1429, 0.8508, 0.5799, 0.9955, 0.6647, + 0.1399, 0.1801, 0.9233, 0.2772, 0.2445, 0.0531, 0.7452, + 0.0969, 0.7391, 0.2375, 0.8335, 0.2274, 0.5999, 0.0299, + 0.1018, 0.7622, 0.6825, 0.5068, 0.5262, 0.2390, 0.3450, + 0.7486, 0.6337, 0.3874, 0.1886, 0.2751, 0.8412, 0.0794, 
+ 0.2398, 0.8937, 0.9405, 0.8703, 0.2503, 0.9532, 0.7538, + 0.3676, 0.7234, 0.4557, 0.9672, 0.5687, 0.6227, 0.7700, + 0.6581, 0.6226, 0.6058, 0.8459, 0.4844, 0.8195, 0.3510, + 0.5321, 0.7146, 0.4273, 0.4330, 0.8640, 0.8088, 0.5159, + 0.8375, 0.9589, 0.0615, 0.2675, 0.6890, 0.9951, 0.0718, + 0.5084, 0.2519, 0.6662, 0.9841, 0.4845, 0.3961]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4905, 0.9858, 0.4305, ..., 0.2183, 0.4525, 0.0992]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.263301372528076 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([7666, 3284, 1699, 7007, 1865, 642, 2301, 7889, 3112, + 4841, 4554, 2643, 3511, 9405, 9949, 4868, 7053, 8979, + 7155, 3080, 3345, 5376, 5682, 4591, 5457, 4012, 9907, + 4608, 8275, 6785, 214, 8256, 6706, 6096, 396, 213, + 9694, 8210, 4776, 4951, 9163, 2885, 6391, 5210, 5362, + 2964, 8727, 2087, 6653, 557, 2807, 404, 7235, 7348, + 5591, 9937, 5151, 9696, 3619, 4512, 6156, 4095, 5236, + 6999, 8669, 5309, 6087, 4118, 2455, 1780, 9742, 3315, + 7998, 8891, 4943, 9849, 3316, 2894, 3974, 2248, 8409, + 9978, 2602, 229, 3004, 5173, 479, 6004, 6080, 4963, + 6862, 9389, 8433, 2783, 2848, 968, 8951, 3760, 8407, + 7534, 2609, 3709, 9648, 2412, 7425, 8619, 7347, 9064, + 4963, 1354, 5818, 7386, 7369, 2381, 5800, 6012, 3532, + 214, 9362, 4731, 8238, 4276, 2221, 3400, 540, 4177, + 2164, 1191, 3257, 9047, 364, 2899, 2221, 8232, 4361, + 1480, 3950, 9412, 570, 6740, 5525, 7995, 856, 1549, + 3242, 5640, 6435, 1568, 3375, 4060, 6793, 1686, 9865, + 9598, 3307, 6572, 3171, 2008, 7637, 4922, 386, 6366, + 7528, 2850, 6318, 2345, 368, 5786, 9710, 7772, 1761, + 6140, 301, 570, 8828, 8741, 9119, 4206, 3780, 4780, + 707, 3068, 8924, 3477, 4065, 1036, 560, 3678, 9660, + 4487, 8282, 6249, 7109, 3781, 7458, 2555, 3159, 5856, + 7260, 2615, 6118, 4364, 9114, 9280, 8926, 3395, 7988, + 2698, 8725, 6447, 7235, 3203, 5300, 4394, 3058, 7417, + 5517, 1817, 6921, 9211, 8306, 7080, 8460, 7810, 7848, + 4752, 270, 8368, 8576, 7374, 8967, 2824, 9004, 8383, + 4024, 4694, 919, 6859, 4607, 3536, 5606, 5137, 6430, + 1226, 8149, 7263, 7513, 1278, 5694, 380, 1738, 433, + 9251, 916, 5723, 4516, 8117, 800, 5078, 9205, 3076, + 2156, 1929, 248, 8896, 6899, 3847, 2048, 8775, 3158, + 9717, 4304, 9305, 9027, 8444, 7211, 3882, 2571, 7542, + 1042, 2864, 4697, 4933, 9597, 2106, 1874, 1663, 8974, + 5845, 9264, 40, 4452, 2566, 3363, 7670, 7586, 7479, + 5981, 1928, 2498, 8107, 6502, 5725, 1280, 4382, 7811, + 4150, 2407, 5646, 7384, 6368, 9330, 4277, 4360, 9293, + 8848, 3141, 3719, 4630, 418, 68, 3077, 304, 5534, + 3069, 4046, 9645, 797, 7332, 2456, 818, 6040, 233, + 1356, 6755, 4249, 1643, 5939, 6192, 5023, 2182, 3038, + 615, 962, 6584, 6465, 4700, 5188, 378, 2580, 7736, + 8469, 7964, 4215, 5350, 5035, 723, 2366, 9255, 3296, + 2487, 9749, 9356, 2120, 2719, 1534, 5352, 4142, 2840, + 3874, 3309, 2282, 4165, 8221, 3151, 7532, 9093, 1914, + 875, 3267, 7484, 5334, 246, 69, 1760, 3158, 9485, + 2968, 8655, 8484, 1925, 6269, 8467, 4053, 8242, 2491, + 9174, 9141, 8207, 3765, 7176, 8536, 4547, 
5155, 9600, + 59, 7491, 3224, 3860, 3405, 5155, 5872, 5087, 1030, + 5823, 67, 9402, 2052, 3137, 8203, 5381, 5014, 2445, + 2598, 6064, 583, 6264, 8514, 2132, 8834, 5724, 5040, + 3049, 7546, 5472, 9227, 9155, 4856, 8701, 9805, 8423, + 5471, 9572, 1489, 1146, 2738, 6131, 4467, 3775, 7623, + 3128, 1626, 4245, 5851, 8311, 5249, 9163, 5646, 9700, + 5912, 1064, 3427, 9742, 4822, 1609, 1247, 3225, 1349, + 1112, 7136, 4620, 9398, 180, 852, 8483, 99, 5880, + 6936, 3985, 7199, 43, 619, 1433, 378, 5613, 8778, + 9714, 6327, 2214, 4270, 2616, 7836, 6036, 4038, 1349, + 3609, 500, 8725, 5242, 3213, 7450, 9547, 6518, 3900, + 9820, 6085, 3210, 7810, 5855, 7609, 1890, 2354, 6091, + 8514, 7897, 3931, 2880, 6281, 3385, 5921, 9952, 5094, + 8855, 172, 3302, 3010, 4642, 2734, 9166, 8421, 2234, + 8565, 528, 8176, 6482, 994, 3099, 9724, 6379, 405, + 8321, 239, 6594, 996, 3440, 8976, 435, 2375, 6431, + 3253, 1630, 7853, 2956, 8895, 986, 6476, 1010, 8489, + 8021, 872, 1473, 1100, 5352, 9179, 2417, 9244, 9167, + 7160, 3376, 6327, 3920, 5098, 7118, 4103, 8332, 3090, + 9060, 6774, 7505, 4316, 2580, 8806, 3234, 5576, 1591, + 9446, 2876, 3861, 9007, 4047, 9387, 1828, 9480, 300, + 5810, 1681, 7603, 9696, 3967, 205, 4720, 6724, 1706, + 1179, 7731, 9912, 9119, 3539, 3641, 2691, 1738, 2069, + 64, 5901, 6907, 7329, 5038, 8087, 2346, 1417, 9802, + 2245, 9442, 3124, 6856, 7605, 3350, 9096, 9607, 6580, + 7026, 4159, 2283, 388, 6926, 8970, 9000, 5902, 3616, + 8251, 2022, 1387, 2040, 3499, 7352, 9723, 4720, 4072, + 3030, 8642, 1561, 3550, 728, 3483, 9957, 2852, 9769, + 530, 8950, 8791, 1868, 3201, 4039, 7668, 6271, 5458, + 8517, 9781, 4046, 3225, 9954, 206, 6894, 7597, 7497, + 6306, 3298, 6872, 2066, 7591, 1890, 8097, 1280, 7096, + 8226, 1359, 216, 3321, 8096, 7997, 7290, 8655, 8538, + 5466, 3483, 6990, 5527, 3778, 5009, 8586, 5007, 7530, + 2615, 6501, 9092, 3603, 3220, 7154, 9702, 5045, 8352, + 2617, 771, 5086, 2751, 4678, 9150, 7949, 9854, 737, + 4795, 2071, 9389, 7313, 9799, 3943, 6232, 1410, 8989, + 7328, 6562, 7366, 5222, 441, 8345, 6712, 3926, 515, + 5956, 5254, 6129, 5268, 1272, 9590, 5597, 1532, 744, + 7243, 1823, 6966, 9821, 5101, 1854, 836, 8195, 8533, + 9772, 6888, 999, 42, 8452, 7406, 756, 2186, 7921, + 7527, 6586, 9178, 9190, 6661, 1985, 2124, 5708, 2885, + 5344, 8420, 6388, 4916, 5751, 5447, 1612, 7115, 4012, + 4211, 4491, 2685, 8469, 3264, 3933, 8307, 2069, 5721, + 6785, 7552, 3662, 9294, 3272, 4972, 4038, 8502, 7978, + 730, 6090, 2443, 3573, 9783, 6945, 8456, 7714, 2278, + 1207, 9072, 1346, 555, 8623, 6821, 2442, 455, 1202, + 8844, 3296, 1101, 8043, 8852, 2122, 3173, 1731, 9938, + 5593, 93, 8956, 6237, 8545, 5229, 9239, 8804, 3713, + 4281, 5382, 8333, 8801, 93, 2212, 5217, 7239, 9312, + 2195, 1969, 8659, 674, 2456, 1219, 684, 5633, 7633, + 1822, 5479, 5138, 2448, 4428, 1533, 6135, 585, 2626, + 1535, 2390, 2292, 3199, 9797, 3083, 7782, 3749, 5972, + 5041, 873, 1762, 6907, 2439, 176, 7886, 8698, 5610, + 4063, 7434, 7641, 5504, 8492, 8226, 4607, 3891, 971, + 446, 5185, 8037, 5578, 2205, 6200, 5210, 7918, 8694, + 8369, 5656, 8140, 720, 6980, 4361, 6048, 5637, 1525, + 3287, 4786, 6431, 419, 584, 7972, 6800, 6611, 3925, + 1361, 345, 5934, 221, 3976, 5263, 273, 1124, 6638, + 9587, 1234, 7547, 9811, 1651, 2418, 9523, 5719, 2357, + 9754, 4924, 9608, 7139, 1769, 2001, 6705, 554, 4744, + 2302, 8692, 6947, 8654, 8683, 9773, 4469, 8661, 3479, + 222, 2813, 8116, 1105, 4667, 9744, 3524, 7923, 7688, + 5748, 4643, 3806, 589, 8275, 7300, 9905, 8566, 3005, + 8277, 3362, 9117, 4507, 4746, 196, 1702, 5992, 1090, + 7587, 5382, 7431, 
8949, 1611, 5830, 5183, 8125, 3201, + 6255, 9408, 1748, 1554, 1906, 729, 8466, 4725, 4398, + 3941]), + values=tensor([0.6959, 0.7960, 0.0781, 0.6007, 0.3460, 0.7293, 0.2445, + 0.3377, 0.4176, 0.3625, 0.7235, 0.1442, 0.0335, 0.4983, + 0.5500, 0.1056, 0.9940, 0.8727, 0.8069, 0.4130, 0.1455, + 0.6420, 0.2494, 0.3495, 0.7133, 0.4395, 0.2694, 0.2841, + 0.5549, 0.0490, 0.7986, 0.5242, 0.3692, 0.2460, 0.8859, + 0.0105, 0.6123, 0.7972, 0.2878, 0.9495, 0.0416, 0.7368, + 0.5235, 0.2640, 0.4232, 0.7400, 0.4660, 0.1817, 0.7149, + 0.8459, 0.3355, 0.5095, 0.8904, 0.2582, 0.0507, 0.5007, + 0.7876, 0.1015, 0.4881, 0.3695, 0.9348, 0.8654, 0.8541, + 0.1701, 0.5411, 0.8559, 0.0858, 0.6724, 0.4755, 0.9937, + 0.3688, 0.4551, 0.1705, 0.5343, 0.7652, 0.5023, 0.4761, + 0.8553, 0.5042, 0.1378, 0.3402, 0.0214, 0.3317, 0.7760, + 0.6136, 0.0754, 0.7266, 0.3108, 0.1557, 0.5342, 0.2368, + 0.6255, 0.6338, 0.2103, 0.3547, 0.7898, 0.3532, 0.6879, + 0.5756, 0.5446, 0.6549, 0.1833, 0.3545, 0.3075, 0.8999, + 0.0866, 0.3178, 0.1771, 0.9904, 0.5584, 0.7474, 0.3985, + 0.7736, 0.7195, 0.1836, 0.1731, 0.6012, 0.6546, 0.5119, + 0.7668, 0.7804, 0.2026, 0.4119, 0.1043, 0.5762, 0.3249, + 0.0350, 0.9469, 0.9597, 0.3663, 0.4396, 0.3526, 0.6112, + 0.2028, 0.6398, 0.7385, 0.7867, 0.3703, 0.9189, 0.5053, + 0.2346, 0.3574, 0.9106, 0.6490, 0.0571, 0.7581, 0.0037, + 0.2000, 0.0078, 0.3345, 0.5987, 0.1644, 0.3401, 0.1628, + 0.6190, 0.0170, 0.3230, 0.7145, 0.0065, 0.2218, 0.4568, + 0.5352, 0.7506, 0.8970, 0.0769, 0.7770, 0.0375, 0.5961, + 0.1851, 0.1482, 0.0403, 0.1972, 0.9194, 0.1678, 0.8901, + 0.8766, 0.0092, 0.2714, 0.5465, 0.2373, 0.1403, 0.0755, + 0.7137, 0.4544, 0.3006, 0.0421, 0.8286, 0.0333, 0.4192, + 0.1514, 0.5585, 0.9974, 0.8611, 0.3646, 0.5144, 0.1233, + 0.0881, 0.9866, 0.2420, 0.9365, 0.3756, 0.2095, 0.4280, + 0.2119, 0.0313, 0.7899, 0.6265, 0.8099, 0.3894, 0.1829, + 0.4932, 0.0535, 0.2941, 0.7543, 0.3779, 0.6784, 0.2573, + 0.6186, 0.1475, 0.2682, 0.6713, 0.2567, 0.7592, 0.7252, + 0.8305, 0.0652, 0.2366, 0.0143, 0.5795, 0.7446, 0.7182, + 0.2430, 0.3788, 0.2324, 0.6863, 0.2975, 0.5231, 0.1994, + 0.6772, 0.0117, 0.2661, 0.4983, 0.2255, 0.1217, 0.5970, + 0.4181, 0.1888, 0.5662, 0.8459, 0.4818, 0.5664, 0.7551, + 0.3425, 0.7495, 0.5635, 0.3037, 0.9253, 0.1746, 0.0949, + 0.1444, 0.1382, 0.6207, 0.2484, 0.6872, 0.6011, 0.8909, + 0.5037, 0.7646, 0.8169, 0.6714, 0.6011, 0.0228, 0.7584, + 0.5712, 0.9573, 0.5293, 0.0187, 0.0769, 0.3750, 0.5318, + 0.5469, 0.0475, 0.6832, 0.1012, 0.3177, 0.3205, 0.9912, + 0.7967, 0.3297, 0.8600, 0.8649, 0.1408, 0.1008, 0.6301, + 0.4827, 0.2363, 0.4056, 0.0155, 0.7202, 0.0029, 0.5561, + 0.7290, 0.5676, 0.5965, 0.5363, 0.0921, 0.1850, 0.8448, + 0.7451, 0.5778, 0.2613, 0.9353, 0.9780, 0.5328, 0.8762, + 0.7506, 0.6058, 0.6343, 0.8670, 0.6461, 0.0050, 0.0322, + 0.7176, 0.9509, 0.0141, 0.2849, 0.4851, 0.7572, 0.2430, + 0.0132, 0.9420, 0.2551, 0.1494, 0.1934, 0.9914, 0.0389, + 0.5433, 0.7196, 0.7758, 0.0818, 0.2780, 0.1470, 0.9575, + 0.0039, 0.7218, 0.1816, 0.4767, 0.9657, 0.5165, 0.5374, + 0.9692, 0.8055, 0.2454, 0.3490, 0.9352, 0.0487, 0.6894, + 0.9618, 0.0152, 0.3404, 0.6613, 0.6038, 0.4468, 0.2839, + 0.1984, 0.0949, 0.4107, 0.3440, 0.6650, 0.9782, 0.7647, + 0.3472, 0.8008, 0.6515, 0.2253, 0.1516, 0.0721, 0.0532, + 0.1489, 0.2701, 0.4286, 0.6556, 0.2800, 0.4837, 0.1700, + 0.4031, 0.7582, 0.2764, 0.1452, 0.4317, 0.8870, 0.8904, + 0.2127, 0.1224, 0.0877, 0.6062, 0.7479, 0.8289, 0.8979, + 0.5176, 0.3928, 0.1454, 0.7837, 0.1713, 0.5346, 0.7913, + 0.1056, 0.5996, 0.7813, 0.5215, 0.9208, 0.9514, 0.2525, + 0.4305, 
0.2552, 0.7349, 0.3762, 0.7076, 0.1290, 0.1830, + 0.8907, 0.4787, 0.2419, 0.4726, 0.6765, 0.2643, 0.8126, + 0.8757, 0.7952, 0.1205, 0.1298, 0.3806, 0.3136, 0.8469, + 0.0054, 0.5516, 0.9247, 0.4707, 0.9515, 0.8050, 0.4300, + 0.7760, 0.9682, 0.9549, 0.3683, 0.9720, 0.3750, 0.1016, + 0.1056, 0.6301, 0.1820, 0.6373, 0.4706, 0.8360, 0.8820, + 0.8684, 0.5608, 0.4950, 0.7219, 0.7477, 0.3138, 0.4091, + 0.3672, 0.6820, 0.1598, 0.4262, 0.8790, 0.5580, 0.7601, + 0.6733, 0.7879, 0.3866, 0.8117, 0.6549, 0.4184, 0.9377, + 0.0728, 0.7220, 0.5621, 0.8045, 0.9104, 0.1723, 0.5263, + 0.7555, 0.7751, 0.2045, 0.2067, 0.3395, 0.0619, 0.6870, + 0.6295, 0.5145, 0.4071, 0.2642, 0.1755, 0.1193, 0.9114, + 0.5765, 0.1704, 0.3481, 0.6444, 0.1809, 0.0182, 0.0117, + 0.5466, 0.1851, 0.8523, 0.0951, 0.8918, 0.4279, 0.3555, + 0.1480, 0.9159, 0.1610, 0.2426, 0.6025, 0.7344, 0.3367, + 0.2722, 0.6946, 0.3251, 0.3742, 0.1985, 0.3410, 0.7549, + 0.7152, 0.6307, 0.2442, 0.6188, 0.4939, 0.3315, 0.6606, + 0.3168, 0.5301, 0.3465, 0.9986, 0.1887, 0.1622, 0.9650, + 0.5986, 0.8953, 0.8288, 0.8866, 0.2675, 0.7017, 0.2466, + 0.8798, 0.3267, 0.8028, 0.3919, 0.3126, 0.5504, 0.3778, + 0.7414, 0.0821, 0.0301, 0.4972, 0.4197, 0.6222, 0.4811, + 0.0587, 0.6802, 0.1914, 0.3226, 0.5795, 0.4956, 0.3432, + 0.0689, 0.7911, 0.3203, 0.5275, 0.7748, 0.2775, 0.7469, + 0.8369, 0.2113, 0.0507, 0.9763, 0.3517, 0.6939, 0.7093, + 0.5765, 0.2753, 0.5658, 0.3861, 0.2320, 0.5725, 0.7333, + 0.9639, 0.9626, 0.9957, 0.2482, 0.2599, 0.2589, 0.0011, + 0.5195, 0.4786, 0.6863, 0.9629, 0.0159, 0.8498, 0.9892, + 0.4985, 0.5712, 0.8487, 0.3862, 0.6221, 0.2142, 0.4490, + 0.8922, 0.7035, 0.5839, 0.3975, 0.4180, 0.6609, 0.9340, + 0.0330, 0.6173, 0.9389, 0.6227, 0.3648, 0.5424, 0.3871, + 0.5239, 0.6250, 0.8573, 0.0643, 0.3775, 0.5397, 0.5294, + 0.8842, 0.4971, 0.4772, 0.0587, 0.4167, 0.3990, 0.8149, + 0.3392, 0.7700, 0.3194, 0.6137, 0.4234, 0.6017, 0.1367, + 0.5706, 0.9656, 0.6379, 0.8556, 0.0899, 0.6243, 0.0430, + 0.0444, 0.2529, 0.6988, 0.6018, 0.0937, 0.9108, 0.0874, + 0.8000, 0.6934, 0.6987, 0.3747, 0.5361, 0.5595, 0.4715, + 0.8963, 0.2831, 0.7115, 0.8126, 0.3770, 0.9565, 0.0805, + 0.5965, 0.4760, 0.3946, 0.5740, 0.5718, 0.8089, 0.3195, + 0.6564, 0.2940, 0.1074, 0.1914, 0.3616, 0.2781, 0.5799, + 0.2506, 0.9786, 0.8031, 0.2290, 0.7132, 0.8597, 0.1502, + 0.5501, 0.8473, 0.0961, 0.3091, 0.2671, 0.6716, 0.2749, + 0.8922, 0.9331, 0.7831, 0.3662, 0.4411, 0.0261, 0.1996, + 0.1425, 0.1518, 0.1002, 0.5879, 0.8248, 0.4447, 0.4024, + 0.2709, 0.9656, 0.2968, 0.8687, 0.7429, 0.2038, 0.6212, + 0.5590, 0.3980, 0.6056, 0.7683, 0.9384, 0.6645, 0.3018, + 0.9914, 0.4717, 0.9312, 0.2458, 0.6373, 0.3662, 0.7338, + 0.8539, 0.2373, 0.8468, 0.9313, 0.9985, 0.9108, 0.2928, + 0.5675, 0.4918, 0.6788, 0.6532, 0.8578, 0.9756, 0.7977, + 0.7426, 0.3820, 0.0559, 0.7421, 0.6276, 0.5143, 0.1295, + 0.1380, 0.3699, 0.7837, 0.8925, 0.0521, 0.7792, 0.8213, + 0.9725, 0.3183, 0.1008, 0.4133, 0.0178, 0.2000, 0.5122, + 0.9307, 0.7232, 0.2629, 0.4049, 0.1523, 0.9357, 0.6589, + 0.3894, 0.5017, 0.4405, 0.3895, 0.3989, 0.4152, 0.3842, + 0.2309, 0.0256, 0.1505, 0.6988, 0.2384, 0.4692, 0.2583, + 0.3660, 0.9101, 0.3800, 0.5037, 0.1464, 0.3283, 0.7269, + 0.8936, 0.2059, 0.3661, 0.4488, 0.0114, 0.3810, 0.0566, + 0.9065, 0.6607, 0.4611, 0.6537, 0.1070, 0.8951, 0.1591, + 0.3640, 0.0681, 0.1975, 0.8229, 0.3451, 0.9813, 0.1129, + 0.2130, 0.1791, 0.6985, 0.3325, 0.8236, 0.5894, 0.8602, + 0.7245, 0.8692, 0.0849, 0.3757, 0.4767, 0.2866, 0.8776, + 0.1222, 0.4554, 0.1437, 0.8286, 0.4194, 0.9171, 0.1793, + 0.4714, 0.7967, 0.3271, 
0.9508, 0.9596, 0.9553, 0.2671, + 0.5226, 0.2545, 0.2904, 0.3931, 0.1604, 0.9966, 0.9778, + 0.1750, 0.2027, 0.1076, 0.4041, 0.9413, 0.1702, 0.9720, + 0.8515, 0.2862, 0.5026, 0.1329, 0.7104, 0.6485, 0.6173, + 0.1418, 0.0456, 0.2708, 0.2491, 0.4879, 0.6711, 0.3460, + 0.2430, 0.0081, 0.6377, 0.4017, 0.6538, 0.0909, 0.0154, + 0.6563, 0.2168, 0.7672, 0.2347, 0.2597, 0.5759, 0.0490, + 0.0102, 0.2013, 0.8005, 0.6998, 0.2945, 0.4318, 0.5007, + 0.1644, 0.7497, 0.5000, 0.5912, 0.9818, 0.5953, 0.9114, + 0.1285, 0.8707, 0.3809, 0.7743, 0.4282, 0.7606, 0.5688, + 0.3152, 0.8989, 0.1583, 0.8766, 0.6326, 0.8886, 0.2620, + 0.3086, 0.3602, 0.7866, 0.9547, 0.2244, 0.2724, 0.9583, + 0.7470, 0.9623, 0.1429, 0.8508, 0.5799, 0.9955, 0.6647, + 0.1399, 0.1801, 0.9233, 0.2772, 0.2445, 0.0531, 0.7452, + 0.0969, 0.7391, 0.2375, 0.8335, 0.2274, 0.5999, 0.0299, + 0.1018, 0.7622, 0.6825, 0.5068, 0.5262, 0.2390, 0.3450, + 0.7486, 0.6337, 0.3874, 0.1886, 0.2751, 0.8412, 0.0794, + 0.2398, 0.8937, 0.9405, 0.8703, 0.2503, 0.9532, 0.7538, + 0.3676, 0.7234, 0.4557, 0.9672, 0.5687, 0.6227, 0.7700, + 0.6581, 0.6226, 0.6058, 0.8459, 0.4844, 0.8195, 0.3510, + 0.5321, 0.7146, 0.4273, 0.4330, 0.8640, 0.8088, 0.5159, + 0.8375, 0.9589, 0.0615, 0.2675, 0.6890, 0.9951, 0.0718, + 0.5084, 0.2519, 0.6662, 0.9841, 0.4845, 0.3961]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4905, 0.9858, 0.4305, ..., 0.2183, 0.4525, 0.0992]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.263301372528076 seconds + +[18.52, 17.73, 17.82, 17.87, 17.98, 18.03, 17.91, 19.05, 18.64, 18.02] +[73.21] +13.981836557388306 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 285101, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.263301372528076, 'TIME_S_1KI': 0.03599882628446788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1023.6102543663977, 'W': 73.21} +[18.52, 17.73, 17.82, 17.87, 17.98, 18.03, 17.91, 19.05, 18.64, 18.02, 18.85, 18.33, 18.07, 17.99, 17.95, 17.9, 17.98, 18.04, 17.93, 17.89] +325.86 +16.293 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 285101, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.263301372528076, 'TIME_S_1KI': 0.03599882628446788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1023.6102543663977, 'W': 73.21, 'J_1KI': 3.5903425605886956, 'W_1KI': 0.2567861915601839, 'W_D': 56.916999999999994, 'J_D': 795.8041913368701, 'W_D_1KI': 0.1996380230164047, 'J_D_1KI': 0.0007002361374264022} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..0ec114e --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 259324, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.257395267486572, "TIME_S_1KI": 0.03955436159972302, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1007.3763615226745, "W": 73.52, "J_1KI": 
3.8846244910716883, "W_1KI": 0.28350634727213836, "W_D": 56.727999999999994, "J_D": 777.2911620845794, "W_D_1KI": 0.21875337415742466, "J_D_1KI": 0.000843552367530289} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..838ccfc --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.021072864532470703} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 4999, 5000]), + col_indices=tensor([6834, 1931, 4346, ..., 6725, 2972, 1681]), + values=tensor([0.7465, 0.7749, 0.3553, ..., 0.3449, 0.2710, 0.4644]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.4472, 0.0239, 0.4773, ..., 0.7523, 0.1836, 0.2389]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.021072864532470703 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '49827', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.0174834728240967} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 5000, 5000]), + col_indices=tensor([6306, 9296, 8522, ..., 5641, 7164, 5943]), + values=tensor([0.9605, 0.0866, 0.1892, ..., 0.4816, 0.2836, 0.0365]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.7120, 0.8205, 0.0862, ..., 0.5109, 0.7192, 0.7608]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 2.0174834728240967 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '259324', '-ss', '10000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.257395267486572} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 5000, 5000]), + col_indices=tensor([2130, 3883, 5256, ..., 7000, 8219, 5132]), + values=tensor([0.8393, 0.8650, 0.4056, ..., 0.4895, 0.0562, 0.7603]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.9495, 0.1374, 0.1837, ..., 0.6231, 0.7099, 0.0387]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.257395267486572 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 5000, 5000]), + col_indices=tensor([2130, 3883, 5256, ..., 7000, 8219, 5132]), + values=tensor([0.8393, 0.8650, 0.4056, ..., 0.4895, 0.0562, 0.7603]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.9495, 0.1374, 0.1837, ..., 0.6231, 0.7099, 0.0387]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.257395267486572 seconds + +[18.17, 18.06, 17.92, 18.22, 18.12, 18.42, 22.11, 19.11, 18.46, 18.0] +[73.52] +13.702072381973267 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 259324, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.257395267486572, 'TIME_S_1KI': 0.03955436159972302, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1007.3763615226745, 'W': 73.52} +[18.17, 18.06, 17.92, 18.22, 18.12, 18.42, 22.11, 19.11, 18.46, 18.0, 18.1, 18.11, 18.16, 22.77, 18.06, 18.3, 17.88, 17.93, 18.0, 18.15] +335.84000000000003 +16.792 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 259324, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.257395267486572, 'TIME_S_1KI': 0.03955436159972302, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1007.3763615226745, 'W': 73.52, 'J_1KI': 3.8846244910716883, 'W_1KI': 0.28350634727213836, 'W_D': 56.727999999999994, 'J_D': 777.2911620845794, 'W_D_1KI': 0.21875337415742466, 'J_D_1KI': 0.000843552367530289} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..da0856a --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 665, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.126577615737915, "TIME_S_1KI": 15.22793626426754, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3144.6024104833605, "W": 46.7, "J_1KI": 4728.725429298286, "W_1KI": 70.22556390977444, "W_D": 20.655250000000002, "J_D": 1390.846872358382, "W_D_1KI": 31.060526315789478, "J_D_1KI": 46.707558369608236} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..4beed36 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,89 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.0304152965545654} + 
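Note on the derived fields in the result dicts above (a reading of the logged numbers, not taken from spmv.py itself): J is average package power times the elapsed wall time of the measured run, the W_D/J_D pair subtracts an idle-power figure derived from the 20 baseline samples printed just before the final dict (the two bare numbers, e.g. 335.84 and 16.792 above, are that aggregate and the resulting idle watts), and every *_1KI field is the corresponding value divided by ITERATIONS/1000. A minimal sketch that reproduces the xeon_4216 10000/5e-05 numbers; the inputs are copied from the log, the helper itself is hypothetical:

# Hypothetical re-derivation of the energy fields logged above (synthetic 10000 x 10000, density 5e-05).
# Inputs are the raw values printed in the output; everything else is arithmetic.
W = 73.52                          # average package power during the measured run
elapsed_s = 13.702072381973267     # wall time of the measured run (bare float above the dict)
idle_w = 16.792                    # idle watts from the 20 baseline samples (printed as 335.84 / 16.792)
iterations = 259324
time_s = 10.257395267486572

J = W * elapsed_s                  # 1007.376...  matches 'J'
W_D = W - idle_w                   # 56.728       matches 'W_D'
J_D = W_D * elapsed_s              # 777.291...   matches 'J_D'
per_1k = iterations / 1000.0
print({"J": J, "W_D": W_D, "J_D": J_D,
       "TIME_S_1KI": time_s / per_1k,   # 0.03955...
       "J_1KI": J / per_1k,             # 3.8846...
       "W_1KI": W / per_1k})            # 0.28351...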
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 59, 94, ..., 24999900, + 24999951, 25000000]), + col_indices=tensor([ 1276, 15885, 34398, ..., 446460, 484343, + 488114]), + values=tensor([0.3408, 0.7505, 0.4683, ..., 0.6426, 0.2990, 0.1628]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.4984, 0.2348, 0.7546, ..., 0.4897, 0.9555, 0.5266]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 2.0304152965545654 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '517', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.159608364105225} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 46, 98, ..., 24999895, + 24999952, 25000000]), + col_indices=tensor([ 1859, 10480, 11583, ..., 471819, 483100, + 486034]), + values=tensor([0.5566, 0.4872, 0.1210, ..., 0.2476, 0.9480, 0.3070]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.1091, 0.0853, 0.9295, ..., 0.2076, 0.8766, 0.1664]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 8.159608364105225 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '665', '-ss', '500000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.126577615737915} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 66, 119, ..., 24999902, + 24999949, 25000000]), + col_indices=tensor([ 3829, 12709, 24306, ..., 491038, 494248, + 495364]), + values=tensor([0.8354, 0.9747, 0.5569, ..., 0.5257, 0.7884, 0.2877]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.8209, 0.7651, 0.4978, ..., 0.6892, 0.4643, 0.4864]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.126577615737915 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 66, 119, ..., 24999902, + 24999949, 25000000]), + col_indices=tensor([ 3829, 12709, 24306, ..., 491038, 494248, + 495364]), + values=tensor([0.8354, 0.9747, 0.5569, ..., 0.5257, 0.7884, 0.2877]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.8209, 0.7651, 0.4978, ..., 0.6892, 0.4643, 0.4864]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.126577615737915 seconds + +[18.36, 18.09, 18.07, 17.97, 17.94, 18.13, 18.13, 17.77, 17.92, 18.01] +[46.7] +67.33624005317688 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 665, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.126577615737915, 'TIME_S_1KI': 15.22793626426754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3144.6024104833605, 'W': 46.7} +[18.36, 18.09, 18.07, 17.97, 17.94, 18.13, 18.13, 17.77, 17.92, 18.01, 39.86, 40.69, 39.54, 40.28, 39.05, 39.85, 39.62, 39.36, 40.17, 40.4] +520.895 +26.04475 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 665, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.126577615737915, 'TIME_S_1KI': 15.22793626426754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3144.6024104833605, 'W': 46.7, 'J_1KI': 4728.725429298286, 'W_1KI': 70.22556390977444, 'W_D': 20.655250000000002, 'J_D': 1390.846872358382, 'W_D_1KI': 31.060526315789478, 'J_D_1KI': 46.707558369608236} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..ac8e083 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8088, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.521591186523438, "TIME_S_1KI": 1.30088911801724, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1272.5733924508095, "W": 87.25, "J_1KI": 157.34092389352244, "W_1KI": 10.787586547972305, "W_D": 70.764, "J_D": 
1032.1190090932846, "W_D_1KI": 8.749258160237389, "J_D_1KI": 1.0817579327692124} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..a65a620 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,89 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.14249539375305176} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 6, ..., 2499988, + 2499994, 2500000]), + col_indices=tensor([159074, 199303, 338786, ..., 336877, 376694, + 404714]), + values=tensor([0.7251, 0.9700, 0.9965, ..., 0.4798, 0.8363, 0.5285]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5063, 0.7490, 0.8579, ..., 0.3117, 0.7674, 0.7165]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 0.14249539375305176 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7368', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.564131021499634} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 11, ..., 2499988, + 2499994, 2500000]), + col_indices=tensor([ 45977, 46883, 132654, ..., 283974, 337716, + 438050]), + values=tensor([0.6941, 0.4659, 0.2903, ..., 0.1328, 0.8033, 0.9427]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8274, 0.6187, 0.7071, ..., 0.9433, 0.5745, 0.3570]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 9.564131021499634 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8088', '-ss', '500000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.521591186523438} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 2499988, + 2499995, 2500000]), + col_indices=tensor([ 55151, 55855, 262240, ..., 129037, 280325, + 497898]), + values=tensor([0.5548, 0.3291, 0.4545, ..., 0.6191, 0.2200, 0.6842]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8740, 0.8697, 0.8262, ..., 0.4420, 0.1114, 0.8177]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.521591186523438 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 2499988, + 2499995, 2500000]), + col_indices=tensor([ 55151, 55855, 262240, ..., 129037, 280325, + 497898]), + values=tensor([0.5548, 0.3291, 0.4545, ..., 0.6191, 0.2200, 0.6842]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8740, 0.8697, 0.8262, ..., 0.4420, 0.1114, 0.8177]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.521591186523438 seconds + +[19.45, 17.88, 18.22, 21.5, 18.64, 18.15, 18.16, 17.8, 17.97, 17.95] +[87.25] +14.585368394851685 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8088, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.521591186523438, 'TIME_S_1KI': 1.30088911801724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1272.5733924508095, 'W': 87.25} +[19.45, 17.88, 18.22, 21.5, 18.64, 18.15, 18.16, 17.8, 17.97, 17.95, 18.5, 17.88, 17.95, 18.22, 18.0, 18.18, 18.21, 18.1, 18.01, 17.8] +329.72 +16.486 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8088, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.521591186523438, 'TIME_S_1KI': 1.30088911801724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1272.5733924508095, 'W': 87.25, 'J_1KI': 157.34092389352244, 'W_1KI': 10.787586547972305, 'W_D': 70.764, 'J_D': 1032.1190090932846, 'W_D_1KI': 8.749258160237389, 'J_D_1KI': 1.0817579327692124} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..17825c9 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1356, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.575294256210327, "TIME_S_1KI": 7.798889569476643, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1502.6908141350748, "W": 76.43, "J_1KI": 1108.179066471294, "W_1KI": 56.36430678466077, "W_D": 60.11625000000001, "J_D": 1181.9460507032277, "W_D_1KI": 44.333517699115056, "J_D_1KI": 32.694334586368036} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..b27b9d0 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.7741575241088867} + 
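The run protocol visible in each .output file: spmv.py is launched inside the pytorch-xeon_4216.sif container with OpenMP pinned to 16 places on NUMA node 0 (OMP_PROC_BIND=true, OMP_PLACES={0:16}, numactl --cpunodebind=0 --membind=0), first with 100 iterations, then with the iteration count rescaled from the measured TIME_S until a run exceeds the 10 s BASELINE_TIME_S (100 -> 517 -> 665 and 100 -> 7368 -> 8088 in the 500000-row outputs above, both consistent with rescaling toward roughly a 10.5 s target). A sketch of that outer loop, assuming only what the logged command lines show; the rescaling constant and the stdout parsing are guesses:

import json, subprocess

def run_spmv(iterations, size, density, cores=16):
    # Command list copied from the logged invocations; only the iteration count changes between runs.
    cmd = ["apptainer", "run",
           "--env", "OMP_PROC_BIND=true", "--env", f"OMP_PLACES={{0:{cores}}}",
           "pytorch-xeon_4216.sif",
           "numactl", "--cpunodebind=0", "--membind=0",
           "python3", "spmv.py", "synthetic", "csr", str(iterations),
           "-ss", str(size), "-sd", str(density), "-c", str(cores)]
    out = subprocess.run(cmd, capture_output=True, text=True, check=True).stdout
    # Assumed: the first stdout line is the JSON result dict, as shown in the logs above.
    return json.loads(out.splitlines()[0])["TIME_S"]

def calibrate(size, density, target_s=10.5, baseline_s=10.0):
    iters = 100
    t = run_spmv(iters, size, density)
    while t < baseline_s:                   # stop once the run exceeds BASELINE_TIME_S
        iters = int(iters * target_s / t)   # rescale toward ~10.5 s (inferred from the logged counts)
        t = run_spmv(iters, size, density)
    return iters, t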
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 27, 54, ..., 12499941, + 12499975, 12500000]), + col_indices=tensor([ 19879, 19996, 22547, ..., 457855, 459779, + 462945]), + values=tensor([0.0262, 0.3741, 0.0922, ..., 0.5524, 0.1014, 0.8276]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.7209, 0.9008, 0.7814, ..., 0.2206, 0.4926, 0.1534]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 0.7741575241088867 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1356', '-ss', '500000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.575294256210327} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 30, 59, ..., 12499949, + 12499973, 12500000]), + col_indices=tensor([ 13892, 45461, 46784, ..., 469557, 488276, + 489508]), + values=tensor([0.8469, 0.7554, 0.2394, ..., 0.6309, 0.5261, 0.2516]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9763, 0.3043, 0.0965, ..., 0.1822, 0.5455, 0.4604]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.575294256210327 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 30, 59, ..., 12499949, + 12499973, 12500000]), + col_indices=tensor([ 13892, 45461, 46784, ..., 469557, 488276, + 489508]), + values=tensor([0.8469, 0.7554, 0.2394, ..., 0.6309, 0.5261, 0.2516]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9763, 0.3043, 0.0965, ..., 0.1822, 0.5455, 0.4604]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.575294256210327 seconds + +[18.28, 17.9, 18.0, 17.69, 18.19, 17.78, 17.99, 17.71, 17.86, 17.72] +[76.43] +19.66100764274597 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1356, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.575294256210327, 'TIME_S_1KI': 7.798889569476643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.6908141350748, 'W': 76.43} +[18.28, 17.9, 18.0, 17.69, 18.19, 17.78, 17.99, 17.71, 17.86, 17.72, 22.73, 18.23, 18.33, 18.05, 18.19, 17.82, 18.25, 18.16, 17.83, 17.86] +326.275 +16.31375 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1356, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.575294256210327, 'TIME_S_1KI': 7.798889569476643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.6908141350748, 'W': 76.43, 'J_1KI': 1108.179066471294, 'W_1KI': 56.36430678466077, 'W_D': 60.11625000000001, 'J_D': 1181.9460507032277, 'W_D_1KI': 44.333517699115056, 'J_D_1KI': 32.694334586368036} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..8609bc2 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 80207, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.527606010437012, "TIME_S_1KI": 0.13125545164932004, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1181.3196375966072, "W": 83.35, "J_1KI": 14.728385771773127, "W_1KI": 1.039186105950852, "W_D": 67.15325, "J_D": 951.7630828246474, "W_D_1KI": 0.8372492425848118, "J_D_1KI": 0.010438605640216089} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..381203d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03568005561828613} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 9, ..., 249987, 249994, + 250000]), + col_indices=tensor([ 1312, 19953, 25282, ..., 26652, 33001, 38879]), + values=tensor([0.3658, 0.9367, 0.4335, ..., 0.7027, 0.8564, 0.9906]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4942, 0.6881, 0.2872, ..., 0.3001, 0.6556, 0.7300]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.03568005561828613 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '29428', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 3.8524389266967773} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 8, ..., 249998, 250000, + 250000]), + col_indices=tensor([ 4346, 9215, 13661, ..., 37674, 16332, 22572]), + values=tensor([0.5552, 0.4398, 0.7001, ..., 0.6234, 0.7005, 0.0878]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5304, 0.8900, 0.0447, ..., 0.3418, 0.1958, 0.2486]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 3.8524389266967773 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '80207', '-ss', '50000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.527606010437012} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 19, ..., 249993, 249998, + 250000]), + col_indices=tensor([ 8045, 12111, 14477, ..., 47402, 12160, 19361]), + values=tensor([0.9649, 0.3819, 0.5636, ..., 0.2633, 0.1370, 0.0196]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0927, 0.4142, 0.0895, ..., 0.8219, 0.5339, 0.8064]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.527606010437012 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 19, ..., 249993, 249998, + 250000]), + col_indices=tensor([ 8045, 12111, 14477, ..., 47402, 12160, 19361]), + values=tensor([0.9649, 0.3819, 0.5636, ..., 0.2633, 0.1370, 0.0196]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0927, 0.4142, 0.0895, ..., 0.8219, 0.5339, 0.8064]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.527606010437012 seconds + +[18.32, 17.98, 17.86, 18.86, 18.68, 17.81, 17.68, 18.11, 17.76, 17.85] +[83.35] +14.173001050949097 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 80207, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.527606010437012, 'TIME_S_1KI': 0.13125545164932004, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1181.3196375966072, 'W': 83.35} +[18.32, 17.98, 17.86, 18.86, 18.68, 17.81, 17.68, 18.11, 17.76, 17.85, 18.01, 18.02, 17.88, 17.81, 17.97, 17.93, 17.96, 17.74, 17.82, 17.95] +323.93499999999995 +16.196749999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 80207, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.527606010437012, 'TIME_S_1KI': 0.13125545164932004, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1181.3196375966072, 'W': 83.35, 'J_1KI': 14.728385771773127, 'W_1KI': 1.039186105950852, 'W_D': 67.15325, 'J_D': 951.7630828246474, 'W_D_1KI': 0.8372492425848118, 'J_D_1KI': 0.010438605640216089} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..1ef0389 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 17086, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.693793773651123, "TIME_S_1KI": 0.6258804737007564, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1274.9557392597198, "W": 87.4, "J_1KI": 74.61990748330328, "W_1KI": 5.1152990752663, "W_D": 70.92850000000001, "J_D": 1034.6761802297833, "W_D_1KI": 
4.151264192906474, "J_D_1KI": 0.24296290488742092} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..168fd00 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,86 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07488131523132324} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 64, 108, ..., 2499910, + 2499955, 2500000]), + col_indices=tensor([ 984, 1625, 1972, ..., 46651, 48149, 48861]), + values=tensor([5.1121e-01, 5.6272e-01, 1.1145e-04, ..., + 9.0355e-01, 4.1789e-01, 4.2355e-01]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3839, 0.9792, 0.8841, ..., 0.7211, 0.2437, 0.3590]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 0.07488131523132324 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '14022', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.616644144058228} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 54, 105, ..., 2499907, + 2499960, 2500000]), + col_indices=tensor([ 369, 1157, 3425, ..., 45077, 46820, 49764]), + values=tensor([0.6429, 0.4063, 0.5775, ..., 0.7664, 0.6925, 0.8507]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4381, 0.1550, 0.0791, ..., 0.0177, 0.9903, 0.0608]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 8.616644144058228 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '17086', '-ss', '50000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.693793773651123} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 91, ..., 2499883, + 2499936, 2500000]), + col_indices=tensor([ 188, 2361, 2646, ..., 48274, 48923, 49377]), + values=tensor([0.2734, 0.1056, 0.9298, ..., 0.4005, 0.9270, 0.9473]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1965, 0.7813, 0.8576, ..., 0.6695, 0.1581, 0.1443]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.693793773651123 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 91, ..., 2499883, + 2499936, 2500000]), + col_indices=tensor([ 188, 2361, 2646, ..., 48274, 48923, 49377]), + values=tensor([0.2734, 0.1056, 0.9298, ..., 0.4005, 0.9270, 0.9473]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1965, 0.7813, 0.8576, ..., 0.6695, 0.1581, 0.1443]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.693793773651123 seconds + +[18.43, 18.16, 17.96, 18.08, 18.33, 18.04, 18.36, 18.18, 18.22, 18.06] +[87.4] +14.587594270706177 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17086, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.693793773651123, 'TIME_S_1KI': 0.6258804737007564, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.9557392597198, 'W': 87.4} +[18.43, 18.16, 17.96, 18.08, 18.33, 18.04, 18.36, 18.18, 18.22, 18.06, 18.48, 17.93, 19.19, 18.85, 18.16, 18.04, 18.63, 18.25, 18.43, 18.27] +329.43 +16.4715 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17086, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.693793773651123, 'TIME_S_1KI': 0.6258804737007564, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.9557392597198, 'W': 87.4, 'J_1KI': 74.61990748330328, 'W_1KI': 5.1152990752663, 'W_D': 70.92850000000001, 'J_D': 1034.6761802297833, 'W_D_1KI': 4.151264192906474, 'J_D_1KI': 0.24296290488742092} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..6f4838a --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1159, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.524485111236572, "TIME_S_1KI": 9.080660147745101, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2883.053437728882, "W": 54.72, "J_1KI": 2487.5353215952387, "W_1KI": 47.21311475409836, "W_D": 38.2975, "J_D": 2017.7949384397268, "W_D_1KI": 33.043572044866266, "J_D_1KI": 28.51041591446615} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..c966aa4 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,105 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.2965989112854004} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 462, 963, ..., 24999009, + 24999507, 25000000]), + col_indices=tensor([ 19, 54, 59, ..., 49770, 49789, 49840]), + values=tensor([0.0062, 0.3047, 0.0339, ..., 0.6533, 0.8264, 0.4065]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.0572, 0.3375, 0.5398, ..., 0.2388, 0.0349, 0.7555]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 1.2965989112854004 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '809', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.848743677139282} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 486, 980, ..., 24998984, + 24999490, 25000000]), + col_indices=tensor([ 15, 197, 386, ..., 49782, 49793, 49889]), + values=tensor([0.3923, 0.3887, 0.8681, ..., 0.2288, 0.1762, 0.4981]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2965, 0.2935, 0.4053, ..., 0.9117, 0.1428, 0.4127]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 7.848743677139282 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1082', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.800410747528076} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 497, 982, ..., 24999036, + 24999517, 25000000]), + col_indices=tensor([ 46, 129, 426, ..., 49653, 49766, 49830]), + values=tensor([0.6195, 0.6207, 0.9497, ..., 0.4637, 0.0557, 0.5508]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.1589, 0.3084, 0.3696, ..., 0.3780, 0.7461, 0.4084]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 9.800410747528076 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1159', '-ss', '50000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.524485111236572} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 513, 1013, ..., 24998968, + 24999473, 25000000]), + col_indices=tensor([ 48, 124, 131, ..., 49410, 49843, 49893]), + values=tensor([0.3835, 0.3241, 0.0409, ..., 0.5767, 0.7491, 0.8402]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3332, 0.0589, 0.5895, ..., 0.7200, 0.0490, 0.9504]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.524485111236572 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 513, 1013, ..., 24998968, + 24999473, 25000000]), + col_indices=tensor([ 48, 124, 131, ..., 49410, 49843, 49893]), + values=tensor([0.3835, 0.3241, 0.0409, ..., 0.5767, 0.7491, 0.8402]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3332, 0.0589, 0.5895, ..., 0.7200, 0.0490, 0.9504]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.524485111236572 seconds + +[18.54, 17.98, 18.07, 17.85, 18.22, 17.93, 18.48, 18.6, 18.31, 18.9] +[54.72] +52.68738007545471 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1159, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.524485111236572, 'TIME_S_1KI': 9.080660147745101, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2883.053437728882, 'W': 54.72} +[18.54, 17.98, 18.07, 17.85, 18.22, 17.93, 18.48, 18.6, 18.31, 18.9, 18.48, 18.15, 18.06, 18.06, 17.98, 17.84, 18.37, 17.81, 17.96, 21.64] +328.45 +16.4225 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1159, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.524485111236572, 'TIME_S_1KI': 9.080660147745101, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2883.053437728882, 'W': 54.72, 'J_1KI': 2487.5353215952387, 'W_1KI': 47.21311475409836, 'W_D': 38.2975, 'J_D': 2017.7949384397268, 'W_D_1KI': 33.043572044866266, 'J_D_1KI': 28.51041591446615} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..be4ba65 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 113077, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.7483811378479, "TIME_S_1KI": 0.09505364608052831, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1073.5031987619402, "W": 76.43000000000002, "J_1KI": 9.49355924513332, "W_1KI": 0.6759111048223778, "W_D": 60.05525000000002, "J_D": 843.5104406312707, "W_D_1KI": 0.5311004890472866, "J_D_1KI": 0.004696803850891751} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..8dbecc1 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.02812027931213379} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([42362, 37248, 40868, ..., 37764, 10134, 17711]), + values=tensor([0.4763, 0.9715, 0.1475, ..., 0.3126, 0.8815, 0.0115]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9973, 0.6491, 0.6388, ..., 0.7622, 0.1974, 0.5505]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.02812027931213379 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '37339', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.4671623706817627} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([ 629, 39590, 3531, ..., 29068, 27842, 31077]), + values=tensor([0.8879, 0.6863, 0.1252, ..., 0.4874, 0.7763, 0.0925]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1798, 0.4085, 0.6464, ..., 0.4512, 0.1603, 0.8018]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 3.4671623706817627 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '113077', '-ss', '50000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.7483811378479} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 3, ..., 24998, 24999, 25000]), + col_indices=tensor([ 3755, 45041, 41651, ..., 28239, 26624, 23506]), + values=tensor([0.7255, 0.9443, 0.1927, ..., 0.4549, 0.6422, 0.3790]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1750, 0.3712, 0.8832, ..., 0.2728, 0.8510, 0.3193]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.7483811378479 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 3, ..., 24998, 24999, 25000]), + col_indices=tensor([ 3755, 45041, 41651, ..., 28239, 26624, 23506]), + values=tensor([0.7255, 0.9443, 0.1927, ..., 0.4549, 0.6422, 0.3790]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1750, 0.3712, 0.8832, ..., 0.2728, 0.8510, 0.3193]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.7483811378479 seconds + +[18.23, 17.8, 18.68, 17.79, 18.26, 17.97, 18.22, 17.87, 18.18, 18.24] +[76.43] +14.045573711395264 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 113077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.7483811378479, 'TIME_S_1KI': 0.09505364608052831, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1073.5031987619402, 'W': 76.43000000000002} +[18.23, 17.8, 18.68, 17.79, 18.26, 17.97, 18.22, 17.87, 18.18, 18.24, 18.73, 17.83, 18.03, 17.81, 18.07, 18.07, 17.89, 17.96, 20.53, 17.87] +327.495 +16.37475 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 113077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.7483811378479, 'TIME_S_1KI': 0.09505364608052831, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1073.5031987619402, 'W': 76.43000000000002, 'J_1KI': 9.49355924513332, 'W_1KI': 0.6759111048223778, 'W_D': 60.05525000000002, 'J_D': 843.5104406312707, 'W_D_1KI': 0.5311004890472866, 'J_D_1KI': 0.004696803850891751} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..e084c9d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 88156, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.868364572525024, "TIME_S_1KI": 0.12328559113985461, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1132.9122091054917, "W": 79.86, "J_1KI": 12.851220666834834, "W_1KI": 0.9058940968283498, "W_D": 63.193749999999994, "J_D": 896.4809781387447, "W_D_1KI": 0.716840033576841, "J_D_1KI": 
0.00813149455030674} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..a0c02dc --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.028945207595825195} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 5, ..., 124995, 124996, + 125000]), + col_indices=tensor([ 6853, 13029, 2913, ..., 16543, 38720, 48018]), + values=tensor([0.0310, 0.8074, 0.8860, ..., 0.7640, 0.7803, 0.3703]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.5471, 0.4507, 0.3833, ..., 0.1733, 0.0716, 0.3889]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.028945207595825195 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '36275', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.320595741271973} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 6, ..., 124997, 124999, + 125000]), + col_indices=tensor([26872, 2155, 12844, ..., 14460, 31839, 14088]), + values=tensor([0.4897, 0.3509, 0.0171, ..., 0.2036, 0.0300, 0.0283]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.1984, 0.6261, 0.3986, ..., 0.2975, 0.8868, 0.2739]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 4.320595741271973 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '88156', '-ss', '50000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.868364572525024} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 6, ..., 124997, 124998, + 125000]), + col_indices=tensor([17427, 27540, 29335, ..., 48451, 4975, 32778]), + values=tensor([0.3820, 0.7973, 0.1142, ..., 0.5863, 0.3733, 0.5873]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.7507, 0.0731, 0.4946, ..., 0.5316, 0.6298, 0.9971]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.868364572525024 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 6, ..., 124997, 124998, + 125000]), + col_indices=tensor([17427, 27540, 29335, ..., 48451, 4975, 32778]), + values=tensor([0.3820, 0.7973, 0.1142, ..., 0.5863, 0.3733, 0.5873]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.7507, 0.0731, 0.4946, ..., 0.5316, 0.6298, 0.9971]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.868364572525024 seconds + +[18.49, 17.88, 18.37, 17.92, 18.86, 17.84, 18.14, 20.87, 18.57, 18.13] +[79.86] +14.186228513717651 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 88156, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.868364572525024, 'TIME_S_1KI': 0.12328559113985461, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.9122091054917, 'W': 79.86} +[18.49, 17.88, 18.37, 17.92, 18.86, 17.84, 18.14, 20.87, 18.57, 18.13, 18.45, 17.94, 18.01, 21.62, 18.32, 18.14, 18.03, 18.0, 18.36, 17.84] +333.32500000000005 +16.66625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 88156, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.868364572525024, 'TIME_S_1KI': 0.12328559113985461, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.9122091054917, 'W': 79.86, 'J_1KI': 12.851220666834834, 'W_1KI': 0.9058940968283498, 'W_D': 63.193749999999994, 'J_D': 896.4809781387447, 'W_D_1KI': 0.716840033576841, 'J_D_1KI': 0.00813149455030674} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..63a6b21 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 339415, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.661308526992798, "TIME_S_1KI": 0.031410834898259646, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1042.263754928112, "W": 73.63, "J_1KI": 3.0707651545397576, "W_1KI": 0.2169320743043177, "W_D": 56.882, "J_D": 805.1887397503853, "W_D_1KI": 0.1675883505443189, "J_D_1KI": 0.0004937564649303033} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..4acadea --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,82 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02093958854675293} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([ 145, 181, 299, ..., 1340, 4416, 166]), + values=tensor([0.2713, 0.7441, 0.5681, ..., 0.3863, 0.3329, 0.3299]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([6.8436e-01, 7.7662e-01, 5.3826e-02, ..., 2.4557e-01, 1.4818e-01, + 6.5619e-04]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.02093958854675293 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50144', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.5512330532073975} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([ 261, 2124, 2825, ..., 2342, 1684, 3815]), + values=tensor([0.1132, 0.9807, 0.3410, ..., 0.0783, 0.3569, 0.0713]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8182, 0.0907, 0.1359, ..., 0.4059, 0.0754, 0.0727]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 1.5512330532073975 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '339415', '-ss', '5000', '-sd', '0.0001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.661308526992798} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), + col_indices=tensor([ 91, 2131, 2855, ..., 2446, 470, 1581]), + values=tensor([0.9229, 0.3729, 0.6792, ..., 0.1416, 0.2267, 0.3921]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.5750, 0.0749, 0.6665, ..., 0.8045, 0.0578, 0.3106]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.661308526992798 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2497, 2498, 2500]), + col_indices=tensor([ 91, 2131, 2855, ..., 2446, 470, 1581]), + values=tensor([0.9229, 0.3729, 0.6792, ..., 0.1416, 0.2267, 0.3921]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.5750, 0.0749, 0.6665, ..., 0.8045, 0.0578, 0.3106]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.661308526992798 seconds + +[18.68, 17.92, 18.02, 18.26, 18.14, 18.26, 18.3, 17.84, 18.15, 17.87] +[73.63] +14.155422449111938 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 339415, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.661308526992798, 'TIME_S_1KI': 0.031410834898259646, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1042.263754928112, 'W': 73.63} +[18.68, 17.92, 18.02, 18.26, 18.14, 18.26, 18.3, 17.84, 18.15, 17.87, 18.33, 18.37, 18.02, 18.02, 22.34, 18.22, 18.25, 22.38, 17.9, 18.26] +334.9599999999999 +16.747999999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 339415, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.661308526992798, 'TIME_S_1KI': 0.031410834898259646, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1042.263754928112, 'W': 73.63, 'J_1KI': 3.0707651545397576, 'W_1KI': 0.2169320743043177, 'W_D': 56.882, 'J_D': 805.1887397503853, 'W_D_1KI': 0.1675883505443189, 'J_D_1KI': 0.0004937564649303033} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..4f7c98c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 242735, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.6920804977417, "TIME_S_1KI": 0.044048367552028754, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1022.2163880014419, "W": 74.91, "J_1KI": 4.211244311703883, "W_1KI": 0.30860815292397054, "W_D": 58.500499999999995, "J_D": 798.2935496766567, "W_D_1KI": 0.241005623416483, "J_D_1KI": 0.000992875454369922} diff --git 
a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..6818997 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.019707918167114258} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24992, 24998, 25000]), + col_indices=tensor([2443, 3271, 4233, ..., 3520, 3792, 4350]), + values=tensor([0.8426, 0.5824, 0.3389, ..., 0.9840, 0.1147, 0.8239]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5471, 0.6226, 0.3278, ..., 0.2451, 0.7959, 0.6112]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.019707918167114258 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53278', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.304640293121338} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 13, ..., 24990, 24994, 25000]), + col_indices=tensor([ 876, 897, 2274, ..., 2103, 3712, 4740]), + values=tensor([0.9508, 0.2626, 0.2379, ..., 0.2341, 0.9066, 0.6182]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8511, 0.0306, 0.7639, ..., 0.5025, 0.8599, 0.9690]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 2.304640293121338 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '242735', '-ss', '5000', '-sd', '0.001', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.6920804977417} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 10, ..., 24991, 24997, 25000]), + col_indices=tensor([1152, 1177, 2929, ..., 1264, 2609, 4571]), + values=tensor([0.4587, 0.9835, 0.0653, ..., 0.8655, 0.2110, 0.7343]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5612, 0.6517, 0.4101, ..., 0.1691, 0.2414, 0.3717]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.6920804977417 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 10, ..., 24991, 24997, 25000]), + col_indices=tensor([1152, 1177, 2929, ..., 1264, 2609, 4571]), + values=tensor([0.4587, 0.9835, 0.0653, ..., 0.8655, 0.2110, 0.7343]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5612, 0.6517, 0.4101, ..., 0.1691, 0.2414, 0.3717]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.6920804977417 seconds + +[18.79, 17.83, 18.25, 17.86, 18.02, 18.39, 18.09, 17.82, 18.07, 17.75] +[74.91] +13.64592695236206 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 242735, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.6920804977417, 'TIME_S_1KI': 0.044048367552028754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1022.2163880014419, 'W': 74.91} +[18.79, 17.83, 18.25, 17.86, 18.02, 18.39, 18.09, 17.82, 18.07, 17.75, 18.54, 18.01, 18.05, 19.12, 19.51, 18.12, 18.41, 17.83, 18.29, 17.96] +328.19 +16.4095 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 242735, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.6920804977417, 'TIME_S_1KI': 0.044048367552028754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1022.2163880014419, 'W': 74.91, 'J_1KI': 4.211244311703883, 'W_1KI': 0.30860815292397054, 'W_D': 58.500499999999995, 'J_D': 798.2935496766567, 'W_D_1KI': 0.241005623416483, 'J_D_1KI': 0.000992875454369922} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..64ab37a --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 161950, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.493181943893433, "TIME_S_1KI": 0.06479272580360255, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1161.5640621185303, "W": 84.0, "J_1KI": 
7.172362223640199, "W_1KI": 0.5186786045075641, "W_D": 67.18725, "J_D": 929.0749408639671, "W_D_1KI": 0.41486415560358136, "J_D_1KI": 0.0025616804915318393} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..f4d043e --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.022989749908447266} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 112, ..., 249913, 249957, + 250000]), + col_indices=tensor([ 48, 54, 234, ..., 4368, 4853, 4864]), + values=tensor([0.3452, 0.1008, 0.0125, ..., 0.4983, 0.8936, 0.7126]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5619, 0.6285, 0.8433, ..., 0.7149, 0.5039, 0.6932]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.022989749908447266 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '45672', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.961124897003174} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 98, ..., 249913, 249948, + 250000]), + col_indices=tensor([ 202, 295, 369, ..., 4836, 4929, 4943]), + values=tensor([0.0950, 0.5576, 0.0327, ..., 0.3192, 0.9052, 0.1110]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6070, 0.1874, 0.1819, ..., 0.4756, 0.1999, 0.3064]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 2.961124897003174 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '161950', '-ss', '5000', '-sd', '0.01', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.493181943893433} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 48, 111, ..., 249911, 249964, + 250000]), + col_indices=tensor([ 19, 43, 144, ..., 4843, 4924, 4947]), + values=tensor([0.0652, 0.5238, 0.9360, ..., 0.5118, 0.2782, 0.1343]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4213, 0.5109, 0.7737, ..., 0.0670, 0.5810, 0.9899]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.493181943893433 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 48, 111, ..., 249911, 249964, + 250000]), + col_indices=tensor([ 19, 43, 144, ..., 4843, 4924, 4947]), + values=tensor([0.0652, 0.5238, 0.9360, ..., 0.5118, 0.2782, 0.1343]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4213, 0.5109, 0.7737, ..., 0.0670, 0.5810, 0.9899]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.493181943893433 seconds + +[18.86, 18.02, 19.18, 18.0, 18.43, 21.89, 19.39, 17.96, 18.27, 17.81] +[84.0] +13.82814359664917 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 161950, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.493181943893433, 'TIME_S_1KI': 0.06479272580360255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1161.5640621185303, 'W': 84.0} +[18.86, 18.02, 19.18, 18.0, 18.43, 21.89, 19.39, 17.96, 18.27, 17.81, 18.1, 20.23, 20.02, 18.19, 17.98, 17.88, 18.2, 17.87, 18.32, 18.08] +336.255 +16.81275 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 161950, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.493181943893433, 'TIME_S_1KI': 0.06479272580360255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1161.5640621185303, 'W': 84.0, 'J_1KI': 7.172362223640199, 'W_1KI': 0.5186786045075641, 'W_D': 67.18725, 'J_D': 929.0749408639671, 'W_D_1KI': 0.41486415560358136, 'J_D_1KI': 0.0025616804915318393} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..c0ca085 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 46969, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.723698854446411, "TIME_S_1KI": 0.22831439575989293, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1262.0827813720703, "W": 87.97, "J_1KI": 26.87054826315379, "W_1KI": 1.8729374693947072, "W_D": 71.6635, "J_D": 1028.1376537780761, "W_D_1KI": 1.5257616725925611, "J_D_1KI": 0.03248444021785776} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..62668bd --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03811240196228027} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta 
state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 252, 486, ..., 1249499, + 1249768, 1250000]), + col_indices=tensor([ 15, 65, 81, ..., 4947, 4948, 4952]), + values=tensor([0.2497, 0.0794, 0.3182, ..., 0.5399, 0.7483, 0.2341]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6929, 0.1024, 0.7145, ..., 0.7803, 0.6014, 0.5585]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.03811240196228027 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27550', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 6.158770799636841} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 250, 496, ..., 1249504, + 1249751, 1250000]), + col_indices=tensor([ 13, 88, 93, ..., 4888, 4919, 4936]), + values=tensor([0.3293, 0.4307, 0.3782, ..., 0.2045, 0.2965, 0.3765]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2742, 0.5184, 0.9225, ..., 0.0830, 0.2762, 0.4744]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 6.158770799636841 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46969', '-ss', '5000', '-sd', '0.05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.723698854446411} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 267, 518, ..., 1249533, + 1249757, 1250000]), + col_indices=tensor([ 4, 12, 26, ..., 4948, 4976, 4977]), + values=tensor([0.8262, 0.2304, 0.8718, ..., 0.9371, 0.9418, 0.0811]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.8290, 0.5732, 0.0178, ..., 0.9134, 0.7238, 0.4621]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.723698854446411 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 267, 518, ..., 1249533, + 1249757, 1250000]), + col_indices=tensor([ 4, 12, 26, ..., 4948, 4976, 4977]), + values=tensor([0.8262, 0.2304, 0.8718, ..., 0.9371, 0.9418, 0.0811]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.8290, 0.5732, 0.0178, ..., 0.9134, 0.7238, 0.4621]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.723698854446411 seconds + +[18.22, 17.77, 18.1, 17.81, 18.08, 18.08, 18.29, 17.93, 18.14, 17.97] +[87.97] +14.34674072265625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46969, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.723698854446411, 'TIME_S_1KI': 0.22831439575989293, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1262.0827813720703, 'W': 87.97} +[18.22, 17.77, 18.1, 17.81, 18.08, 18.08, 18.29, 17.93, 18.14, 17.97, 18.49, 18.0, 18.1, 18.25, 18.1, 17.98, 17.87, 18.07, 17.92, 20.6] +326.13 +16.3065 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46969, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.723698854446411, 'TIME_S_1KI': 0.22831439575989293, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1262.0827813720703, 'W': 87.97, 'J_1KI': 26.87054826315379, 'W_1KI': 1.8729374693947072, 'W_D': 71.6635, 'J_D': 1028.1376537780761, 'W_D_1KI': 1.5257616725925611, 'J_D_1KI': 0.03248444021785776} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..fe585f9 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 19220, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.306657314300537, "TIME_S_1KI": 0.536246478371516, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1258.2980075454711, "W": 87.22999999999999, "J_1KI": 65.46815856115875, "W_1KI": 4.538501560874089, "W_D": 70.87349999999999, "J_D": 1022.354509202957, "W_D_1KI": 3.6874869927159204, "J_D_1KI": 0.19185676340873678} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..2db63b3 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], 
"MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.06937050819396973} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 517, 1013, ..., 2498965, + 2499518, 2500000]), + col_indices=tensor([ 16, 31, 39, ..., 4973, 4987, 4996]), + values=tensor([0.8092, 0.6907, 0.1859, ..., 0.6156, 0.1820, 0.0827]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0341, 0.1297, 0.9553, ..., 0.9560, 0.1422, 0.0438]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.06937050819396973 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15136', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.268742084503174} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 498, 1015, ..., 2498995, + 2499510, 2500000]), + col_indices=tensor([ 1, 5, 12, ..., 4987, 4992, 4994]), + values=tensor([0.2706, 0.5291, 0.0606, ..., 0.9998, 0.6766, 0.8077]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5238, 0.3873, 0.9372, ..., 0.7751, 0.3587, 0.1743]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 8.268742084503174 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '19220', '-ss', '5000', '-sd', '0.1', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.306657314300537} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 478, 976, ..., 2498986, + 2499483, 2500000]), + col_indices=tensor([ 0, 15, 20, ..., 4988, 4995, 4997]), + values=tensor([0.9696, 0.2544, 0.3304, ..., 0.5139, 0.4686, 0.4850]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8873, 0.6521, 0.3260, ..., 0.9177, 0.3863, 0.5956]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.306657314300537 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 478, 976, ..., 2498986, + 2499483, 2500000]), + col_indices=tensor([ 0, 15, 20, ..., 4988, 4995, 4997]), + values=tensor([0.9696, 0.2544, 0.3304, ..., 0.5139, 0.4686, 0.4850]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8873, 0.6521, 0.3260, ..., 0.9177, 0.3863, 0.5956]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.306657314300537 seconds + +[18.67, 18.15, 17.98, 18.09, 18.08, 18.12, 18.09, 17.98, 17.86, 18.03] +[87.23] +14.425060272216797 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19220, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.306657314300537, 'TIME_S_1KI': 0.536246478371516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1258.2980075454711, 'W': 87.22999999999999} +[18.67, 18.15, 17.98, 18.09, 18.08, 18.12, 18.09, 17.98, 17.86, 18.03, 18.76, 17.96, 18.22, 17.88, 18.31, 18.19, 18.22, 17.8, 18.27, 20.4] +327.13 +16.3565 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19220, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.306657314300537, 'TIME_S_1KI': 0.536246478371516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1258.2980075454711, 'W': 87.22999999999999, 'J_1KI': 65.46815856115875, 'W_1KI': 4.538501560874089, 'W_D': 70.87349999999999, 'J_D': 1022.354509202957, 'W_D_1KI': 3.6874869927159204, 'J_D_1KI': 0.19185676340873678} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..5da43df --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 9074, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.487424612045288, "TIME_S_1KI": 1.1557664328901573, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1301.6191907930374, "W": 85.83, "J_1KI": 143.44491853571054, "W_1KI": 9.458893541988099, "W_D": 69.53375, "J_D": 1054.4851847583054, "W_D_1KI": 7.662965616045845, "J_D_1KI": 0.8444969821518454} diff --git 
a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..4c63e35 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.12977051734924316} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1037, 2033, ..., 4997947, + 4998969, 5000000]), + col_indices=tensor([ 0, 3, 11, ..., 4985, 4988, 4990]), + values=tensor([0.1539, 0.2882, 0.0917, ..., 0.8336, 0.9260, 0.3814]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.9778, 0.3097, 0.5480, ..., 0.9590, 0.3024, 0.0294]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 0.12977051734924316 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8091', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 9.36232042312622} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 972, 1989, ..., 4997978, + 4998985, 5000000]), + col_indices=tensor([ 0, 9, 19, ..., 4989, 4992, 4995]), + values=tensor([0.2585, 0.0110, 0.4823, ..., 0.4314, 0.8099, 0.9487]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7513, 0.6352, 0.8184, ..., 0.0273, 0.2479, 0.5631]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 9.36232042312622 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9074', '-ss', '5000', '-sd', '0.2', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.487424612045288} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1061, 2118, ..., 4997993, + 4998997, 5000000]), + col_indices=tensor([ 3, 6, 10, ..., 4996, 4998, 4999]), + values=tensor([0.5066, 0.7039, 0.5374, ..., 0.1064, 0.9581, 0.5937]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.1982, 0.1257, 0.6934, ..., 0.0401, 0.8872, 0.0311]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.487424612045288 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1061, 2118, ..., 4997993, + 4998997, 5000000]), + col_indices=tensor([ 3, 6, 10, ..., 4996, 4998, 4999]), + values=tensor([0.5066, 0.7039, 0.5374, ..., 0.1064, 0.9581, 0.5937]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.1982, 0.1257, 0.6934, ..., 0.0401, 0.8872, 0.0311]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.487424612045288 seconds + +[18.43, 18.11, 17.89, 18.52, 18.92, 18.0, 18.0, 17.79, 18.16, 17.76] +[85.83] +15.16508436203003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9074, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.487424612045288, 'TIME_S_1KI': 1.1557664328901573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1301.6191907930374, 'W': 85.83} +[18.43, 18.11, 17.89, 18.52, 18.92, 18.0, 18.0, 17.79, 18.16, 17.76, 18.23, 17.97, 18.42, 17.87, 17.92, 18.08, 18.11, 17.82, 17.87, 18.53] +325.925 +16.29625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9074, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.487424612045288, 'TIME_S_1KI': 1.1557664328901573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1301.6191907930374, 'W': 85.83, 'J_1KI': 143.44491853571054, 'W_1KI': 9.458893541988099, 'W_D': 69.53375, 'J_D': 1054.4851847583054, 'W_D_1KI': 7.662965616045845, 'J_D_1KI': 0.8444969821518454} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..c0e5e12 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 5607, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.746073484420776, "TIME_S_1KI": 1.9165460111326513, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1333.0484231472017, "W": 84.51, "J_1KI": 
237.7471773046552, "W_1KI": 15.07223113964687, "W_D": 68.30000000000001, "J_D": 1077.3542456626894, "W_D_1KI": 12.18120206884252, "J_D_1KI": 2.1724990313612484} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..4501c06 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.20109891891479492} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1526, 3035, ..., 7496941, + 7498453, 7500000]), + col_indices=tensor([ 2, 6, 10, ..., 4993, 4994, 4996]), + values=tensor([0.3579, 0.5981, 0.5931, ..., 0.3837, 0.5123, 0.1240]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.0391, 0.3406, 0.5361, ..., 0.5039, 0.0205, 0.2852]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 0.20109891891479492 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5221', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.77708387374878} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1505, 2964, ..., 7496939, + 7498442, 7500000]), + col_indices=tensor([ 2, 4, 5, ..., 4996, 4997, 4999]), + values=tensor([0.1729, 0.9195, 0.5163, ..., 0.0436, 0.1803, 0.9350]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.3955, 0.7196, 0.4505, ..., 0.8904, 0.4770, 0.0844]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 9.77708387374878 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5607', '-ss', '5000', '-sd', '0.3', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.746073484420776} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1451, 2905, ..., 7497061, + 7498533, 7500000]), + col_indices=tensor([ 4, 9, 12, ..., 4994, 4998, 4999]), + values=tensor([0.3703, 0.3823, 0.6108, ..., 0.0984, 0.8524, 0.0373]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.6688, 0.5149, 0.3458, ..., 0.1151, 0.9310, 0.8037]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.746073484420776 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1451, 2905, ..., 7497061, + 7498533, 7500000]), + col_indices=tensor([ 4, 9, 12, ..., 4994, 4998, 4999]), + values=tensor([0.3703, 0.3823, 0.6108, ..., 0.0984, 0.8524, 0.0373]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.6688, 0.5149, 0.3458, ..., 0.1151, 0.9310, 0.8037]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.746073484420776 seconds + +[18.36, 17.95, 18.08, 17.67, 18.17, 17.84, 18.12, 17.84, 18.09, 18.06] +[84.51] +15.77385425567627 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5607, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.746073484420776, 'TIME_S_1KI': 1.9165460111326513, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.0484231472017, 'W': 84.51} +[18.36, 17.95, 18.08, 17.67, 18.17, 17.84, 18.12, 17.84, 18.09, 18.06, 18.35, 17.89, 17.87, 18.07, 18.23, 17.83, 18.12, 17.92, 18.19, 17.87] +324.20000000000005 +16.21 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5607, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.746073484420776, 'TIME_S_1KI': 1.9165460111326513, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.0484231472017, 'W': 84.51, 'J_1KI': 237.7471773046552, 'W_1KI': 15.07223113964687, 'W_D': 68.30000000000001, 'J_D': 1077.3542456626894, 'W_D_1KI': 12.18120206884252, 'J_D_1KI': 2.1724990313612484} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..363edc0 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2843, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.510854721069336, "TIME_S_1KI": 3.6970997963662806, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1362.9070807647706, "W": 82.31, "J_1KI": 479.39046104986653, "W_1KI": 28.951811466760464, "W_D": 66.04650000000001, "J_D": 1093.6124712638857, "W_D_1KI": 23.23126978543792, "J_D_1KI": 8.171392819359099} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..26353c7 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.3692905902862549} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1942, 3937, ..., 9996021, + 9998014, 10000000]), + col_indices=tensor([ 1, 6, 11, ..., 4995, 4996, 4997]), + values=tensor([0.9006, 0.6515, 0.7177, ..., 0.3221, 0.1090, 0.6573]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3731, 0.3806, 0.5183, ..., 0.1665, 0.0476, 0.3514]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 0.3692905902862549 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2843', '-ss', '5000', '-sd', '0.4', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.510854721069336} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2016, 4009, ..., 9996028, + 9998016, 10000000]), + col_indices=tensor([ 0, 3, 5, ..., 4991, 4992, 4997]), + values=tensor([0.1122, 0.1212, 0.4120, ..., 0.9869, 0.5095, 0.1756]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1243, 0.0753, 0.2426, ..., 0.5390, 0.1485, 0.6469]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.510854721069336 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2016, 4009, ..., 9996028, + 9998016, 10000000]), + col_indices=tensor([ 0, 3, 5, ..., 4991, 4992, 4997]), + values=tensor([0.1122, 0.1212, 0.4120, ..., 0.9869, 0.5095, 0.1756]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1243, 0.0753, 0.2426, ..., 0.5390, 0.1485, 0.6469]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.510854721069336 seconds + +[18.34, 18.07, 17.87, 17.95, 18.1, 18.25, 18.13, 17.86, 18.35, 17.99] +[82.31] +16.55821990966797 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.510854721069336, 'TIME_S_1KI': 3.6970997963662806, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1362.9070807647706, 'W': 82.31} +[18.34, 18.07, 17.87, 17.95, 18.1, 18.25, 18.13, 17.86, 18.35, 17.99, 18.23, 18.01, 18.07, 17.94, 18.3, 17.93, 18.17, 17.9, 18.02, 18.14] +325.2699999999999 +16.263499999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2843, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.510854721069336, 'TIME_S_1KI': 3.6970997963662806, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1362.9070807647706, 'W': 82.31, 'J_1KI': 479.39046104986653, 'W_1KI': 28.951811466760464, 'W_D': 66.04650000000001, 'J_D': 1093.6124712638857, 'W_D_1KI': 23.23126978543792, 'J_D_1KI': 8.171392819359099} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..5cc798d --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2367, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.31517243385315, "TIME_S_1KI": 4.35790977349098, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1435.467025976181, "W": 77.42, "J_1KI": 606.4499476029494, "W_1KI": 32.708069286016055, "W_D": 60.97375, "J_D": 1130.5322600764036, "W_D_1KI": 25.7599281791297, "J_D_1KI": 10.882943886408832} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..9c08cbf --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.4716074466705322} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2514, 4962, ..., 12495038, + 12497495, 12500000]), + col_indices=tensor([ 0, 2, 3, ..., 4994, 4998, 4999]), + values=tensor([0.6154, 0.9669, 0.2665, ..., 0.0694, 0.4098, 0.1560]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.5763, 0.6030, 0.9940, ..., 0.2125, 0.0764, 0.1654]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 0.4716074466705322 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2226', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.872556447982788} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2494, 5060, ..., 12495015, + 12497540, 12500000]), + col_indices=tensor([ 0, 1, 3, ..., 4996, 4997, 4998]), + values=tensor([0.6641, 0.1400, 0.8724, ..., 0.1588, 0.2497, 0.0435]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4856, 0.9202, 0.9997, ..., 0.6539, 0.7245, 0.6538]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 9.872556447982788 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2367', '-ss', '5000', '-sd', '0.5', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.31517243385315} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2524, 5066, ..., 12495116, + 12497547, 12500000]), + col_indices=tensor([ 0, 3, 4, ..., 4997, 4998, 4999]), + values=tensor([0.7920, 0.5049, 0.7981, ..., 0.7837, 0.6861, 0.6359]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9717, 0.2840, 0.2667, ..., 0.4259, 0.7903, 0.5279]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.31517243385315 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2524, 5066, ..., 12495116, + 12497547, 12500000]), + col_indices=tensor([ 0, 3, 4, ..., 4997, 4998, 4999]), + values=tensor([0.7920, 0.5049, 0.7981, ..., 0.7837, 0.6861, 0.6359]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9717, 0.2840, 0.2667, ..., 0.4259, 0.7903, 0.5279]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.31517243385315 seconds + +[18.22, 18.01, 18.1, 18.03, 17.97, 17.83, 18.06, 17.94, 17.8, 18.08] +[77.42] +18.54129457473755 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.31517243385315, 'TIME_S_1KI': 4.35790977349098, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1435.467025976181, 'W': 77.42} +[18.22, 18.01, 18.1, 18.03, 17.97, 17.83, 18.06, 17.94, 17.8, 18.08, 18.25, 18.11, 18.19, 18.18, 17.9, 17.94, 21.89, 18.55, 18.24, 17.82] +328.925 +16.44625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.31517243385315, 'TIME_S_1KI': 4.35790977349098, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1435.467025976181, 'W': 77.42, 'J_1KI': 606.4499476029494, 'W_1KI': 32.708069286016055, 'W_D': 60.97375, 'J_D': 1130.5322600764036, 'W_D_1KI': 25.7599281791297, 'J_D_1KI': 10.882943886408832} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..f03b074 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 352628, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.273355960845947, "TIME_S_1KI": 0.029133693186150694, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 997.6947084188461, "W": 73.02, "J_1KI": 2.8293122168938547, "W_1KI": 0.20707374343500798, "W_D": 56.76349999999999, "J_D": 775.5771512097119, "W_D_1KI": 0.16097275315630066, "J_D_1KI": 0.00045649453008921774} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..a48462c --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,356 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], 
"MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03506755828857422} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([2913, 1648, 2644, 3381, 1372, 3923, 4637, 4740, 3605, + 3437, 243, 589, 1110, 3938, 3652, 4390, 556, 1399, + 147, 3692, 103, 2333, 953, 1384, 659, 1993, 868, + 614, 4322, 4844, 1783, 3862, 2228, 274, 4521, 1878, + 2924, 2109, 11, 387, 2654, 1288, 3264, 2715, 783, + 864, 4478, 4578, 483, 3494, 1607, 265, 2566, 3626, + 4895, 3427, 3874, 3738, 1296, 657, 640, 614, 787, + 1550, 2439, 4660, 269, 3982, 4338, 3845, 315, 2485, + 3306, 2923, 3930, 969, 1457, 1460, 4340, 2708, 47, + 565, 464, 4753, 1236, 577, 1132, 636, 2214, 288, + 1601, 2014, 3937, 1117, 3005, 646, 3874, 3732, 3404, + 1250, 1263, 2388, 4327, 3524, 3464, 3950, 1309, 1452, + 2645, 1166, 3908, 4608, 3126, 3808, 84, 3605, 3573, + 3190, 4563, 3165, 2986, 3344, 1519, 2294, 4187, 3643, + 4112, 3606, 2436, 2384, 625, 3600, 188, 4651, 2237, + 3672, 1930, 392, 4612, 4270, 771, 4369, 1789, 974, + 1168, 2614, 2307, 3695, 2412, 1459, 1895, 4730, 4451, + 4843, 4273, 1205, 3679, 3774, 564, 1427, 336, 3203, + 3958, 2398, 726, 4337, 2127, 409, 1972, 1019, 1933, + 580, 2862, 277, 4275, 3026, 969, 4172, 2441, 3256, + 2388, 1358, 3834, 3780, 4305, 2776, 2801, 3127, 4359, + 3593, 1929, 1465, 4820, 2245, 3253, 2958, 788, 2326, + 4311, 1014, 3630, 3362, 3298, 1282, 4253, 3492, 4064, + 3984, 424, 3900, 4395, 2947, 1349, 3432, 2872, 1837, + 3566, 2145, 4347, 1140, 404, 290, 1225, 228, 177, + 2040, 2532, 4933, 2387, 3999, 2398, 1862, 25, 1623, + 3988, 4385, 855, 1469, 4176, 2016, 3588, 1388, 4820, + 138, 2359, 3521, 918, 3462, 3885, 3534]), + values=tensor([8.7230e-01, 2.2609e-01, 1.4440e-01, 9.6081e-01, + 3.3014e-01, 2.6737e-01, 1.3690e-01, 4.9883e-01, + 1.9471e-01, 7.1750e-01, 8.2877e-01, 7.8140e-02, + 4.1936e-01, 2.9344e-01, 1.7249e-01, 6.6013e-03, + 2.1956e-01, 3.1653e-02, 5.7945e-01, 7.5527e-01, + 7.0478e-02, 1.5299e-01, 2.9192e-01, 9.5876e-01, + 1.5930e-01, 5.3970e-01, 2.0450e-01, 6.3456e-01, + 2.1730e-01, 5.0751e-01, 5.7632e-01, 6.8165e-01, + 4.1461e-01, 3.3339e-01, 2.7547e-01, 9.8206e-02, + 2.4585e-02, 5.1512e-01, 1.4352e-01, 1.0422e-01, + 9.9198e-01, 6.0170e-01, 1.7000e-01, 2.2028e-01, + 8.5709e-01, 1.0687e-01, 5.2938e-02, 2.0130e-01, + 4.0830e-01, 6.9702e-01, 7.4460e-02, 5.4586e-03, + 2.2453e-01, 8.8854e-02, 9.3819e-01, 3.1406e-01, + 4.2855e-01, 5.9846e-01, 4.2003e-01, 5.0858e-01, + 7.3609e-01, 6.1148e-01, 7.1703e-01, 6.0561e-01, + 4.5768e-01, 1.2397e-01, 3.5627e-01, 8.0143e-01, + 6.1472e-01, 2.3014e-01, 6.6661e-01, 2.9043e-01, + 6.4900e-01, 1.0069e-01, 1.8357e-01, 7.5440e-01, + 8.4145e-01, 9.4584e-01, 6.2814e-01, 7.3938e-01, + 4.9410e-01, 3.8555e-01, 8.5109e-01, 9.4921e-01, + 2.4875e-01, 7.9168e-02, 8.0965e-02, 6.7150e-01, + 4.1002e-01, 8.0019e-01, 7.4899e-01, 3.7086e-01, + 2.2021e-01, 2.3499e-01, 1.8010e-01, 6.8475e-01, + 7.1328e-01, 6.7819e-01, 9.7254e-01, 9.5271e-01, + 3.5404e-01, 5.3603e-01, 8.9419e-01, 6.8372e-02, + 1.6953e-01, 6.5824e-01, 7.6890e-01, 9.3812e-02, + 3.8563e-01, 4.0621e-01, 9.1471e-01, 1.9823e-01, + 4.3999e-01, 3.0395e-01, 7.1340e-01, 2.5131e-01, + 8.7557e-01, 
1.5075e-01, 9.8650e-01, 5.3651e-01, + 5.0817e-01, 5.9476e-01, 1.9720e-01, 9.0115e-01, + 9.2163e-01, 9.8019e-01, 7.8150e-01, 5.5400e-01, + 1.5169e-01, 3.9261e-01, 9.8313e-01, 2.7392e-01, + 9.8061e-01, 8.9033e-01, 1.7201e-01, 1.4114e-01, + 8.8501e-01, 9.2742e-01, 1.7757e-01, 4.1985e-01, + 6.1529e-01, 7.8249e-01, 3.0982e-01, 1.9173e-01, + 4.3381e-01, 8.0061e-01, 4.6306e-01, 2.6376e-03, + 9.0015e-01, 8.6386e-01, 2.7373e-04, 8.2181e-01, + 1.7497e-01, 8.6446e-01, 3.8278e-02, 9.3738e-01, + 1.4478e-01, 3.6074e-01, 4.9870e-01, 4.1068e-01, + 2.2159e-02, 9.1232e-01, 4.8533e-01, 3.4753e-01, + 1.7253e-01, 7.5020e-01, 9.1742e-01, 5.3659e-01, + 1.8035e-01, 2.3939e-01, 4.1854e-01, 7.0508e-01, + 2.9775e-01, 6.8891e-01, 5.7259e-01, 2.1233e-01, + 2.8848e-02, 5.9922e-01, 1.3774e-01, 9.7988e-02, + 2.9594e-02, 9.9852e-01, 6.2377e-01, 1.2506e-01, + 6.2903e-01, 2.4709e-01, 7.9481e-01, 2.8351e-01, + 7.5214e-01, 8.0716e-01, 4.7242e-01, 4.4288e-02, + 1.8578e-02, 9.9935e-01, 2.7602e-01, 1.0075e-01, + 6.3752e-01, 2.7320e-01, 1.0876e-01, 8.9150e-01, + 5.5601e-01, 4.9492e-01, 5.2617e-01, 3.1376e-01, + 6.2485e-02, 6.1369e-01, 8.9174e-01, 2.5796e-01, + 3.3213e-01, 2.3179e-01, 9.8658e-01, 2.3060e-01, + 9.3135e-01, 5.2215e-02, 9.7039e-01, 5.8413e-01, + 1.2826e-01, 4.7981e-01, 9.6357e-01, 3.2682e-02, + 1.6298e-02, 1.5945e-01, 9.8437e-01, 2.0032e-01, + 9.5029e-01, 6.2119e-01, 2.5447e-01, 3.9302e-01, + 7.6479e-01, 4.3998e-01, 4.2741e-02, 8.0712e-01, + 7.5958e-01, 9.4342e-01, 6.1220e-02, 8.5277e-01, + 8.0569e-01, 8.5113e-02, 3.3815e-01, 2.2293e-01, + 7.4069e-01, 9.6487e-01, 9.2541e-01, 4.0658e-01, + 7.8100e-01, 1.8005e-01, 7.5104e-01, 2.0211e-01, + 1.3360e-01, 5.3522e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.6988, 0.9907, 0.6239, ..., 0.8127, 0.8805, 0.8314]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.03506755828857422 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '29942', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.8915646076202393} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3138, 4671, 1014, 2355, 4387, 3668, 1195, 4247, 491, + 1252, 2094, 1714, 1299, 3079, 802, 3268, 2381, 2379, + 4459, 4147, 1428, 4131, 184, 2357, 1540, 3877, 1899, + 1523, 3927, 3281, 842, 2521, 2709, 1317, 126, 4208, + 1524, 1551, 2327, 636, 2832, 4733, 2849, 2855, 4835, + 772, 892, 1005, 3131, 4981, 1907, 3534, 4774, 4049, + 1270, 2687, 3294, 2436, 4012, 573, 1486, 3183, 4324, + 4017, 2252, 3315, 416, 1701, 4054, 3869, 3901, 4397, + 823, 3663, 4776, 1010, 4393, 780, 1327, 4090, 2288, + 2171, 4207, 3745, 3322, 1764, 3400, 1657, 1112, 3388, + 2470, 1470, 3180, 2688, 2946, 2902, 1005, 2105, 1503, + 2575, 2457, 2786, 3986, 3537, 3241, 1529, 183, 137, + 1518, 3278, 1251, 4619, 3661, 1392, 2046, 4296, 1832, + 956, 2052, 4152, 1659, 837, 4363, 3579, 287, 2170, + 416, 3532, 4324, 1141, 3724, 3599, 1673, 3014, 4249, + 3374, 3935, 2108, 3990, 1261, 1792, 2644, 1713, 4760, + 4851, 2651, 534, 3407, 667, 503, 11, 3298, 4145, + 866, 1624, 2824, 3873, 374, 2059, 1354, 4922, 4948, + 4789, 3518, 4827, 1892, 755, 1448, 1250, 4481, 4595, + 1189, 4206, 1769, 3228, 4907, 1812, 3137, 28, 3706, + 4874, 758, 3184, 4721, 3431, 1443, 4896, 2475, 3238, + 2981, 3892, 1841, 3186, 3895, 4298, 4039, 2742, 396, + 3291, 3897, 2330, 4741, 2415, 80, 1815, 1491, 991, + 2013, 2616, 1000, 1578, 818, 4909, 1615, 1781, 539, + 3952, 4002, 1509, 2715, 4070, 3688, 3477, 3166, 2780, + 265, 1338, 4642, 4409, 4565, 4567, 435, 4855, 4082, + 1256, 4264, 2072, 151, 2943, 1612, 2224, 2929, 923, + 3069, 3516, 3474, 16, 4892, 1006, 3067]), + values=tensor([0.1423, 0.3764, 0.8726, 0.6883, 0.7846, 0.4459, 0.0452, + 0.1595, 0.8993, 0.6993, 0.8582, 0.8926, 0.6386, 0.2255, + 0.1989, 0.6924, 0.6856, 0.9311, 0.6401, 0.4844, 0.1827, + 0.6094, 0.0844, 0.8088, 0.7780, 0.4677, 0.5210, 0.2681, + 0.6746, 0.2234, 0.2579, 0.1990, 0.4883, 0.0424, 0.2782, + 0.3929, 0.1674, 0.3710, 0.6509, 0.3822, 0.8632, 0.4975, + 0.5252, 0.8601, 0.4531, 0.6836, 0.5476, 0.3247, 0.1133, + 0.1630, 0.8905, 0.4050, 0.0529, 0.3709, 0.6633, 0.0041, + 0.8263, 0.2824, 0.9484, 0.9316, 0.1253, 0.0387, 0.8159, + 0.2554, 0.3130, 0.0737, 0.4738, 0.5116, 0.7090, 0.2759, + 0.6768, 0.9020, 0.6712, 0.8917, 0.8115, 0.3531, 0.4688, + 0.4566, 0.9670, 0.6423, 0.7005, 0.5390, 0.9066, 0.6596, + 0.7123, 0.3209, 0.0601, 0.3019, 0.6328, 0.0158, 0.7210, + 0.6919, 0.8834, 0.4854, 0.1747, 0.7990, 0.5800, 0.5557, + 0.1228, 0.3669, 0.1142, 0.1249, 0.9221, 0.7233, 0.8693, + 0.8032, 0.3909, 0.5535, 0.3233, 0.2959, 0.5645, 0.9214, + 0.1205, 0.5140, 0.3231, 0.3354, 0.2668, 0.9663, 0.9554, + 0.5077, 0.0968, 0.7096, 0.1594, 0.4013, 0.3294, 0.5998, + 0.4436, 0.2240, 0.9058, 0.0648, 0.8462, 0.2153, 0.7426, + 0.6462, 0.4532, 0.1398, 0.7161, 0.9030, 0.7302, 0.9922, + 0.7361, 0.0549, 0.7258, 0.7856, 0.3469, 0.7982, 0.7709, + 0.2339, 0.9960, 0.4194, 0.7112, 0.5143, 0.2695, 0.8909, + 0.6861, 0.0216, 0.5087, 0.4296, 0.4732, 0.2124, 0.0993, + 0.1882, 0.5905, 0.6824, 0.3641, 0.2671, 0.8679, 0.5636, + 0.0946, 0.2765, 0.6901, 0.1089, 0.9019, 0.8860, 0.2216, + 0.8984, 0.5901, 0.3288, 0.4042, 0.3888, 0.6821, 0.5168, + 0.1585, 0.6704, 0.7681, 0.8172, 0.4528, 0.4017, 0.4631, + 0.8088, 0.1020, 0.1485, 0.7270, 0.4608, 0.5168, 0.6847, + 0.9585, 0.6296, 0.5947, 0.3092, 0.4016, 0.0159, 0.5160, + 0.0621, 0.9856, 0.0778, 0.2539, 0.3235, 0.9242, 0.1079, + 0.9852, 0.7752, 0.1954, 0.3552, 0.8036, 0.4824, 0.2198, + 0.6211, 0.1556, 0.7647, 0.6061, 0.7231, 0.7227, 0.4738, + 0.4499, 0.9377, 0.6610, 
0.2220, 0.5305, 0.8038, 0.7592, + 0.9215, 0.9933, 0.6030, 0.5785, 0.4115, 0.6221, 0.6776, + 0.4489, 0.6315, 0.2327, 0.4513, 0.7262, 0.7754, 0.6206, + 0.4823, 0.8933, 0.7206, 0.5757, 0.6875]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4051, 0.4452, 0.8286, ..., 0.6416, 0.7748, 0.9825]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.8915646076202393 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '352628', '-ss', '5000', '-sd', '1e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.273355960845947} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1604, 4, 806, 370, 4659, 2266, 385, 4480, 1740, + 2477, 1011, 4368, 1436, 1511, 582, 2881, 3146, 679, + 1335, 340, 2368, 3531, 2793, 4894, 1704, 800, 4449, + 2819, 3830, 944, 715, 291, 1651, 2756, 3425, 4366, + 1387, 4461, 4123, 3059, 45, 97, 307, 3123, 2010, + 4861, 3262, 819, 2940, 4148, 3668, 1416, 2946, 250, + 2020, 1865, 1972, 4176, 4993, 3807, 275, 1852, 2676, + 3641, 2214, 2133, 3702, 1587, 40, 2796, 4522, 2611, + 2391, 408, 3928, 2051, 4768, 4967, 847, 3011, 916, + 2658, 2737, 3985, 653, 1850, 4982, 4426, 3558, 4287, + 1078, 1321, 2196, 3153, 3474, 1886, 4386, 4813, 4479, + 1247, 1097, 4368, 3496, 4390, 2441, 28, 3845, 3018, + 574, 3154, 4908, 4477, 1259, 4186, 1078, 3130, 3163, + 3314, 3378, 1369, 3074, 965, 3176, 3034, 4337, 2593, + 2468, 3576, 1728, 3670, 4022, 4287, 319, 2341, 1420, + 3431, 3473, 1919, 368, 2943, 1836, 2897, 4091, 4055, + 2042, 694, 518, 3464, 437, 2319, 2327, 4527, 3332, + 286, 2756, 1769, 821, 2234, 2362, 3901, 2835, 3532, + 56, 1262, 1926, 2816, 573, 4537, 612, 849, 3556, + 1060, 4100, 3259, 4604, 1644, 551, 216, 1429, 4706, + 4000, 2046, 67, 4772, 4808, 2103, 2457, 770, 19, + 3752, 4627, 3183, 2351, 2290, 676, 4693, 3832, 2391, + 2085, 4458, 4110, 3726, 941, 4345, 4377, 4491, 3791, + 4120, 2339, 4337, 3728, 1293, 1315, 3558, 3212, 1812, + 4592, 3120, 1244, 3477, 1623, 73, 4441, 1447, 3927, + 4954, 4072, 1985, 625, 3210, 3147, 4908, 2800, 1924, + 230, 2107, 2981, 3816, 1923, 3645, 3133, 4236, 4114, + 2851, 2177, 1793, 4717, 666, 1768, 4852]), + values=tensor([0.8340, 0.7919, 0.0199, 0.7955, 0.8390, 0.2112, 0.2062, + 0.2416, 0.0078, 0.1110, 0.8766, 0.2461, 0.5766, 0.2522, + 0.8938, 0.7197, 0.9623, 0.5758, 0.3379, 0.5611, 0.1986, + 0.0227, 0.4551, 0.8241, 0.0932, 0.4545, 0.5055, 0.7378, + 0.9811, 0.7838, 0.9261, 0.3312, 0.1662, 0.7114, 0.8864, + 0.9809, 0.1390, 0.6532, 0.2965, 0.0298, 0.8840, 0.9398, + 0.6219, 0.4181, 0.3747, 0.5146, 0.8402, 0.0806, 0.9003, + 0.4097, 0.4861, 0.8634, 0.8848, 0.4692, 0.4523, 0.5039, + 0.7094, 0.3166, 0.2806, 0.4769, 0.9739, 0.8634, 0.3699, + 0.8453, 0.0189, 0.8787, 0.8196, 0.8724, 0.2325, 0.0224, + 0.5326, 0.1429, 0.6605, 0.4303, 0.9331, 0.8262, 0.4714, + 0.3810, 0.9149, 
0.4305, 0.2891, 0.7127, 0.3828, 0.4241, + 0.9483, 0.5644, 0.3167, 0.4464, 0.4110, 0.1906, 0.8227, + 0.3284, 0.6812, 0.4592, 0.8170, 0.4218, 0.2545, 0.2861, + 0.1807, 0.3784, 0.2316, 0.6484, 0.4370, 0.4606, 0.6060, + 0.1427, 0.6182, 0.8321, 0.4963, 0.9467, 0.6222, 0.0087, + 0.8644, 0.1970, 0.6141, 0.5044, 0.8825, 0.7629, 0.0116, + 0.7947, 0.1399, 0.5336, 0.5972, 0.1395, 0.9791, 0.9029, + 0.5148, 0.1269, 0.3422, 0.7435, 0.2942, 0.7550, 0.2954, + 0.5429, 0.3946, 0.1495, 0.9295, 0.4788, 0.3075, 0.4290, + 0.1023, 0.3547, 0.2906, 0.5885, 0.8529, 0.0126, 0.2314, + 0.8888, 0.5984, 0.0063, 0.0122, 0.5164, 0.6866, 0.4135, + 0.9434, 0.8529, 0.4727, 0.6175, 0.7220, 0.1600, 0.7729, + 0.7553, 0.8476, 0.2583, 0.1648, 0.3383, 0.1827, 0.5841, + 0.8183, 0.2678, 0.2397, 0.1691, 0.8089, 0.7103, 0.0096, + 0.5130, 0.0577, 0.3835, 0.4322, 0.8199, 0.2829, 0.8244, + 0.4148, 0.6484, 0.7719, 0.3598, 0.6003, 0.6391, 0.8970, + 0.2186, 0.5556, 0.9770, 0.6002, 0.0280, 0.6160, 0.1589, + 0.7241, 0.2905, 0.4033, 0.4301, 0.8521, 0.8618, 0.5604, + 0.1077, 0.2810, 0.1105, 0.5637, 0.8228, 0.0305, 0.7660, + 0.3373, 0.7652, 0.7287, 0.6077, 0.2858, 0.4001, 0.8614, + 0.8105, 0.5021, 0.3182, 0.2015, 0.3600, 0.7160, 0.9874, + 0.0572, 0.8754, 0.4725, 0.5233, 0.0364, 0.1500, 0.2431, + 0.3915, 0.8270, 0.2064, 0.5104, 0.9129, 0.4413, 0.8801, + 0.9179, 0.9739, 0.2250, 0.5404, 0.9261, 0.5735, 0.2090, + 0.7470, 0.4131, 0.1494, 0.0532, 0.6628]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.3633, 0.6430, 0.8109, ..., 0.6589, 0.7112, 0.9999]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.273355960845947 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1604, 4, 806, 370, 4659, 2266, 385, 4480, 1740, + 2477, 1011, 4368, 1436, 1511, 582, 2881, 3146, 679, + 1335, 340, 2368, 3531, 2793, 4894, 1704, 800, 4449, + 2819, 3830, 944, 715, 291, 1651, 2756, 3425, 4366, + 1387, 4461, 4123, 3059, 45, 97, 307, 3123, 2010, + 4861, 3262, 819, 2940, 4148, 3668, 1416, 2946, 250, + 2020, 1865, 1972, 4176, 4993, 3807, 275, 1852, 2676, + 3641, 2214, 2133, 3702, 1587, 40, 2796, 4522, 2611, + 2391, 408, 3928, 2051, 4768, 4967, 847, 3011, 916, + 2658, 2737, 3985, 653, 1850, 4982, 4426, 3558, 4287, + 1078, 1321, 2196, 3153, 3474, 1886, 4386, 4813, 4479, + 1247, 1097, 4368, 3496, 4390, 2441, 28, 3845, 3018, + 574, 3154, 4908, 4477, 1259, 4186, 1078, 3130, 3163, + 3314, 3378, 1369, 3074, 965, 3176, 3034, 4337, 2593, + 2468, 3576, 1728, 3670, 4022, 4287, 319, 2341, 1420, + 3431, 3473, 1919, 368, 2943, 1836, 2897, 4091, 4055, + 2042, 694, 518, 3464, 437, 2319, 2327, 4527, 3332, + 286, 2756, 1769, 821, 2234, 2362, 3901, 2835, 3532, + 56, 1262, 1926, 2816, 573, 4537, 612, 849, 3556, + 1060, 4100, 3259, 4604, 1644, 551, 216, 1429, 4706, + 4000, 2046, 67, 4772, 4808, 2103, 2457, 770, 19, + 3752, 4627, 3183, 2351, 2290, 676, 4693, 3832, 2391, + 2085, 4458, 4110, 3726, 941, 4345, 4377, 4491, 3791, + 4120, 2339, 4337, 3728, 1293, 1315, 3558, 3212, 1812, + 4592, 3120, 1244, 3477, 1623, 73, 4441, 1447, 3927, + 4954, 4072, 1985, 625, 3210, 3147, 4908, 2800, 1924, + 230, 2107, 2981, 3816, 1923, 3645, 3133, 4236, 4114, + 2851, 2177, 1793, 4717, 666, 1768, 4852]), + values=tensor([0.8340, 0.7919, 0.0199, 0.7955, 0.8390, 0.2112, 0.2062, + 0.2416, 0.0078, 0.1110, 0.8766, 0.2461, 0.5766, 0.2522, + 0.8938, 0.7197, 0.9623, 0.5758, 0.3379, 0.5611, 0.1986, + 0.0227, 0.4551, 0.8241, 0.0932, 0.4545, 0.5055, 0.7378, + 0.9811, 0.7838, 0.9261, 0.3312, 0.1662, 0.7114, 0.8864, + 0.9809, 0.1390, 0.6532, 0.2965, 0.0298, 0.8840, 0.9398, + 0.6219, 0.4181, 0.3747, 0.5146, 0.8402, 0.0806, 0.9003, + 0.4097, 0.4861, 0.8634, 0.8848, 0.4692, 0.4523, 0.5039, + 0.7094, 0.3166, 0.2806, 0.4769, 0.9739, 0.8634, 0.3699, + 0.8453, 0.0189, 0.8787, 0.8196, 0.8724, 0.2325, 0.0224, + 0.5326, 0.1429, 0.6605, 0.4303, 0.9331, 0.8262, 0.4714, + 0.3810, 0.9149, 0.4305, 0.2891, 0.7127, 0.3828, 0.4241, + 0.9483, 0.5644, 0.3167, 0.4464, 0.4110, 0.1906, 0.8227, + 0.3284, 0.6812, 0.4592, 0.8170, 0.4218, 0.2545, 0.2861, + 0.1807, 0.3784, 0.2316, 0.6484, 0.4370, 0.4606, 0.6060, + 0.1427, 0.6182, 0.8321, 0.4963, 0.9467, 0.6222, 0.0087, + 0.8644, 0.1970, 0.6141, 0.5044, 0.8825, 0.7629, 0.0116, + 0.7947, 0.1399, 0.5336, 0.5972, 0.1395, 0.9791, 0.9029, + 0.5148, 0.1269, 0.3422, 0.7435, 0.2942, 0.7550, 0.2954, + 0.5429, 0.3946, 0.1495, 0.9295, 0.4788, 0.3075, 0.4290, + 0.1023, 0.3547, 0.2906, 0.5885, 0.8529, 0.0126, 0.2314, + 0.8888, 0.5984, 0.0063, 0.0122, 0.5164, 0.6866, 0.4135, + 0.9434, 0.8529, 0.4727, 0.6175, 0.7220, 0.1600, 0.7729, + 0.7553, 0.8476, 0.2583, 0.1648, 0.3383, 0.1827, 0.5841, + 0.8183, 0.2678, 0.2397, 0.1691, 0.8089, 0.7103, 0.0096, + 0.5130, 0.0577, 0.3835, 0.4322, 0.8199, 0.2829, 0.8244, + 0.4148, 0.6484, 0.7719, 0.3598, 0.6003, 0.6391, 0.8970, + 0.2186, 0.5556, 0.9770, 0.6002, 0.0280, 0.6160, 0.1589, + 0.7241, 0.2905, 0.4033, 0.4301, 0.8521, 0.8618, 0.5604, + 0.1077, 0.2810, 0.1105, 0.5637, 0.8228, 0.0305, 0.7660, + 0.3373, 0.7652, 0.7287, 0.6077, 0.2858, 0.4001, 0.8614, + 0.8105, 0.5021, 0.3182, 0.2015, 0.3600, 0.7160, 0.9874, + 0.0572, 0.8754, 0.4725, 0.5233, 0.0364, 
0.1500, 0.2431, + 0.3915, 0.8270, 0.2064, 0.5104, 0.9129, 0.4413, 0.8801, + 0.9179, 0.9739, 0.2250, 0.5404, 0.9261, 0.5735, 0.2090, + 0.7470, 0.4131, 0.1494, 0.0532, 0.6628]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.3633, 0.6430, 0.8109, ..., 0.6589, 0.7112, 0.9999]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.273355960845947 seconds + +[18.62, 17.97, 17.77, 17.77, 18.2, 18.07, 17.98, 17.9, 18.36, 17.75] +[73.02] +13.663307428359985 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 352628, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.273355960845947, 'TIME_S_1KI': 0.029133693186150694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 997.6947084188461, 'W': 73.02} +[18.62, 17.97, 17.77, 17.77, 18.2, 18.07, 17.98, 17.9, 18.36, 17.75, 18.68, 18.07, 18.34, 18.06, 18.18, 17.85, 18.03, 18.15, 17.94, 17.93] +325.13 +16.2565 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 352628, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.273355960845947, 'TIME_S_1KI': 0.029133693186150694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 997.6947084188461, 'W': 73.02, 'J_1KI': 2.8293122168938547, 'W_1KI': 0.20707374343500798, 'W_D': 56.76349999999999, 'J_D': 775.5771512097119, 'W_D_1KI': 0.16097275315630066, 'J_D_1KI': 0.00045649453008921774} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..f8ae472 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 348362, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.331048488616943, "TIME_S_1KI": 0.029656071812129176, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1019.3632760500908, "W": 73.39, "J_1KI": 2.9261609361815895, "W_1KI": 0.2106716576434858, "W_D": 57.056, "J_D": 792.4893184127807, "W_D_1KI": 0.1637836503407375, "J_D_1KI": 0.0004701536055618509} diff --git a/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..7fb17e1 --- /dev/null +++ b/pytorch/output_synthetic_16core_old2/xeon_4216_16_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01948690414428711} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([3543, 2601, 4811, ..., 3171, 1181, 2171]), + values=tensor([0.9467, 0.6961, 0.1720, ..., 0.4974, 0.2968, 0.3956]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.9876, 0.5815, 0.6649, ..., 0.6796, 0.2344, 0.7286]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.01948690414428711 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53882', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.6240589618682861} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([ 267, 783, 3915, ..., 3618, 4520, 1464]), + values=tensor([0.2837, 0.8920, 0.5250, ..., 0.9331, 0.1091, 0.1041]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.6941, 0.6065, 0.9857, ..., 0.4180, 0.3910, 0.2569]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 1.6240589618682861 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:16}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '348362', '-ss', '5000', '-sd', '5e-05', '-c', '16'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.331048488616943} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([2866, 1356, 2436, ..., 421, 1796, 3666]), + values=tensor([0.0261, 0.5356, 0.3907, ..., 0.0828, 0.6288, 0.2100]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.2739, 0.9422, 0.5483, ..., 0.7719, 0.1377, 0.3851]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.331048488616943 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([2866, 1356, 2436, ..., 421, 1796, 3666]), + values=tensor([0.0261, 0.5356, 0.3907, ..., 0.0828, 0.6288, 0.2100]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.2739, 0.9422, 0.5483, ..., 0.7719, 0.1377, 0.3851]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.331048488616943 seconds + +[18.79, 18.13, 18.25, 18.09, 18.59, 17.94, 18.05, 18.04, 18.23, 18.09] +[73.39] +13.889675378799438 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 348362, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.331048488616943, 'TIME_S_1KI': 0.029656071812129176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1019.3632760500908, 'W': 73.39} +[18.79, 18.13, 18.25, 18.09, 18.59, 17.94, 18.05, 18.04, 18.23, 18.09, 18.49, 18.19, 18.09, 17.88, 18.04, 18.12, 18.21, 18.0, 18.16, 17.97] +326.68 +16.334 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 348362, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.331048488616943, 'TIME_S_1KI': 0.029656071812129176, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1019.3632760500908, 'W': 73.39, 'J_1KI': 2.9261609361815895, 'W_1KI': 0.2106716576434858, 'W_D': 57.056, 'J_D': 792.4893184127807, 'W_D_1KI': 0.1637836503407375, 'J_D_1KI': 0.0004701536055618509} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..947b6c6 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 369, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.636317253112793, "TIME_S_1KI": 28.8247080030157, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 287.8413551712036, "W": 19.688717909745684, "J_1KI": 780.0578730926927, "W_1KI": 53.35695910500185, "W_D": 4.731717909745685, "J_D": 69.17586516666414, "W_D_1KI": 12.823083766248468, "J_D_1KI": 34.750904515578505} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..5ebe6c0 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, 
"MATRIX_DENSITY": 0.0001, "TIME_S": 0.2987680435180664} + +tensor(indices=tensor([[ 1599, 16094, 97143, ..., 81645, 43768, 7920], + [59011, 57210, 21615, ..., 30247, 31254, 79279]]), + values=tensor([0.6051, 0.0438, 0.3131, ..., 0.2534, 0.4034, 0.8759]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2165, 0.3396, 0.3912, ..., 0.3966, 0.2262, 0.3988]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.2987680435180664 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 351 -ss 100000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.965375661849976} + +tensor(indices=tensor([[80952, 18985, 24865, ..., 14259, 52599, 27564], + [89651, 11301, 24814, ..., 88133, 30872, 53417]]), + values=tensor([0.9997, 0.1015, 0.0555, ..., 0.1398, 0.2814, 0.9012]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.3343, 0.5677, 0.5407, ..., 0.3047, 0.9425, 0.9356]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 9.965375661849976 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 369 -ss 100000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.636317253112793} + +tensor(indices=tensor([[17813, 31686, 77007, ..., 76242, 32068, 60813], + [21862, 20635, 39803, ..., 26873, 9840, 1439]]), + values=tensor([0.4517, 0.4792, 0.8950, ..., 0.6326, 0.2135, 0.1529]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2396, 0.6523, 0.3657, ..., 0.4429, 0.8206, 0.8591]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.636317253112793 seconds + +tensor(indices=tensor([[17813, 31686, 77007, ..., 76242, 32068, 60813], + [21862, 20635, 39803, ..., 26873, 9840, 1439]]), + values=tensor([0.4517, 0.4792, 0.8950, ..., 0.6326, 0.2135, 0.1529]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2396, 0.6523, 0.3657, ..., 0.4429, 0.8206, 0.8591]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.636317253112793 seconds + +[16.4, 16.36, 16.48, 16.56, 16.56, 16.64, 16.96, 16.8, 16.96, 16.84] +[16.72, 16.64, 19.32, 20.28, 22.04, 22.96, 23.96, 22.0, 22.2, 22.2, 21.48, 21.48, 21.32, 21.08] +14.619608879089355 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 369, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.636317253112793, 'TIME_S_1KI': 28.8247080030157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 287.8413551712036, 'W': 19.688717909745684} +[16.4, 16.36, 16.48, 16.56, 16.56, 16.64, 
16.96, 16.8, 16.96, 16.84, 16.28, 16.32, 16.44, 16.44, 16.6, 16.76, 16.8, 16.72, 16.6, 16.76] +299.14 +14.956999999999999 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 369, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.636317253112793, 'TIME_S_1KI': 28.8247080030157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 287.8413551712036, 'W': 19.688717909745684, 'J_1KI': 780.0578730926927, 'W_1KI': 53.35695910500185, 'W_D': 4.731717909745685, 'J_D': 69.17586516666414, 'W_D_1KI': 12.823083766248468, 'J_D_1KI': 34.750904515578505} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..577e93b --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 35, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.216949462890625, "TIME_S_1KI": 291.912841796875, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 325.8894660186768, "W": 19.469103396520016, "J_1KI": 9311.127600533622, "W_1KI": 556.260097043429, "W_D": 4.649103396520017, "J_D": 77.82042102813728, "W_D_1KI": 132.83152561485764, "J_D_1KI": 3795.1864461387895} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..4053eb6 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.9354641437530518} + +tensor(indices=tensor([[78627, 99104, 53027, ..., 93765, 95772, 47077], + [82008, 95322, 75085, ..., 45673, 65677, 99671]]), + values=tensor([0.8931, 0.8701, 0.8243, ..., 0.1592, 0.8895, 0.8046]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9682, 0.5052, 0.6794, ..., 0.0172, 0.4061, 0.3412]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 2.9354641437530518 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 35 -ss 100000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.216949462890625} + +tensor(indices=tensor([[20982, 13078, 89450, ..., 9018, 34468, 27606], + [44656, 67622, 31508, ..., 2134, 40017, 4986]]), + values=tensor([0.4445, 0.6928, 0.7291, ..., 0.7392, 0.2700, 0.6814]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) 
+tensor([0.7065, 0.6896, 0.6113, ..., 0.9609, 0.4323, 0.0107]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.216949462890625 seconds + +tensor(indices=tensor([[20982, 13078, 89450, ..., 9018, 34468, 27606], + [44656, 67622, 31508, ..., 2134, 40017, 4986]]), + values=tensor([0.4445, 0.6928, 0.7291, ..., 0.7392, 0.2700, 0.6814]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.7065, 0.6896, 0.6113, ..., 0.9609, 0.4323, 0.0107]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.216949462890625 seconds + +[16.32, 16.32, 15.88, 15.92, 16.12, 16.36, 16.84, 17.0, 16.72, 16.72] +[16.72, 16.52, 16.64, 18.12, 20.12, 21.68, 22.92, 23.04, 22.24, 21.8, 21.68, 21.56, 21.6, 21.6, 21.4, 21.48] +16.738801956176758 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 35, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.216949462890625, 'TIME_S_1KI': 291.912841796875, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.8894660186768, 'W': 19.469103396520016} +[16.32, 16.32, 15.88, 15.92, 16.12, 16.36, 16.84, 17.0, 16.72, 16.72, 16.48, 16.44, 16.52, 16.52, 16.36, 16.48, 16.56, 16.6, 16.64, 16.72] +296.4 +14.819999999999999 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 35, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.216949462890625, 'TIME_S_1KI': 291.912841796875, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.8894660186768, 'W': 19.469103396520016, 'J_1KI': 9311.127600533622, 'W_1KI': 556.260097043429, 'W_D': 4.649103396520017, 'J_D': 77.82042102813728, 'W_D_1KI': 132.83152561485764, 'J_D_1KI': 3795.1864461387895} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..f8f53b9 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 29.01852059364319, "TIME_S_1KI": 2901.852059364319, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1503.351203136444, "W": 21.43916375621773, "J_1KI": 150335.12031364438, "W_1KI": 2143.916375621773, "W_D": 6.590163756217731, "J_D": 462.1136684448718, "W_D_1KI": 659.016375621773, "J_D_1KI": 65901.6375621773} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..57b28d4 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", 
"MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 29.01852059364319} + +tensor(indices=tensor([[96780, 46343, 29710, ..., 56060, 28091, 15859], + [18304, 11599, 76997, ..., 42553, 23029, 97297]]), + values=tensor([0.1022, 0.1200, 0.8656, ..., 0.5081, 0.7415, 0.2865]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.8148, 0.3595, 0.5366, ..., 0.1661, 0.8821, 0.2459]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 29.01852059364319 seconds + +tensor(indices=tensor([[96780, 46343, 29710, ..., 56060, 28091, 15859], + [18304, 11599, 76997, ..., 42553, 23029, 97297]]), + values=tensor([0.1022, 0.1200, 0.8656, ..., 0.5081, 0.7415, 0.2865]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.8148, 0.3595, 0.5366, ..., 0.1661, 0.8821, 0.2459]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 29.01852059364319 seconds + +[16.64, 16.48, 16.84, 16.8, 16.48, 16.56, 16.56, 16.64, 16.36, 16.48] +[16.52, 16.44, 17.04, 20.44, 22.96, 24.04, 25.0, 22.72, 22.68, 22.08, 24.28, 26.04, 26.04, 27.08, 26.8, 25.04, 23.36, 21.4, 21.88, 23.88, 25.4, 25.88, 25.4, 24.6, 22.6, 21.76, 21.76, 22.04, 21.88, 21.76, 21.68, 21.64, 21.4, 21.4, 21.4, 21.32, 21.48, 21.48, 21.44, 21.6, 21.72, 21.56, 21.52, 21.6, 21.64, 21.4, 21.6, 21.52, 21.44, 23.12, 23.48, 24.28, 24.68, 23.96, 23.96, 22.12, 21.72, 21.76, 21.68, 22.4, 23.44, 23.88, 24.32, 24.76, 24.8, 24.64, 24.08] +70.12172770500183 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 29.01852059364319, 'TIME_S_1KI': 2901.852059364319, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1503.351203136444, 'W': 21.43916375621773} +[16.64, 16.48, 16.84, 16.8, 16.48, 16.56, 16.56, 16.64, 16.36, 16.48, 16.52, 16.48, 16.2, 16.2, 16.16, 16.32, 16.32, 16.52, 16.76, 16.96] +296.98 +14.849 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 29.01852059364319, 'TIME_S_1KI': 2901.852059364319, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1503.351203136444, 'W': 21.43916375621773, 'J_1KI': 150335.12031364438, 'W_1KI': 2143.916375621773, 'W_D': 6.590163756217731, 'J_D': 462.1136684448718, 'W_D_1KI': 659.016375621773, 'J_D_1KI': 65901.6375621773} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..4548586 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3402, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.295332193374634, "TIME_S_1KI": 3.0262587282112388, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 287.25050638198854, "W": 19.695916403319558, 
"J_1KI": 84.43577495061392, "W_1KI": 5.789510994508983, "W_D": 4.859916403319556, "J_D": 70.87831910133363, "W_D_1KI": 1.4285468557670653, "J_D_1KI": 0.4199138317951397} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..68f3479 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03710126876831055} + +tensor(indices=tensor([[32568, 76933, 50955, ..., 66639, 68949, 87031], + [44781, 58787, 9312, ..., 51710, 91653, 87454]]), + values=tensor([0.7274, 0.3126, 0.2247, ..., 0.0950, 0.9436, 0.5851]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9113, 0.7737, 0.1895, ..., 0.7678, 0.0169, 0.0837]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.03710126876831055 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 2830 -ss 100000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.732119798660278} + +tensor(indices=tensor([[70674, 47167, 62372, ..., 95535, 60187, 86998], + [12626, 86296, 54871, ..., 2788, 9085, 4226]]), + values=tensor([0.9887, 0.4689, 0.4221, ..., 0.5354, 0.2285, 0.8231]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6255, 0.2927, 0.2043, ..., 0.0532, 0.0945, 0.0371]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 8.732119798660278 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 3402 -ss 100000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.295332193374634} + +tensor(indices=tensor([[21608, 22349, 45789, ..., 12102, 19961, 98888], + [86931, 75008, 74993, ..., 7171, 21797, 43614]]), + values=tensor([0.4811, 0.8092, 0.3501, ..., 0.5042, 0.0125, 0.8005]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.4627, 0.8170, 0.5623, ..., 0.9318, 0.0590, 0.2429]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.295332193374634 seconds + +tensor(indices=tensor([[21608, 22349, 45789, ..., 12102, 19961, 98888], + [86931, 75008, 74993, ..., 7171, 21797, 43614]]), + values=tensor([0.4811, 0.8092, 0.3501, ..., 0.5042, 0.0125, 0.8005]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) 
+tensor([0.4627, 0.8170, 0.5623, ..., 0.9318, 0.0590, 0.2429]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.295332193374634 seconds + +[16.48, 16.32, 16.2, 16.36, 16.52, 16.4, 16.72, 16.76, 16.56, 16.48] +[16.64, 16.52, 18.64, 19.88, 19.88, 21.8, 23.12, 24.32, 22.88, 22.16, 22.16, 21.96, 21.8, 21.96] +14.584267139434814 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3402, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.295332193374634, 'TIME_S_1KI': 3.0262587282112388, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 287.25050638198854, 'W': 19.695916403319558} +[16.48, 16.32, 16.2, 16.36, 16.52, 16.4, 16.72, 16.76, 16.56, 16.48, 16.52, 16.36, 16.36, 16.44, 16.44, 16.44, 16.6, 16.6, 16.64, 16.52] +296.72 +14.836000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3402, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.295332193374634, 'TIME_S_1KI': 3.0262587282112388, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 287.25050638198854, 'W': 19.695916403319558, 'J_1KI': 84.43577495061392, 'W_1KI': 5.789510994508983, 'W_D': 4.859916403319556, 'J_D': 70.87831910133363, 'W_D_1KI': 1.4285468557670653, 'J_D_1KI': 0.4199138317951397} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..f628df3 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 704, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.229285955429077, "TIME_S_1KI": 14.530235732143575, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 284.75872134208674, "W": 19.55448674984675, "J_1KI": 404.48682008819145, "W_1KI": 27.776259587850497, "W_D": 4.499486749846751, "J_D": 65.52297229647635, "W_D_1KI": 6.391316406032317, "J_D_1KI": 9.078574440386815} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..9e47d69 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.14911699295043945} + +tensor(indices=tensor([[51523, 73916, 3227, ..., 39487, 17141, 79218], + [ 9238, 53437, 22521, ..., 25103, 47143, 55286]]), + values=tensor([0.3926, 0.6886, 0.1018, ..., 0.3187, 0.8883, 0.5690]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.0155, 0.9391, 0.6263, ..., 0.8726, 0.4573, 0.9030]) +Matrix 
Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.14911699295043945 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 704 -ss 100000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.229285955429077} + +tensor(indices=tensor([[95897, 483, 33080, ..., 96155, 67869, 39681], + [23844, 28018, 19444, ..., 36412, 80610, 18759]]), + values=tensor([0.4057, 0.1297, 0.2349, ..., 0.0414, 0.1821, 0.5004]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.4846, 0.2133, 0.4080, ..., 0.6793, 0.9557, 0.8419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.229285955429077 seconds + +tensor(indices=tensor([[95897, 483, 33080, ..., 96155, 67869, 39681], + [23844, 28018, 19444, ..., 36412, 80610, 18759]]), + values=tensor([0.4057, 0.1297, 0.2349, ..., 0.0414, 0.1821, 0.5004]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.4846, 0.2133, 0.4080, ..., 0.6793, 0.9557, 0.8419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.229285955429077 seconds + +[16.68, 16.92, 16.84, 16.48, 16.52, 16.52, 16.52, 16.2, 16.4, 16.36] +[16.64, 16.48, 16.6, 20.64, 22.64, 23.72, 24.8, 22.24, 22.04, 21.4, 21.04, 21.04, 21.32, 21.16] +14.562321424484253 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 704, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.229285955429077, 'TIME_S_1KI': 14.530235732143575, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 284.75872134208674, 'W': 19.55448674984675} +[16.68, 16.92, 16.84, 16.48, 16.52, 16.52, 16.52, 16.2, 16.4, 16.36, 17.0, 17.24, 17.08, 16.88, 16.68, 16.76, 16.88, 16.88, 16.88, 16.8] +301.09999999999997 +15.054999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 704, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.229285955429077, 'TIME_S_1KI': 14.530235732143575, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 284.75872134208674, 'W': 19.55448674984675, 'J_1KI': 404.48682008819145, 'W_1KI': 27.776259587850497, 'W_D': 4.499486749846751, 'J_D': 65.52297229647635, 'W_D_1KI': 6.391316406032317, 'J_D_1KI': 9.078574440386815} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..c4c82c8 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 46780, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.876380205154419, "TIME_S_1KI": 0.2325006456852163, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 270.0029137420654, "W": 18.561677792152317, "J_1KI": 5.771759592605075, "W_1KI": 0.39678661376982294, "W_D": 3.7106777921523175, "J_D": 53.976468456029885, "W_D_1KI": 0.07932188525336292, "J_D_1KI": 0.0016956367091355905} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..11eb6e5 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,75 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.008706092834472656} + +tensor(indices=tensor([[ 813, 7065, 2372, ..., 9412, 1888, 4409], + [7714, 6847, 9093, ..., 7700, 8488, 5233]]), + values=tensor([0.3937, 0.0619, 0.2907, ..., 0.8877, 0.5458, 0.4258]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.1532, 0.0730, 0.1937, ..., 0.5886, 0.2052, 0.4583]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.008706092834472656 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 12060 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.706918954849243} + +tensor(indices=tensor([[8718, 1591, 4493, ..., 3876, 8184, 8826], + [3654, 2361, 9351, ..., 8542, 1848, 8631]]), + values=tensor([0.6585, 0.2637, 0.4834, ..., 0.1601, 0.4607, 0.2778]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7084, 0.7410, 0.4425, ..., 0.5955, 0.5576, 0.7847]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 2.706918954849243 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 46780 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.876380205154419} + +tensor(indices=tensor([[1690, 6741, 4994, ..., 1920, 1494, 1138], + [5930, 3128, 6123, ..., 1761, 1213, 2911]]), + values=tensor([1.3860e-05, 3.0585e-01, 6.8161e-01, ..., + 3.8007e-01, 5.2299e-02, 8.9853e-01]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7620, 0.0021, 0.9371, ..., 0.7641, 0.4461, 0.6599]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.876380205154419 seconds + +tensor(indices=tensor([[1690, 6741, 4994, ..., 1920, 1494, 1138], + [5930, 3128, 6123, ..., 1761, 1213, 2911]]), + values=tensor([1.3860e-05, 3.0585e-01, 6.8161e-01, ..., + 3.8007e-01, 5.2299e-02, 
8.9853e-01]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7620, 0.0021, 0.9371, ..., 0.7641, 0.4461, 0.6599]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.876380205154419 seconds + +[16.28, 16.24, 16.48, 16.64, 16.76, 17.16, 17.2, 16.96, 16.72, 16.52] +[16.52, 16.28, 16.36, 20.0, 21.16, 23.0, 23.6, 21.6, 21.04, 19.76, 19.6, 19.48, 19.44, 19.52] +14.546255826950073 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 46780, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.876380205154419, 'TIME_S_1KI': 0.2325006456852163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 270.0029137420654, 'W': 18.561677792152317} +[16.28, 16.24, 16.48, 16.64, 16.76, 17.16, 17.2, 16.96, 16.72, 16.52, 16.4, 16.12, 16.2, 16.32, 16.32, 16.12, 16.32, 16.32, 16.4, 16.28] +297.02 +14.850999999999999 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 46780, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.876380205154419, 'TIME_S_1KI': 0.2325006456852163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 270.0029137420654, 'W': 18.561677792152317, 'J_1KI': 5.771759592605075, 'W_1KI': 0.39678661376982294, 'W_D': 3.7106777921523175, 'J_D': 53.976468456029885, 'W_D_1KI': 0.07932188525336292, 'J_D_1KI': 0.0016956367091355905} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..7ef7193 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4656, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.50959324836731, "TIME_S_1KI": 2.2572150447524293, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 251.83274530410765, "W": 18.665438169905315, "J_1KI": 54.08778893988566, "W_1KI": 4.008899950581038, "W_D": 3.9154381699053147, "J_D": 52.82680933713909, "W_D_1KI": 0.8409446241205573, "J_D_1KI": 0.18061525432142553} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..2fc6b06 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02801656723022461} + +tensor(indices=tensor([[3339, 7627, 7981, ..., 6491, 8253, 5568], + [8466, 7972, 922, ..., 928, 52, 595]]), + values=tensor([0.0473, 0.1604, 0.9975, ..., 0.9859, 0.8513, 0.0846]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9855, 0.2654, 0.8651, ..., 
0.1300, 0.7609, 0.5597]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.02801656723022461 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 3747 -ss 10000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.449877977371216} + +tensor(indices=tensor([[2586, 6680, 7111, ..., 6784, 633, 640], + [7279, 6825, 4611, ..., 7487, 4080, 9864]]), + values=tensor([0.7798, 0.0797, 0.8738, ..., 0.8562, 0.8012, 0.7799]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6244, 0.1474, 0.9183, ..., 0.3881, 0.4184, 0.6912]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 8.449877977371216 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 4656 -ss 10000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.50959324836731} + +tensor(indices=tensor([[4801, 9374, 6797, ..., 7984, 5291, 4925], + [8691, 7672, 9246, ..., 8773, 4702, 4773]]), + values=tensor([0.1348, 0.1988, 0.9495, ..., 0.5256, 0.1495, 0.2297]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.4525, 0.7832, 0.2239, ..., 0.0248, 0.4456, 0.3895]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.50959324836731 seconds + +tensor(indices=tensor([[4801, 9374, 6797, ..., 7984, 5291, 4925], + [8691, 7672, 9246, ..., 8773, 4702, 4773]]), + values=tensor([0.1348, 0.1988, 0.9495, ..., 0.5256, 0.1495, 0.2297]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.4525, 0.7832, 0.2239, ..., 0.0248, 0.4456, 0.3895]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.50959324836731 seconds + +[16.04, 16.2, 16.44, 16.64, 16.76, 16.64, 16.36, 16.36, 16.48, 16.36] +[16.4, 16.36, 16.92, 18.88, 20.88, 21.8, 22.52, 22.4, 21.36, 20.6, 20.48, 20.88, 20.72] +13.491927862167358 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4656, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.50959324836731, 'TIME_S_1KI': 2.2572150447524293, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 251.83274530410765, 'W': 18.665438169905315} +[16.04, 16.2, 16.44, 16.64, 16.76, 16.64, 16.36, 16.36, 16.48, 16.36, 16.68, 16.64, 16.64, 16.44, 16.36, 16.08, 15.96, 16.12, 16.12, 16.44] +295.0 +14.75 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4656, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.50959324836731, 'TIME_S_1KI': 2.2572150447524293, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
251.83274530410765, 'W': 18.665438169905315, 'J_1KI': 54.08778893988566, 'W_1KI': 4.008899950581038, 'W_D': 3.9154381699053147, 'J_D': 52.82680933713909, 'W_D_1KI': 0.8409446241205573, 'J_D_1KI': 0.18061525432142553} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..653e208 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 477, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.350482940673828, "TIME_S_1KI": 21.69912566179, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 273.24414005279544, "W": 18.805205722391392, "J_1KI": 572.8388680352106, "W_1KI": 39.42391136769684, "W_D": 3.9402057223913936, "J_D": 57.25213220953946, "W_D_1KI": 8.260389355118225, "J_D_1KI": 17.31737810297322} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..6f424ff --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.2197110652923584} + +tensor(indices=tensor([[5627, 903, 4331, ..., 4124, 7573, 8341], + [7291, 8085, 1071, ..., 5790, 8982, 2696]]), + values=tensor([0.5875, 0.4754, 0.2800, ..., 0.9318, 0.8148, 0.1899]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.9086, 0.4261, 0.9149, ..., 0.3266, 0.8572, 0.1589]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.2197110652923584 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 477 -ss 10000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.350482940673828} + +tensor(indices=tensor([[7663, 1266, 7574, ..., 519, 2734, 3377], + [ 477, 1141, 8408, ..., 6475, 7728, 7821]]), + values=tensor([0.8249, 0.3039, 0.2597, ..., 0.4592, 0.1269, 0.4461]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.5762, 0.8293, 0.3297, ..., 0.4309, 0.0281, 0.4079]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.350482940673828 seconds + +tensor(indices=tensor([[7663, 1266, 7574, ..., 519, 2734, 3377], + [ 477, 1141, 8408, ..., 6475, 7728, 7821]]), + values=tensor([0.8249, 0.3039, 0.2597, ..., 0.4592, 0.1269, 0.4461]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.5762, 0.8293, 0.3297, ..., 0.4309, 0.0281, 0.4079]) +Matrix 
Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.350482940673828 seconds + +[16.44, 16.44, 16.44, 16.52, 16.72, 16.56, 16.56, 16.56, 16.56, 16.52] +[16.68, 16.64, 17.88, 19.44, 19.44, 21.04, 22.04, 23.04, 21.8, 20.88, 20.56, 20.44, 20.44, 20.64] +14.530239343643188 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 477, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.350482940673828, 'TIME_S_1KI': 21.69912566179, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 273.24414005279544, 'W': 18.805205722391392} +[16.44, 16.44, 16.44, 16.52, 16.72, 16.56, 16.56, 16.56, 16.56, 16.52, 16.44, 16.52, 16.28, 16.16, 16.24, 16.48, 16.68, 16.68, 16.88, 16.64] +297.29999999999995 +14.864999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 477, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.350482940673828, 'TIME_S_1KI': 21.69912566179, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 273.24414005279544, 'W': 18.805205722391392, 'J_1KI': 572.8388680352106, 'W_1KI': 39.42391136769684, 'W_D': 3.9402057223913936, 'J_D': 57.25213220953946, 'W_D_1KI': 8.260389355118225, 'J_D_1KI': 17.31737810297322} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..8549b34 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 96, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.743469476699829, "TIME_S_1KI": 111.91114038228989, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 304.21499929428103, "W": 19.37542273156749, "J_1KI": 3168.906242648761, "W_1KI": 201.82732012049468, "W_D": 4.35542273156749, "J_D": 68.38482656955723, "W_D_1KI": 45.368986787161354, "J_D_1KI": 472.59361236626415} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..81af6dd --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.0898561477661133} + +tensor(indices=tensor([[2227, 9732, 7034, ..., 1307, 224, 1713], + [1385, 9060, 5093, ..., 4175, 9950, 9089]]), + values=tensor([0.4970, 0.6132, 0.8255, ..., 0.6073, 0.5626, 0.6289]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.1199, 0.8136, 0.1887, ..., 0.2949, 0.2338, 0.4082]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 
+Density: 0.05 +Time: 1.0898561477661133 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 96 -ss 10000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.743469476699829} + +tensor(indices=tensor([[9978, 3547, 7257, ..., 6903, 2190, 6405], + [8692, 6555, 1876, ..., 2855, 1869, 2395]]), + values=tensor([0.7732, 0.4099, 0.1758, ..., 0.7036, 0.7191, 0.3466]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.0803, 0.3928, 0.7050, ..., 0.0602, 0.9136, 0.4377]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.743469476699829 seconds + +tensor(indices=tensor([[9978, 3547, 7257, ..., 6903, 2190, 6405], + [8692, 6555, 1876, ..., 2855, 1869, 2395]]), + values=tensor([0.7732, 0.4099, 0.1758, ..., 0.7036, 0.7191, 0.3466]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.0803, 0.3928, 0.7050, ..., 0.0602, 0.9136, 0.4377]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.743469476699829 seconds + +[16.28, 16.12, 16.24, 16.64, 16.8, 16.72, 16.84, 16.88, 16.88, 16.92] +[16.88, 16.96, 16.96, 18.04, 19.76, 22.16, 22.96, 23.48, 22.44, 21.52, 21.2, 21.48, 21.56, 21.6, 21.36] +15.701076745986938 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 96, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.743469476699829, 'TIME_S_1KI': 111.91114038228989, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 304.21499929428103, 'W': 19.37542273156749} +[16.28, 16.12, 16.24, 16.64, 16.8, 16.72, 16.84, 16.88, 16.88, 16.92, 16.12, 16.28, 16.6, 16.84, 16.76, 17.12, 17.12, 16.96, 16.6, 16.68] +300.4 +15.02 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 96, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.743469476699829, 'TIME_S_1KI': 111.91114038228989, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 304.21499929428103, 'W': 19.37542273156749, 'J_1KI': 3168.906242648761, 'W_1KI': 201.82732012049468, 'W_D': 4.35542273156749, 'J_D': 68.38482656955723, 'W_D_1KI': 45.368986787161354, 'J_D_1KI': 472.59361236626415} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..76b6015 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 48, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.366862535476685, "TIME_S_1KI": 215.97630282243094, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 344.07916982650755, "W": 20.55669881076274, "J_1KI": 7168.31603805224, "W_1KI": 428.26455855755705, "W_D": 5.548698810762739, "J_D": 92.87442979049678, 
"W_D_1KI": 115.59789189089038, "J_D_1KI": 2408.2894143935496} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..45037c3 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.1872427463531494} + +tensor(indices=tensor([[7349, 1588, 419, ..., 7604, 8652, 6956], + [ 99, 194, 9219, ..., 1, 9912, 8378]]), + values=tensor([0.4491, 0.6838, 0.4787, ..., 0.1418, 0.1171, 0.4676]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.7041, 0.1272, 0.3495, ..., 0.0494, 0.8938, 0.9263]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 2.1872427463531494 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 48 -ss 10000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.366862535476685} + +tensor(indices=tensor([[5673, 8284, 6399, ..., 8723, 6114, 30], + [6200, 5210, 3227, ..., 71, 1664, 1621]]), + values=tensor([0.3871, 0.8340, 0.9018, ..., 0.3816, 0.2204, 0.9897]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.4626, 0.7788, 0.8960, ..., 0.8435, 0.1770, 0.8946]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.366862535476685 seconds + +tensor(indices=tensor([[5673, 8284, 6399, ..., 8723, 6114, 30], + [6200, 5210, 3227, ..., 71, 1664, 1621]]), + values=tensor([0.3871, 0.8340, 0.9018, ..., 0.3816, 0.2204, 0.9897]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.4626, 0.7788, 0.8960, ..., 0.8435, 0.1770, 0.8946]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.366862535476685 seconds + +[16.64, 16.76, 16.76, 16.76, 16.6, 16.72, 16.88, 16.64, 16.24, 16.6] +[16.4, 16.28, 19.44, 20.8, 23.48, 25.64, 26.68, 24.04, 24.04, 23.76, 21.32, 21.16, 21.2, 21.12, 20.92, 21.0] +16.73805570602417 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.366862535476685, 'TIME_S_1KI': 215.97630282243094, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 344.07916982650755, 'W': 20.55669881076274} +[16.64, 16.76, 16.76, 16.76, 16.6, 16.72, 16.88, 16.64, 16.24, 16.6, 16.32, 16.4, 16.44, 16.48, 16.76, 16.84, 16.96, 16.88, 16.84, 16.84] +300.16 +15.008000000000001 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 
'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.366862535476685, 'TIME_S_1KI': 215.97630282243094, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 344.07916982650755, 'W': 20.55669881076274, 'J_1KI': 7168.31603805224, 'W_1KI': 428.26455855755705, 'W_D': 5.548698810762739, 'J_D': 92.87442979049678, 'W_D_1KI': 115.59789189089038, 'J_D_1KI': 2408.2894143935496} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..a4dacc1 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 24, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.46686863899231, "TIME_S_1KI": 436.11952662467957, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 399.554963350296, "W": 20.111512880144534, "J_1KI": 16648.123472929, "W_1KI": 837.9797033393555, "W_D": 5.111512880144534, "J_D": 101.55030870437619, "W_D_1KI": 212.97970333935558, "J_D_1KI": 8874.154305806484} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..1daf88e --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.2 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 4.359426021575928} + +tensor(indices=tensor([[9262, 5815, 6282, ..., 9673, 3204, 5162], + [4188, 3998, 9847, ..., 123, 7399, 474]]), + values=tensor([0.5058, 0.7166, 0.6001, ..., 0.1092, 0.3759, 0.5726]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.2267, 0.1952, 0.5201, ..., 0.2235, 0.6837, 0.0426]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 4.359426021575928 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 24 -ss 10000 -sd 0.2 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.46686863899231} + +tensor(indices=tensor([[5089, 3001, 4694, ..., 80, 5122, 777], + [8836, 8209, 8178, ..., 2588, 6506, 5890]]), + values=tensor([0.2146, 0.8681, 0.3626, ..., 0.7301, 0.1725, 0.1861]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.5665, 0.1605, 0.2932, ..., 0.7000, 0.5518, 0.8677]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.46686863899231 seconds + +tensor(indices=tensor([[5089, 3001, 4694, ..., 80, 5122, 777], + [8836, 8209, 8178, ..., 2588, 6506, 5890]]), + 
values=tensor([0.2146, 0.8681, 0.3626, ..., 0.7301, 0.1725, 0.1861]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.5665, 0.1605, 0.2932, ..., 0.7000, 0.5518, 0.8677]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.46686863899231 seconds + +[16.92, 16.84, 16.56, 16.88, 16.92, 16.8, 16.76, 16.76, 16.72, 16.72] +[16.8, 16.68, 16.88, 20.16, 20.8, 23.2, 25.0, 23.36, 23.2, 21.88, 21.36, 20.56, 20.76, 20.96, 21.04, 21.2, 21.44, 22.0, 22.44] +19.866976976394653 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 24, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.46686863899231, 'TIME_S_1KI': 436.11952662467957, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 399.554963350296, 'W': 20.111512880144534} +[16.92, 16.84, 16.56, 16.88, 16.92, 16.8, 16.76, 16.76, 16.72, 16.72, 16.6, 16.52, 16.52, 16.32, 16.44, 16.52, 16.6, 16.64, 16.8, 16.56] +300.0 +15.0 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 24, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.46686863899231, 'TIME_S_1KI': 436.11952662467957, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 399.554963350296, 'W': 20.111512880144534, 'J_1KI': 16648.123472929, 'W_1KI': 837.9797033393555, 'W_D': 5.111512880144534, 'J_D': 101.55030870437619, 'W_D_1KI': 212.97970333935558, 'J_D_1KI': 8874.154305806484} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..b68356d --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 16, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.419291257858276, "TIME_S_1KI": 651.2057036161423, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 465.23378977775576, "W": 20.16260305783142, "J_1KI": 29077.111861109734, "W_1KI": 1260.1626911144638, "W_D": 5.332603057831417, "J_D": 123.04498198270794, "W_D_1KI": 333.28769111446354, "J_D_1KI": 20830.480694653972} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..c393ebd --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.3 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.534708738327026} + +tensor(indices=tensor([[5409, 261, 6608, ..., 8866, 2880, 8958], + [3148, 5606, 2549, ..., 3991, 4282, 2139]]), + values=tensor([0.5858, 0.3755, 0.5222, ..., 0.2426, 0.1269, 0.2047]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) 
+tensor([0.8512, 0.6856, 0.8124, ..., 0.2420, 0.1613, 0.1403]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 6.534708738327026 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 10000 -sd 0.3 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.419291257858276} + +tensor(indices=tensor([[8141, 394, 167, ..., 1231, 8668, 380], + [2053, 2354, 4579, ..., 9452, 8582, 5183]]), + values=tensor([0.1866, 0.0856, 0.1286, ..., 0.4015, 0.1846, 0.4239]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.5627, 0.2231, 0.8263, ..., 0.7185, 0.1248, 0.8760]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.419291257858276 seconds + +tensor(indices=tensor([[8141, 394, 167, ..., 1231, 8668, 380], + [2053, 2354, 4579, ..., 9452, 8582, 5183]]), + values=tensor([0.1866, 0.0856, 0.1286, ..., 0.4015, 0.1846, 0.4239]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.5627, 0.2231, 0.8263, ..., 0.7185, 0.1248, 0.8760]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.419291257858276 seconds + +[16.52, 16.4, 16.44, 16.44, 16.8, 16.88, 16.84, 16.72, 16.64, 16.76] +[16.8, 16.72, 17.2, 22.16, 23.72, 24.96, 26.92, 24.6, 23.36, 22.64, 22.64, 22.08, 21.04, 21.48, 21.72, 21.8, 21.92, 21.72, 21.84, 22.4, 22.48, 23.4] +23.07409358024597 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 16, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.419291257858276, 'TIME_S_1KI': 651.2057036161423, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 465.23378977775576, 'W': 20.16260305783142} +[16.52, 16.4, 16.44, 16.44, 16.8, 16.88, 16.84, 16.72, 16.64, 16.76, 16.64, 16.48, 16.36, 16.28, 16.24, 16.2, 16.24, 16.24, 16.28, 16.32] +296.6 +14.830000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 16, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.419291257858276, 'TIME_S_1KI': 651.2057036161423, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 465.23378977775576, 'W': 20.16260305783142, 'J_1KI': 29077.111861109734, 'W_1KI': 1260.1626911144638, 'W_D': 5.332603057831417, 'J_D': 123.04498198270794, 'W_D_1KI': 333.28769111446354, 'J_D_1KI': 20830.480694653972} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..b44a83e --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 12, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.442309617996216, 
"TIME_S_1KI": 870.1924681663513, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 529.9962892150882, "W": 20.250347694110115, "J_1KI": 44166.35743459068, "W_1KI": 1687.5289745091761, "W_D": 5.387347694110115, "J_D": 140.9987783775332, "W_D_1KI": 448.945641175843, "J_D_1KI": 37412.13676465358} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..337cbf6 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.4 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 8.713038206100464} + +tensor(indices=tensor([[7405, 7985, 5217, ..., 3242, 86, 2527], + [4324, 5636, 3501, ..., 7942, 1067, 6520]]), + values=tensor([0.1179, 0.1799, 0.4441, ..., 0.8872, 0.1633, 0.3956]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.4345, 0.4222, 0.9610, ..., 0.8204, 0.1992, 0.8862]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 8.713038206100464 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 12 -ss 10000 -sd 0.4 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.442309617996216} + +tensor(indices=tensor([[8492, 2131, 540, ..., 1988, 5264, 3205], + [7986, 6150, 5150, ..., 335, 6093, 5419]]), + values=tensor([0.1635, 0.7253, 0.1233, ..., 0.7716, 0.4598, 0.3409]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.1812, 0.1265, 0.6146, ..., 0.9438, 0.8607, 0.1191]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.442309617996216 seconds + +tensor(indices=tensor([[8492, 2131, 540, ..., 1988, 5264, 3205], + [7986, 6150, 5150, ..., 335, 6093, 5419]]), + values=tensor([0.1635, 0.7253, 0.1233, ..., 0.7716, 0.4598, 0.3409]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.1812, 0.1265, 0.6146, ..., 0.9438, 0.8607, 0.1191]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.442309617996216 seconds + +[16.72, 16.68, 16.48, 16.56, 16.64, 16.6, 16.52, 16.44, 16.08, 16.0] +[16.04, 16.28, 17.4, 19.32, 19.32, 21.8, 22.44, 23.88, 24.88, 24.24, 23.92, 23.8, 22.4, 21.16, 21.16, 21.16, 21.12, 21.08, 21.52, 22.72, 23.0, 23.84, 24.84, 24.16, 24.6] +26.17220687866211 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 12, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.442309617996216, 'TIME_S_1KI': 870.1924681663513, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 529.9962892150882, 'W': 
20.250347694110115} +[16.72, 16.68, 16.48, 16.56, 16.64, 16.6, 16.52, 16.44, 16.08, 16.0, 16.44, 16.48, 16.48, 16.56, 16.64, 16.68, 16.44, 16.6, 16.6, 16.4] +297.26 +14.863 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 12, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.442309617996216, 'TIME_S_1KI': 870.1924681663513, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 529.9962892150882, 'W': 20.250347694110115, 'J_1KI': 44166.35743459068, 'W_1KI': 1687.5289745091761, 'W_D': 5.387347694110115, 'J_D': 140.9987783775332, 'W_D_1KI': 448.945641175843, 'J_D_1KI': 37412.13676465358} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..fa0f9ba --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.87259030342102, "TIME_S_1KI": 1087.259030342102, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 602.8853721618653, "W": 20.565581162180006, "J_1KI": 60288.53721618653, "W_1KI": 2056.5581162180006, "W_D": 5.704581162180007, "J_D": 167.2312836611273, "W_D_1KI": 570.4581162180007, "J_D_1KI": 57045.81162180007} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..e94cb40 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.5 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.87259030342102} + +tensor(indices=tensor([[4102, 1045, 486, ..., 6701, 5797, 7630], + [7983, 3209, 8760, ..., 1810, 4381, 7012]]), + values=tensor([0.7925, 0.8059, 0.1616, ..., 0.2951, 0.6443, 0.2469]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.8238, 0.8906, 0.4976, ..., 0.6553, 0.6875, 0.0676]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.87259030342102 seconds + +tensor(indices=tensor([[4102, 1045, 486, ..., 6701, 5797, 7630], + [7983, 3209, 8760, ..., 1810, 4381, 7012]]), + values=tensor([0.7925, 0.8059, 0.1616, ..., 0.2951, 0.6443, 0.2469]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.8238, 0.8906, 0.4976, ..., 0.6553, 0.6875, 0.0676]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.87259030342102 seconds + +[16.72, 16.48, 16.48, 16.64, 16.72, 16.72, 16.72, 16.76, 16.64, 16.72] +[16.52, 16.56, 16.84, 19.12, 20.76, 22.04, 22.92, 24.52, 25.6, 24.8, 24.68, 24.48, 22.28, 22.28, 20.8, 20.84, 20.84, 20.96, 21.12, 21.0, 22.48, 23.4, 23.6, 
23.92, 23.76, 22.76, 22.96, 23.68] +29.315260648727417 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.87259030342102, 'TIME_S_1KI': 1087.259030342102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 602.8853721618653, 'W': 20.565581162180006} +[16.72, 16.48, 16.48, 16.64, 16.72, 16.72, 16.72, 16.76, 16.64, 16.72, 16.4, 16.4, 16.08, 16.12, 16.28, 16.28, 16.48, 16.6, 16.64, 16.52] +297.21999999999997 +14.860999999999999 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.87259030342102, 'TIME_S_1KI': 1087.259030342102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 602.8853721618653, 'W': 20.565581162180006, 'J_1KI': 60288.53721618653, 'W_1KI': 2056.5581162180006, 'W_D': 5.704581162180007, 'J_D': 167.2312836611273, 'W_D_1KI': 570.4581162180007, 'J_D_1KI': 57045.81162180007} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..0a0b522 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 326348, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.479444980621338, "TIME_S_1KI": 0.03211125847445469, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 268.45111671447756, "W": 18.4341820755598, "J_1KI": 0.8225915792787992, "W_1KI": 0.0564862725543279, "W_D": 3.7161820755598, "J_D": 54.11757484054569, "W_D_1KI": 0.011387175884515304, "J_D_1KI": 3.4892739911123416e-05} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..a264d3c --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,962 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.006699800491333008} + +tensor(indices=tensor([[ 530, 3328, 6336, ..., 4241, 1583, 6370], + [ 625, 5511, 8368, ..., 4250, 7123, 8279]]), + values=tensor([4.8645e-01, 5.7329e-01, 3.2806e-01, 8.6670e-01, + 2.2636e-01, 5.2197e-01, 6.8623e-01, 6.4236e-01, + 8.4913e-01, 3.9651e-01, 8.9521e-02, 7.3696e-01, + 1.4845e-01, 7.1445e-01, 6.9745e-01, 6.8238e-01, + 6.7313e-01, 5.4733e-02, 3.9650e-01, 4.1780e-01, + 4.6763e-01, 4.6979e-01, 2.2423e-01, 2.8477e-01, + 9.2289e-01, 8.8274e-01, 1.3886e-01, 7.5985e-01, + 2.7814e-01, 6.4740e-01, 5.7017e-01, 2.9137e-01, + 8.0203e-01, 9.6732e-01, 3.3839e-01, 3.9147e-01, + 1.7736e-01, 4.3033e-02, 1.8226e-01, 9.5043e-01, + 9.7570e-01, 8.2902e-01, 1.1998e-01, 7.4447e-01, + 2.8582e-01, 2.9554e-01, 6.8280e-01, 
2.1348e-01, + 2.9549e-01, 4.9069e-01, 2.8006e-01, 3.1620e-01, + 7.6901e-01, 9.8465e-01, 9.4553e-01, 3.2010e-02, + 8.8628e-01, 3.6107e-01, 4.6371e-01, 4.3797e-01, + 7.9200e-01, 7.1287e-01, 6.0287e-01, 5.2026e-01, + 3.5499e-01, 6.4552e-01, 5.9470e-01, 8.9930e-01, + 5.0034e-01, 6.1357e-01, 8.3370e-01, 3.7631e-01, + 2.7362e-01, 9.6283e-01, 9.1291e-02, 1.4612e-01, + 3.0549e-02, 6.3133e-01, 2.2183e-01, 8.3743e-01, + 1.8247e-01, 4.9230e-01, 1.2743e-01, 4.8255e-01, + 6.6573e-01, 5.5014e-01, 2.5893e-01, 3.7358e-01, + 2.5836e-01, 4.8853e-02, 9.0899e-03, 6.3487e-01, + 7.9174e-01, 5.6074e-01, 3.7577e-01, 2.2948e-01, + 6.5927e-01, 3.7216e-01, 1.0228e-01, 9.6432e-01, + 9.4383e-01, 8.0253e-01, 4.6111e-01, 4.2112e-02, + 3.8033e-01, 7.2336e-02, 1.4475e-01, 6.5473e-01, + 3.7061e-01, 4.7885e-01, 5.9970e-01, 2.6483e-01, + 8.6229e-01, 3.5489e-01, 9.0368e-01, 7.0664e-01, + 5.0220e-01, 6.8761e-01, 5.5128e-01, 8.9457e-01, + 2.8859e-01, 4.9135e-01, 7.1326e-01, 4.0840e-01, + 8.4587e-01, 5.0507e-01, 6.6573e-02, 5.5630e-01, + 3.6783e-03, 7.3578e-01, 3.1994e-01, 9.0602e-01, + 4.9288e-02, 8.9104e-01, 4.9748e-01, 9.4531e-01, + 3.9447e-01, 9.0207e-01, 4.7691e-01, 4.7757e-01, + 8.5290e-01, 6.3207e-01, 3.2731e-01, 7.1159e-01, + 3.5670e-01, 4.9513e-02, 9.8453e-01, 1.3585e-01, + 6.9497e-01, 9.5390e-01, 2.9320e-01, 2.4561e-01, + 8.1400e-01, 3.3159e-01, 5.5549e-01, 7.3149e-01, + 9.5139e-01, 5.0106e-01, 1.8298e-01, 7.8281e-03, + 1.3158e-01, 2.2831e-01, 9.5611e-01, 4.4622e-02, + 8.9351e-01, 4.7286e-01, 3.2684e-01, 1.5004e-02, + 1.3190e-01, 7.9361e-01, 8.0624e-01, 2.4956e-01, + 9.0081e-01, 5.7168e-01, 3.4001e-02, 4.2314e-01, + 7.4821e-01, 4.9596e-01, 1.1094e-01, 8.2021e-01, + 9.0591e-01, 6.9326e-01, 5.3026e-02, 3.3990e-02, + 5.9626e-01, 3.3134e-01, 4.6839e-01, 2.0881e-01, + 2.8580e-01, 7.1462e-01, 9.0049e-01, 9.8407e-01, + 7.1593e-01, 7.0413e-01, 2.8256e-01, 6.8790e-01, + 7.6217e-01, 3.7701e-01, 5.1322e-01, 7.3163e-01, + 5.6654e-01, 2.0341e-01, 6.1604e-01, 5.0903e-01, + 2.6791e-01, 5.7347e-01, 7.8992e-01, 1.3126e-02, + 9.4516e-01, 8.7180e-01, 7.9089e-01, 5.7080e-01, + 4.4603e-01, 2.1177e-01, 6.7440e-01, 8.8710e-01, + 1.3519e-01, 7.7990e-01, 9.1314e-01, 2.5113e-01, + 8.0170e-02, 1.8508e-01, 4.2038e-01, 6.9780e-04, + 5.9478e-01, 6.3514e-01, 7.4800e-01, 8.2975e-01, + 3.9991e-01, 1.9862e-01, 6.9512e-02, 8.8746e-01, + 6.5386e-01, 2.8752e-01, 4.4391e-01, 8.2437e-01, + 8.5494e-02, 5.1775e-01, 7.2167e-01, 9.5027e-01, + 7.9293e-01, 5.6009e-01, 2.4219e-01, 8.7197e-01, + 4.2957e-01, 5.0873e-01, 7.3975e-01, 2.4206e-01, + 8.8900e-01, 1.5552e-01, 1.7722e-01, 1.3849e-01, + 4.9978e-02, 6.2044e-01, 7.5904e-01, 2.0629e-01, + 8.6993e-01, 3.6349e-01, 8.7848e-01, 6.6765e-01, + 2.1732e-01, 9.1240e-01, 6.9962e-01, 8.3082e-01, + 6.3244e-01, 8.9102e-01, 5.8276e-01, 2.0525e-01, + 4.2950e-01, 8.1644e-01, 3.5082e-01, 3.6519e-01, + 4.8998e-01, 3.2641e-01, 5.4754e-01, 4.0886e-01, + 2.0440e-01, 1.3234e-01, 8.0186e-01, 6.3299e-01, + 1.0953e-02, 9.3186e-01, 8.1420e-01, 7.5978e-01, + 1.8893e-01, 9.8992e-01, 7.0028e-01, 9.6594e-01, + 5.6958e-02, 5.8315e-01, 7.6652e-01, 7.4205e-01, + 7.6844e-01, 9.4892e-01, 7.7256e-01, 8.1506e-01, + 5.3982e-01, 4.3652e-01, 4.8544e-01, 6.6088e-01, + 4.9884e-01, 6.4885e-01, 1.2762e-01, 8.0742e-01, + 6.6694e-01, 1.4241e-01, 4.1933e-01, 6.0407e-01, + 1.0223e-01, 6.5461e-01, 8.6028e-01, 3.1931e-01, + 1.3122e-01, 9.3185e-01, 3.5897e-01, 3.3124e-01, + 9.9043e-01, 4.6139e-01, 3.6325e-02, 8.7986e-01, + 9.3456e-01, 6.2243e-01, 8.8495e-01, 8.3742e-01, + 2.2863e-01, 4.9185e-01, 6.7491e-01, 7.3301e-02, + 4.9127e-01, 1.4380e-01, 3.7621e-01, 
9.2048e-02, + 3.0268e-01, 5.1724e-01, 9.6774e-01, 4.0680e-01, + 6.0310e-02, 8.9688e-01, 6.1285e-01, 5.7112e-01, + 8.6731e-01, 9.5446e-01, 4.6727e-01, 3.5004e-02, + 9.8342e-01, 8.1882e-01, 4.0782e-01, 1.6292e-01, + 4.3356e-01, 6.2224e-01, 2.9837e-01, 1.4603e-01, + 3.7996e-01, 8.0963e-01, 8.3690e-02, 6.3480e-01, + 9.0492e-01, 1.2088e-01, 4.2548e-01, 6.8439e-01, + 8.7840e-01, 7.7565e-01, 6.5259e-01, 9.2935e-01, + 2.5223e-01, 2.6370e-02, 2.8224e-01, 1.8273e-01, + 1.4611e-01, 5.0563e-01, 8.6818e-01, 8.5384e-01, + 6.6109e-01, 8.7791e-01, 4.2601e-01, 6.8830e-01, + 8.0228e-01, 1.5383e-01, 3.9311e-01, 6.3610e-03, + 9.2926e-01, 4.5180e-01, 4.3376e-02, 8.2246e-01, + 9.5964e-01, 2.6638e-01, 4.5724e-01, 8.5546e-01, + 7.1052e-01, 7.2205e-01, 2.1608e-01, 5.1010e-01, + 5.2914e-01, 2.5988e-01, 6.5731e-02, 7.0217e-01, + 3.0854e-01, 2.7893e-01, 4.9316e-01, 9.7228e-01, + 6.3534e-01, 7.0517e-01, 3.4879e-02, 6.8114e-01, + 1.9494e-01, 9.3008e-01, 8.4069e-01, 3.9854e-01, + 7.6845e-01, 7.1454e-01, 4.7374e-02, 8.7304e-01, + 1.5535e-01, 9.4884e-02, 5.0086e-01, 9.0695e-01, + 4.2387e-01, 8.2157e-01, 1.2502e-01, 1.5087e-01, + 1.3765e-01, 1.1560e-02, 1.8842e-01, 4.5016e-01, + 5.3603e-01, 6.5751e-02, 9.9959e-01, 2.7210e-01, + 4.8691e-01, 4.6716e-02, 2.0670e-02, 5.1458e-01, + 3.2633e-01, 5.6460e-01, 5.4145e-01, 1.1757e-01, + 1.9389e-01, 8.7833e-01, 1.1450e-01, 7.7762e-01, + 1.9510e-01, 5.5866e-01, 4.5439e-01, 7.6694e-01, + 7.5306e-01, 5.6063e-01, 8.2008e-01, 7.4078e-01, + 5.1007e-01, 3.9143e-01, 1.7221e-01, 9.7370e-01, + 4.1993e-01, 9.8473e-01, 4.3371e-01, 3.8475e-01, + 3.6492e-01, 9.0977e-01, 3.0429e-02, 8.6046e-01, + 3.1412e-01, 1.6488e-01, 6.6888e-01, 1.7196e-01, + 8.2187e-01, 4.5845e-01, 8.5605e-01, 4.4966e-01, + 7.4323e-01, 2.8612e-01, 3.1248e-01, 4.8967e-02, + 3.9395e-01, 2.0558e-01, 5.1947e-01, 4.4416e-01, + 9.0516e-01, 7.3142e-01, 3.5309e-01, 8.7967e-01, + 1.2485e-01, 2.3152e-02, 1.4299e-01, 4.7052e-01, + 8.4053e-01, 1.3931e-01, 4.3597e-01, 3.0001e-01, + 2.0432e-01, 3.5886e-01, 5.5473e-01, 4.5026e-02, + 8.8128e-01, 8.0682e-01, 1.5968e-01, 4.6277e-01, + 9.3718e-01, 9.7884e-01, 9.2695e-01, 2.8939e-01, + 2.7194e-01, 8.0578e-01, 6.4998e-02, 6.7262e-01, + 8.7628e-01, 3.0974e-02, 1.1813e-01, 4.8840e-01, + 5.4388e-01, 6.1631e-01, 8.6324e-01, 3.5915e-01, + 6.0396e-02, 6.8184e-01, 8.2191e-01, 4.4637e-01, + 2.6335e-01, 3.4131e-01, 5.5125e-01, 6.1226e-01, + 5.1669e-01, 5.5029e-01, 4.9656e-01, 7.4722e-01, + 7.1508e-01, 4.8236e-02, 6.3442e-01, 9.5933e-01, + 9.2101e-02, 4.8796e-01, 7.4423e-02, 2.6179e-01, + 6.1832e-01, 4.4606e-01, 2.3737e-01, 1.6221e-02, + 4.2269e-01, 3.3699e-01, 4.6366e-01, 9.0841e-01, + 2.4839e-01, 9.1205e-01, 1.5781e-01, 4.2890e-01, + 7.7165e-01, 9.0966e-01, 2.6909e-01, 5.3420e-01, + 2.1539e-01, 8.8069e-01, 7.0244e-02, 1.0868e-01, + 8.1880e-01, 5.6845e-01, 8.2157e-01, 1.4202e-02, + 5.8080e-01, 6.5084e-01, 5.7540e-02, 6.9491e-01, + 1.5917e-01, 6.7379e-01, 5.4967e-01, 4.9959e-01, + 3.5296e-01, 6.6860e-01, 9.3327e-01, 4.3214e-01, + 6.3514e-01, 9.6306e-02, 9.7815e-02, 7.4938e-01, + 8.8748e-01, 5.2725e-01, 3.3662e-01, 5.7772e-01, + 1.4548e-01, 5.8068e-02, 3.5421e-01, 5.3001e-01, + 8.6340e-02, 5.1854e-01, 5.4745e-01, 1.9925e-02, + 8.5646e-01, 7.2560e-01, 4.9874e-04, 2.2361e-01, + 6.5128e-01, 8.4195e-01, 2.5353e-01, 1.1549e-01, + 1.1823e-01, 8.8423e-01, 9.9198e-01, 2.7822e-01, + 2.7171e-01, 7.0992e-01, 7.0112e-01, 4.6346e-01, + 4.7385e-01, 8.1857e-01, 3.6380e-01, 8.7731e-01, + 2.4452e-01, 9.6853e-01, 5.8353e-01, 4.9447e-01, + 6.0279e-01, 8.9383e-01, 9.2526e-02, 5.6272e-01, + 8.2246e-01, 6.9196e-01, 8.8334e-01, 
2.8950e-01, + 4.9844e-01, 2.7792e-01, 1.1869e-01, 4.4552e-01, + 7.0978e-01, 5.0397e-01, 8.2864e-01, 5.7273e-01, + 7.5317e-01, 3.0885e-01, 7.3348e-01, 8.7400e-01, + 3.8004e-01, 6.0196e-01, 7.0085e-01, 8.8911e-01, + 1.7998e-02, 9.9504e-02, 4.2520e-01, 1.7335e-01, + 6.4486e-01, 3.8163e-01, 2.3380e-03, 1.9331e-01, + 5.0640e-01, 5.9497e-01, 4.9490e-01, 1.5435e-01, + 3.0150e-02, 3.7429e-01, 8.5736e-01, 5.7429e-01, + 4.0016e-01, 3.7924e-01, 4.1065e-01, 5.5853e-01, + 6.7220e-01, 7.3712e-01, 2.8890e-01, 4.9929e-01, + 4.3098e-01, 5.4037e-01, 9.8114e-01, 2.1578e-01, + 6.2673e-01, 8.3021e-01, 4.9371e-01, 1.6507e-01, + 7.6118e-02, 6.3258e-01, 3.3912e-01, 1.1993e-03, + 3.7378e-01, 9.2814e-01, 5.6128e-01, 1.9466e-01, + 5.1846e-01, 5.3761e-01, 9.2165e-01, 8.4905e-01, + 6.7438e-02, 3.5999e-01, 4.0758e-01, 8.1529e-01, + 9.9524e-01, 2.8727e-01, 6.7564e-01, 4.3579e-01, + 4.0122e-01, 6.1245e-01, 7.5375e-01, 3.8815e-01, + 9.8722e-01, 8.4095e-01, 6.3379e-01, 4.0246e-01, + 4.2106e-01, 7.2630e-01, 5.1241e-01, 6.4511e-01, + 1.4378e-01, 6.4218e-01, 3.2887e-01, 1.6846e-01, + 5.2449e-01, 1.2568e-01, 2.0152e-01, 1.5345e-01, + 8.6325e-01, 7.8956e-01, 3.4346e-01, 5.8268e-01, + 9.3981e-03, 9.0875e-01, 3.7598e-01, 8.2102e-02, + 8.8600e-01, 9.3170e-02, 7.8470e-01, 1.0614e-01, + 5.4125e-01, 9.9317e-01, 4.3875e-01, 7.9433e-02, + 6.7030e-01, 2.9202e-01, 8.7653e-01, 1.3516e-01, + 5.1990e-01, 2.8091e-01, 1.3598e-01, 9.4240e-02, + 6.8438e-01, 3.9884e-01, 1.7047e-01, 4.4468e-02, + 1.0071e-01, 1.6187e-01, 3.2932e-01, 5.7177e-01, + 7.3620e-01, 1.8920e-01, 5.2652e-01, 1.7567e-01, + 4.1953e-01, 6.4060e-01, 6.7206e-01, 4.1754e-03, + 3.7223e-01, 9.6030e-01, 9.1844e-01, 5.3276e-01, + 9.6459e-02, 7.2305e-01, 5.4058e-01, 4.6723e-01, + 4.7845e-01, 1.7632e-01, 5.2822e-01, 4.6763e-01, + 1.1855e-01, 4.6046e-01, 7.0566e-01, 4.4920e-01, + 5.6163e-01, 8.1439e-01, 1.9972e-01, 2.2248e-01, + 7.6117e-01, 7.4565e-01, 9.5332e-02, 1.4110e-01, + 2.1983e-01, 1.3044e-01, 4.7292e-01, 4.4526e-01, + 8.5063e-02, 4.8501e-01, 3.3742e-01, 8.3483e-01, + 5.2427e-02, 3.3358e-01, 5.7648e-01, 1.2839e-01, + 8.0725e-02, 7.9882e-01, 9.1469e-01, 9.3069e-02, + 7.3146e-01, 2.6957e-01, 4.1717e-01, 3.7395e-01, + 7.2644e-01, 8.1649e-01, 2.3882e-01, 1.0256e-02, + 7.5023e-01, 3.2471e-01, 4.5523e-01, 6.3116e-02, + 3.7980e-01, 8.2417e-01, 2.2949e-01, 8.5718e-01, + 4.3049e-01, 3.5121e-01, 8.1378e-01, 8.7087e-01, + 2.0785e-02, 3.5659e-01, 6.5902e-01, 2.3431e-01, + 9.9084e-01, 4.4114e-01, 7.0508e-01, 1.7818e-03, + 4.9946e-01, 1.0311e-01, 1.9469e-01, 1.6826e-01, + 8.3542e-01, 8.4401e-01, 3.7934e-01, 3.1887e-01, + 9.0190e-02, 3.1956e-02, 5.1027e-02, 5.2263e-01, + 8.7403e-01, 6.9315e-01, 8.3181e-01, 3.7356e-01, + 2.7743e-01, 4.0330e-01, 8.8198e-01, 8.8845e-01, + 2.1888e-01, 2.5026e-01, 2.3395e-01, 1.0503e-01, + 2.8726e-01, 6.9101e-01, 2.3156e-01, 9.9080e-01, + 3.6887e-01, 1.4815e-01, 4.2553e-01, 5.6537e-01, + 9.2837e-01, 3.4486e-01, 3.9804e-01, 9.6253e-01, + 6.2060e-01, 9.6807e-01, 4.1139e-01, 3.8113e-01, + 6.5933e-01, 4.9755e-02, 6.8372e-01, 7.7984e-01, + 9.1759e-01, 5.3586e-01, 4.0266e-01, 3.0545e-02, + 5.4012e-01, 3.2417e-01, 2.7385e-02, 4.1792e-01, + 7.7272e-01, 1.2322e-01, 2.4701e-01, 4.8124e-01, + 9.6199e-02, 1.4694e-01, 4.5664e-01, 4.6824e-01, + 5.8265e-01, 1.0997e-01, 8.6483e-01, 3.5718e-01, + 5.0639e-01, 7.9186e-01, 4.7107e-01, 5.2066e-01, + 2.6718e-02, 8.3190e-01, 9.3055e-01, 9.3947e-01, + 7.3370e-01, 5.9720e-01, 3.4803e-02, 1.3743e-01, + 1.9148e-01, 7.2315e-01, 9.4387e-01, 3.2057e-01, + 8.8584e-01, 5.4086e-01, 7.4433e-02, 6.6820e-01, + 3.0078e-01, 1.6312e-01, 8.5874e-01, 
6.4476e-01, + 7.4355e-02, 3.7768e-01, 7.7093e-01, 9.8274e-01, + 7.1970e-01, 7.2849e-01, 2.7709e-01, 9.9667e-01, + 9.6443e-01, 7.1508e-01, 6.3307e-01, 6.7834e-01, + 8.0242e-01, 9.2759e-01, 6.8310e-01, 1.5953e-01, + 3.9812e-01, 9.0380e-01, 5.3952e-02, 4.6583e-01, + 6.3858e-02, 8.9865e-01, 8.9778e-01, 5.3875e-02, + 1.2580e-01, 7.3230e-01, 6.9544e-01, 5.9456e-01, + 6.4392e-01, 7.9456e-01, 5.0904e-01, 1.6127e-01, + 3.2726e-01, 4.0005e-01, 3.0258e-01, 5.6420e-01, + 2.3136e-03, 4.9494e-01, 5.9099e-01, 7.6398e-02, + 6.4328e-01, 4.8183e-01, 9.6353e-01, 5.2242e-01, + 3.2929e-01, 8.8372e-01, 2.1264e-01, 5.9356e-01, + 9.3604e-01, 2.2214e-01, 6.0225e-01, 3.2329e-01, + 8.3541e-01, 3.2358e-01, 8.0690e-01, 7.7775e-01, + 5.0812e-01, 8.9521e-01, 8.1651e-01, 3.5140e-01, + 9.6388e-01, 3.3747e-01, 8.8269e-02, 8.7833e-01, + 1.9329e-02, 9.7543e-01, 5.5399e-01, 3.1796e-01, + 9.8277e-01, 2.3374e-01, 1.3556e-01, 9.9998e-01, + 6.6864e-02, 3.0924e-01, 8.3696e-01, 1.4052e-01, + 5.8411e-01, 3.1917e-01, 1.3044e-01, 1.1754e-01, + 2.0482e-01, 7.8514e-01, 9.9336e-01, 1.9915e-02, + 7.5440e-01, 4.7684e-01, 8.3793e-01, 6.3713e-01, + 6.0634e-02, 2.1682e-01, 8.0705e-01, 4.0441e-01, + 8.9760e-01, 3.9201e-01, 1.0901e-01, 8.1237e-01, + 2.4748e-01, 5.1673e-01, 8.1261e-01, 4.8745e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8333, 0.6445, 0.9029, ..., 0.6329, 0.5569, 0.4349]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.006699800491333008 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 15672 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5042340755462646} + +tensor(indices=tensor([[7252, 8345, 40, ..., 1502, 6970, 3572], + [6999, 5461, 3615, ..., 3491, 4596, 1404]]), + values=tensor([0.0385, 0.0479, 0.6167, 0.5428, 0.9574, 0.1321, 0.8245, + 0.5441, 0.4656, 0.2329, 0.8032, 0.0405, 0.1039, 0.6839, + 0.1362, 0.8342, 0.4934, 0.3293, 0.9674, 0.4300, 0.4229, + 0.6994, 0.5725, 0.3106, 0.2983, 0.1421, 0.9397, 0.6129, + 0.9006, 0.5377, 0.2070, 0.4173, 0.5232, 0.5850, 0.4975, + 0.4144, 0.8001, 0.5889, 0.4225, 0.2452, 0.3436, 0.7840, + 0.5775, 0.5625, 0.2766, 0.0737, 0.5697, 0.4303, 0.8994, + 0.4495, 0.9871, 0.9592, 0.7652, 0.6357, 0.5940, 0.5506, + 0.0941, 0.9744, 0.8501, 0.9595, 0.8835, 0.0253, 0.3865, + 0.5567, 0.4585, 0.4777, 0.2211, 0.6750, 0.0714, 0.2295, + 0.1069, 0.8612, 0.9011, 0.7175, 0.0470, 0.8873, 0.9954, + 0.0790, 0.5757, 0.1106, 0.5096, 0.4349, 0.3221, 0.7106, + 0.6661, 0.9750, 0.2093, 0.0154, 0.1894, 0.5537, 0.1155, + 0.6724, 0.3135, 0.7351, 0.1950, 0.1386, 0.8647, 0.9277, + 0.5365, 0.5548, 0.2013, 0.8463, 0.0952, 0.7102, 0.9568, + 0.9133, 0.4584, 0.5531, 0.4159, 0.5990, 0.8474, 0.3029, + 0.1021, 0.1939, 0.2372, 0.3028, 0.3301, 0.6991, 0.6907, + 0.7822, 0.8387, 0.3115, 0.2577, 0.0304, 0.5316, 0.2806, + 0.2734, 0.7240, 0.3405, 0.9746, 0.5608, 0.9007, 0.8390, + 0.1113, 0.5443, 0.7216, 0.1372, 0.0969, 0.3878, 0.7725, + 0.5835, 0.6404, 0.0329, 0.9598, 0.3335, 0.4662, 0.1170, + 0.8866, 0.1498, 0.8538, 0.5270, 0.8165, 0.3781, 0.0674, + 0.3503, 0.9158, 0.3730, 0.1330, 0.7177, 0.1674, 0.8264, + 0.7680, 0.6705, 0.1141, 0.1086, 0.3016, 0.3386, 0.3587, + 0.7048, 0.6781, 0.1210, 0.1093, 0.1878, 0.9558, 
0.2874, + 0.1840, 0.3037, 0.1698, 0.2805, 0.9971, 0.3084, 0.0489, + 0.9124, 0.6053, 0.8362, 0.7887, 0.5522, 0.4832, 0.9068, + 0.8848, 0.5944, 0.5433, 0.8749, 0.1058, 0.9525, 0.7681, + 0.5558, 0.9650, 0.2293, 0.2435, 0.7887, 0.3694, 0.0480, + 0.7073, 0.9456, 0.9520, 0.9687, 0.8991, 0.1023, 0.0248, + 0.7482, 0.6580, 0.1694, 0.4046, 0.5633, 0.1134, 0.5246, + 0.5319, 0.4551, 0.1862, 0.7451, 0.5793, 0.8843, 0.0278, + 0.0506, 0.2946, 0.7367, 0.0627, 0.6838, 0.4407, 0.7786, + 0.1880, 0.8441, 0.5566, 0.7444, 0.5026, 0.0386, 0.3749, + 0.4915, 0.7147, 0.1918, 0.0089, 0.9546, 0.5133, 0.1737, + 0.3545, 0.9713, 0.3557, 0.8151, 0.0596, 0.2917, 0.7099, + 0.2149, 0.9346, 0.5441, 0.5973, 0.0416, 0.0201, 0.3913, + 0.3038, 0.4568, 0.9285, 0.6752, 0.1751, 0.0594, 0.0842, + 0.0203, 0.7900, 0.7602, 0.4823, 0.9729, 0.8796, 0.6646, + 0.3902, 0.2507, 0.1426, 0.1910, 0.5711, 0.1676, 0.1068, + 0.8726, 0.7233, 0.7356, 0.7027, 0.5030, 0.1097, 0.8573, + 0.8241, 0.3986, 0.5449, 0.2347, 0.9037, 0.0463, 0.1630, + 0.8515, 0.5927, 0.7887, 0.3328, 0.3040, 0.6618, 0.3387, + 0.8959, 0.7071, 0.5180, 0.2857, 0.5419, 0.5936, 0.9283, + 0.5947, 0.4323, 0.0800, 0.3784, 0.9672, 0.7526, 0.4477, + 0.5281, 0.7818, 0.0637, 0.7770, 0.9549, 0.1295, 0.7359, + 0.5243, 0.5731, 0.2030, 0.8561, 0.8485, 0.5422, 0.7100, + 0.5289, 0.3938, 0.2708, 0.9335, 0.8077, 0.1524, 0.9802, + 0.9322, 0.0354, 0.6981, 0.7232, 0.5293, 0.0364, 0.1385, + 0.8644, 0.5567, 0.4689, 0.0974, 0.7795, 0.8820, 0.8826, + 0.0438, 0.3708, 0.7737, 0.7605, 0.9957, 0.3219, 0.1458, + 0.9061, 0.5392, 0.8015, 0.5002, 0.6472, 0.1342, 0.0312, + 0.6648, 0.1380, 0.0843, 0.2995, 0.2542, 0.4964, 0.8432, + 0.6736, 0.6224, 0.9825, 0.8312, 0.1118, 0.9566, 0.7731, + 0.5163, 0.0882, 0.8567, 0.7867, 0.7242, 0.4244, 0.4535, + 0.2498, 0.0800, 0.2565, 0.7621, 0.2711, 0.2164, 0.9494, + 0.9982, 0.1499, 0.0390, 0.8478, 0.9842, 0.1805, 0.7293, + 0.2602, 0.8980, 0.4278, 0.4635, 0.7425, 0.4221, 0.2923, + 0.4866, 0.6869, 0.9770, 0.7029, 0.2887, 0.8305, 0.5594, + 0.0580, 0.5649, 0.4312, 0.2618, 0.3019, 0.8850, 0.1776, + 0.7495, 0.2642, 0.9493, 0.5339, 0.5302, 0.2077, 0.7837, + 0.2919, 0.6311, 0.5071, 0.4948, 0.9668, 0.1272, 0.4349, + 0.9447, 0.5609, 0.6485, 0.2033, 0.6217, 0.4272, 0.6662, + 0.0650, 0.3200, 0.1582, 0.3797, 0.1704, 0.8343, 0.8189, + 0.7107, 0.4625, 0.7914, 0.2160, 0.1283, 0.7691, 0.3127, + 0.4487, 0.6762, 0.1530, 0.2028, 0.8620, 0.4575, 0.1742, + 0.8419, 0.5685, 0.0628, 0.9265, 0.4966, 0.3136, 0.5895, + 0.2761, 0.7411, 0.5179, 0.1742, 0.6620, 0.8988, 0.7570, + 0.2832, 0.6037, 0.1099, 0.0285, 0.8570, 0.6317, 0.1784, + 0.3934, 0.2275, 0.6816, 0.5623, 0.8538, 0.0128, 0.0963, + 0.9274, 0.8651, 0.1147, 0.2838, 0.8750, 0.5741, 0.9816, + 0.1892, 0.4918, 0.4718, 0.2983, 0.2029, 0.1405, 0.0722, + 0.3955, 0.3692, 0.5205, 0.2424, 0.8679, 0.5138, 0.0397, + 0.2531, 0.7980, 0.6518, 0.0123, 0.8246, 0.2469, 0.3488, + 0.9445, 0.2976, 0.9142, 0.7331, 0.7517, 0.4107, 0.1002, + 0.8699, 0.4479, 0.5060, 0.9655, 0.9776, 0.8327, 0.5826, + 0.6496, 0.8480, 0.9932, 0.1898, 0.9206, 0.9141, 0.2502, + 0.9292, 0.0744, 0.7154, 0.0931, 0.3255, 0.7711, 0.1637, + 0.0110, 0.6863, 0.4911, 0.4573, 0.0959, 0.9739, 0.3510, + 0.9156, 0.3674, 0.5265, 0.6204, 0.6800, 0.7174, 0.4142, + 0.3921, 0.6833, 0.1739, 0.9599, 0.3479, 0.1937, 0.3862, + 0.1926, 0.0386, 0.9502, 0.0575, 0.2573, 0.1825, 0.2241, + 0.7838, 0.6132, 0.9072, 0.0291, 0.9227, 0.3595, 0.8362, + 0.3542, 0.6655, 0.2218, 0.2991, 0.3542, 0.8970, 0.0867, + 0.1735, 0.1360, 0.7951, 0.1992, 0.0937, 0.4493, 0.5293, + 0.9135, 0.2229, 0.1245, 0.4816, 0.4209, 0.6846, 0.9123, + 
0.6745, 0.1865, 0.5352, 0.3739, 0.8439, 0.5906, 0.1072, + 0.5198, 0.6797, 0.7925, 0.0651, 0.0165, 0.4438, 0.7432, + 0.5795, 0.7559, 0.0481, 0.2193, 0.2606, 0.3340, 0.1332, + 0.6177, 0.0714, 0.9538, 0.0450, 0.7526, 0.7254, 0.5604, + 0.5184, 0.4542, 0.9649, 0.4086, 0.8534, 0.8594, 0.2320, + 0.5364, 0.2291, 0.9744, 0.3666, 0.2960, 0.3603, 0.5667, + 0.9368, 0.2923, 0.9882, 0.2362, 0.7214, 0.6899, 0.3982, + 0.4445, 0.1613, 0.7993, 0.0685, 0.1472, 0.2955, 0.8177, + 0.8335, 0.8549, 0.6403, 0.0601, 0.0580, 0.3672, 0.9722, + 0.6672, 0.3285, 0.9387, 0.6791, 0.7030, 0.0400, 0.4327, + 0.2037, 0.0467, 0.9071, 0.4003, 0.9097, 0.4849, 0.3634, + 0.3845, 0.1651, 0.1549, 0.4468, 0.4628, 0.2958, 0.1783, + 0.4021, 0.1125, 0.6296, 0.1789, 0.6172, 0.3540, 0.3757, + 0.9476, 0.6428, 0.0719, 0.5104, 0.8964, 0.6611, 0.4084, + 0.2565, 0.4864, 0.7492, 0.3252, 0.5523, 0.5950, 0.0164, + 0.2186, 0.6718, 0.5770, 0.7655, 0.5893, 0.2494, 0.4890, + 0.6569, 0.4607, 0.6630, 0.7476, 0.2372, 0.5530, 0.1188, + 0.0930, 0.1952, 0.1583, 0.3445, 0.4302, 0.5799, 0.3849, + 0.0774, 0.3088, 0.7380, 0.9259, 0.4084, 0.2226, 0.8644, + 0.7879, 0.7873, 0.9580, 0.1252, 0.4528, 0.1397, 0.1923, + 0.1701, 0.8816, 0.7404, 0.8293, 0.6017, 0.1090, 0.7025, + 0.0125, 0.6330, 0.0987, 0.5433, 0.7724, 0.7158, 0.7592, + 0.5959, 0.0157, 0.5002, 0.5403, 0.0923, 0.2947, 0.9372, + 0.4866, 0.6994, 0.5799, 0.9133, 0.0647, 0.4712, 0.1825, + 0.4290, 0.3464, 0.9861, 0.7505, 0.3891, 0.1415, 0.6799, + 0.0029, 0.8892, 0.1311, 0.4143, 0.1288, 0.4100, 0.2514, + 0.7892, 0.6601, 0.8454, 0.2071, 0.2533, 0.1058, 0.8660, + 0.8279, 0.2062, 0.8780, 0.4591, 0.9617, 0.9103, 0.1649, + 0.6919, 0.0200, 0.9872, 0.1439, 0.2122, 0.8533, 0.9761, + 0.6385, 0.3452, 0.4532, 0.0439, 0.5415, 0.8996, 0.5953, + 0.3797, 0.8872, 0.4492, 0.9620, 0.0017, 0.9999, 0.0257, + 0.5657, 0.0062, 0.6023, 0.4258, 0.3674, 0.8863, 0.7218, + 0.8362, 0.4589, 0.2969, 0.2498, 0.2804, 0.1529, 0.8267, + 0.3651, 0.3297, 0.1626, 0.4514, 0.6367, 0.7388, 0.7363, + 0.1056, 0.3612, 0.6802, 0.1290, 0.1736, 0.5661, 0.0426, + 0.6325, 0.4305, 0.0200, 0.1978, 0.7042, 0.8648, 0.3735, + 0.7439, 0.4649, 0.2139, 0.8117, 0.4670, 0.9368, 0.2307, + 0.3541, 0.8092, 0.9745, 0.9409, 0.7621, 0.8607, 0.1306, + 0.1996, 0.9581, 0.8227, 0.2018, 0.6842, 0.0646, 0.2196, + 0.6116, 0.5747, 0.9903, 0.1853, 0.6773, 0.1478, 0.6232, + 0.0292, 0.3547, 0.6867, 0.5060, 0.4535, 0.6383, 0.0341, + 0.3467, 0.2850, 0.0668, 0.3664, 0.0837, 0.6853, 0.1836, + 0.6476, 0.7569, 0.1380, 0.8239, 0.6398, 0.9474, 0.5355, + 0.5254, 0.6105, 0.0248, 0.1733, 0.1847, 0.5938, 0.6247, + 0.3364, 0.1481, 0.6473, 0.2150, 0.8396, 0.9238, 0.5524, + 0.6236, 0.6863, 0.6985, 0.3161, 0.2748, 0.0148, 0.6229, + 0.8882, 0.0318, 0.9541, 0.1378, 0.4252, 0.0327, 0.3743, + 0.2877, 0.2469, 0.7570, 0.3245, 0.5972, 0.0378, 0.2970, + 0.2162, 0.0619, 0.6705, 0.8274, 0.2152, 0.9553, 0.6278, + 0.4043, 0.4639, 0.4936, 0.7735, 0.0511, 0.0143, 0.6780, + 0.9728, 0.9880, 0.0101, 0.2097, 0.7715, 0.8489, 0.7366, + 0.8590, 0.8505, 0.6482, 0.0212, 0.1124, 0.3247, 0.0224, + 0.3770, 0.6499, 0.9361, 0.8757, 0.5666, 0.7957, 0.4616, + 0.4254, 0.7349, 0.9702, 0.7074, 0.9836, 0.4657, 0.5009, + 0.4579, 0.9746, 0.3168, 0.5401, 0.0843, 0.8626, 0.0269, + 0.9307, 0.6440, 0.8148, 0.3894, 0.0850, 0.9978, 0.8435, + 0.2074, 0.8884, 0.4985, 0.6584, 0.8221, 0.1763]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.1392, 0.8299, 0.9207, ..., 0.6716, 0.7310, 0.1233]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 
+Time: 0.5042340755462646 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 326348 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.479444980621338} + +tensor(indices=tensor([[9172, 2429, 6353, ..., 7714, 7689, 8820], + [7688, 4648, 6706, ..., 1606, 9148, 2345]]), + values=tensor([4.8366e-01, 3.9355e-01, 2.6159e-01, 5.8335e-02, + 7.9376e-01, 6.5605e-01, 3.2307e-01, 8.4396e-01, + 6.2539e-01, 5.0905e-01, 4.4674e-01, 4.3477e-01, + 7.3522e-01, 6.7016e-01, 2.8906e-01, 6.1057e-01, + 7.1029e-01, 1.8830e-01, 9.1667e-01, 1.6986e-01, + 6.6532e-01, 5.8859e-01, 4.5671e-01, 6.8187e-01, + 1.7162e-01, 8.7160e-01, 9.4088e-01, 4.4511e-01, + 7.0663e-02, 1.4066e-01, 2.6662e-01, 2.4471e-01, + 4.3443e-01, 8.0422e-01, 5.9526e-01, 7.0805e-01, + 9.0799e-01, 6.6537e-01, 7.0350e-01, 4.4167e-02, + 9.3859e-01, 1.8677e-01, 2.4750e-01, 1.4583e-01, + 2.2921e-01, 9.8350e-01, 2.7472e-01, 6.4488e-01, + 3.2366e-01, 3.3439e-01, 2.6999e-02, 9.7001e-01, + 4.0220e-01, 9.5746e-01, 2.1405e-01, 6.4061e-02, + 6.2274e-01, 4.9218e-01, 9.1948e-01, 6.1329e-01, + 8.2108e-01, 4.9761e-01, 4.9290e-01, 7.3546e-01, + 3.9070e-01, 8.7681e-02, 9.6972e-01, 4.0275e-01, + 5.1401e-01, 7.9433e-01, 8.6020e-01, 2.6074e-01, + 7.3241e-01, 7.0533e-01, 2.6028e-01, 8.1899e-01, + 8.4613e-01, 9.2740e-01, 6.7538e-01, 7.5467e-01, + 4.5208e-01, 1.2277e-01, 7.6667e-01, 7.4566e-01, + 2.3237e-01, 9.6943e-01, 8.2722e-02, 2.4302e-01, + 7.2879e-01, 6.3808e-01, 1.3969e-01, 6.3873e-01, + 7.3513e-01, 2.2494e-01, 4.6020e-02, 5.0965e-01, + 2.2016e-01, 3.9471e-01, 5.4120e-01, 8.1608e-02, + 2.4547e-01, 3.0715e-01, 4.3574e-01, 2.9563e-01, + 7.0165e-01, 2.5212e-01, 6.4378e-01, 4.9896e-01, + 3.9709e-01, 5.1103e-01, 4.3351e-01, 4.7314e-01, + 4.8033e-01, 7.1991e-01, 5.2059e-01, 5.1962e-01, + 3.1207e-01, 8.6598e-01, 8.0973e-01, 9.1107e-01, + 7.7134e-01, 9.6560e-02, 6.6471e-01, 6.9354e-01, + 7.8067e-01, 3.4045e-01, 5.8287e-01, 9.2695e-01, + 9.1201e-01, 9.6246e-01, 9.4255e-01, 8.5062e-01, + 1.4343e-01, 9.9676e-02, 9.1028e-01, 2.8288e-01, + 3.4679e-01, 4.3676e-01, 5.2874e-03, 8.8073e-02, + 4.8823e-02, 8.3610e-01, 3.0562e-01, 2.0389e-01, + 5.9459e-01, 5.6710e-01, 1.7357e-01, 5.8366e-01, + 6.2691e-01, 6.0607e-01, 2.3966e-01, 8.7998e-01, + 5.6240e-01, 3.7765e-02, 1.0355e-01, 3.3334e-01, + 9.3467e-01, 3.7856e-01, 6.2793e-02, 8.3406e-02, + 7.8200e-01, 2.6424e-02, 9.6791e-01, 4.8640e-01, + 3.4668e-02, 3.8390e-01, 1.7441e-01, 5.5482e-01, + 9.5989e-01, 7.8101e-01, 7.5774e-02, 2.1170e-01, + 1.6418e-01, 7.0756e-02, 5.4844e-02, 2.3706e-01, + 4.7321e-01, 6.6053e-01, 5.6799e-02, 6.7572e-01, + 7.4163e-01, 3.8591e-01, 6.6393e-01, 9.4134e-01, + 7.7028e-01, 4.6113e-01, 3.0666e-02, 4.1197e-01, + 7.5174e-01, 6.7355e-01, 6.3323e-01, 6.4926e-01, + 6.6143e-01, 6.5110e-01, 6.7032e-01, 4.1882e-01, + 9.7379e-01, 3.7505e-01, 5.7684e-01, 8.6197e-02, + 2.0046e-01, 2.9844e-01, 4.5758e-01, 8.7198e-01, + 4.7791e-01, 9.6753e-01, 7.1265e-01, 1.3496e-01, + 4.5119e-02, 9.4931e-01, 9.9138e-01, 3.6250e-01, + 8.2610e-01, 7.4795e-02, 2.8908e-01, 5.7194e-01, + 2.3893e-01, 7.0059e-01, 1.6897e-01, 1.4648e-01, + 7.6706e-02, 7.5732e-01, 7.1938e-01, 6.7577e-01, + 4.4335e-01, 1.1199e-01, 4.3027e-01, 6.6742e-01, + 5.1258e-01, 3.7022e-01, 9.4545e-01, 8.1899e-01, + 1.9034e-01, 8.6603e-01, 6.7967e-01, 9.3142e-01, + 3.9865e-01, 
1.3182e-01, 8.7938e-01, 9.5611e-01, + 4.4102e-01, 1.8913e-01, 5.5603e-01, 5.3075e-01, + 1.8889e-02, 7.0653e-04, 3.9023e-01, 4.2248e-02, + 4.3166e-01, 9.3899e-01, 2.3071e-01, 2.7792e-01, + 4.1888e-01, 7.5847e-03, 2.4539e-01, 3.2887e-01, + 9.7236e-01, 7.8483e-01, 2.6044e-01, 7.5927e-01, + 9.1136e-01, 6.2480e-01, 5.5890e-01, 2.9877e-01, + 9.2552e-01, 5.1656e-01, 7.7842e-01, 4.8503e-01, + 2.8569e-01, 2.9511e-01, 7.9276e-01, 8.6266e-01, + 8.7156e-01, 7.5421e-01, 9.5567e-01, 7.7697e-01, + 5.7681e-01, 6.7453e-01, 4.7668e-02, 1.5258e-01, + 6.7824e-01, 1.2033e-01, 4.8097e-01, 4.9992e-01, + 2.3426e-01, 9.4416e-01, 3.9313e-01, 2.8711e-01, + 6.1285e-01, 8.6592e-01, 4.9012e-01, 5.3029e-01, + 6.0155e-01, 5.2056e-01, 3.3041e-01, 7.7049e-01, + 2.4367e-01, 6.8252e-01, 7.8802e-01, 7.0757e-01, + 6.1274e-01, 6.7679e-01, 9.8172e-01, 2.2015e-01, + 6.1708e-02, 1.3019e-01, 8.9863e-01, 2.2791e-01, + 6.9285e-01, 9.1611e-02, 6.4287e-01, 7.0090e-01, + 6.0467e-01, 3.5816e-01, 4.6181e-02, 5.8360e-01, + 9.8221e-01, 5.8765e-01, 7.4240e-01, 6.5850e-02, + 8.0175e-01, 1.5937e-01, 2.6052e-01, 2.3928e-01, + 5.6318e-01, 2.2281e-01, 9.1181e-01, 6.7775e-01, + 5.3528e-01, 5.1545e-01, 5.1387e-01, 2.4604e-01, + 2.9211e-01, 1.7920e-01, 8.2015e-01, 6.4112e-01, + 4.3519e-01, 7.6063e-01, 5.5332e-01, 2.2743e-01, + 2.9349e-01, 5.8850e-01, 3.5769e-02, 4.5193e-01, + 3.1998e-01, 7.7707e-01, 3.3129e-02, 7.4088e-01, + 7.6528e-01, 5.5631e-01, 7.3546e-01, 3.7740e-01, + 7.6441e-01, 6.3968e-01, 5.7361e-01, 8.3172e-01, + 7.0719e-01, 8.5432e-02, 9.6624e-01, 8.8384e-01, + 4.8040e-01, 4.6951e-01, 6.3662e-01, 3.9071e-01, + 5.8382e-01, 5.3997e-02, 2.9334e-01, 7.3939e-01, + 5.8036e-02, 8.7515e-02, 2.0045e-01, 8.5240e-01, + 6.5693e-01, 7.0643e-01, 1.6919e-01, 6.0965e-01, + 9.0594e-01, 5.4865e-01, 5.0710e-01, 2.5384e-01, + 4.2834e-01, 7.4821e-01, 7.1738e-01, 6.5093e-01, + 3.6264e-01, 1.3919e-01, 9.5427e-01, 6.1267e-01, + 3.8076e-01, 5.2669e-01, 9.6300e-01, 9.2278e-01, + 2.4006e-01, 8.9546e-01, 4.9214e-01, 8.1677e-01, + 9.2016e-01, 8.4240e-01, 7.9755e-01, 6.4797e-01, + 4.6573e-01, 6.9013e-01, 9.1859e-01, 3.3628e-01, + 8.7033e-01, 8.5344e-01, 5.4119e-01, 8.8083e-01, + 7.8018e-01, 7.1882e-01, 3.2215e-01, 9.6905e-01, + 9.8826e-01, 2.0282e-04, 7.3931e-01, 1.7460e-01, + 9.5877e-01, 5.2985e-01, 3.3480e-01, 1.5497e-01, + 8.6819e-02, 9.9658e-01, 2.0924e-01, 4.5869e-01, + 7.9179e-01, 3.7743e-01, 5.7045e-01, 5.0963e-01, + 7.8285e-01, 7.9341e-01, 5.8662e-01, 7.5922e-01, + 7.9313e-01, 3.7007e-01, 8.4428e-01, 9.1531e-01, + 1.4746e-01, 2.9424e-01, 1.1936e-01, 5.4159e-01, + 5.1407e-01, 8.3861e-01, 2.3164e-02, 2.0412e-01, + 9.1715e-01, 8.8040e-01, 6.6586e-01, 6.2717e-01, + 2.9639e-01, 4.8296e-01, 9.0129e-01, 1.0753e-02, + 6.4312e-03, 1.9745e-01, 2.6278e-01, 8.2869e-01, + 2.6370e-01, 1.8432e-01, 7.9801e-01, 8.1826e-01, + 4.8748e-02, 9.1808e-01, 4.5492e-02, 2.1211e-01, + 5.5441e-01, 4.7292e-01, 7.4636e-01, 8.2844e-01, + 8.7312e-01, 8.0603e-01, 5.7595e-01, 7.9347e-01, + 6.0872e-01, 1.5495e-01, 2.5539e-01, 8.2452e-01, + 3.2318e-01, 1.8774e-01, 8.6187e-01, 4.7792e-01, + 6.1444e-01, 1.5352e-01, 8.4322e-02, 3.5586e-02, + 9.0188e-01, 1.7482e-01, 5.2028e-01, 6.5634e-01, + 4.9285e-02, 4.4791e-01, 2.8668e-01, 7.5348e-01, + 3.1031e-04, 6.4549e-01, 2.7721e-01, 1.4701e-01, + 7.5656e-01, 7.8954e-01, 9.8003e-01, 6.3521e-02, + 2.3553e-01, 5.6783e-01, 8.6132e-01, 5.4903e-01, + 7.1377e-01, 6.3587e-02, 1.9102e-01, 2.6056e-01, + 3.1931e-01, 8.9053e-01, 4.1281e-01, 5.3931e-01, + 1.2814e-01, 5.8284e-01, 4.3902e-01, 3.9263e-01, + 9.9609e-01, 7.4829e-01, 3.2914e-01, 1.6642e-01, + 3.8965e-01, 
2.9626e-01, 1.8320e-01, 2.6940e-01, + 3.2273e-01, 8.6447e-01, 5.4752e-02, 5.5482e-01, + 4.6625e-01, 2.1042e-01, 1.7733e-01, 2.1574e-01, + 3.2090e-01, 9.3050e-01, 3.0749e-01, 4.6511e-01, + 8.9439e-01, 7.0167e-01, 9.0285e-01, 6.1802e-01, + 4.1629e-02, 9.8434e-01, 1.5911e-01, 3.7657e-01, + 4.4523e-01, 5.5572e-01, 7.1956e-01, 3.6162e-01, + 9.4532e-01, 9.7088e-02, 2.2900e-01, 3.2226e-01, + 4.7835e-01, 3.5746e-01, 1.7574e-02, 9.7349e-01, + 6.6122e-01, 9.2451e-01, 9.8805e-01, 7.3399e-01, + 1.4402e-01, 1.2531e-01, 4.7684e-01, 8.2450e-01, + 9.1720e-01, 4.5828e-01, 7.8499e-01, 2.2210e-01, + 7.3784e-01, 9.1802e-01, 7.2350e-01, 8.2621e-01, + 4.6411e-01, 6.7975e-01, 3.1113e-01, 9.6243e-01, + 3.8949e-02, 2.1081e-01, 4.5029e-01, 9.7497e-01, + 6.6687e-01, 7.7118e-01, 8.0086e-01, 1.1061e-01, + 4.5056e-01, 3.5519e-01, 9.1050e-01, 3.7707e-01, + 3.6540e-01, 4.1732e-01, 3.8517e-01, 4.4625e-01, + 6.1686e-01, 9.2023e-01, 8.3373e-01, 1.6176e-02, + 2.1617e-01, 7.6827e-01, 6.7185e-01, 7.7670e-01, + 4.1237e-01, 8.9243e-01, 4.0012e-02, 3.4428e-01, + 9.1232e-01, 7.7951e-02, 4.2551e-01, 3.6695e-01, + 2.5261e-01, 3.6701e-01, 8.3455e-01, 3.3580e-02, + 6.1940e-01, 2.7511e-01, 6.9744e-01, 1.6603e-01, + 2.1142e-02, 6.7177e-01, 3.6917e-01, 6.1120e-02, + 8.1283e-01, 2.2435e-01, 2.7523e-01, 2.2864e-01, + 6.1958e-01, 2.2791e-01, 9.7316e-01, 5.3317e-01, + 8.1484e-01, 8.0708e-01, 1.1810e-01, 9.8123e-01, + 7.1224e-01, 7.5814e-01, 6.5290e-01, 1.7947e-01, + 2.5657e-02, 6.2436e-01, 1.8969e-01, 7.1058e-01, + 5.9308e-01, 9.8643e-01, 8.4550e-01, 2.4804e-01, + 3.8073e-01, 3.1296e-01, 3.9128e-02, 3.3224e-01, + 1.8456e-01, 5.3023e-01, 6.9432e-02, 5.3587e-01, + 1.9614e-01, 7.3887e-01, 5.7570e-01, 5.4720e-01, + 2.5498e-03, 4.4351e-01, 7.7455e-01, 4.9992e-01, + 6.6660e-01, 8.7419e-01, 8.9134e-02, 3.1002e-01, + 3.0193e-01, 5.6203e-01, 3.7269e-01, 7.7426e-01, + 3.5420e-02, 7.8405e-01, 2.0538e-01, 3.9263e-01, + 1.4023e-01, 1.8331e-01, 7.1382e-01, 5.9417e-01, + 6.5223e-01, 5.3873e-01, 2.6739e-01, 5.4240e-01, + 1.7510e-01, 1.4232e-01, 8.7694e-01, 2.3087e-01, + 6.4513e-01, 6.4312e-01, 3.1202e-01, 9.9481e-01, + 6.9916e-02, 6.8821e-01, 3.1687e-01, 7.5978e-01, + 3.5627e-01, 9.4563e-01, 3.5964e-01, 2.0506e-01, + 4.5446e-01, 6.9075e-01, 3.0355e-01, 3.0531e-02, + 1.3375e-01, 9.4252e-01, 4.3528e-01, 3.0526e-01, + 4.6327e-01, 4.6100e-01, 5.3258e-01, 3.5702e-01, + 6.0418e-01, 5.1673e-01, 1.3892e-01, 2.6807e-01, + 5.9205e-01, 4.0025e-01, 3.6834e-01, 4.6333e-02, + 1.7861e-01, 5.5707e-02, 6.3862e-01, 5.6881e-02, + 2.1076e-01, 3.8528e-01, 2.1007e-01, 1.1846e-01, + 4.6632e-01, 6.5782e-01, 2.6916e-01, 4.7947e-01, + 3.2009e-01, 7.1530e-01, 5.2339e-01, 3.0106e-01, + 9.4899e-01, 1.1394e-01, 2.7710e-01, 9.2652e-01, + 4.2162e-01, 3.0528e-01, 6.9020e-01, 7.2013e-01, + 9.4560e-01, 6.5089e-01, 4.8179e-01, 1.9096e-01, + 6.5947e-01, 2.7371e-01, 8.5785e-01, 3.4400e-01, + 7.5307e-01, 2.5075e-01, 3.7516e-01, 3.0734e-01, + 8.7404e-01, 5.9911e-01, 8.3048e-01, 8.0707e-01, + 5.4730e-01, 3.6593e-01, 3.3340e-01, 7.3418e-01, + 5.0639e-01, 3.5549e-01, 1.6334e-01, 1.3804e-01, + 1.5397e-01, 6.6612e-02, 5.0566e-01, 7.2011e-01, + 6.8673e-01, 9.2644e-01, 3.3890e-01, 1.0703e-01, + 9.1836e-01, 4.1466e-01, 7.6488e-01, 1.9997e-01, + 6.2792e-02, 8.6920e-02, 9.5924e-01, 4.0157e-01, + 8.0304e-01, 3.8374e-01, 2.7015e-04, 5.1118e-01, + 1.3370e-01, 6.6907e-01, 4.7691e-01, 7.7245e-01, + 5.8945e-01, 3.4755e-01, 5.4556e-01, 9.6137e-01, + 7.3983e-01, 9.6435e-01, 9.4132e-01, 4.9153e-02, + 7.6688e-01, 2.2584e-01, 4.4546e-02, 6.3856e-01, + 7.3561e-01, 4.3606e-01, 4.3727e-01, 7.6287e-01, + 8.5471e-01, 
7.1309e-01, 4.0321e-02, 1.7754e-01, + 2.6178e-01, 8.0163e-01, 9.6040e-01, 1.2002e-01, + 7.4781e-01, 5.6186e-01, 7.0276e-01, 9.6832e-01, + 6.8920e-01, 3.6408e-01, 3.2048e-02, 6.5097e-01, + 7.2571e-01, 2.4505e-01, 3.3514e-01, 1.1693e-01, + 1.6097e-01, 4.4042e-02, 8.9815e-01, 5.4774e-01, + 7.4648e-01, 2.7651e-01, 6.3963e-01, 2.2376e-01, + 9.9665e-01, 4.3024e-01, 9.1693e-01, 9.0810e-02, + 4.0292e-01, 5.0369e-01, 5.7128e-01, 8.9241e-01, + 4.8618e-01, 7.0729e-01, 5.3213e-01, 6.6612e-01, + 4.4699e-02, 8.1988e-01, 5.0857e-03, 1.2224e-01, + 8.0655e-01, 4.3952e-02, 7.5237e-01, 2.0445e-02, + 9.4240e-01, 3.4627e-02, 6.8566e-01, 1.1342e-01, + 8.8814e-01, 8.9913e-02, 1.3972e-01, 2.2246e-01, + 5.2701e-01, 8.4407e-01, 2.7994e-01, 2.4114e-01, + 2.5241e-01, 1.1663e-01, 2.3689e-01, 9.0715e-01, + 6.4255e-02, 1.3441e-01, 4.9780e-01, 3.6677e-01, + 8.9300e-01, 4.8662e-01, 1.8815e-01, 7.1599e-01, + 5.4652e-01, 3.8971e-01, 4.1855e-01, 8.9766e-01, + 2.8787e-02, 1.4484e-01, 7.7527e-01, 1.4256e-01, + 4.3971e-02, 6.5700e-01, 9.6517e-01, 3.8399e-02, + 3.5927e-01, 9.8146e-01, 4.7971e-01, 6.2689e-01, + 7.9814e-01, 3.1719e-01, 8.8887e-01, 2.8431e-01, + 3.9627e-01, 6.4465e-01, 3.1542e-01, 8.2727e-01, + 4.0962e-01, 9.5078e-01, 5.7378e-01, 6.4573e-01, + 6.1080e-01, 7.6026e-01, 4.0688e-01, 6.8706e-01, + 9.7360e-01, 1.9641e-01, 3.1404e-01, 8.7030e-01, + 8.7519e-01, 9.1294e-01, 3.1332e-01, 8.2378e-01, + 7.6002e-01, 6.6759e-01, 7.5209e-01, 2.5197e-01, + 5.8185e-01, 8.3603e-01, 9.8402e-01, 7.7933e-01, + 9.6031e-01, 4.1020e-02, 2.1551e-01, 4.3735e-01, + 3.5324e-01, 4.5137e-01, 9.4070e-01, 2.0743e-01, + 4.8632e-01, 2.7466e-01, 9.7228e-01, 2.2309e-01, + 3.0599e-01, 6.2066e-01, 2.0469e-01, 4.2748e-01, + 1.2112e-01, 8.0833e-01, 4.4980e-01, 3.7398e-01, + 6.6951e-01, 9.3775e-01, 9.2812e-01, 9.7717e-01, + 2.0515e-01, 4.0684e-02, 2.4987e-01, 6.5034e-01, + 2.1947e-01, 2.8376e-01, 9.7474e-01, 5.3040e-02, + 2.3000e-01, 7.9019e-01, 9.9718e-01, 8.0065e-01, + 4.8460e-01, 6.0165e-01, 2.5735e-01, 3.0706e-01, + 8.5565e-01, 6.4195e-01, 1.9554e-01, 6.8642e-01, + 5.4102e-01, 6.2865e-01, 2.5232e-01, 2.7384e-01, + 1.5202e-01, 2.6178e-02, 2.2088e-02, 3.0220e-01, + 7.0468e-01, 6.6998e-01, 7.4849e-01, 4.2361e-01, + 3.0687e-01, 8.9260e-01, 1.7687e-01, 4.3603e-01, + 2.6202e-01, 1.5569e-01, 7.9666e-01, 3.1539e-01, + 3.2757e-01, 4.1810e-01, 5.8467e-01, 4.3041e-01, + 5.2110e-01, 9.5297e-01, 2.8793e-01, 4.2003e-03, + 7.5721e-01, 6.8193e-01, 3.9603e-02, 2.7535e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.6919, 0.2407, 0.4104, ..., 0.1381, 0.4123, 0.9576]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.479444980621338 seconds + +tensor(indices=tensor([[9172, 2429, 6353, ..., 7714, 7689, 8820], + [7688, 4648, 6706, ..., 1606, 9148, 2345]]), + values=tensor([4.8366e-01, 3.9355e-01, 2.6159e-01, 5.8335e-02, + 7.9376e-01, 6.5605e-01, 3.2307e-01, 8.4396e-01, + 6.2539e-01, 5.0905e-01, 4.4674e-01, 4.3477e-01, + 7.3522e-01, 6.7016e-01, 2.8906e-01, 6.1057e-01, + 7.1029e-01, 1.8830e-01, 9.1667e-01, 1.6986e-01, + 6.6532e-01, 5.8859e-01, 4.5671e-01, 6.8187e-01, + 1.7162e-01, 8.7160e-01, 9.4088e-01, 4.4511e-01, + 7.0663e-02, 1.4066e-01, 2.6662e-01, 2.4471e-01, + 4.3443e-01, 8.0422e-01, 5.9526e-01, 7.0805e-01, + 9.0799e-01, 6.6537e-01, 7.0350e-01, 4.4167e-02, + 9.3859e-01, 1.8677e-01, 2.4750e-01, 1.4583e-01, + 2.2921e-01, 9.8350e-01, 2.7472e-01, 6.4488e-01, + 3.2366e-01, 3.3439e-01, 2.6999e-02, 9.7001e-01, + 4.0220e-01, 9.5746e-01, 2.1405e-01, 
6.4061e-02, + 6.2274e-01, 4.9218e-01, 9.1948e-01, 6.1329e-01, + 8.2108e-01, 4.9761e-01, 4.9290e-01, 7.3546e-01, + 3.9070e-01, 8.7681e-02, 9.6972e-01, 4.0275e-01, + 5.1401e-01, 7.9433e-01, 8.6020e-01, 2.6074e-01, + 7.3241e-01, 7.0533e-01, 2.6028e-01, 8.1899e-01, + 8.4613e-01, 9.2740e-01, 6.7538e-01, 7.5467e-01, + 4.5208e-01, 1.2277e-01, 7.6667e-01, 7.4566e-01, + 2.3237e-01, 9.6943e-01, 8.2722e-02, 2.4302e-01, + 7.2879e-01, 6.3808e-01, 1.3969e-01, 6.3873e-01, + 7.3513e-01, 2.2494e-01, 4.6020e-02, 5.0965e-01, + 2.2016e-01, 3.9471e-01, 5.4120e-01, 8.1608e-02, + 2.4547e-01, 3.0715e-01, 4.3574e-01, 2.9563e-01, + 7.0165e-01, 2.5212e-01, 6.4378e-01, 4.9896e-01, + 3.9709e-01, 5.1103e-01, 4.3351e-01, 4.7314e-01, + 4.8033e-01, 7.1991e-01, 5.2059e-01, 5.1962e-01, + 3.1207e-01, 8.6598e-01, 8.0973e-01, 9.1107e-01, + 7.7134e-01, 9.6560e-02, 6.6471e-01, 6.9354e-01, + 7.8067e-01, 3.4045e-01, 5.8287e-01, 9.2695e-01, + 9.1201e-01, 9.6246e-01, 9.4255e-01, 8.5062e-01, + 1.4343e-01, 9.9676e-02, 9.1028e-01, 2.8288e-01, + 3.4679e-01, 4.3676e-01, 5.2874e-03, 8.8073e-02, + 4.8823e-02, 8.3610e-01, 3.0562e-01, 2.0389e-01, + 5.9459e-01, 5.6710e-01, 1.7357e-01, 5.8366e-01, + 6.2691e-01, 6.0607e-01, 2.3966e-01, 8.7998e-01, + 5.6240e-01, 3.7765e-02, 1.0355e-01, 3.3334e-01, + 9.3467e-01, 3.7856e-01, 6.2793e-02, 8.3406e-02, + 7.8200e-01, 2.6424e-02, 9.6791e-01, 4.8640e-01, + 3.4668e-02, 3.8390e-01, 1.7441e-01, 5.5482e-01, + 9.5989e-01, 7.8101e-01, 7.5774e-02, 2.1170e-01, + 1.6418e-01, 7.0756e-02, 5.4844e-02, 2.3706e-01, + 4.7321e-01, 6.6053e-01, 5.6799e-02, 6.7572e-01, + 7.4163e-01, 3.8591e-01, 6.6393e-01, 9.4134e-01, + 7.7028e-01, 4.6113e-01, 3.0666e-02, 4.1197e-01, + 7.5174e-01, 6.7355e-01, 6.3323e-01, 6.4926e-01, + 6.6143e-01, 6.5110e-01, 6.7032e-01, 4.1882e-01, + 9.7379e-01, 3.7505e-01, 5.7684e-01, 8.6197e-02, + 2.0046e-01, 2.9844e-01, 4.5758e-01, 8.7198e-01, + 4.7791e-01, 9.6753e-01, 7.1265e-01, 1.3496e-01, + 4.5119e-02, 9.4931e-01, 9.9138e-01, 3.6250e-01, + 8.2610e-01, 7.4795e-02, 2.8908e-01, 5.7194e-01, + 2.3893e-01, 7.0059e-01, 1.6897e-01, 1.4648e-01, + 7.6706e-02, 7.5732e-01, 7.1938e-01, 6.7577e-01, + 4.4335e-01, 1.1199e-01, 4.3027e-01, 6.6742e-01, + 5.1258e-01, 3.7022e-01, 9.4545e-01, 8.1899e-01, + 1.9034e-01, 8.6603e-01, 6.7967e-01, 9.3142e-01, + 3.9865e-01, 1.3182e-01, 8.7938e-01, 9.5611e-01, + 4.4102e-01, 1.8913e-01, 5.5603e-01, 5.3075e-01, + 1.8889e-02, 7.0653e-04, 3.9023e-01, 4.2248e-02, + 4.3166e-01, 9.3899e-01, 2.3071e-01, 2.7792e-01, + 4.1888e-01, 7.5847e-03, 2.4539e-01, 3.2887e-01, + 9.7236e-01, 7.8483e-01, 2.6044e-01, 7.5927e-01, + 9.1136e-01, 6.2480e-01, 5.5890e-01, 2.9877e-01, + 9.2552e-01, 5.1656e-01, 7.7842e-01, 4.8503e-01, + 2.8569e-01, 2.9511e-01, 7.9276e-01, 8.6266e-01, + 8.7156e-01, 7.5421e-01, 9.5567e-01, 7.7697e-01, + 5.7681e-01, 6.7453e-01, 4.7668e-02, 1.5258e-01, + 6.7824e-01, 1.2033e-01, 4.8097e-01, 4.9992e-01, + 2.3426e-01, 9.4416e-01, 3.9313e-01, 2.8711e-01, + 6.1285e-01, 8.6592e-01, 4.9012e-01, 5.3029e-01, + 6.0155e-01, 5.2056e-01, 3.3041e-01, 7.7049e-01, + 2.4367e-01, 6.8252e-01, 7.8802e-01, 7.0757e-01, + 6.1274e-01, 6.7679e-01, 9.8172e-01, 2.2015e-01, + 6.1708e-02, 1.3019e-01, 8.9863e-01, 2.2791e-01, + 6.9285e-01, 9.1611e-02, 6.4287e-01, 7.0090e-01, + 6.0467e-01, 3.5816e-01, 4.6181e-02, 5.8360e-01, + 9.8221e-01, 5.8765e-01, 7.4240e-01, 6.5850e-02, + 8.0175e-01, 1.5937e-01, 2.6052e-01, 2.3928e-01, + 5.6318e-01, 2.2281e-01, 9.1181e-01, 6.7775e-01, + 5.3528e-01, 5.1545e-01, 5.1387e-01, 2.4604e-01, + 2.9211e-01, 1.7920e-01, 8.2015e-01, 6.4112e-01, + 4.3519e-01, 7.6063e-01, 5.5332e-01, 
2.2743e-01, + 2.9349e-01, 5.8850e-01, 3.5769e-02, 4.5193e-01, + 3.1998e-01, 7.7707e-01, 3.3129e-02, 7.4088e-01, + 7.6528e-01, 5.5631e-01, 7.3546e-01, 3.7740e-01, + 7.6441e-01, 6.3968e-01, 5.7361e-01, 8.3172e-01, + 7.0719e-01, 8.5432e-02, 9.6624e-01, 8.8384e-01, + 4.8040e-01, 4.6951e-01, 6.3662e-01, 3.9071e-01, + 5.8382e-01, 5.3997e-02, 2.9334e-01, 7.3939e-01, + 5.8036e-02, 8.7515e-02, 2.0045e-01, 8.5240e-01, + 6.5693e-01, 7.0643e-01, 1.6919e-01, 6.0965e-01, + 9.0594e-01, 5.4865e-01, 5.0710e-01, 2.5384e-01, + 4.2834e-01, 7.4821e-01, 7.1738e-01, 6.5093e-01, + 3.6264e-01, 1.3919e-01, 9.5427e-01, 6.1267e-01, + 3.8076e-01, 5.2669e-01, 9.6300e-01, 9.2278e-01, + 2.4006e-01, 8.9546e-01, 4.9214e-01, 8.1677e-01, + 9.2016e-01, 8.4240e-01, 7.9755e-01, 6.4797e-01, + 4.6573e-01, 6.9013e-01, 9.1859e-01, 3.3628e-01, + 8.7033e-01, 8.5344e-01, 5.4119e-01, 8.8083e-01, + 7.8018e-01, 7.1882e-01, 3.2215e-01, 9.6905e-01, + 9.8826e-01, 2.0282e-04, 7.3931e-01, 1.7460e-01, + 9.5877e-01, 5.2985e-01, 3.3480e-01, 1.5497e-01, + 8.6819e-02, 9.9658e-01, 2.0924e-01, 4.5869e-01, + 7.9179e-01, 3.7743e-01, 5.7045e-01, 5.0963e-01, + 7.8285e-01, 7.9341e-01, 5.8662e-01, 7.5922e-01, + 7.9313e-01, 3.7007e-01, 8.4428e-01, 9.1531e-01, + 1.4746e-01, 2.9424e-01, 1.1936e-01, 5.4159e-01, + 5.1407e-01, 8.3861e-01, 2.3164e-02, 2.0412e-01, + 9.1715e-01, 8.8040e-01, 6.6586e-01, 6.2717e-01, + 2.9639e-01, 4.8296e-01, 9.0129e-01, 1.0753e-02, + 6.4312e-03, 1.9745e-01, 2.6278e-01, 8.2869e-01, + 2.6370e-01, 1.8432e-01, 7.9801e-01, 8.1826e-01, + 4.8748e-02, 9.1808e-01, 4.5492e-02, 2.1211e-01, + 5.5441e-01, 4.7292e-01, 7.4636e-01, 8.2844e-01, + 8.7312e-01, 8.0603e-01, 5.7595e-01, 7.9347e-01, + 6.0872e-01, 1.5495e-01, 2.5539e-01, 8.2452e-01, + 3.2318e-01, 1.8774e-01, 8.6187e-01, 4.7792e-01, + 6.1444e-01, 1.5352e-01, 8.4322e-02, 3.5586e-02, + 9.0188e-01, 1.7482e-01, 5.2028e-01, 6.5634e-01, + 4.9285e-02, 4.4791e-01, 2.8668e-01, 7.5348e-01, + 3.1031e-04, 6.4549e-01, 2.7721e-01, 1.4701e-01, + 7.5656e-01, 7.8954e-01, 9.8003e-01, 6.3521e-02, + 2.3553e-01, 5.6783e-01, 8.6132e-01, 5.4903e-01, + 7.1377e-01, 6.3587e-02, 1.9102e-01, 2.6056e-01, + 3.1931e-01, 8.9053e-01, 4.1281e-01, 5.3931e-01, + 1.2814e-01, 5.8284e-01, 4.3902e-01, 3.9263e-01, + 9.9609e-01, 7.4829e-01, 3.2914e-01, 1.6642e-01, + 3.8965e-01, 2.9626e-01, 1.8320e-01, 2.6940e-01, + 3.2273e-01, 8.6447e-01, 5.4752e-02, 5.5482e-01, + 4.6625e-01, 2.1042e-01, 1.7733e-01, 2.1574e-01, + 3.2090e-01, 9.3050e-01, 3.0749e-01, 4.6511e-01, + 8.9439e-01, 7.0167e-01, 9.0285e-01, 6.1802e-01, + 4.1629e-02, 9.8434e-01, 1.5911e-01, 3.7657e-01, + 4.4523e-01, 5.5572e-01, 7.1956e-01, 3.6162e-01, + 9.4532e-01, 9.7088e-02, 2.2900e-01, 3.2226e-01, + 4.7835e-01, 3.5746e-01, 1.7574e-02, 9.7349e-01, + 6.6122e-01, 9.2451e-01, 9.8805e-01, 7.3399e-01, + 1.4402e-01, 1.2531e-01, 4.7684e-01, 8.2450e-01, + 9.1720e-01, 4.5828e-01, 7.8499e-01, 2.2210e-01, + 7.3784e-01, 9.1802e-01, 7.2350e-01, 8.2621e-01, + 4.6411e-01, 6.7975e-01, 3.1113e-01, 9.6243e-01, + 3.8949e-02, 2.1081e-01, 4.5029e-01, 9.7497e-01, + 6.6687e-01, 7.7118e-01, 8.0086e-01, 1.1061e-01, + 4.5056e-01, 3.5519e-01, 9.1050e-01, 3.7707e-01, + 3.6540e-01, 4.1732e-01, 3.8517e-01, 4.4625e-01, + 6.1686e-01, 9.2023e-01, 8.3373e-01, 1.6176e-02, + 2.1617e-01, 7.6827e-01, 6.7185e-01, 7.7670e-01, + 4.1237e-01, 8.9243e-01, 4.0012e-02, 3.4428e-01, + 9.1232e-01, 7.7951e-02, 4.2551e-01, 3.6695e-01, + 2.5261e-01, 3.6701e-01, 8.3455e-01, 3.3580e-02, + 6.1940e-01, 2.7511e-01, 6.9744e-01, 1.6603e-01, + 2.1142e-02, 6.7177e-01, 3.6917e-01, 6.1120e-02, + 8.1283e-01, 2.2435e-01, 2.7523e-01, 
2.2864e-01, + 6.1958e-01, 2.2791e-01, 9.7316e-01, 5.3317e-01, + 8.1484e-01, 8.0708e-01, 1.1810e-01, 9.8123e-01, + 7.1224e-01, 7.5814e-01, 6.5290e-01, 1.7947e-01, + 2.5657e-02, 6.2436e-01, 1.8969e-01, 7.1058e-01, + 5.9308e-01, 9.8643e-01, 8.4550e-01, 2.4804e-01, + 3.8073e-01, 3.1296e-01, 3.9128e-02, 3.3224e-01, + 1.8456e-01, 5.3023e-01, 6.9432e-02, 5.3587e-01, + 1.9614e-01, 7.3887e-01, 5.7570e-01, 5.4720e-01, + 2.5498e-03, 4.4351e-01, 7.7455e-01, 4.9992e-01, + 6.6660e-01, 8.7419e-01, 8.9134e-02, 3.1002e-01, + 3.0193e-01, 5.6203e-01, 3.7269e-01, 7.7426e-01, + 3.5420e-02, 7.8405e-01, 2.0538e-01, 3.9263e-01, + 1.4023e-01, 1.8331e-01, 7.1382e-01, 5.9417e-01, + 6.5223e-01, 5.3873e-01, 2.6739e-01, 5.4240e-01, + 1.7510e-01, 1.4232e-01, 8.7694e-01, 2.3087e-01, + 6.4513e-01, 6.4312e-01, 3.1202e-01, 9.9481e-01, + 6.9916e-02, 6.8821e-01, 3.1687e-01, 7.5978e-01, + 3.5627e-01, 9.4563e-01, 3.5964e-01, 2.0506e-01, + 4.5446e-01, 6.9075e-01, 3.0355e-01, 3.0531e-02, + 1.3375e-01, 9.4252e-01, 4.3528e-01, 3.0526e-01, + 4.6327e-01, 4.6100e-01, 5.3258e-01, 3.5702e-01, + 6.0418e-01, 5.1673e-01, 1.3892e-01, 2.6807e-01, + 5.9205e-01, 4.0025e-01, 3.6834e-01, 4.6333e-02, + 1.7861e-01, 5.5707e-02, 6.3862e-01, 5.6881e-02, + 2.1076e-01, 3.8528e-01, 2.1007e-01, 1.1846e-01, + 4.6632e-01, 6.5782e-01, 2.6916e-01, 4.7947e-01, + 3.2009e-01, 7.1530e-01, 5.2339e-01, 3.0106e-01, + 9.4899e-01, 1.1394e-01, 2.7710e-01, 9.2652e-01, + 4.2162e-01, 3.0528e-01, 6.9020e-01, 7.2013e-01, + 9.4560e-01, 6.5089e-01, 4.8179e-01, 1.9096e-01, + 6.5947e-01, 2.7371e-01, 8.5785e-01, 3.4400e-01, + 7.5307e-01, 2.5075e-01, 3.7516e-01, 3.0734e-01, + 8.7404e-01, 5.9911e-01, 8.3048e-01, 8.0707e-01, + 5.4730e-01, 3.6593e-01, 3.3340e-01, 7.3418e-01, + 5.0639e-01, 3.5549e-01, 1.6334e-01, 1.3804e-01, + 1.5397e-01, 6.6612e-02, 5.0566e-01, 7.2011e-01, + 6.8673e-01, 9.2644e-01, 3.3890e-01, 1.0703e-01, + 9.1836e-01, 4.1466e-01, 7.6488e-01, 1.9997e-01, + 6.2792e-02, 8.6920e-02, 9.5924e-01, 4.0157e-01, + 8.0304e-01, 3.8374e-01, 2.7015e-04, 5.1118e-01, + 1.3370e-01, 6.6907e-01, 4.7691e-01, 7.7245e-01, + 5.8945e-01, 3.4755e-01, 5.4556e-01, 9.6137e-01, + 7.3983e-01, 9.6435e-01, 9.4132e-01, 4.9153e-02, + 7.6688e-01, 2.2584e-01, 4.4546e-02, 6.3856e-01, + 7.3561e-01, 4.3606e-01, 4.3727e-01, 7.6287e-01, + 8.5471e-01, 7.1309e-01, 4.0321e-02, 1.7754e-01, + 2.6178e-01, 8.0163e-01, 9.6040e-01, 1.2002e-01, + 7.4781e-01, 5.6186e-01, 7.0276e-01, 9.6832e-01, + 6.8920e-01, 3.6408e-01, 3.2048e-02, 6.5097e-01, + 7.2571e-01, 2.4505e-01, 3.3514e-01, 1.1693e-01, + 1.6097e-01, 4.4042e-02, 8.9815e-01, 5.4774e-01, + 7.4648e-01, 2.7651e-01, 6.3963e-01, 2.2376e-01, + 9.9665e-01, 4.3024e-01, 9.1693e-01, 9.0810e-02, + 4.0292e-01, 5.0369e-01, 5.7128e-01, 8.9241e-01, + 4.8618e-01, 7.0729e-01, 5.3213e-01, 6.6612e-01, + 4.4699e-02, 8.1988e-01, 5.0857e-03, 1.2224e-01, + 8.0655e-01, 4.3952e-02, 7.5237e-01, 2.0445e-02, + 9.4240e-01, 3.4627e-02, 6.8566e-01, 1.1342e-01, + 8.8814e-01, 8.9913e-02, 1.3972e-01, 2.2246e-01, + 5.2701e-01, 8.4407e-01, 2.7994e-01, 2.4114e-01, + 2.5241e-01, 1.1663e-01, 2.3689e-01, 9.0715e-01, + 6.4255e-02, 1.3441e-01, 4.9780e-01, 3.6677e-01, + 8.9300e-01, 4.8662e-01, 1.8815e-01, 7.1599e-01, + 5.4652e-01, 3.8971e-01, 4.1855e-01, 8.9766e-01, + 2.8787e-02, 1.4484e-01, 7.7527e-01, 1.4256e-01, + 4.3971e-02, 6.5700e-01, 9.6517e-01, 3.8399e-02, + 3.5927e-01, 9.8146e-01, 4.7971e-01, 6.2689e-01, + 7.9814e-01, 3.1719e-01, 8.8887e-01, 2.8431e-01, + 3.9627e-01, 6.4465e-01, 3.1542e-01, 8.2727e-01, + 4.0962e-01, 9.5078e-01, 5.7378e-01, 6.4573e-01, + 6.1080e-01, 7.6026e-01, 4.0688e-01, 
6.8706e-01, + 9.7360e-01, 1.9641e-01, 3.1404e-01, 8.7030e-01, + 8.7519e-01, 9.1294e-01, 3.1332e-01, 8.2378e-01, + 7.6002e-01, 6.6759e-01, 7.5209e-01, 2.5197e-01, + 5.8185e-01, 8.3603e-01, 9.8402e-01, 7.7933e-01, + 9.6031e-01, 4.1020e-02, 2.1551e-01, 4.3735e-01, + 3.5324e-01, 4.5137e-01, 9.4070e-01, 2.0743e-01, + 4.8632e-01, 2.7466e-01, 9.7228e-01, 2.2309e-01, + 3.0599e-01, 6.2066e-01, 2.0469e-01, 4.2748e-01, + 1.2112e-01, 8.0833e-01, 4.4980e-01, 3.7398e-01, + 6.6951e-01, 9.3775e-01, 9.2812e-01, 9.7717e-01, + 2.0515e-01, 4.0684e-02, 2.4987e-01, 6.5034e-01, + 2.1947e-01, 2.8376e-01, 9.7474e-01, 5.3040e-02, + 2.3000e-01, 7.9019e-01, 9.9718e-01, 8.0065e-01, + 4.8460e-01, 6.0165e-01, 2.5735e-01, 3.0706e-01, + 8.5565e-01, 6.4195e-01, 1.9554e-01, 6.8642e-01, + 5.4102e-01, 6.2865e-01, 2.5232e-01, 2.7384e-01, + 1.5202e-01, 2.6178e-02, 2.2088e-02, 3.0220e-01, + 7.0468e-01, 6.6998e-01, 7.4849e-01, 4.2361e-01, + 3.0687e-01, 8.9260e-01, 1.7687e-01, 4.3603e-01, + 2.6202e-01, 1.5569e-01, 7.9666e-01, 3.1539e-01, + 3.2757e-01, 4.1810e-01, 5.8467e-01, 4.3041e-01, + 5.2110e-01, 9.5297e-01, 2.8793e-01, 4.2003e-03, + 7.5721e-01, 6.8193e-01, 3.9603e-02, 2.7535e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.6919, 0.2407, 0.4104, ..., 0.1381, 0.4123, 0.9576]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.479444980621338 seconds + +[16.04, 16.2, 16.12, 16.16, 16.16, 16.04, 16.2, 16.12, 16.16, 16.2] +[16.32, 16.44, 16.64, 18.8, 20.52, 21.28, 21.96, 21.96, 20.84, 20.84, 20.0, 19.88, 19.92, 19.96] +14.562681198120117 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 326348, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.479444980621338, 'TIME_S_1KI': 0.03211125847445469, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 268.45111671447756, 'W': 18.4341820755598} +[16.04, 16.2, 16.12, 16.16, 16.16, 16.04, 16.2, 16.12, 16.16, 16.2, 16.12, 16.16, 16.28, 16.52, 16.68, 16.72, 16.64, 16.88, 16.8, 16.68] +294.36 +14.718 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 326348, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.479444980621338, 'TIME_S_1KI': 0.03211125847445469, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 268.45111671447756, 'W': 18.4341820755598, 'J_1KI': 0.8225915792787992, 'W_1KI': 0.0564862725543279, 'W_D': 3.7161820755598, 'J_D': 54.11757484054569, 'W_D_1KI': 0.011387175884515304, 'J_D_1KI': 3.4892739911123416e-05} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..2f81ef8 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 88829, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.388752937316895, "TIME_S_1KI": 0.11695226713479714, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 247.13048832893372, "W": 18.320263868681366, "J_1KI": 2.7820924284741886, "W_1KI": 0.20624192401897315, "W_D": 
3.265263868681364, "J_D": 44.04665021061895, "W_D_1KI": 0.03675898488873413, "J_D_1KI": 0.0004138173894644106} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..c9fcfcb --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.0076372623443603516} + +tensor(indices=tensor([[6127, 8808, 7987, ..., 9863, 7301, 2093], + [7296, 2755, 3923, ..., 2397, 6103, 8152]]), + values=tensor([0.5892, 0.5981, 0.8168, ..., 0.7578, 0.3505, 0.3535]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.6753, 0.1273, 0.1843, ..., 0.6313, 0.9814, 0.0462]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.0076372623443603516 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 13748 -ss 10000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.6250598430633545} + +tensor(indices=tensor([[8545, 9615, 6776, ..., 4541, 7389, 1783], + [2725, 2801, 7177, ..., 4412, 1297, 2977]]), + values=tensor([0.4451, 0.0263, 0.6494, ..., 0.7710, 0.9307, 0.1457]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.6714, 0.0844, 0.0355, ..., 0.7557, 0.6199, 0.2211]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 1.6250598430633545 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 88829 -ss 10000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.388752937316895} + +tensor(indices=tensor([[7123, 9581, 502, ..., 8292, 1811, 1740], + [2309, 180, 1199, ..., 9453, 142, 9522]]), + values=tensor([0.2607, 0.4465, 0.9826, ..., 0.6284, 0.3000, 0.9777]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.8554, 0.1490, 0.6747, ..., 0.4174, 0.0830, 0.5143]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.388752937316895 seconds + +tensor(indices=tensor([[7123, 9581, 502, ..., 8292, 1811, 1740], + [2309, 180, 1199, ..., 9453, 142, 9522]]), + values=tensor([0.2607, 0.4465, 0.9826, ..., 0.6284, 0.3000, 0.9777]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.8554, 0.1490, 0.6747, ..., 0.4174, 0.0830, 0.5143]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 
100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.388752937316895 seconds + +[16.72, 16.84, 16.88, 17.0, 17.04, 16.96, 16.88, 16.84, 16.84, 16.6] +[16.6, 16.48, 16.6, 18.72, 19.96, 21.64, 22.24, 22.2, 21.28, 19.92, 20.08, 20.04, 19.88] +13.489461183547974 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 88829, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.388752937316895, 'TIME_S_1KI': 0.11695226713479714, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 247.13048832893372, 'W': 18.320263868681366} +[16.72, 16.84, 16.88, 17.0, 17.04, 16.96, 16.88, 16.84, 16.84, 16.6, 16.2, 16.0, 16.12, 16.12, 16.24, 16.64, 17.12, 17.2, 17.12, 17.0] +301.1 +15.055000000000001 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 88829, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.388752937316895, 'TIME_S_1KI': 0.11695226713479714, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 247.13048832893372, 'W': 18.320263868681366, 'J_1KI': 2.7820924284741886, 'W_1KI': 0.20624192401897315, 'W_D': 3.265263868681364, 'J_D': 44.04665021061895, 'W_D_1KI': 0.03675898488873413, 'J_D_1KI': 0.0004138173894644106} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..8146996 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.447106838226318, "TIME_S_1KI": 1244.7106838226318, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 545.6883810901641, "W": 21.69642899467973, "J_1KI": 54568.838109016404, "W_1KI": 2169.642899467973, "W_D": 6.708428994679732, "J_D": 168.72416002941117, "W_D_1KI": 670.8428994679731, "J_D_1KI": 67084.28994679732} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..83c964d --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.447106838226318} + +tensor(indices=tensor([[430864, 114536, 162035, ..., 20553, 101395, 344727], + [252328, 491190, 71567, ..., 148343, 25625, 273876]]), + values=tensor([0.9753, 0.6637, 0.8754, ..., 0.0468, 0.0692, 0.0504]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.8450, 0.9324, 0.0414, ..., 0.3360, 0.7077, 0.0366]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 
12.447106838226318 seconds + +tensor(indices=tensor([[430864, 114536, 162035, ..., 20553, 101395, 344727], + [252328, 491190, 71567, ..., 148343, 25625, 273876]]), + values=tensor([0.9753, 0.6637, 0.8754, ..., 0.0468, 0.0692, 0.0504]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.8450, 0.9324, 0.0414, ..., 0.3360, 0.7077, 0.0366]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 12.447106838226318 seconds + +[16.44, 16.24, 16.24, 16.44, 16.28, 16.44, 16.6, 16.52, 16.52, 16.48] +[16.68, 16.52, 16.84, 18.72, 19.56, 21.64, 23.64, 24.28, 24.56, 24.84, 25.36, 25.56, 25.72, 25.6, 25.56, 25.2, 25.24, 25.24, 25.24, 25.28, 25.64, 25.96, 25.08, 24.68] +25.151068925857544 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.447106838226318, 'TIME_S_1KI': 1244.7106838226318, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 545.6883810901641, 'W': 21.69642899467973} +[16.44, 16.24, 16.24, 16.44, 16.28, 16.44, 16.6, 16.52, 16.52, 16.48, 16.64, 16.8, 16.84, 17.08, 17.08, 16.96, 16.96, 16.84, 16.8, 16.68] +299.76 +14.988 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.447106838226318, 'TIME_S_1KI': 1244.7106838226318, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 545.6883810901641, 'W': 21.69642899467973, 'J_1KI': 54568.838109016404, 'W_1KI': 2169.642899467973, 'W_D': 6.708428994679732, 'J_D': 168.72416002941117, 'W_D_1KI': 670.8428994679731, 'J_D_1KI': 67084.28994679732} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..74ba008 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 86, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.337146759033203, "TIME_S_1KI": 120.19938091899074, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 307.06461547851563, "W": 20.99871927741268, "J_1KI": 3570.5187846339027, "W_1KI": 244.1711543885195, "W_D": 5.908719277412679, "J_D": 86.40329864501958, "W_D_1KI": 68.70603810944975, "J_D_1KI": 798.9074198773227} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..3348752 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, 
"TIME_S": 1.2120747566223145} + +tensor(indices=tensor([[413059, 275540, 280559, ..., 345842, 306728, 411449], + [ 59667, 76449, 374923, ..., 99306, 177534, 378784]]), + values=tensor([0.7643, 0.0523, 0.8887, ..., 0.6501, 0.8600, 0.0503]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.6145, 0.8452, 0.3319, ..., 0.1013, 0.5028, 0.4698]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 1.2120747566223145 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 86 -ss 500000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.337146759033203} + +tensor(indices=tensor([[286796, 427387, 167769, ..., 427116, 489937, 240863], + [ 4663, 101443, 293775, ..., 251562, 248767, 123135]]), + values=tensor([0.9410, 0.8203, 0.0498, ..., 0.3422, 0.6502, 0.9382]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4402, 0.2612, 0.5607, ..., 0.2898, 0.5426, 0.0715]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.337146759033203 seconds + +tensor(indices=tensor([[286796, 427387, 167769, ..., 427116, 489937, 240863], + [ 4663, 101443, 293775, ..., 251562, 248767, 123135]]), + values=tensor([0.9410, 0.8203, 0.0498, ..., 0.3422, 0.6502, 0.9382]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4402, 0.2612, 0.5607, ..., 0.2898, 0.5426, 0.0715]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.337146759033203 seconds + +[16.96, 16.88, 16.84, 16.56, 16.56, 16.36, 16.44, 16.48, 16.4, 16.52] +[16.52, 16.64, 17.48, 18.6, 20.2, 21.76, 23.6, 24.48, 25.52, 25.44, 25.44, 25.44, 25.68, 25.4] +14.623016357421875 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 86, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.337146759033203, 'TIME_S_1KI': 120.19938091899074, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 307.06461547851563, 'W': 20.99871927741268} +[16.96, 16.88, 16.84, 16.56, 16.56, 16.36, 16.44, 16.48, 16.4, 16.52, 16.56, 16.84, 16.76, 16.88, 17.0, 17.0, 17.0, 17.08, 17.2, 17.0] +301.8 +15.09 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 86, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.337146759033203, 'TIME_S_1KI': 120.19938091899074, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 307.06461547851563, 'W': 20.99871927741268, 'J_1KI': 3570.5187846339027, 'W_1KI': 244.1711543885195, 'W_D': 5.908719277412679, 'J_D': 86.40329864501958, 'W_D_1KI': 68.70603810944975, 'J_D_1KI': 798.9074198773227} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..366d210 --- /dev/null +++ 
b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 17, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.561193466186523, "TIME_S_1KI": 621.2466744815603, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 424.08064146041875, "W": 22.53224837245354, "J_1KI": 24945.920085906982, "W_1KI": 1325.4263748502083, "W_D": 7.344248372453542, "J_D": 138.22648806953435, "W_D_1KI": 432.014610144326, "J_D_1KI": 25412.624126136823} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..b494b3e --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,124 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.200084686279297} + +tensor(indices=tensor([[ 38943, 130389, 282474, ..., 181276, 331935, 106414], + [ 62341, 307141, 341558, ..., 358880, 275485, 117212]]), + values=tensor([0.6945, 0.8406, 0.9177, ..., 0.2544, 0.6321, 0.6753]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.6559, 0.1148, 0.8235, ..., 0.0523, 0.8557, 0.6638]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 6.200084686279297 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 500000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.90022897720337} + +tensor(indices=tensor([[ 7056, 281493, 312685, ..., 124538, 355567, 365993], + [ 24191, 147681, 30359, ..., 194020, 997, 67871]]), + values=tensor([0.4372, 0.5882, 0.9665, ..., 0.1024, 0.5821, 0.6790]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.6773, 0.0865, 0.2432, ..., 0.7077, 0.6085, 0.1452]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 9.90022897720337 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 500000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.911021709442139} + +tensor(indices=tensor([[452767, 276230, 375881, ..., 357348, 440744, 112902], + [ 44901, 176029, 347663, ..., 109977, 439140, 10880]]), + values=tensor([0.8286, 0.7086, 0.1489, ..., 0.0108, 0.0113, 0.5191]), + size=(500000, 500000), 
nnz=12500000, layout=torch.sparse_coo) +tensor([0.3597, 0.2504, 0.1711, ..., 0.3031, 0.9212, 0.7136]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 9.911021709442139 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 500000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.913240909576416} + +tensor(indices=tensor([[185837, 478235, 192067, ..., 436864, 223581, 238467], + [102078, 256777, 216250, ..., 93751, 177263, 214786]]), + values=tensor([0.4875, 0.8082, 0.8860, ..., 0.5242, 0.4532, 0.8359]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.8159, 0.9913, 0.9862, ..., 0.0668, 0.3126, 0.3720]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 9.913240909576416 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 500000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.865718841552734} + +tensor(indices=tensor([[297414, 210281, 479021, ..., 410872, 301887, 837], + [263931, 210728, 170331, ..., 133992, 34240, 234909]]), + values=tensor([0.0802, 0.4449, 0.0504, ..., 0.0813, 0.4872, 0.6997]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.1808, 0.1281, 0.4310, ..., 0.1208, 0.7365, 0.0946]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 9.865718841552734 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 17 -ss 500000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.561193466186523} + +tensor(indices=tensor([[ 94404, 107750, 145888, ..., 95623, 193626, 62994], + [ 12022, 181335, 9773, ..., 110825, 265777, 262171]]), + values=tensor([0.4318, 0.9373, 0.6419, ..., 0.7476, 0.7496, 0.4614]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.1637, 0.1602, 0.4619, ..., 0.1342, 0.9467, 0.4462]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.561193466186523 seconds + +tensor(indices=tensor([[ 94404, 107750, 145888, ..., 95623, 193626, 62994], + [ 12022, 181335, 9773, ..., 110825, 265777, 262171]]), + values=tensor([0.4318, 0.9373, 0.6419, ..., 0.7476, 0.7496, 0.4614]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.1637, 0.1602, 0.4619, ..., 0.1342, 0.9467, 0.4462]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 
250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.561193466186523 seconds + +[16.52, 16.6, 16.84, 16.8, 17.12, 16.84, 16.84, 16.48, 16.48, 16.4] +[16.4, 16.44, 16.6, 20.6, 22.6, 23.92, 26.28, 25.12, 25.88, 26.16, 26.08, 25.8, 25.6, 25.36, 25.32, 25.6, 25.6, 24.92] +18.82105302810669 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.561193466186523, 'TIME_S_1KI': 621.2466744815603, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 424.08064146041875, 'W': 22.53224837245354} +[16.52, 16.6, 16.84, 16.8, 17.12, 16.84, 16.84, 16.48, 16.48, 16.4, 17.2, 17.2, 17.12, 17.28, 17.08, 16.96, 16.96, 16.96, 16.8, 16.68] +303.76 +15.187999999999999 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.561193466186523, 'TIME_S_1KI': 621.2466744815603, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 424.08064146041875, 'W': 22.53224837245354, 'J_1KI': 24945.920085906982, 'W_1KI': 1325.4263748502083, 'W_D': 7.344248372453542, 'J_D': 138.22648806953435, 'W_D_1KI': 432.014610144326, 'J_D_1KI': 25412.624126136823} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..17a88bd --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1804, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.599600553512573, "TIME_S_1KI": 5.875610062922712, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 291.9548739528656, "W": 20.05641515041327, "J_1KI": 161.8375132776417, "W_1KI": 11.117746757435294, "W_D": 5.124415150413267, "J_D": 74.59448600864411, "W_D_1KI": 2.840584894907576, "J_D_1KI": 1.5746036002813613} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..e769a27 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0649104118347168} + +tensor(indices=tensor([[ 7674, 26050, 38285, ..., 25570, 35382, 9115], + [30244, 34831, 44807, ..., 28351, 12267, 20355]]), + values=tensor([0.9990, 0.9847, 0.8367, ..., 0.5627, 0.0353, 0.8285]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.9681, 0.8462, 0.2277, ..., 0.4222, 0.7770, 0.3767]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.0649104118347168 
seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1617 -ss 50000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.406485319137573} + +tensor(indices=tensor([[22492, 2338, 29813, ..., 7054, 15843, 19628], + [48955, 4502, 10187, ..., 8386, 19620, 25080]]), + values=tensor([0.1497, 0.6214, 0.4653, ..., 0.8321, 0.7345, 0.1929]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.7966, 0.4220, 0.7998, ..., 0.4239, 0.8321, 0.0947]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 9.406485319137573 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1804 -ss 50000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.599600553512573} + +tensor(indices=tensor([[27812, 1388, 45818, ..., 45254, 27053, 45601], + [39267, 35335, 18688, ..., 29348, 14095, 30641]]), + values=tensor([0.4818, 0.1700, 0.8546, ..., 0.1770, 0.0458, 0.6666]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.7147, 0.3916, 0.0975, ..., 0.4132, 0.5805, 0.9538]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.599600553512573 seconds + +tensor(indices=tensor([[27812, 1388, 45818, ..., 45254, 27053, 45601], + [39267, 35335, 18688, ..., 29348, 14095, 30641]]), + values=tensor([0.4818, 0.1700, 0.8546, ..., 0.1770, 0.0458, 0.6666]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.7147, 0.3916, 0.0975, ..., 0.4132, 0.5805, 0.9538]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.599600553512573 seconds + +[16.64, 16.36, 16.36, 16.36, 16.36, 16.16, 16.52, 16.84, 16.8, 16.92] +[16.8, 16.68, 19.8, 21.48, 23.4, 24.08, 24.92, 22.8, 22.8, 22.0, 20.96, 21.12, 21.24, 21.08] +14.556682825088501 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1804, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.599600553512573, 'TIME_S_1KI': 5.875610062922712, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 291.9548739528656, 'W': 20.05641515041327} +[16.64, 16.36, 16.36, 16.36, 16.36, 16.16, 16.52, 16.84, 16.8, 16.92, 16.6, 16.92, 16.92, 16.84, 16.76, 16.48, 16.48, 16.6, 16.6, 16.4] +298.64000000000004 +14.932000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1804, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.599600553512573, 'TIME_S_1KI': 5.875610062922712, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 291.9548739528656, 'W': 20.05641515041327, 'J_1KI': 161.8375132776417, 'W_1KI': 11.117746757435294, 'W_D': 
5.124415150413267, 'J_D': 74.59448600864411, 'W_D_1KI': 2.840584894907576, 'J_D_1KI': 1.5746036002813613} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..f559a71 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 184, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.258989095687866, "TIME_S_1KI": 55.75537552004275, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 287.2100740814209, "W": 19.641423027147898, "J_1KI": 1560.9243156598961, "W_1KI": 106.7468642779777, "W_D": 4.749423027147898, "J_D": 69.44925210285186, "W_D_1KI": 25.81208166928206, "J_D_1KI": 140.28305255044594} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..0a36e41 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.5693700313568115} + +tensor(indices=tensor([[34782, 36736, 38121, ..., 39882, 5476, 4755], + [24220, 22752, 31802, ..., 6182, 16900, 31426]]), + values=tensor([0.2777, 0.0186, 0.6201, ..., 0.0244, 0.4312, 0.0613]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.2795, 0.1118, 0.0812, ..., 0.7647, 0.2197, 0.1337]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 0.5693700313568115 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 184 -ss 50000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.258989095687866} + +tensor(indices=tensor([[41611, 47546, 2509, ..., 2674, 31150, 4261], + [18668, 47314, 31212, ..., 3668, 13241, 19028]]), + values=tensor([0.2195, 0.9962, 0.1489, ..., 0.8507, 0.3952, 0.2172]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7745, 0.8125, 0.1486, ..., 0.5225, 0.9580, 0.7923]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.258989095687866 seconds + +tensor(indices=tensor([[41611, 47546, 2509, ..., 2674, 31150, 4261], + [18668, 47314, 31212, ..., 3668, 13241, 19028]]), + values=tensor([0.2195, 0.9962, 0.1489, ..., 0.8507, 0.3952, 0.2172]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7745, 0.8125, 0.1486, ..., 0.5225, 0.9580, 0.7923]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 
50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.258989095687866 seconds + +[16.68, 17.0, 16.88, 16.68, 16.76, 16.84, 16.6, 16.68, 16.76, 16.56] +[16.2, 16.24, 16.24, 20.28, 21.44, 23.56, 24.84, 25.32, 22.0, 21.32, 21.2, 21.08, 21.52, 21.44] +14.622671365737915 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 184, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.258989095687866, 'TIME_S_1KI': 55.75537552004275, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 287.2100740814209, 'W': 19.641423027147898} +[16.68, 17.0, 16.88, 16.68, 16.76, 16.84, 16.6, 16.68, 16.76, 16.56, 15.84, 15.96, 15.96, 16.12, 16.28, 16.48, 16.6, 16.6, 16.76, 16.68] +297.84 +14.892 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 184, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.258989095687866, 'TIME_S_1KI': 55.75537552004275, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 287.2100740814209, 'W': 19.641423027147898, 'J_1KI': 1560.9243156598961, 'W_1KI': 106.7468642779777, 'W_D': 4.749423027147898, 'J_D': 69.44925210285186, 'W_D_1KI': 25.81208166928206, 'J_D_1KI': 140.28305255044594} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..12a9f16 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 18, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.369685411453247, "TIME_S_1KI": 576.0936339696249, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 449.13890366554256, "W": 19.515930971213347, "J_1KI": 24952.161314752364, "W_1KI": 1084.2183872896305, "W_D": 4.650930971213347, "J_D": 107.03635099530214, "W_D_1KI": 258.3850539562971, "J_D_1KI": 14354.725219794282} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..3f1acc5 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.72832727432251} + +tensor(indices=tensor([[35202, 36095, 49918, ..., 41797, 28954, 15408], + [24218, 14545, 2373, ..., 38841, 14111, 39259]]), + values=tensor([0.1919, 0.5466, 0.7826, ..., 0.2086, 0.5287, 0.5441]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6878, 0.6799, 0.3935, ..., 0.7783, 0.9505, 0.7595]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 5.72832727432251 seconds + +['apptainer', 'run', 
'--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 18 -ss 50000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.369685411453247} + +tensor(indices=tensor([[26107, 20811, 45076, ..., 38718, 16677, 14618], + [ 2364, 34098, 38083, ..., 28766, 27440, 46036]]), + values=tensor([0.6510, 0.0816, 0.6946, ..., 0.7920, 0.3845, 0.8190]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3845, 0.7282, 0.9205, ..., 0.6198, 0.5850, 0.9405]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.369685411453247 seconds + +tensor(indices=tensor([[26107, 20811, 45076, ..., 38718, 16677, 14618], + [ 2364, 34098, 38083, ..., 28766, 27440, 46036]]), + values=tensor([0.6510, 0.0816, 0.6946, ..., 0.7920, 0.3845, 0.8190]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3845, 0.7282, 0.9205, ..., 0.6198, 0.5850, 0.9405]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.369685411453247 seconds + +[16.84, 16.84, 16.84, 16.76, 16.76, 16.6, 16.6, 16.6, 16.44, 16.64] +[16.48, 16.68, 16.96, 18.12, 20.12, 21.2, 23.6, 23.92, 24.76, 23.96, 23.52, 22.32, 22.32, 21.36, 21.2, 21.08, 21.16, 21.32, 21.52, 21.88, 22.16, 22.84] +23.013962507247925 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 18, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.369685411453247, 'TIME_S_1KI': 576.0936339696249, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 449.13890366554256, 'W': 19.515930971213347} +[16.84, 16.84, 16.84, 16.76, 16.76, 16.6, 16.6, 16.6, 16.44, 16.64, 16.4, 16.32, 16.4, 16.28, 16.28, 16.4, 16.28, 16.4, 16.4, 16.32] +297.3 +14.865 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 18, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.369685411453247, 'TIME_S_1KI': 576.0936339696249, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 449.13890366554256, 'W': 19.515930971213347, 'J_1KI': 24952.161314752364, 'W_1KI': 1084.2183872896305, 'W_D': 4.650930971213347, 'J_D': 107.03635099530214, 'W_D_1KI': 258.3850539562971, 'J_D_1KI': 14354.725219794282} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..4160e7b --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 16590, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.300405502319336, "TIME_S_1KI": 0.6811576553537876, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 261.3678216838837, "W": 19.315252430576436, "J_1KI": 15.754540185888104, "W_1KI": 1.164270791475373, "W_D": 4.295252430576436, "J_D": 
58.121982893943795, "W_D_1KI": 0.25890611395879665, "J_D_1KI": 0.015606155151223428} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..8d0cf47 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.013538360595703125} + +tensor(indices=tensor([[43693, 35981, 11907, ..., 25448, 38997, 20750], + [37384, 41060, 13822, ..., 47305, 43246, 11711]]), + values=tensor([0.9030, 0.7487, 0.3286, ..., 0.6217, 0.1145, 0.5674]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0873, 0.8444, 0.0285, ..., 0.4774, 0.7606, 0.1091]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.013538360595703125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 7755 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.908142805099487} + +tensor(indices=tensor([[43674, 11867, 14510, ..., 47445, 25075, 11642], + [49508, 3695, 9632, ..., 21425, 37360, 26959]]), + values=tensor([0.7610, 0.0772, 0.4249, ..., 0.5807, 0.0606, 0.3961]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.3065, 0.4555, 0.7883, ..., 0.2923, 0.4688, 0.7205]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 4.908142805099487 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16590 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.300405502319336} + +tensor(indices=tensor([[36335, 42115, 41907, ..., 43095, 10060, 25481], + [47734, 21376, 3766, ..., 4567, 22675, 39134]]), + values=tensor([0.5233, 0.9781, 0.5001, ..., 0.9337, 0.8974, 0.0878]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9655, 0.2158, 0.9932, ..., 0.6362, 0.0888, 0.8904]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 11.300405502319336 seconds + +tensor(indices=tensor([[36335, 42115, 41907, ..., 43095, 10060, 25481], + [47734, 21376, 3766, ..., 4567, 22675, 39134]]), + values=tensor([0.5233, 0.9781, 0.5001, ..., 0.9337, 0.8974, 0.0878]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9655, 0.2158, 0.9932, ..., 0.6362, 0.0888, 0.8904]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 
50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 11.300405502319336 seconds + +[16.4, 16.4, 16.64, 16.84, 17.0, 17.08, 16.8, 16.92, 16.88, 16.56] +[16.6, 16.36, 19.48, 20.68, 22.12, 22.12, 23.0, 24.08, 21.6, 20.48, 20.76, 20.64, 21.08] +13.531680345535278 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 16590, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.300405502319336, 'TIME_S_1KI': 0.6811576553537876, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 261.3678216838837, 'W': 19.315252430576436} +[16.4, 16.4, 16.64, 16.84, 17.0, 17.08, 16.8, 16.92, 16.88, 16.56, 16.6, 16.8, 16.56, 16.52, 16.48, 16.52, 16.56, 16.6, 16.68, 16.68] +300.4 +15.02 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 16590, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.300405502319336, 'TIME_S_1KI': 0.6811576553537876, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 261.3678216838837, 'W': 19.315252430576436, 'J_1KI': 15.754540185888104, 'W_1KI': 1.164270791475373, 'W_D': 4.295252430576436, 'J_D': 58.121982893943795, 'W_D_1KI': 0.25890611395879665, 'J_D_1KI': 0.015606155151223428} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..2566adb --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3648, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.625307321548462, "TIME_S_1KI": 2.912639068406925, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 279.26390142440795, "W": 19.1978158057312, "J_1KI": 76.55260455712937, "W_1KI": 5.262559157272807, "W_D": 4.271815805731201, "J_D": 62.14060808515548, "W_D_1KI": 1.1710021397289476, "J_D_1KI": 0.32099839356604926} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..0a1510e --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.035521507263183594} + +tensor(indices=tensor([[ 867, 63, 2692, ..., 7603, 29002, 14945], + [14536, 32534, 33610, ..., 28676, 23124, 22731]]), + values=tensor([0.3219, 0.9722, 0.0915, ..., 0.8899, 0.4132, 0.4288]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.8998, 0.8322, 0.8839, ..., 0.9070, 0.7807, 0.8612]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.035521507263183594 seconds + +['apptainer', 'run', 
'--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 2955 -ss 50000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.505133628845215} + +tensor(indices=tensor([[13303, 49058, 44620, ..., 30187, 14420, 6538], + [40530, 2307, 14438, ..., 5925, 26636, 7122]]), + values=tensor([0.2077, 0.8929, 0.3017, ..., 0.5265, 0.9060, 0.4238]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.6878, 0.6668, 0.2502, ..., 0.1161, 0.9904, 0.5822]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 8.505133628845215 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 3648 -ss 50000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.625307321548462} + +tensor(indices=tensor([[35778, 47800, 2799, ..., 40686, 48133, 40348], + [40277, 33890, 3667, ..., 23487, 49290, 28941]]), + values=tensor([0.7672, 0.0843, 0.7089, ..., 0.5456, 0.6635, 0.3192]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.7106, 0.2065, 0.9873, ..., 0.1255, 0.3189, 0.2195]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.625307321548462 seconds + +tensor(indices=tensor([[35778, 47800, 2799, ..., 40686, 48133, 40348], + [40277, 33890, 3667, ..., 23487, 49290, 28941]]), + values=tensor([0.7672, 0.0843, 0.7089, ..., 0.5456, 0.6635, 0.3192]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.7106, 0.2065, 0.9873, ..., 0.1255, 0.3189, 0.2195]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.625307321548462 seconds + +[16.52, 16.6, 16.6, 16.56, 16.52, 16.56, 16.4, 16.64, 16.64, 16.6] +[16.68, 16.76, 17.36, 18.72, 20.68, 21.68, 21.68, 22.56, 22.32, 22.24, 21.4, 21.6, 21.4, 21.52] +14.546649694442749 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3648, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.625307321548462, 'TIME_S_1KI': 2.912639068406925, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 279.26390142440795, 'W': 19.1978158057312} +[16.52, 16.6, 16.6, 16.56, 16.52, 16.56, 16.4, 16.64, 16.64, 16.6, 16.52, 16.32, 16.08, 16.2, 16.52, 16.88, 17.12, 17.04, 16.68, 16.68] +298.52 +14.925999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3648, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.625307321548462, 'TIME_S_1KI': 2.912639068406925, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 279.26390142440795, 'W': 19.1978158057312, 'J_1KI': 76.55260455712937, 'W_1KI': 5.262559157272807, 'W_D': 4.271815805731201, 'J_D': 62.14060808515548, 'W_D_1KI': 
1.1710021397289476, 'J_D_1KI': 0.32099839356604926} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..da426ad --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 169951, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.636537790298462, "TIME_S_1KI": 0.06258590882253393, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 277.52449954032903, "W": 19.073681929489776, "J_1KI": 1.6329677350549807, "W_1KI": 0.11223047778177109, "W_D": 3.9656819294897776, "J_D": 57.70117677783974, "W_D_1KI": 0.02333426652087824, "J_D_1KI": 0.00013729996599536477} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..8370a4a --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.00698089599609375} + +tensor(indices=tensor([[1664, 3714, 2542, ..., 2278, 2091, 3859], + [4828, 1803, 3833, ..., 4200, 4607, 3969]]), + values=tensor([0.7364, 0.8639, 0.9145, ..., 0.3903, 0.7562, 0.3476]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.2699, 0.1259, 0.2902, ..., 0.5608, 0.2120, 0.6284]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.00698089599609375 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 15041 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.9292683601379395} + +tensor(indices=tensor([[2285, 3737, 2431, ..., 4030, 1329, 1531], + [ 31, 1090, 4568, ..., 275, 4383, 3233]]), + values=tensor([0.5405, 0.5215, 0.7223, ..., 0.9004, 0.2561, 0.8620]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.3501, 0.5305, 0.4834, ..., 0.1738, 0.7916, 0.5126]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.9292683601379395 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 169951 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.636537790298462} + +tensor(indices=tensor([[3417, 4774, 193, ..., 4571, 4333, 1690], + 
[2744, 4353, 4054, ..., 2930, 1905, 2277]]), + values=tensor([0.9217, 0.0233, 0.9368, ..., 0.5743, 0.3939, 0.4660]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8232, 0.9236, 0.6089, ..., 0.4781, 0.8744, 0.9643]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.636537790298462 seconds + +tensor(indices=tensor([[3417, 4774, 193, ..., 4571, 4333, 1690], + [2744, 4353, 4054, ..., 2930, 1905, 2277]]), + values=tensor([0.9217, 0.0233, 0.9368, ..., 0.5743, 0.3939, 0.4660]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8232, 0.9236, 0.6089, ..., 0.4781, 0.8744, 0.9643]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.636537790298462 seconds + +[17.0, 16.88, 16.68, 16.64, 16.4, 16.96, 17.0, 17.44, 17.44, 17.4] +[16.92, 16.92, 16.76, 20.04, 21.76, 23.48, 24.04, 24.68, 21.16, 20.08, 19.88, 19.8, 19.84, 19.64] +14.550127267837524 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 169951, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.636537790298462, 'TIME_S_1KI': 0.06258590882253393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 277.52449954032903, 'W': 19.073681929489776} +[17.0, 16.88, 16.68, 16.64, 16.4, 16.96, 17.0, 17.44, 17.44, 17.4, 16.32, 16.4, 16.68, 16.84, 16.84, 16.84, 16.68, 16.52, 16.44, 16.24] +302.15999999999997 +15.107999999999999 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 169951, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.636537790298462, 'TIME_S_1KI': 0.06258590882253393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 277.52449954032903, 'W': 19.073681929489776, 'J_1KI': 1.6329677350549807, 'W_1KI': 0.11223047778177109, 'W_D': 3.9656819294897776, 'J_D': 57.70117677783974, 'W_D_1KI': 0.02333426652087824, 'J_D_1KI': 0.00013729996599536477} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..5e87b9a --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 18751, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.10068964958191, "TIME_S_1KI": 0.5386747186593733, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 295.44330593109134, "W": 21.817981289884255, "J_1KI": 15.75613598907212, "W_1KI": 1.16356361206785, "W_D": 4.697981289884254, "J_D": 63.6166611862183, "W_D_1KI": 0.2505456396930432, "J_D_1KI": 0.013361721491816074} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..c990150 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 
--membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.011900663375854492} + +tensor(indices=tensor([[1125, 482, 181, ..., 445, 3490, 1861], + [1784, 823, 3863, ..., 4166, 1485, 4292]]), + values=tensor([0.8787, 0.0803, 0.2330, ..., 0.6673, 0.5288, 0.3871]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6797, 0.7397, 0.4754, ..., 0.8892, 0.4649, 0.1290]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.011900663375854492 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 8823 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 4.940444707870483} + +tensor(indices=tensor([[4389, 4960, 1472, ..., 3460, 2561, 841], + [4438, 2013, 4463, ..., 3282, 1205, 1147]]), + values=tensor([0.9966, 0.8554, 0.7839, ..., 0.2685, 0.6284, 0.1990]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0893, 0.4143, 0.7101, ..., 0.5306, 0.6188, 0.7251]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 4.940444707870483 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 18751 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.10068964958191} + +tensor(indices=tensor([[1538, 2442, 1517, ..., 90, 2905, 3131], + [2671, 923, 3770, ..., 3769, 3512, 3895]]), + values=tensor([0.5192, 0.3345, 0.2578, ..., 0.8462, 0.8772, 0.8599]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.1441, 0.7948, 0.3850, ..., 0.9910, 0.6698, 0.4107]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.10068964958191 seconds + +tensor(indices=tensor([[1538, 2442, 1517, ..., 90, 2905, 3131], + [2671, 923, 3770, ..., 3769, 3512, 3895]]), + values=tensor([0.5192, 0.3345, 0.2578, ..., 0.8462, 0.8772, 0.8599]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.1441, 0.7948, 0.3850, ..., 0.9910, 0.6698, 0.4107]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.10068964958191 seconds + +[16.6, 16.88, 16.88, 17.28, 18.12, 18.8, 19.92, 20.92, 21.16, 21.32] +[21.2, 20.68, 24.0, 26.04, 27.04, 26.88, 26.88, 26.84, 22.12, 20.4, 20.6, 21.08, 21.4] +13.541275978088379 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 18751, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.10068964958191, 'TIME_S_1KI': 0.5386747186593733, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 295.44330593109134, 'W': 
21.817981289884255} +[16.6, 16.88, 16.88, 17.28, 18.12, 18.8, 19.92, 20.92, 21.16, 21.32, 20.0, 19.48, 19.04, 18.88, 18.6, 18.64, 19.28, 19.68, 19.92, 19.92] +342.40000000000003 +17.12 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 18751, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.10068964958191, 'TIME_S_1KI': 0.5386747186593733, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 295.44330593109134, 'W': 21.817981289884255, 'J_1KI': 15.75613598907212, 'W_1KI': 1.16356361206785, 'W_D': 4.697981289884254, 'J_D': 63.6166611862183, 'W_D_1KI': 0.2505456396930432, 'J_D_1KI': 0.013361721491816074} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..3913384 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1979, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.557055950164795, "TIME_S_1KI": 5.334540651927638, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 271.0158790588379, "W": 18.594151958930446, "J_1KI": 136.94587117677509, "W_1KI": 9.395731156609624, "W_D": 3.7621519589304473, "J_D": 54.83460189819342, "W_D_1KI": 1.901036866564147, "J_D_1KI": 0.9606047835089171} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..0f764de --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.05864453315734863} + +tensor(indices=tensor([[2332, 4332, 3518, ..., 633, 1623, 4344], + [4574, 1668, 67, ..., 4331, 1266, 4691]]), + values=tensor([0.8554, 0.2272, 0.6353, ..., 0.7683, 0.3740, 0.4992]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.6440, 0.1544, 0.7551, ..., 0.1812, 0.3695, 0.6411]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.05864453315734863 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1790 -ss 5000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.494121789932251} + +tensor(indices=tensor([[1456, 3912, 1372, ..., 3224, 2683, 2635], + [4359, 2564, 1782, ..., 3947, 1983, 1482]]), + values=tensor([0.3693, 0.8541, 0.5662, ..., 0.2103, 0.3711, 0.1561]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.9821, 0.5645, 0.8278, ..., 0.2178, 0.8453, 
0.2241]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 9.494121789932251 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1979 -ss 5000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.557055950164795} + +tensor(indices=tensor([[3490, 44, 483, ..., 3507, 727, 4557], + [3430, 1731, 4015, ..., 3115, 804, 4411]]), + values=tensor([0.0180, 0.0492, 0.1046, ..., 0.0585, 0.0345, 0.5719]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.2852, 0.1727, 0.8475, ..., 0.8432, 0.2585, 0.8281]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.557055950164795 seconds + +tensor(indices=tensor([[3490, 44, 483, ..., 3507, 727, 4557], + [3430, 1731, 4015, ..., 3115, 804, 4411]]), + values=tensor([0.0180, 0.0492, 0.1046, ..., 0.0585, 0.0345, 0.5719]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.2852, 0.1727, 0.8475, ..., 0.8432, 0.2585, 0.8281]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.557055950164795 seconds + +[16.56, 16.56, 16.56, 16.56, 16.52, 16.56, 16.52, 16.64, 16.68, 16.68] +[16.4, 16.36, 16.84, 17.92, 19.88, 21.08, 22.0, 21.72, 21.64, 21.64, 20.36, 20.24, 20.64, 20.92] +14.575328826904297 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1979, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.557055950164795, 'TIME_S_1KI': 5.334540651927638, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 271.0158790588379, 'W': 18.594151958930446} +[16.56, 16.56, 16.56, 16.56, 16.52, 16.56, 16.52, 16.64, 16.68, 16.68, 16.56, 16.44, 16.48, 16.6, 16.28, 16.16, 16.16, 16.44, 16.28, 16.6] +296.64 +14.831999999999999 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1979, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.557055950164795, 'TIME_S_1KI': 5.334540651927638, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 271.0158790588379, 'W': 18.594151958930446, 'J_1KI': 136.94587117677509, 'W_1KI': 9.395731156609624, 'W_D': 3.7621519589304473, 'J_D': 54.83460189819342, 'W_D_1KI': 1.901036866564147, 'J_D_1KI': 0.9606047835089171} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..8ba4eab --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 395, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.489762783050537, "TIME_S_1KI": 26.55636147607731, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 290.4808534240723, "W": 19.97757222631055, "J_1KI": 
735.3945656305627, "W_1KI": 50.57613221850772, "W_D": 5.253572226310549, "J_D": 76.3887686920166, "W_D_1KI": 13.30018285141911, "J_D_1KI": 33.671348990934455} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..7ad567c --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.2918562889099121} + +tensor(indices=tensor([[1539, 2933, 1194, ..., 2168, 3159, 1106], + [ 480, 963, 4999, ..., 19, 246, 4737]]), + values=tensor([0.1599, 0.5410, 0.5419, ..., 0.8985, 0.8155, 0.2532]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.0837, 0.0388, 0.1288, ..., 0.9088, 0.4606, 0.2721]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.2918562889099121 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 359 -ss 5000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.519285917282104} + +tensor(indices=tensor([[1610, 3502, 571, ..., 1952, 4170, 292], + [1945, 2392, 4998, ..., 4956, 4725, 2691]]), + values=tensor([0.1419, 0.8689, 0.5529, ..., 0.8661, 0.6555, 0.2043]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.5639, 0.6023, 0.4237, ..., 0.4539, 0.2019, 0.0442]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 9.519285917282104 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 395 -ss 5000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.489762783050537} + +tensor(indices=tensor([[1094, 1521, 4336, ..., 2656, 3242, 1068], + [3664, 3748, 1054, ..., 4750, 4286, 731]]), + values=tensor([0.8543, 0.2353, 0.7060, ..., 0.0727, 0.4494, 0.8191]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.2087, 0.5145, 0.5691, ..., 0.0211, 0.8852, 0.7818]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.489762783050537 seconds + +tensor(indices=tensor([[1094, 1521, 4336, ..., 2656, 3242, 1068], + [3664, 3748, 1054, ..., 4750, 4286, 731]]), + values=tensor([0.8543, 0.2353, 0.7060, ..., 0.0727, 0.4494, 0.8191]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.2087, 0.5145, 0.5691, ..., 0.0211, 0.8852, 0.7818]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 
+Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.489762783050537 seconds + +[16.6, 16.6, 16.28, 16.24, 16.2, 16.4, 16.24, 16.36, 16.24, 16.32] +[16.48, 16.8, 20.16, 21.28, 23.64, 23.64, 24.12, 25.04, 22.28, 21.24, 20.64, 20.92, 20.8, 20.84] +14.540348052978516 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.489762783050537, 'TIME_S_1KI': 26.55636147607731, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 290.4808534240723, 'W': 19.97757222631055} +[16.6, 16.6, 16.28, 16.24, 16.2, 16.4, 16.24, 16.36, 16.24, 16.32, 16.44, 16.44, 16.56, 16.56, 16.44, 16.28, 16.4, 16.2, 16.2, 16.32] +294.48 +14.724 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.489762783050537, 'TIME_S_1KI': 26.55636147607731, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 290.4808534240723, 'W': 19.97757222631055, 'J_1KI': 735.3945656305627, 'W_1KI': 50.57613221850772, 'W_D': 5.253572226310549, 'J_D': 76.3887686920166, 'W_D_1KI': 13.30018285141911, 'J_D_1KI': 33.671348990934455} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..5c43705 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 197, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.465960264205933, "TIME_S_1KI": 53.12670184876108, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 271.0509253978729, "W": 18.50842661309495, "J_1KI": 1375.8930223242278, "W_1KI": 93.95140412738553, "W_D": 4.029426613094948, "J_D": 59.00986805272098, "W_D_1KI": 20.453942198451514, "J_D_1KI": 103.82711775863713} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..cbc69f8 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.5305633544921875} + +tensor(indices=tensor([[2211, 2064, 3651, ..., 4586, 2712, 2396], + [1419, 3548, 4377, ..., 39, 2918, 721]]), + values=tensor([0.7180, 0.2709, 0.5275, ..., 0.7109, 0.9108, 0.6833]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.3447, 0.8914, 0.4143, ..., 0.6063, 0.4789, 0.4808]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.5305633544921875 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 
'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 197 -ss 5000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.465960264205933} + +tensor(indices=tensor([[3946, 3115, 4265, ..., 3804, 3313, 4889], + [4180, 4359, 3393, ..., 1785, 4450, 705]]), + values=tensor([0.6824, 0.9404, 0.6692, ..., 0.7300, 0.6277, 0.9586]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5410, 0.8808, 0.3184, ..., 0.0754, 0.7513, 0.9025]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.465960264205933 seconds + +tensor(indices=tensor([[3946, 3115, 4265, ..., 3804, 3313, 4889], + [4180, 4359, 3393, ..., 1785, 4450, 705]]), + values=tensor([0.6824, 0.9404, 0.6692, ..., 0.7300, 0.6277, 0.9586]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5410, 0.8808, 0.3184, ..., 0.0754, 0.7513, 0.9025]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.465960264205933 seconds + +[16.0, 15.88, 15.92, 15.84, 15.88, 15.96, 15.84, 15.72, 15.92, 16.12] +[16.48, 16.52, 17.8, 17.8, 18.76, 20.2, 21.12, 21.64, 21.36, 20.68, 20.8, 20.92, 21.08, 21.08] +14.644730806350708 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.465960264205933, 'TIME_S_1KI': 53.12670184876108, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 271.0509253978729, 'W': 18.50842661309495} +[16.0, 15.88, 15.92, 15.84, 15.88, 15.96, 15.84, 15.72, 15.92, 16.12, 16.32, 16.56, 16.48, 16.4, 16.16, 16.0, 16.2, 16.2, 16.2, 16.4] +289.58000000000004 +14.479000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.465960264205933, 'TIME_S_1KI': 53.12670184876108, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 271.0509253978729, 'W': 18.50842661309495, 'J_1KI': 1375.8930223242278, 'W_1KI': 93.95140412738553, 'W_D': 4.029426613094948, 'J_D': 59.00986805272098, 'W_D_1KI': 20.453942198451514, 'J_D_1KI': 103.82711775863713} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..5dfbfa3 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 98, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.651377439498901, "TIME_S_1KI": 108.68752489284593, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 310.767177734375, "W": 19.797093759237278, "J_1KI": 3171.0936503507655, "W_1KI": 202.01116080854365, "W_D": 4.686093759237277, "J_D": 73.56050084257129, "W_D_1KI": 47.817283257523236, "J_D_1KI": 487.9314618114616} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.2.output 
b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..0f48fc2 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.2 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 1.0660912990570068} + +tensor(indices=tensor([[4675, 4200, 4844, ..., 3664, 29, 4927], + [ 603, 3385, 848, ..., 3466, 4882, 1542]]), + values=tensor([0.7004, 0.6041, 0.4004, ..., 0.9470, 0.0923, 0.5198]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9496, 0.4576, 0.9894, ..., 0.3671, 0.9771, 0.1259]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 1.0660912990570068 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 98 -ss 5000 -sd 0.2 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.651377439498901} + +tensor(indices=tensor([[3606, 4389, 1461, ..., 4113, 101, 4260], + [2428, 1094, 2653, ..., 447, 925, 876]]), + values=tensor([0.8423, 0.2379, 0.8924, ..., 0.6901, 0.4660, 0.2887]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.3289, 0.0960, 0.7467, ..., 0.9465, 0.0554, 0.8653]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.651377439498901 seconds + +tensor(indices=tensor([[3606, 4389, 1461, ..., 4113, 101, 4260], + [2428, 1094, 2653, ..., 447, 925, 876]]), + values=tensor([0.8423, 0.2379, 0.8924, ..., 0.6901, 0.4660, 0.2887]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.3289, 0.0960, 0.7467, ..., 0.9465, 0.0554, 0.8653]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.651377439498901 seconds + +[16.48, 16.48, 16.64, 16.56, 16.76, 16.92, 16.88, 17.2, 17.44, 17.2] +[16.96, 16.96, 16.76, 19.84, 21.52, 23.88, 24.84, 25.68, 22.32, 21.56, 21.36, 21.2, 21.08, 20.76, 20.48] +15.69761610031128 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 98, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.651377439498901, 'TIME_S_1KI': 108.68752489284593, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 310.767177734375, 'W': 19.797093759237278} +[16.48, 16.48, 16.64, 16.56, 16.76, 16.92, 16.88, 17.2, 17.44, 17.2, 16.12, 16.32, 16.28, 16.64, 16.76, 16.76, 16.96, 17.04, 17.16, 17.04] +302.22 +15.111 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 98, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.651377439498901, 'TIME_S_1KI': 108.68752489284593, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
310.767177734375, 'W': 19.797093759237278, 'J_1KI': 3171.0936503507655, 'W_1KI': 202.01116080854365, 'W_D': 4.686093759237277, 'J_D': 73.56050084257129, 'W_D_1KI': 47.817283257523236, 'J_D_1KI': 487.9314618114616} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..4ea350f --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 66, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.432002067565918, "TIME_S_1KI": 158.0606373873624, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 321.88242120742797, "W": 19.228251559320075, "J_1KI": 4877.006381930727, "W_1KI": 291.33714483818295, "W_D": 4.4532515593200745, "J_D": 74.54777621030804, "W_D_1KI": 67.47350847454658, "J_D_1KI": 1022.3258859779785} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..acc55ea --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.3 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.579437255859375} + +tensor(indices=tensor([[2597, 3041, 2728, ..., 3185, 648, 646], + [3216, 2189, 786, ..., 4635, 977, 3770]]), + values=tensor([0.0128, 0.2925, 0.8782, ..., 0.1093, 0.0456, 0.4190]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.0461, 0.1320, 0.4687, ..., 0.7223, 0.9563, 0.7445]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 1.579437255859375 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 66 -ss 5000 -sd 0.3 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.432002067565918} + +tensor(indices=tensor([[4061, 1433, 3539, ..., 20, 1590, 3318], + [1677, 2479, 1202, ..., 3468, 648, 2104]]), + values=tensor([0.8775, 0.4833, 0.3159, ..., 0.7620, 0.8717, 0.8538]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.0263, 0.1496, 0.5748, ..., 0.3621, 0.7692, 0.5063]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.432002067565918 seconds + +tensor(indices=tensor([[4061, 1433, 3539, ..., 20, 1590, 3318], + [1677, 2479, 1202, ..., 3468, 648, 2104]]), + values=tensor([0.8775, 0.4833, 0.3159, ..., 0.7620, 0.8717, 0.8538]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.0263, 0.1496, 0.5748, ..., 0.3621, 0.7692, 0.5063]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.432002067565918 seconds + +[16.24, 16.36, 16.48, 16.6, 16.6, 16.4, 16.64, 16.44, 16.24, 16.4] +[16.68, 16.4, 17.2, 18.36, 20.56, 22.08, 23.12, 22.76, 22.24, 22.24, 20.6, 20.4, 20.36, 20.56, 20.72, 20.92] +16.74007749557495 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 66, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.432002067565918, 'TIME_S_1KI': 158.0606373873624, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 321.88242120742797, 'W': 19.228251559320075} +[16.24, 16.36, 16.48, 16.6, 16.6, 16.4, 16.64, 16.44, 16.24, 16.4, 16.08, 16.24, 16.36, 16.28, 16.32, 16.6, 16.52, 16.4, 16.56, 16.2] +295.5 +14.775 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 66, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.432002067565918, 'TIME_S_1KI': 158.0606373873624, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 321.88242120742797, 'W': 19.228251559320075, 'J_1KI': 4877.006381930727, 'W_1KI': 291.33714483818295, 'W_D': 4.4532515593200745, 'J_D': 74.54777621030804, 'W_D_1KI': 67.47350847454658, 'J_D_1KI': 1022.3258859779785} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..2247a4d --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 49, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.454669713973999, "TIME_S_1KI": 213.36060640763262, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 328.44201911926274, "W": 19.654567870656535, "J_1KI": 6702.898349372709, "W_1KI": 401.1136300133987, "W_D": 4.827567870656537, "J_D": 80.67214447593697, "W_D_1KI": 98.52179327870485, "J_D_1KI": 2010.648842422548} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..e86bcbe --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.4 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 2.131768226623535} + +tensor(indices=tensor([[4461, 4744, 3442, ..., 1406, 2965, 3630], + [ 320, 1844, 4039, ..., 199, 1958, 682]]), + values=tensor([0.5146, 0.7294, 0.9715, ..., 0.4598, 0.4111, 0.8946]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.4868, 0.1533, 0.6166, ..., 0.1064, 0.1384, 0.4995]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 2.131768226623535 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 
'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 49 -ss 5000 -sd 0.4 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.454669713973999} + +tensor(indices=tensor([[ 255, 4308, 4271, ..., 4895, 2785, 3300], + [4275, 2121, 2885, ..., 613, 3388, 1000]]), + values=tensor([0.3813, 0.4455, 0.7562, ..., 0.4968, 0.8450, 0.0168]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.0492, 0.7146, 0.8002, ..., 0.8485, 0.2488, 0.9710]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.454669713973999 seconds + +tensor(indices=tensor([[ 255, 4308, 4271, ..., 4895, 2785, 3300], + [4275, 2121, 2885, ..., 613, 3388, 1000]]), + values=tensor([0.3813, 0.4455, 0.7562, ..., 0.4968, 0.8450, 0.0168]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.0492, 0.7146, 0.8002, ..., 0.8485, 0.2488, 0.9710]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.454669713973999 seconds + +[16.56, 16.52, 16.4, 16.4, 16.32, 16.36, 16.36, 16.44, 16.56, 16.76] +[16.88, 16.8, 17.96, 19.08, 20.68, 22.6, 23.64, 23.64, 23.0, 22.88, 20.96, 20.76, 20.8, 20.76, 20.92, 21.08] +16.710721969604492 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 49, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.454669713973999, 'TIME_S_1KI': 213.36060640763262, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 328.44201911926274, 'W': 19.654567870656535} +[16.56, 16.52, 16.4, 16.4, 16.32, 16.36, 16.36, 16.44, 16.56, 16.76, 16.56, 16.6, 16.64, 16.68, 16.56, 16.44, 16.36, 16.36, 16.4, 16.4] +296.53999999999996 +14.826999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 49, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.454669713973999, 'TIME_S_1KI': 213.36060640763262, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 328.44201911926274, 'W': 19.654567870656535, 'J_1KI': 6702.898349372709, 'W_1KI': 401.1136300133987, 'W_D': 4.827567870656537, 'J_D': 80.67214447593697, 'W_D_1KI': 98.52179327870485, 'J_D_1KI': 2010.648842422548} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..73a1354 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 39, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.323477506637573, "TIME_S_1KI": 264.7045514522455, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 353.88449090957636, "W": 19.947987313164067, "J_1KI": 9073.961305373754, "W_1KI": 511.486854183694, "W_D": 5.0699873131640665, "J_D": 89.94340386676784, "W_D_1KI": 129.99967469651452, "J_D_1KI": 3333.324992218321} diff --git 
a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..872645b --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.5 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 2.67789626121521} + +tensor(indices=tensor([[1170, 1455, 3029, ..., 4918, 3614, 4085], + [2865, 2412, 272, ..., 2800, 4482, 2090]]), + values=tensor([0.0035, 0.7337, 0.9019, ..., 0.6107, 0.8466, 0.6679]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.2336, 0.9524, 0.0619, ..., 0.3164, 0.0642, 0.1253]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 2.67789626121521 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 39 -ss 5000 -sd 0.5 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.323477506637573} + +tensor(indices=tensor([[2565, 3146, 2200, ..., 2876, 2955, 426], + [1225, 419, 2919, ..., 2160, 2679, 830]]), + values=tensor([0.3710, 0.0621, 0.9323, ..., 0.0796, 0.5384, 0.7362]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.1985, 0.6566, 0.2918, ..., 0.5055, 0.9225, 0.8851]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.323477506637573 seconds + +tensor(indices=tensor([[2565, 3146, 2200, ..., 2876, 2955, 426], + [1225, 419, 2919, ..., 2160, 2679, 830]]), + values=tensor([0.3710, 0.0621, 0.9323, ..., 0.0796, 0.5384, 0.7362]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.1985, 0.6566, 0.2918, ..., 0.5055, 0.9225, 0.8851]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.323477506637573 seconds + +[16.4, 16.32, 16.32, 16.52, 16.56, 16.92, 17.0, 16.88, 16.8, 16.84] +[16.48, 16.36, 19.6, 20.48, 20.48, 22.52, 24.72, 25.32, 22.76, 21.88, 20.64, 20.8, 20.76, 20.8, 21.12, 20.96, 21.32] +17.740360736846924 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 39, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.323477506637573, 'TIME_S_1KI': 264.7045514522455, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.88449090957636, 'W': 19.947987313164067} +[16.4, 16.32, 16.32, 16.52, 16.56, 16.92, 17.0, 16.88, 16.8, 16.84, 16.44, 16.32, 16.28, 16.32, 16.16, 16.36, 16.36, 16.52, 16.68, 16.8] +297.56 +14.878 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 39, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 
10.323477506637573, 'TIME_S_1KI': 264.7045514522455, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.88449090957636, 'W': 19.947987313164067, 'J_1KI': 9073.961305373754, 'W_1KI': 511.486854183694, 'W_D': 5.0699873131640665, 'J_D': 89.94340386676784, 'W_D_1KI': 129.99967469651452, 'J_D_1KI': 3333.324992218321} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..0a64b2c --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 644136, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.078837871551514, "TIME_S_1KI": 0.015647065016629276, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 245.636281709671, "W": 18.157029640045646, "J_1KI": 0.3813422657787657, "W_1KI": 0.028188192617778926, "W_D": 3.3670296400456454, "J_D": 45.550657656192776, "W_D_1KI": 0.005227203013099167, "J_D_1KI": 8.115061125444266e-06} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..1564166 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,456 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.008202552795410156} + +tensor(indices=tensor([[2252, 336, 1103, 4850, 3828, 1002, 2403, 1322, 4435, + 4149, 396, 2567, 2376, 4189, 709, 771, 3435, 3209, + 3347, 4564, 4888, 2182, 1796, 2315, 98, 3411, 3018, + 3752, 1628, 4655, 118, 3898, 3276, 1799, 4698, 1199, + 3935, 3931, 2972, 3319, 4757, 1214, 4704, 491, 3496, + 1235, 3453, 4579, 939, 1399, 581, 2517, 678, 2692, + 4431, 4063, 2756, 4579, 2338, 3774, 340, 2430, 1456, + 983, 2276, 1584, 942, 1706, 4030, 1521, 4760, 3623, + 3796, 4260, 4536, 3049, 3478, 634, 1517, 2441, 2910, + 287, 1807, 4530, 2267, 2849, 4673, 3892, 4260, 1320, + 3707, 3953, 302, 1400, 3831, 2113, 3001, 4963, 334, + 4858, 2028, 1771, 4165, 4914, 3329, 2530, 3351, 2922, + 966, 301, 2358, 1817, 4890, 3824, 3878, 154, 3747, + 1306, 794, 4311, 1596, 2485, 3444, 1689, 4394, 4036, + 175, 2202, 3864, 4590, 653, 660, 4297, 4230, 2604, + 3507, 4393, 4455, 2439, 2574, 1893, 2633, 778, 439, + 4110, 715, 2123, 4305, 2601, 1287, 4295, 4894, 2325, + 1864, 2008, 3746, 334, 3060, 2470, 2467, 3321, 4604, + 3685, 409, 4030, 4946, 579, 4863, 1225, 491, 2668, + 2531, 4181, 2730, 3788, 468, 1305, 4124, 4657, 4154, + 1199, 1965, 212, 2765, 817, 1329, 4871, 1433, 270, + 591, 3712, 4017, 1468, 1278, 2581, 3508, 649, 3339, + 249, 3906, 3005, 3578, 3347, 16, 4255, 838, 3217, + 607, 1853, 3945, 4782, 658, 3052, 2998, 2815, 4747, + 3939, 3676, 720, 2201, 2887, 979, 2269, 4739, 3707, + 572, 1839, 722, 3928, 1080, 2006, 852, 2115, 4575, + 2354, 3923, 1742, 1683, 2379, 2392, 2900, 841, 4463, + 2830, 4579, 2294, 1502, 1177, 3836, 2384], + [4699, 811, 2349, 468, 1192, 4102, 68, 3581, 3805, + 2677, 
4352, 4166, 1460, 2382, 2336, 4991, 631, 1380, + 4738, 2283, 4423, 350, 1647, 3829, 4509, 496, 3558, + 746, 4852, 3779, 1210, 4378, 503, 2316, 3005, 3846, + 4334, 293, 4547, 822, 1453, 4488, 3800, 3006, 3226, + 3047, 334, 1521, 1891, 3438, 1297, 605, 3223, 3463, + 2130, 4064, 2184, 4392, 1014, 3134, 3772, 1770, 711, + 4886, 3096, 2730, 2266, 1781, 4794, 1589, 362, 1729, + 4999, 3651, 2970, 4457, 1937, 51, 2500, 2922, 1303, + 2890, 1344, 57, 4059, 2152, 816, 1506, 27, 2585, + 301, 588, 3886, 1220, 2662, 1655, 2205, 2866, 4179, + 4023, 590, 4029, 3104, 3646, 4285, 3600, 3755, 1359, + 3911, 1178, 2813, 1476, 946, 4862, 527, 4152, 4060, + 2912, 3932, 4862, 3378, 2508, 2259, 3044, 3294, 645, + 2529, 3464, 4308, 921, 1192, 4069, 2584, 1536, 4918, + 2122, 3091, 4388, 4668, 3435, 4967, 4349, 643, 2985, + 4335, 2992, 4578, 4243, 1547, 772, 1276, 659, 4556, + 341, 4289, 4602, 1464, 998, 4843, 1477, 4540, 4829, + 2587, 2023, 2263, 3936, 3414, 114, 2973, 1777, 1704, + 570, 1663, 1632, 1352, 1219, 1350, 3427, 432, 1887, + 3116, 253, 4025, 3716, 4503, 2060, 573, 4534, 2750, + 1724, 601, 4231, 4597, 195, 1843, 3916, 2687, 2605, + 1201, 1787, 468, 2341, 422, 4851, 3634, 364, 3901, + 459, 3256, 2302, 293, 1903, 4333, 3637, 419, 820, + 351, 3079, 3799, 807, 3447, 3605, 3326, 4572, 4997, + 2238, 3922, 3102, 3458, 2914, 3772, 305, 1261, 3915, + 1030, 2992, 194, 3355, 1937, 2935, 1468, 2978, 3300, + 1249, 625, 4228, 2468, 3146, 3318, 5]]), + values=tensor([0.4170, 0.9021, 0.2191, 0.5772, 0.1141, 0.3571, 0.0121, + 0.3738, 0.4116, 0.1368, 0.9024, 0.1042, 0.7971, 0.7801, + 0.1965, 0.6147, 0.5687, 0.4184, 0.6730, 0.3293, 0.5537, + 0.3220, 0.3375, 0.6184, 0.6151, 0.1935, 0.0492, 0.9893, + 0.3859, 0.7482, 0.7323, 0.4346, 0.9912, 0.3598, 0.1852, + 0.8602, 0.6891, 0.7945, 0.0049, 0.2874, 0.3938, 0.6914, + 0.3310, 0.5947, 0.7592, 0.6265, 0.6824, 0.8321, 0.1578, + 0.8683, 0.9464, 0.7295, 0.9574, 0.1765, 0.8699, 0.1604, + 0.1216, 0.7280, 0.1568, 0.3296, 0.7853, 0.9823, 0.0637, + 0.7057, 0.5138, 0.6136, 0.9659, 0.9379, 0.9296, 0.7746, + 0.6319, 0.5698, 0.2266, 0.0158, 0.7269, 0.9239, 0.7193, + 0.4976, 0.6466, 0.8383, 0.6494, 0.2425, 0.6287, 0.5034, + 0.1299, 0.4978, 0.8520, 0.9476, 0.0767, 0.2257, 0.6498, + 0.8485, 0.9091, 0.4959, 0.8071, 0.2530, 0.2854, 0.2183, + 0.8313, 0.0880, 0.1420, 0.5508, 0.6204, 0.1809, 0.2700, + 0.7948, 0.3691, 0.2325, 0.5382, 0.2522, 0.7708, 0.7964, + 0.9410, 0.9338, 0.4293, 0.0719, 0.3196, 0.2366, 0.7575, + 0.0032, 0.7115, 0.9309, 0.9912, 0.5761, 0.3714, 0.0419, + 0.4084, 0.9674, 0.9584, 0.8929, 0.0475, 0.0184, 0.5159, + 0.6943, 0.9252, 0.9035, 0.7202, 0.9437, 0.8139, 0.6379, + 0.6771, 0.6323, 0.4785, 0.6530, 0.6504, 0.4676, 0.6026, + 0.8531, 0.5223, 0.3944, 0.3831, 0.7278, 0.6842, 0.2239, + 0.4150, 0.0203, 0.2270, 0.8490, 0.9340, 0.7080, 0.9854, + 0.9919, 0.8148, 0.7539, 0.0517, 0.9221, 0.9546, 0.7827, + 0.6344, 0.7276, 0.8300, 0.5795, 0.2995, 0.4896, 0.6562, + 0.8691, 0.5553, 0.8160, 0.9308, 0.0172, 0.3391, 0.3728, + 0.4833, 0.2164, 0.1140, 0.9846, 0.2576, 0.7105, 0.4348, + 0.8585, 0.2455, 0.5289, 0.1525, 0.9729, 0.8262, 0.0221, + 0.7328, 0.5228, 0.5864, 0.7999, 0.7557, 0.3403, 0.5457, + 0.8549, 0.8909, 0.0589, 0.9294, 0.4939, 0.9034, 0.1819, + 0.1290, 0.9869, 0.2658, 0.3393, 0.8300, 0.6106, 0.6524, + 0.6463, 0.6663, 0.8168, 0.3688, 0.3897, 0.9020, 0.2802, + 0.7659, 0.5521, 0.1228, 0.2192, 0.5687, 0.0463, 0.8382, + 0.2209, 0.7519, 0.3344, 0.7919, 0.9752, 0.0833, 0.7742, + 0.5952, 0.5635, 0.3039, 0.6431, 0.5815, 0.3549, 0.0395, + 0.8224, 0.1832, 0.2542, 0.9100, 0.5628]), + size=(5000, 5000), 
nnz=250, layout=torch.sparse_coo) +tensor([0.5568, 0.1806, 0.2628, ..., 0.3711, 0.6602, 0.3297]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.008202552795410156 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 12800 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.20865130424499512} + +tensor(indices=tensor([[ 836, 1579, 4154, 4319, 1499, 3613, 3400, 297, 1768, + 4872, 2742, 3909, 690, 2593, 4574, 881, 1234, 3153, + 4088, 3566, 3872, 1067, 1834, 4756, 3957, 994, 4672, + 1619, 129, 881, 2649, 846, 945, 1750, 511, 1157, + 1622, 4265, 2968, 3072, 1706, 4651, 4253, 1427, 1818, + 3849, 2721, 1795, 4270, 1803, 1762, 4276, 3938, 3343, + 3668, 3637, 2110, 3102, 1665, 2384, 616, 3330, 2604, + 3160, 2836, 4766, 2254, 3225, 1873, 3466, 1206, 4390, + 1921, 3575, 3642, 2358, 4308, 4356, 703, 1062, 2210, + 4670, 3398, 2717, 722, 1859, 4984, 2405, 21, 2259, + 1903, 2612, 649, 1633, 3709, 3133, 1324, 367, 3742, + 2359, 891, 2538, 4397, 2745, 3312, 62, 2263, 928, + 4148, 4400, 4500, 2943, 141, 1626, 1600, 362, 4735, + 4610, 2421, 2381, 294, 3441, 3632, 1077, 2678, 4721, + 349, 4636, 71, 530, 2053, 4577, 63, 921, 3544, + 679, 2977, 3324, 11, 4511, 3615, 4896, 2406, 2258, + 1864, 3077, 3493, 4707, 3010, 262, 3958, 496, 1377, + 487, 2691, 500, 482, 1105, 4141, 4216, 258, 4199, + 2473, 2035, 1880, 4882, 4687, 2693, 4378, 979, 275, + 3291, 2075, 3974, 1443, 2546, 3646, 2922, 593, 2962, + 3158, 230, 1436, 4228, 1327, 166, 5, 4442, 2053, + 4565, 4281, 3445, 317, 4454, 1048, 4336, 3689, 3338, + 3470, 111, 972, 3381, 3779, 3051, 187, 4961, 4122, + 3534, 1019, 2787, 2843, 1380, 404, 3341, 4472, 1726, + 3013, 3543, 1304, 1414, 4064, 4001, 1358, 3450, 4428, + 4213, 2147, 4687, 555, 2055, 3303, 827, 2606, 2158, + 106, 4924, 4415, 3838, 1402, 1986, 38, 2635, 3518, + 3588, 2335, 1536, 4020, 2269, 1416, 1920], + [3170, 79, 3608, 4911, 3184, 2979, 3355, 3713, 3495, + 1115, 4233, 1262, 4719, 4382, 180, 1521, 4310, 1683, + 828, 3686, 2249, 3603, 4171, 475, 1878, 436, 2067, + 1495, 2935, 466, 1227, 1856, 4462, 223, 3692, 4806, + 4870, 1912, 400, 982, 3711, 2295, 1015, 4745, 641, + 4760, 4586, 2357, 2534, 83, 1143, 3803, 275, 1878, + 1627, 3610, 3473, 903, 4472, 4399, 4042, 2401, 3650, + 1128, 4224, 3836, 2555, 3816, 2915, 652, 2435, 2908, + 906, 1097, 3781, 2171, 1266, 3457, 3658, 4715, 1172, + 1474, 967, 4353, 342, 2662, 4086, 3518, 2308, 2072, + 2296, 1832, 4636, 4683, 1348, 3664, 2430, 771, 1387, + 4220, 3904, 1160, 4582, 3345, 2117, 1743, 1059, 1486, + 4448, 3462, 4310, 2073, 2531, 1264, 3076, 768, 3065, + 2099, 22, 3858, 2117, 3071, 1870, 1908, 3066, 3936, + 4523, 3862, 3511, 1726, 3483, 2103, 899, 820, 4284, + 759, 2406, 4468, 4165, 498, 2386, 4511, 3869, 4730, + 344, 2104, 3661, 4408, 2078, 2465, 2304, 2251, 2124, + 2228, 2243, 1704, 1012, 833, 1205, 822, 769, 210, + 4021, 1250, 2758, 1353, 2534, 2009, 2297, 1317, 1960, + 4125, 827, 3560, 4284, 3656, 456, 840, 985, 4783, + 2207, 4758, 1206, 2542, 3955, 101, 3717, 819, 188, + 4957, 3187, 1212, 2275, 4009, 292, 2469, 2123, 2868, + 2580, 1676, 1102, 3687, 1081, 989, 2408, 2540, 4674, + 3273, 1936, 535, 1044, 1420, 2207, 2244, 716, 2832, + 3874, 1334, 1054, 3916, 1977, 47, 2184, 4123, 
4203, + 2124, 1300, 4934, 1439, 2746, 2360, 3932, 4632, 3735, + 1858, 3325, 1246, 3422, 1076, 4923, 4490, 1482, 475, + 74, 3044, 212, 4786, 3431, 1105, 3669]]), + values=tensor([5.9885e-01, 8.1096e-01, 4.4897e-01, 2.1731e-01, + 7.0901e-02, 3.4210e-01, 5.8240e-01, 8.1315e-01, + 2.3762e-01, 7.4134e-01, 8.9810e-02, 2.1845e-01, + 8.3762e-01, 3.4309e-01, 5.4146e-01, 2.0706e-01, + 5.4142e-01, 6.6129e-01, 1.7534e-02, 4.8506e-01, + 6.9088e-01, 7.8290e-01, 6.3058e-01, 1.4084e-01, + 7.6928e-01, 7.3730e-01, 1.7503e-01, 4.4611e-01, + 3.7372e-01, 8.0127e-01, 5.4780e-01, 3.8282e-01, + 7.7213e-01, 3.1615e-01, 8.5043e-01, 4.7509e-01, + 8.2175e-01, 6.9654e-01, 3.0353e-01, 3.1758e-01, + 2.8905e-01, 5.0389e-01, 3.7720e-01, 4.9719e-01, + 4.6269e-01, 9.4183e-02, 8.4675e-01, 5.1296e-01, + 5.0067e-01, 7.4411e-01, 7.3911e-01, 3.4168e-01, + 4.1887e-02, 6.4098e-01, 5.0523e-01, 2.1087e-01, + 9.9028e-01, 8.0414e-01, 2.5682e-01, 4.5275e-01, + 5.9842e-01, 8.9192e-02, 6.2465e-01, 5.6104e-02, + 1.5213e-01, 4.8540e-02, 6.0256e-01, 4.3261e-01, + 5.0758e-02, 5.2704e-01, 6.2666e-01, 9.7424e-01, + 1.6385e-01, 8.7910e-01, 4.3482e-01, 1.8690e-01, + 3.8047e-01, 3.3206e-03, 3.8943e-02, 4.2826e-01, + 7.6229e-04, 5.7247e-01, 7.8778e-01, 1.6584e-02, + 6.0315e-03, 6.8232e-01, 8.0272e-01, 7.6518e-01, + 9.3612e-01, 4.7751e-01, 6.1964e-01, 3.9617e-01, + 5.4792e-02, 5.1730e-01, 5.4183e-01, 1.9033e-01, + 8.0412e-01, 8.2545e-01, 8.6834e-01, 9.1209e-01, + 1.2813e-01, 3.1443e-01, 5.1749e-01, 5.7715e-01, + 1.8450e-01, 1.0404e-03, 6.0253e-01, 1.7010e-01, + 5.3019e-02, 2.4196e-01, 2.1573e-01, 4.1807e-01, + 2.0131e-01, 8.7148e-01, 1.8333e-01, 9.2283e-01, + 4.0907e-01, 7.8314e-01, 9.8637e-01, 3.8265e-01, + 3.8088e-01, 5.1302e-01, 3.5749e-01, 1.3486e-01, + 2.0220e-01, 6.3379e-02, 9.5144e-01, 2.9654e-01, + 1.3694e-01, 5.7487e-02, 1.1217e-02, 5.2757e-01, + 6.8318e-01, 4.2558e-02, 1.4856e-01, 2.8028e-02, + 9.8864e-01, 1.4918e-01, 4.5257e-01, 8.5605e-01, + 1.5611e-01, 8.9891e-01, 4.5305e-01, 6.4239e-01, + 3.1540e-03, 1.3727e-01, 2.3263e-01, 3.9159e-01, + 5.9133e-01, 6.7135e-01, 7.3301e-01, 8.9210e-01, + 5.1799e-01, 8.8892e-01, 2.9588e-01, 3.9758e-01, + 5.9311e-01, 7.3824e-01, 2.2311e-02, 8.8633e-01, + 8.0754e-01, 5.0322e-01, 1.3492e-01, 2.6955e-01, + 2.7138e-01, 9.3999e-01, 7.4611e-01, 7.7141e-01, + 8.5368e-01, 7.3192e-01, 6.1900e-02, 8.4505e-01, + 3.2219e-01, 8.4804e-01, 4.4195e-01, 1.5508e-01, + 1.7539e-01, 6.5529e-01, 6.5333e-01, 2.7130e-01, + 8.4207e-01, 1.8031e-01, 7.3988e-01, 5.1935e-01, + 5.1052e-01, 1.1203e-01, 4.4140e-01, 6.3699e-01, + 6.1041e-01, 1.7684e-01, 6.3601e-02, 6.1397e-01, + 7.1982e-01, 6.2603e-01, 8.5978e-01, 9.5008e-01, + 8.2850e-01, 5.6955e-01, 1.3986e-01, 4.7161e-03, + 7.5560e-01, 7.2280e-01, 1.1840e-01, 6.8868e-01, + 5.1896e-01, 9.1902e-01, 1.8333e-02, 9.1067e-01, + 8.6824e-01, 5.5701e-01, 2.0667e-01, 5.1783e-01, + 8.1605e-01, 7.8862e-01, 8.4018e-01, 9.2218e-02, + 9.4304e-01, 1.5276e-01, 9.2787e-02, 1.5115e-01, + 9.8151e-01, 7.5283e-01, 7.1084e-01, 1.9846e-01, + 3.3435e-01, 4.7980e-01, 1.8473e-01, 6.7737e-01, + 5.8602e-01, 4.2490e-01, 3.9173e-02, 8.1337e-01, + 4.1555e-01, 2.0395e-02, 9.4457e-01, 9.6101e-01, + 8.9335e-01, 1.5658e-01, 3.9549e-01, 5.3850e-01, + 9.0696e-01, 1.9323e-01, 7.9593e-01, 6.0498e-01, + 5.4678e-01, 2.8350e-01, 6.7806e-01, 5.7632e-01, + 2.8163e-01, 6.7064e-01]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.4590, 0.1083, 0.1438, ..., 0.8360, 0.0459, 0.6462]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 
+Time: 0.20865130424499512 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 644136 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.078837871551514} + +tensor(indices=tensor([[1848, 36, 479, 2449, 3061, 251, 2083, 3046, 235, + 1185, 2706, 3120, 3567, 4164, 2909, 702, 3592, 3705, + 865, 2916, 116, 1813, 2061, 1523, 4316, 3653, 1626, + 4342, 2735, 1397, 4191, 1087, 126, 3173, 3885, 4722, + 2608, 788, 3007, 2468, 941, 3247, 217, 2033, 2510, + 3255, 2538, 2651, 3742, 1824, 2891, 4469, 723, 3514, + 2400, 3230, 4149, 4751, 964, 632, 1969, 536, 127, + 3492, 2888, 2918, 1383, 4700, 3723, 1892, 3165, 4079, + 562, 2995, 4446, 2548, 554, 1110, 661, 1697, 844, + 1650, 2447, 181, 418, 3376, 3151, 610, 3506, 3940, + 4334, 3905, 3359, 501, 1193, 393, 2829, 4436, 32, + 1048, 1449, 262, 2442, 707, 3973, 693, 4771, 3997, + 295, 89, 2940, 4135, 2465, 4943, 2384, 1532, 4031, + 432, 2780, 2324, 1955, 650, 3823, 579, 1494, 3572, + 2964, 1979, 4852, 1127, 1192, 889, 222, 671, 3453, + 1962, 4595, 3212, 586, 4168, 2579, 3461, 1106, 4800, + 2358, 2414, 4251, 397, 287, 2182, 1764, 418, 4791, + 2168, 275, 4941, 4882, 3149, 821, 4242, 566, 4967, + 29, 3985, 2466, 2115, 3207, 272, 1796, 2417, 3492, + 4461, 4627, 1541, 4936, 395, 4400, 4979, 2266, 1508, + 1989, 1683, 2790, 919, 3929, 1051, 2925, 4635, 4476, + 3859, 4324, 584, 618, 842, 4403, 920, 94, 1905, + 3142, 4072, 1674, 604, 3057, 4702, 3092, 771, 647, + 4159, 154, 1891, 4706, 2357, 1706, 2178, 3601, 4841, + 3097, 3951, 1596, 3943, 3078, 548, 2426, 4943, 1194, + 2117, 4443, 2183, 963, 4742, 3176, 3558, 4298, 2129, + 1114, 2496, 1402, 1731, 839, 533, 4051, 3877, 2228, + 446, 2118, 1274, 213, 3773, 4529, 1557], + [1729, 2596, 3580, 3542, 2637, 2996, 4731, 4049, 1920, + 2172, 2484, 4263, 4404, 4085, 1165, 776, 4837, 3523, + 597, 461, 2349, 2248, 295, 3984, 1082, 2679, 2078, + 3784, 1354, 3159, 3486, 2461, 4974, 3804, 458, 2541, + 4316, 140, 4915, 3184, 1097, 1094, 2274, 4808, 1695, + 3073, 3679, 2105, 906, 461, 2331, 7, 4486, 413, + 4650, 3304, 3491, 1852, 18, 4785, 2127, 339, 2424, + 2304, 2684, 1240, 4159, 2793, 3808, 4150, 303, 854, + 2582, 4244, 2277, 4173, 2683, 728, 2029, 1168, 2207, + 2327, 3098, 4527, 1324, 3814, 2499, 155, 889, 866, + 3599, 4105, 3802, 2395, 4294, 277, 1895, 1129, 2323, + 2367, 839, 3119, 2653, 2774, 2189, 3005, 1116, 4919, + 4029, 621, 4657, 2827, 629, 1182, 1949, 2031, 2702, + 2428, 1467, 2050, 3218, 2755, 907, 4589, 4916, 2339, + 1176, 1095, 4300, 757, 3992, 2512, 2759, 1180, 4161, + 475, 714, 1968, 134, 963, 191, 4771, 4933, 1787, + 664, 947, 659, 611, 298, 2950, 3358, 3157, 3130, + 2015, 770, 1161, 3348, 3689, 501, 1730, 3410, 4438, + 1768, 2438, 48, 931, 2715, 1678, 1567, 420, 3013, + 4221, 130, 763, 1188, 1851, 768, 2274, 2131, 2922, + 4242, 600, 2888, 4926, 1574, 3402, 4055, 3661, 2571, + 4718, 1525, 4288, 2043, 1506, 4427, 721, 2512, 1185, + 113, 1073, 4980, 4693, 4510, 4748, 3972, 431, 4963, + 2385, 3609, 697, 3839, 3423, 4628, 2907, 2428, 125, + 534, 2818, 1319, 3863, 3836, 1858, 2763, 2350, 775, + 4422, 3092, 3537, 1590, 2045, 1434, 1281, 649, 3948, + 3013, 2992, 1786, 4667, 3709, 1126, 922, 1886, 3684, + 4970, 1090, 2639, 1036, 1494, 2502, 2675]]), + values=tensor([0.5457, 0.2434, 0.7108, 0.7507, 0.7543, 0.4044, 0.2490, + 0.3484, 
0.0563, 0.8632, 0.8469, 0.6063, 0.4982, 0.7382, + 0.0920, 0.7934, 0.6275, 0.8451, 0.9765, 0.8422, 0.5453, + 0.6824, 0.3982, 0.3275, 0.3982, 0.6515, 0.9266, 0.0350, + 0.9226, 0.2044, 0.5409, 0.3159, 0.0524, 0.0868, 0.1924, + 0.2898, 0.6060, 0.7889, 0.4708, 0.3997, 0.7103, 0.2008, + 0.1922, 0.6889, 0.3362, 0.6076, 0.7152, 0.5431, 0.3115, + 0.8537, 0.8923, 0.0427, 0.2561, 0.1147, 0.2606, 0.0931, + 0.9942, 0.1117, 0.7539, 0.4265, 0.2354, 0.6347, 0.3712, + 0.0578, 0.7376, 0.5270, 0.5209, 0.9815, 0.7984, 0.5622, + 0.6952, 0.2874, 0.9946, 0.5702, 0.4925, 0.8830, 0.4735, + 0.4161, 0.3021, 0.2022, 0.1759, 0.6821, 0.0559, 0.5327, + 0.4970, 0.3094, 0.7372, 0.3752, 0.5387, 0.2290, 0.7585, + 0.5366, 0.3000, 0.4475, 0.8211, 0.2115, 0.9945, 0.1925, + 0.3862, 0.7093, 0.5246, 0.4338, 0.3784, 0.0832, 0.7334, + 0.4230, 0.3825, 0.3730, 0.2214, 0.1330, 0.2808, 0.5119, + 0.6352, 0.9224, 0.6875, 0.5134, 0.8407, 0.9158, 0.0674, + 0.5572, 0.9181, 0.0996, 0.5606, 0.2600, 0.3092, 0.4754, + 0.5580, 0.1983, 0.4106, 0.9771, 0.8674, 0.6024, 0.6179, + 0.8986, 0.1578, 0.3507, 0.4957, 0.5831, 0.8934, 0.2475, + 0.0158, 0.4860, 0.8212, 0.1045, 0.9247, 0.2765, 0.5018, + 0.1124, 0.7233, 0.7012, 0.9920, 0.3733, 0.2757, 0.8387, + 0.2556, 0.4277, 0.2407, 0.0032, 0.3344, 0.2063, 0.4992, + 0.3205, 0.9446, 0.9433, 0.5603, 0.9137, 0.0432, 0.7185, + 0.1399, 0.2737, 0.9596, 0.2396, 0.4630, 0.8879, 0.5789, + 0.2200, 0.6908, 0.8308, 0.5361, 0.0375, 0.5300, 0.7543, + 0.7717, 0.2598, 0.8762, 0.3597, 0.4314, 0.7718, 0.7415, + 0.8033, 0.2783, 0.1648, 0.9531, 0.7788, 0.9265, 0.8165, + 0.2968, 0.0515, 0.2449, 0.3942, 0.0584, 0.0407, 0.1599, + 0.6236, 0.5319, 0.6488, 0.4229, 0.3770, 0.9082, 0.7215, + 0.4753, 0.7735, 0.5148, 0.0310, 0.2633, 0.8698, 0.1896, + 0.1179, 0.7654, 0.3419, 0.9451, 0.2713, 0.0607, 0.6297, + 0.0023, 0.7314, 0.9668, 0.1747, 0.6239, 0.7830, 0.7407, + 0.2902, 0.1373, 0.5356, 0.9106, 0.6094, 0.3987, 0.1564, + 0.1719, 0.3329, 0.5923, 0.4512, 0.0359, 0.8957, 0.9353, + 0.5149, 0.5836, 0.2805, 0.0599, 0.7824]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.5826, 0.7495, 0.7612, ..., 0.8960, 0.6838, 0.9753]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.078837871551514 seconds + +tensor(indices=tensor([[1848, 36, 479, 2449, 3061, 251, 2083, 3046, 235, + 1185, 2706, 3120, 3567, 4164, 2909, 702, 3592, 3705, + 865, 2916, 116, 1813, 2061, 1523, 4316, 3653, 1626, + 4342, 2735, 1397, 4191, 1087, 126, 3173, 3885, 4722, + 2608, 788, 3007, 2468, 941, 3247, 217, 2033, 2510, + 3255, 2538, 2651, 3742, 1824, 2891, 4469, 723, 3514, + 2400, 3230, 4149, 4751, 964, 632, 1969, 536, 127, + 3492, 2888, 2918, 1383, 4700, 3723, 1892, 3165, 4079, + 562, 2995, 4446, 2548, 554, 1110, 661, 1697, 844, + 1650, 2447, 181, 418, 3376, 3151, 610, 3506, 3940, + 4334, 3905, 3359, 501, 1193, 393, 2829, 4436, 32, + 1048, 1449, 262, 2442, 707, 3973, 693, 4771, 3997, + 295, 89, 2940, 4135, 2465, 4943, 2384, 1532, 4031, + 432, 2780, 2324, 1955, 650, 3823, 579, 1494, 3572, + 2964, 1979, 4852, 1127, 1192, 889, 222, 671, 3453, + 1962, 4595, 3212, 586, 4168, 2579, 3461, 1106, 4800, + 2358, 2414, 4251, 397, 287, 2182, 1764, 418, 4791, + 2168, 275, 4941, 4882, 3149, 821, 4242, 566, 4967, + 29, 3985, 2466, 2115, 3207, 272, 1796, 2417, 3492, + 4461, 4627, 1541, 4936, 395, 4400, 4979, 2266, 1508, + 1989, 1683, 2790, 919, 3929, 1051, 2925, 4635, 4476, + 3859, 4324, 584, 618, 842, 4403, 920, 94, 1905, + 3142, 4072, 1674, 604, 3057, 4702, 3092, 771, 647, + 4159, 154, 
1891, 4706, 2357, 1706, 2178, 3601, 4841, + 3097, 3951, 1596, 3943, 3078, 548, 2426, 4943, 1194, + 2117, 4443, 2183, 963, 4742, 3176, 3558, 4298, 2129, + 1114, 2496, 1402, 1731, 839, 533, 4051, 3877, 2228, + 446, 2118, 1274, 213, 3773, 4529, 1557], + [1729, 2596, 3580, 3542, 2637, 2996, 4731, 4049, 1920, + 2172, 2484, 4263, 4404, 4085, 1165, 776, 4837, 3523, + 597, 461, 2349, 2248, 295, 3984, 1082, 2679, 2078, + 3784, 1354, 3159, 3486, 2461, 4974, 3804, 458, 2541, + 4316, 140, 4915, 3184, 1097, 1094, 2274, 4808, 1695, + 3073, 3679, 2105, 906, 461, 2331, 7, 4486, 413, + 4650, 3304, 3491, 1852, 18, 4785, 2127, 339, 2424, + 2304, 2684, 1240, 4159, 2793, 3808, 4150, 303, 854, + 2582, 4244, 2277, 4173, 2683, 728, 2029, 1168, 2207, + 2327, 3098, 4527, 1324, 3814, 2499, 155, 889, 866, + 3599, 4105, 3802, 2395, 4294, 277, 1895, 1129, 2323, + 2367, 839, 3119, 2653, 2774, 2189, 3005, 1116, 4919, + 4029, 621, 4657, 2827, 629, 1182, 1949, 2031, 2702, + 2428, 1467, 2050, 3218, 2755, 907, 4589, 4916, 2339, + 1176, 1095, 4300, 757, 3992, 2512, 2759, 1180, 4161, + 475, 714, 1968, 134, 963, 191, 4771, 4933, 1787, + 664, 947, 659, 611, 298, 2950, 3358, 3157, 3130, + 2015, 770, 1161, 3348, 3689, 501, 1730, 3410, 4438, + 1768, 2438, 48, 931, 2715, 1678, 1567, 420, 3013, + 4221, 130, 763, 1188, 1851, 768, 2274, 2131, 2922, + 4242, 600, 2888, 4926, 1574, 3402, 4055, 3661, 2571, + 4718, 1525, 4288, 2043, 1506, 4427, 721, 2512, 1185, + 113, 1073, 4980, 4693, 4510, 4748, 3972, 431, 4963, + 2385, 3609, 697, 3839, 3423, 4628, 2907, 2428, 125, + 534, 2818, 1319, 3863, 3836, 1858, 2763, 2350, 775, + 4422, 3092, 3537, 1590, 2045, 1434, 1281, 649, 3948, + 3013, 2992, 1786, 4667, 3709, 1126, 922, 1886, 3684, + 4970, 1090, 2639, 1036, 1494, 2502, 2675]]), + values=tensor([0.5457, 0.2434, 0.7108, 0.7507, 0.7543, 0.4044, 0.2490, + 0.3484, 0.0563, 0.8632, 0.8469, 0.6063, 0.4982, 0.7382, + 0.0920, 0.7934, 0.6275, 0.8451, 0.9765, 0.8422, 0.5453, + 0.6824, 0.3982, 0.3275, 0.3982, 0.6515, 0.9266, 0.0350, + 0.9226, 0.2044, 0.5409, 0.3159, 0.0524, 0.0868, 0.1924, + 0.2898, 0.6060, 0.7889, 0.4708, 0.3997, 0.7103, 0.2008, + 0.1922, 0.6889, 0.3362, 0.6076, 0.7152, 0.5431, 0.3115, + 0.8537, 0.8923, 0.0427, 0.2561, 0.1147, 0.2606, 0.0931, + 0.9942, 0.1117, 0.7539, 0.4265, 0.2354, 0.6347, 0.3712, + 0.0578, 0.7376, 0.5270, 0.5209, 0.9815, 0.7984, 0.5622, + 0.6952, 0.2874, 0.9946, 0.5702, 0.4925, 0.8830, 0.4735, + 0.4161, 0.3021, 0.2022, 0.1759, 0.6821, 0.0559, 0.5327, + 0.4970, 0.3094, 0.7372, 0.3752, 0.5387, 0.2290, 0.7585, + 0.5366, 0.3000, 0.4475, 0.8211, 0.2115, 0.9945, 0.1925, + 0.3862, 0.7093, 0.5246, 0.4338, 0.3784, 0.0832, 0.7334, + 0.4230, 0.3825, 0.3730, 0.2214, 0.1330, 0.2808, 0.5119, + 0.6352, 0.9224, 0.6875, 0.5134, 0.8407, 0.9158, 0.0674, + 0.5572, 0.9181, 0.0996, 0.5606, 0.2600, 0.3092, 0.4754, + 0.5580, 0.1983, 0.4106, 0.9771, 0.8674, 0.6024, 0.6179, + 0.8986, 0.1578, 0.3507, 0.4957, 0.5831, 0.8934, 0.2475, + 0.0158, 0.4860, 0.8212, 0.1045, 0.9247, 0.2765, 0.5018, + 0.1124, 0.7233, 0.7012, 0.9920, 0.3733, 0.2757, 0.8387, + 0.2556, 0.4277, 0.2407, 0.0032, 0.3344, 0.2063, 0.4992, + 0.3205, 0.9446, 0.9433, 0.5603, 0.9137, 0.0432, 0.7185, + 0.1399, 0.2737, 0.9596, 0.2396, 0.4630, 0.8879, 0.5789, + 0.2200, 0.6908, 0.8308, 0.5361, 0.0375, 0.5300, 0.7543, + 0.7717, 0.2598, 0.8762, 0.3597, 0.4314, 0.7718, 0.7415, + 0.8033, 0.2783, 0.1648, 0.9531, 0.7788, 0.9265, 0.8165, + 0.2968, 0.0515, 0.2449, 0.3942, 0.0584, 0.0407, 0.1599, + 0.6236, 0.5319, 0.6488, 0.4229, 0.3770, 0.9082, 0.7215, + 0.4753, 0.7735, 0.5148, 0.0310, 0.2633, 
0.8698, 0.1896, + 0.1179, 0.7654, 0.3419, 0.9451, 0.2713, 0.0607, 0.6297, + 0.0023, 0.7314, 0.9668, 0.1747, 0.6239, 0.7830, 0.7407, + 0.2902, 0.1373, 0.5356, 0.9106, 0.6094, 0.3987, 0.1564, + 0.1719, 0.3329, 0.5923, 0.4512, 0.0359, 0.8957, 0.9353, + 0.5149, 0.5836, 0.2805, 0.0599, 0.7824]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.5826, 0.7495, 0.7612, ..., 0.8960, 0.6838, 0.9753]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.078837871551514 seconds + +[16.6, 16.44, 16.32, 16.32, 16.32, 16.44, 16.48, 16.44, 16.68, 16.96] +[16.64, 16.56, 17.52, 18.2, 20.08, 20.96, 21.68, 21.08, 20.84, 20.84, 19.68, 19.76, 19.56] +13.52843976020813 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 644136, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.078837871551514, 'TIME_S_1KI': 0.015647065016629276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 245.636281709671, 'W': 18.157029640045646} +[16.6, 16.44, 16.32, 16.32, 16.32, 16.44, 16.48, 16.44, 16.68, 16.96, 16.48, 16.6, 16.6, 16.6, 16.44, 16.12, 16.12, 16.24, 16.36, 16.52] +295.8 +14.790000000000001 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 644136, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.078837871551514, 'TIME_S_1KI': 0.015647065016629276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 245.636281709671, 'W': 18.157029640045646, 'J_1KI': 0.3813422657787657, 'W_1KI': 0.028188192617778926, 'W_D': 3.3670296400456454, 'J_D': 45.550657656192776, 'W_D_1KI': 0.005227203013099167, 'J_D_1KI': 8.115061125444266e-06} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..0c86915 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 287374, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.518575429916382, "TIME_S_1KI": 0.0366023907170321, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 276.1757572364807, "W": 18.95351584212515, "J_1KI": 0.9610325124627862, "W_1KI": 0.06595417762958775, "W_D": 4.070515842125149, "J_D": 59.31236211824414, "W_D_1KI": 0.014164523729095702, "J_D_1KI": 4.928951028658021e-05} diff --git a/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..f14bea5 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.006743192672729492} + +tensor(indices=tensor([[3198, 1885, 
1309, ..., 2676, 1455, 1063], + [ 524, 2229, 4905, ..., 1515, 192, 4926]]), + values=tensor([0.2679, 0.5733, 0.1409, ..., 0.2159, 0.8027, 0.0882]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.3135, 0.4870, 0.4806, ..., 0.5781, 0.1076, 0.9212]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.006743192672729492 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 15571 -ss 5000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.568927526473999} + +tensor(indices=tensor([[1908, 3569, 1137, ..., 1588, 848, 1956], + [2393, 1784, 2395, ..., 970, 479, 2573]]), + values=tensor([0.6049, 0.0335, 0.2526, ..., 0.1747, 0.3877, 0.9979]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.2894, 0.0542, 0.1429, ..., 0.0093, 0.6267, 0.8974]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.568927526473999 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 287374 -ss 5000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.518575429916382} + +tensor(indices=tensor([[4672, 3564, 2873, ..., 1014, 1640, 4822], + [3531, 3077, 2005, ..., 4945, 1554, 4576]]), + values=tensor([0.2925, 0.5202, 0.6644, ..., 0.2251, 0.7779, 0.3865]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.3673, 0.6026, 0.5524, ..., 0.9329, 0.4025, 0.0452]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.518575429916382 seconds + +tensor(indices=tensor([[4672, 3564, 2873, ..., 1014, 1640, 4822], + [3531, 3077, 2005, ..., 4945, 1554, 4576]]), + values=tensor([0.2925, 0.5202, 0.6644, ..., 0.2251, 0.7779, 0.3865]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.3673, 0.6026, 0.5524, ..., 0.9329, 0.4025, 0.0452]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.518575429916382 seconds + +[16.36, 16.6, 16.72, 16.76, 16.84, 16.76, 16.72, 16.68, 16.52, 16.6] +[16.64, 16.56, 16.56, 20.08, 20.92, 22.72, 23.6, 24.28, 21.56, 20.32, 20.4, 19.84, 19.72, 19.88] +14.571215152740479 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 287374, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.518575429916382, 'TIME_S_1KI': 0.0366023907170321, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 276.1757572364807, 'W': 18.95351584212515} +[16.36, 16.6, 16.72, 16.76, 16.84, 16.76, 16.72, 16.68, 16.52, 16.6, 16.8, 16.72, 16.48, 16.04, 16.28, 16.32, 16.32, 16.24, 16.52, 16.52] +297.66 +14.883000000000001 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 287374, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 
'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.518575429916382, 'TIME_S_1KI': 0.0366023907170321, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 276.1757572364807, 'W': 18.95351584212515, 'J_1KI': 0.9610325124627862, 'W_1KI': 0.06595417762958775, 'W_D': 4.070515842125149, 'J_D': 59.31236211824414, 'W_D_1KI': 0.014164523729095702, 'J_D_1KI': 4.928951028658021e-05} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.json index 1bdafea..86e8888 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 426, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.062527418136597, "TIME_S_1KI": 23.62095638060234, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 310.93658995628357, "W": 22.910177826692795, "J_1KI": 729.8980984889287, "W_1KI": 53.77976015655585, "W_D": 4.4831778266927955, "J_D": 60.845622244596484, "W_D_1KI": 10.52389161195492, "J_D_1KI": 24.70397092008197} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 451, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.429136514663696, "TIME_S_1KI": 23.12447120768004, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 326.846988697052, "W": 22.30127348306853, "J_1KI": 724.7161611908026, "W_1KI": 49.44849996245794, "W_D": 3.7152734830685326, "J_D": 54.45096895694735, "W_D_1KI": 8.23785694693688, "J_D_1KI": 18.265758197199293} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.output index 6496241..f8bc4a8 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.4617955684661865} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.325932502746582} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 23, ..., 999981, - 999993, 1000000]), - col_indices=tensor([ 4955, 8657, 25975, ..., 77712, 83219, 89598]), - values=tensor([0.6839, 0.0631, 0.2295, ..., 0.4308, 0.9509, 0.3745]), +tensor(crow_indices=tensor([ 0, 6, 14, ..., 999981, + 999990, 1000000]), + col_indices=tensor([22036, 42901, 43352, ..., 54182, 64519, 79020]), + values=tensor([0.6047, 0.8603, 0.3318, ..., 0.9013, 0.6992, 0.1471]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8318, 0.0587, 0.7825, ..., 0.4905, 0.7506, 0.0148]) +tensor([0.6244, 0.4084, 0.4276, ..., 0.6633, 0.7989, 0.2197]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 2.4617955684661865 seconds +Time: 2.325932502746582 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 426 -ss 100000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.062527418136597} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 451 -ss 100000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.429136514663696} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 16, ..., 999978, - 999987, 1000000]), - col_indices=tensor([ 4266, 12843, 25231, ..., 84479, 87700, 95752]), - values=tensor([0.9986, 0.4680, 0.6719, ..., 0.1198, 0.1607, 0.3222]), +tensor(crow_indices=tensor([ 0, 14, 30, ..., 999979, + 999990, 1000000]), + col_indices=tensor([ 3062, 9122, 10011, ..., 76699, 91842, 98883]), + values=tensor([0.0641, 0.1904, 0.9530, ..., 0.2612, 0.7327, 0.9970]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1538, 0.6601, 0.2448, ..., 0.8405, 0.0282, 0.7640]) +tensor([0.3371, 0.7219, 0.6879, ..., 0.7440, 0.2171, 0.2377]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.062527418136597 seconds +Time: 10.429136514663696 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 16, ..., 999978, - 999987, 1000000]), - col_indices=tensor([ 4266, 12843, 25231, ..., 84479, 87700, 95752]), - values=tensor([0.9986, 0.4680, 0.6719, ..., 0.1198, 0.1607, 0.3222]), +tensor(crow_indices=tensor([ 0, 14, 30, ..., 999979, + 999990, 1000000]), + col_indices=tensor([ 3062, 9122, 10011, ..., 76699, 91842, 98883]), + values=tensor([0.0641, 0.1904, 0.9530, ..., 0.2612, 0.7327, 0.9970]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1538, 0.6601, 0.2448, ..., 0.8405, 0.0282, 0.7640]) +tensor([0.3371, 0.7219, 0.6879, ..., 0.7440, 0.2171, 0.2377]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.062527418136597 seconds +Time: 10.429136514663696 seconds -[20.6, 20.44, 20.32, 20.4, 20.44, 20.84, 21.0, 20.88, 20.52, 20.4] -[20.4, 20.24, 20.28, 24.24, 25.56, 27.32, 28.24, 28.56, 25.84, 24.76, 24.56, 24.68, 24.68] -13.571985006332397 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 426, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.062527418136597, 'TIME_S_1KI': 23.62095638060234, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 310.93658995628357, 'W': 22.910177826692795} -[20.6, 20.44, 20.32, 20.4, 20.44, 20.84, 21.0, 20.88, 20.52, 20.4, 20.64, 20.56, 20.4, 20.2, 20.36, 20.36, 20.12, 20.2, 20.4, 20.56] -368.53999999999996 -18.427 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 426, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.062527418136597, 'TIME_S_1KI': 23.62095638060234, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 310.93658995628357, 'W': 22.910177826692795, 'J_1KI': 729.8980984889287, 'W_1KI': 53.77976015655585, 'W_D': 4.4831778266927955, 'J_D': 60.845622244596484, 'W_D_1KI': 10.52389161195492, 'J_D_1KI': 24.70397092008197} +[20.6, 20.72, 21.0, 21.12, 21.12, 21.0, 20.88, 20.48, 20.52, 20.48] +[20.56, 20.56, 21.24, 22.16, 24.12, 25.08, 25.96, 25.56, 25.32, 24.28, 24.44, 24.44, 24.6, 24.92] +14.655978679656982 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 451, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.429136514663696, 'TIME_S_1KI': 23.12447120768004, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 326.846988697052, 'W': 22.30127348306853} +[20.6, 20.72, 21.0, 21.12, 21.12, 21.0, 20.88, 20.48, 20.52, 20.48, 20.28, 20.24, 20.28, 20.36, 20.52, 20.64, 20.76, 20.64, 20.52, 20.48] +371.71999999999997 +18.586 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 451, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.429136514663696, 'TIME_S_1KI': 23.12447120768004, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 326.846988697052, 'W': 22.30127348306853, 'J_1KI': 724.7161611908026, 'W_1KI': 49.44849996245794, 'W_D': 3.7152734830685326, 'J_D': 54.45096895694735, 'W_D_1KI': 8.23785694693688, 'J_D_1KI': 18.265758197199293} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.json index 9db277a..ccc36b0 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 22.583206176757812, "TIME_S_1KI": 225.83206176757812, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 712.6889356327057, "W": 23.468990304831248, "J_1KI": 7126.889356327057, "W_1KI": 234.68990304831246, "W_D": 4.866990304831251, "J_D": 147.7971610636712, "W_D_1KI": 48.66990304831251, "J_D_1KI": 486.6990304831251} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 23.465809106826782, "TIME_S_1KI": 234.65809106826782, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 674.7585023498535, "W": 22.99999113724053, "J_1KI": 6747.5850234985355, "W_1KI": 229.9999113724053, "W_D": 4.333991137240535, "J_D": 127.1477606887818, "W_D_1KI": 43.339911372405346, "J_D_1KI": 433.3991137240535} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.output index 1c6b2fe..7117084 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 22.583206176757812} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 23.465809106826782} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 115, 205, ..., 9999778, - 9999875, 10000000]), - col_indices=tensor([ 1402, 2097, 3965, ..., 98532, 99293, 99429]), - values=tensor([0.3375, 0.2900, 0.6603, ..., 0.1611, 0.9536, 0.6072]), +tensor(crow_indices=tensor([ 0, 104, 211, ..., 9999807, + 9999895, 10000000]), + col_indices=tensor([ 67, 3512, 6711, ..., 97065, 98136, 98202]), + values=tensor([0.4389, 0.5287, 0.7636, ..., 0.3983, 0.3336, 0.7095]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8425, 0.9618, 0.5102, ..., 0.7524, 0.4133, 0.9192]) +tensor([0.8197, 0.9888, 0.1798, ..., 0.6227, 0.7072, 0.7961]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,16 +16,16 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 22.583206176757812 seconds +Time: 23.465809106826782 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 115, 205, ..., 9999778, - 9999875, 10000000]), - col_indices=tensor([ 1402, 2097, 3965, ..., 98532, 99293, 99429]), - values=tensor([0.3375, 0.2900, 0.6603, ..., 0.1611, 0.9536, 0.6072]), +tensor(crow_indices=tensor([ 0, 104, 211, ..., 9999807, + 9999895, 10000000]), + col_indices=tensor([ 67, 3512, 6711, ..., 97065, 98136, 98202]), + values=tensor([0.4389, 0.5287, 0.7636, ..., 0.3983, 0.3336, 0.7095]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8425, 0.9618, 0.5102, ..., 0.7524, 0.4133, 0.9192]) +tensor([0.8197, 0.9888, 0.1798, ..., 0.6227, 0.7072, 0.7961]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -33,13 +33,13 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 22.583206176757812 seconds +Time: 23.465809106826782 seconds -[20.72, 21.0, 21.0, 21.04, 20.76, 20.44, 20.16, 20.32, 20.24, 20.52] -[20.4, 20.36, 21.96, 22.92, 24.92, 26.56, 28.48, 28.48, 28.88, 28.96, 27.92, 26.48, 25.56, 24.72, 24.64, 24.76, 24.96, 24.76, 24.6, 24.56, 24.6, 24.36, 24.76, 24.88, 24.92, 24.68, 24.6, 24.36, 24.04] -30.367260217666626 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 22.583206176757812, 'TIME_S_1KI': 225.83206176757812, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 712.6889356327057, 'W': 23.468990304831248} -[20.72, 21.0, 21.0, 21.04, 20.76, 20.44, 20.16, 20.32, 20.24, 20.52, 20.24, 20.24, 20.2, 20.4, 20.64, 21.08, 21.24, 21.12, 21.0, 20.84] -372.03999999999996 -18.601999999999997 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 22.583206176757812, 'TIME_S_1KI': 225.83206176757812, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 712.6889356327057, 'W': 23.468990304831248, 'J_1KI': 7126.889356327057, 'W_1KI': 234.68990304831246, 'W_D': 4.866990304831251, 'J_D': 147.7971610636712, 
'W_D_1KI': 48.66990304831251, 'J_D_1KI': 486.6990304831251} +[20.6, 20.76, 20.68, 20.68, 20.8, 20.8, 20.8, 20.92, 20.88, 20.52] +[20.24, 20.24, 20.72, 21.88, 23.68, 25.04, 27.2, 28.28, 28.24, 27.24, 26.04, 25.4, 24.8, 24.8, 24.92, 24.84, 24.52, 24.48, 24.36, 24.28, 24.6, 24.84, 24.96, 24.72, 24.64, 24.52, 24.8, 24.6] +29.337337493896484 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 23.465809106826782, 'TIME_S_1KI': 234.65809106826782, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 674.7585023498535, 'W': 22.99999113724053} +[20.6, 20.76, 20.68, 20.68, 20.8, 20.8, 20.8, 20.92, 20.88, 20.52, 20.48, 20.52, 20.76, 21.0, 20.76, 20.8, 20.8, 20.76, 20.64, 20.32] +373.31999999999994 +18.665999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 23.465809106826782, 'TIME_S_1KI': 234.65809106826782, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 674.7585023498535, 'W': 22.99999113724053, 'J_1KI': 6747.5850234985355, 'W_1KI': 229.9999113724053, 'W_D': 4.333991137240535, 'J_D': 127.1477606887818, 'W_D_1KI': 43.339911372405346, 'J_D_1KI': 433.3991137240535} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..c2d358d --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 225.18899512290955, "TIME_S_1KI": 2251.8899512290955, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6281.944063549044, "W": 24.226548404040123, "J_1KI": 62819.440635490435, "W_1KI": 242.26548404040125, "W_D": 5.685548404040119, "J_D": 1474.262715807439, "W_D_1KI": 56.855484040401194, "J_D_1KI": 568.5548404040119} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..f52b83d --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 225.18899512290955} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 978, 1954, ..., + 99998005, 99998965, 100000000]), + col_indices=tensor([ 209, 344, 348, ..., 99501, 99539, 99765]), + values=tensor([0.9950, 0.2460, 0.5493, ..., 0.2748, 0.4865, 0.5723]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.9371, 0.2528, 0.1068, ..., 0.3070, 0.8056, 0.1542]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 225.18899512290955 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 978, 1954, ..., + 99998005, 99998965, 100000000]), + col_indices=tensor([ 209, 344, 348, ..., 99501, 99539, 99765]), + values=tensor([0.9950, 0.2460, 0.5493, ..., 0.2748, 0.4865, 0.5723]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.9371, 0.2528, 0.1068, ..., 0.3070, 0.8056, 0.1542]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 225.18899512290955 seconds + +[22.56, 22.04, 21.08, 20.72, 20.6, 20.8, 20.88, 20.84, 20.6, 20.52] +[20.32, 20.16, 23.24, 23.24, 24.44, 27.2, 28.68, 29.56, 27.16, 26.44, 27.56, 29.08, 31.16, 30.96, 30.08, 28.12, 26.16, 25.6, 26.4, 28.84, 31.16, 32.92, 33.4, 32.0, 30.72, 30.72, 29.72, 28.08, 27.88, 27.2, 26.56, 26.96, 27.92, 28.84, 29.48, 30.08, 29.8, 29.44, 28.88, 28.36, 27.68, 26.88, 26.2, 25.88, 24.92, 24.84, 25.04, 25.04, 25.04, 24.88, 24.84, 24.88, 24.6, 24.68, 24.84, 24.76, 24.92, 25.04, 24.88, 24.64, 24.64, 24.64, 24.88, 24.76, 24.88, 24.92, 24.96, 25.0, 24.96, 24.96, 25.08, 25.2, 24.92, 25.52, 25.56, 25.6, 25.48, 25.64, 25.2, 25.08, 24.96, 25.04, 25.0, 25.2, 25.28, 25.08, 24.96, 24.84, 25.08, 25.12, 25.36, 25.36, 25.56, 25.52, 25.52, 25.6, 25.56, 25.28, 25.2, 25.08, 24.84, 24.88, 24.92, 25.12, 24.96, 25.12, 25.0, 25.08, 25.04, 25.08, 25.12, 25.08, 24.96, 24.96, 24.8, 24.72, 24.72, 24.68, 24.64, 24.68, 25.0, 24.84, 25.0, 24.96, 24.84, 24.76, 25.04, 24.88, 25.2, 25.08, 24.96, 24.76, 24.76, 24.92, 24.8, 24.8, 24.96, 25.0, 25.12, 25.04, 25.16, 24.96, 24.96, 24.64, 24.72, 24.92, 25.04, 25.04, 24.92, 24.64, 24.72, 24.56, 24.64, 24.64, 24.84, 24.8, 24.72, 24.72, 24.88, 25.12, 25.08, 25.2, 25.32, 25.12, 25.12, 25.24, 25.08, 24.88, 25.16, 25.04, 24.92, 25.12, 25.08, 24.76, 25.16, 25.08, 25.24, 25.32, 25.48, 25.48, 25.32, 25.24, 25.2, 24.92, 24.8, 24.72, 24.76, 24.68, 24.56, 24.88, 24.8, 24.68, 24.8, 24.6, 24.44, 24.64, 25.0, 24.96, 25.08, 25.0, 24.8, 24.8, 24.56, 24.44, 24.68, 24.64, 24.68, 24.92, 25.0, 25.12, 25.2, 25.0, 24.92, 24.72, 24.6, 25.04, 25.2, 25.4, 25.32, 25.2, 25.04, 24.8, 24.72, 24.72, 24.68, 24.68, 24.76, 24.8, 24.92, 24.92, 24.72, 24.76, 24.84, 24.76, 25.0, 25.12, 25.0, 24.92, 24.92, 24.68, 24.72, 24.52, 24.68, 24.72, 24.76, 24.76, 24.68] +259.30000257492065 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 
'TIME_S': 225.18899512290955, 'TIME_S_1KI': 2251.8899512290955, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6281.944063549044, 'W': 24.226548404040123} +[22.56, 22.04, 21.08, 20.72, 20.6, 20.8, 20.88, 20.84, 20.6, 20.52, 20.16, 20.36, 20.12, 19.92, 19.96, 20.16, 20.2, 20.24, 20.48, 20.4] +370.82000000000005 +18.541000000000004 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 225.18899512290955, 'TIME_S_1KI': 2251.8899512290955, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6281.944063549044, 'W': 24.226548404040123, 'J_1KI': 62819.440635490435, 'W_1KI': 242.26548404040125, 'W_D': 5.685548404040119, 'J_D': 1474.262715807439, 'W_D_1KI': 56.855484040401194, 'J_D_1KI': 568.5548404040119} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.json index 21e4694..d3bede1 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3104, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.44502592086792, "TIME_S_1KI": 3.3650212373930155, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 348.514190454483, "W": 23.934119588627873, "J_1KI": 112.27905620311952, "W_1KI": 7.710734403552794, "W_D": 5.569119588627871, "J_D": 81.09415505290025, "W_D_1KI": 1.794175125202278, "J_D_1KI": 0.578020336727538} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3013, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.526468276977539, "TIME_S_1KI": 3.4936834639819243, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 342.6458623886109, "W": 23.39607518698974, "J_1KI": 113.72249000617687, "W_1KI": 7.7650432084267305, "W_D": 5.13707518698974, "J_D": 75.2347367467881, "W_D_1KI": 1.7049701915000797, "J_D_1KI": 0.5658712882509392} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.output index 9aacb6b..8bf6a02 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.3382716178894043} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.3484072685241699} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 99999, 100000, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99998, 100000, 100000]), - col_indices=tensor([91034, 37166, 45389, ..., 40200, 40353, 102]), - values=tensor([0.2917, 0.4189, 0.5553, ..., 0.7170, 0.1120, 0.1885]), + col_indices=tensor([31509, 39574, 2838, ..., 40555, 30553, 63171]), + values=tensor([0.0296, 0.4364, 0.3072, ..., 0.7619, 0.5758, 0.8083]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2165, 0.9661, 0.1946, ..., 0.3640, 0.8184, 0.1773]) +tensor([0.7050, 0.3658, 0.2226, ..., 0.8215, 0.3821, 0.1635]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.3382716178894043 seconds +Time: 0.3484072685241699 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3104 -ss 100000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.44502592086792} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3013 -ss 100000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.526468276977539} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 2, 2, ..., 99997, 99998, 100000]), - col_indices=tensor([ 252, 18132, 64781, ..., 90653, 85542, 48452]), - values=tensor([0.2676, 0.4026, 0.9927, ..., 0.1189, 0.3190, 0.1177]), + col_indices=tensor([49537, 94006, 52089, ..., 12901, 18523, 59775]), + values=tensor([0.4676, 0.9884, 0.1511, ..., 0.3386, 0.6520, 0.3542]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6783, 0.3478, 0.4100, ..., 0.2741, 0.0736, 0.8098]) +tensor([0.1323, 0.2430, 0.2483, ..., 0.0221, 0.3749, 0.4623]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.44502592086792 seconds +Time: 10.526468276977539 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 2, 2, ..., 99997, 99998, 100000]), - col_indices=tensor([ 252, 18132, 64781, ..., 90653, 85542, 48452]), - values=tensor([0.2676, 0.4026, 0.9927, ..., 0.1189, 0.3190, 0.1177]), + col_indices=tensor([49537, 94006, 52089, ..., 12901, 18523, 59775]), + values=tensor([0.4676, 0.9884, 0.1511, ..., 0.3386, 0.6520, 0.3542]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6783, 0.3478, 0.4100, ..., 0.2741, 0.0736, 0.8098]) +tensor([0.1323, 0.2430, 0.2483, ..., 0.0221, 0.3749, 0.4623]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.44502592086792 seconds +Time: 10.526468276977539 seconds -[20.36, 20.36, 20.36, 20.36, 20.36, 20.52, 20.6, 20.76, 20.96, 20.92] -[20.92, 20.6, 23.92, 25.4, 27.24, 28.24, 28.24, 29.08, 26.32, 25.76, 25.4, 25.2, 25.44, 25.64] -14.56139588356018 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3104, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.44502592086792, 'TIME_S_1KI': 3.3650212373930155, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.514190454483, 'W': 23.934119588627873} -[20.36, 20.36, 20.36, 20.36, 20.36, 20.52, 20.6, 20.76, 20.96, 20.92, 20.24, 20.2, 20.48, 20.2, 20.2, 20.2, 20.12, 20.28, 20.28, 20.6] -367.30000000000007 -18.365000000000002 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3104, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.44502592086792, 'TIME_S_1KI': 3.3650212373930155, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.514190454483, 'W': 23.934119588627873, 'J_1KI': 112.27905620311952, 'W_1KI': 7.710734403552794, 'W_D': 5.569119588627871, 'J_D': 81.09415505290025, 'W_D_1KI': 1.794175125202278, 'J_D_1KI': 0.578020336727538} +[20.36, 20.28, 20.28, 20.36, 20.48, 20.64, 20.6, 20.6, 20.32, 20.12] +[20.04, 20.24, 23.28, 24.48, 26.12, 27.16, 28.36, 25.92, 25.92, 25.8, 25.2, 25.28, 25.68, 25.48] +14.645442008972168 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3013, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.526468276977539, 'TIME_S_1KI': 3.4936834639819243, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 342.6458623886109, 'W': 23.39607518698974} +[20.36, 20.28, 20.28, 20.36, 20.48, 20.64, 20.6, 20.6, 20.32, 20.12, 20.04, 19.96, 19.8, 20.16, 20.2, 20.04, 20.2, 20.44, 20.32, 20.48] +365.18 +18.259 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3013, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.526468276977539, 'TIME_S_1KI': 3.4936834639819243, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 342.6458623886109, 'W': 23.39607518698974, 'J_1KI': 113.72249000617687, 'W_1KI': 7.7650432084267305, 'W_D': 5.13707518698974, 'J_D': 75.2347367467881, 
'W_D_1KI': 1.7049701915000797, 'J_D_1KI': 0.5658712882509392} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_5e-05.json index 9e7c05a..234b80f 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 868, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.049697399139404, "TIME_S_1KI": 12.730066128040788, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 347.2841278934479, "W": 23.818433138573972, "J_1KI": 400.0969215362303, "W_1KI": 27.44059117347232, "W_D": 5.198433138573975, "J_D": 75.79563728809362, "W_D_1KI": 5.988978270246515, "J_D_1KI": 6.899744550975249} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 825, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.260325908660889, "TIME_S_1KI": 12.436758677164715, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 303.46346472740174, "W": 22.318076871815833, "J_1KI": 367.8345026998809, "W_1KI": 27.052214390079797, "W_D": 3.8030768718158328, "J_D": 51.71121556639671, "W_D_1KI": 4.609790147655556, "J_D_1KI": 5.587624421400673} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_5e-05.output index 5c6adba..89a3369 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.209580421447754} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.2715063095092773} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 11, ..., 499991, 499997, +tensor(crow_indices=tensor([ 0, 4, 6, ..., 499995, 499997, 500000]), - col_indices=tensor([ 8709, 33303, 39829, ..., 65447, 85964, 93697]), - values=tensor([0.2765, 0.3303, 0.4846, ..., 0.1571, 0.7749, 0.0327]), + col_indices=tensor([19868, 76207, 77915, ..., 16706, 63731, 82229]), + values=tensor([0.0924, 0.2177, 0.7599, ..., 0.8921, 0.4124, 0.6805]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.2847, 0.7446, 0.1507, ..., 0.7274, 0.5755, 0.0187]) +tensor([0.4234, 0.3489, 0.0579, ..., 0.4277, 0.7771, 0.0656]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 1.209580421447754 seconds +Time: 1.2715063095092773 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 868 -ss 100000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.049697399139404} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 825 -ss 100000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.260325908660889} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 19, ..., 499985, 499993, +tensor(crow_indices=tensor([ 0, 5, 13, ..., 499993, 499996, 500000]), - col_indices=tensor([ 930, 5720, 18229, ..., 18263, 29630, 53753]), - values=tensor([0.0983, 0.1468, 0.4729, ..., 0.5988, 0.3077, 0.5585]), + col_indices=tensor([ 8610, 58283, 67606, ..., 37260, 87311, 96234]), + values=tensor([0.5221, 0.7908, 0.9284, ..., 0.4846, 0.8145, 0.6299]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.4408, 0.1732, 0.5273, ..., 0.8772, 0.6136, 0.9894]) +tensor([0.0481, 0.3492, 0.7798, ..., 0.2984, 0.7068, 0.8860]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 11.049697399139404 seconds +Time: 10.260325908660889 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 19, ..., 499985, 499993, +tensor(crow_indices=tensor([ 0, 5, 13, ..., 499993, 499996, 500000]), - col_indices=tensor([ 930, 5720, 18229, ..., 18263, 29630, 53753]), - values=tensor([0.0983, 0.1468, 0.4729, ..., 0.5988, 0.3077, 0.5585]), + col_indices=tensor([ 8610, 58283, 67606, ..., 37260, 87311, 96234]), + values=tensor([0.5221, 0.7908, 0.9284, ..., 0.4846, 0.8145, 0.6299]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.4408, 0.1732, 0.5273, ..., 0.8772, 0.6136, 0.9894]) +tensor([0.0481, 0.3492, 0.7798, ..., 0.2984, 0.7068, 0.8860]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 11.049697399139404 seconds +Time: 10.260325908660889 seconds -[20.76, 20.76, 20.64, 20.6, 20.48, 20.24, 20.32, 20.36, 20.4, 20.64] -[20.8, 20.68, 23.8, 24.88, 26.88, 27.84, 27.84, 28.76, 26.08, 26.28, 25.56, 25.36, 25.48, 25.4] -14.580477476119995 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 868, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.049697399139404, 'TIME_S_1KI': 12.730066128040788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 347.2841278934479, 'W': 23.818433138573972} -[20.76, 20.76, 20.64, 20.6, 20.48, 20.24, 20.32, 20.36, 20.4, 20.64, 20.68, 20.8, 20.6, 20.6, 20.92, 21.0, 21.0, 21.08, 21.04, 21.04] -372.4 -18.619999999999997 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 868, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.049697399139404, 'TIME_S_1KI': 12.730066128040788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 347.2841278934479, 'W': 23.818433138573972, 'J_1KI': 400.0969215362303, 'W_1KI': 27.44059117347232, 'W_D': 5.198433138573975, 'J_D': 75.79563728809362, 'W_D_1KI': 5.988978270246515, 'J_D_1KI': 6.899744550975249} +[20.16, 20.24, 20.36, 20.52, 20.56, 20.56, 20.6, 20.6, 20.68, 20.64] +[20.8, 20.88, 21.32, 22.88, 24.76, 25.28, 26.12, 26.08, 25.2, 24.52, 24.56, 24.56, 24.52] +13.59720492362976 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 825, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.260325908660889, 'TIME_S_1KI': 12.436758677164715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 303.46346472740174, 'W': 22.318076871815833} +[20.16, 20.24, 20.36, 20.52, 20.56, 20.56, 20.6, 20.6, 20.68, 20.64, 21.08, 20.6, 20.44, 20.44, 20.6, 20.76, 20.88, 20.8, 20.44, 20.56] +370.3 +18.515 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 825, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.260325908660889, 'TIME_S_1KI': 12.436758677164715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 303.46346472740174, 'W': 22.318076871815833, 'J_1KI': 367.8345026998809, 'W_1KI': 27.052214390079797, 'W_D': 3.8030768718158328, 'J_D': 51.71121556639671, 'W_D_1KI': 4.609790147655556, 'J_D_1KI': 5.587624421400673} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.json index 669cf9a..51ec237 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 32669, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.073116302490234, "TIME_S_1KI": 0.33894873741131454, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 342.2707403564453, "W": 23.410284009847928, "J_1KI": 10.476927373242074, "W_1KI": 0.7165901622286549, "W_D": 4.943284009847925, "J_D": 72.2734280853271, "W_D_1KI": 0.15131421255159097, "J_D_1KI": 0.004631736892821665} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 32768, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.543907403945923, "TIME_S_1KI": 0.32177451794268563, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 331.4041768455506, "W": 22.589687525895688, "J_1KI": 10.113652857835406, "W_1KI": 0.6893825538908596, "W_D": 4.118687525895684, "J_D": 60.423600266218195, "W_D_1KI": 0.1256923683439845, "J_D_1KI": 0.0038358266706538242} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.output index 4a81d8b..67ba37e 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03962588310241699} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.040886878967285156} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 9995, 9999, 10000]), - col_indices=tensor([5736, 4740, 5169, ..., 5050, 7314, 6933]), - values=tensor([0.2904, 0.9920, 0.0901, ..., 0.6475, 0.2992, 0.6153]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 9998, 10000, 10000]), + col_indices=tensor([4512, 6064, 8080, ..., 2543, 8282, 8614]), + values=tensor([0.3924, 0.2795, 0.3542, ..., 0.2395, 0.4436, 0.6954]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.6798, 0.4263, 0.2506, ..., 0.2181, 0.0906, 0.7562]) +tensor([0.0035, 0.7615, 0.3613, ..., 0.8492, 0.8058, 0.7078]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.03962588310241699 seconds +Time: 0.040886878967285156 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 26497 -ss 10000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.51607346534729} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 25680 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.228740930557251} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9996, 9998, 10000]), - col_indices=tensor([5374, 4189, 5897, ..., 9913, 4567, 8496]), - values=tensor([0.8167, 0.6460, 0.7856, ..., 0.9381, 0.0308, 0.1187]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9994, 9997, 10000]), + col_indices=tensor([6444, 5673, 2875, ..., 1711, 2717, 9846]), + values=tensor([0.5286, 0.5997, 0.8297, ..., 0.2375, 0.6173, 0.2781]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9083, 0.0911, 0.6427, ..., 0.4641, 0.3576, 0.6926]) +tensor([0.4725, 0.3238, 0.8742, ..., 0.2370, 0.4782, 0.2133]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 8.51607346534729 seconds +Time: 8.228740930557251 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32669 -ss 10000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.073116302490234} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32768 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.543907403945923} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 9999, 10000, 10000]), - col_indices=tensor([2638, 262, 675, ..., 9893, 8606, 4272]), - values=tensor([0.0918, 0.5777, 0.8540, ..., 0.4523, 0.7955, 0.7135]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 10000, 10000, 10000]), + col_indices=tensor([2167, 2598, 1557, ..., 6726, 6700, 6866]), + values=tensor([0.0219, 0.6259, 0.0041, ..., 0.9746, 0.7379, 0.9575]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1165, 0.4058, 0.2834, ..., 0.7342, 0.6568, 0.2677]) +tensor([0.1977, 0.2810, 0.1152, ..., 0.4726, 0.1521, 0.8391]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 11.073116302490234 seconds +Time: 10.543907403945923 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 9999, 10000, 10000]), - col_indices=tensor([2638, 262, 675, ..., 9893, 8606, 4272]), - values=tensor([0.0918, 0.5777, 0.8540, ..., 0.4523, 0.7955, 0.7135]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 10000, 10000, 10000]), + col_indices=tensor([2167, 2598, 1557, ..., 6726, 6700, 6866]), + values=tensor([0.0219, 0.6259, 0.0041, ..., 0.9746, 0.7379, 0.9575]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1165, 0.4058, 0.2834, ..., 0.7342, 0.6568, 0.2677]) +tensor([0.1977, 0.2810, 0.1152, ..., 0.4726, 0.1521, 0.8391]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 11.073116302490234 seconds +Time: 10.543907403945923 seconds -[20.76, 20.6, 20.6, 20.6, 20.44, 20.48, 20.48, 20.48, 20.48, 20.64] -[20.44, 20.4, 23.56, 25.72, 27.84, 28.36, 29.08, 29.08, 26.36, 24.68, 23.72, 23.68, 23.44, 23.32] -14.620529174804688 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32669, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.073116302490234, 'TIME_S_1KI': 0.33894873741131454, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 342.2707403564453, 'W': 23.410284009847928} -[20.76, 20.6, 20.6, 20.6, 20.44, 20.48, 20.48, 20.48, 20.48, 20.64, 20.6, 20.88, 20.92, 20.76, 20.64, 20.48, 20.16, 20.16, 20.08, 20.2] -369.34000000000003 -18.467000000000002 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32669, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.073116302490234, 'TIME_S_1KI': 0.33894873741131454, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 342.2707403564453, 'W': 23.410284009847928, 'J_1KI': 10.476927373242074, 'W_1KI': 0.7165901622286549, 'W_D': 4.943284009847925, 'J_D': 72.2734280853271, 'W_D_1KI': 0.15131421255159097, 'J_D_1KI': 0.004631736892821665} +[20.56, 20.44, 20.44, 20.4, 20.4, 20.4, 20.44, 20.4, 20.44, 20.48] +[20.44, 20.6, 22.44, 24.08, 26.08, 26.52, 27.2, 27.2, 25.04, 24.76, 23.52, 23.44, 23.24, 23.12] +14.67059588432312 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32768, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.543907403945923, 'TIME_S_1KI': 0.32177451794268563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 331.4041768455506, 'W': 22.589687525895688} +[20.56, 20.44, 20.44, 20.4, 20.4, 20.4, 20.44, 20.4, 20.44, 20.48, 20.76, 20.8, 20.72, 20.52, 20.4, 20.48, 20.6, 20.64, 20.6, 20.8] +369.4200000000001 +18.471000000000004 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32768, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.543907403945923, 'TIME_S_1KI': 0.32177451794268563, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 331.4041768455506, 'W': 22.589687525895688, 'J_1KI': 10.113652857835406, 'W_1KI': 0.6893825538908596, 'W_D': 4.118687525895684, 'J_D': 60.423600266218195, 'W_D_1KI': 0.1256923683439845, 'J_D_1KI': 0.0038358266706538242} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.json index 3604623..11b4007 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4348, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.135539293289185, "TIME_S_1KI": 2.3310807942247433, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 299.55518106460573, "W": 22.140804095253714, "J_1KI": 68.89493584742543, "W_1KI": 5.092181254658168, "W_D": 3.514804095253716, "J_D": 47.55372806835178, "W_D_1KI": 0.8083726070040745, "J_D_1KI": 0.1859182628804219} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4555, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.090666770935059, "TIME_S_1KI": 2.2152945710065994, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 295.87606741905216, "W": 21.89535079560157, "J_1KI": 64.95632654644395, "W_1KI": 4.806882721317578, "W_D": 3.6443507956015715, "J_D": 49.246810054063886, "W_D_1KI": 0.8000770133044065, "J_D_1KI": 0.1756480819548642} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.output index 4918d44..593256a 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2414851188659668} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2305009365081787} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 19, ..., 99982, 99991, +tensor(crow_indices=tensor([ 0, 9, 18, ..., 99987, 99992, 100000]), - col_indices=tensor([ 302, 1349, 1385, ..., 9083, 9115, 9373]), - values=tensor([0.3908, 0.9700, 0.7778, ..., 0.9299, 0.7856, 0.5693]), + col_indices=tensor([ 358, 1147, 1177, ..., 5323, 6358, 7564]), + values=tensor([0.8466, 0.4120, 0.3659, ..., 0.5272, 0.1337, 0.6938]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.9198, 0.1049, 0.3911, ..., 0.9152, 0.2471, 0.8814]) +tensor([0.9531, 0.6758, 0.0878, ..., 0.9084, 0.1040, 0.7470]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.2414851188659668 seconds +Time: 0.2305009365081787 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4348 -ss 10000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.135539293289185} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4555 -ss 10000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.090666770935059} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 23, ..., 99987, 99990, +tensor(crow_indices=tensor([ 0, 10, 19, ..., 99964, 99978, 100000]), - col_indices=tensor([ 62, 627, 2703, ..., 9273, 9381, 9947]), - values=tensor([0.4329, 0.2872, 0.8964, ..., 0.9783, 0.1219, 0.9101]), + col_indices=tensor([1012, 1716, 2947, ..., 9010, 9137, 9342]), + values=tensor([0.7676, 0.8318, 0.5067, ..., 0.7706, 0.2340, 0.7681]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8278, 0.7584, 0.9132, ..., 0.6086, 0.4680, 0.0616]) +tensor([0.1100, 0.7323, 0.2241, ..., 0.7674, 0.2013, 0.4710]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.135539293289185 seconds +Time: 10.090666770935059 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 23, ..., 99987, 99990, +tensor(crow_indices=tensor([ 0, 10, 19, ..., 99964, 99978, 100000]), - col_indices=tensor([ 62, 627, 2703, ..., 9273, 9381, 9947]), - values=tensor([0.4329, 0.2872, 0.8964, ..., 0.9783, 0.1219, 0.9101]), + col_indices=tensor([1012, 1716, 2947, ..., 9010, 9137, 9342]), + values=tensor([0.7676, 0.8318, 0.5067, ..., 0.7706, 0.2340, 0.7681]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8278, 0.7584, 0.9132, ..., 0.6086, 0.4680, 0.0616]) +tensor([0.1100, 0.7323, 0.2241, ..., 0.7674, 0.2013, 0.4710]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.135539293289185 seconds +Time: 10.090666770935059 seconds -[20.76, 20.76, 21.0, 21.0, 20.84, 20.76, 20.68, 20.44, 20.6, 20.72] -[20.88, 20.96, 21.56, 22.88, 24.32, 25.12, 25.84, 25.2, 25.2, 24.92, 24.16, 24.04, 24.2] -13.529552936553955 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4348, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.135539293289185, 'TIME_S_1KI': 2.3310807942247433, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 299.55518106460573, 'W': 22.140804095253714} -[20.76, 20.76, 21.0, 21.0, 20.84, 20.76, 20.68, 20.44, 20.6, 20.72, 20.52, 20.44, 20.36, 20.68, 20.84, 20.96, 20.92, 20.68, 20.36, 20.4] -372.52 -18.625999999999998 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4348, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.135539293289185, 'TIME_S_1KI': 2.3310807942247433, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 299.55518106460573, 'W': 22.140804095253714, 'J_1KI': 68.89493584742543, 'W_1KI': 5.092181254658168, 'W_D': 3.514804095253716, 'J_D': 47.55372806835178, 'W_D_1KI': 0.8083726070040745, 'J_D_1KI': 0.1859182628804219} +[20.36, 20.24, 20.48, 20.48, 20.44, 20.36, 20.48, 20.24, 20.36, 20.52] +[20.48, 20.6, 20.96, 22.12, 23.84, 24.64, 25.4, 25.16, 25.16, 25.04, 24.2, 24.24, 23.96] +13.51319146156311 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4555, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.090666770935059, 'TIME_S_1KI': 2.2152945710065994, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 295.87606741905216, 'W': 21.89535079560157} +[20.36, 20.24, 20.48, 20.48, 20.44, 20.36, 20.48, 20.24, 20.36, 20.52, 20.32, 20.36, 20.2, 20.04, 20.0, 20.04, 20.16, 20.24, 20.24, 20.12] +365.02 +18.250999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4555, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.090666770935059, 'TIME_S_1KI': 2.2152945710065994, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 295.87606741905216, 'W': 21.89535079560157, 'J_1KI': 64.95632654644395, 'W_1KI': 4.806882721317578, 'W_D': 3.6443507956015715, 'J_D': 49.246810054063886, 'W_D_1KI': 0.8000770133044065, 'J_D_1KI': 0.1756480819548642} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.json 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.json index 1a09527..8b4cbb0 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 493, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.442772150039673, "TIME_S_1KI": 21.18209361062814, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 317.52540328025816, "W": 21.825092476629965, "J_1KI": 644.0677551323695, "W_1KI": 44.26996445563888, "W_D": 3.2900924766299617, "J_D": 47.86636948227874, "W_D_1KI": 6.673615571257529, "J_D_1KI": 13.536745580644075} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 482, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.540670394897461, "TIME_S_1KI": 23.94329957447606, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 320.7209017944336, "W": 22.04720368625311, "J_1KI": 665.3960618141775, "W_1KI": 45.7410864860023, "W_D": 3.5162036862531103, "J_D": 51.15025166893007, "W_D_1KI": 7.295028394715996, "J_D_1KI": 15.134913681983395} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.output index 449d7aa..0792bc2 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.01.output @@ -1,34 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.01 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.1293396949768066} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.1746718883514404} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 94, 210, ..., 999806, - 999898, 1000000]), - col_indices=tensor([ 197, 225, 349, ..., 9664, 9718, 9909]), - values=tensor([0.2825, 0.4405, 0.0615, ..., 0.4764, 0.3721, 0.7741]), - size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2003, 0.0291, 0.9415, ..., 0.2751, 0.8368, 0.8186]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000000 -Density: 0.01 -Time: 2.1293396949768066 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 493 -ss 10000 -sd 0.01 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.442772150039673} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 103, 211, ..., 999799, +tensor(crow_indices=tensor([ 0, 117, 206, ..., 999808, 999895, 1000000]), - col_indices=tensor([ 29, 259, 296, ..., 9649, 9833, 9895]), - values=tensor([0.6562, 0.6337, 0.8410, ..., 0.1779, 0.9179, 0.3279]), + col_indices=tensor([ 38, 49, 125, ..., 9767, 9798, 9825]), + values=tensor([0.7626, 0.4173, 0.7077, ..., 0.4492, 0.4327, 0.3876]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2837, 0.1453, 0.4499, ..., 0.4322, 0.7993, 0.4344]) +tensor([0.5141, 0.8808, 0.9438, ..., 0.4524, 0.8668, 0.5263]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.442772150039673 seconds +Time: 2.1746718883514404 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 482 -ss 10000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 11.540670394897461} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 103, 211, ..., 999799, - 999895, 1000000]), - col_indices=tensor([ 29, 259, 296, ..., 9649, 9833, 9895]), - values=tensor([0.6562, 0.6337, 0.8410, ..., 0.1779, 0.9179, 0.3279]), +tensor(crow_indices=tensor([ 0, 95, 189, ..., 999807, + 999906, 1000000]), + col_indices=tensor([ 184, 234, 387, ..., 9882, 9937, 9961]), + values=tensor([0.1456, 0.1844, 0.1237, ..., 0.5239, 0.9721, 0.9496]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2837, 0.1453, 0.4499, ..., 0.4322, 0.7993, 0.4344]) +tensor([0.3253, 0.6092, 0.1701, ..., 0.4726, 0.0642, 0.0427]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +36,30 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.442772150039673 seconds +Time: 11.540670394897461 seconds -[20.48, 20.72, 20.88, 20.72, 20.92, 20.68, 20.44, 20.48, 20.4, 20.4] -[20.48, 20.48, 20.76, 21.64, 23.08, 24.24, 25.0, 25.48, 24.92, 24.24, 24.2, 24.12, 24.0, 23.96] -14.54863953590393 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 493, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.442772150039673, 'TIME_S_1KI': 21.18209361062814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 317.52540328025816, 'W': 21.825092476629965} -[20.48, 20.72, 20.88, 20.72, 20.92, 20.68, 20.44, 20.48, 20.4, 20.4, 20.6, 20.6, 20.36, 20.32, 20.48, 20.48, 20.64, 20.64, 20.8, 20.8] -370.70000000000005 -18.535000000000004 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 493, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.442772150039673, 'TIME_S_1KI': 21.18209361062814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 317.52540328025816, 'W': 21.825092476629965, 'J_1KI': 644.0677551323695, 'W_1KI': 44.26996445563888, 'W_D': 3.2900924766299617, 'J_D': 47.86636948227874, 'W_D_1KI': 6.673615571257529, 'J_D_1KI': 13.536745580644075} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 95, 189, ..., 999807, + 999906, 1000000]), + col_indices=tensor([ 184, 234, 387, ..., 9882, 9937, 9961]), + values=tensor([0.1456, 0.1844, 0.1237, ..., 0.5239, 0.9721, 0.9496]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.3253, 0.6092, 0.1701, ..., 0.4726, 0.0642, 0.0427]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 11.540670394897461 seconds + +[20.32, 20.24, 20.16, 20.2, 20.52, 20.4, 20.48, 20.56, 20.56, 20.56] +[20.52, 20.48, 20.48, 21.8, 22.52, 24.2, 24.92, 25.2, 25.04, 24.52, 24.84, 25.08, 25.08, 25.12] +14.54701042175293 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.540670394897461, 'TIME_S_1KI': 23.94329957447606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 320.7209017944336, 'W': 22.04720368625311} +[20.32, 20.24, 20.16, 20.2, 20.52, 20.4, 20.48, 20.56, 20.56, 20.56, 20.76, 20.88, 20.84, 20.84, 20.56, 20.6, 20.68, 20.92, 20.96, 20.8] +370.62 +18.531 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 11.540670394897461, 'TIME_S_1KI': 23.94329957447606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 320.7209017944336, 'W': 22.04720368625311, 'J_1KI': 665.3960618141775, 'W_1KI': 45.7410864860023, 'W_D': 3.5162036862531103, 'J_D': 51.15025166893007, 'W_D_1KI': 7.295028394715996, 'J_D_1KI': 15.134913681983395} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.json index ad31b62..4a87c51 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.699479103088379, "TIME_S_1KI": 106.99479103088379, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 372.40043621063234, "W": 23.7347384855202, "J_1KI": 3724.0043621063232, "W_1KI": 237.34738485520202, "W_D": 5.215738485520202, "J_D": 81.83546190547948, "W_D_1KI": 52.15738485520202, "J_D_1KI": 521.5738485520202} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.83819031715393, "TIME_S_1KI": 108.3819031715393, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 370.6096257972718, "W": 23.619261647740284, "J_1KI": 3706.096257972718, "W_1KI": 236.19261647740282, "W_D": 5.2132616477402856, "J_D": 81.80124244642268, "W_D_1KI": 52.132616477402856, "J_D_1KI": 521.3261647740286} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.output index f0aea1a..ad8d7d2 100644 --- 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.699479103088379} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.83819031715393} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 534, 1091, ..., 4998975, - 4999490, 5000000]), - col_indices=tensor([ 4, 42, 44, ..., 9941, 9942, 9945]), - values=tensor([0.3387, 0.3479, 0.7697, ..., 0.0992, 0.1573, 0.7910]), +tensor(crow_indices=tensor([ 0, 517, 1012, ..., 4998964, + 4999467, 5000000]), + col_indices=tensor([ 4, 10, 19, ..., 9973, 9989, 9990]), + values=tensor([0.2609, 0.1530, 0.6472, ..., 0.9186, 0.8556, 0.5075]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0536, 0.4974, 0.9494, ..., 0.5617, 0.8582, 0.7161]) +tensor([0.4885, 0.4408, 0.6533, ..., 0.1062, 0.1430, 0.1529]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.699479103088379 seconds +Time: 10.83819031715393 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 534, 1091, ..., 4998975, - 4999490, 5000000]), - col_indices=tensor([ 4, 42, 44, ..., 9941, 9942, 9945]), - values=tensor([0.3387, 0.3479, 0.7697, ..., 0.0992, 0.1573, 0.7910]), +tensor(crow_indices=tensor([ 0, 517, 1012, ..., 4998964, + 4999467, 5000000]), + col_indices=tensor([ 4, 10, 19, ..., 9973, 9989, 9990]), + values=tensor([0.2609, 0.1530, 0.6472, ..., 0.9186, 0.8556, 0.5075]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0536, 0.4974, 0.9494, ..., 0.5617, 0.8582, 0.7161]) +tensor([0.4885, 0.4408, 0.6533, ..., 0.1062, 0.1430, 0.1529]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.699479103088379 seconds +Time: 10.83819031715393 seconds -[20.52, 20.4, 20.48, 20.48, 20.48, 20.64, 20.76, 20.8, 20.8, 20.56] -[20.56, 20.2, 20.28, 23.96, 25.84, 29.28, 30.68, 31.08, 27.28, 26.24, 25.12, 24.52, 24.52, 24.32, 24.2] -15.69010066986084 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.699479103088379, 'TIME_S_1KI': 106.99479103088379, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 372.40043621063234, 'W': 23.7347384855202} -[20.52, 20.4, 20.48, 20.48, 20.48, 20.64, 20.76, 20.8, 20.8, 20.56, 20.6, 20.6, 20.6, 20.68, 20.68, 20.68, 20.56, 20.4, 20.28, 20.44] -370.38 -18.519 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.699479103088379, 'TIME_S_1KI': 106.99479103088379, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 372.40043621063234, 'W': 23.7347384855202, 'J_1KI': 3724.0043621063232, 'W_1KI': 237.34738485520202, 'W_D': 5.215738485520202, 'J_D': 81.83546190547948, 'W_D_1KI': 52.15738485520202, 'J_D_1KI': 521.5738485520202} +[20.12, 20.2, 20.32, 20.32, 20.36, 20.4, 20.32, 20.32, 20.28, 20.52] +[20.68, 20.72, 20.76, 25.48, 27.28, 29.12, 30.52, 28.36, 26.92, 25.16, 24.48, 24.28, 24.2, 24.12, 24.24] +15.690991163253784 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.83819031715393, 'TIME_S_1KI': 108.3819031715393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 370.6096257972718, 'W': 23.619261647740284} +[20.12, 20.2, 20.32, 20.32, 20.36, 20.4, 20.32, 20.32, 20.28, 20.52, 20.48, 20.48, 20.6, 20.6, 20.68, 20.76, 20.6, 20.48, 20.52, 20.64] +368.12 +18.406 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.83819031715393, 'TIME_S_1KI': 108.3819031715393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 370.6096257972718, 'W': 23.619261647740284, 'J_1KI': 3706.096257972718, 'W_1KI': 236.19261647740282, 'W_D': 5.2132616477402856, 'J_D': 81.80124244642268, 'W_D_1KI': 52.132616477402856, 'J_D_1KI': 521.3261647740286} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.json 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.json index f0b391a..cf15ded 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.31538224220276, "TIME_S_1KI": 213.1538224220276, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 656.8145585250853, "W": 23.224732449579832, "J_1KI": 6568.145585250853, "W_1KI": 232.24732449579832, "W_D": 4.7157324495798285, "J_D": 133.36479693436596, "W_D_1KI": 47.157324495798285, "J_D_1KI": 471.57324495798287} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.307238340377808, "TIME_S_1KI": 213.07238340377808, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 644.1193591403961, "W": 22.767072029791674, "J_1KI": 6441.193591403961, "W_1KI": 227.67072029791674, "W_D": 4.383072029791673, "J_D": 124.00459502220151, "W_D_1KI": 43.830720297916734, "J_D_1KI": 438.30720297916736} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.output index f5c642e..aedd9eb 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.1 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.31538224220276} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.307238340377808} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 941, 1920, ..., 9998069, - 9999051, 10000000]), - col_indices=tensor([ 4, 12, 19, ..., 9982, 9986, 9989]), - values=tensor([0.3288, 0.1903, 0.7853, ..., 0.1848, 0.4723, 0.3439]), +tensor(crow_indices=tensor([ 0, 978, 2000, ..., 9997908, + 9998960, 10000000]), + col_indices=tensor([ 10, 23, 33, ..., 9971, 9974, 9991]), + values=tensor([0.8946, 0.8479, 0.7256, ..., 0.4012, 0.5254, 0.1171]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8250, 0.2999, 0.1337, ..., 0.5908, 0.0422, 0.7676]) +tensor([0.4414, 0.3269, 0.8562, ..., 0.9318, 0.1636, 0.1553]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 21.31538224220276 seconds +Time: 21.307238340377808 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 941, 1920, ..., 9998069, - 9999051, 10000000]), - col_indices=tensor([ 4, 12, 19, ..., 9982, 9986, 9989]), - values=tensor([0.3288, 0.1903, 0.7853, ..., 0.1848, 0.4723, 0.3439]), +tensor(crow_indices=tensor([ 0, 978, 2000, ..., 9997908, + 9998960, 10000000]), + col_indices=tensor([ 10, 23, 33, ..., 9971, 9974, 9991]), + values=tensor([0.8946, 0.8479, 0.7256, ..., 0.4012, 0.5254, 0.1171]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8250, 0.2999, 0.1337, ..., 0.5908, 0.0422, 0.7676]) +tensor([0.4414, 0.3269, 0.8562, ..., 0.9318, 0.1636, 0.1553]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 21.31538224220276 seconds +Time: 21.307238340377808 seconds -[20.48, 20.28, 20.4, 20.48, 20.4, 20.6, 20.96, 20.96, 21.04, 20.88] -[20.92, 20.84, 20.88, 24.28, 26.48, 27.96, 29.4, 28.08, 28.12, 26.52, 25.88, 24.92, 24.44, 24.32, 24.56, 24.84, 24.84, 24.96, 24.88, 24.72, 24.72, 24.48, 24.56, 24.28, 24.16, 24.12, 24.48] -28.280823469161987 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.31538224220276, 'TIME_S_1KI': 213.1538224220276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 656.8145585250853, 'W': 23.224732449579832} -[20.48, 20.28, 20.4, 20.48, 20.4, 20.6, 20.96, 20.96, 21.04, 20.88, 20.44, 20.52, 20.68, 20.36, 20.52, 20.4, 20.4, 20.4, 20.52, 20.72] -370.18000000000006 -18.509000000000004 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.31538224220276, 'TIME_S_1KI': 213.1538224220276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 656.8145585250853, 'W': 23.224732449579832, 'J_1KI': 6568.145585250853, 'W_1KI': 232.24732449579832, 'W_D': 4.7157324495798285, 'J_D': 133.36479693436596, 'W_D_1KI': 47.157324495798285, 'J_D_1KI': 471.57324495798287} +[20.48, 20.56, 20.6, 
20.4, 20.4, 20.44, 20.52, 20.44, 20.48, 20.64] +[20.6, 20.52, 21.4, 22.48, 24.2, 25.6, 26.84, 26.96, 26.44, 25.72, 25.28, 25.28, 24.6, 24.52, 24.52, 24.52, 24.6, 24.68, 24.64, 24.68, 24.6, 24.48, 24.4, 24.44, 24.4, 24.48, 24.68] +28.29170823097229 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.307238340377808, 'TIME_S_1KI': 213.07238340377808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 644.1193591403961, 'W': 22.767072029791674} +[20.48, 20.56, 20.6, 20.4, 20.4, 20.44, 20.52, 20.44, 20.48, 20.64, 19.76, 20.0, 20.16, 20.4, 20.64, 20.64, 20.6, 20.52, 20.36, 20.16] +367.68 +18.384 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.307238340377808, 'TIME_S_1KI': 213.07238340377808, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 644.1193591403961, 'W': 22.767072029791674, 'J_1KI': 6441.193591403961, 'W_1KI': 227.67072029791674, 'W_D': 4.383072029791673, 'J_D': 124.00459502220151, 'W_D_1KI': 43.830720297916734, 'J_D_1KI': 438.30720297916736} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.2.json index 6cc4360..d5be847 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.2.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.396695137023926, "TIME_S_1KI": 423.96695137023926, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1244.9685798645019, "W": 23.740989470007165, "J_1KI": 12449.68579864502, "W_1KI": 237.40989470007165, "W_D": 5.275989470007168, "J_D": 276.6709081840517, "W_D_1KI": 52.75989470007168, "J_D_1KI": 527.5989470007169} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.98237729072571, "TIME_S_1KI": 429.8237729072571, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1240.7171248722075, "W": 23.6549559152447, "J_1KI": 12407.171248722074, "W_1KI": 236.549559152447, "W_D": 5.164955915244697, "J_D": 270.90514462232557, "W_D_1KI": 51.64955915244697, "J_D_1KI": 516.4955915244697} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.2.output index 8c655a7..734d8cb 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.2 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 
100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.396695137023926} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.98237729072571} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1981, 3970, ..., 19995969, - 19997989, 20000000]), - col_indices=tensor([ 3, 4, 9, ..., 9978, 9982, 9987]), - values=tensor([0.8747, 0.4611, 0.0013, ..., 0.7048, 0.8145, 0.2728]), +tensor(crow_indices=tensor([ 0, 1987, 4017, ..., 19995959, + 19997966, 20000000]), + col_indices=tensor([ 11, 19, 20, ..., 9988, 9989, 9996]), + values=tensor([0.2351, 0.1753, 0.4676, ..., 0.9953, 0.9645, 0.6300]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.1891, 0.5511, 0.0831, ..., 0.7428, 0.4718, 0.5050]) +tensor([0.3446, 0.4327, 0.7046, ..., 0.4760, 0.7086, 0.8763]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 42.396695137023926 seconds +Time: 42.98237729072571 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1981, 3970, ..., 19995969, - 19997989, 20000000]), - col_indices=tensor([ 3, 4, 9, ..., 9978, 9982, 9987]), - values=tensor([0.8747, 0.4611, 0.0013, ..., 0.7048, 0.8145, 0.2728]), +tensor(crow_indices=tensor([ 0, 1987, 4017, ..., 19995959, + 19997966, 20000000]), + col_indices=tensor([ 11, 19, 20, ..., 9988, 9989, 9996]), + values=tensor([0.2351, 0.1753, 0.4676, ..., 0.9953, 0.9645, 0.6300]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.1891, 0.5511, 0.0831, ..., 0.7428, 0.4718, 0.5050]) +tensor([0.3446, 0.4327, 0.7046, ..., 0.4760, 0.7086, 0.8763]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 42.396695137023926 seconds +Time: 42.98237729072571 seconds -[20.28, 20.48, 20.28, 20.48, 20.68, 20.72, 20.92, 20.84, 20.6, 20.6] -[20.68, 20.48, 20.48, 24.96, 25.96, 28.64, 31.0, 30.08, 29.88, 29.2, 29.04, 27.32, 26.6, 25.64, 24.48, 24.28, 24.48, 24.6, 24.64, 24.64, 24.8, 24.8, 24.96, 24.76, 25.0, 24.92, 24.88, 24.72, 24.76, 24.72, 24.68, 24.72, 24.56, 24.6, 24.6, 24.56, 24.72, 24.8, 24.76, 24.64, 24.64, 24.64, 24.68, 24.56, 24.52, 24.68, 24.44, 24.28, 24.24, 24.04] -52.43962478637695 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.396695137023926, 'TIME_S_1KI': 423.96695137023926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1244.9685798645019, 'W': 23.740989470007165} -[20.28, 20.48, 20.28, 20.48, 20.68, 20.72, 20.92, 20.84, 20.6, 20.6, 20.92, 20.6, 20.68, 20.68, 20.32, 20.36, 20.28, 20.24, 20.16, 20.16] -369.29999999999995 -18.464999999999996 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.396695137023926, 'TIME_S_1KI': 423.96695137023926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1244.9685798645019, 'W': 23.740989470007165, 'J_1KI': 12449.68579864502, 'W_1KI': 237.40989470007165, 'W_D': 5.275989470007168, 'J_D': 276.6709081840517, 'W_D_1KI': 52.75989470007168, 'J_D_1KI': 527.5989470007169} +[20.6, 20.88, 20.8, 20.76, 20.76, 20.36, 20.44, 20.68, 20.8, 21.08] +[21.12, 21.04, 21.04, 24.2, 26.04, 27.64, 29.88, 31.6, 28.48, 28.48, 28.68, 26.56, 26.44, 25.72, 24.92, 24.84, 24.72, 24.36, 24.24, 24.24, 24.32, 24.56, 24.52, 24.76, 24.76, 24.84, 24.96, 24.96, 24.8, 24.96, 24.6, 24.48, 24.48, 24.2, 24.16, 24.36, 24.56, 24.68, 24.76, 24.64, 24.52, 24.32, 24.36, 24.52, 24.64, 24.8, 24.8, 24.84, 24.72, 24.36] +52.450620889663696 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.98237729072571, 'TIME_S_1KI': 429.8237729072571, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1240.7171248722075, 'W': 23.6549559152447} +[20.6, 20.88, 20.8, 20.76, 20.76, 20.36, 20.44, 20.68, 20.8, 21.08, 20.56, 20.44, 20.04, 20.2, 20.28, 20.32, 20.48, 20.64, 20.52, 20.56] +369.8 +18.490000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 
10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.98237729072571, 'TIME_S_1KI': 429.8237729072571, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1240.7171248722075, 'W': 23.6549559152447, 'J_1KI': 12407.171248722074, 'W_1KI': 236.549559152447, 'W_D': 5.164955915244697, 'J_D': 270.90514462232557, 'W_D_1KI': 51.64955915244697, 'J_D_1KI': 516.4955915244697} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.3.json index 33ea6fd..6ccdb07 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.3.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 62.99070644378662, "TIME_S_1KI": 629.9070644378662, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1817.8575790786747, "W": 23.739082320260188, "J_1KI": 18178.57579078675, "W_1KI": 237.3908232026019, "W_D": 5.089082320260189, "J_D": 389.7044856929784, "W_D_1KI": 50.890823202601894, "J_D_1KI": 508.908232026019} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 62.992671489715576, "TIME_S_1KI": 629.9267148971558, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1924.1088186645509, "W": 23.521015273480383, "J_1KI": 19241.08818664551, "W_1KI": 235.21015273480384, "W_D": 5.0750152734803855, "J_D": 415.15561845541015, "W_D_1KI": 50.750152734803855, "J_D_1KI": 507.50152734803856} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.3.output index 58e2c22..45c0b9c 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.3 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 62.99070644378662} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 62.992671489715576} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2976, 6021, ..., 29993904, - 29996986, 30000000]), - col_indices=tensor([ 0, 1, 2, ..., 9993, 9995, 9997]), - values=tensor([0.2230, 0.6279, 0.9702, ..., 0.2815, 0.5420, 0.7025]), +tensor(crow_indices=tensor([ 0, 2964, 5936, ..., 29994031, + 29997007, 30000000]), + col_indices=tensor([ 3, 13, 15, ..., 9991, 9994, 9999]), + values=tensor([0.4668, 0.1670, 0.4964, ..., 0.2068, 0.5203, 0.8363]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3830, 0.2972, 0.7622, ..., 0.1887, 0.7379, 0.3841]) +tensor([0.2452, 0.0965, 0.0920, ..., 0.6406, 0.2865, 0.2449]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 62.99070644378662 seconds +Time: 62.992671489715576 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2976, 6021, ..., 29993904, - 29996986, 30000000]), - col_indices=tensor([ 0, 1, 2, ..., 9993, 9995, 9997]), - values=tensor([0.2230, 0.6279, 0.9702, ..., 0.2815, 0.5420, 0.7025]), +tensor(crow_indices=tensor([ 0, 2964, 5936, ..., 29994031, + 29997007, 30000000]), + col_indices=tensor([ 3, 13, 15, ..., 9991, 9994, 9999]), + values=tensor([0.4668, 0.1670, 0.4964, ..., 0.2068, 0.5203, 0.8363]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3830, 0.2972, 0.7622, ..., 0.1887, 0.7379, 0.3841]) +tensor([0.2452, 0.0965, 0.0920, ..., 0.6406, 0.2865, 0.2449]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 62.99070644378662 seconds +Time: 62.992671489715576 seconds -[20.4, 20.36, 20.44, 20.44, 20.72, 20.96, 20.88, 21.12, 20.72, 20.36] -[20.44, 20.2, 23.44, 25.08, 27.68, 28.88, 31.04, 29.08, 29.08, 29.48, 28.4, 29.16, 28.52, 28.08, 27.8, 26.96, 25.8, 24.96, 24.8, 24.68, 24.76, 24.68, 24.72, 24.68, 24.52, 24.44, 24.4, 24.76, 24.6, 24.88, 24.88, 24.72, 24.44, 24.48, 24.24, 24.32, 24.24, 24.16, 24.36, 24.56, 24.28, 24.24, 24.28, 24.16, 23.76, 23.92, 24.36, 24.4, 24.52, 24.84, 24.76, 24.44, 24.44, 24.24, 24.32, 24.04, 24.08, 24.28, 24.08, 24.08, 24.0, 23.88, 24.04, 24.28, 24.16, 24.4, 24.48, 24.4, 24.48, 24.6, 24.52, 24.52, 24.52] -76.5765733718872 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 62.99070644378662, 'TIME_S_1KI': 629.9070644378662, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1817.8575790786747, 'W': 23.739082320260188} -[20.4, 20.36, 20.44, 20.44, 20.72, 20.96, 20.88, 21.12, 20.72, 20.36, 20.28, 20.84, 20.84, 20.8, 21.0, 21.12, 21.0, 20.52, 20.56, 20.32] -373.0 -18.65 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 62.99070644378662, 'TIME_S_1KI': 
629.9070644378662, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1817.8575790786747, 'W': 23.739082320260188, 'J_1KI': 18178.57579078675, 'W_1KI': 237.3908232026019, 'W_D': 5.089082320260189, 'J_D': 389.7044856929784, 'W_D_1KI': 50.890823202601894, 'J_D_1KI': 508.908232026019} +[20.2, 20.2, 20.16, 20.32, 20.48, 20.52, 20.72, 20.6, 20.6, 20.44] +[20.4, 20.36, 21.32, 22.36, 24.48, 25.44, 25.44, 27.88, 28.32, 28.08, 28.28, 28.84, 28.12, 28.28, 27.48, 26.56, 25.64, 24.44, 24.32, 24.36, 24.4, 24.24, 24.12, 24.04, 24.2, 24.32, 24.44, 24.6, 24.6, 24.68, 24.48, 24.2, 24.32, 24.16, 24.12, 24.28, 24.4, 24.56, 24.64, 24.32, 24.24, 24.16, 24.48, 24.64, 24.8, 24.72, 24.68, 24.56, 24.6, 24.6, 24.48, 24.72, 24.64, 24.64, 24.68, 24.92, 24.84, 24.8, 24.6, 24.56, 24.64, 24.56, 24.4, 24.4, 24.36, 24.24, 24.16, 24.16, 24.16, 23.96, 24.08, 24.08, 24.24, 24.24, 24.28, 24.44, 24.32, 24.32] +81.80381655693054 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 62.992671489715576, 'TIME_S_1KI': 629.9267148971558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1924.1088186645509, 'W': 23.521015273480383} +[20.2, 20.2, 20.16, 20.32, 20.48, 20.52, 20.72, 20.6, 20.6, 20.44, 20.8, 20.8, 20.56, 20.52, 20.32, 20.44, 20.6, 20.6, 20.44, 20.64] +368.91999999999996 +18.445999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 62.992671489715576, 'TIME_S_1KI': 629.9267148971558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1924.1088186645509, 'W': 23.521015273480383, 'J_1KI': 19241.08818664551, 'W_1KI': 235.21015273480384, 'W_D': 5.0750152734803855, 'J_D': 415.15561845541015, 'W_D_1KI': 50.750152734803855, 'J_D_1KI': 507.50152734803856} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..56a0b93 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 85.97017788887024, "TIME_S_1KI": 859.7017788887024, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2387.2728435325635, "W": 23.694035817285897, "J_1KI": 23872.728435325633, "W_1KI": 236.94035817285896, "W_D": 5.440035817285899, "J_D": 548.1062776556028, "W_D_1KI": 54.400358172858994, "J_D_1KI": 544.00358172859} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..ecd3fea --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.4 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, 
"MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 85.97017788887024} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4089, 8015, ..., 39991984, + 39995972, 40000000]), + col_indices=tensor([ 3, 7, 8, ..., 9991, 9993, 9999]), + values=tensor([0.1769, 0.1553, 0.1261, ..., 0.3324, 0.8180, 0.7589]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.3780, 0.5744, 0.3320, ..., 0.1044, 0.3638, 0.0832]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 85.97017788887024 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4089, 8015, ..., 39991984, + 39995972, 40000000]), + col_indices=tensor([ 3, 7, 8, ..., 9991, 9993, 9999]), + values=tensor([0.1769, 0.1553, 0.1261, ..., 0.3324, 0.8180, 0.7589]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.3780, 0.5744, 0.3320, ..., 0.1044, 0.3638, 0.0832]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 85.97017788887024 seconds + +[20.48, 20.4, 20.28, 20.52, 20.64, 20.48, 20.52, 20.44, 20.16, 20.08] +[20.08, 20.24, 20.32, 21.52, 23.0, 25.08, 25.88, 28.48, 30.52, 29.92, 30.08, 30.4, 29.48, 28.32, 29.08, 28.76, 27.96, 26.72, 25.64, 24.92, 24.48, 24.48, 24.48, 24.32, 24.28, 24.24, 24.28, 24.24, 24.44, 24.48, 24.84, 24.92, 24.8, 24.8, 24.84, 24.48, 24.32, 24.52, 24.36, 24.24, 24.36, 24.4, 24.4, 24.4, 24.48, 24.28, 24.28, 24.48, 24.36, 24.4, 24.52, 24.44, 24.52, 24.64, 24.68, 24.88, 24.92, 24.72, 24.96, 24.72, 24.44, 24.64, 24.64, 24.64, 24.68, 24.68, 24.8, 24.52, 24.28, 24.28, 24.24, 24.16, 24.24, 24.36, 24.48, 24.68, 24.72, 24.6, 24.48, 24.48, 24.24, 24.36, 24.4, 24.44, 24.84, 25.0, 25.04, 25.04, 25.04, 24.68, 24.4, 24.36, 24.48, 24.44, 24.48, 24.68] +100.75416707992554 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 85.97017788887024, 'TIME_S_1KI': 859.7017788887024, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2387.2728435325635, 'W': 23.694035817285897} +[20.48, 20.4, 20.28, 20.52, 20.64, 20.48, 20.52, 20.44, 20.16, 20.08, 20.56, 20.4, 20.44, 20.16, 20.08, 19.92, 20.0, 19.84, 20.16, 20.16] +365.08 +18.253999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 85.97017788887024, 'TIME_S_1KI': 
859.7017788887024, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2387.2728435325635, 'W': 23.694035817285897, 'J_1KI': 23872.728435325633, 'W_1KI': 236.94035817285896, 'W_D': 5.440035817285899, 'J_D': 548.1062776556028, 'W_D_1KI': 54.400358172858994, 'J_D_1KI': 544.00358172859} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..3f6b51b --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 104.97069644927979, "TIME_S_1KI": 1049.7069644927979, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3012.450877876282, "W": 23.728634661712928, "J_1KI": 30124.50877876282, "W_1KI": 237.28634661712928, "W_D": 4.652634661712927, "J_D": 590.6717167224888, "W_D_1KI": 46.52634661712927, "J_D_1KI": 465.2634661712927} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..8b19e00 --- /dev/null +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.5 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 104.97069644927979} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4981, 9935, ..., 49989912, + 49994956, 50000000]), + col_indices=tensor([ 1, 5, 6, ..., 9994, 9995, 9998]), + values=tensor([0.9764, 0.1800, 0.4422, ..., 0.5448, 0.5139, 0.1546]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.1638, 0.1363, 0.5993, ..., 0.4801, 0.4016, 0.8447]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 104.97069644927979 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4981, 9935, ..., 49989912, + 49994956, 50000000]), + col_indices=tensor([ 1, 5, 6, ..., 9994, 9995, 9998]), + values=tensor([0.9764, 0.1800, 0.4422, ..., 0.5448, 0.5139, 0.1546]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.1638, 0.1363, 0.5993, ..., 0.4801, 0.4016, 0.8447]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 104.97069644927979 seconds + +[23.24, 23.36, 23.52, 22.68, 22.16, 21.72, 20.8, 20.6, 20.6, 20.52] +[20.48, 20.52, 20.68, 22.36, 23.36, 25.6, 26.16, 26.72, 28.12, 29.2, 29.72, 30.24, 29.88, 28.08, 27.52, 27.84, 28.48, 28.88, 28.88, 28.52, 27.68, 26.6, 25.4, 24.56, 24.64, 24.72, 24.56, 24.48, 24.36, 24.28, 24.4, 24.52, 24.64, 24.56, 24.56, 24.32, 24.36, 24.44, 24.6, 24.6, 24.52, 24.6, 24.6, 24.44, 24.4, 24.28, 24.4, 24.64, 24.52, 24.6, 24.52, 24.48, 24.12, 24.4, 24.4, 24.32, 24.48, 24.64, 24.68, 24.76, 24.64, 24.64, 24.4, 24.36, 24.2, 24.16, 24.24, 24.2, 24.4, 24.36, 24.36, 24.64, 24.6, 24.64, 24.76, 24.84, 24.72, 24.64, 24.56, 24.36, 24.44, 24.48, 24.48, 24.48, 24.52, 24.28, 24.32, 24.12, 24.12, 24.28, 24.36, 24.28, 24.52, 24.64, 24.76, 24.8, 24.52, 24.68, 24.36, 24.52, 24.52, 24.68, 24.6, 24.68, 24.52, 24.52, 24.48, 24.56, 24.72, 24.84, 25.0, 24.84, 24.6, 24.4, 24.32, 24.32, 24.4, 24.44, 24.52, 24.48, 24.24] +126.9542441368103 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 104.97069644927979, 'TIME_S_1KI': 1049.7069644927979, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3012.450877876282, 'W': 23.728634661712928} +[23.24, 23.36, 23.52, 22.68, 22.16, 21.72, 20.8, 20.6, 20.6, 20.52, 20.08, 20.08, 20.32, 20.44, 20.44, 20.44, 20.76, 20.68, 20.64, 20.72] +381.52000000000004 +19.076 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 104.97069644927979, 'TIME_S_1KI': 1049.7069644927979, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3012.450877876282, 'W': 23.728634661712928, 'J_1KI': 30124.50877876282, 'W_1KI': 237.28634661712928, 'W_D': 4.652634661712927, 'J_D': 590.6717167224888, 'W_D_1KI': 46.52634661712927, 'J_D_1KI': 465.2634661712927} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.json index 256b7e3..39e7e8d 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 147223, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.998100280761719, "TIME_S_1KI": 0.07470368271779354, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 332.2257844543456, "W": 22.66095026687019, "J_1KI": 2.256616048133414, "W_1KI": 0.15392262259884795, "W_D": 4.3619502668701955, "J_D": 63.94931951642035, "W_D_1KI": 0.029628184909084827, "J_D_1KI": 0.00020124698524744657} +{"CPU": 
"Altra", "CORES": 1, "ITERATIONS": 141479, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.518462419509888, "TIME_S_1KI": 0.07434645720926701, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 335.0979061508179, "W": 22.891804729847298, "J_1KI": 2.368534596306292, "W_1KI": 0.1618035519748323, "W_D": 4.302804729847299, "J_D": 62.98589703011518, "W_D_1KI": 0.030413027586053753, "J_D_1KI": 0.0002149649600721927} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.output index a769041..063f646 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,266 +1,373 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.015372514724731445} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.015038013458251953} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 1000, 1000]), - col_indices=tensor([3209, 9868, 3248, 6619, 340, 2292, 7833, 3392, 6373, - 1926, 8761, 7309, 7662, 7112, 9220, 3460, 9210, 9337, - 5270, 8671, 5002, 6954, 8836, 761, 7936, 5205, 4423, - 5866, 2140, 76, 8198, 3105, 6063, 2414, 5795, 8249, - 3229, 3225, 6597, 3776, 3375, 2931, 9809, 7037, 3178, - 6061, 4148, 6345, 6554, 2041, 7831, 9356, 1293, 5890, - 3788, 7939, 1779, 945, 7194, 3467, 8405, 3255, 8893, - 1669, 2661, 614, 6554, 8211, 1542, 4830, 2116, 6825, - 4028, 8188, 3362, 1229, 1014, 2629, 60, 4341, 8573, - 344, 2144, 7288, 8591, 1396, 212, 7483, 7941, 134, - 292, 9035, 5218, 3760, 5255, 8326, 5872, 9422, 5064, - 1086, 5137, 505, 3749, 1743, 2035, 8335, 8836, 193, - 9939, 568, 4682, 8836, 9271, 8548, 6366, 5833, 4592, - 9204, 1646, 9941, 8566, 6013, 5398, 3083, 5993, 1547, - 9892, 4995, 9172, 6435, 2837, 813, 4813, 1229, 5022, - 7573, 4340, 3946, 5005, 3375, 4969, 6442, 1642, 4944, - 719, 4401, 9666, 2565, 7859, 2383, 6897, 7942, 2904, - 1047, 3110, 6786, 88, 4488, 8747, 4203, 3652, 6051, - 2302, 7218, 9399, 7374, 5910, 928, 5333, 4890, 6792, - 6062, 9221, 9163, 4174, 2896, 9079, 9463, 4208, 5064, - 7724, 2493, 6553, 5557, 8081, 9830, 2788, 4152, 7234, - 544, 6509, 5692, 1300, 2830, 9380, 1684, 9089, 5782, - 393, 9511, 2834, 554, 2905, 5654, 8350, 1747, 8061, - 3684, 7688, 2628, 4287, 3328, 8038, 7059, 3207, 9644, - 6057, 9467, 5893, 7274, 1133, 4739, 5738, 3748, 7812, - 2444, 9074, 454, 761, 500, 6433, 3779, 8509, 8830, - 6164, 5825, 850, 1492, 145, 499, 2214, 7508, 3582, - 1886, 2390, 8118, 3401, 5635, 7260, 8422, 6823, 6262, - 9571, 4326, 2162, 8956, 5708, 7110, 4681, 462, 9728, - 2261, 2203, 2746, 5158, 8676, 7833, 4522, 1961, 7126, - 5692, 6002, 4837, 8636, 405, 1956, 8123, 5103, 7581, - 269, 8632, 135, 5524, 5948, 6685, 8043, 3706, 935, - 1445, 8185, 7972, 9341, 2312, 3081, 2043, 4519, 1533, - 9679, 8227, 9554, 3652, 824, 8035, 5846, 6458, 8139, - 5279, 1551, 3623, 2541, 6344, 2318, 9360, 8584, 9478, - 5672, 6142, 2286, 8123, 5498, 770, 7176, 456, 9177, - 9809, 2702, 1264, 7605, 5300, 2103, 3058, 2051, 1882, - 6297, 3562, 1383, 6913, 7884, 2698, 3990, 9715, 8693, - 2273, 6508, 3649, 6905, 2527, 2597, 2004, 3902, 8701, - 3289, 7304, 3591, 7249, 5558, 5443, 8985, 9103, 809, - 2290, 2274, 5849, 6456, 6694, 4646, 1044, 3808, 5972, - 584, 9153, 1314, 8076, 1874, 630, 9422, 7201, 7271, - 2554, 8484, 6762, 4360, 9672, 9206, 7238, 670, 261, - 7282, 4415, 8095, 5313, 556, 825, 8440, 6187, 3980, - 9374, 4645, 1791, 2944, 8696, 6211, 8269, 9169, 6448, - 1597, 8204, 1505, 8627, 9214, 6978, 3170, 6053, 7401, - 1622, 5348, 9321, 8871, 4197, 4724, 5006, 1208, 9868, - 5608, 2933, 4739, 2856, 6964, 5457, 9490, 2939, 8482, - 1706, 9462, 5508, 6207, 2528, 5845, 6687, 1429, 6904, - 6857, 5980, 1821, 3495, 7978, 717, 8671, 579, 3286, - 887, 9751, 6063, 3554, 478, 2346, 2715, 6230, 8314, - 9263, 8757, 6504, 1482, 9283, 6266, 2632, 8114, 9322, - 2614, 4650, 6056, 8492, 3113, 826, 1487, 9140, 7416, - 7679, 9789, 7142, 4923, 1585, 5093, 5772, 2033, 5245, - 8006, 9978, 5857, 1355, 851, 5200, 8304, 5704, 3358, - 8939, 7230, 1375, 6361, 6987, 2890, 847, 284, 5635, - 2950, 5927, 4878, 706, 5188, 2344, 5086, 7880, 7517, - 1759, 3646, 2795, 5560, 4430, 6312, 6067, 4770, 6312, - 7816, 2489, 4516, 4206, 8931, 9933, 7409, 5795, 3224, - 9549, 513, 4869, 3275, 8533, 1434, 7035, 5766, 8355, - 3615, 7273, 4810, 1822, 8987, 5862, 7804, 6291, 3123, - 8324, 910, 3823, 8563, 5832, 1345, 743, 9538, 1425, - 
1584, 1245, 4705, 4281, 7546, 4065, 6171, 9291, 990, - 4041, 5597, 6554, 8240, 8238, 6185, 8030, 2085, 8194, - 9631, 1292, 3009, 3267, 1595, 2327, 1125, 1646, 4746, - 1415, 8714, 2232, 7073, 5930, 7011, 55, 989, 2475, - 5544, 2472, 4127, 1839, 5169, 2530, 1956, 7139, 3386, - 8181, 8523, 171, 1301, 5967, 548, 983, 9893, 5816, - 785, 9972, 7080, 7125, 1561, 326, 3497, 769, 7886, - 6549, 7277, 7031, 6240, 4002, 7102, 8726, 916, 4682, - 2949, 4811, 4044, 7673, 4734, 9767, 6661, 3130, 3538, - 3521, 4909, 9932, 861, 6634, 7437, 9719, 9476, 9830, - 4919, 9257, 8438, 5402, 3055, 7641, 9591, 2146, 858, - 7075, 8845, 1624, 3798, 8047, 3425, 4197, 3144, 8268, - 472, 8494, 4913, 2139, 8400, 8497, 4513, 89, 2209, - 8090, 2080, 3370, 2376, 3620, 2461, 6864, 494, 7285, - 1786, 2146, 1376, 4153, 3064, 5331, 3128, 7774, 4797, - 6823, 5809, 7554, 2423, 3053, 5382, 5543, 5683, 1277, - 4836, 9205, 7550, 6908, 2869, 2091, 2867, 5163, 7698, - 8343, 1967, 4007, 9458, 4283, 6112, 5901, 9866, 4663, - 8311, 5474, 5957, 9701, 7202, 5032, 1871, 8242, 1522, - 8481, 762, 5930, 8891, 7093, 2033, 9164, 152, 9198, - 4185, 4093, 4667, 5351, 6201, 7388, 6549, 8266, 7538, - 2670, 6818, 8834, 5685, 2671, 6016, 4286, 9742, 6121, - 4666, 6526, 6512, 259, 7033, 8244, 4223, 1085, 4469, - 1035, 7267, 9736, 8892, 5043, 9002, 4300, 8899, 8431, - 4947, 7717, 9483, 9531, 640, 9735, 136, 1004, 5963, - 2560, 1270, 1063, 6551, 6380, 9051, 6415, 2273, 8966, - 1448, 7975, 7104, 1638, 6895, 2796, 7076, 1607, 8593, - 8650, 4534, 4443, 8898, 3141, 730, 7859, 569, 4715, - 8107, 6041, 7589, 8647, 5268, 5224, 9357, 3430, 8808, - 12, 1062, 6461, 8940, 7168, 7178, 9954, 1483, 4577, - 6145, 3798, 5241, 3768, 4435, 9815, 4630, 4239, 3642, - 7610, 6283, 3620, 9310, 5539, 7589, 7924, 9409, 40, - 5336, 3123, 7751, 1910, 907, 9203, 6319, 755, 6279, - 6919, 5494, 5179, 9028, 1415, 5153, 7002, 7586, 3433, - 5870, 6467, 9059, 2416, 4341, 4715, 6133, 8530, 2254, - 5462, 9186, 7294, 4272, 1845, 4289, 5086, 9957, 5707, - 2498, 2610, 6779, 4816, 5544, 3242, 9052, 5820, 5426, - 475, 6174, 8032, 6228, 3859, 7942, 6852, 6841, 762, - 9084, 1360, 9518, 1399, 9474, 9073, 2698, 3992, 1688, - 6424, 4716, 9100, 9699, 7494, 4411, 8629, 5459, 3464, - 1277, 3163, 4665, 7812, 7685, 5789, 2468, 8083, 8019, - 2880, 7221, 4770, 2039, 4111, 4375, 8217, 9082, 7877, - 2202, 6073, 6164, 7099, 1964, 662, 7796, 1896, 7509, - 6563, 1763, 3395, 2316, 1045, 7898, 8435, 5923, 1093, - 4006, 5474, 2690, 3754, 7989, 9753, 5385, 2389, 2008, - 5223, 2679, 6615, 8933, 1776, 4320, 6999, 5312, 2814, - 7847, 7325, 670, 7203, 6195, 963, 5468, 6227, 8998, - 6214, 993, 9855, 2891, 1973, 3928, 3222, 2306, 3674, - 8093, 6359, 3072, 5055, 3934, 4856, 6132, 9135, 2922, - 5957]), - values=tensor([0.4275, 0.3881, 0.3139, 0.1808, 0.5310, 0.9604, 0.6046, - 0.8590, 0.0287, 0.6756, 0.8185, 0.6927, 0.6675, 0.9855, - 0.7208, 0.8799, 0.8629, 0.7190, 0.7258, 0.9361, 0.0339, - 0.8077, 0.0414, 0.6703, 0.6580, 0.3675, 0.6750, 0.5718, - 0.4038, 0.0553, 0.0413, 0.2273, 0.5506, 0.5477, 0.5624, - 0.2283, 0.5380, 0.3589, 0.2593, 0.3555, 0.2992, 0.1850, - 0.8180, 0.5232, 0.0988, 0.3425, 0.4717, 0.9349, 0.9395, - 0.6753, 0.8041, 0.9749, 0.8897, 0.6447, 0.0817, 0.5610, - 0.1896, 0.0161, 0.7003, 0.7673, 0.8307, 0.7283, 0.3245, - 0.7473, 0.8098, 0.7438, 0.5987, 0.9483, 0.2473, 0.6059, - 0.8590, 0.0578, 0.9300, 0.1277, 0.1910, 0.5390, 0.9923, - 0.4717, 0.2172, 0.2304, 0.7135, 0.7750, 0.1479, 0.7140, - 0.7426, 0.6959, 0.2516, 0.7713, 0.8673, 0.3328, 0.6887, - 0.0208, 0.8741, 0.0944, 0.9474, 0.4933, 0.1551, 0.9330, - 0.9766, 
0.0714, 0.8864, 0.8490, 0.5548, 0.2259, 0.0151, - 0.2924, 0.3046, 0.3078, 0.5126, 0.0910, 0.6552, 0.9529, - 0.7444, 0.5430, 0.3329, 0.7363, 0.3272, 0.2064, 0.4308, - 0.1399, 0.3205, 0.6451, 0.2111, 0.5851, 0.8748, 0.5024, - 0.1107, 0.3502, 0.5846, 0.1250, 0.2751, 0.3449, 0.5057, - 0.2501, 0.6308, 0.3508, 0.6882, 0.9384, 0.9511, 0.0630, - 0.4802, 0.4548, 0.7968, 0.6334, 0.3897, 0.8817, 0.1901, - 0.5186, 0.8390, 0.1266, 0.4062, 0.8053, 0.1020, 0.6893, - 0.0686, 0.2855, 0.7574, 0.2385, 0.1315, 0.1501, 0.7480, - 0.6280, 0.4408, 0.8455, 0.1285, 0.1782, 0.6626, 0.6066, - 0.6225, 0.0217, 0.6914, 0.5402, 0.9870, 0.3813, 0.7033, - 0.7712, 0.4849, 0.9511, 0.0434, 0.4425, 0.9459, 0.2266, - 0.2372, 0.1136, 0.1849, 0.2397, 0.0967, 0.7983, 0.6073, - 0.4963, 0.1922, 0.1740, 0.8024, 0.3263, 0.6368, 0.5308, - 0.5142, 0.6581, 0.3418, 0.3095, 0.4538, 0.9619, 0.2867, - 0.0950, 0.5813, 0.8368, 0.3176, 0.9734, 0.7668, 0.2728, - 0.2680, 0.8861, 0.2565, 0.4795, 0.6296, 0.9507, 0.8958, - 0.6429, 0.7807, 0.0849, 0.2596, 0.2876, 0.2984, 0.4516, - 0.1776, 0.5252, 0.4818, 0.5382, 0.0378, 0.7754, 0.1815, - 0.5299, 0.4358, 0.7701, 0.9552, 0.7604, 0.5343, 0.8461, - 0.3733, 0.4748, 0.5019, 0.5319, 0.1950, 0.4915, 0.8982, - 0.8792, 0.4578, 0.6727, 0.5864, 0.0044, 0.3636, 0.0865, - 0.2878, 0.8889, 0.8029, 0.0564, 0.2779, 0.2583, 0.5323, - 0.1895, 0.5860, 0.2646, 0.8879, 0.0599, 0.9224, 0.7196, - 0.2286, 0.8550, 0.3374, 0.6693, 0.7673, 0.1370, 0.6482, - 0.0168, 0.4774, 0.5008, 0.7572, 0.1368, 0.1470, 0.6304, - 0.9706, 0.7405, 0.7412, 0.6298, 0.9122, 0.5157, 0.2650, - 0.2818, 0.6111, 0.7527, 0.0490, 0.1004, 0.1241, 0.4454, - 0.2894, 0.8228, 0.9204, 0.4938, 0.0564, 0.5710, 0.0400, - 0.2954, 0.8673, 0.1061, 0.7455, 0.9212, 0.6252, 0.7116, - 0.4632, 0.3277, 0.9426, 0.1467, 0.0608, 0.4242, 0.5067, - 0.2902, 0.2721, 0.6498, 0.6839, 0.5643, 0.4679, 0.4103, - 0.1304, 0.0703, 0.8222, 0.8881, 0.0527, 0.9839, 0.0318, - 0.0604, 0.9264, 0.4420, 0.3194, 0.8482, 0.7627, 0.6903, - 0.2123, 0.7648, 0.9926, 0.4948, 0.8455, 0.3602, 0.4169, - 0.8328, 0.4984, 0.5681, 0.5841, 0.1407, 0.5588, 0.2134, - 0.7507, 0.9645, 0.2482, 0.2182, 0.2073, 0.8998, 0.3807, - 0.6886, 0.2991, 0.4671, 0.6025, 0.2544, 0.5519, 0.8787, - 0.3819, 0.8798, 0.3104, 0.1937, 0.3473, 0.6897, 0.2116, - 0.3715, 0.4188, 0.9594, 0.7854, 0.5738, 0.0788, 0.2956, - 0.4393, 0.0205, 0.4497, 0.5799, 0.6998, 0.9907, 0.6955, - 0.3688, 0.5214, 0.6675, 0.4868, 0.9051, 0.2066, 0.5278, - 0.8434, 0.2920, 0.5001, 0.1687, 0.6222, 0.3592, 0.3402, - 0.8386, 0.9190, 0.2841, 0.6388, 0.8916, 0.7093, 0.0676, - 0.8268, 0.3857, 0.7248, 0.8444, 0.9262, 0.3228, 0.1858, - 0.0541, 0.0147, 0.3726, 0.7119, 0.3771, 0.7018, 0.0498, - 0.9764, 0.7213, 0.6857, 0.9076, 0.6930, 0.9925, 0.6796, - 0.9099, 0.8957, 0.7847, 0.7694, 0.5255, 0.5450, 0.8537, - 0.1109, 0.6083, 0.2595, 0.0732, 0.5598, 0.3732, 0.8030, - 0.2943, 0.5044, 0.8687, 0.1569, 0.7485, 0.8397, 0.0563, - 0.7580, 0.1302, 0.3220, 0.5738, 0.3028, 0.8634, 0.5299, - 0.5384, 0.7185, 0.0740, 0.0896, 0.9393, 0.1708, 0.1684, - 0.2457, 0.7029, 0.1009, 0.4980, 0.2762, 0.9245, 0.0022, - 0.2297, 0.4398, 0.5470, 0.7673, 0.7132, 0.8762, 0.6237, - 0.6368, 0.1300, 0.5274, 0.0913, 0.3842, 0.9630, 0.8184, - 0.7355, 0.5099, 0.4932, 0.6527, 0.8872, 0.4640, 0.0588, - 0.0306, 0.0236, 0.7435, 0.9265, 0.3932, 0.2464, 0.9514, - 0.8510, 0.0383, 0.3895, 0.0953, 0.4855, 0.1771, 0.7719, - 0.8612, 0.3632, 0.8694, 0.4383, 0.3873, 0.3076, 0.9881, - 0.4672, 0.6409, 0.8009, 0.0756, 0.2586, 0.8768, 0.1936, - 0.7249, 0.5557, 0.8572, 0.7169, 0.0050, 0.3295, 0.4291, - 0.4545, 0.4668, 0.5083, 
0.2008, 0.3836, 0.4073, 0.4304, - 0.5879, 0.4421, 0.6393, 0.9889, 0.4650, 0.1876, 0.0798, - 0.1449, 0.8117, 0.9255, 0.7918, 0.1406, 0.4863, 0.3651, - 0.6067, 0.0364, 0.4333, 0.7822, 0.1643, 0.8343, 0.6051, - 0.6308, 0.5806, 0.0642, 0.7919, 0.6050, 0.3368, 0.7779, - 0.6713, 0.9857, 0.2853, 0.6567, 0.0967, 0.1455, 0.3512, - 0.6778, 0.9504, 0.2678, 0.9544, 0.9340, 0.7942, 0.7527, - 0.8358, 0.7805, 0.5659, 0.6318, 0.6349, 0.7939, 0.9928, - 0.5106, 0.8033, 0.2636, 0.1925, 0.5894, 0.1231, 0.2221, - 0.7313, 0.2019, 0.1380, 0.7432, 0.1575, 0.1695, 0.9626, - 0.8004, 0.5557, 0.4175, 0.9737, 0.1028, 0.5973, 0.2245, - 0.1568, 0.0148, 0.5338, 0.5531, 0.7419, 0.6821, 0.6004, - 0.5075, 0.2898, 0.0572, 0.6541, 0.9867, 0.3169, 0.1091, - 0.3007, 0.8230, 0.1327, 0.9676, 0.8384, 0.4791, 0.8642, - 0.9498, 0.1113, 0.9321, 0.2886, 0.8491, 0.4298, 0.3001, - 0.9524, 0.4168, 0.8559, 0.9356, 0.3367, 0.3858, 0.3719, - 0.5105, 0.6573, 0.6925, 0.8585, 0.6425, 0.0422, 0.4540, - 0.3811, 0.7744, 0.8488, 0.8536, 0.4800, 0.9205, 0.8820, - 0.7503, 0.7388, 0.7461, 0.1778, 0.5414, 0.8666, 0.9751, - 0.7892, 0.7263, 0.7718, 0.9065, 0.4577, 0.3046, 0.6991, - 0.4446, 0.7928, 0.1473, 0.3983, 0.1582, 0.8541, 0.4231, - 0.5913, 0.3400, 0.8006, 0.4155, 0.5300, 0.2295, 0.5802, - 0.4966, 0.0158, 0.5097, 0.7507, 0.4467, 0.6603, 0.6281, - 0.1152, 0.6784, 0.7820, 0.0012, 0.3761, 0.8898, 0.3791, - 0.9433, 0.1967, 0.8005, 0.7968, 0.3696, 0.5243, 0.0864, - 0.5603, 0.6699, 0.3078, 0.6225, 0.0752, 0.2391, 0.9255, - 0.2650, 0.4673, 0.0102, 0.8488, 0.0879, 0.5946, 0.5603, - 0.8401, 0.0314, 0.3946, 0.8748, 0.3681, 0.9390, 0.1692, - 0.0070, 0.7205, 0.1654, 0.3264, 0.1626, 0.1313, 0.4264, - 0.8391, 0.2851, 0.5610, 0.8789, 0.5540, 0.4573, 0.8763, - 0.8174, 0.9396, 0.8320, 0.3091, 0.2567, 0.5267, 0.9107, - 0.4557, 0.5158, 0.6588, 0.7481, 0.3435, 0.5032, 0.1494, - 0.2718, 0.9923, 0.0652, 0.5633, 0.2524, 0.1153, 0.8668, - 0.0841, 0.4940, 0.8535, 0.3145, 0.1873, 0.9043, 0.8521, - 0.2257, 0.4809, 0.4740, 0.4980, 0.5276, 0.9017, 0.9235, - 0.9432, 0.5306, 0.4201, 0.2755, 0.9923, 0.8605, 0.1183, - 0.9099, 0.3224, 0.9282, 0.7819, 0.8800, 0.4258, 0.6844, - 0.1727, 0.5079, 0.3511, 0.1414, 0.6247, 0.1502, 0.3955, - 0.8356, 0.2976, 0.3207, 0.8266, 0.7924, 0.8054, 0.2943, - 0.7693, 0.0348, 0.4802, 0.5128, 0.1271, 0.4537, 0.3203, - 0.9071, 0.7626, 0.0279, 0.0653, 0.7417, 0.8793, 0.3925, - 0.3770, 0.0111, 0.1162, 0.8992, 0.1358, 0.0867, 0.3383, - 0.0706, 0.8072, 0.4835, 0.2675, 0.0759, 0.3658, 0.8759, - 0.2284, 0.3000, 0.0818, 0.5543, 0.6464, 0.9176, 0.5691, - 0.4971, 0.2282, 0.3215, 0.4138, 0.8085, 0.5363, 0.5913, - 0.1304, 0.2349, 0.6791, 0.2305, 0.3800, 0.4937, 0.6027, - 0.6534, 0.4279, 0.4943, 0.0085, 0.1579, 0.4565, 0.1248, - 0.0451, 0.7534, 0.2521, 0.6142, 0.7396, 0.5206, 0.5054, - 0.5613, 0.0793, 0.7518, 0.6824, 0.6628, 0.6832, 0.8860, - 0.4838, 0.1172, 0.2289, 0.9153, 0.1052, 0.5633, 0.2296, - 0.0191, 0.7705, 0.1415, 0.7241, 0.1302, 0.8941, 0.8284, - 0.8306, 0.6027, 0.1602, 0.9703, 0.1293, 0.7369, 0.5368, - 0.4238, 0.6523, 0.3576, 0.1197, 0.1318, 0.1788, 0.5367, - 0.3701, 0.7941, 0.1745, 0.1459, 0.5116, 0.3561, 0.4053, - 0.8269, 0.1280, 0.7759, 0.6886, 0.1888, 0.5706, 0.1161, - 0.8356, 0.4561, 0.7736, 0.4631, 0.3148, 0.8575, 0.0637, - 0.0338, 0.5995, 0.4671, 0.3925, 0.0599, 0.4135, 0.6334, - 0.0196, 0.1795, 0.1793, 0.0102, 0.8458, 0.5380, 0.4059, - 0.0568, 0.6315, 0.2073, 0.7972, 0.9797, 0.0690, 0.3810, - 0.7467, 0.1427, 0.9719, 0.3101, 0.6784, 0.3571, 0.7329, - 0.6992, 0.2961, 0.7605, 0.3985, 0.1295, 0.8951, 0.5622, - 0.2362, 0.0149, 0.3226, 0.8306, 0.2548, 
0.2532, 0.8888, - 0.1434, 0.1482, 0.2179, 0.8422, 0.6150, 0.7603, 0.2665, - 0.8673, 0.3035, 0.1706, 0.2689, 0.9151, 0.1045, 0.0141, - 0.4861, 0.1566, 0.2750, 0.3958, 0.0336, 0.7854, 0.0838, - 0.7202, 0.5357, 0.1356, 0.8048, 0.1340, 0.0716, 0.5308, - 0.4959, 0.3349, 0.8036, 0.6913, 0.6107, 0.4431, 0.5386, - 0.3116, 0.0546, 0.1399, 0.8659, 0.1354, 0.9518]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), + col_indices=tensor([5864, 4405, 5729, 7440, 7532, 6631, 7923, 3010, 3029, + 2940, 5062, 9268, 3446, 187, 4317, 4648, 8062, 3893, + 5019, 2299, 688, 2729, 9745, 6309, 3931, 8649, 4204, + 4550, 1634, 8927, 8336, 8142, 9678, 7824, 5140, 1172, + 9318, 7955, 3525, 9079, 8745, 526, 6166, 8688, 4874, + 621, 8656, 5589, 8913, 5798, 4682, 5407, 4443, 3266, + 5338, 9311, 8101, 7686, 8250, 4928, 8287, 1035, 5751, + 7949, 1534, 1248, 8898, 457, 1669, 3571, 4261, 7091, + 1987, 7196, 6543, 3083, 8344, 3427, 8934, 8788, 9993, + 582, 3612, 205, 4374, 9675, 7025, 6706, 8283, 9758, + 388, 6656, 4662, 2840, 9133, 6223, 2008, 103, 6263, + 1780, 8794, 2485, 595, 8760, 7131, 920, 639, 3147, + 4988, 2784, 4643, 5882, 9084, 4427, 3471, 7788, 1329, + 817, 9799, 4531, 2797, 6701, 557, 9001, 9631, 7526, + 6160, 8435, 4528, 7202, 8159, 5898, 3411, 5849, 9882, + 6172, 8234, 8877, 6633, 6715, 9567, 7586, 6894, 7152, + 9726, 3832, 9776, 125, 4402, 9464, 3151, 1828, 9161, + 8749, 3208, 5145, 207, 840, 4912, 5074, 8030, 1359, + 623, 1415, 3038, 6216, 5323, 7570, 1881, 9778, 6477, + 7927, 5460, 9115, 5723, 9411, 6310, 8678, 6443, 8043, + 3938, 4435, 6970, 5667, 9380, 8000, 88, 1075, 6191, + 4832, 1398, 590, 8856, 7762, 18, 4896, 2673, 4406, + 5972, 6040, 6302, 2537, 3116, 8067, 2006, 3745, 6151, + 4430, 1104, 7963, 6998, 6091, 54, 4316, 8371, 108, + 3997, 4987, 6055, 2400, 9143, 3879, 5197, 7322, 6442, + 8086, 1968, 5445, 6409, 7547, 7850, 1997, 7316, 4261, + 6840, 8827, 8907, 6893, 1052, 5156, 6116, 6734, 8081, + 3162, 3922, 2558, 9316, 3657, 7593, 2454, 8315, 4197, + 2551, 3742, 9899, 2057, 3451, 7493, 8221, 763, 4779, + 7437, 3879, 3226, 77, 4531, 1288, 7302, 743, 4595, + 4957, 47, 8535, 2788, 1255, 3015, 7524, 2958, 8560, + 9523, 2112, 3672, 7668, 7058, 939, 8508, 7521, 9876, + 8727, 1041, 4296, 1044, 3045, 9281, 716, 9005, 1113, + 8303, 8091, 1990, 6310, 9195, 4783, 3372, 3304, 3193, + 7372, 3161, 4758, 3845, 4375, 9660, 6299, 8248, 509, + 8758, 891, 6268, 6139, 854, 20, 4556, 7142, 6056, + 8643, 8049, 2424, 7944, 2637, 8370, 5950, 8405, 1972, + 4423, 2517, 6182, 1406, 7119, 4896, 2307, 168, 2365, + 4903, 9570, 669, 1652, 8495, 9481, 2603, 8419, 2066, + 9933, 3726, 7242, 2083, 8951, 3929, 1191, 4049, 3373, + 3848, 9600, 5827, 8799, 3573, 4984, 9578, 7389, 1211, + 4158, 5931, 9785, 7126, 1041, 4298, 5765, 4365, 1450, + 1653, 4243, 1734, 995, 6040, 9111, 2176, 8777, 2449, + 8396, 597, 1313, 3615, 1643, 4888, 1561, 8244, 162, + 8233, 1654, 347, 9492, 4177, 3312, 3387, 273, 7372, + 3006, 4312, 6692, 325, 368, 7402, 2412, 231, 2046, + 6680, 6048, 6976, 8052, 1102, 7253, 4833, 4306, 5249, + 9819, 9331, 1749, 2497, 6510, 8638, 8909, 1758, 2563, + 2825, 9746, 3224, 7605, 2037, 4549, 1532, 4978, 8668, + 4788, 8674, 6963, 9348, 8157, 5042, 2243, 8555, 2821, + 4124, 4465, 3374, 9318, 5948, 1863, 4205, 794, 2936, + 8061, 4396, 5407, 1908, 2557, 250, 1684, 1591, 7521, + 4302, 7559, 3306, 5734, 3705, 5139, 7808, 9171, 7448, + 3166, 7892, 8250, 6347, 6558, 8466, 6396, 9087, 6296, + 1015, 5284, 5520, 338, 5988, 5752, 5645, 296, 7248, + 5080, 4162, 9783, 5922, 9767, 242, 1082, 8261, 1824, + 7615, 2709, 4270, 7780, 1619, 
5940, 3421, 5470, 4103, + 8926, 5526, 9185, 6704, 2670, 5915, 2791, 1376, 5378, + 9194, 7394, 9148, 7925, 3350, 656, 4911, 3343, 4534, + 467, 1174, 1829, 246, 7761, 3214, 7978, 6297, 3696, + 9152, 4257, 1481, 2689, 6730, 3241, 4178, 3753, 9837, + 1108, 6204, 5445, 8378, 6056, 4580, 9030, 9212, 9030, + 1400, 613, 3361, 5176, 2298, 295, 3058, 23, 6486, + 5741, 8561, 1316, 220, 3310, 8215, 3625, 531, 3459, + 9576, 2373, 9409, 9105, 4982, 6915, 1230, 8555, 3272, + 4176, 164, 1680, 7224, 2405, 8991, 7384, 3396, 6561, + 5206, 8418, 5306, 2280, 5693, 4535, 1143, 2814, 8807, + 669, 6445, 3177, 1051, 9340, 5283, 1519, 6325, 8838, + 4270, 8189, 386, 4649, 7472, 390, 2762, 4361, 2155, + 5878, 9379, 5583, 6009, 299, 5805, 7786, 3822, 616, + 9028, 2596, 2539, 172, 7760, 9662, 344, 7629, 7369, + 8064, 1852, 7783, 5748, 7909, 7509, 9640, 604, 2104, + 2421, 5040, 7694, 2374, 4549, 7046, 1134, 3291, 3798, + 3413, 6204, 5144, 5289, 3573, 9405, 1262, 7227, 8003, + 2101, 8909, 2871, 7271, 69, 7038, 3632, 2323, 8234, + 9876, 2845, 146, 427, 9994, 8060, 5905, 9220, 3837, + 8060, 208, 9111, 2330, 7996, 3768, 4736, 896, 1557, + 8512, 2664, 2766, 5757, 1751, 7710, 7127, 8099, 5560, + 3418, 8501, 339, 8748, 1489, 7536, 1398, 4290, 5961, + 1829, 2771, 1394, 4536, 4451, 6336, 1890, 4192, 6774, + 351, 4873, 5307, 1118, 2189, 4774, 4081, 5800, 8345, + 9337, 1074, 4918, 5054, 7310, 6935, 2657, 7393, 6763, + 3101, 194, 6227, 9939, 3573, 83, 201, 6117, 7206, + 8694, 347, 5592, 7666, 4392, 1220, 3664, 8919, 4104, + 7931, 9707, 3131, 2942, 2402, 5466, 3228, 8951, 640, + 9884, 7582, 2657, 4935, 3168, 9673, 1042, 1887, 9404, + 245, 7909, 7862, 3135, 636, 1649, 7858, 8647, 5775, + 2178, 5196, 1351, 7501, 104, 9655, 5505, 8201, 6926, + 8086, 9449, 7230, 4913, 185, 7834, 1229, 3132, 2955, + 4329, 6499, 3840, 9373, 1762, 4498, 4319, 8713, 3252, + 2114, 7816, 4767, 6485, 4067, 6275, 8622, 952, 8236, + 1705, 6324, 5388, 2883, 4479, 1175, 2899, 4956, 9717, + 5550, 1340, 1232, 3741, 5410, 1629, 4898, 8253, 8933, + 9596, 3848, 7857, 4262, 9797, 6282, 5114, 6366, 6932, + 6236, 8268, 811, 9368, 8403, 4439, 3156, 4635, 6644, + 1535, 4342, 7048, 4399, 8769, 8551, 8209, 1885, 9012, + 1832, 1415, 8444, 7279, 9817, 9759, 6774, 1774, 7153, + 5597, 5493, 6737, 8857, 6841, 6826, 3593, 7876, 5876, + 8371, 1413, 801, 5784, 4131, 8643, 4143, 301, 467, + 1936, 2117, 1380, 5002, 3976, 4001, 3738, 8088, 1488, + 9809, 4119, 991, 5512, 1386, 4787, 9676, 8833, 1136, + 3925, 252, 7937, 8274, 7768, 331, 1821, 7503, 8390, + 7044, 5029, 4614, 5356, 3537, 5617, 32, 2473, 9657, + 4379, 5121, 9250, 7374, 4691, 7529, 488, 9057, 3679, + 3667, 7445, 3209, 9840, 1176, 6066, 41, 394, 7868, + 5036, 5034, 1330, 3866, 7476, 8236, 2971, 9536, 9829, + 7990, 9531, 1063, 6718, 7956, 339, 65, 8969, 3120, + 5261, 9988, 3742, 4027, 9119, 902, 2482, 5917, 6387, + 1166, 1246, 3839, 6746, 3345, 6549, 3976, 7255, 7032, + 2273, 2201, 4349, 8490, 5982, 3736, 8569, 4990, 2562, + 2244, 7437, 8467, 8781, 7162, 9346, 4410, 2881, 1272, + 5540]), + values=tensor([3.0625e-01, 6.7934e-01, 4.5397e-01, 6.5607e-02, + 1.4994e-02, 5.2918e-01, 5.8821e-01, 8.8482e-01, + 1.7902e-01, 7.5536e-01, 1.3656e-01, 3.7672e-01, + 9.6214e-01, 7.7965e-01, 5.3679e-01, 9.0486e-01, + 4.8461e-01, 4.9381e-01, 6.4311e-01, 3.1023e-01, + 4.5858e-01, 7.9317e-01, 8.9086e-01, 4.2077e-01, + 9.6955e-01, 2.4388e-01, 7.3066e-01, 5.8800e-01, + 3.9417e-01, 4.2279e-02, 9.0147e-01, 5.4018e-01, + 5.5235e-01, 6.5805e-01, 8.0654e-01, 8.7663e-01, + 5.3809e-01, 8.2417e-01, 1.4615e-01, 5.1071e-01, + 8.7138e-01, 3.0842e-01, 
4.2755e-01, 3.1038e-01, + 3.7370e-01, 9.2119e-01, 5.0073e-01, 7.0744e-01, + 7.8605e-02, 1.2861e-01, 5.9206e-01, 3.5245e-01, + 7.0183e-01, 5.4927e-01, 3.7253e-01, 9.0190e-01, + 2.9039e-01, 8.6832e-01, 3.1786e-01, 1.3646e-01, + 7.8303e-01, 4.3688e-01, 7.6723e-01, 5.3263e-01, + 1.8766e-01, 2.7440e-02, 9.2946e-01, 8.0868e-01, + 1.7153e-01, 9.5255e-01, 9.1104e-01, 2.6816e-01, + 8.8685e-01, 3.8851e-01, 4.8158e-01, 5.8146e-01, + 3.9322e-01, 6.5135e-01, 8.1410e-01, 7.4420e-01, + 7.4368e-01, 4.7538e-01, 1.6606e-01, 4.2086e-01, + 4.2518e-01, 9.5223e-01, 2.6404e-02, 3.0836e-01, + 2.9882e-01, 3.4906e-01, 5.2507e-01, 1.0146e-01, + 8.4190e-01, 8.7220e-01, 8.3270e-01, 5.2602e-01, + 1.5987e-02, 8.4776e-01, 3.6377e-02, 1.6271e-01, + 3.5542e-01, 8.8639e-01, 3.5242e-01, 9.5873e-01, + 6.4359e-01, 3.7253e-02, 8.8893e-01, 2.3248e-01, + 1.9166e-01, 8.6615e-02, 3.1060e-01, 4.3209e-01, + 7.4680e-01, 4.3259e-01, 6.3440e-01, 5.8079e-01, + 1.6977e-01, 1.4637e-02, 2.3253e-01, 7.4857e-01, + 6.7054e-01, 2.5624e-01, 7.5784e-01, 5.7191e-01, + 4.9739e-01, 7.0194e-01, 2.4938e-01, 7.0627e-01, + 6.1347e-01, 8.8315e-02, 4.3875e-01, 2.4492e-01, + 5.9218e-01, 8.9638e-02, 4.3891e-01, 6.8567e-01, + 2.7211e-01, 2.1737e-01, 8.3642e-01, 9.8885e-01, + 5.6275e-02, 1.6920e-01, 2.8433e-01, 5.7337e-01, + 3.6808e-01, 4.8947e-01, 2.8678e-01, 1.0292e-01, + 8.3071e-02, 7.7823e-01, 5.5554e-01, 5.9102e-01, + 7.4360e-01, 4.0772e-01, 1.1013e-01, 3.3269e-01, + 6.5881e-01, 6.5737e-01, 1.1134e-01, 3.7097e-01, + 8.4800e-01, 7.5459e-01, 8.5874e-01, 1.9071e-01, + 1.9302e-01, 9.5168e-01, 4.5168e-01, 2.7821e-01, + 4.7216e-01, 6.4694e-01, 1.7592e-01, 7.0750e-01, + 6.0090e-01, 2.9098e-01, 2.2334e-01, 4.8707e-01, + 3.3010e-01, 9.9041e-01, 4.9924e-02, 4.5500e-02, + 8.3591e-01, 1.8594e-01, 8.0137e-01, 1.8758e-01, + 1.9161e-01, 3.6148e-01, 5.4027e-01, 5.3768e-01, + 5.0506e-01, 1.9153e-01, 6.1544e-01, 9.7534e-01, + 4.9864e-01, 3.0502e-02, 4.2353e-01, 3.7881e-01, + 7.8943e-01, 9.0021e-01, 6.2319e-01, 4.9896e-01, + 5.9048e-01, 9.4855e-01, 1.7406e-01, 4.3749e-02, + 7.9835e-01, 8.2535e-01, 6.3527e-01, 9.6898e-02, + 1.5264e-01, 4.8639e-01, 1.3966e-01, 8.3261e-01, + 4.9434e-01, 5.5775e-01, 5.1155e-01, 2.3060e-01, + 9.3227e-01, 3.1296e-01, 8.2093e-01, 8.0661e-01, + 5.4982e-01, 5.8890e-01, 5.9634e-01, 2.3659e-01, + 1.6939e-01, 1.1154e-01, 2.6894e-02, 3.7075e-01, + 9.2418e-01, 7.9254e-01, 5.4200e-01, 7.8036e-01, + 6.9019e-01, 4.2285e-04, 3.7143e-01, 2.2277e-01, + 5.0315e-01, 9.1134e-01, 2.2544e-01, 8.9445e-01, + 3.3790e-01, 4.7058e-01, 8.8960e-01, 2.5217e-01, + 2.4401e-01, 4.3596e-01, 7.2872e-01, 9.3035e-01, + 6.9921e-01, 4.6415e-01, 4.7574e-01, 2.3138e-01, + 7.9896e-01, 8.4067e-01, 6.8784e-02, 2.3958e-01, + 6.2501e-01, 3.2428e-02, 6.1903e-01, 8.2588e-01, + 3.5043e-01, 2.7078e-01, 5.9017e-01, 6.1178e-01, + 9.7785e-01, 6.0496e-01, 7.8839e-01, 1.2857e-01, + 1.4042e-01, 8.7156e-01, 2.9379e-02, 7.9930e-01, + 9.5713e-01, 7.0495e-01, 5.6471e-01, 9.6826e-01, + 2.9073e-01, 4.6323e-02, 4.1706e-01, 2.9331e-01, + 4.8944e-01, 8.1897e-01, 2.5378e-01, 3.3311e-02, + 6.5191e-01, 4.5247e-02, 4.6072e-01, 9.1319e-01, + 5.9023e-01, 6.4136e-01, 6.4373e-01, 6.4903e-01, + 7.2608e-01, 4.2135e-01, 6.4504e-01, 5.3251e-01, + 7.7245e-01, 2.6378e-01, 6.0571e-01, 6.4307e-01, + 4.5026e-02, 6.0776e-01, 3.2274e-01, 6.5712e-01, + 1.4759e-02, 6.5568e-01, 5.1803e-01, 6.2868e-01, + 9.1672e-01, 4.8572e-01, 6.6685e-02, 1.7251e-01, + 7.4338e-01, 8.1045e-01, 5.9846e-01, 5.3984e-01, + 8.6075e-01, 1.8302e-01, 7.0490e-01, 5.9852e-01, + 6.9520e-01, 5.1208e-01, 9.2567e-01, 7.7140e-02, + 3.9521e-01, 1.3899e-01, 
9.4503e-01, 9.4233e-01, + 8.9797e-01, 4.7187e-01, 7.8606e-01, 3.0687e-01, + 3.9440e-01, 8.9646e-01, 3.9176e-01, 2.5017e-01, + 2.4380e-01, 5.3721e-01, 1.5510e-01, 6.4122e-01, + 9.1365e-01, 4.7972e-02, 4.6932e-01, 3.8974e-01, + 4.5906e-01, 4.3810e-01, 6.7747e-01, 6.2677e-01, + 9.5224e-01, 7.6873e-01, 6.9173e-02, 3.6705e-01, + 1.9893e-01, 8.9358e-02, 7.6222e-01, 1.3259e-01, + 2.9482e-01, 4.3797e-01, 2.7561e-01, 3.8786e-01, + 4.0850e-01, 1.4337e-01, 9.6088e-01, 4.4425e-03, + 8.1598e-01, 8.7053e-01, 6.8297e-01, 5.2642e-01, + 5.8953e-01, 1.2577e-01, 2.2204e-01, 4.1336e-01, + 4.6982e-01, 5.5566e-01, 5.0762e-01, 9.3365e-01, + 3.4666e-02, 8.5398e-01, 1.5699e-01, 4.1478e-01, + 3.9502e-01, 8.1801e-03, 7.4207e-01, 5.4685e-01, + 6.8142e-01, 7.7258e-01, 6.0246e-01, 4.8885e-01, + 3.2397e-01, 5.3911e-03, 8.0300e-01, 9.9099e-01, + 3.4198e-01, 7.2895e-01, 2.2300e-01, 2.9611e-01, + 1.8872e-01, 8.4295e-01, 1.6342e-01, 4.6008e-01, + 3.7600e-01, 6.8675e-01, 8.8407e-02, 8.1393e-01, + 1.1445e-01, 4.3392e-01, 7.4310e-01, 6.4179e-02, + 1.2789e-01, 5.7826e-01, 1.7856e-01, 2.0319e-01, + 7.8733e-01, 1.4716e-01, 3.4396e-01, 2.7895e-01, + 8.6068e-01, 5.5215e-03, 5.4924e-01, 4.2811e-01, + 3.3978e-01, 8.1832e-01, 2.6194e-01, 9.8799e-02, + 4.1033e-01, 5.4947e-01, 6.7066e-01, 8.9361e-01, + 5.7983e-01, 6.6581e-01, 2.0998e-01, 9.3360e-01, + 7.5859e-01, 5.0295e-02, 2.9976e-01, 8.7684e-01, + 1.7065e-01, 5.7729e-01, 5.7103e-01, 9.3945e-01, + 1.2343e-01, 7.6483e-01, 5.7842e-01, 4.8845e-01, + 6.9910e-01, 1.0693e-01, 4.0385e-01, 9.4249e-03, + 8.7031e-01, 9.8505e-01, 3.0057e-01, 8.6716e-01, + 2.0695e-01, 5.1809e-01, 4.2743e-01, 8.5435e-01, + 1.3483e-01, 8.3962e-01, 4.0581e-01, 8.8927e-01, + 5.9780e-03, 8.1042e-01, 5.9508e-01, 2.1764e-01, + 9.9950e-01, 6.5594e-02, 6.5787e-01, 6.8796e-01, + 1.6003e-02, 6.5005e-01, 2.5523e-01, 9.6310e-01, + 8.1280e-01, 9.6850e-01, 4.1017e-01, 9.0061e-01, + 9.0083e-01, 5.3784e-01, 3.4937e-01, 8.1447e-01, + 4.0901e-01, 1.0740e-01, 1.1399e-01, 4.7569e-01, + 9.5147e-01, 7.5053e-01, 5.5328e-01, 2.6735e-01, + 3.3430e-01, 1.2330e-01, 3.8451e-01, 6.1621e-01, + 1.9907e-02, 3.0287e-01, 9.1099e-01, 4.5796e-02, + 7.8138e-01, 4.1412e-01, 2.3792e-01, 2.1758e-01, + 8.9713e-01, 2.5993e-01, 2.6339e-01, 1.1194e-01, + 7.8726e-01, 7.6580e-01, 8.3177e-01, 2.9270e-01, + 7.6718e-01, 2.7596e-01, 8.3319e-01, 5.2919e-01, + 3.4066e-01, 7.9104e-01, 1.7002e-01, 2.0658e-01, + 5.9089e-01, 1.8325e-01, 5.8275e-01, 2.3186e-01, + 2.4370e-01, 5.7392e-01, 4.4874e-01, 7.8507e-01, + 6.2326e-01, 8.2729e-01, 5.3853e-01, 1.1013e-01, + 9.0667e-01, 7.6253e-01, 4.2696e-01, 4.2688e-01, + 2.7118e-02, 7.3923e-01, 4.4817e-01, 8.0269e-01, + 3.0568e-01, 5.6891e-01, 7.4765e-01, 5.4850e-02, + 4.7863e-01, 8.2522e-01, 6.9770e-01, 9.7186e-01, + 1.7059e-01, 6.5103e-01, 1.7090e-01, 6.1770e-01, + 4.2760e-01, 3.0737e-01, 4.3497e-01, 5.0266e-01, + 1.0475e-01, 8.0105e-01, 7.9233e-01, 9.0753e-01, + 6.7050e-01, 3.2824e-03, 8.6454e-01, 5.8482e-01, + 7.2952e-01, 6.3882e-01, 3.7026e-01, 9.3051e-01, + 7.0332e-01, 8.9754e-01, 3.9017e-01, 9.7750e-01, + 3.1535e-01, 9.3470e-01, 1.0995e-01, 9.7427e-01, + 9.1949e-01, 3.5201e-01, 4.2497e-01, 9.0324e-01, + 6.6742e-01, 6.7347e-01, 3.7225e-01, 3.0518e-01, + 2.3208e-01, 5.1253e-01, 8.7073e-01, 2.9594e-01, + 1.0317e-01, 4.4350e-01, 8.5109e-01, 2.5671e-01, + 7.6732e-01, 7.1259e-02, 3.4578e-01, 8.9922e-01, + 9.6768e-01, 9.8119e-02, 5.9218e-02, 9.3921e-01, + 7.0508e-01, 6.0928e-01, 4.0675e-02, 2.7228e-01, + 5.9060e-01, 1.6901e-01, 4.1090e-01, 9.4973e-01, + 7.6222e-01, 6.7014e-01, 3.5314e-01, 5.1866e-01, + 2.4448e-01, 4.1254e-01, 
4.5152e-01, 9.5140e-01, + 8.2588e-02, 8.3646e-01, 3.5168e-01, 6.9125e-02, + 6.5655e-01, 5.8400e-01, 6.7578e-01, 9.6829e-01, + 3.2435e-01, 2.1669e-01, 5.0881e-01, 6.6010e-01, + 9.9413e-01, 7.2719e-01, 4.9947e-01, 1.2786e-01, + 5.4251e-01, 3.1702e-01, 2.4297e-01, 8.9598e-01, + 9.6596e-01, 2.9073e-01, 7.6399e-01, 6.4410e-01, + 4.4917e-02, 5.7331e-01, 4.8768e-01, 1.5737e-01, + 4.1426e-01, 7.5691e-02, 1.1400e-02, 6.3347e-01, + 9.7471e-01, 4.8068e-01, 5.1207e-01, 6.0535e-01, + 6.1780e-01, 5.1745e-01, 1.1609e-01, 8.2131e-01, + 2.3242e-01, 2.9832e-01, 8.7804e-01, 3.4896e-01, + 3.7945e-01, 3.7611e-01, 6.8179e-01, 6.1163e-01, + 9.9284e-01, 8.7602e-01, 2.4566e-01, 6.5668e-01, + 8.0133e-01, 5.9246e-01, 1.2335e-01, 5.9772e-01, + 8.6985e-01, 5.7323e-01, 9.0899e-01, 5.7744e-01, + 7.7745e-01, 1.9965e-01, 3.8997e-01, 5.5048e-01, + 3.8323e-03, 2.9864e-01, 3.9406e-01, 5.1309e-01, + 9.8198e-02, 7.9148e-01, 9.8276e-01, 6.9093e-01, + 2.9100e-01, 7.3870e-01, 9.0672e-01, 4.2406e-01, + 6.9570e-01, 2.2882e-01, 8.8716e-01, 4.3867e-01, + 4.3172e-02, 5.4098e-01, 1.0475e-01, 8.0054e-01, + 1.1326e-01, 6.8061e-01, 3.1575e-01, 1.7557e-01, + 5.2757e-01, 3.5965e-01, 3.7901e-01, 8.5552e-01, + 6.4949e-01, 8.6056e-01, 6.7278e-01, 5.1555e-01, + 9.1367e-01, 4.1302e-01, 3.3050e-01, 5.5465e-01, + 5.1774e-02, 5.3705e-02, 7.9909e-01, 7.1162e-01, + 5.4237e-01, 9.5004e-01, 9.4275e-01, 9.3457e-01, + 7.5024e-01, 7.0720e-01, 2.1138e-01, 3.9708e-01, + 4.7646e-02, 8.4320e-01, 4.5412e-01, 4.2428e-02, + 5.5696e-01, 6.9159e-01, 2.7000e-01, 8.2888e-01, + 6.5681e-02, 9.6445e-01, 4.3295e-01, 4.0186e-02, + 1.3193e-01, 8.3988e-02, 1.8590e-01, 6.0375e-01, + 7.6220e-02, 6.8820e-01, 3.5162e-01, 1.5205e-01, + 3.2419e-01, 6.1973e-01, 9.4905e-02, 2.9364e-01, + 8.5631e-01, 7.9832e-01, 1.8592e-01, 8.3541e-02, + 4.8102e-01, 2.6354e-01, 4.4820e-01, 3.0574e-01, + 8.5256e-01, 7.9016e-01, 5.8093e-01, 2.8070e-01, + 8.2189e-01, 1.3404e-01, 1.7765e-01, 4.3575e-01, + 7.9613e-01, 4.4263e-01, 6.3908e-01, 6.6473e-01, + 1.6290e-01, 8.5106e-02, 5.8780e-01, 8.8252e-01, + 1.4565e-01, 3.7198e-01, 9.5389e-01, 5.5220e-01, + 8.3885e-01, 1.7989e-01, 9.7110e-01, 7.1531e-01, + 5.3113e-01, 8.3077e-01, 2.4941e-01, 9.8477e-01, + 2.0196e-01, 3.5487e-01, 5.3081e-01, 8.0503e-01, + 4.7495e-01, 6.4974e-01, 6.3780e-01, 5.4582e-01, + 6.8931e-01, 9.9250e-02, 5.9804e-01, 9.8319e-01, + 9.4741e-01, 2.9675e-01, 8.2086e-01, 2.6537e-01, + 5.1718e-01, 5.4456e-01, 1.3200e-01, 2.4955e-01, + 3.3289e-02, 5.9704e-01, 3.1025e-01, 5.5724e-01, + 8.5918e-01, 4.6545e-01, 4.6224e-01, 1.1120e-01, + 1.2538e-01, 6.7721e-01, 4.9712e-01, 2.9803e-01, + 7.6996e-01, 8.4018e-01, 3.5558e-02, 5.8389e-01, + 6.8730e-01, 5.4331e-01, 6.3098e-01, 5.6001e-01, + 2.4965e-01, 5.4269e-01, 6.3266e-01, 6.3612e-01, + 5.5311e-01, 4.1962e-01, 7.7565e-01, 5.4556e-01, + 5.3726e-01, 1.8117e-01, 3.2950e-01, 4.4221e-01, + 1.9084e-02, 1.4366e-01, 4.7691e-01, 5.6167e-01, + 8.8484e-01, 5.9715e-01, 4.3537e-01, 9.7932e-02, + 9.1410e-01, 6.4713e-01, 1.5110e-01, 6.7473e-01, + 8.6023e-02, 6.0688e-01, 6.9210e-01, 3.6176e-01, + 8.3606e-02, 6.1369e-01, 6.9943e-01, 4.9996e-01, + 3.4645e-01, 1.9730e-01, 8.1552e-01, 3.9487e-01, + 5.7817e-01, 9.8319e-02, 6.2641e-02, 5.8353e-01, + 6.3400e-01, 5.5185e-01, 2.8587e-01, 9.2101e-01, + 7.6998e-01, 4.3278e-01, 7.0580e-01, 5.0207e-01, + 7.2717e-01, 7.9082e-01, 9.2035e-01, 1.6286e-01, + 4.8958e-01, 7.5866e-01, 2.0512e-01, 2.3862e-01, + 3.7592e-01, 2.6322e-01, 6.5727e-01, 5.3523e-01, + 4.8432e-01, 7.9223e-01, 6.0262e-01, 8.2024e-01, + 8.5435e-01, 9.3759e-01, 7.7440e-01, 5.5400e-01, + 2.1126e-01, 8.3923e-01, 
9.6260e-01, 9.2510e-01, + 2.2483e-01, 4.6953e-01, 8.2033e-01, 4.9381e-01, + 1.5869e-01, 8.8073e-01, 5.6950e-01, 5.2842e-01, + 9.5044e-01, 5.0309e-01, 2.8057e-01, 1.0560e-01, + 3.4425e-01, 1.8236e-01, 7.7621e-01, 6.0505e-01, + 8.8280e-01, 2.5670e-01, 8.7823e-01, 6.2451e-01, + 4.0445e-01, 9.8540e-01, 8.2704e-01, 3.5627e-01, + 1.5486e-01, 6.9952e-02, 6.5991e-01, 6.4257e-01, + 8.1865e-01, 7.8009e-01, 9.9536e-01, 7.4532e-01, + 4.3675e-01, 5.4840e-01, 1.8494e-01, 8.8545e-01, + 8.5942e-01, 7.3245e-01, 3.2637e-01, 3.9756e-01, + 2.5391e-01, 7.6839e-01, 5.0349e-01, 9.9444e-01, + 3.0921e-01, 3.2943e-01, 5.6092e-02, 6.6642e-01, + 6.1594e-01, 6.7792e-01, 9.1305e-02, 7.7830e-01, + 9.6161e-01, 1.2181e-01, 3.6857e-01, 5.1784e-01, + 4.2081e-01, 5.3355e-01, 5.1650e-02, 5.1590e-01, + 4.8562e-01, 5.9115e-02, 7.8704e-01, 6.9563e-01, + 3.7683e-02, 7.9775e-01, 9.0316e-02, 6.8609e-01, + 5.3611e-01, 3.9760e-01, 1.9597e-01, 1.3318e-01, + 8.2249e-01, 8.1588e-01, 4.2823e-03, 6.8715e-02, + 1.5912e-01, 1.2970e-01, 8.1394e-02, 4.8507e-01, + 7.8042e-01, 5.9863e-01, 4.7134e-01, 1.4523e-01, + 8.7900e-01, 7.4395e-01, 1.6950e-01, 1.2914e-01, + 6.7946e-01, 2.3713e-01, 9.0834e-01, 3.1627e-01, + 1.4134e-01, 9.3240e-01, 6.7854e-01, 4.1327e-01, + 5.8183e-01, 5.3041e-01, 7.7103e-01, 1.2864e-01, + 1.1689e-01, 5.3759e-01, 3.1594e-02, 7.7564e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4774, 0.5622, 0.8471, ..., 0.2935, 0.6471, 0.0068]) +tensor([0.6336, 0.1005, 0.7948, ..., 0.1112, 0.8541, 0.7566]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -268,650 +375,271 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 0.015372514724731445 seconds +Time: 0.015038013458251953 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 68303 -ss 10000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.871366500854492} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([6744, 9183, 5203, 6638, 1533, 7743, 539, 8215, 8490, - 4534, 6346, 2886, 815, 690, 4659, 7859, 4960, 9599, - 7211, 9102, 9352, 9158, 5228, 349, 9466, 1784, 6758, - 6019, 4222, 3313, 4202, 6284, 5941, 3644, 8527, 621, - 8978, 2864, 4741, 937, 3040, 5951, 4377, 2752, 2224, - 833, 9594, 8371, 4644, 3164, 5751, 2168, 7735, 2026, - 7627, 2921, 3825, 1318, 5894, 9816, 8373, 6219, 7761, - 770, 6016, 7731, 2607, 3685, 9115, 9936, 4556, 2302, - 1032, 5304, 9652, 9315, 2299, 8095, 2227, 9852, 7527, - 7548, 5459, 1914, 4627, 9758, 4418, 5645, 5335, 1474, - 5, 4325, 1166, 5758, 8037, 4831, 7864, 4621, 1408, - 7991, 7361, 3430, 5370, 9921, 6713, 3837, 9935, 1916, - 9036, 5612, 1786, 9554, 5873, 9290, 5803, 8105, 7749, - 2495, 2472, 8808, 5958, 6237, 7275, 4348, 4709, 7618, - 2723, 835, 9714, 1090, 6330, 6812, 6188, 232, 626, - 445, 8400, 57, 7784, 9047, 5661, 8636, 2320, 4471, - 9589, 9560, 5053, 2465, 7391, 8282, 7210, 2128, 3549, - 2704, 7346, 5039, 6538, 8120, 953, 8729, 5862, 8226, - 4137, 112, 6745, 8084, 376, 7059, 6560, 8487, 4693, - 10, 5925, 5522, 7046, 5332, 5096, 8815, 7290, 8154, - 1699, 3419, 3972, 7305, 963, 699, 8976, 8405, 1750, - 8383, 6396, 7366, 2223, 7026, 1722, 7778, 173, 7465, - 1026, 2239, 9167, 4227, 3336, 8659, 6898, 264, 1819, - 4526, 9903, 3299, 8471, 2338, 5008, 6034, 5255, 1016, - 2476, 3281, 9291, 6915, 6157, 3429, 6616, 5294, 1960, - 3849, 6756, 6524, 9676, 1278, 2144, 8034, 9702, 8689, - 7428, 2101, 1606, 8926, 2565, 4025, 4286, 9944, 6729, - 1860, 6272, 6011, 1865, 1480, 6982, 593, 5077, 8793, - 8373, 2645, 7072, 2995, 422, 4753, 1315, 8695, 9479, - 4749, 7734, 6063, 7096, 9419, 8427, 6287, 6628, 3423, - 6286, 7303, 6136, 8068, 796, 3097, 1701, 3836, 8310, - 9629, 9555, 6582, 1297, 7896, 2598, 2280, 4630, 1294, - 7175, 3070, 3321, 7083, 6930, 8188, 3776, 9884, 1450, - 5587, 7988, 8659, 5326, 9433, 9194, 1815, 1042, 9718, - 3878, 4772, 7012, 9395, 8269, 9270, 1629, 2660, 209, - 5712, 932, 1485, 5192, 4238, 6353, 6193, 5165, 3026, - 3281, 2398, 4990, 8451, 5995, 2437, 7572, 9348, 2438, - 9680, 6106, 4164, 7478, 3249, 3727, 1957, 9143, 6126, - 1831, 4451, 8075, 4026, 2735, 8626, 4378, 205, 5899, - 1940, 1951, 2036, 8343, 8308, 8732, 8451, 6204, 7090, - 1801, 4175, 4396, 4659, 2284, 3194, 7493, 7149, 2323, - 5044, 3306, 5547, 5456, 1851, 4655, 6488, 9141, 7070, - 4125, 1093, 5396, 7738, 2079, 8139, 738, 1474, 9389, - 8473, 6944, 9388, 9801, 3310, 4840, 8737, 2836, 7124, - 5736, 8458, 7851, 2445, 4348, 2958, 1500, 7079, 3361, - 2917, 5609, 9694, 7338, 2003, 6819, 9301, 8759, 3668, - 4230, 9606, 5045, 1392, 3446, 5031, 2339, 7379, 9413, - 7019, 1575, 9646, 9892, 4175, 7521, 9210, 8996, 9856, - 2105, 6891, 2516, 7644, 6342, 5591, 4607, 8370, 2267, - 2178, 98, 4758, 5716, 8549, 2672, 3367, 9687, 6760, - 9110, 1378, 9958, 2062, 8086, 5488, 6201, 5121, 5155, - 4299, 3592, 4569, 7674, 9743, 9715, 5859, 5982, 9157, - 5766, 746, 379, 1868, 3707, 4085, 3960, 6014, 1918, - 7302, 5034, 8923, 4358, 3196, 5464, 5160, 7466, 5520, - 4012, 4775, 1536, 4418, 7081, 2030, 8186, 3043, 4489, - 6982, 4291, 14, 4887, 277, 3371, 7617, 8246, 1976, - 1779, 4401, 3966, 5238, 2266, 3849, 2641, 4879, 9073, - 3290, 4069, 7474, 232, 4401, 1411, 2648, 6385, 9602, - 1030, 9986, 4068, 265, 1296, 9722, 4721, 3794, 7224, - 9214, 2941, 4786, 6676, 2620, 6240, 7532, 6528, 3667, - 6404, 7204, 9481, 2749, 555, 4908, 3755, 47, 7892, - 8014, 5573, 4646, 8048, 9408, 4239, 7085, 
7339, 2213, - 9590, 4261, 4369, 1661, 5901, 621, 7189, 453, 6633, - 1412, 1040, 3670, 8460, 7301, 8172, 5431, 8655, 8445, - 5264, 1982, 6719, 6488, 9538, 9776, 4381, 2280, 1223, - 2336, 4148, 8508, 585, 9480, 5943, 6989, 3101, 8689, - 8256, 6028, 1081, 8748, 6924, 5582, 9219, 1164, 9719, - 6904, 9219, 3348, 1248, 2380, 4837, 6421, 8759, 8176, - 2413, 9241, 6280, 795, 6836, 6623, 3295, 5487, 113, - 562, 6970, 5028, 2406, 8295, 8368, 3385, 4212, 9418, - 4453, 6440, 4796, 4189, 429, 199, 5828, 1302, 4275, - 3705, 6507, 9466, 8720, 636, 9800, 6974, 9561, 3002, - 6294, 6645, 1538, 238, 807, 3626, 6120, 5716, 8600, - 4318, 5792, 1943, 5603, 2143, 3104, 3061, 862, 2604, - 9101, 6695, 4250, 6121, 3993, 5656, 3094, 4118, 5827, - 4887, 1061, 5947, 6676, 6374, 4231, 1551, 240, 1173, - 8389, 1604, 5288, 4594, 2461, 5134, 2250, 9472, 1501, - 6338, 6393, 5061, 6467, 9017, 111, 4430, 4906, 4617, - 7659, 3038, 9083, 8874, 4785, 6903, 4936, 1238, 8369, - 183, 8286, 1773, 4890, 3477, 1143, 6106, 7346, 5613, - 5765, 1348, 576, 3940, 2554, 5492, 9590, 603, 7762, - 563, 6706, 3669, 5773, 6097, 3721, 8648, 2328, 9556, - 4798, 605, 5091, 7028, 8465, 606, 8771, 9883, 2697, - 1661, 8326, 5685, 5164, 2959, 1796, 6376, 4139, 9534, - 1259, 5224, 6687, 803, 432, 6625, 1457, 844, 9575, - 7195, 2325, 5841, 4426, 9449, 829, 1474, 6946, 23, - 4915, 4070, 2967, 6725, 2677, 2638, 219, 4785, 1030, - 4914, 3439, 8781, 4947, 6034, 2961, 6021, 103, 3443, - 9818, 844, 8744, 4287, 6940, 9206, 8132, 9639, 4328, - 6379, 4109, 7287, 5872, 2968, 8194, 8179, 2332, 2296, - 5350, 3926, 8242, 8552, 4727, 2202, 387, 3297, 1857, - 3876, 1691, 9764, 1422, 229, 1091, 1690, 730, 2018, - 7618, 1303, 3323, 7404, 3433, 488, 4748, 2180, 1314, - 6350, 9436, 3086, 9808, 6299, 1930, 3547, 4010, 3605, - 6165, 956, 5351, 421, 9662, 1173, 8605, 7930, 2736, - 6953, 2718, 9042, 3741, 7757, 1552, 9205, 1650, 599, - 5595, 4263, 1103, 7418, 9241, 8601, 1744, 7642, 515, - 9006, 3083, 8693, 9032, 5906, 7000, 2551, 3028, 3835, - 5449, 4937, 2426, 8599, 3062, 2693, 9800, 9210, 5275, - 3360, 3233, 6476, 4125, 6253, 8942, 1537, 467, 9364, - 2729, 7910, 1994, 1148, 3863, 8051, 6394, 8157, 1640, - 8390, 8149, 5412, 2816, 3795, 8617, 9981, 2763, 1764, - 676, 9771, 7442, 4680, 2279, 1341, 3408, 6649, 5478, - 8223, 5047, 7129, 5067, 1428, 6922, 7376, 493, 3030, - 127, 9415, 1410, 4169, 9819, 3335, 4738, 3017, 486, - 451, 4170, 208, 4490, 1163, 5281, 8361, 7624, 6925, - 1101, 1934, 2095, 7683, 109, 688, 2415, 8688, 4906, - 47, 1639, 3155, 9545, 9361, 5321, 5522, 1318, 4944, - 3639, 4692, 6598, 5731, 4104, 2490, 6697, 6146, 6141, - 8631, 4236, 8812, 5534, 9172, 2357, 694, 5351, 3729, - 8488, 2621, 5018, 1297, 5214, 2847, 2659, 9771, 3304, - 3414]), - values=tensor([8.7997e-01, 2.8521e-01, 4.5005e-01, 5.0855e-01, - 3.8634e-03, 5.7734e-01, 3.8405e-01, 9.4486e-01, - 3.5202e-01, 7.6382e-01, 4.7786e-01, 6.9793e-01, - 5.0912e-01, 6.3767e-03, 6.9119e-01, 4.0181e-01, - 7.6100e-01, 8.5859e-01, 3.6709e-01, 7.7781e-01, - 4.3558e-01, 2.5672e-01, 7.1189e-01, 8.1200e-01, - 3.4793e-01, 3.2184e-01, 1.1386e-01, 6.1289e-01, - 8.2371e-01, 1.9096e-01, 1.8303e-01, 1.8841e-01, - 5.0087e-01, 2.5093e-02, 6.6238e-03, 6.4069e-01, - 1.1422e-01, 6.7509e-01, 2.8931e-01, 9.6964e-01, - 2.4089e-01, 4.4554e-01, 3.2641e-01, 4.3071e-01, - 6.5669e-02, 3.6619e-01, 6.9264e-01, 7.7302e-01, - 6.5551e-01, 4.8090e-02, 3.4286e-01, 6.4302e-02, - 9.6403e-01, 7.7081e-01, 6.0460e-01, 4.8930e-01, - 7.6331e-01, 6.1378e-01, 7.4581e-01, 6.4943e-01, - 6.6800e-01, 9.2555e-01, 9.3781e-01, 7.1424e-01, - 3.9480e-01, 
7.6845e-01, 9.4928e-01, 1.6757e-02, - 7.3400e-01, 5.1327e-01, 7.3558e-01, 1.5695e-01, - 6.7293e-01, 9.5435e-01, 9.1287e-01, 2.5381e-02, - 8.4456e-01, 7.6779e-01, 1.3407e-01, 4.9738e-01, - 3.9647e-01, 5.3527e-01, 7.3173e-01, 3.2717e-01, - 2.7642e-01, 1.7541e-01, 8.7538e-01, 5.3714e-01, - 4.0317e-01, 9.6258e-01, 4.9134e-01, 5.2103e-01, - 6.3584e-01, 7.8266e-01, 6.2083e-01, 7.1376e-01, - 5.7960e-01, 5.3449e-01, 7.5515e-01, 3.9838e-01, - 9.3057e-02, 7.9195e-01, 9.9938e-02, 3.2785e-01, - 4.1981e-01, 4.7350e-01, 2.9521e-01, 7.2738e-01, - 7.1675e-01, 9.1113e-01, 6.8622e-01, 3.1104e-01, - 4.7595e-01, 5.6348e-02, 6.2878e-01, 8.8733e-01, - 8.3583e-01, 3.5424e-01, 7.4340e-01, 6.9196e-01, - 8.6307e-01, 4.9523e-01, 1.9610e-01, 4.7483e-01, - 5.7196e-01, 5.1546e-01, 7.5100e-01, 5.9452e-01, - 2.3036e-01, 2.6958e-01, 5.3660e-01, 1.1357e-01, - 1.4815e-01, 4.3323e-01, 7.8068e-01, 7.5669e-01, - 5.1025e-01, 8.9680e-01, 2.9991e-01, 9.5303e-01, - 6.5093e-01, 1.8516e-01, 6.6969e-02, 1.1082e-01, - 8.5796e-02, 1.8005e-01, 8.5690e-02, 4.0885e-01, - 8.5941e-01, 5.4485e-02, 8.5646e-01, 9.8199e-01, - 5.0636e-01, 4.3617e-01, 5.9529e-01, 5.7098e-01, - 7.3113e-01, 9.0655e-01, 4.7307e-01, 6.9439e-01, - 9.5960e-01, 2.8637e-01, 4.1165e-01, 3.5498e-01, - 2.9302e-01, 7.1891e-01, 7.9570e-01, 5.6832e-01, - 7.0477e-01, 2.7348e-01, 9.5003e-01, 2.2469e-01, - 2.5175e-01, 4.8973e-01, 5.9989e-01, 5.8269e-01, - 7.9838e-01, 5.5642e-01, 6.1809e-01, 6.7618e-01, - 8.2227e-01, 5.4224e-01, 1.0585e-01, 3.0707e-01, - 8.6384e-01, 8.3680e-01, 8.1983e-01, 5.4882e-01, - 6.7569e-01, 4.0288e-01, 9.5827e-01, 6.4296e-01, - 4.5776e-01, 4.7685e-01, 4.7969e-01, 9.1368e-01, - 2.5314e-01, 8.9231e-01, 6.5064e-01, 1.3463e-01, - 7.7634e-02, 7.6612e-01, 7.6132e-01, 4.5487e-01, - 8.1562e-01, 3.9770e-01, 3.7874e-01, 9.8044e-01, - 2.9015e-01, 2.3310e-01, 8.9180e-02, 3.3477e-01, - 7.4886e-01, 7.7876e-01, 5.7465e-01, 6.7747e-01, - 6.3826e-01, 6.8923e-01, 2.2027e-01, 9.2721e-01, - 6.5802e-01, 8.6034e-01, 6.7590e-01, 3.0092e-01, - 5.0367e-01, 5.2007e-01, 5.4932e-01, 9.2163e-01, - 1.5539e-01, 8.3302e-01, 6.3467e-01, 6.7336e-01, - 9.6415e-01, 7.3161e-01, 5.1727e-01, 9.6905e-01, - 8.0804e-01, 8.6858e-02, 5.0096e-01, 9.7323e-01, - 5.0529e-01, 6.9268e-01, 8.5286e-01, 6.1778e-01, - 9.8231e-01, 3.3323e-01, 8.6429e-01, 7.8369e-02, - 4.1466e-01, 9.1520e-01, 5.0481e-01, 6.4411e-01, - 2.9477e-01, 8.0817e-01, 9.8288e-01, 3.3812e-01, - 2.4965e-01, 9.9967e-01, 9.0376e-01, 5.8973e-01, - 7.8162e-01, 6.5235e-01, 1.7058e-01, 4.2443e-01, - 6.6975e-01, 9.8723e-01, 6.7750e-01, 1.4445e-02, - 4.8293e-01, 3.9921e-01, 1.6541e-01, 3.0431e-01, - 7.0151e-01, 4.4106e-01, 3.5106e-02, 1.8337e-01, - 2.3382e-01, 2.3108e-01, 9.0945e-01, 9.0869e-01, - 2.5770e-01, 1.1305e-01, 3.0215e-01, 5.6599e-01, - 4.9016e-01, 1.0358e-01, 6.5084e-01, 2.1980e-01, - 2.7526e-01, 6.8575e-01, 3.8685e-01, 3.3360e-01, - 9.2841e-01, 2.3845e-01, 6.0353e-01, 4.7893e-01, - 2.1612e-01, 4.9916e-02, 6.1017e-01, 4.1103e-01, - 6.8532e-01, 8.6455e-02, 6.1925e-01, 1.9289e-01, - 2.1648e-01, 4.0255e-02, 7.4462e-01, 8.3120e-01, - 5.4511e-01, 9.3700e-02, 3.9437e-01, 7.9637e-01, - 5.2834e-01, 6.5519e-01, 9.8216e-01, 7.8679e-01, - 5.1865e-01, 2.6188e-01, 3.8483e-01, 1.0953e-01, - 6.0364e-01, 6.2100e-01, 2.7577e-01, 3.8468e-01, - 5.9031e-01, 1.4769e-01, 3.2718e-01, 7.6561e-01, - 7.0164e-01, 4.9655e-01, 8.9997e-01, 8.1649e-02, - 8.3568e-01, 3.7404e-01, 5.7489e-01, 4.8879e-01, - 3.3945e-01, 6.4600e-01, 3.3785e-02, 9.3062e-01, - 8.0442e-01, 4.2287e-01, 8.8652e-01, 2.5031e-01, - 5.4172e-02, 8.0831e-01, 9.0044e-01, 2.0783e-01, - 2.6264e-02, 
6.3673e-01, 3.1357e-01, 1.3635e-02, - 9.4946e-01, 8.7749e-01, 6.3987e-01, 3.0051e-01, - 3.8978e-01, 3.9192e-01, 7.7167e-01, 2.5028e-01, - 6.3457e-01, 2.2326e-01, 2.1021e-01, 4.9092e-01, - 5.6111e-01, 6.4954e-01, 8.0426e-01, 4.5267e-02, - 7.2168e-01, 5.1283e-04, 9.1136e-01, 1.2495e-01, - 4.4895e-01, 9.3184e-01, 2.2892e-01, 5.9504e-01, - 5.6472e-02, 2.9795e-01, 4.0327e-01, 5.5233e-02, - 7.8699e-01, 9.1170e-02, 4.2592e-01, 2.8865e-01, - 9.5409e-01, 7.2826e-01, 6.7525e-01, 7.3618e-01, - 3.4218e-01, 6.9883e-01, 7.1871e-01, 2.0906e-01, - 9.9500e-01, 9.5206e-01, 1.0883e-01, 7.8057e-01, - 4.1855e-01, 1.7458e-01, 9.7395e-01, 3.9682e-01, - 7.6878e-01, 7.3827e-01, 2.9447e-01, 4.1030e-01, - 7.1074e-01, 4.2242e-01, 6.9407e-02, 1.0676e-01, - 3.7330e-01, 7.8475e-02, 2.3566e-01, 3.3687e-01, - 8.9509e-01, 5.6818e-01, 8.2692e-02, 2.1473e-01, - 2.7932e-01, 3.2494e-01, 9.5931e-01, 5.0787e-01, - 5.3320e-01, 8.9201e-01, 9.8617e-01, 3.2344e-01, - 7.5618e-01, 5.6291e-01, 1.7829e-01, 7.5177e-01, - 1.7789e-01, 5.7880e-01, 9.0026e-01, 6.1347e-03, - 1.1215e-01, 8.8491e-01, 7.7699e-02, 6.0626e-01, - 9.8867e-01, 4.0378e-01, 8.6001e-01, 4.4491e-01, - 3.1045e-02, 1.0014e-01, 8.3254e-01, 7.2285e-01, - 8.8978e-01, 3.3346e-01, 5.5897e-01, 7.5235e-01, - 3.0218e-01, 9.7073e-01, 8.0571e-01, 6.3659e-01, - 7.4468e-01, 8.8894e-02, 3.8364e-01, 8.3517e-01, - 7.1046e-01, 8.2789e-02, 9.9635e-01, 1.9280e-01, - 3.2091e-01, 6.0294e-01, 9.4375e-01, 7.4749e-01, - 6.7696e-01, 4.1623e-01, 7.4117e-02, 5.0351e-01, - 7.9718e-01, 7.1889e-01, 5.1471e-01, 7.8589e-01, - 9.1567e-01, 5.0537e-01, 1.4661e-01, 2.0153e-01, - 1.9491e-01, 6.4695e-01, 9.8462e-01, 6.0265e-01, - 2.7637e-01, 3.9293e-01, 1.7625e-01, 8.0870e-01, - 2.1159e-01, 5.9264e-01, 5.7012e-01, 5.9849e-01, - 5.3945e-01, 9.0767e-01, 5.0641e-01, 8.2994e-02, - 4.1407e-01, 8.5969e-01, 7.5631e-01, 9.7899e-01, - 3.0245e-01, 9.5165e-01, 5.6284e-01, 5.8693e-01, - 1.3760e-01, 8.9978e-01, 8.0792e-01, 6.6211e-01, - 5.6306e-01, 2.5657e-01, 7.2956e-01, 3.7115e-01, - 5.1520e-01, 8.2467e-01, 7.5545e-01, 9.4052e-01, - 9.7952e-01, 6.6546e-01, 7.8683e-01, 7.5895e-02, - 8.2766e-01, 9.3261e-01, 8.4123e-01, 7.0903e-01, - 8.6546e-01, 9.9407e-01, 5.9707e-01, 6.4446e-01, - 3.2013e-01, 4.5177e-01, 7.2474e-01, 9.5717e-01, - 9.2961e-01, 1.8631e-02, 1.2011e-01, 8.7293e-01, - 5.0934e-01, 1.2118e-01, 4.5119e-01, 3.9655e-01, - 9.6740e-01, 7.3842e-01, 5.3682e-01, 1.5135e-01, - 6.6126e-01, 6.5927e-01, 1.1407e-01, 4.1038e-01, - 1.6834e-01, 8.2535e-01, 7.0347e-02, 2.7263e-01, - 4.2171e-01, 2.8301e-01, 8.8235e-02, 2.2241e-01, - 4.6000e-01, 6.6548e-01, 1.8844e-01, 6.6034e-01, - 2.1227e-02, 3.8783e-02, 3.3827e-01, 4.7088e-01, - 9.7627e-01, 6.2968e-01, 6.3522e-01, 2.4334e-01, - 1.9212e-01, 1.3712e-01, 1.2686e-01, 3.0093e-01, - 9.4777e-01, 2.4785e-01, 9.8636e-01, 9.3583e-01, - 3.6026e-01, 2.2137e-01, 6.1133e-01, 9.1866e-02, - 3.6103e-01, 1.7489e-01, 4.5825e-01, 7.3822e-01, - 1.1665e-01, 3.6889e-01, 7.8219e-01, 1.6535e-01, - 7.7655e-01, 4.3396e-02, 4.5151e-01, 9.3061e-01, - 3.4963e-01, 6.3379e-01, 3.0356e-01, 7.9136e-01, - 2.1718e-01, 4.5984e-01, 8.5264e-01, 2.2071e-01, - 5.8333e-01, 3.7356e-01, 2.4578e-02, 6.6263e-01, - 7.4748e-01, 2.1894e-01, 2.5251e-01, 4.2144e-01, - 8.0678e-01, 1.2904e-01, 8.8791e-01, 2.4954e-01, - 5.5182e-01, 8.3078e-01, 8.0810e-01, 9.4817e-01, - 8.9573e-01, 5.1727e-01, 3.6179e-01, 3.5744e-01, - 8.1063e-01, 6.3401e-01, 8.4475e-01, 4.6719e-01, - 4.7935e-01, 8.8285e-01, 3.5260e-01, 8.1538e-01, - 2.9803e-01, 7.1210e-01, 9.6602e-01, 4.3768e-01, - 6.0388e-01, 7.5485e-01, 1.6097e-01, 7.3941e-01, - 1.2583e-01, 
3.7349e-01, 2.7473e-02, 5.5611e-01, - 9.1308e-01, 4.4818e-01, 4.5070e-01, 3.2069e-01, - 1.4256e-01, 4.3464e-01, 2.5253e-01, 3.2121e-01, - 3.3224e-01, 3.9011e-01, 7.1556e-01, 5.7877e-01, - 8.0467e-01, 6.7205e-01, 1.0771e-01, 2.8920e-01, - 5.6940e-02, 4.6611e-02, 8.5119e-01, 7.0781e-01, - 1.4819e-01, 2.7250e-01, 8.7019e-01, 4.3230e-01, - 2.6507e-01, 9.2497e-01, 2.6065e-01, 6.7056e-01, - 4.7623e-01, 7.7408e-02, 7.8815e-01, 7.9083e-01, - 4.0135e-01, 4.4360e-01, 5.6498e-01, 2.1514e-01, - 4.4589e-01, 7.5491e-01, 9.7064e-01, 5.8939e-02, - 5.4940e-01, 7.9838e-01, 7.1091e-01, 2.8896e-01, - 8.1053e-01, 8.6197e-02, 9.4069e-01, 4.2678e-01, - 7.9980e-01, 3.9927e-01, 7.9838e-01, 7.2181e-01, - 6.1672e-01, 4.3699e-01, 1.9963e-01, 1.9977e-01, - 5.4157e-01, 6.4055e-01, 5.1059e-01, 6.7688e-01, - 4.1296e-01, 9.1355e-01, 9.9221e-01, 1.9937e-01, - 2.0294e-01, 4.8334e-01, 5.5805e-01, 5.7577e-01, - 9.8396e-01, 2.1795e-01, 1.8931e-01, 8.5201e-01, - 3.8953e-01, 6.9513e-02, 6.9908e-01, 4.2103e-01, - 5.1686e-01, 9.8052e-01, 4.8102e-01, 7.7828e-01, - 3.3843e-01, 9.9544e-01, 2.5451e-01, 9.0598e-01, - 7.5647e-02, 2.9191e-01, 6.4351e-01, 3.2196e-01, - 2.0623e-01, 8.6748e-01, 7.8486e-01, 6.7205e-01, - 3.1625e-01, 8.5128e-01, 8.8565e-01, 4.2137e-01, - 1.1091e-01, 4.9402e-01, 5.4089e-01, 6.8405e-01, - 2.9753e-01, 4.2060e-01, 9.1311e-01, 5.5459e-01, - 8.3026e-01, 4.6157e-01, 2.3608e-01, 5.8476e-01, - 2.5801e-01, 7.1950e-01, 9.1236e-01, 8.9678e-01, - 5.6071e-01, 4.7115e-01, 9.0639e-01, 1.3986e-01, - 5.1603e-01, 4.5611e-01, 5.2778e-01, 2.3069e-01, - 1.7250e-01, 6.1973e-01, 4.9448e-01, 7.8930e-01, - 6.9896e-01, 6.4817e-01, 4.7390e-01, 6.7748e-01, - 1.8072e-01, 9.9144e-01, 2.6587e-01, 6.0122e-01, - 2.1225e-01, 4.4200e-01, 4.8065e-01, 9.0465e-01, - 3.2260e-01, 2.5671e-01, 2.8894e-01, 2.4832e-01, - 7.5346e-01, 3.7883e-01, 2.2906e-01, 6.0232e-02, - 5.1161e-01, 7.4210e-02, 6.5179e-01, 5.1750e-01, - 8.8617e-01, 4.7427e-01, 6.8617e-01, 5.4228e-02, - 7.3517e-01, 7.3277e-01, 2.1593e-01, 8.7086e-01, - 2.5536e-01, 6.7634e-01, 4.0865e-01, 3.9570e-01, - 8.1394e-01, 6.7279e-01, 3.1018e-01, 7.9731e-01, - 3.2988e-01, 1.4131e-01, 2.7103e-02, 5.9738e-01, - 5.7453e-01, 9.5820e-01, 2.2775e-01, 5.9104e-01, - 5.1097e-01, 9.5412e-01, 9.8822e-01, 5.2242e-01, - 2.8580e-01, 6.6891e-01, 8.0483e-01, 9.6335e-01, - 8.8453e-01, 8.3167e-01, 7.5733e-01, 9.4445e-01, - 3.0266e-01, 9.4298e-01, 6.4349e-01, 3.7207e-01, - 2.8157e-01, 9.8279e-01, 5.2135e-01, 9.2412e-01, - 3.9414e-01, 1.9883e-01, 1.0428e-01, 4.4835e-01, - 5.4509e-01, 2.9717e-01, 8.8737e-01, 3.2817e-01, - 8.1706e-01, 5.4337e-01, 3.5309e-02, 6.1329e-01, - 2.4008e-01, 8.6714e-01, 8.2432e-01, 8.6613e-01, - 4.7304e-01, 8.0100e-01, 5.9219e-01, 9.1623e-01, - 8.9663e-01, 8.4183e-01, 6.1959e-01, 2.5572e-01, - 5.3324e-01, 8.7137e-01, 2.3142e-01, 1.2987e-01, - 8.7470e-01, 3.2674e-01, 7.0319e-01, 6.8691e-01, - 6.0352e-02, 1.0758e-01, 8.0610e-01, 1.4246e-01, - 8.5753e-01, 3.1758e-01, 7.2485e-02, 4.9372e-01, - 5.9390e-01, 8.6305e-01, 9.5054e-01, 6.1048e-01, - 2.5868e-01, 6.7061e-01, 5.4914e-02, 3.1942e-01, - 3.2211e-01, 5.9735e-02, 6.3294e-01, 5.3201e-01, - 3.2903e-01, 3.8267e-01, 4.1705e-01, 2.8449e-01, - 9.6245e-01, 9.8518e-01, 3.5270e-01, 3.0525e-01, - 5.7444e-01, 3.8535e-01, 7.2539e-01, 9.0836e-01, - 2.5651e-01, 1.1982e-01, 7.7055e-01, 4.9427e-01, - 7.7750e-01, 1.2286e-01, 7.6843e-01, 9.7353e-01, - 3.1458e-03, 4.9794e-01, 5.4164e-01, 2.7698e-01, - 9.4323e-01, 6.8588e-01, 4.4740e-01, 2.5060e-01, - 6.3933e-01, 1.5948e-01, 8.0108e-01, 1.6827e-01, - 7.7705e-01, 3.7266e-01, 1.1629e-01, 1.2457e-01, - 1.7987e-01, 
1.7544e-01, 8.9379e-01, 9.9154e-01, - 1.9943e-01, 4.3856e-01, 8.6042e-01, 4.5407e-01, - 2.6806e-01, 5.9331e-01, 6.6726e-02, 8.4538e-01, - 4.3269e-01, 6.3831e-01, 8.4049e-01, 5.6436e-01, - 7.0962e-01, 2.9599e-01, 3.5067e-01, 4.4666e-01, - 1.2939e-01, 9.4365e-01, 7.8824e-02, 2.4235e-01, - 3.1459e-02, 4.0226e-01, 6.6546e-01, 1.9632e-01, - 1.0086e-01, 6.9880e-01, 4.8300e-01, 8.2713e-01, - 7.4273e-01, 3.5099e-02, 2.7427e-01, 6.4444e-01, - 3.3388e-01, 5.7429e-01, 6.7086e-01, 8.3286e-01, - 8.6153e-02, 6.5216e-02, 7.5920e-01, 4.8135e-01, - 3.9134e-01, 2.2790e-01, 4.1788e-01, 3.1685e-01, - 4.6375e-01, 7.1242e-01, 9.2845e-01, 4.7767e-01, - 8.8388e-01, 5.0231e-01, 8.8959e-01, 1.3500e-01, - 5.5147e-01, 3.4307e-01, 8.1439e-01, 5.6923e-01, - 1.5864e-01, 2.8270e-01, 7.7082e-01, 9.5930e-01, - 6.5183e-01, 2.8440e-01, 8.4987e-01, 4.0660e-01, - 4.5850e-01, 7.9000e-01, 7.4111e-01, 9.7289e-02, - 7.1325e-01, 4.8387e-01, 8.8020e-01, 9.2394e-01, - 2.2972e-01, 1.5188e-01, 7.6577e-01, 9.9898e-01, - 3.1108e-01, 6.0390e-01, 1.0443e-01, 2.6637e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.5445, 0.4387, 0.2146, ..., 0.9330, 0.4366, 0.2965]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 4.871366500854492 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 147223 -ss 10000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.998100280761719} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 69823 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.181962966918945} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 606, 4248, 4323, 142, 3267, 1209, 7616, 5137, 8211, - 6424, 2299, 2295, 132, 1237, 207, 4471, 8426, 1510, - 3485, 6960, 5069, 6876, 5759, 6010, 6198, 5515, 1057, - 6920, 9155, 4894, 8077, 2579, 7160, 9343, 5510, 6273, - 3046, 6095, 9498, 4568, 7882, 4172, 6457, 7199, 2821, - 8109, 7947, 3059, 5273, 811, 3090, 6904, 7921, 941, - 171, 1700, 8078, 8915, 5888, 6297, 3017, 6368, 5232, - 3121, 6921, 1954, 1701, 989, 2512, 3553, 4586, 8676, - 4195, 7666, 522, 8800, 4183, 4215, 5436, 4713, 8751, - 9665, 2209, 2880, 8327, 8043, 3725, 5157, 5205, 5068, - 5472, 2221, 9235, 5844, 1615, 5209, 8676, 880, 3145, - 5350, 6766, 607, 1821, 2811, 5587, 4952, 36, 972, - 9657, 5891, 932, 6251, 2971, 8136, 8846, 5746, 6772, - 3335, 9210, 5699, 6015, 5058, 4114, 1121, 1345, 4570, - 3080, 1837, 5077, 2774, 60, 9761, 283, 1953, 8754, - 568, 8363, 7504, 4312, 2950, 8818, 3615, 7260, 5054, - 9479, 8823, 817, 4247, 8916, 458, 1663, 1516, 8921, - 8262, 3930, 5511, 4167, 1454, 227, 9082, 8167, 6759, - 4104, 2365, 4891, 7213, 8479, 3335, 3691, 5216, 8378, - 1063, 5818, 8471, 7448, 5050, 8971, 747, 666, 6626, - 6463, 5721, 7264, 5789, 105, 7831, 7486, 5643, 8037, - 3964, 1225, 7868, 6793, 785, 9337, 463, 785, 4486, - 40, 8639, 6486, 5799, 220, 7289, 3630, 4023, 9716, - 636, 3153, 9820, 4041, 7979, 6263, 5265, 4683, 9203, - 90, 7642, 420, 3880, 2822, 376, 3766, 4385, 838, - 7707, 6033, 8983, 6271, 3139, 9042, 3814, 2813, 2724, - 6271, 5387, 8121, 7195, 5117, 5794, 8947, 1085, 4429, - 160, 2993, 9952, 7440, 7699, 3817, 18, 5719, 6435, - 6095, 7069, 731, 9481, 9394, 2426, 4205, 2461, 3873, - 8586, 4374, 7356, 5004, 2798, 7617, 2197, 119, 2470, - 3245, 8733, 6824, 4746, 9120, 2061, 7077, 2855, 8255, - 3205, 7568, 3053, 9530, 1111, 6923, 2083, 2934, 8128, - 5682, 8762, 6169, 7878, 5288, 2262, 4469, 7426, 1628, - 8495, 4564, 2858, 9806, 1173, 5731, 2322, 8964, 1865, - 7016, 9059, 5039, 2002, 1262, 7718, 4777, 1962, 1879, - 7855, 7890, 8641, 1099, 829, 2515, 1746, 2320, 6687, - 4799, 4569, 6237, 125, 2360, 7731, 401, 9623, 7974, - 7957, 893, 162, 8581, 115, 4518, 1206, 9451, 4274, - 8143, 5673, 5773, 9287, 9034, 6759, 2616, 4836, 638, - 2832, 3042, 3961, 9127, 7640, 5801, 3757, 8870, 8049, - 7486, 8964, 1399, 684, 1029, 2622, 6759, 2002, 3761, - 9818, 252, 8051, 7124, 5540, 8162, 333, 6237, 8228, - 7539, 7326, 3539, 9730, 3200, 1285, 3058, 6960, 9438, - 3335, 118, 848, 7603, 2443, 3021, 5193, 1159, 4828, - 5979, 3020, 6180, 8297, 2434, 8432, 873, 6202, 7493, - 1203, 4541, 547, 3753, 2874, 591, 2074, 4910, 3558, - 8, 8298, 2922, 6062, 8645, 3039, 7096, 8203, 433, - 4098, 2978, 5201, 220, 4789, 9244, 7862, 3711, 8615, - 7690, 3102, 7583, 2602, 5733, 1120, 4841, 144, 8214, - 5416, 404, 6971, 805, 6355, 8972, 3062, 8352, 2931, - 889, 7464, 2057, 2241, 2991, 9880, 133, 6643, 7302, - 3644, 4493, 6722, 9152, 4033, 7880, 8852, 4724, 5057, - 855, 3221, 7385, 5224, 5527, 5418, 5692, 9101, 2016, - 4500, 7558, 4991, 2183, 8431, 5687, 826, 8185, 7512, - 2034, 7269, 9017, 5667, 1066, 3954, 6620, 6000, 5735, - 4170, 6764, 5652, 6631, 3447, 1745, 1777, 3216, 7230, - 9533, 1387, 2874, 521, 745, 8458, 5566, 3373, 8999, - 2967, 8776, 9639, 3523, 831, 3365, 1405, 3704, 2518, - 8736, 9840, 8611, 7168, 974, 9007, 4762, 6891, 9021, - 6430, 9209, 301, 1699, 2870, 8888, 7910, 1671, 663, - 5112, 5271, 1358, 1676, 8365, 1780, 7972, 1361, 1045, - 1410, 4989, 2560, 1704, 6356, 9942, 2422, 9763, 6643, - 1212, 5204, 5812, 
5025, 7667, 8288, 576, 8273, 6833, - 9949, 6536, 5359, 6612, 7293, 5641, 1851, 335, 4310, - 5189, 2075, 5757, 8286, 7613, 8539, 4264, 7571, 1867, - 7380, 7067, 1, 1197, 3144, 5717, 7323, 4837, 5709, - 8246, 2292, 5490, 4382, 1545, 1139, 9329, 6740, 703, - 1593, 798, 7486, 9746, 3819, 8833, 6776, 5180, 5052, - 2319, 7164, 4881, 5400, 7841, 2215, 554, 7675, 7385, - 9306, 6355, 6231, 9361, 2385, 796, 4758, 7147, 5797, - 8281, 3852, 8531, 1545, 8108, 6926, 7059, 4504, 4531, - 7506, 124, 7458, 3327, 3628, 7556, 4417, 5016, 2525, - 7489, 8555, 8443, 3229, 5225, 3661, 6918, 4100, 8017, - 696, 3226, 8086, 741, 4257, 7752, 9762, 5276, 7602, - 5466, 6581, 5529, 3577, 8691, 4977, 7816, 9124, 4760, - 3859, 4524, 9305, 6899, 4753, 8459, 4314, 3121, 6139, - 2846, 229, 1517, 5567, 5587, 6908, 6983, 1860, 4933, - 7361, 4014, 765, 1397, 6972, 1767, 184, 2375, 5132, - 8705, 5405, 2339, 1938, 8334, 4952, 7588, 5389, 6605, - 3177, 1985, 9590, 5212, 8072, 3816, 8811, 3096, 8925, - 1481, 7459, 6634, 1656, 5790, 1907, 5450, 5369, 1969, - 416, 8199, 204, 7300, 3972, 4789, 9148, 3034, 8861, - 867, 3687, 8017, 7357, 7678, 1306, 4227, 2554, 13, - 4237, 2094, 155, 3093, 7018, 9364, 9696, 454, 2404, - 9877, 1667, 5368, 2586, 9721, 344, 4734, 6749, 9534, - 6490, 9586, 8397, 4492, 1324, 1754, 8517, 7428, 618, - 5779, 2546, 5800, 591, 8731, 153, 2268, 5592, 747, - 1729, 6137, 9551, 3864, 9573, 4958, 9404, 1107, 40, - 81, 4587, 1225, 3165, 570, 3811, 3337, 716, 3120, - 5004, 6021, 7094, 5385, 1117, 9004, 5007, 7419, 1047, - 3691, 9910, 3062, 8070, 427, 7658, 5876, 5585, 6170, - 8863, 2006, 168, 2709, 1624, 9133, 9550, 8635, 9703, - 2241, 7070, 8845, 7089, 5296, 7227, 4530, 8156, 6517, - 6289, 3818, 5042, 4297, 6306, 292, 2597, 1535, 5147, - 8744, 3740, 2035, 1766, 8354, 3023, 1994, 479, 1457, - 2645, 8758, 6863, 6834, 8195, 5541, 5631, 8260, 1567, - 9934, 417, 8257, 382, 2493, 3232, 8660, 8338, 7113, - 1209, 5614, 471, 6024, 9286, 54, 7652, 6187, 540, - 8542, 6207, 6408, 4218, 7616, 5719, 4534, 6986, 2199, - 2970, 7293, 6650, 9284, 956, 6642, 9326, 5000, 9529, - 5318, 3414, 2028, 6559, 6060, 2447, 543, 4868, 7178, - 336, 1140, 3673, 2489, 8807, 2157, 5056, 6476, 3035, - 9189, 2353, 2512, 4440, 9211, 3097, 2278, 932, 4252, - 768, 331, 5614, 3971, 5355, 7842, 9323, 6119, 7597, - 450, 8478, 583, 2348, 5392, 1174, 1349, 5119, 7445, - 479, 1422, 413, 167, 314, 7818, 8189, 3817, 9967, - 3831, 4635, 9032, 2332, 6873, 8301, 1534, 4964, 9327, - 3874, 9991, 8234, 517, 4545, 7823, 9803, 8221, 542, - 5331, 9104, 7312, 7039, 7742, 2907, 3191, 8019, 2692, - 5064, 3352, 9762, 7319, 9026, 6962, 128, 5753, 4, - 9327, 3158, 8441, 4944, 4588, 8836, 9882, 3216, 3631, - 4729]), - values=tensor([0.7863, 0.8342, 0.7119, 0.8691, 0.1799, 0.8019, 0.5297, - 0.0461, 0.6789, 0.7616, 0.1867, 0.1510, 0.4759, 0.4897, - 0.3112, 0.5107, 0.2673, 0.7677, 0.2078, 0.1024, 0.6974, - 0.7483, 0.1375, 0.4229, 0.3418, 0.4279, 0.1779, 0.3128, - 0.7738, 0.2688, 0.7139, 0.5474, 0.8893, 0.2669, 0.3732, - 0.3978, 0.5696, 0.8366, 0.1938, 0.3846, 0.7418, 0.1575, - 0.9175, 0.3788, 0.9034, 0.5215, 0.5959, 0.4812, 0.8080, - 0.9553, 0.3234, 0.3058, 0.4874, 0.0548, 0.2922, 0.3243, - 0.2802, 0.3277, 0.7008, 0.8317, 0.6850, 0.3190, 0.4857, - 0.5360, 0.3195, 0.2796, 0.9648, 0.3173, 0.1462, 0.6508, - 0.9193, 0.0306, 0.5949, 0.4367, 0.7038, 0.9052, 0.8896, - 0.7649, 0.2853, 0.3726, 0.3482, 0.2792, 0.4239, 0.7674, - 0.7629, 0.2768, 0.2724, 0.5503, 0.3248, 0.8259, 0.0264, - 0.4983, 0.0596, 0.1536, 0.8502, 0.3449, 0.3085, 0.4356, - 0.3873, 0.9396, 0.5172, 0.4872, 0.6271, 0.3707, 
0.6993, - 0.4127, 0.1519, 0.7471, 0.9960, 0.8186, 0.7247, 0.9753, - 0.6228, 0.3862, 0.0395, 0.6842, 0.6671, 0.0794, 0.0052, - 0.9718, 0.2986, 0.0151, 0.4374, 0.9946, 0.0935, 0.5060, - 0.9278, 0.3396, 0.3559, 0.9710, 0.0242, 0.8115, 0.3763, - 0.7869, 0.4303, 0.4782, 0.2549, 0.1494, 0.9501, 0.9807, - 0.5176, 0.8320, 0.7401, 0.7586, 0.0516, 0.2314, 0.8977, - 0.3697, 0.6354, 0.3793, 0.1332, 0.4121, 0.9345, 0.8805, - 0.2787, 0.5335, 0.9802, 0.1369, 0.6510, 0.3232, 0.7449, - 0.7218, 0.7851, 0.7585, 0.3555, 0.2232, 0.3523, 0.7028, - 0.1003, 0.5059, 0.7756, 0.5967, 0.2963, 0.2044, 0.5060, - 0.4409, 0.2094, 0.4839, 0.8768, 0.4050, 0.2371, 0.4748, - 0.4748, 0.2096, 0.9009, 0.7365, 0.7361, 0.9956, 0.8167, - 0.9573, 0.1456, 0.8912, 0.8245, 0.2111, 0.1344, 0.3731, - 0.3546, 0.1724, 0.5871, 0.2882, 0.4315, 0.5993, 0.8036, - 0.5470, 0.0035, 0.4441, 0.2185, 0.7867, 0.1945, 0.1865, - 0.6911, 0.1596, 0.9086, 0.6358, 0.5350, 0.7830, 0.3829, - 0.8050, 0.3156, 0.1687, 0.6780, 0.7685, 0.5011, 0.3136, - 0.7647, 0.2212, 0.6030, 0.2126, 0.7262, 0.0615, 0.5973, - 0.9209, 0.1964, 0.0162, 0.2415, 0.2513, 0.1957, 0.9780, - 0.4213, 0.1357, 0.1199, 0.0038, 0.5586, 0.0956, 0.1284, - 0.2755, 0.0056, 0.5708, 0.5209, 0.1329, 0.1111, 0.6389, - 0.5765, 0.0036, 0.4213, 0.3664, 0.1220, 0.0489, 0.9965, - 0.8755, 0.8525, 0.6302, 0.2268, 0.7377, 0.0782, 0.4169, - 0.9956, 0.6600, 0.0917, 0.5793, 0.2528, 0.5405, 0.4980, - 0.7610, 0.2135, 0.4588, 0.6096, 0.1996, 0.3369, 0.2309, - 0.4068, 0.9687, 0.7562, 0.2269, 0.1687, 0.3793, 0.6399, - 0.4915, 0.4112, 0.6703, 0.6153, 0.4705, 0.1233, 0.9046, - 0.5631, 0.3352, 0.9593, 0.2252, 0.0553, 0.6186, 0.4222, - 0.2235, 0.1408, 0.4026, 0.0716, 0.3602, 0.6066, 0.8411, - 0.9387, 0.9914, 0.7443, 0.1915, 0.6794, 0.2087, 0.8185, - 0.7287, 0.5539, 0.8187, 0.7845, 0.7145, 0.3411, 0.1268, - 0.7307, 0.2110, 0.3286, 0.1623, 0.5391, 0.8854, 0.3559, - 0.3656, 0.2022, 0.2735, 0.4384, 0.2267, 0.5682, 0.0871, - 0.7504, 0.0440, 0.9453, 0.0560, 0.7453, 0.5245, 0.7868, - 0.7607, 0.0740, 0.5851, 0.6988, 0.9941, 0.1340, 0.8946, - 0.5851, 0.0531, 0.1239, 0.5973, 0.8490, 0.8281, 0.0289, - 0.9819, 0.2244, 0.1732, 0.2714, 0.1424, 0.2251, 0.3208, - 0.8178, 0.8825, 0.1082, 0.5536, 0.6102, 0.1422, 0.0979, - 0.8259, 0.1018, 0.6720, 0.0237, 0.5334, 0.2595, 0.3522, - 0.7713, 0.9285, 0.1688, 0.2536, 0.8257, 0.4873, 0.4909, - 0.6034, 0.7331, 0.7261, 0.8379, 0.4814, 0.4604, 0.0061, - 0.6796, 0.9124, 0.6545, 0.1060, 0.4611, 0.8779, 0.0489, - 0.4770, 0.9426, 0.0362, 0.9291, 0.0085, 0.2023, 0.9600, - 0.4833, 0.8674, 0.3213, 0.0549, 0.2797, 0.9705, 0.5031, - 0.0798, 0.1913, 0.0630, 0.4306, 0.1285, 0.5088, 0.2413, - 0.7971, 0.4797, 0.5836, 0.3643, 0.1162, 0.6647, 0.5069, - 0.8942, 0.2930, 0.0041, 0.5855, 0.8851, 0.2293, 0.4329, - 0.7326, 0.1690, 0.2530, 0.2387, 0.0036, 0.0121, 0.1039, - 0.5190, 0.2097, 0.4634, 0.9255, 0.4940, 0.3517, 0.6614, - 0.4552, 0.1007, 0.0099, 0.5784, 0.2947, 0.7080, 0.3681, - 0.7319, 0.5470, 0.3905, 0.8546, 0.8378, 0.2638, 0.2474, - 0.2020, 0.2602, 0.2058, 0.3439, 0.3725, 0.2372, 0.8608, - 0.6782, 0.4161, 0.2714, 0.0130, 0.3098, 0.6316, 0.5573, - 0.6366, 0.0552, 0.8331, 0.1655, 0.6120, 0.6847, 0.5996, - 0.3423, 0.3329, 0.9321, 0.1631, 0.7762, 0.9917, 0.4457, - 0.2145, 0.3700, 0.3585, 0.1830, 0.8788, 0.7830, 0.8978, - 0.9083, 0.9699, 0.0188, 0.8464, 0.2189, 0.0314, 0.1098, - 0.5475, 0.6995, 0.2544, 0.4684, 0.3350, 0.8207, 0.7849, - 0.7699, 0.7118, 0.1858, 0.2650, 0.1482, 0.3208, 0.2300, - 0.0603, 0.6347, 0.1795, 0.1880, 0.1849, 0.3946, 0.2904, - 0.3987, 0.0378, 0.8753, 0.9825, 0.3658, 0.9591, 0.8361, - 
0.6193, 0.9979, 0.4363, 0.2356, 0.5991, 0.1784, 0.1734, - 0.6202, 0.8094, 0.4349, 0.0297, 0.2971, 0.5907, 0.7311, - 0.5068, 0.6370, 0.7797, 0.6554, 0.9023, 0.2928, 0.4467, - 0.3417, 0.7580, 0.0048, 0.5521, 0.6463, 0.4551, 0.2157, - 0.9490, 0.7709, 0.5133, 0.6298, 0.9824, 0.0939, 0.6470, - 0.8582, 0.6745, 0.3195, 0.7034, 0.3210, 0.4343, 0.8580, - 0.2725, 0.9024, 0.1300, 0.1018, 0.1214, 0.8294, 0.8231, - 0.4988, 0.6393, 0.5659, 0.3564, 0.4693, 0.7534, 0.8943, - 0.8326, 0.6300, 0.8559, 0.5119, 0.1976, 0.2140, 0.7843, - 0.6970, 0.1656, 0.6279, 0.1965, 0.1246, 0.2067, 0.4844, - 0.6932, 0.0875, 0.9163, 0.9941, 0.6738, 0.5892, 0.8709, - 0.4754, 0.3597, 0.3053, 0.6792, 0.1671, 0.1823, 0.5845, - 0.7883, 0.0033, 0.7413, 0.6371, 0.5830, 0.0867, 0.2426, - 0.0434, 0.2486, 0.2783, 0.8635, 0.4149, 0.8689, 0.2094, - 0.0406, 0.9992, 0.4458, 0.4786, 0.5163, 0.4532, 0.9377, - 0.1115, 0.5946, 0.5658, 0.7630, 0.5075, 0.2843, 0.6994, - 0.4331, 0.1647, 0.7111, 0.9415, 0.9134, 0.3649, 0.1405, - 0.3023, 0.1916, 0.9338, 0.8955, 0.1579, 0.6881, 0.6431, - 0.7938, 0.6435, 0.1598, 0.8058, 0.0143, 0.7389, 0.1256, - 0.3343, 0.5721, 0.4218, 0.4586, 0.7800, 0.2224, 0.0329, - 0.3782, 0.5096, 0.7729, 0.2314, 0.6628, 0.4257, 0.0530, - 0.2394, 0.3782, 0.3378, 0.9264, 0.3846, 0.8312, 0.7165, - 0.9018, 0.2255, 0.8764, 0.4648, 0.8905, 0.6287, 0.3061, - 0.2358, 0.3575, 0.8837, 0.8661, 0.4644, 0.0307, 0.1658, - 0.1603, 0.6516, 0.7616, 0.5701, 0.3014, 0.9310, 0.2065, - 0.7077, 0.7364, 0.4491, 0.0518, 0.7097, 0.4874, 0.4668, - 0.0620, 0.4255, 0.6665, 0.1014, 0.1580, 0.0470, 0.3432, - 0.8393, 0.1570, 0.1401, 0.5172, 0.8417, 0.4672, 0.2201, - 0.1674, 0.9708, 0.6467, 0.6089, 0.9800, 0.0513, 0.3044, - 0.6979, 0.6719, 0.1842, 0.8617, 0.2669, 0.6961, 0.6593, - 0.1234, 0.4151, 0.5864, 0.5859, 0.0830, 0.1869, 0.0506, - 0.4507, 0.0944, 0.5583, 0.8982, 0.5055, 0.6171, 0.7678, - 0.4718, 0.9977, 0.2777, 0.7559, 0.1953, 0.4405, 0.9006, - 0.3125, 0.6338, 0.3459, 0.2249, 0.6948, 0.3347, 0.4623, - 0.5826, 0.5751, 0.0351, 0.6388, 0.1795, 0.7330, 0.5707, - 0.5527, 0.2760, 0.8521, 0.1919, 0.1692, 0.4703, 0.1457, - 0.4745, 0.3853, 0.5193, 0.8361, 0.1959, 0.4596, 0.5784, - 0.0974, 0.3018, 0.0027, 0.0284, 0.6379, 0.0985, 0.6697, - 0.1617, 0.9408, 0.1225, 0.4828, 0.1493, 0.2255, 0.9622, - 0.3456, 0.5549, 0.7083, 0.8183, 0.4017, 0.9015, 0.9500, - 0.9757, 0.7762, 0.4376, 0.1119, 0.0478, 0.0482, 0.0578, - 0.7538, 0.6748, 0.4915, 0.0046, 0.4804, 0.0171, 0.2571, - 0.5740, 0.8135, 0.9212, 0.1282, 0.1633, 0.3991, 0.3795, - 0.2563, 0.7909, 0.3096, 0.9640, 0.3523, 0.8436, 0.3227, - 0.0600, 0.3198, 0.3035, 0.1361, 0.3922, 0.1782, 0.7833, - 0.2504, 0.4757, 0.7349, 0.8713, 0.6836, 0.3507, 0.6395, - 0.8433, 0.8479, 0.5637, 0.2767, 0.4270, 0.1521, 0.7400, - 0.4410, 0.9217, 0.8199, 0.3646, 0.7246, 0.6747, 0.1362, - 0.4476, 0.3311, 0.4522, 0.8256, 0.9839, 0.1661, 0.7065, - 0.0053, 0.7677, 0.6798, 0.6573, 0.7053, 0.0946, 0.4782, - 0.6733, 0.8968, 0.8493, 0.9722, 0.3359, 0.2513, 0.8759, - 0.7557, 0.5642, 0.6956, 0.9785, 0.2314, 0.2092, 0.6617, - 0.2157, 0.9152, 0.2913, 0.0438, 0.9309, 0.2537, 0.0994, - 0.4607, 0.6405, 0.5177, 0.7145, 0.1394, 0.3492, 0.5865, - 0.9348, 0.8342, 0.9034, 0.5205, 0.0516, 0.1632, 0.3433, - 0.4758, 0.2442, 0.7218, 0.3687, 0.3685, 0.7796, 0.4166, - 0.7390, 0.4015, 0.0501, 0.4473, 0.1656, 0.4610, 0.3317, - 0.3754, 0.6628, 0.9353, 0.1661, 0.2491, 0.3244, 0.5026, - 0.2276, 0.1611, 0.0412, 0.1485, 0.2596, 0.3703, 0.5359, - 0.7023, 0.3612, 0.9260, 0.3044, 0.4320, 0.5730, 0.4544, - 0.7409, 0.6046, 0.2126, 0.8407, 0.5541, 0.9635, 0.4726, - 0.7284, 0.1079, 
0.8545, 0.8839, 0.1658, 0.6432, 0.3731, - 0.4876, 0.5276, 0.8205, 0.3497, 0.2810, 0.3329, 0.4371, - 0.6824, 0.9070, 0.8115, 0.6630, 0.8608, 0.8445, 0.6452, - 0.0464, 0.2074, 0.6033, 0.8590, 0.4426, 0.1662, 0.9143, - 0.8420, 0.9435, 0.3667, 0.0587, 0.3344, 0.5940, 0.9391, - 0.3098, 0.3277, 0.3122, 0.0248, 0.5693, 0.1331]), + col_indices=tensor([ 97, 1440, 2144, 1958, 7389, 6406, 9389, 3840, 7302, + 7650, 6550, 1650, 8929, 3476, 9627, 1105, 2693, 8161, + 4415, 8455, 8546, 8597, 2765, 3785, 5590, 9273, 9598, + 7759, 3666, 5111, 2913, 3868, 384, 3707, 3158, 4105, + 614, 6258, 6608, 6293, 3245, 152, 7796, 7825, 9295, + 9605, 6793, 5166, 8849, 738, 8932, 6611, 2974, 9696, + 6879, 4301, 5843, 2680, 7501, 1026, 4904, 5591, 2632, + 3516, 5105, 2002, 4490, 517, 6090, 2932, 4756, 2904, + 3294, 7369, 6077, 3618, 9173, 892, 1574, 313, 5627, + 5038, 3446, 5742, 3871, 4129, 3108, 4338, 3174, 6661, + 1143, 2460, 187, 9880, 9250, 1427, 5319, 7832, 1784, + 6055, 7132, 2385, 9649, 9155, 2487, 4564, 5823, 7785, + 2354, 215, 4606, 6413, 2705, 9919, 6547, 8757, 707, + 8716, 9266, 1630, 8461, 5359, 2977, 8804, 289, 3558, + 2009, 1035, 9359, 6638, 5276, 9139, 933, 9345, 3985, + 8096, 6706, 8360, 8194, 2284, 8390, 522, 6988, 439, + 8000, 3571, 1051, 1301, 1313, 791, 4003, 3968, 4291, + 5569, 462, 8336, 6655, 9703, 5886, 9008, 5884, 3487, + 84, 6290, 6486, 2355, 1305, 8255, 7319, 8381, 7173, + 9691, 2459, 1408, 2333, 2885, 8256, 7299, 9106, 1202, + 5362, 9745, 3690, 689, 9329, 7084, 2724, 63, 7349, + 685, 1669, 1566, 1800, 7260, 3700, 3434, 2227, 4271, + 4833, 2546, 199, 4179, 4483, 8845, 7983, 4728, 8091, + 1874, 4681, 4088, 3324, 1985, 2752, 8293, 931, 4664, + 9024, 198, 4871, 2975, 7830, 8688, 8501, 5427, 5910, + 8833, 8818, 9709, 4851, 2723, 5746, 3450, 2289, 1404, + 4858, 1908, 7747, 7068, 3929, 373, 4523, 4738, 3041, + 5455, 7782, 8589, 7697, 2588, 7454, 5700, 4550, 2288, + 3849, 4287, 291, 894, 7642, 216, 2639, 6433, 6178, + 9942, 3204, 9107, 2880, 4317, 9720, 4399, 2994, 390, + 6433, 1863, 2275, 1441, 6608, 8486, 678, 170, 6016, + 6218, 5113, 4344, 4390, 6679, 3115, 2056, 6758, 191, + 6959, 9146, 6847, 3539, 9579, 6113, 4920, 4946, 8875, + 8473, 209, 8091, 5542, 6577, 5566, 1443, 4491, 8506, + 7604, 8274, 1498, 1995, 6003, 1008, 8966, 7935, 9536, + 4755, 2692, 5177, 3511, 5329, 4867, 4470, 265, 5037, + 2387, 336, 4363, 1439, 3217, 6143, 7278, 8898, 4184, + 2783, 1449, 2558, 7221, 4103, 2882, 6703, 404, 8289, + 9655, 3445, 8101, 3377, 4035, 3000, 5910, 6074, 8749, + 7236, 9509, 70, 9176, 8495, 542, 8375, 9478, 3525, + 6753, 6535, 9980, 8134, 4216, 8371, 6168, 785, 4448, + 3750, 3513, 7478, 2395, 3957, 8706, 7740, 7688, 3962, + 3259, 6591, 5116, 8959, 9737, 1589, 392, 7750, 5911, + 7762, 8687, 2817, 9425, 8085, 798, 7096, 1055, 9084, + 6086, 524, 3787, 5041, 7289, 4871, 618, 6684, 4422, + 25, 1635, 6027, 8439, 776, 101, 6710, 8743, 6872, + 3450, 988, 844, 8092, 8825, 5974, 6212, 7331, 2532, + 5406, 8205, 5557, 1369, 599, 2266, 9061, 9460, 2148, + 8968, 801, 5588, 2922, 5527, 5719, 6371, 5182, 5966, + 1977, 7009, 6894, 3858, 1462, 2085, 4547, 7225, 3234, + 4954, 6127, 5126, 5404, 9796, 3371, 4300, 3486, 5215, + 5970, 1474, 2763, 6766, 5748, 8338, 6067, 826, 2753, + 8001, 3661, 1119, 6072, 4775, 6081, 8401, 775, 1827, + 1471, 8453, 3437, 3312, 2053, 680, 493, 7396, 8975, + 3686, 3012, 2394, 8389, 9868, 8264, 5545, 6024, 3672, + 964, 3856, 6655, 7001, 6883, 9350, 2602, 5853, 8929, + 8541, 9967, 7003, 9554, 6790, 8847, 3125, 6723, 735, + 3447, 2521, 2739, 5611, 5326, 1119, 4143, 4859, 3850, + 7205, 6635, 5409, 
5803, 6288, 1116, 9741, 5764, 7719, + 1349, 8657, 1940, 1879, 6040, 8284, 2780, 1702, 9921, + 5373, 5151, 1513, 8529, 2637, 4625, 2803, 6536, 6760, + 3960, 5094, 1377, 3265, 4575, 1252, 5816, 3802, 5195, + 8000, 2757, 8461, 6164, 186, 5234, 7155, 727, 3351, + 8209, 5049, 4151, 5551, 4264, 8059, 1078, 887, 2942, + 1061, 7337, 3614, 2959, 7476, 9113, 450, 9014, 1421, + 8522, 4525, 8332, 9751, 554, 9317, 2749, 9892, 2290, + 7036, 4367, 9597, 572, 6969, 3029, 3992, 8642, 2421, + 5801, 4255, 8208, 3659, 76, 1006, 8958, 5971, 3958, + 9367, 4798, 330, 49, 8730, 1262, 4763, 8961, 5330, + 1664, 1930, 2712, 4519, 6603, 2904, 8200, 9075, 1022, + 9229, 3064, 2483, 3451, 3241, 7560, 3118, 2559, 3352, + 6841, 4743, 7250, 7531, 457, 333, 7871, 3475, 3407, + 8646, 1384, 7630, 7989, 2395, 7147, 3970, 6510, 5247, + 5251, 6195, 9792, 973, 1645, 4908, 1518, 7657, 7844, + 5618, 6642, 9414, 4532, 1427, 9392, 2872, 6398, 7114, + 5713, 4992, 6900, 2985, 4973, 8157, 7014, 1769, 752, + 7114, 1563, 1044, 1854, 895, 5472, 4404, 4866, 5655, + 9000, 886, 3410, 8977, 2270, 4273, 4766, 2460, 9294, + 2970, 2111, 7438, 9804, 3460, 4150, 6104, 5296, 5823, + 3262, 9614, 568, 5927, 3591, 3486, 1676, 2070, 1469, + 9556, 4077, 5276, 1746, 4908, 8388, 9446, 1614, 9240, + 8968, 5306, 8383, 2659, 9189, 8488, 4404, 1894, 8188, + 2493, 2842, 1254, 371, 4191, 1342, 3074, 3908, 631, + 8986, 1900, 4896, 3009, 3155, 108, 9030, 504, 3169, + 5511, 2206, 2905, 2925, 5839, 398, 2377, 4645, 3533, + 2218, 7665, 2523, 8405, 2917, 2640, 8481, 1335, 2324, + 142, 7141, 454, 1721, 600, 21, 7639, 6394, 6756, + 2100, 6154, 2399, 4712, 9571, 5952, 2764, 252, 3378, + 3432, 8074, 3221, 7002, 3979, 7847, 9339, 6916, 2393, + 6910, 1571, 8562, 5518, 1117, 9592, 7785, 4833, 8090, + 6383, 8262, 7080, 5229, 5596, 6004, 413, 6571, 5062, + 1817, 2823, 3123, 8391, 9797, 7353, 9956, 4726, 7765, + 31, 4922, 2097, 9792, 7496, 3023, 2293, 5954, 9177, + 9853, 1725, 7564, 9400, 3439, 7615, 5764, 5690, 3702, + 8224, 4681, 1186, 5501, 4481, 8001, 6179, 6473, 9356, + 877, 3728, 409, 6818, 1256, 4400, 3916, 6273, 5592, + 6765, 7520, 1005, 7832, 3951, 4384, 4727, 5048, 1474, + 5391, 3357, 9620, 3608, 4133, 5863, 4047, 2771, 3444, + 5226, 8693, 8181, 8211, 2511, 3017, 9087, 926, 5361, + 5453, 668, 6941, 156, 4461, 7660, 7713, 5414, 8454, + 6373, 3988, 9957, 5943, 5610, 4857, 8410, 6959, 4958, + 4599, 5279, 2042, 635, 5516, 4334, 5746, 2803, 331, + 9376, 305, 8720, 9921, 2463, 1348, 6404, 3045, 6309, + 148, 9025, 1249, 5522, 9691, 2692, 718, 2973, 7746, + 9637, 4323, 5732, 9547, 5054, 4744, 5791, 6919, 1938, + 5011, 6849, 6139, 6434, 7532, 4468, 8484, 4625, 464, + 9177, 1326, 6076, 6266, 842, 7365, 3490, 1572, 3501, + 3971, 8454, 9926, 3805, 7557, 1532, 1434, 2849, 6510, + 1547, 7226, 4204, 9303, 4346, 3011, 3263, 6764, 7247, + 61, 9713, 8858, 3219, 4137, 6816, 5045, 9876, 2043, + 9518, 7671, 4612, 4601, 3675, 919, 9228, 572, 5805, + 6886]), + values=tensor([0.7985, 0.8292, 0.9126, 0.9637, 0.6390, 0.8964, 0.8703, + 0.5740, 0.7944, 0.6135, 0.7277, 0.6175, 0.8474, 0.1693, + 0.9718, 0.3770, 0.4466, 0.5584, 0.0945, 0.9174, 0.8832, + 0.8556, 0.4889, 0.2005, 0.4036, 0.4531, 0.5589, 0.8568, + 0.4439, 0.9299, 0.8050, 0.4071, 0.9407, 0.4063, 0.9050, + 0.1484, 0.1837, 0.1745, 0.8969, 0.4439, 0.1084, 0.9603, + 0.9882, 0.7713, 0.0025, 0.1854, 0.8141, 0.4395, 0.8368, + 0.9452, 0.2309, 0.6459, 0.8645, 0.3433, 0.4050, 0.4809, + 0.6021, 0.1990, 0.8181, 0.6952, 0.0015, 0.7187, 0.3544, + 0.5735, 0.2228, 0.5742, 0.4718, 0.4102, 0.0428, 0.5798, + 0.2382, 0.9476, 0.8600, 0.3008, 0.0739, 0.4952, 
0.6514, + 0.5453, 0.4108, 0.0420, 0.0438, 0.2852, 0.4954, 0.6389, + 0.8081, 0.1745, 0.3758, 0.0849, 0.4427, 0.8535, 0.6799, + 0.3397, 0.6223, 0.6836, 0.1776, 0.8539, 0.0973, 0.5104, + 0.4337, 0.0103, 0.7867, 0.7132, 0.2456, 0.8833, 0.4727, + 0.3282, 0.8894, 0.1954, 0.4334, 0.5604, 0.9313, 0.0887, + 0.0195, 0.9201, 0.1580, 0.8789, 0.4454, 0.3180, 0.6578, + 0.1571, 0.6426, 0.8215, 0.7678, 0.7734, 0.9643, 0.6635, + 0.4640, 0.1192, 0.3127, 0.5541, 0.0428, 0.7717, 0.3765, + 0.5534, 0.5170, 0.4780, 0.3986, 0.0515, 0.2643, 0.1702, + 0.4840, 0.2859, 0.3770, 0.3143, 0.7340, 0.1232, 0.5933, + 0.7908, 0.9321, 0.9864, 0.5408, 0.5611, 0.0364, 0.9984, + 0.3879, 0.9205, 0.6084, 0.3702, 0.9297, 0.2739, 0.2424, + 0.3713, 0.9299, 0.4786, 0.2485, 0.7000, 0.5867, 0.2526, + 0.2055, 0.7219, 0.0395, 0.5351, 0.5204, 0.1385, 0.2131, + 0.1600, 0.8725, 0.6258, 0.6871, 0.9696, 0.2154, 0.6714, + 0.9419, 0.8948, 0.4671, 0.3580, 0.1487, 0.5628, 0.4240, + 0.8898, 0.0113, 0.7472, 0.5565, 0.3675, 0.4370, 0.5810, + 0.9113, 0.6737, 0.5809, 0.4784, 0.2270, 0.4437, 0.7931, + 0.3073, 0.4791, 0.5522, 0.4110, 0.2204, 0.2798, 0.5595, + 0.7561, 0.8108, 0.3126, 0.6258, 0.7953, 0.5360, 0.4249, + 0.9229, 0.3180, 0.3942, 0.7678, 0.9044, 0.0657, 0.4366, + 0.8571, 0.8417, 0.8251, 0.5783, 0.5957, 0.5001, 0.7450, + 0.9639, 0.1376, 0.9637, 0.8467, 0.8156, 0.5971, 0.3582, + 0.2053, 0.2511, 0.7546, 0.1803, 0.4055, 0.4873, 0.1034, + 0.4279, 0.8853, 0.4308, 0.0813, 0.9511, 0.2670, 0.4699, + 0.7103, 0.2748, 0.8052, 0.8237, 0.6693, 0.5230, 0.7654, + 0.1530, 0.6796, 0.4914, 0.0097, 0.5150, 0.8382, 0.4668, + 0.4928, 0.6120, 0.9311, 0.5064, 0.8214, 0.1474, 0.5086, + 0.3183, 0.3811, 0.1954, 0.7453, 0.3820, 0.0064, 0.7015, + 0.3425, 0.8868, 0.1282, 0.0397, 0.4121, 0.5858, 0.8175, + 0.1462, 0.8908, 0.3143, 0.9510, 0.3113, 0.0211, 0.6620, + 0.6158, 0.9765, 0.2578, 0.8390, 0.7517, 0.3534, 0.4243, + 0.2505, 0.4322, 0.8264, 0.1524, 0.8995, 0.6943, 0.5309, + 0.7488, 0.0580, 0.2312, 0.3011, 0.3264, 0.5037, 0.2370, + 0.0420, 0.5623, 0.9267, 0.9858, 0.1481, 0.9483, 0.3165, + 0.9126, 0.6509, 0.9182, 0.4777, 0.7481, 0.0469, 0.9043, + 0.0952, 0.9595, 0.4161, 0.7247, 0.4741, 0.3234, 0.7650, + 0.7565, 0.2450, 0.4376, 0.3044, 0.4336, 0.3663, 0.8597, + 0.7323, 0.1417, 0.1041, 0.6531, 0.0600, 0.7813, 0.4968, + 0.7078, 0.7809, 0.4645, 0.9134, 0.7955, 0.9270, 0.3892, + 0.4919, 0.6508, 0.8104, 0.6444, 0.8538, 0.0168, 0.7213, + 0.4470, 0.4092, 0.3972, 0.4272, 0.4396, 0.2854, 0.2165, + 0.2704, 0.8738, 0.2559, 0.7158, 0.1350, 0.3858, 0.6654, + 0.5491, 0.7159, 0.8599, 0.7612, 0.7746, 0.8916, 0.7312, + 0.4446, 0.6634, 0.2510, 0.1755, 0.8638, 0.0674, 0.5624, + 0.4466, 0.9224, 0.6449, 0.8438, 0.7007, 0.4461, 0.5492, + 0.6264, 0.8720, 0.1196, 0.8088, 0.0537, 0.7721, 0.5736, + 0.7320, 0.7051, 0.1777, 0.8302, 0.5806, 0.1983, 0.0260, + 0.0731, 0.1308, 0.6630, 0.8906, 0.5272, 0.4359, 0.7247, + 0.6229, 0.6902, 0.3033, 0.3539, 0.3894, 0.1203, 0.1495, + 0.2198, 0.7012, 0.5626, 0.6430, 0.3475, 0.9616, 0.6384, + 0.4801, 0.6594, 0.9016, 0.8242, 0.5233, 0.4910, 0.0668, + 0.9682, 0.6322, 0.7289, 0.9652, 0.5294, 0.9312, 0.6097, + 0.1493, 0.8753, 0.2504, 0.7297, 0.7080, 0.8460, 0.0758, + 0.7276, 0.6564, 0.7728, 0.1077, 0.3736, 0.0846, 0.6712, + 0.6204, 0.3024, 0.8368, 0.2720, 0.7488, 0.1453, 0.6549, + 0.4598, 0.6335, 0.7503, 0.2179, 0.1850, 0.3285, 0.2431, + 0.8578, 0.5099, 0.1381, 0.3953, 0.5936, 0.4803, 0.5885, + 0.3319, 0.1395, 0.9688, 0.2284, 0.0659, 0.5555, 0.5374, + 0.8116, 0.5551, 0.8949, 0.6673, 0.1214, 0.9602, 0.9835, + 0.7835, 0.2650, 0.8046, 0.0969, 0.6181, 0.3852, 0.7238, + 
0.9892, 0.6275, 0.2684, 0.2229, 0.2103, 0.5833, 0.3375, + 0.6187, 0.9326, 0.3067, 0.9429, 0.2824, 0.3546, 0.6443, + 0.8602, 0.4912, 0.8492, 0.3908, 0.9713, 0.2146, 0.5871, + 0.0961, 0.5002, 0.8787, 0.5251, 0.4161, 0.0982, 0.1089, + 0.8405, 0.6380, 0.9982, 0.7942, 0.8628, 0.2091, 0.0744, + 0.3184, 0.4646, 0.9971, 0.7054, 0.9889, 0.6888, 0.5889, + 0.6484, 0.1037, 0.9756, 0.6617, 0.7727, 0.4897, 0.7861, + 0.8012, 0.3596, 0.7235, 0.1717, 0.7363, 0.3334, 0.0269, + 0.8284, 0.9001, 0.3651, 0.6109, 0.8218, 0.4341, 0.6079, + 0.6209, 0.4368, 0.8726, 0.4720, 0.4031, 0.2434, 0.1293, + 0.9540, 0.6254, 0.6361, 0.5769, 0.0918, 0.4746, 0.1167, + 0.0831, 0.4813, 0.4777, 0.2588, 0.6223, 0.1043, 0.1707, + 0.8211, 0.7738, 0.9579, 0.1500, 0.7661, 0.7596, 0.8727, + 0.6702, 0.7094, 0.7778, 0.8947, 0.4550, 0.8353, 0.4465, + 0.8633, 0.6174, 0.9452, 0.1900, 0.0166, 0.4551, 0.9494, + 0.9672, 0.2496, 0.2093, 0.4845, 0.5468, 0.7706, 0.9413, + 0.0331, 0.1560, 0.1401, 0.4091, 0.5408, 0.3378, 0.5539, + 0.2973, 0.0411, 0.1417, 0.0080, 0.7945, 0.4029, 0.8152, + 0.7044, 0.3206, 0.7483, 0.7363, 0.0999, 0.4047, 0.9217, + 0.9062, 0.4433, 0.7833, 0.5042, 0.6245, 0.7463, 0.9553, + 0.2497, 0.8168, 0.4291, 0.8654, 0.6059, 0.2493, 0.1021, + 0.2663, 0.7964, 0.9497, 0.5403, 0.1135, 0.8180, 0.3151, + 0.3801, 0.5925, 0.5264, 0.0484, 0.4237, 0.6130, 0.5419, + 0.5757, 0.7273, 0.3291, 0.4798, 0.8050, 0.2176, 0.7297, + 0.0683, 0.5826, 0.9404, 0.1106, 0.8144, 0.8262, 0.2267, + 0.8793, 0.8564, 0.0688, 0.2247, 0.1100, 0.2796, 0.2585, + 0.0970, 0.0779, 0.2881, 0.8446, 0.8344, 0.7065, 0.9599, + 0.1995, 0.3767, 0.5024, 0.9333, 0.6174, 0.2666, 0.8382, + 0.2056, 0.3331, 0.1704, 0.6676, 0.6982, 0.5664, 0.1921, + 0.5898, 0.0372, 0.5474, 0.8241, 0.6228, 0.7903, 0.4308, + 0.1025, 0.5364, 0.4844, 0.0771, 0.6369, 0.4529, 0.8183, + 0.8444, 0.5716, 0.1334, 0.7138, 0.2356, 0.5390, 0.2195, + 0.5219, 0.8738, 0.3348, 0.9412, 0.6114, 0.4049, 0.9981, + 0.9835, 0.3611, 0.6302, 0.5451, 0.9737, 0.5366, 0.2597, + 0.4409, 0.8814, 0.6152, 0.1559, 0.0135, 0.7104, 0.8824, + 0.4782, 0.8982, 0.7490, 0.0923, 0.8937, 0.8444, 0.0322, + 0.3054, 0.6715, 0.9118, 0.2631, 0.0906, 0.2909, 0.9425, + 0.1448, 0.8400, 0.3342, 0.5380, 0.1519, 0.3943, 0.4147, + 0.1510, 0.2153, 0.7710, 0.1552, 0.6763, 0.4545, 0.3125, + 0.1084, 0.7681, 0.5976, 0.9620, 0.0118, 0.4733, 0.7150, + 0.0682, 0.6551, 0.4845, 0.0677, 0.2500, 0.0550, 0.0363, + 0.9889, 0.0257, 0.5240, 0.9603, 0.2596, 0.5516, 0.0887, + 0.2762, 0.8570, 0.7975, 0.9373, 0.2743, 0.5900, 0.0957, + 0.7322, 0.0404, 0.1185, 0.1339, 0.6396, 0.5159, 0.6155, + 0.2310, 0.5873, 0.2315, 0.6330, 0.6628, 0.7503, 0.0022, + 0.4145, 0.5890, 0.9789, 0.2170, 0.0337, 0.4966, 0.3888, + 0.3070, 0.6157, 0.8843, 0.7732, 0.1928, 0.8747, 0.3203, + 0.4770, 0.9202, 0.7496, 0.3764, 0.1150, 0.0707, 0.1106, + 0.7800, 0.9243, 0.1374, 0.6770, 0.4105, 0.2288, 0.7035, + 0.1140, 0.9869, 0.7479, 0.6129, 0.1763, 0.5008, 0.3926, + 0.0704, 0.3687, 0.2538, 0.1892, 0.2583, 0.1767, 0.8872, + 0.3984, 0.2164, 0.6220, 0.0697, 0.8667, 0.9981, 0.3230, + 0.4418, 0.2500, 0.1038, 0.0090, 0.4565, 0.0019, 0.4164, + 0.0820, 0.6061, 0.0871, 0.1636, 0.1580, 0.0086, 0.2387, + 0.6178, 0.2746, 0.7215, 0.8097, 0.6786, 0.2335, 0.0302, + 0.2325, 0.0182, 0.5502, 0.6378, 0.1699, 0.1845, 0.3699, + 0.0911, 0.5517, 0.0687, 0.6693, 0.6092, 0.0948, 0.8453, + 0.9252, 0.7773, 0.4322, 0.6158, 0.5836, 0.4636, 0.6169, + 0.2967, 0.1721, 0.3829, 0.1477, 0.3742, 0.8852, 0.0565, + 0.9577, 0.3647, 0.5936, 0.6885, 0.0902, 0.3014, 0.9178, + 0.7811, 0.3970, 0.8000, 0.1178, 0.8765, 0.1240, 0.6417, + 0.5235, 0.4652, 
0.1804, 0.5607, 0.1673, 0.4895, 0.0864, + 0.8986, 0.2827, 0.0053, 0.2671, 0.7331, 0.4628, 0.3817, + 0.3875, 0.8415, 0.3093, 0.9524, 0.2639, 0.7580, 0.5933, + 0.7004, 0.5962, 0.1538, 0.8149, 0.6737, 0.4614, 0.0100, + 0.0884, 0.9894, 0.5265, 0.4667, 0.4118, 0.9744, 0.1588, + 0.2705, 0.8118, 0.6210, 0.1153, 0.3632, 0.8850, 0.7575, + 0.2906, 0.3588, 0.4315, 0.5729, 0.6483, 0.8535, 0.1307, + 0.8408, 0.8607, 0.9545, 0.5493, 0.8516, 0.8836, 0.2334, + 0.7791, 0.0415, 0.7662, 0.1332, 0.5970, 0.5830, 0.1760, + 0.8516, 0.3598, 0.9753, 0.0488, 0.2694, 0.1902]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8452, 0.1047, 0.0563, ..., 0.6079, 0.4820, 0.4351]) +tensor([0.0156, 0.6147, 0.1096, ..., 0.9950, 0.7654, 0.9443]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -919,268 +647,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.998100280761719 seconds +Time: 5.181962966918945 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 141479 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.518462419509888} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 606, 4248, 4323, 142, 3267, 1209, 7616, 5137, 8211, - 6424, 2299, 2295, 132, 1237, 207, 4471, 8426, 1510, - 3485, 6960, 5069, 6876, 5759, 6010, 6198, 5515, 1057, - 6920, 9155, 4894, 8077, 2579, 7160, 9343, 5510, 6273, - 3046, 6095, 9498, 4568, 7882, 4172, 6457, 7199, 2821, - 8109, 7947, 3059, 5273, 811, 3090, 6904, 7921, 941, - 171, 1700, 8078, 8915, 5888, 6297, 3017, 6368, 5232, - 3121, 6921, 1954, 1701, 989, 2512, 3553, 4586, 8676, - 4195, 7666, 522, 8800, 4183, 4215, 5436, 4713, 8751, - 9665, 2209, 2880, 8327, 8043, 3725, 5157, 5205, 5068, - 5472, 2221, 9235, 5844, 1615, 5209, 8676, 880, 3145, - 5350, 6766, 607, 1821, 2811, 5587, 4952, 36, 972, - 9657, 5891, 932, 6251, 2971, 8136, 8846, 5746, 6772, - 3335, 9210, 5699, 6015, 5058, 4114, 1121, 1345, 4570, - 3080, 1837, 5077, 2774, 60, 9761, 283, 1953, 8754, - 568, 8363, 7504, 4312, 2950, 8818, 3615, 7260, 5054, - 9479, 8823, 817, 4247, 8916, 458, 1663, 1516, 8921, - 8262, 3930, 5511, 4167, 1454, 227, 9082, 8167, 6759, - 4104, 2365, 4891, 7213, 8479, 3335, 3691, 5216, 8378, - 1063, 5818, 8471, 7448, 5050, 8971, 747, 666, 6626, - 6463, 5721, 7264, 5789, 105, 7831, 7486, 5643, 8037, - 3964, 1225, 7868, 6793, 785, 9337, 463, 785, 4486, - 40, 8639, 6486, 5799, 220, 7289, 3630, 4023, 9716, - 636, 3153, 9820, 4041, 7979, 6263, 5265, 4683, 9203, - 90, 7642, 420, 3880, 2822, 376, 3766, 4385, 838, - 7707, 6033, 8983, 6271, 3139, 9042, 3814, 2813, 2724, - 6271, 5387, 8121, 7195, 5117, 5794, 8947, 1085, 4429, - 160, 2993, 9952, 7440, 7699, 3817, 18, 5719, 6435, - 6095, 7069, 731, 9481, 9394, 2426, 4205, 2461, 3873, - 8586, 4374, 7356, 5004, 2798, 7617, 2197, 119, 2470, - 3245, 8733, 6824, 4746, 9120, 
2061, 7077, 2855, 8255, - 3205, 7568, 3053, 9530, 1111, 6923, 2083, 2934, 8128, - 5682, 8762, 6169, 7878, 5288, 2262, 4469, 7426, 1628, - 8495, 4564, 2858, 9806, 1173, 5731, 2322, 8964, 1865, - 7016, 9059, 5039, 2002, 1262, 7718, 4777, 1962, 1879, - 7855, 7890, 8641, 1099, 829, 2515, 1746, 2320, 6687, - 4799, 4569, 6237, 125, 2360, 7731, 401, 9623, 7974, - 7957, 893, 162, 8581, 115, 4518, 1206, 9451, 4274, - 8143, 5673, 5773, 9287, 9034, 6759, 2616, 4836, 638, - 2832, 3042, 3961, 9127, 7640, 5801, 3757, 8870, 8049, - 7486, 8964, 1399, 684, 1029, 2622, 6759, 2002, 3761, - 9818, 252, 8051, 7124, 5540, 8162, 333, 6237, 8228, - 7539, 7326, 3539, 9730, 3200, 1285, 3058, 6960, 9438, - 3335, 118, 848, 7603, 2443, 3021, 5193, 1159, 4828, - 5979, 3020, 6180, 8297, 2434, 8432, 873, 6202, 7493, - 1203, 4541, 547, 3753, 2874, 591, 2074, 4910, 3558, - 8, 8298, 2922, 6062, 8645, 3039, 7096, 8203, 433, - 4098, 2978, 5201, 220, 4789, 9244, 7862, 3711, 8615, - 7690, 3102, 7583, 2602, 5733, 1120, 4841, 144, 8214, - 5416, 404, 6971, 805, 6355, 8972, 3062, 8352, 2931, - 889, 7464, 2057, 2241, 2991, 9880, 133, 6643, 7302, - 3644, 4493, 6722, 9152, 4033, 7880, 8852, 4724, 5057, - 855, 3221, 7385, 5224, 5527, 5418, 5692, 9101, 2016, - 4500, 7558, 4991, 2183, 8431, 5687, 826, 8185, 7512, - 2034, 7269, 9017, 5667, 1066, 3954, 6620, 6000, 5735, - 4170, 6764, 5652, 6631, 3447, 1745, 1777, 3216, 7230, - 9533, 1387, 2874, 521, 745, 8458, 5566, 3373, 8999, - 2967, 8776, 9639, 3523, 831, 3365, 1405, 3704, 2518, - 8736, 9840, 8611, 7168, 974, 9007, 4762, 6891, 9021, - 6430, 9209, 301, 1699, 2870, 8888, 7910, 1671, 663, - 5112, 5271, 1358, 1676, 8365, 1780, 7972, 1361, 1045, - 1410, 4989, 2560, 1704, 6356, 9942, 2422, 9763, 6643, - 1212, 5204, 5812, 5025, 7667, 8288, 576, 8273, 6833, - 9949, 6536, 5359, 6612, 7293, 5641, 1851, 335, 4310, - 5189, 2075, 5757, 8286, 7613, 8539, 4264, 7571, 1867, - 7380, 7067, 1, 1197, 3144, 5717, 7323, 4837, 5709, - 8246, 2292, 5490, 4382, 1545, 1139, 9329, 6740, 703, - 1593, 798, 7486, 9746, 3819, 8833, 6776, 5180, 5052, - 2319, 7164, 4881, 5400, 7841, 2215, 554, 7675, 7385, - 9306, 6355, 6231, 9361, 2385, 796, 4758, 7147, 5797, - 8281, 3852, 8531, 1545, 8108, 6926, 7059, 4504, 4531, - 7506, 124, 7458, 3327, 3628, 7556, 4417, 5016, 2525, - 7489, 8555, 8443, 3229, 5225, 3661, 6918, 4100, 8017, - 696, 3226, 8086, 741, 4257, 7752, 9762, 5276, 7602, - 5466, 6581, 5529, 3577, 8691, 4977, 7816, 9124, 4760, - 3859, 4524, 9305, 6899, 4753, 8459, 4314, 3121, 6139, - 2846, 229, 1517, 5567, 5587, 6908, 6983, 1860, 4933, - 7361, 4014, 765, 1397, 6972, 1767, 184, 2375, 5132, - 8705, 5405, 2339, 1938, 8334, 4952, 7588, 5389, 6605, - 3177, 1985, 9590, 5212, 8072, 3816, 8811, 3096, 8925, - 1481, 7459, 6634, 1656, 5790, 1907, 5450, 5369, 1969, - 416, 8199, 204, 7300, 3972, 4789, 9148, 3034, 8861, - 867, 3687, 8017, 7357, 7678, 1306, 4227, 2554, 13, - 4237, 2094, 155, 3093, 7018, 9364, 9696, 454, 2404, - 9877, 1667, 5368, 2586, 9721, 344, 4734, 6749, 9534, - 6490, 9586, 8397, 4492, 1324, 1754, 8517, 7428, 618, - 5779, 2546, 5800, 591, 8731, 153, 2268, 5592, 747, - 1729, 6137, 9551, 3864, 9573, 4958, 9404, 1107, 40, - 81, 4587, 1225, 3165, 570, 3811, 3337, 716, 3120, - 5004, 6021, 7094, 5385, 1117, 9004, 5007, 7419, 1047, - 3691, 9910, 3062, 8070, 427, 7658, 5876, 5585, 6170, - 8863, 2006, 168, 2709, 1624, 9133, 9550, 8635, 9703, - 2241, 7070, 8845, 7089, 5296, 7227, 4530, 8156, 6517, - 6289, 3818, 5042, 4297, 6306, 292, 2597, 1535, 5147, - 8744, 3740, 2035, 1766, 8354, 3023, 1994, 479, 1457, - 2645, 8758, 
6863, 6834, 8195, 5541, 5631, 8260, 1567, - 9934, 417, 8257, 382, 2493, 3232, 8660, 8338, 7113, - 1209, 5614, 471, 6024, 9286, 54, 7652, 6187, 540, - 8542, 6207, 6408, 4218, 7616, 5719, 4534, 6986, 2199, - 2970, 7293, 6650, 9284, 956, 6642, 9326, 5000, 9529, - 5318, 3414, 2028, 6559, 6060, 2447, 543, 4868, 7178, - 336, 1140, 3673, 2489, 8807, 2157, 5056, 6476, 3035, - 9189, 2353, 2512, 4440, 9211, 3097, 2278, 932, 4252, - 768, 331, 5614, 3971, 5355, 7842, 9323, 6119, 7597, - 450, 8478, 583, 2348, 5392, 1174, 1349, 5119, 7445, - 479, 1422, 413, 167, 314, 7818, 8189, 3817, 9967, - 3831, 4635, 9032, 2332, 6873, 8301, 1534, 4964, 9327, - 3874, 9991, 8234, 517, 4545, 7823, 9803, 8221, 542, - 5331, 9104, 7312, 7039, 7742, 2907, 3191, 8019, 2692, - 5064, 3352, 9762, 7319, 9026, 6962, 128, 5753, 4, - 9327, 3158, 8441, 4944, 4588, 8836, 9882, 3216, 3631, - 4729]), - values=tensor([0.7863, 0.8342, 0.7119, 0.8691, 0.1799, 0.8019, 0.5297, - 0.0461, 0.6789, 0.7616, 0.1867, 0.1510, 0.4759, 0.4897, - 0.3112, 0.5107, 0.2673, 0.7677, 0.2078, 0.1024, 0.6974, - 0.7483, 0.1375, 0.4229, 0.3418, 0.4279, 0.1779, 0.3128, - 0.7738, 0.2688, 0.7139, 0.5474, 0.8893, 0.2669, 0.3732, - 0.3978, 0.5696, 0.8366, 0.1938, 0.3846, 0.7418, 0.1575, - 0.9175, 0.3788, 0.9034, 0.5215, 0.5959, 0.4812, 0.8080, - 0.9553, 0.3234, 0.3058, 0.4874, 0.0548, 0.2922, 0.3243, - 0.2802, 0.3277, 0.7008, 0.8317, 0.6850, 0.3190, 0.4857, - 0.5360, 0.3195, 0.2796, 0.9648, 0.3173, 0.1462, 0.6508, - 0.9193, 0.0306, 0.5949, 0.4367, 0.7038, 0.9052, 0.8896, - 0.7649, 0.2853, 0.3726, 0.3482, 0.2792, 0.4239, 0.7674, - 0.7629, 0.2768, 0.2724, 0.5503, 0.3248, 0.8259, 0.0264, - 0.4983, 0.0596, 0.1536, 0.8502, 0.3449, 0.3085, 0.4356, - 0.3873, 0.9396, 0.5172, 0.4872, 0.6271, 0.3707, 0.6993, - 0.4127, 0.1519, 0.7471, 0.9960, 0.8186, 0.7247, 0.9753, - 0.6228, 0.3862, 0.0395, 0.6842, 0.6671, 0.0794, 0.0052, - 0.9718, 0.2986, 0.0151, 0.4374, 0.9946, 0.0935, 0.5060, - 0.9278, 0.3396, 0.3559, 0.9710, 0.0242, 0.8115, 0.3763, - 0.7869, 0.4303, 0.4782, 0.2549, 0.1494, 0.9501, 0.9807, - 0.5176, 0.8320, 0.7401, 0.7586, 0.0516, 0.2314, 0.8977, - 0.3697, 0.6354, 0.3793, 0.1332, 0.4121, 0.9345, 0.8805, - 0.2787, 0.5335, 0.9802, 0.1369, 0.6510, 0.3232, 0.7449, - 0.7218, 0.7851, 0.7585, 0.3555, 0.2232, 0.3523, 0.7028, - 0.1003, 0.5059, 0.7756, 0.5967, 0.2963, 0.2044, 0.5060, - 0.4409, 0.2094, 0.4839, 0.8768, 0.4050, 0.2371, 0.4748, - 0.4748, 0.2096, 0.9009, 0.7365, 0.7361, 0.9956, 0.8167, - 0.9573, 0.1456, 0.8912, 0.8245, 0.2111, 0.1344, 0.3731, - 0.3546, 0.1724, 0.5871, 0.2882, 0.4315, 0.5993, 0.8036, - 0.5470, 0.0035, 0.4441, 0.2185, 0.7867, 0.1945, 0.1865, - 0.6911, 0.1596, 0.9086, 0.6358, 0.5350, 0.7830, 0.3829, - 0.8050, 0.3156, 0.1687, 0.6780, 0.7685, 0.5011, 0.3136, - 0.7647, 0.2212, 0.6030, 0.2126, 0.7262, 0.0615, 0.5973, - 0.9209, 0.1964, 0.0162, 0.2415, 0.2513, 0.1957, 0.9780, - 0.4213, 0.1357, 0.1199, 0.0038, 0.5586, 0.0956, 0.1284, - 0.2755, 0.0056, 0.5708, 0.5209, 0.1329, 0.1111, 0.6389, - 0.5765, 0.0036, 0.4213, 0.3664, 0.1220, 0.0489, 0.9965, - 0.8755, 0.8525, 0.6302, 0.2268, 0.7377, 0.0782, 0.4169, - 0.9956, 0.6600, 0.0917, 0.5793, 0.2528, 0.5405, 0.4980, - 0.7610, 0.2135, 0.4588, 0.6096, 0.1996, 0.3369, 0.2309, - 0.4068, 0.9687, 0.7562, 0.2269, 0.1687, 0.3793, 0.6399, - 0.4915, 0.4112, 0.6703, 0.6153, 0.4705, 0.1233, 0.9046, - 0.5631, 0.3352, 0.9593, 0.2252, 0.0553, 0.6186, 0.4222, - 0.2235, 0.1408, 0.4026, 0.0716, 0.3602, 0.6066, 0.8411, - 0.9387, 0.9914, 0.7443, 0.1915, 0.6794, 0.2087, 0.8185, - 0.7287, 0.5539, 0.8187, 0.7845, 0.7145, 0.3411, 0.1268, - 
0.7307, 0.2110, 0.3286, 0.1623, 0.5391, 0.8854, 0.3559, - 0.3656, 0.2022, 0.2735, 0.4384, 0.2267, 0.5682, 0.0871, - 0.7504, 0.0440, 0.9453, 0.0560, 0.7453, 0.5245, 0.7868, - 0.7607, 0.0740, 0.5851, 0.6988, 0.9941, 0.1340, 0.8946, - 0.5851, 0.0531, 0.1239, 0.5973, 0.8490, 0.8281, 0.0289, - 0.9819, 0.2244, 0.1732, 0.2714, 0.1424, 0.2251, 0.3208, - 0.8178, 0.8825, 0.1082, 0.5536, 0.6102, 0.1422, 0.0979, - 0.8259, 0.1018, 0.6720, 0.0237, 0.5334, 0.2595, 0.3522, - 0.7713, 0.9285, 0.1688, 0.2536, 0.8257, 0.4873, 0.4909, - 0.6034, 0.7331, 0.7261, 0.8379, 0.4814, 0.4604, 0.0061, - 0.6796, 0.9124, 0.6545, 0.1060, 0.4611, 0.8779, 0.0489, - 0.4770, 0.9426, 0.0362, 0.9291, 0.0085, 0.2023, 0.9600, - 0.4833, 0.8674, 0.3213, 0.0549, 0.2797, 0.9705, 0.5031, - 0.0798, 0.1913, 0.0630, 0.4306, 0.1285, 0.5088, 0.2413, - 0.7971, 0.4797, 0.5836, 0.3643, 0.1162, 0.6647, 0.5069, - 0.8942, 0.2930, 0.0041, 0.5855, 0.8851, 0.2293, 0.4329, - 0.7326, 0.1690, 0.2530, 0.2387, 0.0036, 0.0121, 0.1039, - 0.5190, 0.2097, 0.4634, 0.9255, 0.4940, 0.3517, 0.6614, - 0.4552, 0.1007, 0.0099, 0.5784, 0.2947, 0.7080, 0.3681, - 0.7319, 0.5470, 0.3905, 0.8546, 0.8378, 0.2638, 0.2474, - 0.2020, 0.2602, 0.2058, 0.3439, 0.3725, 0.2372, 0.8608, - 0.6782, 0.4161, 0.2714, 0.0130, 0.3098, 0.6316, 0.5573, - 0.6366, 0.0552, 0.8331, 0.1655, 0.6120, 0.6847, 0.5996, - 0.3423, 0.3329, 0.9321, 0.1631, 0.7762, 0.9917, 0.4457, - 0.2145, 0.3700, 0.3585, 0.1830, 0.8788, 0.7830, 0.8978, - 0.9083, 0.9699, 0.0188, 0.8464, 0.2189, 0.0314, 0.1098, - 0.5475, 0.6995, 0.2544, 0.4684, 0.3350, 0.8207, 0.7849, - 0.7699, 0.7118, 0.1858, 0.2650, 0.1482, 0.3208, 0.2300, - 0.0603, 0.6347, 0.1795, 0.1880, 0.1849, 0.3946, 0.2904, - 0.3987, 0.0378, 0.8753, 0.9825, 0.3658, 0.9591, 0.8361, - 0.6193, 0.9979, 0.4363, 0.2356, 0.5991, 0.1784, 0.1734, - 0.6202, 0.8094, 0.4349, 0.0297, 0.2971, 0.5907, 0.7311, - 0.5068, 0.6370, 0.7797, 0.6554, 0.9023, 0.2928, 0.4467, - 0.3417, 0.7580, 0.0048, 0.5521, 0.6463, 0.4551, 0.2157, - 0.9490, 0.7709, 0.5133, 0.6298, 0.9824, 0.0939, 0.6470, - 0.8582, 0.6745, 0.3195, 0.7034, 0.3210, 0.4343, 0.8580, - 0.2725, 0.9024, 0.1300, 0.1018, 0.1214, 0.8294, 0.8231, - 0.4988, 0.6393, 0.5659, 0.3564, 0.4693, 0.7534, 0.8943, - 0.8326, 0.6300, 0.8559, 0.5119, 0.1976, 0.2140, 0.7843, - 0.6970, 0.1656, 0.6279, 0.1965, 0.1246, 0.2067, 0.4844, - 0.6932, 0.0875, 0.9163, 0.9941, 0.6738, 0.5892, 0.8709, - 0.4754, 0.3597, 0.3053, 0.6792, 0.1671, 0.1823, 0.5845, - 0.7883, 0.0033, 0.7413, 0.6371, 0.5830, 0.0867, 0.2426, - 0.0434, 0.2486, 0.2783, 0.8635, 0.4149, 0.8689, 0.2094, - 0.0406, 0.9992, 0.4458, 0.4786, 0.5163, 0.4532, 0.9377, - 0.1115, 0.5946, 0.5658, 0.7630, 0.5075, 0.2843, 0.6994, - 0.4331, 0.1647, 0.7111, 0.9415, 0.9134, 0.3649, 0.1405, - 0.3023, 0.1916, 0.9338, 0.8955, 0.1579, 0.6881, 0.6431, - 0.7938, 0.6435, 0.1598, 0.8058, 0.0143, 0.7389, 0.1256, - 0.3343, 0.5721, 0.4218, 0.4586, 0.7800, 0.2224, 0.0329, - 0.3782, 0.5096, 0.7729, 0.2314, 0.6628, 0.4257, 0.0530, - 0.2394, 0.3782, 0.3378, 0.9264, 0.3846, 0.8312, 0.7165, - 0.9018, 0.2255, 0.8764, 0.4648, 0.8905, 0.6287, 0.3061, - 0.2358, 0.3575, 0.8837, 0.8661, 0.4644, 0.0307, 0.1658, - 0.1603, 0.6516, 0.7616, 0.5701, 0.3014, 0.9310, 0.2065, - 0.7077, 0.7364, 0.4491, 0.0518, 0.7097, 0.4874, 0.4668, - 0.0620, 0.4255, 0.6665, 0.1014, 0.1580, 0.0470, 0.3432, - 0.8393, 0.1570, 0.1401, 0.5172, 0.8417, 0.4672, 0.2201, - 0.1674, 0.9708, 0.6467, 0.6089, 0.9800, 0.0513, 0.3044, - 0.6979, 0.6719, 0.1842, 0.8617, 0.2669, 0.6961, 0.6593, - 0.1234, 0.4151, 0.5864, 0.5859, 0.0830, 0.1869, 0.0506, - 0.4507, 0.0944, 
0.5583, 0.8982, 0.5055, 0.6171, 0.7678, - 0.4718, 0.9977, 0.2777, 0.7559, 0.1953, 0.4405, 0.9006, - 0.3125, 0.6338, 0.3459, 0.2249, 0.6948, 0.3347, 0.4623, - 0.5826, 0.5751, 0.0351, 0.6388, 0.1795, 0.7330, 0.5707, - 0.5527, 0.2760, 0.8521, 0.1919, 0.1692, 0.4703, 0.1457, - 0.4745, 0.3853, 0.5193, 0.8361, 0.1959, 0.4596, 0.5784, - 0.0974, 0.3018, 0.0027, 0.0284, 0.6379, 0.0985, 0.6697, - 0.1617, 0.9408, 0.1225, 0.4828, 0.1493, 0.2255, 0.9622, - 0.3456, 0.5549, 0.7083, 0.8183, 0.4017, 0.9015, 0.9500, - 0.9757, 0.7762, 0.4376, 0.1119, 0.0478, 0.0482, 0.0578, - 0.7538, 0.6748, 0.4915, 0.0046, 0.4804, 0.0171, 0.2571, - 0.5740, 0.8135, 0.9212, 0.1282, 0.1633, 0.3991, 0.3795, - 0.2563, 0.7909, 0.3096, 0.9640, 0.3523, 0.8436, 0.3227, - 0.0600, 0.3198, 0.3035, 0.1361, 0.3922, 0.1782, 0.7833, - 0.2504, 0.4757, 0.7349, 0.8713, 0.6836, 0.3507, 0.6395, - 0.8433, 0.8479, 0.5637, 0.2767, 0.4270, 0.1521, 0.7400, - 0.4410, 0.9217, 0.8199, 0.3646, 0.7246, 0.6747, 0.1362, - 0.4476, 0.3311, 0.4522, 0.8256, 0.9839, 0.1661, 0.7065, - 0.0053, 0.7677, 0.6798, 0.6573, 0.7053, 0.0946, 0.4782, - 0.6733, 0.8968, 0.8493, 0.9722, 0.3359, 0.2513, 0.8759, - 0.7557, 0.5642, 0.6956, 0.9785, 0.2314, 0.2092, 0.6617, - 0.2157, 0.9152, 0.2913, 0.0438, 0.9309, 0.2537, 0.0994, - 0.4607, 0.6405, 0.5177, 0.7145, 0.1394, 0.3492, 0.5865, - 0.9348, 0.8342, 0.9034, 0.5205, 0.0516, 0.1632, 0.3433, - 0.4758, 0.2442, 0.7218, 0.3687, 0.3685, 0.7796, 0.4166, - 0.7390, 0.4015, 0.0501, 0.4473, 0.1656, 0.4610, 0.3317, - 0.3754, 0.6628, 0.9353, 0.1661, 0.2491, 0.3244, 0.5026, - 0.2276, 0.1611, 0.0412, 0.1485, 0.2596, 0.3703, 0.5359, - 0.7023, 0.3612, 0.9260, 0.3044, 0.4320, 0.5730, 0.4544, - 0.7409, 0.6046, 0.2126, 0.8407, 0.5541, 0.9635, 0.4726, - 0.7284, 0.1079, 0.8545, 0.8839, 0.1658, 0.6432, 0.3731, - 0.4876, 0.5276, 0.8205, 0.3497, 0.2810, 0.3329, 0.4371, - 0.6824, 0.9070, 0.8115, 0.6630, 0.8608, 0.8445, 0.6452, - 0.0464, 0.2074, 0.6033, 0.8590, 0.4426, 0.1662, 0.9143, - 0.8420, 0.9435, 0.3667, 0.0587, 0.3344, 0.5940, 0.9391, - 0.3098, 0.3277, 0.3122, 0.0248, 0.5693, 0.1331]), + col_indices=tensor([5164, 5014, 8214, 7408, 7189, 1684, 5605, 5380, 8291, + 2108, 7787, 2390, 8050, 3945, 323, 1593, 9975, 7469, + 9792, 4149, 871, 4387, 5301, 7715, 9119, 5888, 295, + 4914, 5328, 4062, 7869, 2788, 6432, 6896, 1775, 7091, + 2661, 3222, 4908, 7195, 1665, 8351, 9893, 3743, 5436, + 2809, 9630, 1622, 5321, 1592, 8813, 8614, 7700, 8002, + 3712, 6099, 4122, 3310, 5969, 4767, 5305, 6118, 1703, + 473, 6265, 8292, 427, 5095, 342, 5438, 1041, 5095, + 2360, 3728, 2460, 2969, 8940, 1045, 3487, 7564, 4400, + 3528, 9967, 9653, 3225, 5904, 531, 5595, 8523, 2376, + 3710, 7970, 1799, 3700, 1127, 2527, 3145, 7531, 4294, + 2867, 2401, 7906, 5920, 7020, 6742, 677, 783, 1993, + 6573, 4513, 4593, 2757, 1116, 4862, 6014, 7351, 538, + 8121, 5008, 4220, 8873, 8781, 2580, 8966, 1462, 7460, + 2945, 8650, 1448, 8331, 5855, 7858, 6630, 14, 2408, + 1550, 7593, 2396, 9195, 1063, 8155, 2843, 6978, 7099, + 2692, 4742, 4670, 5714, 2183, 9324, 6142, 4343, 5836, + 6020, 7748, 3715, 1364, 4681, 1287, 1882, 8847, 8533, + 5905, 7892, 7080, 8112, 5074, 958, 6693, 8396, 2280, + 2261, 8100, 2332, 1158, 6260, 2668, 6466, 7466, 839, + 2845, 1866, 4548, 9628, 8503, 5403, 8227, 2896, 967, + 5256, 8201, 9900, 4749, 3479, 3578, 7851, 6466, 9639, + 4700, 9148, 2152, 8564, 8364, 1395, 1953, 1048, 5672, + 2941, 7604, 3805, 4008, 7407, 1648, 5843, 838, 9487, + 9489, 3465, 5317, 8500, 3490, 7811, 7411, 1679, 4308, + 2932, 1814, 6781, 7121, 6539, 991, 80, 3511, 9215, + 2313, 9324, 9929, 9584, 2328, 
4071, 7859, 9184, 2928, + 9630, 4925, 8078, 8751, 5067, 7616, 7991, 8337, 7745, + 9708, 1181, 655, 5321, 9877, 9978, 9969, 6588, 618, + 5184, 8217, 903, 5059, 6663, 4147, 1779, 2205, 5999, + 5239, 9463, 6252, 8902, 3184, 1718, 3667, 483, 7325, + 8040, 8301, 1129, 3056, 863, 9473, 4930, 4624, 6093, + 3258, 6658, 7229, 5826, 3645, 6235, 2794, 9345, 7800, + 4682, 8507, 3955, 1159, 1796, 7751, 2454, 765, 1280, + 4371, 122, 4191, 6024, 8262, 2785, 9254, 5050, 2307, + 90, 7154, 415, 4690, 5768, 544, 1734, 4836, 6687, + 5255, 3711, 713, 6169, 5269, 3423, 6218, 2747, 2428, + 6455, 329, 9000, 4343, 155, 4593, 8612, 8035, 5387, + 6489, 9297, 5463, 5415, 3973, 4738, 8776, 7734, 6327, + 5081, 6652, 7559, 6409, 1007, 5142, 5757, 6957, 2310, + 936, 5634, 1234, 8187, 6350, 8579, 6749, 6079, 3275, + 8208, 4069, 4569, 7298, 790, 2158, 4058, 1020, 5059, + 7742, 9469, 9658, 4422, 9882, 2406, 9383, 1980, 8260, + 8950, 135, 2699, 1394, 9198, 8874, 1616, 8012, 8675, + 3471, 8133, 307, 7698, 7045, 9960, 2318, 5349, 3893, + 118, 3733, 6050, 9702, 7146, 4002, 786, 112, 4454, + 1651, 8278, 6208, 5003, 2984, 8876, 962, 8650, 4908, + 1083, 4974, 9102, 9377, 7168, 7295, 2214, 3030, 4443, + 2880, 4133, 2725, 3978, 7831, 898, 3321, 1143, 4338, + 7418, 2841, 1505, 6526, 8654, 3875, 6272, 8820, 5675, + 559, 7991, 7759, 3522, 8305, 3279, 2287, 5257, 4276, + 5519, 1436, 3906, 2748, 388, 6185, 2624, 6937, 8828, + 9835, 5631, 4204, 7488, 678, 5042, 8200, 9119, 8409, + 402, 7583, 235, 4186, 7812, 1976, 2196, 589, 6216, + 2780, 6449, 9342, 5762, 7738, 9044, 8413, 4539, 3955, + 8617, 7359, 1822, 1913, 3448, 3685, 7449, 1045, 7085, + 1923, 230, 2259, 154, 1151, 8945, 4346, 7951, 2009, + 8683, 2642, 802, 4664, 1209, 4050, 1644, 8672, 5209, + 8285, 7389, 9598, 627, 2295, 2901, 2694, 5835, 6529, + 2381, 9477, 7741, 3909, 837, 8040, 8451, 1073, 4177, + 2582, 4563, 4897, 6150, 7477, 7690, 6127, 1303, 8316, + 1094, 277, 9443, 6908, 431, 7236, 7243, 5421, 4744, + 7003, 7774, 9747, 9706, 1487, 5265, 6772, 7380, 5881, + 3932, 7300, 9912, 1317, 1560, 2115, 2953, 1282, 2752, + 7300, 5106, 3698, 5723, 4565, 2754, 832, 8900, 9890, + 4606, 1424, 6048, 854, 2358, 3142, 3019, 9615, 3232, + 176, 3275, 5665, 5198, 3789, 1432, 5416, 1528, 6498, + 3572, 4537, 543, 9193, 43, 2436, 9663, 3322, 1325, + 6780, 2545, 1336, 7719, 4052, 9860, 4812, 2218, 7830, + 8183, 3127, 1689, 5483, 9816, 280, 3187, 7867, 8297, + 6551, 9622, 1793, 99, 3506, 1611, 8062, 185, 8043, + 2619, 9658, 7562, 5798, 1725, 8469, 2437, 4413, 479, + 9191, 1399, 7293, 1442, 6408, 8654, 2157, 7887, 6312, + 5834, 6792, 6246, 548, 1615, 9316, 9900, 5356, 4062, + 8416, 941, 912, 6136, 2457, 4981, 1179, 3648, 4797, + 284, 443, 8118, 3334, 6615, 2534, 4058, 7276, 9902, + 5121, 3151, 4719, 138, 6928, 3909, 9170, 6242, 9774, + 9728, 296, 1798, 3207, 1500, 9518, 5255, 7744, 8630, + 4587, 5444, 1194, 7424, 5053, 9930, 9096, 5428, 7303, + 7203, 9435, 2424, 1594, 1980, 928, 6374, 4531, 3798, + 8402, 3410, 2674, 2030, 5337, 3373, 2899, 7755, 3745, + 4583, 6303, 6271, 8854, 1351, 1797, 641, 2720, 8702, + 8846, 738, 9098, 3411, 2497, 3255, 3523, 2754, 7671, + 478, 5610, 8326, 1924, 2033, 6277, 9284, 8275, 7536, + 4790, 314, 8434, 3520, 6452, 6974, 3071, 8659, 1970, + 5792, 3523, 4801, 6160, 1016, 4940, 5165, 5954, 7766, + 9508, 6283, 3215, 322, 7560, 5180, 4002, 2974, 4829, + 3149, 64, 2054, 9547, 2638, 8230, 5888, 6520, 139, + 4829, 8009, 891, 6782, 75, 6384, 4600, 9499, 368, + 6433, 3100, 62, 5714, 176, 6077, 8345, 6556, 8093, + 7894, 1584, 6971, 3933, 955, 799, 6606, 7223, 3986, + 7117, 3665, 
7001, 6281, 9379, 8186, 8077, 5321, 4969, + 2297, 1820, 4785, 6438, 9310, 6940, 8207, 7577, 6147, + 1762, 7274, 2173, 2887, 4288, 6336, 2267, 5654, 1877, + 4387, 4091, 7574, 5339, 3334, 5727, 5795, 4676, 4873, + 4881, 4922, 7697, 7533, 5889, 5550, 1960, 5410, 6260, + 8984, 9211, 6472, 7705, 4518, 4933, 4037, 8516, 7051, + 7434, 9874, 792, 7732, 1121, 1904, 3041, 6897, 6528, + 9382, 6904, 2197, 703, 684, 4211, 7638, 1396, 9598, + 4722, 2606, 2333, 1857, 451, 2618, 2752, 5904, 5815, + 9170, 4005, 9933, 4475, 7914, 9654, 9931, 4901, 6494, + 2288, 6604, 5490, 7751, 7161, 1388, 7042, 5562, 2736, + 4097, 3933, 144, 5152, 4116, 3435, 5240, 3141, 7651, + 2916, 845, 5993, 1686, 8827, 2738, 243, 5820, 6080, + 2621, 7001, 6526, 5517, 6790, 4005, 8160, 4388, 2978, + 2027, 6982, 3610, 8562, 1151, 8254, 7542, 4516, 7019, + 1597, 5782, 956, 6669, 4960, 7339, 1917, 948, 469, + 5435, 4929, 2280, 5810, 8128, 162, 4255, 293, 8166, + 1683, 5976, 7531, 4696, 8458, 7699, 1547, 31, 2531, + 6377, 1781, 3529, 1648, 9654, 179, 1525, 3250, 6095, + 3778, 517, 5582, 6448, 923, 1640, 2794, 6864, 4327, + 6109]), + values=tensor([4.0894e-01, 5.8274e-01, 1.0001e-01, 5.5598e-01, + 2.4672e-01, 1.9755e-01, 8.2091e-01, 7.0576e-01, + 4.5244e-01, 1.6010e-01, 5.7075e-01, 9.1330e-01, + 7.0918e-02, 9.8239e-01, 2.7952e-01, 3.4202e-01, + 5.9213e-01, 7.1230e-01, 8.0731e-01, 7.1013e-02, + 1.4535e-01, 8.1639e-02, 2.2992e-02, 8.4328e-01, + 8.9992e-01, 3.4183e-01, 2.1527e-01, 7.5802e-01, + 2.9101e-02, 1.5313e-01, 1.6619e-01, 5.7122e-01, + 7.9630e-01, 8.3344e-01, 1.9528e-02, 7.9254e-01, + 4.5549e-01, 4.3718e-01, 1.8439e-01, 9.7946e-01, + 4.7430e-01, 4.5919e-01, 9.3638e-01, 9.3367e-01, + 5.9734e-01, 9.0725e-01, 4.8254e-01, 5.4409e-02, + 2.1436e-01, 4.6783e-01, 5.3136e-01, 6.8794e-01, + 9.6213e-01, 4.1559e-01, 1.1413e-01, 2.4341e-01, + 7.9806e-01, 9.2222e-01, 6.5033e-01, 2.4043e-01, + 1.3622e-01, 3.5399e-02, 5.3369e-01, 6.4089e-01, + 9.1741e-01, 8.7783e-01, 4.8646e-01, 1.3798e-01, + 8.3382e-01, 5.4621e-01, 1.0795e-01, 2.9808e-01, + 1.4304e-01, 8.8996e-01, 5.0273e-01, 5.7297e-02, + 2.8710e-01, 9.8689e-01, 2.0435e-01, 2.5281e-01, + 2.5011e-01, 5.7144e-01, 4.8076e-01, 8.6385e-01, + 7.3395e-01, 8.9446e-01, 3.1732e-01, 9.9132e-01, + 2.3537e-01, 2.7118e-01, 9.4902e-01, 7.1211e-01, + 4.5125e-01, 3.5412e-01, 5.6490e-01, 8.7166e-01, + 7.6578e-01, 5.6882e-01, 8.5908e-01, 1.4375e-02, + 6.7832e-01, 1.0632e-01, 6.9835e-01, 8.6425e-01, + 6.0537e-01, 5.1290e-01, 2.5591e-01, 4.5431e-01, + 3.0884e-01, 2.5146e-01, 7.6164e-01, 9.8501e-01, + 9.4361e-01, 1.4966e-01, 3.8185e-01, 7.8881e-01, + 7.5426e-01, 9.3050e-01, 2.1693e-03, 9.1242e-01, + 9.9614e-01, 6.1988e-01, 8.5159e-01, 5.5741e-01, + 3.5577e-01, 6.7235e-01, 7.7937e-01, 4.0132e-01, + 4.7445e-01, 2.8636e-01, 8.5029e-01, 6.1639e-01, + 4.2199e-01, 7.2784e-01, 4.0906e-01, 9.9752e-01, + 6.2927e-02, 5.5887e-02, 7.6696e-01, 8.5272e-01, + 3.5616e-01, 1.7774e-01, 3.7619e-03, 5.1377e-01, + 6.7491e-01, 5.0433e-01, 8.6182e-01, 8.9724e-01, + 8.2984e-01, 6.1205e-01, 3.9839e-01, 7.6438e-01, + 4.2370e-01, 9.6523e-01, 3.2476e-01, 8.6659e-01, + 3.7647e-01, 6.1772e-01, 7.9455e-01, 6.5134e-01, + 1.3352e-02, 5.2857e-01, 5.4057e-01, 6.1086e-02, + 2.3965e-01, 3.5309e-01, 9.6176e-01, 2.5046e-01, + 9.9705e-01, 8.7395e-01, 5.3122e-01, 9.9433e-01, + 8.1238e-01, 8.6421e-03, 2.7089e-01, 5.7344e-01, + 4.9578e-02, 7.5159e-01, 7.1330e-02, 4.5327e-02, + 1.4165e-01, 8.4999e-02, 8.7349e-01, 3.8633e-01, + 8.6717e-01, 6.2412e-02, 3.5330e-01, 5.5040e-01, + 6.3873e-01, 4.0969e-01, 4.3263e-01, 9.4857e-01, + 2.0115e-01, 9.9246e-01, 2.6795e-01, 
7.0640e-01, + 5.0328e-01, 3.6843e-01, 9.6134e-02, 2.1408e-01, + 1.9101e-02, 8.6657e-01, 6.8164e-01, 6.4434e-01, + 6.3950e-01, 8.2129e-01, 4.7860e-01, 7.7644e-01, + 4.3503e-01, 9.7628e-01, 6.5282e-01, 1.9258e-01, + 4.4757e-01, 6.7717e-01, 8.8517e-01, 3.7940e-01, + 8.9754e-01, 7.7245e-02, 8.5143e-02, 5.7048e-01, + 8.9039e-01, 3.8963e-01, 3.0000e-01, 2.1237e-01, + 5.5496e-01, 4.6138e-01, 7.3517e-01, 1.1538e-01, + 1.4946e-02, 4.1932e-01, 1.5041e-01, 9.0340e-01, + 3.7811e-01, 2.4993e-01, 7.9005e-01, 7.1657e-01, + 8.0893e-01, 4.3493e-01, 6.2771e-01, 6.5094e-01, + 1.0476e-01, 4.9120e-01, 4.7346e-01, 5.7681e-02, + 9.1805e-01, 4.3716e-01, 6.1096e-01, 4.4589e-01, + 3.6246e-01, 7.6273e-01, 8.8340e-01, 8.1913e-01, + 1.9666e-02, 5.7679e-02, 8.1136e-01, 7.9759e-01, + 6.8084e-01, 9.6582e-01, 6.6891e-01, 5.0207e-01, + 2.6635e-01, 3.0523e-01, 9.4666e-02, 1.9776e-01, + 5.2916e-01, 1.0254e-01, 9.9880e-01, 8.6894e-01, + 1.0781e-01, 3.5508e-01, 4.7592e-01, 9.2893e-02, + 9.2081e-01, 5.4115e-01, 2.8060e-01, 7.9634e-01, + 5.1729e-01, 8.2883e-01, 7.5547e-01, 5.1412e-01, + 1.9968e-01, 8.5321e-01, 4.0251e-02, 2.5542e-01, + 5.1705e-01, 4.9638e-01, 5.2979e-01, 9.5171e-01, + 5.4373e-01, 6.3929e-01, 4.8420e-02, 6.3484e-02, + 1.0822e-01, 3.4350e-02, 5.5253e-02, 9.1785e-01, + 7.4530e-01, 8.7828e-02, 1.1568e-01, 1.2826e-01, + 8.7388e-02, 4.6536e-01, 3.8950e-01, 4.8037e-01, + 7.2754e-02, 5.9334e-01, 5.2116e-01, 7.5971e-01, + 8.6063e-01, 6.6925e-01, 7.5840e-01, 1.9221e-01, + 2.2819e-01, 5.5368e-01, 7.1787e-01, 3.4261e-02, + 1.1434e-01, 2.4902e-01, 2.3587e-01, 3.3240e-01, + 9.8225e-01, 3.6058e-01, 4.9739e-01, 6.1269e-01, + 1.7349e-01, 4.0476e-01, 4.3105e-02, 5.9398e-01, + 1.1729e-01, 5.9161e-01, 3.0933e-01, 9.7659e-01, + 7.7386e-01, 5.0122e-01, 3.5753e-01, 4.3112e-01, + 9.9500e-01, 9.2487e-01, 6.9221e-02, 8.7997e-01, + 1.4840e-01, 2.4266e-01, 1.4810e-01, 6.0954e-01, + 4.1162e-01, 9.3216e-01, 6.9320e-01, 9.3928e-01, + 3.1548e-01, 5.2168e-01, 9.2754e-01, 8.1486e-01, + 8.5823e-01, 5.5826e-01, 2.1093e-01, 9.2752e-01, + 7.4682e-01, 6.0051e-02, 9.5614e-01, 1.0734e-01, + 9.3432e-01, 6.9265e-01, 2.1710e-01, 6.5546e-01, + 8.9375e-01, 9.5434e-01, 8.7263e-01, 6.5068e-01, + 9.0741e-01, 3.6530e-01, 7.3210e-01, 4.2256e-01, + 3.7208e-01, 8.6877e-01, 6.8114e-01, 1.3169e-01, + 2.6109e-01, 1.4870e-01, 2.8545e-01, 5.1676e-01, + 3.3668e-01, 8.5952e-01, 2.8327e-01, 8.8990e-01, + 2.8873e-01, 4.8784e-01, 7.4932e-01, 5.9967e-01, + 9.3077e-01, 1.3852e-02, 4.3073e-01, 7.7902e-02, + 7.5222e-02, 1.9341e-01, 5.1533e-01, 3.9688e-01, + 9.8947e-01, 1.8762e-01, 8.8740e-02, 6.7834e-01, + 7.8140e-01, 9.1747e-01, 2.2166e-01, 6.8925e-01, + 1.1487e-01, 7.2926e-01, 9.0233e-01, 8.4992e-01, + 7.7937e-02, 6.0706e-02, 8.8192e-01, 9.4724e-01, + 3.4270e-01, 3.9980e-01, 6.7014e-01, 2.8075e-01, + 5.6090e-01, 4.9750e-01, 1.4793e-01, 6.9616e-02, + 1.2870e-01, 5.0923e-01, 9.2758e-01, 4.0954e-01, + 2.9094e-01, 1.2183e-01, 9.4085e-01, 9.7350e-01, + 7.7277e-01, 4.3174e-01, 7.0809e-01, 3.7859e-01, + 5.3678e-01, 8.1831e-01, 1.5176e-01, 5.9930e-01, + 9.7115e-01, 2.4797e-01, 5.3137e-01, 7.3717e-01, + 1.1118e-01, 3.3328e-01, 3.5010e-01, 9.4197e-01, + 4.9545e-01, 2.8102e-02, 2.3249e-01, 1.3621e-01, + 8.4379e-01, 5.8956e-01, 6.4469e-01, 6.2175e-01, + 5.8167e-01, 9.9478e-01, 3.7495e-03, 2.9909e-01, + 6.0214e-01, 7.2313e-01, 3.7477e-01, 4.2292e-01, + 6.4713e-01, 4.6010e-01, 8.6745e-02, 2.1639e-02, + 4.0429e-02, 9.8974e-01, 1.2118e-01, 5.1650e-01, + 4.9975e-01, 3.9394e-01, 6.4617e-01, 9.0253e-01, + 9.8159e-01, 1.6534e-01, 6.9111e-01, 5.0225e-02, + 5.0400e-02, 9.0474e-01, 8.1500e-01, 
3.1023e-01, + 3.7266e-02, 5.8378e-01, 2.7933e-02, 6.1578e-01, + 1.0829e-01, 6.9696e-01, 2.5574e-01, 4.6162e-01, + 9.8664e-01, 2.9086e-01, 6.4012e-01, 1.4159e-01, + 8.3428e-01, 6.1565e-01, 1.5366e-01, 9.6348e-01, + 4.0966e-01, 4.8415e-01, 5.8438e-01, 2.1891e-01, + 4.7288e-01, 3.0498e-01, 7.9715e-01, 1.3969e-01, + 2.4748e-01, 3.8520e-01, 4.3714e-01, 4.1827e-01, + 5.8424e-01, 7.3540e-01, 5.7507e-01, 6.9330e-01, + 4.3550e-01, 7.1386e-01, 7.0768e-01, 1.1949e-01, + 6.4032e-01, 7.9486e-01, 7.6524e-01, 7.1039e-01, + 5.7516e-01, 5.8960e-01, 2.9066e-01, 1.3653e-01, + 3.2759e-01, 9.3242e-01, 2.6758e-01, 9.6697e-01, + 5.9172e-02, 1.0175e-01, 4.4119e-01, 4.9519e-01, + 7.9361e-01, 6.7683e-01, 8.5985e-02, 2.3760e-01, + 7.9722e-01, 5.9807e-01, 7.8945e-01, 8.9307e-01, + 3.1571e-01, 8.1729e-01, 4.5486e-01, 9.1640e-01, + 5.6777e-01, 2.8348e-01, 3.5190e-01, 5.1105e-02, + 4.0959e-01, 8.1676e-01, 8.7608e-01, 8.5927e-01, + 7.1751e-02, 1.3622e-01, 7.3856e-04, 9.1464e-01, + 7.4105e-01, 1.8627e-01, 5.3778e-01, 6.6493e-02, + 5.0929e-01, 7.8137e-01, 8.5716e-01, 8.4558e-01, + 4.7604e-01, 7.2100e-01, 4.7412e-01, 2.5388e-01, + 4.2401e-01, 4.8072e-01, 3.6366e-01, 9.8602e-01, + 1.1481e-01, 5.2800e-01, 5.3392e-02, 4.7370e-01, + 7.5174e-01, 8.1419e-01, 8.4000e-01, 1.4341e-01, + 9.6726e-01, 3.7731e-01, 1.0682e-01, 2.6834e-01, + 8.3644e-01, 1.9659e-01, 8.7613e-02, 9.9349e-01, + 1.8690e-01, 6.6413e-01, 4.2211e-01, 9.2056e-01, + 6.0902e-01, 7.3642e-01, 7.3980e-01, 6.5955e-01, + 1.5784e-01, 6.1023e-01, 1.1485e-01, 9.7705e-03, + 1.0572e-01, 3.3247e-01, 3.4286e-01, 6.2421e-01, + 2.2510e-01, 4.2059e-01, 1.1972e-01, 5.0521e-01, + 9.5224e-01, 3.3911e-01, 6.8732e-01, 4.0729e-01, + 8.3521e-01, 6.2139e-01, 9.9870e-01, 1.8769e-01, + 4.8983e-01, 9.2862e-01, 9.7802e-01, 2.7507e-01, + 4.0103e-02, 2.1245e-01, 2.0174e-01, 8.5345e-01, + 2.7619e-01, 6.8601e-01, 9.1421e-01, 7.3642e-01, + 3.3032e-01, 4.5368e-01, 3.8043e-01, 5.7643e-01, + 8.3639e-01, 8.0191e-01, 5.6781e-01, 6.0869e-01, + 4.8752e-01, 6.4961e-01, 8.9269e-01, 8.6712e-01, + 5.2297e-01, 9.0969e-01, 4.7633e-01, 5.4207e-01, + 8.2832e-01, 4.3266e-01, 7.3135e-01, 5.4331e-01, + 8.5099e-01, 3.4706e-02, 1.6109e-01, 9.2806e-01, + 1.5954e-01, 9.5769e-01, 1.8777e-02, 7.5505e-01, + 5.7525e-01, 6.2967e-01, 2.7744e-01, 7.9418e-01, + 3.8017e-01, 7.6002e-01, 1.7390e-01, 3.2046e-02, + 2.5235e-01, 2.4512e-01, 8.3269e-01, 7.2001e-01, + 7.7456e-03, 4.6020e-01, 1.5040e-01, 4.5890e-01, + 6.4873e-01, 8.2394e-01, 5.2198e-02, 5.0519e-01, + 6.9360e-01, 2.3311e-01, 3.7009e-02, 2.8397e-01, + 2.8403e-02, 3.5982e-01, 5.5695e-01, 7.4590e-01, + 4.9669e-01, 5.0174e-01, 4.6801e-01, 1.7821e-01, + 4.5014e-02, 6.4983e-01, 9.0782e-01, 1.9282e-01, + 3.6519e-01, 9.8159e-01, 1.4949e-01, 8.2867e-01, + 2.1216e-01, 4.7033e-02, 2.7033e-03, 8.6883e-01, + 6.6889e-01, 6.4411e-01, 1.6944e-01, 2.2065e-01, + 6.8890e-03, 9.9653e-01, 5.2089e-01, 4.0022e-01, + 3.0635e-01, 5.1870e-01, 9.0873e-01, 6.1765e-02, + 3.6346e-01, 3.3309e-01, 2.2863e-01, 4.3468e-01, + 8.6755e-01, 7.6640e-01, 3.0385e-01, 3.5763e-01, + 4.1656e-01, 1.4362e-01, 9.6354e-01, 5.7426e-01, + 7.8689e-01, 2.4730e-01, 7.7422e-01, 3.5805e-02, + 1.9351e-01, 5.0990e-01, 6.2640e-01, 3.7221e-01, + 3.1572e-01, 8.4142e-01, 5.0295e-01, 4.9000e-01, + 9.5176e-01, 9.2623e-02, 3.9572e-01, 3.3737e-01, + 4.9911e-01, 7.9045e-01, 5.2579e-01, 1.4566e-01, + 3.4194e-01, 9.7302e-01, 7.4489e-03, 2.6327e-01, + 8.2843e-01, 8.2229e-01, 8.6943e-01, 4.9570e-01, + 5.8722e-01, 9.6892e-01, 8.6864e-01, 4.1771e-01, + 2.3907e-01, 3.6921e-01, 2.6982e-01, 8.3031e-01, + 1.8733e-01, 2.0567e-01, 6.0647e-01, 
4.6222e-01, + 4.7998e-01, 3.1085e-01, 5.6119e-01, 1.5141e-01, + 2.7687e-01, 7.2627e-01, 7.3959e-01, 3.2150e-02, + 6.8320e-01, 9.6806e-01, 8.0941e-01, 3.6996e-01, + 1.0113e-01, 6.0608e-01, 2.5950e-01, 7.7470e-01, + 4.5427e-01, 7.0661e-01, 4.1646e-01, 9.6120e-01, + 2.1486e-01, 7.9020e-01, 6.9684e-01, 8.5318e-01, + 2.4751e-02, 1.9432e-01, 3.3890e-03, 3.5931e-01, + 6.0078e-01, 3.1420e-01, 7.9915e-01, 7.2995e-01, + 4.5837e-01, 4.8434e-01, 2.4191e-01, 8.7264e-01, + 4.4628e-01, 1.7433e-01, 3.9048e-01, 5.8578e-01, + 5.1387e-01, 2.3966e-01, 5.9851e-01, 6.1038e-01, + 9.8604e-01, 8.5463e-01, 7.8940e-01, 2.9878e-01, + 8.8823e-01, 2.9276e-01, 8.4526e-01, 7.0736e-01, + 1.9470e-01, 6.4877e-01, 9.0222e-01, 1.7804e-01, + 5.1116e-01, 5.3144e-01, 1.6398e-02, 1.8603e-01, + 8.4622e-01, 9.7673e-01, 8.3551e-01, 7.9152e-01, + 2.9879e-02, 3.2414e-02, 3.3248e-01, 9.0278e-01, + 7.2618e-02, 3.8623e-02, 4.5740e-01, 1.3855e-01, + 4.6681e-01, 5.3146e-01, 3.4373e-01, 3.8471e-01, + 7.6964e-01, 3.2053e-01, 4.4641e-01, 9.3560e-01, + 3.6526e-01, 1.7340e-01, 4.1921e-01, 3.2031e-01, + 1.1241e-01, 7.3094e-01, 6.8199e-01, 4.7013e-01, + 5.7060e-01, 8.6818e-01, 9.5280e-01, 2.7502e-01, + 2.5660e-01, 2.5142e-01, 8.7490e-01, 9.3681e-01, + 7.6161e-01, 3.5498e-01, 3.9613e-02, 7.6181e-01, + 9.7582e-01, 1.9366e-01, 9.0553e-02, 1.5015e-01, + 3.4037e-01, 2.7234e-01, 2.6012e-01, 5.4730e-01, + 7.5914e-01, 7.2804e-01, 8.8377e-01, 2.9417e-01, + 8.2797e-01, 7.8180e-01, 6.9616e-01, 5.9021e-01, + 3.7858e-01, 4.2900e-01, 2.6402e-01, 7.0528e-01, + 1.6843e-01, 1.3766e-01, 2.6640e-01, 1.8234e-01, + 8.9174e-01, 2.9224e-01, 9.0712e-01, 6.5247e-01, + 5.3578e-01, 4.8627e-01, 5.4907e-01, 8.4350e-01, + 6.6691e-01, 8.1816e-01, 1.7307e-01, 7.7056e-01, + 8.6383e-02, 4.5982e-01, 4.4756e-01, 4.6444e-01, + 4.6836e-01, 9.6666e-01, 8.8660e-01, 8.5342e-01, + 5.4247e-01, 8.6053e-01, 8.6540e-01, 9.2791e-01, + 2.1159e-01, 2.4351e-01, 4.9738e-01, 4.1807e-01, + 2.7549e-01, 8.2197e-01, 7.6196e-01, 8.8791e-01, + 9.1088e-01, 8.8134e-01, 6.6822e-01, 5.0556e-01, + 5.4217e-01, 3.6135e-01, 5.4197e-01, 8.7029e-01, + 1.5876e-01, 2.6070e-01, 1.4902e-01, 4.9540e-01, + 3.2658e-01, 9.2315e-01, 9.8529e-02, 8.3386e-01, + 6.6861e-01, 6.8086e-01, 4.1234e-01, 9.1040e-01, + 8.0733e-02, 8.3027e-01, 2.7401e-02, 7.0357e-01, + 7.4001e-01, 1.0754e-01, 4.0982e-01, 7.3224e-01, + 3.0803e-01, 2.1047e-01, 8.9684e-01, 2.7451e-01, + 1.7786e-01, 8.7756e-01, 9.1917e-01, 5.9533e-01, + 2.2290e-01, 8.5840e-01, 8.1366e-01, 4.6831e-01, + 3.2199e-01, 1.0599e-01, 9.0382e-01, 5.5555e-01, + 1.1648e-01, 2.6181e-01, 9.0756e-02, 7.6516e-01, + 1.3491e-01, 1.5806e-01, 6.1588e-01, 3.5698e-01, + 9.6605e-01, 7.0001e-01, 3.4645e-01, 3.6756e-01, + 8.9547e-01, 5.0293e-01, 7.8512e-02, 9.5652e-01, + 4.6658e-01, 5.7874e-01, 8.5198e-01, 1.4339e-01, + 3.6238e-01, 4.9661e-01, 7.7543e-01, 1.9030e-01, + 1.1043e-01, 5.6247e-01, 8.0300e-01, 2.2866e-01, + 1.3044e-01, 4.3849e-01, 6.0789e-02, 6.0168e-01, + 8.5059e-01, 2.1034e-02, 7.3786e-01, 5.2646e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8452, 0.1047, 0.0563, ..., 0.6079, 0.4820, 0.4351]) +tensor([0.4567, 0.4180, 0.1623, ..., 0.7337, 0.1746, 0.4398]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1188,13 +1026,389 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.998100280761719 seconds +Time: 10.518462419509888 seconds -[20.36, 20.16, 20.2, 20.32, 20.32, 20.36, 20.56, 20.6, 20.52, 20.56] -[20.68, 20.8, 21.2, 24.88, 26.6, 27.2, 27.72, 25.88, 24.68, 23.88, 23.88, 23.8, 23.88, 23.68] -14.660717248916626 
-{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 147223, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.998100280761719, 'TIME_S_1KI': 0.07470368271779354, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.2257844543456, 'W': 22.66095026687019} -[20.36, 20.16, 20.2, 20.32, 20.32, 20.36, 20.56, 20.6, 20.52, 20.56, 20.32, 20.36, 20.24, 20.2, 20.24, 20.36, 20.24, 20.2, 20.36, 20.24] -365.9799999999999 -18.298999999999996 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 147223, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.998100280761719, 'TIME_S_1KI': 0.07470368271779354, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.2257844543456, 'W': 22.66095026687019, 'J_1KI': 2.256616048133414, 'W_1KI': 0.15392262259884795, 'W_D': 4.3619502668701955, 'J_D': 63.94931951642035, 'W_D_1KI': 0.029628184909084827, 'J_D_1KI': 0.00020124698524744657} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5164, 5014, 8214, 7408, 7189, 1684, 5605, 5380, 8291, + 2108, 7787, 2390, 8050, 3945, 323, 1593, 9975, 7469, + 9792, 4149, 871, 4387, 5301, 7715, 9119, 5888, 295, + 4914, 5328, 4062, 7869, 2788, 6432, 6896, 1775, 7091, + 2661, 3222, 4908, 7195, 1665, 8351, 9893, 3743, 5436, + 2809, 9630, 1622, 5321, 1592, 8813, 8614, 7700, 8002, + 3712, 6099, 4122, 3310, 5969, 4767, 5305, 6118, 1703, + 473, 6265, 8292, 427, 5095, 342, 5438, 1041, 5095, + 2360, 3728, 2460, 2969, 8940, 1045, 3487, 7564, 4400, + 3528, 9967, 9653, 3225, 5904, 531, 5595, 8523, 2376, + 3710, 7970, 1799, 3700, 1127, 2527, 3145, 7531, 4294, + 2867, 2401, 7906, 5920, 7020, 6742, 677, 783, 1993, + 6573, 4513, 4593, 2757, 1116, 4862, 6014, 7351, 538, + 8121, 5008, 4220, 8873, 8781, 2580, 8966, 1462, 7460, + 2945, 8650, 1448, 8331, 5855, 7858, 6630, 14, 2408, + 1550, 7593, 2396, 9195, 1063, 8155, 2843, 6978, 7099, + 2692, 4742, 4670, 5714, 2183, 9324, 6142, 4343, 5836, + 6020, 7748, 3715, 1364, 4681, 1287, 1882, 8847, 8533, + 5905, 7892, 7080, 8112, 5074, 958, 6693, 8396, 2280, + 2261, 8100, 2332, 1158, 6260, 2668, 6466, 7466, 839, + 2845, 1866, 4548, 9628, 8503, 5403, 8227, 2896, 967, + 5256, 8201, 9900, 4749, 3479, 3578, 7851, 6466, 9639, + 4700, 9148, 2152, 8564, 8364, 1395, 1953, 1048, 5672, + 2941, 7604, 3805, 4008, 7407, 1648, 5843, 838, 9487, + 9489, 3465, 5317, 8500, 3490, 7811, 7411, 1679, 4308, + 2932, 1814, 6781, 7121, 6539, 991, 80, 3511, 9215, + 2313, 9324, 9929, 9584, 2328, 4071, 7859, 9184, 2928, + 9630, 4925, 8078, 8751, 5067, 7616, 7991, 8337, 7745, + 9708, 1181, 655, 5321, 9877, 9978, 9969, 6588, 618, + 5184, 8217, 903, 5059, 6663, 4147, 1779, 2205, 5999, + 5239, 9463, 6252, 8902, 3184, 1718, 3667, 483, 7325, + 8040, 8301, 1129, 3056, 863, 9473, 4930, 4624, 6093, + 3258, 6658, 7229, 5826, 3645, 6235, 2794, 9345, 7800, + 4682, 8507, 3955, 1159, 1796, 7751, 2454, 765, 1280, + 4371, 122, 4191, 6024, 8262, 
2785, 9254, 5050, 2307, + 90, 7154, 415, 4690, 5768, 544, 1734, 4836, 6687, + 5255, 3711, 713, 6169, 5269, 3423, 6218, 2747, 2428, + 6455, 329, 9000, 4343, 155, 4593, 8612, 8035, 5387, + 6489, 9297, 5463, 5415, 3973, 4738, 8776, 7734, 6327, + 5081, 6652, 7559, 6409, 1007, 5142, 5757, 6957, 2310, + 936, 5634, 1234, 8187, 6350, 8579, 6749, 6079, 3275, + 8208, 4069, 4569, 7298, 790, 2158, 4058, 1020, 5059, + 7742, 9469, 9658, 4422, 9882, 2406, 9383, 1980, 8260, + 8950, 135, 2699, 1394, 9198, 8874, 1616, 8012, 8675, + 3471, 8133, 307, 7698, 7045, 9960, 2318, 5349, 3893, + 118, 3733, 6050, 9702, 7146, 4002, 786, 112, 4454, + 1651, 8278, 6208, 5003, 2984, 8876, 962, 8650, 4908, + 1083, 4974, 9102, 9377, 7168, 7295, 2214, 3030, 4443, + 2880, 4133, 2725, 3978, 7831, 898, 3321, 1143, 4338, + 7418, 2841, 1505, 6526, 8654, 3875, 6272, 8820, 5675, + 559, 7991, 7759, 3522, 8305, 3279, 2287, 5257, 4276, + 5519, 1436, 3906, 2748, 388, 6185, 2624, 6937, 8828, + 9835, 5631, 4204, 7488, 678, 5042, 8200, 9119, 8409, + 402, 7583, 235, 4186, 7812, 1976, 2196, 589, 6216, + 2780, 6449, 9342, 5762, 7738, 9044, 8413, 4539, 3955, + 8617, 7359, 1822, 1913, 3448, 3685, 7449, 1045, 7085, + 1923, 230, 2259, 154, 1151, 8945, 4346, 7951, 2009, + 8683, 2642, 802, 4664, 1209, 4050, 1644, 8672, 5209, + 8285, 7389, 9598, 627, 2295, 2901, 2694, 5835, 6529, + 2381, 9477, 7741, 3909, 837, 8040, 8451, 1073, 4177, + 2582, 4563, 4897, 6150, 7477, 7690, 6127, 1303, 8316, + 1094, 277, 9443, 6908, 431, 7236, 7243, 5421, 4744, + 7003, 7774, 9747, 9706, 1487, 5265, 6772, 7380, 5881, + 3932, 7300, 9912, 1317, 1560, 2115, 2953, 1282, 2752, + 7300, 5106, 3698, 5723, 4565, 2754, 832, 8900, 9890, + 4606, 1424, 6048, 854, 2358, 3142, 3019, 9615, 3232, + 176, 3275, 5665, 5198, 3789, 1432, 5416, 1528, 6498, + 3572, 4537, 543, 9193, 43, 2436, 9663, 3322, 1325, + 6780, 2545, 1336, 7719, 4052, 9860, 4812, 2218, 7830, + 8183, 3127, 1689, 5483, 9816, 280, 3187, 7867, 8297, + 6551, 9622, 1793, 99, 3506, 1611, 8062, 185, 8043, + 2619, 9658, 7562, 5798, 1725, 8469, 2437, 4413, 479, + 9191, 1399, 7293, 1442, 6408, 8654, 2157, 7887, 6312, + 5834, 6792, 6246, 548, 1615, 9316, 9900, 5356, 4062, + 8416, 941, 912, 6136, 2457, 4981, 1179, 3648, 4797, + 284, 443, 8118, 3334, 6615, 2534, 4058, 7276, 9902, + 5121, 3151, 4719, 138, 6928, 3909, 9170, 6242, 9774, + 9728, 296, 1798, 3207, 1500, 9518, 5255, 7744, 8630, + 4587, 5444, 1194, 7424, 5053, 9930, 9096, 5428, 7303, + 7203, 9435, 2424, 1594, 1980, 928, 6374, 4531, 3798, + 8402, 3410, 2674, 2030, 5337, 3373, 2899, 7755, 3745, + 4583, 6303, 6271, 8854, 1351, 1797, 641, 2720, 8702, + 8846, 738, 9098, 3411, 2497, 3255, 3523, 2754, 7671, + 478, 5610, 8326, 1924, 2033, 6277, 9284, 8275, 7536, + 4790, 314, 8434, 3520, 6452, 6974, 3071, 8659, 1970, + 5792, 3523, 4801, 6160, 1016, 4940, 5165, 5954, 7766, + 9508, 6283, 3215, 322, 7560, 5180, 4002, 2974, 4829, + 3149, 64, 2054, 9547, 2638, 8230, 5888, 6520, 139, + 4829, 8009, 891, 6782, 75, 6384, 4600, 9499, 368, + 6433, 3100, 62, 5714, 176, 6077, 8345, 6556, 8093, + 7894, 1584, 6971, 3933, 955, 799, 6606, 7223, 3986, + 7117, 3665, 7001, 6281, 9379, 8186, 8077, 5321, 4969, + 2297, 1820, 4785, 6438, 9310, 6940, 8207, 7577, 6147, + 1762, 7274, 2173, 2887, 4288, 6336, 2267, 5654, 1877, + 4387, 4091, 7574, 5339, 3334, 5727, 5795, 4676, 4873, + 4881, 4922, 7697, 7533, 5889, 5550, 1960, 5410, 6260, + 8984, 9211, 6472, 7705, 4518, 4933, 4037, 8516, 7051, + 7434, 9874, 792, 7732, 1121, 1904, 3041, 6897, 6528, + 9382, 6904, 2197, 703, 684, 4211, 7638, 1396, 9598, + 4722, 2606, 
2333, 1857, 451, 2618, 2752, 5904, 5815, + 9170, 4005, 9933, 4475, 7914, 9654, 9931, 4901, 6494, + 2288, 6604, 5490, 7751, 7161, 1388, 7042, 5562, 2736, + 4097, 3933, 144, 5152, 4116, 3435, 5240, 3141, 7651, + 2916, 845, 5993, 1686, 8827, 2738, 243, 5820, 6080, + 2621, 7001, 6526, 5517, 6790, 4005, 8160, 4388, 2978, + 2027, 6982, 3610, 8562, 1151, 8254, 7542, 4516, 7019, + 1597, 5782, 956, 6669, 4960, 7339, 1917, 948, 469, + 5435, 4929, 2280, 5810, 8128, 162, 4255, 293, 8166, + 1683, 5976, 7531, 4696, 8458, 7699, 1547, 31, 2531, + 6377, 1781, 3529, 1648, 9654, 179, 1525, 3250, 6095, + 3778, 517, 5582, 6448, 923, 1640, 2794, 6864, 4327, + 6109]), + values=tensor([4.0894e-01, 5.8274e-01, 1.0001e-01, 5.5598e-01, + 2.4672e-01, 1.9755e-01, 8.2091e-01, 7.0576e-01, + 4.5244e-01, 1.6010e-01, 5.7075e-01, 9.1330e-01, + 7.0918e-02, 9.8239e-01, 2.7952e-01, 3.4202e-01, + 5.9213e-01, 7.1230e-01, 8.0731e-01, 7.1013e-02, + 1.4535e-01, 8.1639e-02, 2.2992e-02, 8.4328e-01, + 8.9992e-01, 3.4183e-01, 2.1527e-01, 7.5802e-01, + 2.9101e-02, 1.5313e-01, 1.6619e-01, 5.7122e-01, + 7.9630e-01, 8.3344e-01, 1.9528e-02, 7.9254e-01, + 4.5549e-01, 4.3718e-01, 1.8439e-01, 9.7946e-01, + 4.7430e-01, 4.5919e-01, 9.3638e-01, 9.3367e-01, + 5.9734e-01, 9.0725e-01, 4.8254e-01, 5.4409e-02, + 2.1436e-01, 4.6783e-01, 5.3136e-01, 6.8794e-01, + 9.6213e-01, 4.1559e-01, 1.1413e-01, 2.4341e-01, + 7.9806e-01, 9.2222e-01, 6.5033e-01, 2.4043e-01, + 1.3622e-01, 3.5399e-02, 5.3369e-01, 6.4089e-01, + 9.1741e-01, 8.7783e-01, 4.8646e-01, 1.3798e-01, + 8.3382e-01, 5.4621e-01, 1.0795e-01, 2.9808e-01, + 1.4304e-01, 8.8996e-01, 5.0273e-01, 5.7297e-02, + 2.8710e-01, 9.8689e-01, 2.0435e-01, 2.5281e-01, + 2.5011e-01, 5.7144e-01, 4.8076e-01, 8.6385e-01, + 7.3395e-01, 8.9446e-01, 3.1732e-01, 9.9132e-01, + 2.3537e-01, 2.7118e-01, 9.4902e-01, 7.1211e-01, + 4.5125e-01, 3.5412e-01, 5.6490e-01, 8.7166e-01, + 7.6578e-01, 5.6882e-01, 8.5908e-01, 1.4375e-02, + 6.7832e-01, 1.0632e-01, 6.9835e-01, 8.6425e-01, + 6.0537e-01, 5.1290e-01, 2.5591e-01, 4.5431e-01, + 3.0884e-01, 2.5146e-01, 7.6164e-01, 9.8501e-01, + 9.4361e-01, 1.4966e-01, 3.8185e-01, 7.8881e-01, + 7.5426e-01, 9.3050e-01, 2.1693e-03, 9.1242e-01, + 9.9614e-01, 6.1988e-01, 8.5159e-01, 5.5741e-01, + 3.5577e-01, 6.7235e-01, 7.7937e-01, 4.0132e-01, + 4.7445e-01, 2.8636e-01, 8.5029e-01, 6.1639e-01, + 4.2199e-01, 7.2784e-01, 4.0906e-01, 9.9752e-01, + 6.2927e-02, 5.5887e-02, 7.6696e-01, 8.5272e-01, + 3.5616e-01, 1.7774e-01, 3.7619e-03, 5.1377e-01, + 6.7491e-01, 5.0433e-01, 8.6182e-01, 8.9724e-01, + 8.2984e-01, 6.1205e-01, 3.9839e-01, 7.6438e-01, + 4.2370e-01, 9.6523e-01, 3.2476e-01, 8.6659e-01, + 3.7647e-01, 6.1772e-01, 7.9455e-01, 6.5134e-01, + 1.3352e-02, 5.2857e-01, 5.4057e-01, 6.1086e-02, + 2.3965e-01, 3.5309e-01, 9.6176e-01, 2.5046e-01, + 9.9705e-01, 8.7395e-01, 5.3122e-01, 9.9433e-01, + 8.1238e-01, 8.6421e-03, 2.7089e-01, 5.7344e-01, + 4.9578e-02, 7.5159e-01, 7.1330e-02, 4.5327e-02, + 1.4165e-01, 8.4999e-02, 8.7349e-01, 3.8633e-01, + 8.6717e-01, 6.2412e-02, 3.5330e-01, 5.5040e-01, + 6.3873e-01, 4.0969e-01, 4.3263e-01, 9.4857e-01, + 2.0115e-01, 9.9246e-01, 2.6795e-01, 7.0640e-01, + 5.0328e-01, 3.6843e-01, 9.6134e-02, 2.1408e-01, + 1.9101e-02, 8.6657e-01, 6.8164e-01, 6.4434e-01, + 6.3950e-01, 8.2129e-01, 4.7860e-01, 7.7644e-01, + 4.3503e-01, 9.7628e-01, 6.5282e-01, 1.9258e-01, + 4.4757e-01, 6.7717e-01, 8.8517e-01, 3.7940e-01, + 8.9754e-01, 7.7245e-02, 8.5143e-02, 5.7048e-01, + 8.9039e-01, 3.8963e-01, 3.0000e-01, 2.1237e-01, + 5.5496e-01, 4.6138e-01, 7.3517e-01, 1.1538e-01, + 1.4946e-02, 4.1932e-01, 
1.5041e-01, 9.0340e-01, + 3.7811e-01, 2.4993e-01, 7.9005e-01, 7.1657e-01, + 8.0893e-01, 4.3493e-01, 6.2771e-01, 6.5094e-01, + 1.0476e-01, 4.9120e-01, 4.7346e-01, 5.7681e-02, + 9.1805e-01, 4.3716e-01, 6.1096e-01, 4.4589e-01, + 3.6246e-01, 7.6273e-01, 8.8340e-01, 8.1913e-01, + 1.9666e-02, 5.7679e-02, 8.1136e-01, 7.9759e-01, + 6.8084e-01, 9.6582e-01, 6.6891e-01, 5.0207e-01, + 2.6635e-01, 3.0523e-01, 9.4666e-02, 1.9776e-01, + 5.2916e-01, 1.0254e-01, 9.9880e-01, 8.6894e-01, + 1.0781e-01, 3.5508e-01, 4.7592e-01, 9.2893e-02, + 9.2081e-01, 5.4115e-01, 2.8060e-01, 7.9634e-01, + 5.1729e-01, 8.2883e-01, 7.5547e-01, 5.1412e-01, + 1.9968e-01, 8.5321e-01, 4.0251e-02, 2.5542e-01, + 5.1705e-01, 4.9638e-01, 5.2979e-01, 9.5171e-01, + 5.4373e-01, 6.3929e-01, 4.8420e-02, 6.3484e-02, + 1.0822e-01, 3.4350e-02, 5.5253e-02, 9.1785e-01, + 7.4530e-01, 8.7828e-02, 1.1568e-01, 1.2826e-01, + 8.7388e-02, 4.6536e-01, 3.8950e-01, 4.8037e-01, + 7.2754e-02, 5.9334e-01, 5.2116e-01, 7.5971e-01, + 8.6063e-01, 6.6925e-01, 7.5840e-01, 1.9221e-01, + 2.2819e-01, 5.5368e-01, 7.1787e-01, 3.4261e-02, + 1.1434e-01, 2.4902e-01, 2.3587e-01, 3.3240e-01, + 9.8225e-01, 3.6058e-01, 4.9739e-01, 6.1269e-01, + 1.7349e-01, 4.0476e-01, 4.3105e-02, 5.9398e-01, + 1.1729e-01, 5.9161e-01, 3.0933e-01, 9.7659e-01, + 7.7386e-01, 5.0122e-01, 3.5753e-01, 4.3112e-01, + 9.9500e-01, 9.2487e-01, 6.9221e-02, 8.7997e-01, + 1.4840e-01, 2.4266e-01, 1.4810e-01, 6.0954e-01, + 4.1162e-01, 9.3216e-01, 6.9320e-01, 9.3928e-01, + 3.1548e-01, 5.2168e-01, 9.2754e-01, 8.1486e-01, + 8.5823e-01, 5.5826e-01, 2.1093e-01, 9.2752e-01, + 7.4682e-01, 6.0051e-02, 9.5614e-01, 1.0734e-01, + 9.3432e-01, 6.9265e-01, 2.1710e-01, 6.5546e-01, + 8.9375e-01, 9.5434e-01, 8.7263e-01, 6.5068e-01, + 9.0741e-01, 3.6530e-01, 7.3210e-01, 4.2256e-01, + 3.7208e-01, 8.6877e-01, 6.8114e-01, 1.3169e-01, + 2.6109e-01, 1.4870e-01, 2.8545e-01, 5.1676e-01, + 3.3668e-01, 8.5952e-01, 2.8327e-01, 8.8990e-01, + 2.8873e-01, 4.8784e-01, 7.4932e-01, 5.9967e-01, + 9.3077e-01, 1.3852e-02, 4.3073e-01, 7.7902e-02, + 7.5222e-02, 1.9341e-01, 5.1533e-01, 3.9688e-01, + 9.8947e-01, 1.8762e-01, 8.8740e-02, 6.7834e-01, + 7.8140e-01, 9.1747e-01, 2.2166e-01, 6.8925e-01, + 1.1487e-01, 7.2926e-01, 9.0233e-01, 8.4992e-01, + 7.7937e-02, 6.0706e-02, 8.8192e-01, 9.4724e-01, + 3.4270e-01, 3.9980e-01, 6.7014e-01, 2.8075e-01, + 5.6090e-01, 4.9750e-01, 1.4793e-01, 6.9616e-02, + 1.2870e-01, 5.0923e-01, 9.2758e-01, 4.0954e-01, + 2.9094e-01, 1.2183e-01, 9.4085e-01, 9.7350e-01, + 7.7277e-01, 4.3174e-01, 7.0809e-01, 3.7859e-01, + 5.3678e-01, 8.1831e-01, 1.5176e-01, 5.9930e-01, + 9.7115e-01, 2.4797e-01, 5.3137e-01, 7.3717e-01, + 1.1118e-01, 3.3328e-01, 3.5010e-01, 9.4197e-01, + 4.9545e-01, 2.8102e-02, 2.3249e-01, 1.3621e-01, + 8.4379e-01, 5.8956e-01, 6.4469e-01, 6.2175e-01, + 5.8167e-01, 9.9478e-01, 3.7495e-03, 2.9909e-01, + 6.0214e-01, 7.2313e-01, 3.7477e-01, 4.2292e-01, + 6.4713e-01, 4.6010e-01, 8.6745e-02, 2.1639e-02, + 4.0429e-02, 9.8974e-01, 1.2118e-01, 5.1650e-01, + 4.9975e-01, 3.9394e-01, 6.4617e-01, 9.0253e-01, + 9.8159e-01, 1.6534e-01, 6.9111e-01, 5.0225e-02, + 5.0400e-02, 9.0474e-01, 8.1500e-01, 3.1023e-01, + 3.7266e-02, 5.8378e-01, 2.7933e-02, 6.1578e-01, + 1.0829e-01, 6.9696e-01, 2.5574e-01, 4.6162e-01, + 9.8664e-01, 2.9086e-01, 6.4012e-01, 1.4159e-01, + 8.3428e-01, 6.1565e-01, 1.5366e-01, 9.6348e-01, + 4.0966e-01, 4.8415e-01, 5.8438e-01, 2.1891e-01, + 4.7288e-01, 3.0498e-01, 7.9715e-01, 1.3969e-01, + 2.4748e-01, 3.8520e-01, 4.3714e-01, 4.1827e-01, + 5.8424e-01, 7.3540e-01, 5.7507e-01, 6.9330e-01, + 4.3550e-01, 7.1386e-01, 
7.0768e-01, 1.1949e-01, + 6.4032e-01, 7.9486e-01, 7.6524e-01, 7.1039e-01, + 5.7516e-01, 5.8960e-01, 2.9066e-01, 1.3653e-01, + 3.2759e-01, 9.3242e-01, 2.6758e-01, 9.6697e-01, + 5.9172e-02, 1.0175e-01, 4.4119e-01, 4.9519e-01, + 7.9361e-01, 6.7683e-01, 8.5985e-02, 2.3760e-01, + 7.9722e-01, 5.9807e-01, 7.8945e-01, 8.9307e-01, + 3.1571e-01, 8.1729e-01, 4.5486e-01, 9.1640e-01, + 5.6777e-01, 2.8348e-01, 3.5190e-01, 5.1105e-02, + 4.0959e-01, 8.1676e-01, 8.7608e-01, 8.5927e-01, + 7.1751e-02, 1.3622e-01, 7.3856e-04, 9.1464e-01, + 7.4105e-01, 1.8627e-01, 5.3778e-01, 6.6493e-02, + 5.0929e-01, 7.8137e-01, 8.5716e-01, 8.4558e-01, + 4.7604e-01, 7.2100e-01, 4.7412e-01, 2.5388e-01, + 4.2401e-01, 4.8072e-01, 3.6366e-01, 9.8602e-01, + 1.1481e-01, 5.2800e-01, 5.3392e-02, 4.7370e-01, + 7.5174e-01, 8.1419e-01, 8.4000e-01, 1.4341e-01, + 9.6726e-01, 3.7731e-01, 1.0682e-01, 2.6834e-01, + 8.3644e-01, 1.9659e-01, 8.7613e-02, 9.9349e-01, + 1.8690e-01, 6.6413e-01, 4.2211e-01, 9.2056e-01, + 6.0902e-01, 7.3642e-01, 7.3980e-01, 6.5955e-01, + 1.5784e-01, 6.1023e-01, 1.1485e-01, 9.7705e-03, + 1.0572e-01, 3.3247e-01, 3.4286e-01, 6.2421e-01, + 2.2510e-01, 4.2059e-01, 1.1972e-01, 5.0521e-01, + 9.5224e-01, 3.3911e-01, 6.8732e-01, 4.0729e-01, + 8.3521e-01, 6.2139e-01, 9.9870e-01, 1.8769e-01, + 4.8983e-01, 9.2862e-01, 9.7802e-01, 2.7507e-01, + 4.0103e-02, 2.1245e-01, 2.0174e-01, 8.5345e-01, + 2.7619e-01, 6.8601e-01, 9.1421e-01, 7.3642e-01, + 3.3032e-01, 4.5368e-01, 3.8043e-01, 5.7643e-01, + 8.3639e-01, 8.0191e-01, 5.6781e-01, 6.0869e-01, + 4.8752e-01, 6.4961e-01, 8.9269e-01, 8.6712e-01, + 5.2297e-01, 9.0969e-01, 4.7633e-01, 5.4207e-01, + 8.2832e-01, 4.3266e-01, 7.3135e-01, 5.4331e-01, + 8.5099e-01, 3.4706e-02, 1.6109e-01, 9.2806e-01, + 1.5954e-01, 9.5769e-01, 1.8777e-02, 7.5505e-01, + 5.7525e-01, 6.2967e-01, 2.7744e-01, 7.9418e-01, + 3.8017e-01, 7.6002e-01, 1.7390e-01, 3.2046e-02, + 2.5235e-01, 2.4512e-01, 8.3269e-01, 7.2001e-01, + 7.7456e-03, 4.6020e-01, 1.5040e-01, 4.5890e-01, + 6.4873e-01, 8.2394e-01, 5.2198e-02, 5.0519e-01, + 6.9360e-01, 2.3311e-01, 3.7009e-02, 2.8397e-01, + 2.8403e-02, 3.5982e-01, 5.5695e-01, 7.4590e-01, + 4.9669e-01, 5.0174e-01, 4.6801e-01, 1.7821e-01, + 4.5014e-02, 6.4983e-01, 9.0782e-01, 1.9282e-01, + 3.6519e-01, 9.8159e-01, 1.4949e-01, 8.2867e-01, + 2.1216e-01, 4.7033e-02, 2.7033e-03, 8.6883e-01, + 6.6889e-01, 6.4411e-01, 1.6944e-01, 2.2065e-01, + 6.8890e-03, 9.9653e-01, 5.2089e-01, 4.0022e-01, + 3.0635e-01, 5.1870e-01, 9.0873e-01, 6.1765e-02, + 3.6346e-01, 3.3309e-01, 2.2863e-01, 4.3468e-01, + 8.6755e-01, 7.6640e-01, 3.0385e-01, 3.5763e-01, + 4.1656e-01, 1.4362e-01, 9.6354e-01, 5.7426e-01, + 7.8689e-01, 2.4730e-01, 7.7422e-01, 3.5805e-02, + 1.9351e-01, 5.0990e-01, 6.2640e-01, 3.7221e-01, + 3.1572e-01, 8.4142e-01, 5.0295e-01, 4.9000e-01, + 9.5176e-01, 9.2623e-02, 3.9572e-01, 3.3737e-01, + 4.9911e-01, 7.9045e-01, 5.2579e-01, 1.4566e-01, + 3.4194e-01, 9.7302e-01, 7.4489e-03, 2.6327e-01, + 8.2843e-01, 8.2229e-01, 8.6943e-01, 4.9570e-01, + 5.8722e-01, 9.6892e-01, 8.6864e-01, 4.1771e-01, + 2.3907e-01, 3.6921e-01, 2.6982e-01, 8.3031e-01, + 1.8733e-01, 2.0567e-01, 6.0647e-01, 4.6222e-01, + 4.7998e-01, 3.1085e-01, 5.6119e-01, 1.5141e-01, + 2.7687e-01, 7.2627e-01, 7.3959e-01, 3.2150e-02, + 6.8320e-01, 9.6806e-01, 8.0941e-01, 3.6996e-01, + 1.0113e-01, 6.0608e-01, 2.5950e-01, 7.7470e-01, + 4.5427e-01, 7.0661e-01, 4.1646e-01, 9.6120e-01, + 2.1486e-01, 7.9020e-01, 6.9684e-01, 8.5318e-01, + 2.4751e-02, 1.9432e-01, 3.3890e-03, 3.5931e-01, + 6.0078e-01, 3.1420e-01, 7.9915e-01, 7.2995e-01, + 4.5837e-01, 4.8434e-01, 
2.4191e-01, 8.7264e-01, + 4.4628e-01, 1.7433e-01, 3.9048e-01, 5.8578e-01, + 5.1387e-01, 2.3966e-01, 5.9851e-01, 6.1038e-01, + 9.8604e-01, 8.5463e-01, 7.8940e-01, 2.9878e-01, + 8.8823e-01, 2.9276e-01, 8.4526e-01, 7.0736e-01, + 1.9470e-01, 6.4877e-01, 9.0222e-01, 1.7804e-01, + 5.1116e-01, 5.3144e-01, 1.6398e-02, 1.8603e-01, + 8.4622e-01, 9.7673e-01, 8.3551e-01, 7.9152e-01, + 2.9879e-02, 3.2414e-02, 3.3248e-01, 9.0278e-01, + 7.2618e-02, 3.8623e-02, 4.5740e-01, 1.3855e-01, + 4.6681e-01, 5.3146e-01, 3.4373e-01, 3.8471e-01, + 7.6964e-01, 3.2053e-01, 4.4641e-01, 9.3560e-01, + 3.6526e-01, 1.7340e-01, 4.1921e-01, 3.2031e-01, + 1.1241e-01, 7.3094e-01, 6.8199e-01, 4.7013e-01, + 5.7060e-01, 8.6818e-01, 9.5280e-01, 2.7502e-01, + 2.5660e-01, 2.5142e-01, 8.7490e-01, 9.3681e-01, + 7.6161e-01, 3.5498e-01, 3.9613e-02, 7.6181e-01, + 9.7582e-01, 1.9366e-01, 9.0553e-02, 1.5015e-01, + 3.4037e-01, 2.7234e-01, 2.6012e-01, 5.4730e-01, + 7.5914e-01, 7.2804e-01, 8.8377e-01, 2.9417e-01, + 8.2797e-01, 7.8180e-01, 6.9616e-01, 5.9021e-01, + 3.7858e-01, 4.2900e-01, 2.6402e-01, 7.0528e-01, + 1.6843e-01, 1.3766e-01, 2.6640e-01, 1.8234e-01, + 8.9174e-01, 2.9224e-01, 9.0712e-01, 6.5247e-01, + 5.3578e-01, 4.8627e-01, 5.4907e-01, 8.4350e-01, + 6.6691e-01, 8.1816e-01, 1.7307e-01, 7.7056e-01, + 8.6383e-02, 4.5982e-01, 4.4756e-01, 4.6444e-01, + 4.6836e-01, 9.6666e-01, 8.8660e-01, 8.5342e-01, + 5.4247e-01, 8.6053e-01, 8.6540e-01, 9.2791e-01, + 2.1159e-01, 2.4351e-01, 4.9738e-01, 4.1807e-01, + 2.7549e-01, 8.2197e-01, 7.6196e-01, 8.8791e-01, + 9.1088e-01, 8.8134e-01, 6.6822e-01, 5.0556e-01, + 5.4217e-01, 3.6135e-01, 5.4197e-01, 8.7029e-01, + 1.5876e-01, 2.6070e-01, 1.4902e-01, 4.9540e-01, + 3.2658e-01, 9.2315e-01, 9.8529e-02, 8.3386e-01, + 6.6861e-01, 6.8086e-01, 4.1234e-01, 9.1040e-01, + 8.0733e-02, 8.3027e-01, 2.7401e-02, 7.0357e-01, + 7.4001e-01, 1.0754e-01, 4.0982e-01, 7.3224e-01, + 3.0803e-01, 2.1047e-01, 8.9684e-01, 2.7451e-01, + 1.7786e-01, 8.7756e-01, 9.1917e-01, 5.9533e-01, + 2.2290e-01, 8.5840e-01, 8.1366e-01, 4.6831e-01, + 3.2199e-01, 1.0599e-01, 9.0382e-01, 5.5555e-01, + 1.1648e-01, 2.6181e-01, 9.0756e-02, 7.6516e-01, + 1.3491e-01, 1.5806e-01, 6.1588e-01, 3.5698e-01, + 9.6605e-01, 7.0001e-01, 3.4645e-01, 3.6756e-01, + 8.9547e-01, 5.0293e-01, 7.8512e-02, 9.5652e-01, + 4.6658e-01, 5.7874e-01, 8.5198e-01, 1.4339e-01, + 3.6238e-01, 4.9661e-01, 7.7543e-01, 1.9030e-01, + 1.1043e-01, 5.6247e-01, 8.0300e-01, 2.2866e-01, + 1.3044e-01, 4.3849e-01, 6.0789e-02, 6.0168e-01, + 8.5059e-01, 2.1034e-02, 7.3786e-01, 5.2646e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4567, 0.4180, 0.1623, ..., 0.7337, 0.1746, 0.4398]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.518462419509888 seconds + +[20.44, 20.6, 20.6, 20.72, 20.68, 20.8, 20.68, 20.68, 20.6, 20.6] +[21.04, 21.0, 21.12, 25.8, 27.6, 28.0, 28.72, 26.32, 24.76, 23.36, 23.68, 23.56, 23.52, 23.84] +14.63833498954773 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 141479, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.518462419509888, 'TIME_S_1KI': 0.07434645720926701, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.0979061508179, 'W': 22.891804729847298} +[20.44, 20.6, 20.6, 20.72, 20.68, 20.8, 20.68, 20.68, 20.6, 20.6, 20.68, 20.56, 20.56, 20.56, 20.6, 20.64, 20.56, 20.76, 20.92, 20.8] +371.78 +18.589 +{'CPU': 
'Altra', 'CORES': 1, 'ITERATIONS': 141479, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.518462419509888, 'TIME_S_1KI': 0.07434645720926701, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.0979061508179, 'W': 22.891804729847298, 'J_1KI': 2.368534596306292, 'W_1KI': 0.1618035519748323, 'W_D': 4.302804729847299, 'J_D': 62.98589703011518, 'W_D_1KI': 0.030413027586053753, 'J_D_1KI': 0.0002149649600721927} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_5e-05.json index f94bbe2..7c93d40 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 52408, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.675631761550903, "TIME_S_1KI": 0.2037023309714338, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 322.76721565246584, "W": 22.07477701860741, "J_1KI": 6.1587394224634755, "W_1KI": 0.42121006370415603, "W_D": 3.4607770186074056, "J_D": 50.6018865489959, "W_D_1KI": 0.06603528122819809, "J_D_1KI": 0.0012600229207029095} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 50379, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.08161449432373, "TIME_S_1KI": 0.20011541504046787, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 293.8021245384217, "W": 21.614421497236524, "J_1KI": 5.8318371650572995, "W_1KI": 0.4290363345290007, "W_D": 3.025421497236522, "J_D": 41.12417552447326, "W_D_1KI": 0.060053226487951764, "J_D_1KI": 0.0011920289503156427} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_5e-05.output index a9bfe94..c2ed777 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.02814483642578125} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.027973651885986328} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 4997, 4998, 5000]), - col_indices=tensor([7223, 597, 5381, ..., 4437, 2871, 7175]), - values=tensor([0.8424, 0.9605, 0.7186, ..., 0.3316, 0.2968, 0.8125]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4998, 4998, 5000]), + col_indices=tensor([5173, 3476, 8311, ..., 3507, 1550, 2416]), + values=tensor([0.1624, 0.2116, 0.7810, ..., 0.1936, 0.2168, 0.5075]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.9947, 0.8149, 0.3597, ..., 0.7445, 0.4060, 0.0098]) +tensor([0.9559, 0.2089, 0.7482, ..., 0.9674, 0.6990, 0.3683]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.02814483642578125 seconds +Time: 0.027973651885986328 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 37307 -ss 10000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.474437952041626} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 37535 -ss 10000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.822920083999634} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 5000, 5000, 5000]), - col_indices=tensor([7873, 9438, 5376, ..., 1254, 8934, 6510]), - values=tensor([0.8139, 0.0055, 0.6843, ..., 0.4362, 0.9226, 0.6386]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 5000, 5000]), + col_indices=tensor([1207, 4588, 6635, ..., 4462, 7085, 9737]), + values=tensor([0.8511, 0.6719, 0.6671, ..., 0.0958, 0.9883, 0.1849]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.9683, 0.4961, 0.7880, ..., 0.7466, 0.9086, 0.6990]) +tensor([0.7262, 0.6607, 0.5856, ..., 0.5982, 0.8892, 0.2173]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 7.474437952041626 seconds +Time: 7.822920083999634 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 52408 -ss 10000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.675631761550903} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 50379 -ss 10000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.08161449432373} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), - col_indices=tensor([6316, 4387, 8598, ..., 977, 3012, 3071]), - values=tensor([0.0249, 0.1066, 0.4899, ..., 0.3057, 0.2915, 0.5832]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([5494, 8331, 1267, ..., 9174, 296, 8633]), + values=tensor([0.8003, 0.4485, 0.3154, ..., 0.8226, 0.3395, 0.4010]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.6250, 0.8754, 0.6636, ..., 0.3831, 0.1537, 0.5147]) +tensor([0.1323, 0.6232, 0.2469, ..., 0.6634, 0.4951, 0.0985]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.675631761550903 seconds +Time: 10.08161449432373 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), - col_indices=tensor([6316, 4387, 8598, ..., 977, 3012, 3071]), - values=tensor([0.0249, 0.1066, 0.4899, ..., 0.3057, 0.2915, 0.5832]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([5494, 8331, 1267, ..., 9174, 296, 8633]), + values=tensor([0.8003, 0.4485, 0.3154, ..., 0.8226, 0.3395, 0.4010]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.6250, 0.8754, 0.6636, ..., 0.3831, 0.1537, 0.5147]) +tensor([0.1323, 0.6232, 0.2469, ..., 0.6634, 0.4951, 0.0985]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.675631761550903 seconds +Time: 10.08161449432373 seconds -[20.6, 20.52, 20.48, 20.48, 20.44, 20.28, 20.56, 20.6, 20.76, 21.08] -[21.08, 20.76, 21.6, 22.6, 23.96, 24.64, 25.4, 24.84, 24.84, 24.72, 23.92, 24.04, 24.08, 23.92] -14.621539115905762 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 52408, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.675631761550903, 'TIME_S_1KI': 0.2037023309714338, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.76721565246584, 'W': 22.07477701860741} -[20.6, 20.52, 20.48, 20.48, 20.44, 20.28, 20.56, 20.6, 20.76, 21.08, 20.48, 20.64, 20.8, 20.76, 20.92, 21.0, 20.88, 20.76, 20.84, 20.96] -372.2800000000001 -18.614000000000004 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 52408, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.675631761550903, 'TIME_S_1KI': 0.2037023309714338, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.76721565246584, 'W': 22.07477701860741, 'J_1KI': 6.1587394224634755, 'W_1KI': 0.42121006370415603, 'W_D': 3.4607770186074056, 'J_D': 50.6018865489959, 'W_D_1KI': 0.06603528122819809, 'J_D_1KI': 0.0012600229207029095} +[20.4, 20.4, 20.52, 20.68, 20.68, 20.64, 20.48, 20.52, 20.4, 20.32] +[20.32, 20.16, 20.96, 22.16, 24.08, 25.0, 25.56, 25.08, 24.76, 23.56, 23.56, 23.32, 23.28] +13.592874765396118 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 50379, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.08161449432373, 'TIME_S_1KI': 0.20011541504046787, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 293.8021245384217, 'W': 21.614421497236524} +[20.4, 20.4, 20.52, 20.68, 20.68, 20.64, 20.48, 20.52, 20.4, 20.32, 20.72, 20.36, 20.24, 20.36, 20.56, 21.12, 21.28, 21.16, 21.2, 20.92] +371.78000000000003 +18.589000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 50379, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.08161449432373, 'TIME_S_1KI': 0.20011541504046787, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 293.8021245384217, 'W': 21.614421497236524, 'J_1KI': 5.8318371650572995, 'W_1KI': 0.4290363345290007, 'W_D': 3.025421497236522, 'J_D': 41.12417552447326, 'W_D_1KI': 0.060053226487951764, 'J_D_1KI': 0.0011920289503156427} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_0.0001.json index 118d60f..6638e3c 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_0.0001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 98.35910129547119, "TIME_S_1KI": 983.5910129547119, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2706.022798595428, "W": 24.09983015121324, "J_1KI": 27060.22798595428, "W_1KI": 240.9983015121324, "W_D": 5.635830151213241, "J_D": 632.8129610252375, "W_D_1KI": 56.358301512132414, "J_D_1KI": 563.5830151213241} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 98.34671473503113, "TIME_S_1KI": 983.4671473503113, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2702.3393918704983, "W": 24.052580950114546, "J_1KI": 27023.393918704984, "W_1KI": 240.52580950114546, "W_D": 5.428580950114547, "J_D": 609.9082744541165, "W_D_1KI": 54.28580950114547, "J_D_1KI": 542.8580950114548} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_0.0001.output index 22bc540..bbf7a9b 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 98.35910129547119} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 98.34671473503113} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 62, 113, ..., 24999916, - 24999964, 25000000]), - col_indices=tensor([ 13628, 17541, 24252, ..., 467551, 469636, - 477818]), - values=tensor([0.8374, 0.1433, 0.7046, ..., 0.7606, 0.4438, 0.1648]), +tensor(crow_indices=tensor([ 0, 60, 122, ..., 24999905, + 24999951, 25000000]), + col_indices=tensor([ 13817, 14058, 49011, ..., 453122, 457251, + 499785]), + values=tensor([0.7246, 0.5124, 0.1037, ..., 0.4878, 0.2821, 0.0236]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.5427, 0.9990, 0.7165, ..., 0.2818, 0.2990, 0.5329]) +tensor([0.3477, 0.6286, 0.4734, ..., 0.5349, 0.4636, 0.2407]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,17 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 98.35910129547119 seconds +Time: 98.34671473503113 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 62, 113, ..., 24999916, - 24999964, 25000000]), - col_indices=tensor([ 13628, 17541, 24252, ..., 467551, 469636, - 477818]), - values=tensor([0.8374, 0.1433, 0.7046, ..., 0.7606, 0.4438, 0.1648]), +tensor(crow_indices=tensor([ 0, 60, 122, ..., 24999905, + 24999951, 25000000]), + col_indices=tensor([ 13817, 14058, 49011, ..., 453122, 457251, + 499785]), + values=tensor([0.7246, 0.5124, 0.1037, ..., 0.4878, 0.2821, 0.0236]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.5427, 0.9990, 0.7165, ..., 0.2818, 0.2990, 0.5329]) +tensor([0.3477, 0.6286, 0.4734, ..., 0.5349, 0.4636, 0.2407]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +35,13 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 98.35910129547119 seconds +Time: 98.34671473503113 seconds -[20.28, 20.32, 20.44, 20.32, 20.6, 20.64, 20.64, 20.52, 20.76, 20.8] -[20.68, 20.52, 20.6, 22.08, 24.8, 25.76, 28.52, 28.48, 30.56, 30.0, 29.28, 29.8, 28.92, 28.32, 28.32, 27.32, 26.84, 25.96, 25.16, 25.08, 24.72, 24.92, 24.96, 25.12, 25.32, 25.44, 25.32, 25.24, 25.2, 25.08, 25.12, 25.28, 25.16, 25.32, 25.2, 25.04, 25.04, 25.04, 25.28, 25.16, 25.28, 25.36, 25.36, 25.36, 25.44, 25.48, 25.28, 25.24, 25.12, 25.04, 25.04, 25.2, 25.16, 25.36, 25.24, 25.36, 25.12, 25.12, 25.12, 25.2, 25.32, 25.36, 25.16, 25.16, 25.2, 25.28, 25.4, 25.44, 25.48, 25.28, 25.24, 25.36, 25.36, 25.24, 25.36, 25.2, 25.24, 25.52, 25.52, 25.52, 25.52, 25.4, 25.08, 25.2, 25.24, 25.44, 25.12, 25.12, 24.92, 24.8, 24.8, 25.08, 25.12, 25.32, 25.28, 25.2, 25.04, 25.08, 25.16, 25.4, 25.44, 25.36, 25.36, 25.36, 25.28, 25.04, 25.16] -112.28389501571655 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 98.35910129547119, 'TIME_S_1KI': 983.5910129547119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2706.022798595428, 'W': 24.09983015121324} -[20.28, 20.32, 20.44, 20.32, 20.6, 20.64, 20.64, 20.52, 20.76, 20.8, 20.48, 20.64, 20.56, 20.44, 20.36, 20.4, 
20.36, 20.52, 20.64, 20.68] -369.28 -18.464 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 98.35910129547119, 'TIME_S_1KI': 983.5910129547119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2706.022798595428, 'W': 24.09983015121324, 'J_1KI': 27060.22798595428, 'W_1KI': 240.9983015121324, 'W_D': 5.635830151213241, 'J_D': 632.8129610252375, 'W_D_1KI': 56.358301512132414, 'J_D_1KI': 563.5830151213241} +[20.48, 20.48, 20.32, 20.24, 20.52, 20.64, 20.92, 21.0, 20.84, 20.56] +[20.32, 20.16, 22.68, 23.96, 26.8, 28.44, 28.44, 30.0, 29.32, 29.6, 29.0, 28.16, 28.68, 27.4, 27.08, 27.0, 26.2, 25.52, 25.28, 25.24, 25.08, 25.0, 24.92, 25.08, 25.0, 25.04, 25.0, 25.12, 25.08, 25.08, 25.08, 25.0, 25.0, 25.2, 25.0, 25.16, 25.2, 25.08, 24.96, 24.76, 24.84, 24.92, 25.04, 25.2, 25.4, 25.32, 25.32, 25.24, 25.0, 25.16, 25.08, 25.08, 25.04, 25.44, 25.24, 25.48, 25.52, 25.44, 25.36, 25.28, 25.08, 25.12, 25.12, 25.04, 24.96, 24.96, 24.96, 24.96, 25.12, 25.2, 25.2, 25.2, 25.08, 25.08, 24.96, 24.88, 25.04, 25.12, 25.4, 25.32, 25.44, 25.56, 25.36, 25.24, 25.32, 25.36, 25.44, 25.6, 25.48, 25.4, 25.32, 25.08, 25.16, 25.04, 25.0, 25.0, 24.88, 24.96, 25.04, 25.16, 25.16, 24.88, 25.12, 24.84, 25.0, 24.92, 25.16] +112.35132718086243 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 98.34671473503113, 'TIME_S_1KI': 983.4671473503113, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2702.3393918704983, 'W': 24.052580950114546} +[20.48, 20.48, 20.32, 20.24, 20.52, 20.64, 20.92, 21.0, 20.84, 20.56, 20.52, 20.48, 20.64, 20.76, 20.76, 20.92, 20.92, 20.92, 20.84, 21.0] +372.47999999999996 +18.624 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 98.34671473503113, 'TIME_S_1KI': 983.4671473503113, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2702.3393918704983, 'W': 24.052580950114546, 'J_1KI': 27023.393918704984, 'W_1KI': 240.52580950114546, 'W_D': 5.428580950114547, 'J_D': 609.9082744541165, 'W_D_1KI': 54.28580950114547, 'J_D_1KI': 542.8580950114548} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.json index 36dce3a..838aa74 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.078357696533203, "TIME_S_1KI": 100.78357696533203, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 355.02523105621333, "W": 24.261238712111865, "J_1KI": 3550.2523105621335, "W_1KI": 242.61238712111864, "W_D": 5.657238712111866, "J_D": 82.78482829093929, "W_D_1KI": 56.57238712111866, "J_D_1KI": 565.7238712111866} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 
100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.067489624023438, "TIME_S_1KI": 100.67489624023438, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 335.37869496345525, "W": 22.8901921314268, "J_1KI": 3353.7869496345525, "W_1KI": 228.901921314268, "W_D": 4.521192131426801, "J_D": 66.24284794163708, "W_D_1KI": 45.21192131426801, "J_D_1KI": 452.1192131426801} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.output index 8996689..6328d8b 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.078357696533203} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.067489624023438} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 6, ..., 2499989, - 2499997, 2500000]), - col_indices=tensor([ 10944, 177257, 201447, ..., 125511, 168548, - 443200]), - values=tensor([0.0549, 0.4670, 0.3111, ..., 0.0129, 0.0661, 0.9327]), +tensor(crow_indices=tensor([ 0, 4, 11, ..., 2499996, + 2499999, 2500000]), + col_indices=tensor([261896, 273598, 341514, ..., 229093, 382580, + 115157]), + values=tensor([0.6612, 0.8768, 0.0724, ..., 0.5774, 0.6445, 0.0934]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3951, 0.3409, 0.2222, ..., 0.4533, 0.5999, 0.5088]) +tensor([0.8715, 0.7923, 0.2777, ..., 0.2922, 0.6586, 0.1397]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.078357696533203 seconds +Time: 10.067489624023438 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 6, ..., 2499989, - 2499997, 2500000]), - col_indices=tensor([ 10944, 177257, 201447, ..., 125511, 168548, - 443200]), - values=tensor([0.0549, 0.4670, 0.3111, ..., 0.0129, 0.0661, 0.9327]), +tensor(crow_indices=tensor([ 0, 4, 11, ..., 2499996, + 2499999, 2500000]), + col_indices=tensor([261896, 273598, 341514, ..., 229093, 382580, + 115157]), + values=tensor([0.6612, 0.8768, 0.0724, ..., 0.5774, 0.6445, 0.0934]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3951, 0.3409, 0.2222, ..., 0.4533, 0.5999, 0.5088]) +tensor([0.8715, 0.7923, 0.2777, ..., 0.2922, 0.6586, 0.1397]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +35,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.078357696533203 seconds +Time: 10.067489624023438 seconds -[20.6, 20.6, 20.76, 20.88, 20.68, 20.64, 20.36, 20.76, 20.88, 20.96] -[21.0, 20.96, 24.12, 26.84, 26.84, 28.92, 30.04, 30.92, 26.56, 25.12, 24.8, 25.2, 25.28, 25.52] -14.633433818817139 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.078357696533203, 'TIME_S_1KI': 100.78357696533203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 355.02523105621333, 'W': 24.261238712111865} -[20.6, 20.6, 20.76, 20.88, 20.68, 20.64, 20.36, 20.76, 20.88, 20.96, 20.52, 20.48, 20.56, 20.6, 20.6, 20.64, 20.64, 20.76, 20.8, 20.8] -372.08 -18.604 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.078357696533203, 'TIME_S_1KI': 100.78357696533203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 355.02523105621333, 'W': 24.261238712111865, 'J_1KI': 3550.2523105621335, 'W_1KI': 242.61238712111864, 'W_D': 5.657238712111866, 'J_D': 82.78482829093929, 'W_D_1KI': 56.57238712111866, 'J_D_1KI': 565.7238712111866} +[20.24, 20.0, 20.0, 20.2, 20.28, 20.52, 20.4, 20.2, 20.52, 20.6] +[20.64, 20.76, 21.52, 22.24, 24.4, 25.84, 26.96, 26.96, 26.68, 26.68, 25.0, 24.64, 24.68, 24.92] +14.651633024215698 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.067489624023438, 'TIME_S_1KI': 100.67489624023438, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.37869496345525, 'W': 22.8901921314268} +[20.24, 20.0, 20.0, 20.2, 20.28, 20.52, 20.4, 20.2, 20.52, 20.6, 20.4, 20.32, 20.44, 20.52, 20.32, 20.6, 20.72, 20.68, 20.76, 20.56] +367.38 +18.369 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.067489624023438, 'TIME_S_1KI': 100.67489624023438, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.37869496345525, 'W': 22.8901921314268, 'J_1KI': 3353.7869496345525, 'W_1KI': 228.901921314268, 'W_D': 4.521192131426801, 'J_D': 66.24284794163708, 'W_D_1KI': 45.21192131426801, 'J_D_1KI': 452.1192131426801} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_5e-05.json index 741a72d..b28b780 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 50.092703104019165, "TIME_S_1KI": 500.92703104019165, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1421.3682743358613, "W": 24.212233919725165, "J_1KI": 14213.682743358613, "W_1KI": 242.12233919725165, "W_D": 5.561233919725165, "J_D": 326.46972955346126, "W_D_1KI": 55.61233919725165, "J_D_1KI": 556.1233919725165} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 50.31077265739441, "TIME_S_1KI": 503.1077265739441, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1394.836717920303, "W": 24.161271875633943, "J_1KI": 13948.36717920303, "W_1KI": 241.61271875633943, "W_D": 5.625271875633942, "J_D": 324.7484569854733, "W_D_1KI": 56.25271875633942, "J_D_1KI": 562.5271875633941} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_5e-05.output index 2787c93..a18507d 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 50.092703104019165} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 50.31077265739441} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 19, 45, ..., 12499947, - 12499973, 12500000]), - col_indices=tensor([ 17397, 55872, 132943, ..., 437400, 464141, - 486359]), - values=tensor([0.6537, 0.5151, 0.3039, ..., 0.7629, 0.2656, 0.5446]), +tensor(crow_indices=tensor([ 0, 24, 53, ..., 12499945, + 12499970, 12500000]), + col_indices=tensor([ 18146, 22620, 44577, ..., 431175, 474683, + 476577]), + values=tensor([0.0909, 0.4067, 0.5768, ..., 0.8704, 0.8226, 0.2311]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.4135, 0.5444, 0.9798, ..., 0.8106, 0.6562, 0.9974]) +tensor([0.7846, 0.2097, 0.9073, ..., 0.9672, 0.4159, 0.3283]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,17 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 50.092703104019165 seconds +Time: 50.31077265739441 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 19, 45, ..., 12499947, - 12499973, 12500000]), - col_indices=tensor([ 17397, 55872, 132943, ..., 437400, 464141, - 486359]), - values=tensor([0.6537, 0.5151, 0.3039, ..., 0.7629, 0.2656, 0.5446]), +tensor(crow_indices=tensor([ 0, 24, 53, ..., 12499945, + 12499970, 12500000]), + col_indices=tensor([ 18146, 22620, 44577, ..., 431175, 474683, + 476577]), + values=tensor([0.0909, 0.4067, 0.5768, ..., 0.8704, 0.8226, 0.2311]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.4135, 0.5444, 0.9798, ..., 0.8106, 0.6562, 0.9974]) +tensor([0.7846, 0.2097, 0.9073, ..., 0.9672, 0.4159, 0.3283]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +35,13 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 50.092703104019165 seconds +Time: 50.31077265739441 seconds -[20.76, 20.64, 20.64, 20.64, 20.48, 20.64, 20.6, 20.8, 21.08, 20.96] -[20.92, 20.92, 23.96, 24.8, 26.64, 28.44, 28.44, 30.32, 28.64, 28.92, 27.6, 26.6, 25.88, 25.0, 25.04, 25.08, 25.08, 25.16, 25.16, 25.12, 25.08, 24.88, 25.08, 25.12, 25.2, 25.28, 25.36, 25.32, 25.32, 25.04, 25.04, 25.28, 25.16, 25.12, 25.24, 25.16, 25.16, 25.36, 25.24, 25.2, 25.28, 25.12, 24.96, 25.12, 25.2, 25.32, 25.36, 25.48, 25.56, 25.52, 25.52, 25.36, 25.4, 25.2, 25.28, 25.56] -58.704549074172974 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 50.092703104019165, 'TIME_S_1KI': 500.92703104019165, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1421.3682743358613, 'W': 24.212233919725165} -[20.76, 20.64, 20.64, 20.64, 20.48, 20.64, 20.6, 20.8, 21.08, 20.96, 20.76, 20.4, 20.48, 20.92, 20.8, 20.92, 20.88, 20.72, 20.64, 21.0] -373.02 -18.651 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 50.092703104019165, 'TIME_S_1KI': 500.92703104019165, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1421.3682743358613, 'W': 24.212233919725165, 'J_1KI': 14213.682743358613, 'W_1KI': 242.12233919725165, 'W_D': 5.561233919725165, 'J_D': 326.46972955346126, 'W_D_1KI': 55.61233919725165, 'J_D_1KI': 556.1233919725165} +[20.8, 20.44, 20.36, 20.32, 20.28, 20.24, 20.36, 20.28, 20.28, 20.28] +[20.4, 20.8, 20.64, 25.2, 26.28, 28.52, 30.88, 29.6, 29.28, 28.92, 28.28, 26.44, 26.08, 25.2, 25.04, 25.28, 25.24, 25.2, 25.2, 25.32, 25.36, 25.32, 25.32, 25.28, 25.24, 25.0, 25.0, 24.84, 24.88, 24.76, 24.88, 25.12, 25.24, 25.4, 25.68, 25.56, 25.6, 25.36, 25.08, 24.96, 24.96, 24.84, 24.88, 25.36, 25.32, 25.64, 25.6, 25.32, 25.2, 25.28, 25.04, 25.08, 25.0, 25.08, 25.0] +57.730268716812134 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 50.31077265739441, 'TIME_S_1KI': 503.1077265739441, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1394.836717920303, 'W': 24.161271875633943} +[20.8, 20.44, 20.36, 20.32, 20.28, 20.24, 20.36, 20.28, 20.28, 20.28, 20.88, 21.2, 21.0, 20.8, 20.92, 20.88, 20.88, 20.48, 20.72, 20.6] +370.72 +18.536 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 50.31077265739441, 'TIME_S_1KI': 503.1077265739441, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1394.836717920303, 'W': 24.161271875633943, 'J_1KI': 13948.36717920303, 'W_1KI': 241.61271875633943, 'W_D': 5.625271875633942, 'J_D': 324.7484569854733, 'W_D_1KI': 56.25271875633942, 'J_D_1KI': 562.5271875633941} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.json index 8bb383c..2112a13 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1701, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.195863723754883, "TIME_S_1KI": 5.994040989861777, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 298.5689013671875, "W": 22.02886990511616, "J_1KI": 175.5255152070473, "W_1KI": 12.950540802537425, "W_D": 3.5278699051161624, "J_D": 47.81508294677735, "W_D_1KI": 2.0739975926608833, "J_D_1KI": 1.2192813595889966} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1805, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.802817583084106, "TIME_S_1KI": 5.984940489243272, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 337.43533813476563, "W": 23.11078306223205, "J_1KI": 186.94478567023026, "W_1KI": 12.803757929214433, "W_D": 4.706783062232045, "J_D": 68.72267935943601, "W_D_1KI": 2.60763604555792, "J_D_1KI": 1.4446737094503712} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.output index 7e66808..ecd1a48 
100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6172366142272949} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6196467876434326} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 11, ..., 249984, 249992, +tensor(crow_indices=tensor([ 0, 7, 9, ..., 249995, 249997, 250000]), - col_indices=tensor([ 55, 33912, 7825, ..., 25553, 31300, 45367]), - values=tensor([0.2156, 0.3825, 0.1471, ..., 0.6075, 0.9514, 0.6641]), + col_indices=tensor([ 8366, 9967, 13922, ..., 7259, 19728, 20274]), + values=tensor([0.2696, 0.3809, 0.3608, ..., 0.4336, 0.8213, 0.0030]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.8972, 0.9948, 0.0628, ..., 0.4950, 0.5589, 0.8119]) +tensor([0.8091, 0.0187, 0.2706, ..., 0.3325, 0.9347, 0.7061]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.6172366142272949 seconds +Time: 0.6196467876434326 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1701 -ss 50000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.195863723754883} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1694 -ss 50000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.853110074996948} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 249986, 249995, +tensor(crow_indices=tensor([ 0, 9, 15, ..., 249992, 249997, 250000]), - col_indices=tensor([ 4095, 7631, 26458, ..., 36946, 37655, 49733]), - values=tensor([0.3588, 0.4994, 0.4557, ..., 0.6547, 0.8163, 0.6645]), + col_indices=tensor([ 2870, 8286, 14444, ..., 21713, 35775, 49251]), + values=tensor([0.0516, 0.1595, 0.3064, ..., 0.9376, 0.2976, 0.2768]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0295, 0.0838, 0.1870, ..., 0.2542, 0.3969, 0.7673]) +tensor([0.4626, 0.6501, 0.4407, ..., 0.3508, 0.8520, 0.2800]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.195863723754883 seconds +Time: 9.853110074996948 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1805 -ss 50000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.802817583084106} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 249986, 249995, +tensor(crow_indices=tensor([ 0, 5, 12, ..., 249993, 249997, 250000]), - col_indices=tensor([ 4095, 7631, 26458, ..., 36946, 37655, 49733]), - values=tensor([0.3588, 0.4994, 0.4557, ..., 0.6547, 0.8163, 0.6645]), + col_indices=tensor([ 8417, 19550, 34715, ..., 18246, 29072, 37036]), + values=tensor([0.0315, 0.0556, 0.7166, ..., 0.2758, 0.8792, 0.3364]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0295, 0.0838, 0.1870, ..., 0.2542, 0.3969, 0.7673]) +tensor([0.0639, 0.6431, 0.4064, ..., 0.4418, 0.0626, 0.0226]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +56,30 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.195863723754883 seconds +Time: 10.802817583084106 seconds -[20.6, 20.6, 20.68, 20.56, 20.68, 20.6, 20.6, 20.48, 20.36, 20.44] -[20.44, 20.16, 20.24, 22.04, 22.92, 24.8, 25.72, 25.84, 25.56, 24.68, 24.84, 25.0, 25.2] -13.55352783203125 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1701, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.195863723754883, 'TIME_S_1KI': 5.994040989861777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.5689013671875, 'W': 22.02886990511616} -[20.6, 20.6, 20.68, 20.56, 20.68, 20.6, 20.6, 20.48, 20.36, 20.44, 20.56, 20.52, 20.52, 20.44, 20.44, 20.44, 20.44, 20.56, 20.88, 20.84] -370.02 -18.500999999999998 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1701, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.195863723754883, 'TIME_S_1KI': 
5.994040989861777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.5689013671875, 'W': 22.02886990511616, 'J_1KI': 175.5255152070473, 'W_1KI': 12.950540802537425, 'W_D': 3.5278699051161624, 'J_D': 47.81508294677735, 'W_D_1KI': 2.0739975926608833, 'J_D_1KI': 1.2192813595889966} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 12, ..., 249993, 249997, + 250000]), + col_indices=tensor([ 8417, 19550, 34715, ..., 18246, 29072, 37036]), + values=tensor([0.0315, 0.0556, 0.7166, ..., 0.2758, 0.8792, 0.3364]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0639, 0.6431, 0.4064, ..., 0.4418, 0.0626, 0.0226]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.802817583084106 seconds + +[20.28, 20.24, 20.56, 20.4, 20.36, 20.56, 20.68, 20.88, 20.84, 20.84] +[21.04, 20.84, 20.76, 24.6, 25.84, 27.68, 28.56, 26.12, 25.8, 24.72, 24.8, 24.76, 24.96, 24.96] +14.600774765014648 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1805, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.802817583084106, 'TIME_S_1KI': 5.984940489243272, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 337.43533813476563, 'W': 23.11078306223205} +[20.28, 20.24, 20.56, 20.4, 20.36, 20.56, 20.68, 20.88, 20.84, 20.84, 20.16, 20.04, 20.08, 20.2, 20.2, 20.36, 20.44, 20.52, 20.72, 20.72] +368.08000000000004 +18.404000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1805, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.802817583084106, 'TIME_S_1KI': 5.984940489243272, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 337.43533813476563, 'W': 23.11078306223205, 'J_1KI': 186.94478567023026, 'W_1KI': 12.803757929214433, 'W_D': 4.706783062232045, 'J_D': 68.72267935943601, 'W_D_1KI': 2.60763604555792, 'J_D_1KI': 1.4446737094503712} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.json index 911c49b..e1102fc 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 193, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.676417112350464, "TIME_S_1KI": 55.318223380054214, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 327.75970675468443, "W": 22.393652951640597, "J_1KI": 1698.2368225631317, "W_1KI": 116.02928990487356, "W_D": 4.002652951640599, "J_D": 58.58393717646598, "W_D_1KI": 20.73913446445906, "J_D_1KI": 107.45665525626457} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 182, "MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.083391189575195, "TIME_S_1KI": 55.403248294369206, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 329.27284624099735, "W": 22.48290781479867, "J_1KI": 1809.191462862623, "W_1KI": 123.5324605208718, "W_D": 3.9759078147986706, "J_D": 58.22905530524258, "W_D_1KI": 21.84564733405863, "J_D_1KI": 120.03102930801444} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.output index 7534e40..7b030b2 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,34 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.431778192520142} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.760874509811401} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 57, 95, ..., 2499895, +tensor(crow_indices=tensor([ 0, 40, 90, ..., 2499907, + 2499950, 2500000]), + col_indices=tensor([ 5137, 7037, 7061, ..., 47893, 48222, 48297]), + values=tensor([0.2856, 0.6560, 0.8379, ..., 0.9963, 0.9596, 0.9129]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8414, 0.0757, 0.2660, ..., 0.4452, 0.9388, 0.6329]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 5.760874509811401 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 182 -ss 50000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.083391189575195} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 51, 106, ..., 2499891, 2499944, 2500000]), - col_indices=tensor([ 14, 1180, 1352, ..., 49220, 49912, 49936]), - values=tensor([0.8618, 0.4205, 0.6419, ..., 0.4989, 0.5508, 0.1652]), + col_indices=tensor([ 982, 1922, 2289, ..., 46268, 46530, 48275]), + values=tensor([0.0968, 0.2247, 0.7152, ..., 0.3494, 0.1935, 0.0398]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3965, 0.7585, 0.8670, ..., 0.1152, 0.9413, 0.0865]) +tensor([0.0363, 0.2596, 0.3372, ..., 0.9210, 0.3896, 0.9305]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 5.431778192520142 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 193 -ss 50000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.676417112350464} +Time: 10.083391189575195 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 57, 106, ..., 2499905, - 2499942, 2500000]), - col_indices=tensor([ 275, 2452, 2625, ..., 47289, 48937, 49987]), - values=tensor([0.8108, 0.0031, 0.6812, ..., 0.9899, 0.5982, 0.1156]), +tensor(crow_indices=tensor([ 0, 51, 106, ..., 2499891, + 2499944, 2500000]), + col_indices=tensor([ 982, 1922, 2289, ..., 46268, 46530, 48275]), + values=tensor([0.0968, 0.2247, 0.7152, ..., 0.3494, 0.1935, 0.0398]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1893, 0.7932, 0.1409, ..., 0.0408, 0.4757, 0.3205]) +tensor([0.0363, 0.2596, 0.3372, ..., 0.9210, 0.3896, 0.9305]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,30 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.676417112350464 seconds +Time: 10.083391189575195 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 57, 106, ..., 2499905, - 2499942, 2500000]), - col_indices=tensor([ 275, 2452, 2625, ..., 47289, 48937, 49987]), - values=tensor([0.8108, 0.0031, 0.6812, ..., 0.9899, 0.5982, 0.1156]), - size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1893, 0.7932, 0.1409, ..., 0.0408, 0.4757, 0.3205]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 2500000 -Density: 0.001 -Time: 10.676417112350464 seconds - -[20.32, 20.4, 20.88, 20.88, 20.88, 20.88, 20.64, 20.24, 20.48, 20.52] -[20.48, 20.64, 20.88, 22.0, 23.72, 25.32, 26.2, 26.2, 26.28, 25.08, 24.6, 24.56, 24.36, 24.36] -14.636276960372925 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.676417112350464, 'TIME_S_1KI': 55.318223380054214, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 327.75970675468443, 'W': 22.393652951640597} -[20.32, 20.4, 20.88, 20.88, 20.88, 20.88, 20.64, 20.24, 20.48, 20.52, 20.24, 20.24, 20.24, 20.4, 20.4, 20.2, 20.16, 20.08, 20.16, 20.24] -367.81999999999994 -18.391 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.676417112350464, 'TIME_S_1KI': 55.318223380054214, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 327.75970675468443, 'W': 22.393652951640597, 'J_1KI': 1698.2368225631317, 'W_1KI': 116.02928990487356, 'W_D': 4.002652951640599, 'J_D': 58.58393717646598, 'W_D_1KI': 20.73913446445906, 'J_D_1KI': 107.45665525626457} +[20.36, 20.48, 20.36, 20.36, 20.44, 20.36, 20.36, 20.2, 20.2, 20.36] +[20.6, 20.48, 21.6, 22.36, 24.2, 25.16, 26.2, 25.8, 25.48, 25.48, 24.84, 24.64, 24.6, 24.56] +14.645474195480347 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 182, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.083391189575195, 'TIME_S_1KI': 55.403248294369206, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.27284624099735, 'W': 22.48290781479867} +[20.36, 20.48, 20.36, 20.36, 20.44, 20.36, 20.36, 20.2, 20.2, 20.36, 20.12, 20.44, 20.28, 20.4, 20.88, 21.0, 21.04, 21.24, 21.04, 21.28] +370.14 +18.506999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 182, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.083391189575195, 'TIME_S_1KI': 55.403248294369206, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.27284624099735, 'W': 22.48290781479867, 'J_1KI': 1809.191462862623, 'W_1KI': 123.5324605208718, 'W_D': 3.9759078147986706, 'J_D': 58.22905530524258, 'W_D_1KI': 21.84564733405863, 'J_D_1KI': 120.03102930801444} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.01.json index 0d711cb..238daf2 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.01.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.01.json @@ -1 
+1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 53.1586058139801, "TIME_S_1KI": 531.586058139801, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1662.8207085037227, "W": 23.65320095688087, "J_1KI": 16628.20708503723, "W_1KI": 236.53200956880872, "W_D": 4.901200956880867, "J_D": 344.55456842803903, "W_D_1KI": 49.01200956880867, "J_D_1KI": 490.12009568808674} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 53.2018678188324, "TIME_S_1KI": 532.018678188324, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1611.726173362732, "W": 23.645525596360955, "J_1KI": 16117.26173362732, "W_1KI": 236.45525596360955, "W_D": 5.302525596360958, "J_D": 361.43071778011347, "W_D_1KI": 53.02525596360958, "J_D_1KI": 530.2525596360958} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.01.output index 142816b..5cc7744 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.01 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 53.1586058139801} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 53.2018678188324} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 489, 973, ..., 24998974, - 24999478, 25000000]), - col_indices=tensor([ 275, 454, 699, ..., 49715, 49729, 49796]), - values=tensor([0.3350, 0.9556, 0.9308, ..., 0.7756, 0.4208, 0.8843]), +tensor(crow_indices=tensor([ 0, 513, 981, ..., 24998959, + 24999509, 25000000]), + col_indices=tensor([ 1090, 1172, 1302, ..., 49749, 49921, 49980]), + values=tensor([0.8467, 0.6526, 0.0884, ..., 0.9518, 0.6309, 0.1810]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3743, 0.4258, 0.0327, ..., 0.7931, 0.5462, 0.4257]) +tensor([0.7712, 0.6597, 0.5718, ..., 0.6878, 0.4395, 0.4563]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,16 +16,16 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 53.1586058139801 seconds +Time: 53.2018678188324 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 489, 973, ..., 24998974, - 24999478, 25000000]), - col_indices=tensor([ 275, 454, 699, ..., 49715, 49729, 49796]), - values=tensor([0.3350, 0.9556, 0.9308, ..., 0.7756, 0.4208, 0.8843]), +tensor(crow_indices=tensor([ 0, 513, 981, ..., 24998959, + 24999509, 25000000]), + col_indices=tensor([ 1090, 1172, 1302, ..., 49749, 49921, 49980]), + values=tensor([0.8467, 0.6526, 0.0884, ..., 0.9518, 0.6309, 0.1810]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3743, 0.4258, 0.0327, ..., 0.7931, 0.5462, 0.4257]) +tensor([0.7712, 0.6597, 0.5718, ..., 0.6878, 0.4395, 0.4563]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -33,13 +33,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 53.1586058139801 seconds +Time: 53.2018678188324 seconds -[20.44, 20.76, 21.08, 21.12, 21.36, 21.16, 21.04, 21.0, 20.96, 20.84] -[21.0, 20.84, 20.84, 23.72, 24.72, 26.8, 28.0, 29.48, 28.44, 28.04, 28.08, 27.92, 28.6, 27.68, 27.36, 26.52, 25.72, 24.68, 24.64, 24.72, 24.64, 24.64, 24.68, 24.68, 24.56, 24.76, 24.76, 24.64, 24.68, 24.64, 24.8, 24.64, 25.04, 24.96, 24.84, 24.88, 24.68, 24.68, 24.64, 24.8, 24.72, 24.72, 24.68, 24.56, 24.36, 24.36, 24.48, 24.4, 24.44, 24.4, 24.6, 24.56, 24.72, 24.96, 24.92, 24.84, 24.76, 24.72, 24.76, 24.8, 24.88, 24.92, 24.76, 24.68, 24.56, 24.64, 24.6] -70.30002880096436 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 53.1586058139801, 'TIME_S_1KI': 531.586058139801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1662.8207085037227, 'W': 23.65320095688087} -[20.44, 20.76, 21.08, 21.12, 21.36, 21.16, 21.04, 21.0, 20.96, 20.84, 20.6, 20.6, 20.72, 20.72, 20.68, 20.56, 20.76, 20.64, 20.6, 20.68] -375.04 -18.752000000000002 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 53.1586058139801, 'TIME_S_1KI': 531.586058139801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1662.8207085037227, 'W': 23.65320095688087, 'J_1KI': 16628.20708503723, 'W_1KI': 236.53200956880872, 'W_D': 4.901200956880867, 'J_D': 344.55456842803903, 'W_D_1KI': 49.01200956880867, 'J_D_1KI': 490.12009568808674} +[20.48, 20.44, 20.68, 20.68, 20.44, 20.48, 20.44, 20.32, 20.44, 20.56] +[20.56, 20.48, 20.56, 24.2, 24.84, 26.92, 29.04, 30.2, 29.4, 28.96, 28.96, 28.32, 28.76, 26.88, 26.6, 26.24, 24.96, 24.56, 24.4, 24.44, 24.44, 24.76, 24.72, 24.64, 24.6, 24.52, 24.4, 24.32, 24.52, 24.52, 24.68, 24.76, 24.92, 24.96, 24.8, 25.0, 24.84, 24.84, 24.72, 24.72, 24.72, 24.96, 24.96, 24.96, 24.84, 24.96, 24.68, 24.52, 24.6, 24.68, 24.72, 24.8, 24.84, 24.8, 24.64, 24.6, 24.52, 24.48, 24.64, 24.6, 24.76, 24.68, 24.48, 24.48, 24.36] +68.16199398040771 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 
25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 53.2018678188324, 'TIME_S_1KI': 532.018678188324, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1611.726173362732, 'W': 23.645525596360955} +[20.48, 20.44, 20.68, 20.68, 20.44, 20.48, 20.44, 20.32, 20.44, 20.56, 20.24, 20.24, 20.2, 20.24, 20.36, 20.2, 20.12, 20.4, 20.36, 20.36] +366.85999999999996 +18.342999999999996 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 53.2018678188324, 'TIME_S_1KI': 532.018678188324, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1611.726173362732, 'W': 23.645525596360955, 'J_1KI': 16117.26173362732, 'W_1KI': 236.45525596360955, 'W_D': 5.302525596360958, 'J_D': 361.43071778011347, 'W_D_1KI': 53.02525596360958, 'J_D_1KI': 530.2525596360958} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.json index 458d682..d8d6b08 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10429, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.529776096343994, "TIME_S_1KI": 1.0096630641810331, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 331.7018713665009, "W": 22.733124806095596, "J_1KI": 31.80572167671885, "W_1KI": 2.1797990992516634, "W_D": 4.373124806095593, "J_D": 63.8088117790222, "W_D_1KI": 0.41932350235838456, "J_D_1KI": 0.040207450604888735} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10737, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.584118604660034, "TIME_S_1KI": 0.9857612559057496, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 341.27173243522645, "W": 23.373566767227835, "J_1KI": 31.78464491340472, "W_1KI": 2.176917832469762, "W_D": 4.731566767227836, "J_D": 69.08444927835465, "W_D_1KI": 0.4406786595164232, "J_D_1KI": 0.04104299706774921} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.output index f7f583d..f384557 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1077582836151123} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.10845208168029785} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is 
in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24997, 24998, 25000]), - col_indices=tensor([30956, 25020, 4290, ..., 1571, 5930, 34059]), - values=tensor([0.1925, 0.5429, 0.7430, ..., 0.0669, 0.5504, 0.8934]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), + col_indices=tensor([10441, 12058, 1165, ..., 36295, 43132, 4272]), + values=tensor([0.3253, 0.3276, 0.1912, ..., 0.9523, 0.2694, 0.6554]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9564, 0.4579, 0.9465, ..., 0.7236, 0.9546, 0.7676]) +tensor([0.6034, 0.6498, 0.5195, ..., 0.6962, 0.2706, 0.7770]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.1077582836151123 seconds +Time: 0.10845208168029785 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 9744 -ss 50000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.809481859207153} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 9681 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.466950178146362} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), - col_indices=tensor([47588, 45161, 40455, ..., 30522, 42036, 2005]), - values=tensor([0.2055, 0.2802, 0.2448, ..., 0.0926, 0.8451, 0.9361]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([16649, 30767, 40043, ..., 14732, 49411, 48251]), + values=tensor([0.2763, 0.6267, 0.8853, ..., 0.1180, 0.1805, 0.1584]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5605, 0.1853, 0.0043, ..., 0.6007, 0.1968, 0.9775]) +tensor([0.0516, 0.0339, 0.9930, ..., 0.5767, 0.5756, 0.9045]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 9.809481859207153 seconds +Time: 9.466950178146362 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10429 -ss 50000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.529776096343994} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10737 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.584118604660034} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 24998, 24999, 25000]), - col_indices=tensor([20158, 23859, 20874, ..., 41939, 15422, 41283]), - values=tensor([0.7225, 0.1851, 0.6655, ..., 0.6086, 0.8791, 0.8414]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 25000, 25000]), + col_indices=tensor([ 5127, 27266, 30482, ..., 7060, 29930, 33047]), + values=tensor([0.1376, 0.9046, 0.0909, ..., 0.3129, 0.7703, 0.5618]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1130, 0.7539, 0.9598, ..., 0.4914, 0.7455, 0.6539]) +tensor([0.5979, 0.8287, 0.9902, ..., 0.5455, 0.7521, 0.9043]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.529776096343994 seconds +Time: 10.584118604660034 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 24998, 24999, 25000]), - col_indices=tensor([20158, 23859, 20874, ..., 41939, 15422, 41283]), - values=tensor([0.7225, 0.1851, 0.6655, ..., 0.6086, 0.8791, 0.8414]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 25000, 25000]), + col_indices=tensor([ 5127, 27266, 30482, ..., 7060, 29930, 33047]), + values=tensor([0.1376, 0.9046, 0.0909, ..., 0.3129, 0.7703, 0.5618]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1130, 0.7539, 0.9598, ..., 0.4914, 0.7455, 0.6539]) +tensor([0.5979, 0.8287, 0.9902, ..., 0.5455, 0.7521, 0.9043]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.529776096343994 seconds +Time: 10.584118604660034 seconds -[20.44, 20.44, 20.32, 20.64, 20.6, 20.48, 20.6, 20.36, 20.32, 20.16] -[20.36, 20.16, 20.76, 22.32, 23.88, 23.88, 25.12, 26.36, 26.64, 26.44, 26.08, 26.2, 25.88, 25.48] -14.591125249862671 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10429, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.529776096343994, 'TIME_S_1KI': 1.0096630641810331, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 331.7018713665009, 'W': 22.733124806095596} -[20.44, 20.44, 20.32, 20.64, 20.6, 20.48, 20.6, 20.36, 20.32, 20.16, 20.56, 20.32, 20.36, 20.56, 20.36, 20.28, 20.44, 20.16, 20.16, 20.44] -367.20000000000005 -18.360000000000003 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10429, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.529776096343994, 'TIME_S_1KI': 1.0096630641810331, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 331.7018713665009, 'W': 22.733124806095596, 'J_1KI': 31.80572167671885, 'W_1KI': 2.1797990992516634, 'W_D': 4.373124806095593, 'J_D': 63.8088117790222, 'W_D_1KI': 0.41932350235838456, 'J_D_1KI': 0.040207450604888735} +[20.16, 20.36, 20.52, 20.48, 20.72, 20.72, 20.8, 20.52, 20.48, 20.52] +[20.48, 20.56, 21.24, 23.48, 25.24, 26.28, 27.48, 27.48, 26.56, 25.88, 26.0, 26.0, 26.08, 26.12] +14.600755453109741 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10737, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.584118604660034, 'TIME_S_1KI': 0.9857612559057496, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 341.27173243522645, 'W': 23.373566767227835} +[20.16, 20.36, 20.52, 20.48, 20.72, 20.72, 20.8, 20.52, 20.48, 20.52, 20.68, 21.12, 21.04, 21.08, 21.0, 20.92, 20.68, 20.64, 20.76, 20.64] +372.84 +18.642 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10737, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.584118604660034, 'TIME_S_1KI': 0.9857612559057496, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 341.27173243522645, 'W': 23.373566767227835, 'J_1KI': 31.78464491340472, 'W_1KI': 2.176917832469762, 'W_D': 4.731566767227836, 'J_D': 69.08444927835465, 'W_D_1KI': 0.4406786595164232, 'J_D_1KI': 0.04104299706774921} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_5e-05.json index fea28ac..8608fc6 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3217, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.183324813842773, "TIME_S_1KI": 3.1654724320307035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 313.5762239074707, "W": 23.142141761096784, "J_1KI": 97.47473543906457, "W_1KI": 7.193702754459679, "W_D": 4.7631417610967866, "J_D": 64.54061265373231, "W_D_1KI": 1.4806160276956128, "J_D_1KI": 0.46024744410805496} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3224, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.37877106666565, "TIME_S_1KI": 3.2192217948714794, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 314.116130990982, "W": 23.234138612361182, "J_1KI": 97.43056172176861, "W_1KI": 7.206618676290689, "W_D": 4.5161386123611855, "J_D": 61.056362432956696, "W_D_1KI": 1.4007874107820055, "J_D_1KI": 0.4344874102921853} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_5e-05.output index dbda68a..e89d63c 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.3263556957244873} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.325664758682251} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 124996, 124997, +tensor(crow_indices=tensor([ 0, 2, 7, ..., 124996, 124999, 125000]), - col_indices=tensor([ 8999, 37078, 2648, ..., 24880, 43913, 47673]), - values=tensor([0.7939, 0.1706, 0.9831, ..., 0.2838, 0.4924, 0.0921]), + col_indices=tensor([ 6097, 42457, 3456, ..., 36348, 42448, 28927]), + values=tensor([0.4987, 0.3368, 0.7540, ..., 0.7918, 0.8816, 0.1270]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.2834, 0.1318, 0.3567, ..., 0.3503, 0.0519, 0.6169]) +tensor([0.7083, 0.2316, 0.4262, ..., 0.1086, 0.8747, 0.3498]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.3263556957244873 seconds +Time: 0.325664758682251 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3217 -ss 50000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.183324813842773} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3224 -ss 50000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.37877106666565} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 125000, 125000, +tensor(crow_indices=tensor([ 0, 4, 9, ..., 124994, 124996, 125000]), - col_indices=tensor([ 9508, 26799, 1812, ..., 32912, 38580, 39384]), - values=tensor([0.1038, 0.2683, 0.7729, ..., 0.6337, 0.2232, 0.8870]), + col_indices=tensor([15659, 16774, 27785, ..., 24457, 28743, 36442]), + values=tensor([0.7259, 0.8899, 0.5066, ..., 0.6604, 0.1626, 0.6211]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.7924, 0.9644, 0.0933, ..., 0.2945, 0.3904, 0.9557]) +tensor([0.0844, 0.2883, 0.1673, ..., 0.8919, 0.1822, 0.4280]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.183324813842773 seconds +Time: 10.37877106666565 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 125000, 125000, +tensor(crow_indices=tensor([ 0, 4, 9, ..., 124994, 124996, 125000]), - col_indices=tensor([ 9508, 26799, 1812, ..., 32912, 38580, 39384]), - values=tensor([0.1038, 0.2683, 0.7729, ..., 0.6337, 0.2232, 0.8870]), + col_indices=tensor([15659, 16774, 27785, ..., 24457, 28743, 36442]), + values=tensor([0.7259, 0.8899, 0.5066, ..., 0.6604, 0.1626, 0.6211]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.7924, 0.9644, 0.0933, ..., 0.2945, 0.3904, 0.9557]) +tensor([0.0844, 0.2883, 0.1673, ..., 0.8919, 0.1822, 0.4280]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.183324813842773 seconds +Time: 10.37877106666565 seconds -[20.36, 20.32, 20.32, 20.32, 20.36, 20.28, 20.48, 20.52, 20.64, 20.52] -[20.52, 20.48, 21.56, 23.32, 25.2, 26.32, 27.16, 27.16, 26.96, 26.76, 25.76, 25.84, 25.52] -13.550008773803711 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3217, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.183324813842773, 'TIME_S_1KI': 3.1654724320307035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 313.5762239074707, 'W': 23.142141761096784} -[20.36, 20.32, 20.32, 20.32, 20.36, 20.28, 20.48, 20.52, 20.64, 20.52, 20.52, 20.6, 20.44, 20.6, 20.48, 20.32, 20.2, 20.36, 20.4, 20.48] -367.5799999999999 -18.378999999999998 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3217, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.183324813842773, 'TIME_S_1KI': 3.1654724320307035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 313.5762239074707, 'W': 23.142141761096784, 'J_1KI': 97.47473543906457, 'W_1KI': 7.193702754459679, 'W_D': 4.7631417610967866, 'J_D': 64.54061265373231, 'W_D_1KI': 1.4806160276956128, 'J_D_1KI': 0.46024744410805496} +[20.16, 20.52, 20.52, 20.68, 21.04, 21.04, 20.8, 20.72, 20.52, 20.2] +[20.2, 20.2, 20.28, 24.4, 25.8, 27.76, 28.48, 29.28, 25.96, 25.12, 25.44, 25.48, 25.32] +13.519594430923462 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3224, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.37877106666565, 'TIME_S_1KI': 3.2192217948714794, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.116130990982, 'W': 23.234138612361182} +[20.16, 20.52, 20.52, 20.68, 21.04, 21.04, 20.8, 20.72, 20.52, 20.2, 20.56, 20.84, 21.04, 21.12, 21.16, 21.0, 21.0, 20.96, 20.68, 20.52] +374.35999999999996 +18.717999999999996 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3224, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.37877106666565, 'TIME_S_1KI': 3.2192217948714794, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 314.116130990982, 'W': 23.234138612361182, 'J_1KI': 97.43056172176861, 'W_1KI': 7.206618676290689, 'W_D': 4.5161386123611855, 'J_D': 61.056362432956696, 'W_D_1KI': 1.4007874107820055, 'J_D_1KI': 0.4344874102921853} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.json index 769abe1..9d5e4cc 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 97993, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.781827449798584, "TIME_S_1KI": 0.1100265064831017, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 315.41705104827884, "W": 21.541217597275665, "J_1KI": 3.218771249459439, "W_1KI": 0.21982404454681115, "W_D": 2.808217597275668, "J_D": 41.119296494484, "W_D_1KI": 0.02865732855689353, "J_D_1KI": 0.00029244260872606745} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 95747, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.263434171676636, "TIME_S_1KI": 0.10719327155604494, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 333.5903104114533, "W": 22.704418765935785, "J_1KI": 3.484081072111432, "W_1KI": 0.23712929664569946, "W_D": 4.341418765935785, "J_D": 63.78737322831156, "W_D_1KI": 0.04534260881213808, "J_D_1KI": 0.0004735668878621584} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.output index bb7f54b..55ca761 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0191800594329834} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.018416166305541992} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), - col_indices=tensor([3313, 1621, 3812, ..., 4525, 1664, 4698]), - values=tensor([0.0941, 0.2796, 0.9707, ..., 0.4661, 0.7642, 0.2416]), +tensor(crow_indices=tensor([ 0, 2, 2, ..., 2500, 2500, 2500]), + col_indices=tensor([ 735, 4326, 100, ..., 1042, 2421, 4766]), + values=tensor([0.6086, 0.7437, 0.5596, ..., 0.8625, 0.2710, 0.0986]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.5336, 0.9402, 0.6361, ..., 0.0126, 0.4753, 0.7232]) +tensor([0.9833, 0.8269, 0.5336, ..., 0.7281, 0.6185, 0.5983]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.0191800594329834 seconds +Time: 0.018416166305541992 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 54744 -ss 5000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.86583399772644} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 57015 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.25248908996582} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2500, 2500]), - col_indices=tensor([ 440, 3019, 2397, ..., 2648, 4224, 1471]), - values=tensor([0.9686, 0.9548, 0.6770, ..., 0.0683, 0.1247, 0.7029]), +tensor(crow_indices=tensor([ 0, 0, 3, ..., 2498, 2499, 2500]), + col_indices=tensor([ 369, 1877, 3345, ..., 21, 2128, 3541]), + values=tensor([0.3671, 0.0402, 0.3630, ..., 0.4019, 0.4016, 0.1662]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.3648, 0.8360, 0.9424, ..., 0.5773, 0.5768, 0.8650]) +tensor([0.2965, 0.8012, 0.0739, ..., 0.9768, 0.4941, 0.4983]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 5.86583399772644 seconds +Time: 6.25248908996582 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 97993 -ss 5000 -sd 0.0001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.781827449798584} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 95747 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.263434171676636} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 2498, 2500, 2500]), - col_indices=tensor([3713, 3378, 4473, ..., 4286, 2104, 3764]), - values=tensor([0.2566, 0.6316, 0.0221, ..., 0.9864, 0.6559, 0.8912]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 2499, 2500, 2500]), + col_indices=tensor([1213, 4481, 360, ..., 4227, 4904, 3632]), + values=tensor([0.3676, 0.2026, 0.1814, ..., 0.2745, 0.8391, 0.9884]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.3213, 0.3541, 0.7168, ..., 0.5598, 0.7087, 0.6560]) +tensor([0.3591, 0.1818, 0.1202, ..., 0.7531, 0.0604, 0.6207]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.781827449798584 seconds +Time: 10.263434171676636 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 2498, 2500, 2500]), - col_indices=tensor([3713, 3378, 4473, ..., 4286, 2104, 3764]), - values=tensor([0.2566, 0.6316, 0.0221, ..., 0.9864, 0.6559, 0.8912]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 2499, 2500, 2500]), + col_indices=tensor([1213, 4481, 360, ..., 4227, 4904, 3632]), + values=tensor([0.3676, 0.2026, 0.1814, ..., 0.2745, 0.8391, 0.9884]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.3213, 0.3541, 0.7168, ..., 0.5598, 0.7087, 0.6560]) +tensor([0.3591, 0.1818, 0.1202, ..., 0.7531, 0.0604, 0.6207]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.781827449798584 seconds +Time: 10.263434171676636 seconds -[20.44, 20.48, 20.88, 20.96, 20.96, 20.92, 20.72, 20.72, 20.56, 20.48] -[20.44, 20.32, 20.6, 21.56, 23.32, 24.04, 24.72, 24.96, 24.56, 23.56, 23.68, 23.44, 23.52, 23.56] -14.642489433288574 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 97993, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.781827449798584, 'TIME_S_1KI': 0.1100265064831017, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.41705104827884, 'W': 21.541217597275665} -[20.44, 20.48, 20.88, 20.96, 20.96, 20.92, 20.72, 20.72, 20.56, 20.48, 20.36, 20.44, 20.44, 20.32, 20.4, 20.68, 21.08, 21.76, 21.76, 21.88] -374.65999999999997 -18.732999999999997 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 97993, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.781827449798584, 'TIME_S_1KI': 0.1100265064831017, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.41705104827884, 'W': 21.541217597275665, 'J_1KI': 3.218771249459439, 'W_1KI': 0.21982404454681115, 'W_D': 2.808217597275668, 'J_D': 41.119296494484, 'W_D_1KI': 0.02865732855689353, 'J_D_1KI': 0.00029244260872606745} +[20.84, 20.68, 20.64, 20.56, 20.56, 20.56, 20.6, 20.48, 20.52, 20.56] +[20.88, 20.76, 23.04, 24.4, 25.92, 26.52, 27.36, 27.36, 24.8, 24.32, 23.56, 23.64, 23.56, 23.4] +14.692748308181763 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 95747, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.263434171676636, 'TIME_S_1KI': 0.10719327155604494, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 333.5903104114533, 'W': 22.704418765935785} +[20.84, 20.68, 20.64, 20.56, 20.56, 20.56, 20.6, 20.48, 20.52, 20.56, 20.0, 20.24, 20.4, 20.24, 20.28, 20.28, 20.04, 20.28, 20.16, 20.08] +367.26 +18.363 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 95747, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.263434171676636, 'TIME_S_1KI': 0.10719327155604494, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 333.5903104114533, 'W': 22.704418765935785, 'J_1KI': 3.484081072111432, 'W_1KI': 0.23712929664569946, 'W_D': 4.341418765935785, 'J_D': 63.78737322831156, 'W_D_1KI': 0.04534260881213808, 'J_D_1KI': 0.0004735668878621584} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.json 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.json index 1de7362..54b5f5b 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 17801, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.451899290084839, "TIME_S_1KI": 0.5871523672875029, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 335.3643196678161, "W": 22.918247866906853, "J_1KI": 18.839633709781257, "W_1KI": 1.2874696852371694, "W_D": 4.382247866906855, "J_D": 64.12573871421813, "W_D_1KI": 0.24617987005824704, "J_D_1KI": 0.013829552837382564} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 17473, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.709591150283813, "TIME_S_1KI": 0.612922288690197, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 324.97824934959414, "W": 22.14868587809875, "J_1KI": 18.598881093664176, "W_1KI": 1.2675949108967408, "W_D": 3.5076858780987514, "J_D": 51.466783275842694, "W_D_1KI": 0.20074891993926353, "J_D_1KI": 0.011489092882691211} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.output index 6e9f06a..66e7d6f 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06674790382385254} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.0665597915649414} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 24994, 24998, 25000]), - col_indices=tensor([ 153, 1166, 1591, ..., 4476, 1654, 3013]), - values=tensor([0.9133, 0.8479, 0.0929, ..., 0.2328, 0.6185, 0.0308]), +tensor(crow_indices=tensor([ 0, 5, 14, ..., 24994, 24998, 25000]), + col_indices=tensor([ 507, 638, 1055, ..., 4798, 3833, 3997]), + values=tensor([0.8063, 0.4305, 0.6118, ..., 0.6098, 0.4042, 0.1710]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9627, 0.4329, 0.3045, ..., 0.2813, 0.7730, 0.0924]) +tensor([0.6867, 0.0560, 0.6335, ..., 0.3316, 0.3395, 0.5116]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.06674790382385254 seconds +Time: 0.0665597915649414 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 15730 -ss 5000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.27815866470337} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 15775 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.479244947433472} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 24988, 24995, 25000]), - col_indices=tensor([1294, 410, 634, ..., 1096, 2182, 3875]), - values=tensor([0.7576, 0.8466, 0.6529, ..., 0.8373, 0.3120, 0.9707]), +tensor(crow_indices=tensor([ 0, 4, 11, ..., 24992, 24995, 25000]), + col_indices=tensor([2207, 4124, 4643, ..., 2062, 3898, 4585]), + values=tensor([0.5047, 0.0690, 0.0441, ..., 0.5027, 0.1579, 0.0562]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6367, 0.3440, 0.8123, ..., 0.8035, 0.7344, 0.3858]) +tensor([0.6664, 0.4626, 0.0519, ..., 0.2284, 0.8294, 0.4871]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 9.27815866470337 seconds +Time: 9.479244947433472 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17801 -ss 5000 -sd 0.001 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.451899290084839} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17473 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.709591150283813} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 24991, 24996, 25000]), - col_indices=tensor([ 139, 2091, 2694, ..., 3635, 3692, 4401]), - values=tensor([0.7198, 0.5125, 0.0166, ..., 0.6335, 0.1279, 0.4059]), +tensor(crow_indices=tensor([ 0, 6, 16, ..., 24987, 24994, 25000]), + col_indices=tensor([2119, 2193, 3608, ..., 1895, 4273, 4313]), + values=tensor([0.0069, 0.3550, 0.7527, ..., 0.0941, 0.9384, 0.2112]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0330, 0.5472, 0.9005, ..., 0.3693, 0.0673, 0.4597]) +tensor([0.9071, 0.9833, 0.8622, ..., 0.4214, 0.4481, 0.9386]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.451899290084839 seconds +Time: 10.709591150283813 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 24991, 24996, 25000]), - col_indices=tensor([ 139, 2091, 2694, ..., 3635, 3692, 4401]), - values=tensor([0.7198, 0.5125, 0.0166, ..., 0.6335, 0.1279, 0.4059]), +tensor(crow_indices=tensor([ 0, 6, 16, ..., 24987, 24994, 25000]), + col_indices=tensor([2119, 2193, 3608, ..., 1895, 4273, 4313]), + values=tensor([0.0069, 0.3550, 0.7527, ..., 0.0941, 0.9384, 0.2112]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0330, 0.5472, 0.9005, ..., 0.3693, 0.0673, 0.4597]) +tensor([0.9071, 0.9833, 0.8622, ..., 0.4214, 0.4481, 0.9386]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.451899290084839 seconds +Time: 10.709591150283813 seconds -[20.28, 20.36, 20.4, 20.6, 20.68, 20.64, 20.52, 20.48, 20.32, 20.24] -[20.4, 20.48, 23.8, 23.8, 25.36, 27.2, 28.0, 28.44, 25.2, 23.88, 23.72, 23.92, 24.08, 24.08] -14.63306975364685 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17801, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.451899290084839, 'TIME_S_1KI': 0.5871523672875029, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.3643196678161, 'W': 22.918247866906853} -[20.28, 20.36, 20.4, 20.6, 20.68, 20.64, 20.52, 20.48, 20.32, 20.24, 20.44, 20.64, 20.96, 20.68, 20.56, 20.68, 20.72, 20.76, 20.76, 20.96] -370.71999999999997 -18.535999999999998 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17801, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.451899290084839, 'TIME_S_1KI': 0.5871523672875029, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.3643196678161, 'W': 22.918247866906853, 'J_1KI': 18.839633709781257, 'W_1KI': 1.2874696852371694, 'W_D': 4.382247866906855, 'J_D': 64.12573871421813, 'W_D_1KI': 0.24617987005824704, 'J_D_1KI': 0.013829552837382564} +[20.52, 20.72, 20.68, 20.56, 20.52, 20.56, 20.48, 20.48, 20.6, 20.56] +[20.68, 20.76, 21.92, 21.92, 23.64, 25.88, 26.32, 26.52, 25.52, 23.72, 23.6, 23.68, 23.72, 23.4] +14.672574758529663 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17473, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.709591150283813, 'TIME_S_1KI': 0.612922288690197, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.97824934959414, 'W': 22.14868587809875} +[20.52, 20.72, 20.68, 20.56, 20.52, 20.56, 20.48, 20.48, 20.6, 20.56, 20.68, 20.8, 21.4, 21.44, 21.04, 20.88, 20.8, 20.8, 20.16, 20.04] +372.82 +18.641 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17473, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.709591150283813, 'TIME_S_1KI': 0.612922288690197, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.97824934959414, 'W': 22.14868587809875, 'J_1KI': 18.598881093664176, 'W_1KI': 1.2675949108967408, 'W_D': 3.5076858780987514, 'J_D': 51.466783275842694, 'W_D_1KI': 0.20074891993926353, 'J_D_1KI': 0.011489092882691211} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.json 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.json index d1eb1c9..ddb34f3 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1927, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.278133630752563, "TIME_S_1KI": 5.333748640764174, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 315.9295339012146, "W": 21.68469080536072, "J_1KI": 163.94890186881918, "W_1KI": 11.253082929611168, "W_D": 3.326690805360723, "J_D": 48.46736737012868, "W_D_1KI": 1.7263574495904115, "J_D_1KI": 0.8958782820915472} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1833, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.383540153503418, "TIME_S_1KI": 5.6647791344808605, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 292.9585527515411, "W": 21.694089269658985, "J_1KI": 159.82463325234104, "W_1KI": 11.835291472809049, "W_D": 3.3000892696589865, "J_D": 44.564644515514345, "W_D_1KI": 1.8003760336382906, "J_D_1KI": 0.9822018732342012} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.output index 9788e34..8dfa4d8 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.01 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.5448694229125977} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.5728230476379395} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 55, 104, ..., 249896, 249951, +tensor(crow_indices=tensor([ 0, 43, 96, ..., 249904, 249954, 250000]), - col_indices=tensor([ 128, 142, 245, ..., 4657, 4734, 4838]), - values=tensor([0.1820, 0.1438, 0.1562, ..., 0.6881, 0.0081, 0.4382]), + col_indices=tensor([ 95, 346, 388, ..., 4782, 4863, 4911]), + values=tensor([0.3664, 0.9331, 0.5456, ..., 0.6113, 0.2274, 0.2730]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7604, 0.0772, 0.6951, ..., 0.4926, 0.6864, 0.3702]) +tensor([0.4281, 0.8072, 0.4313, ..., 0.4409, 0.3944, 0.1429]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.5448694229125977 seconds +Time: 0.5728230476379395 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1927 -ss 5000 -sd 0.01 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.278133630752563} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1833 -ss 5000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.383540153503418} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 52, 109, ..., 249898, 249957, +tensor(crow_indices=tensor([ 0, 46, 90, ..., 249904, 249947, 250000]), - col_indices=tensor([ 25, 140, 158, ..., 4486, 4823, 4835]), - values=tensor([0.8176, 0.1521, 0.6094, ..., 0.2740, 0.3181, 0.5161]), + col_indices=tensor([ 12, 53, 87, ..., 4543, 4695, 4843]), + values=tensor([0.5985, 0.4900, 0.8442, ..., 0.5790, 0.3470, 0.4042]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4533, 0.4508, 0.3256, ..., 0.6556, 0.1742, 0.9221]) +tensor([0.7634, 0.7341, 0.0971, ..., 0.1366, 0.5741, 0.8769]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.278133630752563 seconds +Time: 10.383540153503418 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 52, 109, ..., 249898, 249957, +tensor(crow_indices=tensor([ 0, 46, 90, ..., 249904, 249947, 250000]), - col_indices=tensor([ 25, 140, 158, ..., 4486, 4823, 4835]), - values=tensor([0.8176, 0.1521, 0.6094, ..., 0.2740, 0.3181, 0.5161]), + col_indices=tensor([ 12, 53, 87, ..., 4543, 4695, 4843]), + values=tensor([0.5985, 0.4900, 0.8442, ..., 0.5790, 0.3470, 0.4042]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4533, 0.4508, 0.3256, ..., 0.6556, 0.1742, 0.9221]) +tensor([0.7634, 0.7341, 0.0971, ..., 0.1366, 0.5741, 0.8769]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.278133630752563 seconds +Time: 10.383540153503418 seconds -[20.28, 20.32, 20.56, 20.6, 20.48, 20.56, 20.76, 20.68, 20.88, 20.88] -[20.72, 20.48, 20.56, 21.28, 22.24, 24.0, 24.68, 25.2, 25.04, 23.88, 23.88, 24.04, 24.2, 24.4] -14.56924319267273 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1927, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.278133630752563, 'TIME_S_1KI': 5.333748640764174, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.9295339012146, 'W': 21.68469080536072} -[20.28, 20.32, 20.56, 20.6, 20.48, 20.56, 20.76, 20.68, 20.88, 20.88, 20.16, 20.08, 20.16, 20.16, 20.24, 20.36, 20.08, 20.2, 20.28, 20.2] -367.15999999999997 -18.357999999999997 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1927, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.278133630752563, 'TIME_S_1KI': 5.333748640764174, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.9295339012146, 'W': 21.68469080536072, 'J_1KI': 163.94890186881918, 'W_1KI': 11.253082929611168, 'W_D': 3.326690805360723, 'J_D': 48.46736737012868, 'W_D_1KI': 1.7263574495904115, 'J_D_1KI': 0.8958782820915472} +[20.48, 20.48, 20.52, 20.36, 20.32, 20.2, 20.36, 20.44, 20.44, 20.44] +[20.44, 20.56, 20.76, 21.96, 22.6, 24.4, 25.2, 25.2, 25.16, 24.2, 24.2, 24.2, 24.2] +13.504072427749634 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1833, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.383540153503418, 'TIME_S_1KI': 5.6647791344808605, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 292.9585527515411, 'W': 21.694089269658985} +[20.48, 20.48, 20.52, 20.36, 20.32, 20.2, 20.36, 20.44, 20.44, 20.44, 20.32, 20.12, 20.24, 20.52, 20.6, 20.6, 20.88, 20.64, 20.36, 20.36] +367.88 +18.394 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1833, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.383540153503418, 'TIME_S_1KI': 5.6647791344808605, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 292.9585527515411, 'W': 21.694089269658985, 'J_1KI': 159.82463325234104, 'W_1KI': 11.835291472809049, 'W_D': 3.3000892696589865, 'J_D': 44.564644515514345, 'W_D_1KI': 1.8003760336382906, 'J_D_1KI': 0.9822018732342012} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.json 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.json index 942bb7d..2707bfa 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 393, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.605815887451172, "TIME_S_1KI": 26.986808873921557, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 343.57593024253845, "W": 23.547531420592268, "J_1KI": 874.239008250734, "W_1KI": 59.917382749598644, "W_D": 5.065531420592269, "J_D": 73.90985657548903, "W_D_1KI": 12.889392927715695, "J_D_1KI": 32.79743747510355} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 396, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.44465947151184, "TIME_S_1KI": 26.37540270583798, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 357.4221782398224, "W": 22.896313160260082, "J_1KI": 902.5812581813697, "W_1KI": 57.818972626919404, "W_D": 4.42131316026008, "J_D": 69.01877037405966, "W_D_1KI": 11.16493222287899, "J_D_1KI": 28.19427329009846} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.output index 5869b7e..4c48fa2 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.6673474311828613} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.6507058143615723} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
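[Editor's note, not part of the patch] For orientation, the commands and TIME_S values in these logs come from a timed sparse matrix-vector multiply over a synthetic CSR matrix. spmv.py is not included in this portion of the diff, so the sketch below only illustrates the general shape of such a loop: apart from the to_sparse_csr() conversion line, which is quoted verbatim in the warnings, every detail (matrix generation, the @ product, the calibration behaviour) is an assumption.

```python
# A minimal sketch of a timed SpMV loop consistent with these logs.
import time
import torch

def run_spmv(size=5000, density=0.05, iterations=100):
    # Synthetic matrix: keep roughly `density` of uniformly random entries
    # (assumed generation scheme; the actual script may differ).
    dense = torch.rand(size, size) * (torch.rand(size, size) < density)
    matrix = dense.to_sparse_csr().type(torch.float32)  # line quoted in the log
    vector = torch.rand(size)                           # dense RHS printed in the log

    start = time.time()
    for _ in range(iterations):
        y = matrix @ vector                             # sparse CSR x dense vector
    elapsed = time.time() - start                       # corresponds to TIME_S
    return elapsed
```

The logs suggest a calibration pass (e.g. 100 iterations) whose TIME_S is then used to scale ITERATIONS so the measured loop runs for roughly 10 seconds (BASELINE_TIME_S), except at the higher densities where 100 iterations already exceed 10 s.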
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 248, 507, ..., 1249488, - 1249771, 1250000]), - col_indices=tensor([ 0, 22, 35, ..., 4958, 4983, 4999]), - values=tensor([0.4233, 0.1325, 0.2059, ..., 0.9744, 0.8399, 0.1366]), +tensor(crow_indices=tensor([ 0, 234, 496, ..., 1249497, + 1249774, 1250000]), + col_indices=tensor([ 44, 54, 71, ..., 4912, 4980, 4999]), + values=tensor([0.9715, 0.0417, 0.5412, ..., 0.8388, 0.4566, 0.9787]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.6304, 0.5951, 0.1863, ..., 0.0552, 0.3796, 0.7701]) +tensor([0.5381, 0.6562, 0.7136, ..., 0.2755, 0.3235, 0.3236]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 2.6673474311828613 seconds +Time: 2.6507058143615723 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 393 -ss 5000 -sd 0.05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.605815887451172} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 396 -ss 5000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.44465947151184} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 284, 548, ..., 1249494, - 1249762, 1250000]), - col_indices=tensor([ 9, 27, 28, ..., 4894, 4914, 4954]), - values=tensor([0.8223, 0.3728, 0.3102, ..., 0.8633, 0.4361, 0.2072]), +tensor(crow_indices=tensor([ 0, 238, 499, ..., 1249538, + 1249780, 1250000]), + col_indices=tensor([ 54, 59, 126, ..., 4967, 4989, 4999]), + values=tensor([0.2943, 0.9680, 0.2448, ..., 0.1328, 0.5177, 0.9242]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.4176, 0.5149, 0.4165, ..., 0.2240, 0.9505, 0.5242]) +tensor([0.0840, 0.5266, 0.2925, ..., 0.7387, 0.5450, 0.9821]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.605815887451172 seconds +Time: 10.44465947151184 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 284, 548, ..., 1249494, - 1249762, 1250000]), - col_indices=tensor([ 9, 27, 28, ..., 4894, 4914, 4954]), - values=tensor([0.8223, 0.3728, 0.3102, ..., 0.8633, 0.4361, 0.2072]), +tensor(crow_indices=tensor([ 0, 238, 499, ..., 1249538, + 1249780, 1250000]), + col_indices=tensor([ 54, 59, 126, ..., 4967, 4989, 4999]), + values=tensor([0.2943, 0.9680, 0.2448, ..., 0.1328, 0.5177, 0.9242]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.4176, 0.5149, 0.4165, ..., 0.2240, 0.9505, 0.5242]) +tensor([0.0840, 0.5266, 0.2925, ..., 0.7387, 0.5450, 0.9821]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.605815887451172 seconds +Time: 10.44465947151184 seconds -[20.32, 20.32, 20.44, 20.4, 20.52, 20.52, 20.64, 20.8, 20.92, 20.88] -[20.92, 21.04, 24.24, 26.28, 27.72, 27.72, 28.36, 29.08, 25.2, 24.24, 24.28, 24.28, 24.28, 24.2] -14.59074091911316 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 393, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.605815887451172, 'TIME_S_1KI': 26.986808873921557, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 343.57593024253845, 'W': 23.547531420592268} -[20.32, 20.32, 20.44, 20.4, 20.52, 20.52, 20.64, 20.8, 20.92, 20.88, 20.24, 20.28, 20.52, 20.64, 20.6, 20.56, 20.52, 20.48, 20.48, 20.56] -369.64 -18.482 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 393, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.605815887451172, 'TIME_S_1KI': 26.986808873921557, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 343.57593024253845, 'W': 23.547531420592268, 'J_1KI': 874.239008250734, 'W_1KI': 59.917382749598644, 'W_D': 5.065531420592269, 'J_D': 73.90985657548903, 'W_D_1KI': 12.889392927715695, 'J_D_1KI': 32.79743747510355} +[20.48, 20.32, 20.16, 20.24, 20.24, 20.24, 20.32, 20.24, 20.52, 20.44] +[20.44, 20.68, 20.56, 23.84, 25.48, 27.28, 28.2, 28.52, 24.88, 24.16, 24.0, 24.24, 24.32, 24.08, 24.28] +15.610468626022339 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 396, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.44465947151184, 'TIME_S_1KI': 26.37540270583798, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.4221782398224, 'W': 22.896313160260082} +[20.48, 20.32, 20.16, 20.24, 20.24, 20.24, 20.32, 20.24, 20.52, 20.44, 20.72, 20.64, 20.96, 20.92, 20.92, 20.88, 20.92, 20.68, 20.32, 20.32] +369.5 +18.475 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 396, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.44465947151184, 'TIME_S_1KI': 26.37540270583798, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 357.4221782398224, 'W': 22.896313160260082, 'J_1KI': 902.5812581813697, 'W_1KI': 57.818972626919404, 'W_D': 4.42131316026008, 'J_D': 69.01877037405966, 'W_D_1KI': 11.16493222287899, 'J_D_1KI': 28.19427329009846} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.json 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.json index 5939696..380f9a2 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 194, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.912250995635986, "TIME_S_1KI": 56.248716472350445, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 326.6964877033234, "W": 22.305376949262342, "J_1KI": 1684.0025139346567, "W_1KI": 114.97616984155846, "W_D": 3.7133769492623436, "J_D": 54.38810604286199, "W_D_1KI": 19.14111829516672, "J_D_1KI": 98.66555822250888} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 194, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.317471265792847, "TIME_S_1KI": 53.18284157625178, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 318.17810523986816, "W": 21.768206003636212, "J_1KI": 1640.0933259787018, "W_1KI": 112.20724744142377, "W_D": 3.159206003636214, "J_D": 46.176987673282646, "W_D_1KI": 16.28456702905265, "J_D_1KI": 83.9410671600652} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.output index 469c726..54e21a8 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.1 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.392450332641602} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.392406225204468} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 508, 1024, ..., 2499064, - 2499534, 2500000]), - col_indices=tensor([ 1, 4, 10, ..., 4973, 4986, 4993]), - values=tensor([0.4448, 0.2935, 0.6096, ..., 0.6772, 0.8304, 0.1969]), +tensor(crow_indices=tensor([ 0, 509, 1034, ..., 2499007, + 2499478, 2500000]), + col_indices=tensor([ 3, 23, 30, ..., 4972, 4977, 4989]), + values=tensor([0.8700, 0.9761, 0.7748, ..., 0.3262, 0.4651, 0.0904]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5801, 0.6662, 0.3258, ..., 0.9572, 0.7518, 0.3845]) +tensor([0.3291, 0.7750, 0.8899, ..., 0.8293, 0.9829, 0.7202]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 5.392450332641602 seconds +Time: 5.392406225204468 seconds ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 194 -ss 5000 -sd 0.1 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.912250995635986} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.317471265792847} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 510, 1021, ..., 2499033, - 2499527, 2500000]), - col_indices=tensor([ 27, 33, 84, ..., 4958, 4963, 4982]), - values=tensor([0.5404, 0.4129, 0.3312, ..., 0.4218, 0.5770, 0.4495]), +tensor(crow_indices=tensor([ 0, 506, 991, ..., 2499015, + 2499494, 2500000]), + col_indices=tensor([ 21, 27, 28, ..., 4979, 4982, 4992]), + values=tensor([0.7502, 0.0415, 0.3266, ..., 0.5968, 0.4348, 0.9268]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8518, 0.9058, 0.3829, ..., 0.5160, 0.0011, 0.3108]) +tensor([0.0304, 0.6835, 0.7819, ..., 0.2366, 0.8464, 0.6218]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.912250995635986 seconds +Time: 10.317471265792847 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 510, 1021, ..., 2499033, - 2499527, 2500000]), - col_indices=tensor([ 27, 33, 84, ..., 4958, 4963, 4982]), - values=tensor([0.5404, 0.4129, 0.3312, ..., 0.4218, 0.5770, 0.4495]), +tensor(crow_indices=tensor([ 0, 506, 991, ..., 2499015, + 2499494, 2500000]), + col_indices=tensor([ 21, 27, 28, ..., 4979, 4982, 4992]), + values=tensor([0.7502, 0.0415, 0.3266, ..., 0.5968, 0.4348, 0.9268]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8518, 0.9058, 0.3829, ..., 0.5160, 0.0011, 0.3108]) +tensor([0.0304, 0.6835, 0.7819, ..., 0.2366, 0.8464, 0.6218]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.912250995635986 seconds +Time: 10.317471265792847 seconds -[20.28, 20.36, 20.6, 20.76, 20.96, 20.8, 20.76, 20.64, 20.48, 20.76] -[20.88, 20.88, 20.64, 21.88, 22.92, 24.88, 26.08, 26.2, 25.72, 24.96, 24.48, 24.56, 24.76, 24.68] -14.646535158157349 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 194, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.912250995635986, 'TIME_S_1KI': 56.248716472350445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 326.6964877033234, 'W': 22.305376949262342} -[20.28, 20.36, 20.6, 20.76, 20.96, 20.8, 20.76, 20.64, 20.48, 20.76, 20.68, 20.08, 20.2, 20.52, 20.52, 20.72, 20.72, 21.32, 21.0, 21.08] -371.84 -18.592 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 194, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.912250995635986, 'TIME_S_1KI': 56.248716472350445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 326.6964877033234, 'W': 22.305376949262342, 'J_1KI': 1684.0025139346567, 'W_1KI': 114.97616984155846, 'W_D': 3.7133769492623436, 'J_D': 54.38810604286199, 'W_D_1KI': 19.14111829516672, 'J_D_1KI': 98.66555822250888} +[20.36, 20.52, 20.68, 20.76, 20.68, 21.04, 20.72, 20.52, 20.48, 20.4] +[20.4, 20.16, 20.4, 21.2, 22.4, 24.12, 25.16, 25.48, 25.2, 24.44, 24.16, 24.0, 24.28, 24.16] +14.61664342880249 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 194, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.317471265792847, 'TIME_S_1KI': 53.18284157625178, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 318.17810523986816, 'W': 21.768206003636212} +[20.36, 20.52, 20.68, 20.76, 20.68, 21.04, 20.72, 20.52, 20.48, 20.4, 20.72, 20.68, 20.72, 20.56, 20.88, 20.88, 20.72, 20.64, 20.68, 20.56] +372.17999999999995 +18.608999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 194, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.317471265792847, 'TIME_S_1KI': 53.18284157625178, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 318.17810523986816, 'W': 21.768206003636212, 'J_1KI': 1640.0933259787018, 'W_1KI': 112.20724744142377, 'W_D': 3.159206003636214, 'J_D': 46.176987673282646, 'W_D_1KI': 16.28456702905265, 'J_D_1KI': 83.9410671600652} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.2.json 
b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.2.json index 8d5c96a..57b525e 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.2.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.527036905288696, "TIME_S_1KI": 105.27036905288696, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 363.19735393524167, "W": 23.113061201495835, "J_1KI": 3631.973539352417, "W_1KI": 231.13061201495836, "W_D": 4.666061201495836, "J_D": 73.32222533869742, "W_D_1KI": 46.66061201495836, "J_D_1KI": 466.6061201495836} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.55970311164856, "TIME_S_1KI": 105.5970311164856, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 356.9031545257568, "W": 22.746465682058464, "J_1KI": 3569.031545257568, "W_1KI": 227.46465682058465, "W_D": 4.400465682058467, "J_D": 69.04545546817783, "W_D_1KI": 44.00465682058467, "J_D_1KI": 440.0465682058467} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.2.output index 9300b4b..a8ba021 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.2 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.527036905288696} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.55970311164856} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 996, 2006, ..., 4997968, - 4998974, 5000000]), - col_indices=tensor([ 4, 8, 12, ..., 4976, 4983, 4993]), - values=tensor([0.4991, 0.7024, 0.1537, ..., 0.4726, 0.2476, 0.0939]), +tensor(crow_indices=tensor([ 0, 1027, 2051, ..., 4998026, + 4998991, 5000000]), + col_indices=tensor([ 1, 3, 7, ..., 4966, 4991, 4998]), + values=tensor([0.9616, 0.5316, 0.4974, ..., 0.9954, 0.9065, 0.6509]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1571, 0.7792, 0.7385, ..., 0.2151, 0.4821, 0.5033]) +tensor([0.4589, 0.4640, 0.2413, ..., 0.6458, 0.9198, 0.8507]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.527036905288696 seconds +Time: 10.55970311164856 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 996, 2006, ..., 4997968, - 4998974, 5000000]), - col_indices=tensor([ 4, 8, 12, ..., 4976, 4983, 4993]), - values=tensor([0.4991, 0.7024, 0.1537, ..., 0.4726, 0.2476, 0.0939]), +tensor(crow_indices=tensor([ 0, 1027, 2051, ..., 4998026, + 4998991, 5000000]), + col_indices=tensor([ 1, 3, 7, ..., 4966, 4991, 4998]), + values=tensor([0.9616, 0.5316, 0.4974, ..., 0.9954, 0.9065, 0.6509]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1571, 0.7792, 0.7385, ..., 0.2151, 0.4821, 0.5033]) +tensor([0.4589, 0.4640, 0.2413, ..., 0.6458, 0.9198, 0.8507]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.527036905288696 seconds +Time: 10.55970311164856 seconds -[20.48, 20.44, 20.48, 20.64, 20.72, 20.72, 20.68, 20.72, 20.68, 20.6] -[20.4, 20.48, 21.0, 23.96, 25.48, 27.4, 28.52, 27.56, 26.24, 25.04, 24.48, 24.4, 24.4, 24.32, 24.48] -15.71394419670105 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.527036905288696, 'TIME_S_1KI': 105.27036905288696, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 363.19735393524167, 'W': 23.113061201495835} -[20.48, 20.44, 20.48, 20.64, 20.72, 20.72, 20.68, 20.72, 20.68, 20.6, 20.4, 20.16, 20.32, 20.32, 20.16, 20.56, 20.48, 20.52, 20.48, 20.24] -368.94 -18.447 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.527036905288696, 'TIME_S_1KI': 105.27036905288696, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 363.19735393524167, 'W': 23.113061201495835, 'J_1KI': 3631.973539352417, 'W_1KI': 231.13061201495836, 'W_D': 4.666061201495836, 'J_D': 73.32222533869742, 'W_D_1KI': 46.66061201495836, 'J_D_1KI': 466.6061201495836} +[20.36, 20.6, 20.68, 20.68, 20.48, 20.68, 20.64, 20.64, 20.68, 20.88] +[20.72, 20.56, 21.08, 21.8, 23.92, 25.72, 27.04, 27.04, 26.84, 25.16, 24.48, 24.6, 24.68, 
24.48, 24.48] +15.690488338470459 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.55970311164856, 'TIME_S_1KI': 105.5970311164856, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 356.9031545257568, 'W': 22.746465682058464} +[20.36, 20.6, 20.68, 20.68, 20.48, 20.68, 20.64, 20.64, 20.68, 20.88, 19.88, 19.88, 20.04, 20.24, 20.28, 20.32, 20.24, 20.12, 20.0, 20.32] +366.91999999999996 +18.345999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.55970311164856, 'TIME_S_1KI': 105.5970311164856, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 356.9031545257568, 'W': 22.746465682058464, 'J_1KI': 3569.031545257568, 'W_1KI': 227.46465682058465, 'W_D': 4.400465682058467, 'J_D': 69.04545546817783, 'W_D_1KI': 44.00465682058467, 'J_D_1KI': 440.0465682058467} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.3.json index fef5f93..d155ffd 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.3.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.927062749862671, "TIME_S_1KI": 159.2706274986267, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 517.2976983642578, "W": 23.521442010494468, "J_1KI": 5172.9769836425785, "W_1KI": 235.21442010494468, "W_D": 5.048442010494465, "J_D": 111.02837280082699, "W_D_1KI": 50.484420104944654, "J_D_1KI": 504.8442010494465} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.782387495040894, "TIME_S_1KI": 157.82387495040894, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 523.4366407585144, "W": 23.79272774089768, "J_1KI": 5234.366407585144, "W_1KI": 237.92727740897678, "W_D": 5.223727740897679, "J_D": 114.92127051210406, "W_D_1KI": 52.237277408976794, "J_D_1KI": 522.3727740897679} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.3.output index eaea98f..2dde6be 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.3 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.927062749862671} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, 
"MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.782387495040894} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1527, 3024, ..., 7496971, - 7498460, 7500000]), - col_indices=tensor([ 0, 3, 4, ..., 4985, 4992, 4996]), - values=tensor([0.7552, 0.2419, 0.2481, ..., 0.7383, 0.7786, 0.4470]), +tensor(crow_indices=tensor([ 0, 1434, 2918, ..., 7496956, + 7498470, 7500000]), + col_indices=tensor([ 3, 4, 5, ..., 4995, 4996, 4997]), + values=tensor([0.9895, 0.4913, 0.2478, ..., 0.3299, 0.7256, 0.9586]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.6238, 0.3406, 0.3665, ..., 0.0040, 0.2464, 0.8126]) +tensor([0.9076, 0.3308, 0.0614, ..., 0.4765, 0.1763, 0.9057]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 15.927062749862671 seconds +Time: 15.782387495040894 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1527, 3024, ..., 7496971, - 7498460, 7500000]), - col_indices=tensor([ 0, 3, 4, ..., 4985, 4992, 4996]), - values=tensor([0.7552, 0.2419, 0.2481, ..., 0.7383, 0.7786, 0.4470]), +tensor(crow_indices=tensor([ 0, 1434, 2918, ..., 7496956, + 7498470, 7500000]), + col_indices=tensor([ 3, 4, 5, ..., 4995, 4996, 4997]), + values=tensor([0.9895, 0.4913, 0.2478, ..., 0.3299, 0.7256, 0.9586]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.6238, 0.3406, 0.3665, ..., 0.0040, 0.2464, 0.8126]) +tensor([0.9076, 0.3308, 0.0614, ..., 0.4765, 0.1763, 0.9057]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 15.927062749862671 seconds +Time: 15.782387495040894 seconds -[20.4, 20.32, 20.32, 20.4, 20.48, 20.52, 20.8, 20.76, 20.76, 20.64] -[20.8, 20.8, 21.12, 22.88, 23.72, 26.16, 27.92, 27.96, 27.28, 26.72, 25.24, 24.32, 24.24, 24.16, 24.2, 24.68, 24.8, 24.68, 24.68, 24.56, 24.64] -21.99260139465332 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.927062749862671, 'TIME_S_1KI': 159.2706274986267, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 517.2976983642578, 'W': 23.521442010494468} -[20.4, 20.32, 20.32, 20.4, 20.48, 20.52, 20.8, 20.76, 20.76, 20.64, 20.56, 20.16, 20.24, 20.6, 20.64, 20.76, 20.8, 20.6, 20.28, 20.44] -369.46000000000004 -18.473000000000003 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 
'MATRIX_DENSITY': 0.3, 'TIME_S': 15.927062749862671, 'TIME_S_1KI': 159.2706274986267, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 517.2976983642578, 'W': 23.521442010494468, 'J_1KI': 5172.9769836425785, 'W_1KI': 235.21442010494468, 'W_D': 5.048442010494465, 'J_D': 111.02837280082699, 'W_D_1KI': 50.484420104944654, 'J_D_1KI': 504.8442010494465} +[20.32, 20.2, 20.28, 20.4, 20.64, 20.72, 20.76, 20.76, 20.72, 20.4] +[20.28, 20.4, 20.6, 25.12, 25.92, 28.28, 30.16, 28.08, 26.96, 26.28, 25.28, 24.36, 24.68, 24.68, 24.72, 24.72, 24.72, 24.48, 24.2, 24.08, 23.72] +21.999858379364014 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.782387495040894, 'TIME_S_1KI': 157.82387495040894, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 523.4366407585144, 'W': 23.79272774089768} +[20.32, 20.2, 20.28, 20.4, 20.64, 20.72, 20.76, 20.76, 20.72, 20.4, 20.48, 20.8, 20.56, 20.64, 20.56, 20.48, 20.76, 20.92, 21.08, 21.0] +371.38 +18.569 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.782387495040894, 'TIME_S_1KI': 157.82387495040894, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 523.4366407585144, 'W': 23.79272774089768, 'J_1KI': 5234.366407585144, 'W_1KI': 237.92727740897678, 'W_D': 5.223727740897679, 'J_D': 114.92127051210406, 'W_D_1KI': 52.237277408976794, 'J_D_1KI': 522.3727740897679} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.4.json index d2af780..715ce05 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.4.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 22.84249472618103, "TIME_S_1KI": 228.4249472618103, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 682.8522075366976, "W": 24.128078827131084, "J_1KI": 6828.522075366976, "W_1KI": 241.28078827131083, "W_D": 3.4220788271310845, "J_D": 96.84874200773261, "W_D_1KI": 34.220788271310845, "J_D_1KI": 342.2078827131084} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.19744086265564, "TIME_S_1KI": 211.9744086265564, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 653.3600071525573, "W": 23.102443027250636, "J_1KI": 6533.600071525573, "W_1KI": 231.02443027250635, "W_D": 3.838443027250637, "J_D": 108.5549766654967, "W_D_1KI": 38.38443027250637, "J_D_1KI": 383.8443027250637} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.4.output index c75f057..a6eda71 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.4.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,14 @@ ['apptainer', 
'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.4 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 22.84249472618103} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.19744086265564} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2006, 4067, ..., 9995955, - 9997971, 10000000]), - col_indices=tensor([ 1, 3, 4, ..., 4995, 4998, 4999]), - values=tensor([0.5438, 0.4529, 0.4674, ..., 0.4313, 0.1734, 0.8643]), +tensor(crow_indices=tensor([ 0, 1987, 3990, ..., 9995921, + 9997979, 10000000]), + col_indices=tensor([ 1, 3, 7, ..., 4994, 4996, 4998]), + values=tensor([0.8352, 0.5081, 0.7013, ..., 0.4620, 0.3531, 0.9818]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4316, 0.5719, 0.8319, ..., 0.7407, 0.2442, 0.5797]) +tensor([0.6761, 0.9286, 0.5768, ..., 0.1789, 0.3308, 0.7051]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 22.84249472618103 seconds +Time: 21.19744086265564 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2006, 4067, ..., 9995955, - 9997971, 10000000]), - col_indices=tensor([ 1, 3, 4, ..., 4995, 4998, 4999]), - values=tensor([0.5438, 0.4529, 0.4674, ..., 0.4313, 0.1734, 0.8643]), +tensor(crow_indices=tensor([ 0, 1987, 3990, ..., 9995921, + 9997979, 10000000]), + col_indices=tensor([ 1, 3, 7, ..., 4994, 4996, 4998]), + values=tensor([0.8352, 0.5081, 0.7013, ..., 0.4620, 0.3531, 0.9818]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4316, 0.5719, 0.8319, ..., 0.7407, 0.2442, 0.5797]) +tensor([0.6761, 0.9286, 0.5768, ..., 0.1789, 0.3308, 0.7051]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 22.84249472618103 seconds +Time: 21.19744086265564 seconds -[26.88, 26.16, 25.48, 25.52, 24.68, 24.6, 24.36, 24.16, 24.16, 24.28] -[24.4, 24.52, 24.52, 28.04, 29.2, 30.8, 31.68, 30.2, 29.88, 28.12, 26.84, 25.72, 24.28, 24.16, 24.04, 24.12, 24.24, 24.24, 24.4, 24.44, 24.48, 24.52, 24.68, 24.48, 24.68, 24.6, 24.68] -28.301142930984497 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 22.84249472618103, 'TIME_S_1KI': 228.4249472618103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 682.8522075366976, 'W': 24.128078827131084} -[26.88, 26.16, 25.48, 25.52, 24.68, 24.6, 24.36, 24.16, 24.16, 24.28, 20.44, 20.64, 20.68, 20.8, 20.92, 21.32, 21.36, 21.44, 21.44, 21.2] -414.12 -20.706 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 22.84249472618103, 'TIME_S_1KI': 228.4249472618103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 682.8522075366976, 'W': 24.128078827131084, 'J_1KI': 6828.522075366976, 'W_1KI': 241.28078827131083, 'W_D': 3.4220788271310845, 'J_D': 96.84874200773261, 'W_D_1KI': 34.220788271310845, 'J_D_1KI': 342.2078827131084} +[20.56, 20.56, 20.56, 20.76, 20.88, 20.68, 20.72, 20.44, 20.48, 20.36] +[20.36, 20.44, 21.72, 23.12, 23.12, 25.08, 27.4, 28.88, 28.4, 27.2, 26.4, 25.64, 25.4, 25.28, 25.32, 25.2, 24.88, 24.64, 24.64, 24.72, 24.68, 24.56, 24.36, 24.48, 24.52, 24.2, 24.2] +28.280992031097412 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.19744086265564, 'TIME_S_1KI': 211.9744086265564, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 653.3600071525573, 'W': 23.102443027250636} +[20.56, 20.56, 20.56, 20.76, 20.88, 20.68, 20.72, 20.44, 20.48, 20.36, 21.88, 21.88, 21.84, 22.16, 22.48, 22.56, 22.44, 22.44, 22.08, 21.84] +385.28 +19.264 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.19744086265564, 'TIME_S_1KI': 211.9744086265564, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 653.3600071525573, 'W': 23.102443027250636, 'J_1KI': 6533.600071525573, 'W_1KI': 231.02443027250635, 'W_D': 3.838443027250637, 'J_D': 108.5549766654967, 'W_D_1KI': 
38.38443027250637, 'J_D_1KI': 383.8443027250637} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.5.json index 1c5a213..425830a 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.5.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.49390745162964, "TIME_S_1KI": 264.9390745162964, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 833.4256306743622, "W": 24.07917164557845, "J_1KI": 8334.256306743622, "W_1KI": 240.79171645578452, "W_D": 5.603171645578453, "J_D": 193.9363584108353, "W_D_1KI": 56.031716455784526, "J_D_1KI": 560.3171645578453} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.608420610427856, "TIME_S_1KI": 266.08420610427856, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 808.2595381355286, "W": 23.359669488514182, "J_1KI": 8082.595381355286, "W_1KI": 233.5966948851418, "W_D": 4.68066948851418, "J_D": 161.95416466879843, "W_D_1KI": 46.8066948851418, "J_D_1KI": 468.06694885141803} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.5.output index 99ea343..ac88e94 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.5.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.5 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.49390745162964} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.608420610427856} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2475, 4991, ..., 12495070, - 12497536, 12500000]), - col_indices=tensor([ 3, 6, 7, ..., 4992, 4996, 4999]), - values=tensor([0.7861, 0.1444, 0.2009, ..., 0.5207, 0.8919, 0.5019]), +tensor(crow_indices=tensor([ 0, 2453, 4908, ..., 12494971, + 12497427, 12500000]), + col_indices=tensor([ 0, 1, 2, ..., 4996, 4997, 4999]), + values=tensor([0.5398, 0.0111, 0.2025, ..., 0.8909, 0.4439, 0.2288]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.4801, 0.0235, 0.0420, ..., 0.5930, 0.2408, 0.0610]) +tensor([0.7554, 0.3335, 0.4109, ..., 0.1756, 0.4233, 0.6660]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 26.49390745162964 seconds +Time: 26.608420610427856 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2475, 4991, ..., 12495070, - 12497536, 12500000]), - col_indices=tensor([ 3, 6, 7, ..., 4992, 4996, 4999]), - values=tensor([0.7861, 0.1444, 0.2009, ..., 0.5207, 0.8919, 0.5019]), +tensor(crow_indices=tensor([ 0, 2453, 4908, ..., 12494971, + 12497427, 12500000]), + col_indices=tensor([ 0, 1, 2, ..., 4996, 4997, 4999]), + values=tensor([0.5398, 0.0111, 0.2025, ..., 0.8909, 0.4439, 0.2288]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.4801, 0.0235, 0.0420, ..., 0.5930, 0.2408, 0.0610]) +tensor([0.7554, 0.3335, 0.4109, ..., 0.1756, 0.4233, 0.6660]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 26.49390745162964 seconds +Time: 26.608420610427856 seconds -[20.72, 20.56, 20.56, 20.36, 20.36, 20.36, 20.28, 20.56, 20.72, 21.08] -[21.48, 21.52, 24.72, 26.76, 28.04, 29.8, 31.48, 31.48, 29.76, 28.44, 27.2, 25.96, 25.12, 24.52, 24.64, 24.64, 24.72, 24.52, 24.56, 24.68, 24.68, 24.6, 24.52, 24.2, 24.32, 24.24, 24.16, 24.4, 24.44, 24.44, 24.44, 24.6, 24.36] -34.611889600753784 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.49390745162964, 'TIME_S_1KI': 264.9390745162964, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.4256306743622, 'W': 24.07917164557845} -[20.72, 20.56, 20.56, 20.36, 20.36, 20.36, 20.28, 20.56, 20.72, 21.08, 20.64, 20.64, 20.36, 20.36, 20.56, 20.64, 20.68, 20.56, 20.56, 20.36] -369.52 -18.476 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.49390745162964, 'TIME_S_1KI': 264.9390745162964, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.4256306743622, 'W': 24.07917164557845, 'J_1KI': 8334.256306743622, 'W_1KI': 240.79171645578452, 'W_D': 5.603171645578453, 'J_D': 193.9363584108353, 'W_D_1KI': 56.031716455784526, 'J_D_1KI': 560.3171645578453} +[20.48, 20.76, 20.88, 
20.88, 21.04, 20.8, 20.8, 20.8, 20.68, 20.52] +[20.68, 20.72, 21.2, 23.28, 25.28, 26.92, 28.84, 29.56, 28.68, 27.68, 26.12, 25.32, 24.6, 24.56, 24.48, 24.4, 24.4, 24.48, 24.6, 24.64, 24.76, 24.72, 24.48, 24.2, 24.08, 24.0, 24.16, 24.2, 24.08, 24.4, 24.24, 24.16, 24.24] +34.60064101219177 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.608420610427856, 'TIME_S_1KI': 266.08420610427856, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 808.2595381355286, 'W': 23.359669488514182} +[20.48, 20.76, 20.88, 20.88, 21.04, 20.8, 20.8, 20.8, 20.68, 20.52, 21.08, 21.0, 20.68, 20.72, 20.72, 20.64, 20.72, 20.6, 20.52, 20.6] +373.58000000000004 +18.679000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.608420610427856, 'TIME_S_1KI': 266.08420610427856, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 808.2595381355286, 'W': 23.359669488514182, 'J_1KI': 8082.595381355286, 'W_1KI': 233.5966948851418, 'W_D': 4.68066948851418, 'J_D': 161.95416466879843, 'W_D_1KI': 46.8066948851418, 'J_D_1KI': 468.06694885141803} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.json index 732c4d4..db06094 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 289937, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.948570489883423, "TIME_S_1KI": 0.037761894790535266, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 337.0917576313019, "W": 22.992700250818316, "J_1KI": 1.1626379442130597, "W_1KI": 0.07930240104166876, "W_D": 4.582700250818316, "J_D": 67.18612713575365, "W_D_1KI": 0.015805848342289243, "J_D_1KI": 5.4514768181671336e-05} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 290608, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.898369789123535, "TIME_S_1KI": 0.037501960679415344, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 329.4187112140655, "W": 22.471646126593484, "J_1KI": 1.1335500440939874, "W_1KI": 0.07732631629753305, "W_D": 4.110646126593483, "J_D": 60.259214730024304, "W_D_1KI": 0.014144986120800127, "J_D_1KI": 4.867376713923955e-05} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.output index 90b44fb..8746e29 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,75 +1,102 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.012730121612548828} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.01248311996459961} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([ 601, 2647, 820, 2703, 4832, 2905, 4036, 996, 2456, - 2955, 1267, 3283, 2251, 4368, 2032, 1143, 1874, 4481, - 3312, 337, 3271, 2673, 2707, 912, 1355, 576, 4171, - 1338, 4509, 2894, 3669, 436, 629, 3354, 3800, 1938, - 3841, 3356, 3452, 2739, 639, 4726, 1185, 2215, 4780, - 2365, 813, 3529, 3133, 2027, 1430, 1360, 1560, 4504, - 4891, 990, 715, 1174, 4133, 1335, 2115, 2803, 644, - 1222, 359, 4157, 1393, 961, 2251, 4773, 836, 2710, - 702, 1699, 2540, 3245, 4452, 1916, 3004, 2947, 4627, - 2897, 3573, 4136, 3724, 2543, 4225, 4206, 1697, 2467, - 4746, 2725, 1632, 1936, 4003, 4667, 837, 3403, 2009, - 3106, 4241, 1632, 143, 2900, 2184, 2389, 134, 1255, - 455, 3988, 4547, 3113, 3784, 246, 1055, 1579, 2608, - 1441, 4808, 3117, 2320, 2723, 2732, 1022, 4658, 752, - 2092, 3461, 403, 1092, 2475, 1500, 4745, 1977, 2013, - 1043, 1615, 2577, 280, 4124, 4918, 3583, 3155, 4834, - 2410, 1454, 4710, 2436, 3776, 3032, 3163, 332, 2083, - 3800, 348, 742, 2269, 4249, 2249, 388, 3899, 1000, - 3276, 3790, 482, 1626, 3791, 386, 1380, 385, 518, - 3261, 3414, 3411, 1506, 372, 2674, 2798, 1667, 3725, - 190, 3734, 1581, 3514, 2910, 3386, 1246, 4529, 2936, - 3830, 2148, 4608, 2142, 767, 3320, 1467, 118, 736, - 1289, 3485, 482, 1713, 920, 523, 2549, 1394, 1179, - 1453, 3629, 477, 885, 4060, 1379, 4354, 1610, 3955, - 4389, 2465, 337, 97, 1261, 1276, 880, 2430, 2803, - 1291, 2721, 585, 2387, 4856, 993, 177, 4024, 1337, - 4378, 435, 408, 1205, 4, 2496, 4066, 296, 288, - 2154, 1297, 3984, 4892, 1675, 3223, 2466]), - values=tensor([0.2688, 0.3138, 0.1347, 0.8899, 0.0694, 0.1416, 0.2868, - 0.2019, 0.8985, 0.0861, 0.2909, 0.4503, 0.7663, 0.8882, - 0.6672, 0.1346, 0.9398, 0.2159, 0.4799, 0.2790, 0.3866, - 0.2729, 0.6835, 0.4176, 0.9415, 0.7950, 0.5659, 0.4247, - 0.4627, 0.0016, 0.2802, 0.5691, 0.4545, 0.9589, 0.5833, - 0.2407, 0.8459, 0.0609, 0.7229, 0.1587, 0.3799, 0.2604, - 0.0299, 0.6751, 0.8528, 0.9681, 0.0567, 0.8653, 0.8227, - 0.8273, 0.6799, 0.0354, 0.3989, 0.1205, 0.6402, 0.1199, - 0.3054, 0.1464, 0.1989, 0.0387, 0.3720, 0.5942, 0.7253, - 0.7730, 0.9054, 0.0855, 0.5753, 0.3128, 0.7859, 0.4565, - 0.8518, 0.4282, 0.9370, 0.5476, 0.3415, 0.1584, 0.4788, - 0.2685, 0.1433, 0.3934, 0.1639, 0.1743, 0.9037, 0.0304, - 0.0289, 0.0705, 0.5423, 0.6257, 0.8142, 0.7578, 0.3516, - 0.0327, 0.0056, 0.9367, 0.3464, 0.2720, 0.5506, 0.6244, - 0.9778, 0.9403, 0.2695, 0.1008, 0.2814, 0.4022, 0.0750, - 0.8589, 0.5073, 0.2768, 0.4090, 0.0915, 0.6257, 0.7999, - 0.6904, 0.8703, 0.1142, 0.1298, 0.5992, 0.4302, 0.0539, - 0.7905, 0.9381, 0.9895, 0.8549, 0.3053, 0.2672, 0.8126, - 0.5440, 0.4082, 0.4559, 0.5422, 0.5229, 0.1070, 0.6759, - 0.9088, 0.1120, 0.0848, 0.1772, 0.0720, 0.8290, 
0.5142, - 0.0465, 0.4337, 0.4749, 0.7655, 0.4175, 0.8665, 0.2208, - 0.6756, 0.9278, 0.6977, 0.7861, 0.8161, 0.4230, 0.8317, - 0.1717, 0.0383, 0.6928, 0.6239, 0.7675, 0.9077, 0.2548, - 0.9243, 0.7360, 0.7612, 0.0838, 0.0670, 0.8799, 0.8976, - 0.6367, 0.4875, 0.4382, 0.0454, 0.5556, 0.5205, 0.8555, - 0.8390, 0.6880, 0.8890, 0.5970, 0.9613, 0.7713, 0.4355, - 0.0933, 0.7601, 0.9627, 0.3532, 0.8675, 0.4814, 0.2521, - 0.6473, 0.8370, 0.9626, 0.0085, 0.0901, 0.8755, 0.5072, - 0.9504, 0.7596, 0.2658, 0.8293, 0.6634, 0.4401, 0.0682, - 0.6406, 0.9649, 0.2363, 0.8410, 0.6169, 0.9731, 0.1306, - 0.2698, 0.6020, 0.0496, 0.3126, 0.8880, 0.7892, 0.7667, - 0.6466, 0.0659, 0.7587, 0.7496, 0.6160, 0.2212, 0.0833, - 0.9146, 0.0286, 0.3379, 0.5728, 0.8427, 0.7370, 0.7738, - 0.6182, 0.3534, 0.1226, 0.0015, 0.7059, 0.3466, 0.3941, - 0.7962, 0.2804, 0.4929, 0.7827, 0.0766, 0.2294, 0.8494, - 0.9943, 0.0815, 0.8720, 0.8261, 0.8846]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.4430, 0.7360, 0.8513, ..., 0.2058, 0.5954, 0.5363]) + col_indices=tensor([3524, 1345, 4815, 4282, 4870, 119, 2862, 1539, 1636, + 3659, 3338, 910, 2726, 4294, 1913, 612, 750, 3685, + 1742, 4163, 474, 2208, 783, 2555, 3401, 944, 2517, + 1491, 1736, 309, 1327, 2601, 4360, 3282, 805, 1178, + 4484, 4166, 878, 2859, 3645, 3279, 1594, 4535, 3992, + 3942, 180, 749, 971, 4123, 2642, 1200, 2757, 546, + 4891, 739, 2092, 3807, 4611, 4024, 2154, 2442, 116, + 531, 748, 2218, 3961, 4960, 1520, 3640, 3626, 4969, + 2697, 4422, 4544, 4650, 2145, 1972, 91, 1275, 12, + 1649, 4153, 2590, 4831, 3787, 3742, 3197, 722, 3104, + 4177, 1213, 3435, 3250, 2182, 4681, 1127, 4720, 4512, + 4315, 3558, 1093, 432, 2693, 1073, 2295, 1318, 2503, + 4078, 3880, 4406, 2730, 3241, 3255, 2397, 2485, 2442, + 4996, 2878, 485, 1051, 2612, 3805, 341, 4915, 1902, + 2199, 1815, 3939, 601, 3979, 3394, 2435, 1164, 4429, + 2748, 1492, 2737, 23, 1257, 2789, 3753, 4340, 3846, + 3397, 1436, 4214, 2461, 2693, 3825, 2174, 963, 3477, + 1823, 2545, 1096, 277, 4699, 2760, 1341, 2236, 142, + 3553, 611, 354, 3592, 4613, 4868, 1779, 3721, 984, + 4520, 3645, 4006, 2561, 740, 3063, 4804, 2766, 2445, + 3804, 1962, 2833, 4689, 1262, 1798, 4147, 3435, 1694, + 3913, 2084, 4099, 136, 752, 3650, 3010, 1306, 3942, + 3576, 3657, 3759, 2092, 4190, 391, 4160, 3781, 1011, + 4244, 3578, 1272, 3512, 44, 139, 4307, 2531, 3115, + 3521, 2069, 3644, 1328, 3792, 4435, 2603, 3808, 4373, + 1357, 1345, 1436, 3843, 1555, 1238, 4762, 2479, 3210, + 1849, 4232, 4656, 2573, 4998, 3754, 2885, 2729, 3216, + 147, 103, 4258, 4306, 2159, 276, 4304]), + values=tensor([9.6905e-01, 4.3497e-01, 2.0944e-01, 8.0847e-01, + 3.2885e-02, 8.4973e-01, 7.9677e-01, 7.9640e-01, + 5.6744e-01, 4.2898e-01, 1.0364e-01, 5.0105e-01, + 9.5264e-01, 4.7536e-01, 5.7775e-01, 6.1557e-01, + 4.5675e-01, 5.9933e-01, 6.4169e-02, 6.3246e-01, + 4.3167e-01, 2.8776e-01, 5.5890e-01, 5.5969e-02, + 2.1801e-03, 7.2800e-01, 5.6436e-01, 9.4797e-01, + 1.7965e-01, 7.5227e-01, 5.2546e-02, 3.5745e-02, + 7.5535e-01, 7.1083e-01, 5.8062e-01, 4.1428e-01, + 6.9798e-01, 5.3478e-01, 8.9727e-01, 1.5153e-02, + 1.5503e-01, 2.7371e-01, 4.3971e-01, 4.1084e-01, + 9.2242e-01, 7.8940e-01, 1.4724e-01, 1.8340e-01, + 2.3280e-01, 5.4735e-01, 4.2712e-01, 7.6551e-01, + 8.4454e-01, 4.2663e-01, 9.9026e-01, 2.4310e-01, + 3.9901e-01, 1.1606e-02, 9.6207e-01, 2.5401e-01, + 7.7138e-01, 2.8637e-02, 7.8595e-01, 5.7258e-01, + 6.7141e-01, 1.4320e-01, 1.6119e-02, 7.0290e-01, + 7.8965e-01, 2.3831e-01, 3.5664e-01, 7.3584e-01, + 2.7798e-01, 4.5289e-01, 8.5077e-01, 1.5601e-02, + 5.2968e-01, 
2.0860e-01, 1.1382e-01, 9.0156e-01, + 5.8184e-01, 5.7852e-03, 7.5569e-01, 7.5094e-01, + 5.4545e-02, 9.7282e-01, 2.3250e-01, 9.5631e-01, + 8.8935e-01, 9.1006e-01, 8.0073e-01, 8.3933e-03, + 2.0262e-01, 7.7007e-01, 9.8448e-01, 8.9304e-01, + 9.3279e-01, 8.6342e-01, 6.5832e-01, 1.8739e-01, + 6.3298e-01, 2.2299e-01, 3.6074e-04, 9.0510e-01, + 6.6913e-01, 6.6890e-01, 6.6236e-01, 9.6497e-01, + 2.9261e-02, 1.9197e-01, 2.6640e-02, 3.1160e-01, + 6.5758e-02, 1.2656e-01, 5.3827e-01, 9.8049e-01, + 9.8313e-01, 2.3396e-01, 8.9658e-01, 1.8063e-01, + 1.1232e-01, 7.3954e-01, 9.0232e-01, 5.1764e-01, + 6.9947e-01, 1.7904e-01, 7.1848e-01, 5.8453e-01, + 6.6222e-01, 5.1844e-01, 5.4079e-01, 1.8983e-02, + 9.1750e-01, 7.8893e-01, 6.9791e-01, 5.4260e-01, + 2.9252e-01, 3.4612e-01, 8.4557e-01, 5.2518e-03, + 1.4649e-01, 7.3551e-01, 6.2607e-01, 1.5554e-01, + 2.1157e-01, 2.0955e-05, 6.9805e-01, 2.0182e-01, + 8.3632e-01, 3.3942e-01, 8.7350e-01, 4.8981e-01, + 5.9056e-01, 1.3046e-01, 9.4263e-01, 8.8990e-01, + 2.2499e-01, 2.6839e-01, 1.8410e-01, 4.7143e-01, + 1.8274e-01, 2.4572e-01, 6.1300e-01, 4.9338e-01, + 9.0894e-02, 9.7528e-01, 4.9050e-01, 6.8524e-01, + 3.9504e-01, 2.9749e-01, 6.2761e-01, 2.2348e-01, + 4.6711e-01, 4.2154e-01, 6.6428e-01, 9.7446e-01, + 8.9599e-01, 1.2513e-02, 9.8230e-01, 1.7610e-01, + 4.0552e-01, 8.7425e-01, 4.7813e-01, 3.9152e-01, + 6.4584e-01, 3.9083e-01, 6.7045e-01, 1.1211e-01, + 5.0947e-01, 3.7668e-01, 4.1774e-01, 8.4774e-01, + 7.1912e-01, 4.6455e-01, 7.1822e-01, 5.8105e-01, + 2.0422e-01, 7.0318e-01, 4.9082e-01, 1.5573e-01, + 3.4188e-01, 5.9691e-01, 2.8313e-01, 4.8029e-01, + 2.7626e-01, 1.8473e-01, 8.4021e-01, 8.9695e-01, + 7.2050e-01, 1.8491e-01, 1.4306e-01, 7.9692e-01, + 3.3619e-01, 9.9618e-01, 4.4166e-01, 1.9421e-01, + 1.5702e-01, 3.1751e-01, 8.2439e-01, 8.4379e-01, + 4.3923e-01, 9.7719e-01, 2.7895e-01, 1.3864e-01, + 7.0069e-01, 5.9201e-01, 7.3145e-01, 5.3124e-02, + 8.1772e-01, 5.4291e-01, 9.0008e-01, 4.6391e-01, + 8.4267e-01, 2.4175e-01, 5.1916e-01, 5.0765e-01, + 9.1184e-01, 6.3731e-01, 9.8252e-01, 7.1974e-01, + 7.8639e-01, 3.6429e-01, 5.1018e-01, 2.9878e-01, + 2.1566e-01, 6.0615e-01, 7.8790e-01, 7.8008e-01, + 1.1884e-01, 3.0949e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.6617, 0.9328, 0.8652, ..., 0.8174, 0.3929, 0.3392]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -77,80 +104,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.012730121612548828 seconds +Time: 0.01248311996459961 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 82481 -ss 5000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.9870200157165527} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 84113 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.191556692123413} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1850, 2920, 982, 2920, 4207, 4970, 436, 2573, 4448, - 4877, 4672, 2082, 4283, 4267, 2430, 1672, 1924, 2492, - 2408, 375, 2305, 3583, 3031, 1418, 2140, 1197, 1752, - 3076, 720, 312, 2699, 3140, 1513, 401, 3552, 13, - 806, 4950, 885, 1841, 3287, 275, 2117, 560, 18, - 1477, 2688, 1794, 1927, 4953, 3645, 3622, 4539, 3985, - 3945, 2233, 4167, 2147, 1082, 1206, 1252, 1066, 1078, - 321, 808, 1999, 635, 1039, 3245, 1630, 1008, 4182, - 1408, 2667, 193, 3182, 772, 1491, 608, 1914, 2811, - 1620, 3712, 1794, 3637, 3266, 214, 1284, 4462, 4923, - 2463, 2700, 3904, 4098, 3900, 3027, 4116, 3580, 332, - 4692, 353, 653, 1766, 2037, 590, 2523, 136, 4354, - 4126, 2451, 226, 2678, 2130, 4889, 2592, 1660, 3477, - 4537, 2214, 1017, 1706, 4859, 3869, 167, 4374, 4799, - 1507, 1665, 4935, 3083, 2008, 4174, 859, 984, 2840, - 863, 82, 79, 4175, 3907, 4158, 4533, 3431, 2281, - 2787, 3034, 4208, 2453, 2306, 4607, 1125, 4475, 945, - 2063, 2439, 3548, 3678, 3034, 1770, 1726, 3619, 4461, - 647, 1318, 4963, 2847, 798, 1118, 1938, 2152, 3584, - 162, 4914, 1173, 3329, 4768, 2504, 2250, 4542, 1812, - 1545, 228, 2, 4710, 208, 2941, 770, 2538, 3754, - 4746, 2065, 1748, 2708, 2153, 2280, 3564, 3494, 4959, - 1719, 167, 854, 1084, 801, 1756, 2522, 2179, 3363, - 4832, 639, 1945, 2876, 4790, 630, 3306, 2308, 3577, - 253, 3942, 331, 3878, 976, 3355, 242, 1100, 869, - 105, 4517, 3895, 3065, 4030, 177, 4481, 2908, 861, - 1478, 4165, 3720, 1447, 2081, 2042, 4335, 1110, 2494, - 4959, 3445, 552, 1961, 1713, 2677, 2157]), - values=tensor([0.2973, 0.0528, 0.0593, 0.4373, 0.3149, 0.1720, 0.8405, - 0.5692, 0.8566, 0.6937, 0.9133, 0.4677, 0.7516, 0.1147, - 0.9773, 0.0717, 0.0053, 0.2351, 0.3045, 0.8381, 0.0276, - 0.1683, 0.6574, 0.1413, 0.3680, 0.2281, 0.6440, 0.9364, - 0.7063, 0.6312, 0.4354, 0.4765, 0.5047, 0.5079, 0.1300, - 0.6462, 0.5531, 0.3126, 0.0835, 0.2181, 0.0832, 0.6136, - 0.2259, 0.2091, 0.0668, 0.3926, 0.4321, 0.7371, 0.0309, - 0.5039, 0.4193, 0.4354, 0.3007, 0.2701, 0.1422, 0.9665, - 0.4389, 0.2429, 0.7584, 0.6797, 0.5891, 0.3173, 0.8048, - 0.5910, 0.6072, 0.6009, 0.0564, 0.6818, 0.0189, 0.7587, - 0.4355, 0.8685, 0.7348, 0.6709, 0.7401, 0.8320, 0.4287, - 0.5419, 0.6982, 0.3521, 0.5136, 0.1073, 0.2471, 0.8444, - 0.2369, 0.0792, 0.5748, 0.6149, 0.9268, 0.1438, 0.9218, - 0.2189, 0.9680, 0.3579, 0.6778, 0.9284, 0.9712, 0.8475, - 0.6595, 0.8451, 0.9596, 0.0291, 0.2904, 0.4624, 0.9348, - 0.5743, 0.6942, 0.7654, 0.0460, 0.6704, 0.4112, 0.0182, - 0.7891, 0.1191, 0.0775, 0.8674, 0.6379, 0.6054, 0.8989, - 0.6635, 0.7675, 0.4663, 0.1353, 0.5542, 0.9368, 0.0244, - 0.5413, 0.4729, 0.7814, 0.8256, 0.2315, 0.9472, 0.2322, - 0.6177, 0.8709, 0.4587, 0.3448, 0.7377, 0.9270, 0.8111, - 0.2693, 0.6265, 0.4066, 0.9210, 0.2302, 0.6077, 0.3406, - 0.5854, 0.6597, 0.4653, 0.2831, 0.2390, 0.4564, 0.7151, - 0.8705, 0.3781, 0.3836, 0.2946, 0.0129, 0.3443, 0.5513, - 0.5972, 0.1489, 0.6113, 0.5915, 0.8810, 0.4599, 0.1897, - 0.3004, 0.9932, 0.0623, 0.6712, 0.9400, 0.1765, 0.4924, - 0.4162, 0.7114, 0.5036, 0.7747, 0.8718, 0.4237, 0.6772, - 0.3151, 0.4843, 0.4319, 0.0489, 0.9698, 0.2863, 0.1393, - 0.0339, 0.9839, 0.8166, 0.2810, 0.0680, 0.7799, 0.6229, - 0.5426, 0.1095, 
0.3560, 0.3903, 0.8409, 0.3643, 0.1432, - 0.9733, 0.8764, 0.1891, 0.5211, 0.8147, 0.5398, 0.1212, - 0.4051, 0.7700, 0.6201, 0.6092, 0.5740, 0.0174, 0.9730, - 0.5750, 0.1625, 0.0572, 0.6170, 0.5243, 0.2437, 0.4114, - 0.6512, 0.1771, 0.4980, 0.0027, 0.6626, 0.7558, 0.5376, - 0.7689, 0.8026, 0.1009, 0.8359, 0.8508, 0.4274, 0.6167, - 0.9714, 0.0496, 0.8017, 0.4516, 0.6537, 0.1179, 0.6975, - 0.5184, 0.3878, 0.1200, 0.4588, 0.2915]), + col_indices=tensor([3713, 4311, 3122, 669, 4232, 3663, 2007, 3995, 2985, + 3186, 3907, 417, 3242, 4650, 3555, 93, 1502, 1235, + 1517, 785, 337, 4882, 2860, 2764, 3711, 2924, 3286, + 4437, 4762, 2022, 2884, 3050, 2592, 1641, 3866, 675, + 1257, 4075, 1119, 3480, 3384, 4278, 3820, 1485, 2782, + 72, 2113, 1126, 4767, 4007, 1142, 652, 561, 2935, + 1975, 199, 4018, 2743, 2458, 3277, 3886, 4123, 1491, + 983, 570, 514, 3824, 4575, 2583, 283, 3529, 3788, + 4838, 4949, 3779, 1539, 1909, 327, 440, 1117, 1523, + 3726, 1854, 4368, 4914, 250, 4650, 3239, 1693, 1481, + 4096, 4498, 1596, 3687, 3767, 911, 4714, 234, 831, + 3718, 2204, 3324, 625, 3692, 2664, 3195, 4696, 3863, + 3331, 3017, 889, 1472, 1511, 4445, 3379, 2896, 737, + 2089, 2378, 1814, 70, 1098, 1702, 4643, 3827, 3704, + 338, 3850, 572, 2425, 1157, 760, 1664, 4581, 3070, + 1871, 3483, 3605, 2740, 1195, 431, 2064, 2672, 1171, + 2724, 2576, 2931, 3462, 259, 729, 2762, 807, 1190, + 489, 3861, 2642, 4610, 1608, 3168, 3574, 3973, 402, + 4665, 1847, 1256, 1911, 3481, 326, 1493, 4053, 3454, + 1136, 1612, 3786, 2055, 3675, 3833, 3959, 191, 4814, + 835, 2978, 3726, 4192, 1935, 2380, 4871, 413, 2488, + 1620, 97, 520, 3933, 1572, 2751, 3149, 1837, 4901, + 1682, 3126, 4228, 3404, 1656, 3196, 749, 3456, 2262, + 1012, 1921, 813, 3611, 3531, 527, 4192, 3607, 2965, + 2347, 1978, 3154, 3685, 858, 504, 2829, 4599, 1094, + 2491, 3516, 1247, 4751, 3262, 2849, 2566, 401, 546, + 2883, 3333, 2377, 2004, 4616, 2751, 2666, 266, 2836, + 3755, 3303, 2664, 1990, 4686, 1843, 3764]), + values=tensor([0.5362, 0.1764, 0.8382, 0.9994, 0.9800, 0.0618, 0.4592, + 0.9131, 0.9597, 0.4081, 0.6282, 0.2883, 0.1835, 0.6612, + 0.1657, 0.9387, 0.4076, 0.3204, 0.8856, 0.8078, 0.4631, + 0.5983, 0.4460, 0.4890, 0.6668, 0.6615, 0.9286, 0.7329, + 0.1129, 0.9430, 0.4312, 0.1048, 0.6685, 0.7567, 0.6472, + 0.5415, 0.6927, 0.4313, 0.1130, 0.1067, 0.3776, 0.2111, + 0.4988, 0.2323, 0.8735, 0.1866, 0.2181, 0.9275, 0.3209, + 0.2121, 0.3869, 0.6612, 0.6968, 0.7951, 0.2076, 0.6433, + 0.4556, 0.6183, 0.3118, 0.3906, 0.6489, 0.8528, 0.3289, + 0.6201, 0.5839, 0.1037, 0.6280, 0.2385, 0.9761, 0.9993, + 0.9358, 0.5168, 0.1953, 0.6987, 0.1991, 0.2663, 0.4851, + 0.8655, 0.6221, 0.8880, 0.7203, 0.5440, 0.9586, 0.8308, + 0.7505, 0.4769, 0.0521, 0.4691, 0.7734, 0.7087, 0.3170, + 0.5274, 0.1086, 0.5094, 0.7757, 0.7279, 0.2033, 0.2473, + 0.2107, 0.0412, 0.5145, 0.7673, 0.1630, 0.8339, 0.4727, + 0.9721, 0.8769, 0.3873, 0.6619, 0.5938, 0.1169, 0.9732, + 0.9194, 0.0403, 0.8304, 0.6171, 0.7459, 0.2771, 0.3672, + 0.4302, 0.3220, 0.4230, 0.9353, 0.2646, 0.7337, 0.5313, + 0.2141, 0.6391, 0.0194, 0.2608, 0.8149, 0.4521, 0.7486, + 0.3912, 0.2437, 0.1368, 0.0368, 0.7254, 0.6183, 0.5911, + 0.1441, 0.2590, 0.3769, 0.0584, 0.8103, 0.7259, 0.1986, + 0.3606, 0.6084, 0.0467, 0.8688, 0.0545, 0.5586, 0.5364, + 0.2793, 0.7875, 0.8326, 0.8683, 0.4631, 0.8848, 0.8452, + 0.8386, 0.7465, 0.5293, 0.9616, 0.6576, 0.7374, 0.6066, + 0.6626, 0.3726, 0.3589, 0.8492, 0.8153, 0.9689, 0.9417, + 0.4647, 0.0179, 0.4250, 0.6143, 0.0168, 0.6908, 0.7336, + 0.2135, 0.6902, 0.9525, 0.2757, 0.6859, 0.1512, 0.4814, + 0.3432, 
0.5356, 0.7377, 0.6058, 0.6860, 0.8576, 0.1848, + 0.1777, 0.8696, 0.6365, 0.6756, 0.8681, 0.5291, 0.6612, + 0.8062, 0.0245, 0.3853, 0.1769, 0.2160, 0.8294, 0.3529, + 0.9743, 0.4080, 0.8953, 0.6456, 0.2314, 0.5812, 0.0730, + 0.4822, 0.6524, 0.6384, 0.9953, 0.0509, 0.4987, 0.0171, + 0.4272, 0.5973, 0.2318, 0.8370, 0.4338, 0.3625, 0.3612, + 0.0107, 0.9389, 0.2647, 0.8948, 0.1513, 0.4188, 0.1808, + 0.2512, 0.8313, 0.9627, 0.2460, 0.6266, 0.5196, 0.6184, + 0.5321, 0.3380, 0.0267, 0.8089, 0.6366]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.1901, 0.2382, 0.6937, ..., 0.7632, 0.8746, 0.1540]) +tensor([0.1216, 0.3140, 0.3620, ..., 0.0196, 0.1616, 0.5885]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -158,80 +185,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 2.9870200157165527 seconds +Time: 3.191556692123413 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 289937 -ss 5000 -sd 1e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.948570489883423} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 276725 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.998385429382324} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([4712, 4178, 2903, 24, 4753, 51, 2819, 4572, 4453, - 3780, 2899, 3226, 3780, 3989, 722, 4731, 1139, 1109, - 4512, 1669, 4522, 2228, 2733, 1441, 2781, 363, 3766, - 2188, 1770, 795, 1224, 1803, 4910, 1370, 4516, 2224, - 2678, 4365, 692, 1811, 1383, 2901, 749, 3344, 1016, - 4896, 4731, 857, 4171, 1998, 4569, 2011, 3832, 2691, - 1005, 4276, 2954, 4491, 2491, 2981, 645, 4461, 2128, - 3675, 4293, 1741, 3314, 1065, 1939, 1615, 3365, 3901, - 589, 3305, 4000, 4212, 790, 4927, 4076, 2238, 4107, - 3701, 3348, 1617, 1179, 3888, 4445, 2667, 3215, 4009, - 4710, 219, 2800, 233, 1521, 2319, 680, 1854, 4750, - 3077, 1721, 3819, 3579, 2334, 2886, 4510, 1278, 1666, - 4749, 4910, 1969, 2508, 532, 1736, 4315, 1491, 537, - 3309, 3121, 4585, 2996, 3358, 502, 4286, 4572, 2864, - 1049, 469, 825, 1143, 635, 2773, 2543, 3425, 3473, - 2174, 4228, 3516, 1137, 2463, 4638, 1994, 2452, 2065, - 96, 3029, 2790, 1834, 4863, 978, 4811, 3677, 2912, - 1938, 2797, 895, 1501, 2558, 1230, 534, 2633, 3017, - 4982, 4618, 4241, 2899, 2098, 2010, 1636, 2502, 2716, - 4980, 363, 466, 23, 1737, 1476, 1286, 4720, 833, - 2653, 201, 3769, 3397, 3009, 4570, 2692, 2095, 4797, - 3941, 2845, 1360, 1763, 3589, 3716, 2365, 196, 1112, - 123, 2267, 4731, 228, 4673, 1590, 3794, 3816, 2846, - 863, 3759, 1182, 304, 2540, 66, 4385, 3694, 3525, - 31, 4315, 4266, 4089, 2728, 1405, 1294, 4022, 2222, - 370, 101, 3253, 4145, 1994, 1358, 981, 2203, 2167, - 3742, 4696, 614, 2733, 396, 4399, 427, 1682, 4896, - 3429, 693, 870, 4939, 3305, 4250, 3680]), - values=tensor([0.3479, 0.5084, 0.6603, 0.8257, 0.8683, 0.9247, 0.0338, - 0.9486, 0.8504, 0.5745, 0.3925, 0.3196, 0.6449, 0.2119, - 0.0164, 0.7309, 0.7682, 0.1461, 0.7397, 0.9951, 0.7123, - 0.4571, 0.7549, 0.0282, 0.5968, 0.6667, 0.3749, 0.3789, - 0.4293, 0.3353, 0.3273, 0.0531, 0.5787, 0.8917, 0.4198, - 0.7695, 0.7895, 0.7926, 0.6654, 0.0192, 0.0703, 0.9096, - 0.9289, 0.6077, 0.6990, 0.6780, 0.1687, 0.0557, 0.0641, - 0.1726, 0.7968, 0.1192, 0.9982, 0.1104, 0.3778, 0.1311, - 0.0584, 0.9615, 0.6551, 0.7173, 0.4827, 0.9281, 0.2508, - 0.5901, 0.8616, 0.6261, 0.7668, 0.8880, 0.5680, 0.6476, - 0.9494, 0.3895, 0.7153, 0.7995, 0.4681, 0.0628, 0.0354, - 0.8123, 0.7147, 0.5397, 0.7785, 0.1737, 0.3550, 0.8870, - 0.9193, 0.0915, 0.0963, 0.4243, 0.0483, 0.3655, 0.7711, - 0.4395, 0.3161, 0.5266, 0.7991, 0.4530, 0.0590, 0.9302, - 0.7021, 0.5336, 0.6784, 0.9823, 0.0943, 0.7391, 0.7084, - 0.0171, 0.4786, 0.7623, 0.5776, 0.2256, 0.8698, 0.1309, - 0.6095, 0.6277, 0.0828, 0.3536, 0.7932, 0.1162, 0.9939, - 0.6893, 0.6054, 0.2963, 0.4057, 0.5571, 0.8162, 0.7161, - 0.6029, 0.7576, 0.8687, 0.3351, 0.8262, 0.5784, 0.6376, - 0.1057, 0.2968, 0.0568, 0.6646, 0.7354, 0.2403, 0.0158, - 0.7552, 0.5770, 0.3899, 0.7014, 0.1196, 0.2500, 0.6112, - 0.3203, 0.8311, 0.8445, 0.8722, 0.6620, 0.5633, 0.3401, - 0.0024, 0.6473, 0.3675, 0.6286, 0.4764, 0.3994, 0.7176, - 0.9295, 0.7610, 0.0448, 0.1910, 0.5959, 0.2410, 0.6714, - 0.3638, 0.8788, 0.4303, 0.8357, 0.1493, 0.7533, 0.2046, - 0.6241, 0.3330, 0.7519, 0.0927, 0.5403, 0.3301, 0.0842, - 0.3044, 0.5311, 0.1859, 0.7234, 0.6523, 0.1074, 0.7205, - 0.0951, 0.9394, 0.8290, 0.0965, 0.9119, 0.9547, 0.5884, - 0.2956, 0.6206, 0.7425, 0.9894, 0.3994, 0.9059, 0.3500, - 0.1825, 0.6628, 0.6687, 0.3257, 0.5028, 0.7592, 0.5362, - 0.2886, 0.3968, 0.4420, 0.4118, 0.6245, 0.3599, 0.5238, - 0.6126, 0.6306, 0.0343, 0.1672, 0.1822, 0.1255, 0.6333, - 0.3425, 0.1597, 0.8225, 0.7857, 0.7675, 
0.1595, 0.4863, - 0.8578, 0.1155, 0.8038, 0.8906, 0.6082, 0.3640, 0.5820, - 0.4951, 0.3638, 0.3016, 0.7272, 0.7832, 0.5085, 0.1101, - 0.2648, 0.6399, 0.4137, 0.5843, 0.7184]), + col_indices=tensor([2380, 3613, 1075, 3246, 4042, 3374, 1478, 2979, 2276, + 2930, 2086, 4202, 3737, 1516, 3862, 142, 4015, 1357, + 560, 319, 2604, 629, 688, 2977, 3263, 632, 933, + 3903, 4637, 3440, 4071, 3122, 4597, 2020, 1753, 4968, + 236, 401, 4721, 2814, 3194, 1487, 3253, 2635, 4988, + 2193, 3078, 475, 4007, 4428, 3886, 1507, 3897, 2401, + 4405, 1940, 1077, 2906, 1323, 1917, 3748, 4260, 4279, + 3825, 1665, 617, 1150, 4448, 3886, 1543, 4797, 1119, + 1977, 3776, 961, 857, 1652, 4949, 3964, 1680, 4247, + 1503, 1704, 3880, 4443, 3685, 2352, 1751, 824, 368, + 4345, 4291, 354, 1088, 122, 396, 1168, 2175, 4172, + 2207, 3149, 2042, 2530, 1496, 799, 2665, 552, 4528, + 662, 2611, 2924, 4054, 589, 4354, 4094, 679, 4317, + 960, 4204, 3208, 423, 3877, 1963, 3175, 4640, 302, + 642, 3523, 3895, 4751, 2497, 771, 1939, 4647, 2695, + 1569, 2830, 1821, 677, 2907, 493, 1717, 2727, 3612, + 423, 4487, 2293, 3921, 2761, 2513, 1788, 3917, 443, + 3856, 3871, 2231, 2463, 17, 3133, 2026, 4379, 2969, + 4814, 2101, 2835, 2347, 1072, 2258, 894, 1197, 2716, + 52, 451, 3006, 631, 601, 2025, 3399, 1702, 998, + 2824, 329, 850, 1719, 1765, 4361, 4408, 3036, 312, + 4820, 3179, 2183, 2206, 3026, 678, 2561, 857, 3235, + 1507, 2484, 4159, 397, 2284, 3338, 4068, 1850, 2780, + 2565, 3271, 2476, 1353, 1636, 3712, 2663, 4387, 4942, + 3723, 179, 2743, 954, 4391, 63, 624, 2121, 4577, + 2940, 2315, 3388, 3771, 3004, 839, 4600, 819, 4973, + 3927, 4001, 4085, 2667, 3409, 1518, 1515, 3367, 3354, + 1687, 4445, 2699, 2676, 1307, 165, 1692]), + values=tensor([0.3654, 0.9688, 0.7037, 0.1671, 0.3181, 0.5966, 0.6794, + 0.3505, 0.6371, 0.3737, 0.9079, 0.6073, 0.9177, 0.5556, + 0.3997, 0.3909, 0.0350, 0.1513, 0.1992, 0.2362, 0.8089, + 0.8918, 0.7819, 0.3606, 0.3706, 0.4698, 0.5423, 0.8164, + 0.0434, 0.0786, 0.1808, 0.6335, 0.6516, 0.6817, 0.6880, + 0.8936, 0.7162, 0.0476, 0.2319, 0.8767, 0.5104, 0.0121, + 0.3697, 0.5497, 0.6989, 0.7260, 0.6407, 0.4363, 0.3211, + 0.9746, 0.3345, 0.0982, 0.2806, 0.5109, 0.9644, 0.1545, + 0.0332, 0.4128, 0.5605, 0.1574, 0.7300, 0.3651, 0.3470, + 0.2390, 0.2125, 0.5486, 0.0518, 0.3734, 0.5993, 0.2319, + 0.6180, 0.6717, 0.4616, 0.6681, 0.1662, 0.3850, 0.4625, + 0.8555, 0.7532, 0.7238, 0.0784, 0.9432, 0.7045, 0.4871, + 0.3992, 0.7542, 0.8708, 0.2154, 0.5281, 0.0787, 0.4018, + 0.9066, 0.6477, 0.0720, 0.1523, 0.5964, 0.7497, 0.6084, + 0.1830, 0.7602, 0.9287, 0.6515, 0.5936, 0.5401, 0.3124, + 0.6783, 0.5252, 0.6643, 0.7069, 0.4662, 0.0605, 0.2500, + 0.5601, 0.3810, 0.8385, 0.3052, 0.5729, 0.0338, 0.8176, + 0.7963, 0.8379, 0.4036, 0.0594, 0.5366, 0.6907, 0.5474, + 0.8692, 0.3165, 0.9487, 0.9210, 0.0809, 0.9652, 0.2990, + 0.5881, 0.1996, 0.3192, 0.3596, 0.2710, 0.4182, 0.6748, + 0.1091, 0.4052, 0.1457, 0.2376, 0.4509, 0.3765, 0.7946, + 0.2743, 0.8721, 0.2924, 0.5997, 0.8679, 0.6877, 0.4975, + 0.9946, 0.7490, 0.8987, 0.4323, 0.8556, 0.0644, 0.2014, + 0.4571, 0.5904, 0.1948, 0.0242, 0.2200, 0.7531, 0.9807, + 0.0340, 0.6445, 0.3538, 0.5381, 0.1875, 0.2701, 0.0340, + 0.1739, 0.4145, 0.0100, 0.7463, 0.7750, 0.8434, 0.0880, + 0.6662, 0.7618, 0.0356, 0.4252, 0.8131, 0.6804, 0.5433, + 0.5757, 0.4288, 0.6083, 0.2401, 0.7393, 0.1894, 0.8963, + 0.1437, 0.2457, 0.0570, 0.7376, 0.0470, 0.9104, 0.3679, + 0.1543, 0.8914, 0.3423, 0.3481, 0.6670, 0.5277, 0.9388, + 0.7501, 0.8032, 0.4265, 0.1515, 0.1156, 0.2383, 0.9250, + 0.6618, 0.8262, 0.6467, 0.7861, 0.5413, 
0.4775, 0.3128, + 0.3404, 0.0688, 0.5764, 0.2346, 0.9806, 0.8393, 0.8113, + 0.0170, 0.3215, 0.3506, 0.2192, 0.6468, 0.7892, 0.6495, + 0.5652, 0.4589, 0.2573, 0.9179, 0.9147, 0.1216, 0.8679, + 0.8657, 0.0404, 0.8722, 0.6530, 0.6209]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.0011, 0.9357, 0.8539, ..., 0.1995, 0.1479, 0.1616]) +tensor([0.6910, 0.0217, 0.5495, ..., 0.8566, 0.3088, 0.4862]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -239,77 +266,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.948570489883423 seconds +Time: 9.998385429382324 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 290608 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.898369789123535} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([4712, 4178, 2903, 24, 4753, 51, 2819, 4572, 4453, - 3780, 2899, 3226, 3780, 3989, 722, 4731, 1139, 1109, - 4512, 1669, 4522, 2228, 2733, 1441, 2781, 363, 3766, - 2188, 1770, 795, 1224, 1803, 4910, 1370, 4516, 2224, - 2678, 4365, 692, 1811, 1383, 2901, 749, 3344, 1016, - 4896, 4731, 857, 4171, 1998, 4569, 2011, 3832, 2691, - 1005, 4276, 2954, 4491, 2491, 2981, 645, 4461, 2128, - 3675, 4293, 1741, 3314, 1065, 1939, 1615, 3365, 3901, - 589, 3305, 4000, 4212, 790, 4927, 4076, 2238, 4107, - 3701, 3348, 1617, 1179, 3888, 4445, 2667, 3215, 4009, - 4710, 219, 2800, 233, 1521, 2319, 680, 1854, 4750, - 3077, 1721, 3819, 3579, 2334, 2886, 4510, 1278, 1666, - 4749, 4910, 1969, 2508, 532, 1736, 4315, 1491, 537, - 3309, 3121, 4585, 2996, 3358, 502, 4286, 4572, 2864, - 1049, 469, 825, 1143, 635, 2773, 2543, 3425, 3473, - 2174, 4228, 3516, 1137, 2463, 4638, 1994, 2452, 2065, - 96, 3029, 2790, 1834, 4863, 978, 4811, 3677, 2912, - 1938, 2797, 895, 1501, 2558, 1230, 534, 2633, 3017, - 4982, 4618, 4241, 2899, 2098, 2010, 1636, 2502, 2716, - 4980, 363, 466, 23, 1737, 1476, 1286, 4720, 833, - 2653, 201, 3769, 3397, 3009, 4570, 2692, 2095, 4797, - 3941, 2845, 1360, 1763, 3589, 3716, 2365, 196, 1112, - 123, 2267, 4731, 228, 4673, 1590, 3794, 3816, 2846, - 863, 3759, 1182, 304, 2540, 66, 4385, 3694, 3525, - 31, 4315, 4266, 4089, 2728, 1405, 1294, 4022, 2222, - 370, 101, 3253, 4145, 1994, 1358, 981, 2203, 2167, - 3742, 4696, 614, 2733, 396, 4399, 427, 1682, 4896, - 3429, 693, 870, 4939, 3305, 4250, 3680]), - values=tensor([0.3479, 0.5084, 0.6603, 0.8257, 0.8683, 0.9247, 0.0338, - 0.9486, 0.8504, 0.5745, 0.3925, 0.3196, 0.6449, 0.2119, - 0.0164, 0.7309, 0.7682, 0.1461, 0.7397, 0.9951, 0.7123, - 0.4571, 0.7549, 0.0282, 0.5968, 0.6667, 0.3749, 0.3789, - 0.4293, 0.3353, 0.3273, 0.0531, 0.5787, 0.8917, 0.4198, - 0.7695, 0.7895, 0.7926, 0.6654, 0.0192, 0.0703, 0.9096, - 0.9289, 0.6077, 0.6990, 0.6780, 0.1687, 0.0557, 0.0641, - 0.1726, 0.7968, 0.1192, 0.9982, 0.1104, 0.3778, 0.1311, - 0.0584, 0.9615, 0.6551, 
0.7173, 0.4827, 0.9281, 0.2508, - 0.5901, 0.8616, 0.6261, 0.7668, 0.8880, 0.5680, 0.6476, - 0.9494, 0.3895, 0.7153, 0.7995, 0.4681, 0.0628, 0.0354, - 0.8123, 0.7147, 0.5397, 0.7785, 0.1737, 0.3550, 0.8870, - 0.9193, 0.0915, 0.0963, 0.4243, 0.0483, 0.3655, 0.7711, - 0.4395, 0.3161, 0.5266, 0.7991, 0.4530, 0.0590, 0.9302, - 0.7021, 0.5336, 0.6784, 0.9823, 0.0943, 0.7391, 0.7084, - 0.0171, 0.4786, 0.7623, 0.5776, 0.2256, 0.8698, 0.1309, - 0.6095, 0.6277, 0.0828, 0.3536, 0.7932, 0.1162, 0.9939, - 0.6893, 0.6054, 0.2963, 0.4057, 0.5571, 0.8162, 0.7161, - 0.6029, 0.7576, 0.8687, 0.3351, 0.8262, 0.5784, 0.6376, - 0.1057, 0.2968, 0.0568, 0.6646, 0.7354, 0.2403, 0.0158, - 0.7552, 0.5770, 0.3899, 0.7014, 0.1196, 0.2500, 0.6112, - 0.3203, 0.8311, 0.8445, 0.8722, 0.6620, 0.5633, 0.3401, - 0.0024, 0.6473, 0.3675, 0.6286, 0.4764, 0.3994, 0.7176, - 0.9295, 0.7610, 0.0448, 0.1910, 0.5959, 0.2410, 0.6714, - 0.3638, 0.8788, 0.4303, 0.8357, 0.1493, 0.7533, 0.2046, - 0.6241, 0.3330, 0.7519, 0.0927, 0.5403, 0.3301, 0.0842, - 0.3044, 0.5311, 0.1859, 0.7234, 0.6523, 0.1074, 0.7205, - 0.0951, 0.9394, 0.8290, 0.0965, 0.9119, 0.9547, 0.5884, - 0.2956, 0.6206, 0.7425, 0.9894, 0.3994, 0.9059, 0.3500, - 0.1825, 0.6628, 0.6687, 0.3257, 0.5028, 0.7592, 0.5362, - 0.2886, 0.3968, 0.4420, 0.4118, 0.6245, 0.3599, 0.5238, - 0.6126, 0.6306, 0.0343, 0.1672, 0.1822, 0.1255, 0.6333, - 0.3425, 0.1597, 0.8225, 0.7857, 0.7675, 0.1595, 0.4863, - 0.8578, 0.1155, 0.8038, 0.8906, 0.6082, 0.3640, 0.5820, - 0.4951, 0.3638, 0.3016, 0.7272, 0.7832, 0.5085, 0.1101, - 0.2648, 0.6399, 0.4137, 0.5843, 0.7184]), + col_indices=tensor([ 262, 2035, 485, 251, 4016, 1058, 3070, 4726, 3354, + 1939, 656, 3827, 2792, 2167, 1494, 4192, 4713, 3379, + 2457, 4009, 3893, 2667, 534, 1122, 570, 1229, 1463, + 1765, 2099, 1423, 4621, 340, 797, 1441, 64, 2918, + 554, 4698, 4070, 3140, 583, 2602, 1349, 1767, 934, + 4043, 2088, 3735, 4900, 920, 1738, 2586, 1766, 1119, + 3760, 1261, 2743, 4494, 2571, 1393, 591, 2226, 3745, + 190, 2270, 3593, 2940, 4389, 4189, 3957, 2357, 1743, + 741, 4832, 644, 4777, 4297, 1070, 1878, 1639, 1265, + 2829, 4815, 1097, 123, 3210, 3906, 4032, 409, 1440, + 2276, 1690, 1240, 1218, 2681, 4285, 967, 4145, 991, + 380, 4630, 1568, 2612, 4648, 571, 2340, 3473, 4468, + 4951, 590, 3746, 4386, 4991, 4675, 4247, 3758, 4080, + 2914, 2911, 472, 2572, 297, 3753, 3928, 4336, 3350, + 4684, 2928, 1411, 3040, 4413, 4746, 2683, 1901, 156, + 3365, 4910, 1017, 1689, 4202, 695, 2739, 3318, 2650, + 2581, 4444, 3875, 2644, 1034, 1322, 773, 825, 4773, + 1762, 1101, 134, 557, 552, 3083, 499, 2461, 662, + 4984, 1598, 2411, 2667, 4853, 2929, 1919, 807, 2944, + 3720, 3584, 1884, 464, 1313, 1150, 1993, 1991, 3764, + 4464, 549, 1610, 1298, 3085, 1988, 1550, 3474, 2899, + 4106, 1211, 3566, 962, 719, 2589, 1845, 2344, 4331, + 2467, 4382, 742, 4580, 1728, 2164, 1341, 2191, 2416, + 4108, 381, 350, 2062, 4226, 1858, 3082, 2326, 4068, + 2758, 1464, 1085, 4496, 2754, 161, 3385, 3816, 4881, + 3720, 1081, 1453, 3426, 1398, 4809, 4474, 1813, 629, + 3352, 1118, 968, 2723, 119, 3558, 2137, 4256, 4030, + 4468, 967, 4265, 2344, 2803, 1802, 4304]), + values=tensor([0.0871, 0.3484, 0.2566, 0.9283, 0.1741, 0.9571, 0.8521, + 0.6057, 0.2563, 0.1045, 0.9085, 0.8959, 0.7821, 0.8727, + 0.6685, 0.8223, 0.1524, 0.6655, 0.4758, 0.1442, 0.6879, + 0.4766, 0.1133, 0.9196, 0.1521, 0.6905, 0.7846, 0.0122, + 0.9559, 0.5783, 0.7867, 0.0761, 0.6975, 0.4309, 0.0264, + 0.0557, 0.1429, 0.4538, 0.0539, 0.3491, 0.9221, 0.9334, + 0.2993, 0.9627, 0.4567, 0.8190, 0.0533, 0.4908, 0.4644, + 0.3233, 0.5006, 
0.9344, 0.6161, 0.2284, 0.9734, 0.0052, + 0.4383, 0.8643, 0.9330, 0.7708, 0.1842, 0.2466, 0.9870, + 0.7330, 0.9632, 0.0038, 0.7263, 0.0927, 0.6443, 0.0353, + 0.1452, 0.2942, 0.8150, 0.9734, 0.0725, 0.4772, 0.2666, + 0.3352, 0.2112, 0.2663, 0.2571, 0.3320, 0.1354, 0.9465, + 0.3307, 0.8541, 0.4032, 0.4545, 0.9709, 0.4870, 0.3303, + 0.6591, 0.4920, 0.2138, 0.2687, 0.2549, 0.0918, 0.2328, + 0.2962, 0.7744, 0.0496, 0.5982, 0.2525, 0.1633, 0.5355, + 0.3492, 0.9753, 0.1833, 0.8697, 0.0845, 0.0054, 0.2943, + 0.9821, 0.7834, 0.1851, 0.2576, 0.6535, 0.1276, 0.3355, + 0.8625, 0.7957, 0.2216, 0.2334, 0.8911, 0.5346, 0.7484, + 0.0957, 0.7183, 0.4187, 0.7637, 0.7388, 0.4363, 0.3237, + 0.0534, 0.1720, 0.2441, 0.4731, 0.0804, 0.8457, 0.1423, + 0.4546, 0.2480, 0.8873, 0.2714, 0.1548, 0.1114, 0.6153, + 0.4668, 0.5543, 0.1948, 0.6609, 0.9571, 0.5826, 0.5814, + 0.5154, 0.3817, 0.8387, 0.4262, 0.0502, 0.9612, 0.6460, + 0.6082, 0.0199, 0.7345, 0.7435, 0.1742, 0.7268, 0.9121, + 0.6326, 0.9690, 0.4748, 0.6829, 0.7581, 0.2134, 0.2328, + 0.6122, 0.1927, 0.4101, 0.1967, 0.3705, 0.2988, 0.2241, + 0.0243, 0.3422, 0.9698, 0.3703, 0.1179, 0.8922, 0.5610, + 0.6790, 0.8139, 0.3896, 0.4613, 0.8865, 0.0387, 0.7094, + 0.4145, 0.9294, 0.9785, 0.5763, 0.3234, 0.5144, 0.3400, + 0.5427, 0.5328, 0.8371, 0.3451, 0.4750, 0.4464, 0.2861, + 0.3037, 0.0063, 0.7639, 0.0990, 0.0874, 0.0368, 0.1610, + 0.6825, 0.5116, 0.9108, 0.7259, 0.0932, 0.4235, 0.3752, + 0.9322, 0.3307, 0.7276, 0.7046, 0.2464, 0.5850, 0.2349, + 0.3961, 0.8183, 0.7883, 0.0990, 0.1263, 0.1382, 0.2157, + 0.1476, 0.2670, 0.7789, 0.2413, 0.2428, 0.7730, 0.2374, + 0.6779, 0.6296, 0.3705, 0.7172, 0.2121]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.0011, 0.9357, 0.8539, ..., 0.1995, 0.1479, 0.1616]) +tensor([0.9230, 0.6870, 0.7737, ..., 0.0152, 0.4575, 0.0315]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -317,13 +347,91 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.948570489883423 seconds +Time: 10.898369789123535 seconds -[20.16, 20.24, 20.28, 20.24, 20.08, 20.16, 20.2, 20.28, 20.28, 20.36] -[20.72, 20.56, 20.44, 24.96, 27.52, 28.24, 29.2, 26.64, 25.12, 24.04, 23.96, 23.92, 24.04, 24.2] -14.660816431045532 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 289937, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.948570489883423, 'TIME_S_1KI': 0.037761894790535266, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 337.0917576313019, 'W': 22.992700250818316} -[20.16, 20.24, 20.28, 20.24, 20.08, 20.16, 20.2, 20.28, 20.28, 20.36, 20.4, 20.4, 20.44, 20.84, 20.84, 20.76, 20.64, 20.8, 20.84, 20.84] -368.2 -18.41 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 289937, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.948570489883423, 'TIME_S_1KI': 0.037761894790535266, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 337.0917576313019, 'W': 22.992700250818316, 'J_1KI': 1.1626379442130597, 'W_1KI': 0.07930240104166876, 'W_D': 4.582700250818316, 'J_D': 67.18612713575365, 'W_D_1KI': 0.015805848342289243, 'J_D_1KI': 5.4514768181671336e-05} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 262, 2035, 485, 251, 4016, 1058, 3070, 4726, 3354, + 1939, 656, 3827, 2792, 2167, 1494, 4192, 4713, 3379, + 2457, 4009, 3893, 2667, 534, 1122, 570, 1229, 1463, + 1765, 2099, 1423, 4621, 340, 797, 1441, 64, 2918, + 554, 4698, 4070, 3140, 583, 2602, 1349, 1767, 934, + 4043, 2088, 3735, 4900, 920, 1738, 2586, 1766, 1119, + 3760, 1261, 2743, 4494, 2571, 1393, 591, 2226, 3745, + 190, 2270, 3593, 2940, 4389, 4189, 3957, 2357, 1743, + 741, 4832, 644, 4777, 4297, 1070, 1878, 1639, 1265, + 2829, 4815, 1097, 123, 3210, 3906, 4032, 409, 1440, + 2276, 1690, 1240, 1218, 2681, 4285, 967, 4145, 991, + 380, 4630, 1568, 2612, 4648, 571, 2340, 3473, 4468, + 4951, 590, 3746, 4386, 4991, 4675, 4247, 3758, 4080, + 2914, 2911, 472, 2572, 297, 3753, 3928, 4336, 3350, + 4684, 2928, 1411, 3040, 4413, 4746, 2683, 1901, 156, + 3365, 4910, 1017, 1689, 4202, 695, 2739, 3318, 2650, + 2581, 4444, 3875, 2644, 1034, 1322, 773, 825, 4773, + 1762, 1101, 134, 557, 552, 3083, 499, 2461, 662, + 4984, 1598, 2411, 2667, 4853, 2929, 1919, 807, 2944, + 3720, 3584, 1884, 464, 1313, 1150, 1993, 1991, 3764, + 4464, 549, 1610, 1298, 3085, 1988, 1550, 3474, 2899, + 4106, 1211, 3566, 962, 719, 2589, 1845, 2344, 4331, + 2467, 4382, 742, 4580, 1728, 2164, 1341, 2191, 2416, + 4108, 381, 350, 2062, 4226, 1858, 3082, 2326, 4068, + 2758, 1464, 1085, 4496, 2754, 161, 3385, 3816, 4881, + 3720, 1081, 1453, 3426, 1398, 4809, 4474, 1813, 629, + 3352, 1118, 968, 2723, 119, 3558, 2137, 4256, 4030, + 4468, 967, 4265, 2344, 2803, 1802, 4304]), + values=tensor([0.0871, 0.3484, 0.2566, 0.9283, 0.1741, 0.9571, 0.8521, + 0.6057, 0.2563, 0.1045, 0.9085, 0.8959, 0.7821, 0.8727, + 0.6685, 0.8223, 0.1524, 0.6655, 0.4758, 0.1442, 0.6879, + 0.4766, 0.1133, 0.9196, 0.1521, 0.6905, 0.7846, 0.0122, + 0.9559, 0.5783, 0.7867, 0.0761, 0.6975, 0.4309, 0.0264, + 0.0557, 0.1429, 0.4538, 0.0539, 0.3491, 0.9221, 0.9334, + 0.2993, 0.9627, 0.4567, 0.8190, 0.0533, 0.4908, 0.4644, + 0.3233, 0.5006, 0.9344, 0.6161, 0.2284, 0.9734, 0.0052, + 0.4383, 0.8643, 0.9330, 0.7708, 0.1842, 0.2466, 0.9870, + 0.7330, 0.9632, 0.0038, 0.7263, 0.0927, 0.6443, 0.0353, + 0.1452, 0.2942, 0.8150, 0.9734, 0.0725, 0.4772, 0.2666, + 0.3352, 0.2112, 0.2663, 0.2571, 0.3320, 0.1354, 0.9465, + 0.3307, 0.8541, 0.4032, 0.4545, 0.9709, 0.4870, 0.3303, + 0.6591, 0.4920, 0.2138, 0.2687, 0.2549, 0.0918, 0.2328, + 0.2962, 0.7744, 0.0496, 0.5982, 0.2525, 0.1633, 0.5355, + 0.3492, 0.9753, 0.1833, 0.8697, 0.0845, 0.0054, 0.2943, + 0.9821, 0.7834, 0.1851, 0.2576, 0.6535, 0.1276, 0.3355, + 0.8625, 0.7957, 0.2216, 0.2334, 0.8911, 0.5346, 0.7484, + 0.0957, 0.7183, 0.4187, 0.7637, 0.7388, 0.4363, 0.3237, + 0.0534, 0.1720, 0.2441, 0.4731, 0.0804, 0.8457, 0.1423, + 0.4546, 0.2480, 0.8873, 0.2714, 0.1548, 0.1114, 0.6153, + 0.4668, 0.5543, 0.1948, 0.6609, 0.9571, 0.5826, 0.5814, + 0.5154, 0.3817, 0.8387, 0.4262, 0.0502, 0.9612, 0.6460, + 0.6082, 0.0199, 0.7345, 0.7435, 0.1742, 0.7268, 0.9121, + 0.6326, 0.9690, 0.4748, 0.6829, 0.7581, 0.2134, 0.2328, + 0.6122, 0.1927, 0.4101, 0.1967, 0.3705, 0.2988, 0.2241, + 0.0243, 0.3422, 0.9698, 0.3703, 0.1179, 0.8922, 0.5610, + 0.6790, 0.8139, 0.3896, 0.4613, 0.8865, 0.0387, 0.7094, + 
0.4145, 0.9294, 0.9785, 0.5763, 0.3234, 0.5144, 0.3400, + 0.5427, 0.5328, 0.8371, 0.3451, 0.4750, 0.4464, 0.2861, + 0.3037, 0.0063, 0.7639, 0.0990, 0.0874, 0.0368, 0.1610, + 0.6825, 0.5116, 0.9108, 0.7259, 0.0932, 0.4235, 0.3752, + 0.9322, 0.3307, 0.7276, 0.7046, 0.2464, 0.5850, 0.2349, + 0.3961, 0.8183, 0.7883, 0.0990, 0.1263, 0.1382, 0.2157, + 0.1476, 0.2670, 0.7789, 0.2413, 0.2428, 0.7730, 0.2374, + 0.6779, 0.6296, 0.3705, 0.7172, 0.2121]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.9230, 0.6870, 0.7737, ..., 0.0152, 0.4575, 0.0315]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.898369789123535 seconds + +[20.8, 20.84, 20.72, 20.68, 20.44, 20.36, 20.4, 20.44, 20.64, 20.88] +[20.88, 20.96, 20.84, 23.76, 25.48, 26.72, 27.4, 27.92, 24.28, 23.32, 23.56, 23.68, 23.56, 23.72] +14.659304857254028 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 290608, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.898369789123535, 'TIME_S_1KI': 0.037501960679415344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.4187112140655, 'W': 22.471646126593484} +[20.8, 20.84, 20.72, 20.68, 20.44, 20.36, 20.4, 20.44, 20.64, 20.88, 20.6, 20.44, 20.4, 20.4, 20.2, 20.2, 20.08, 20.0, 19.88, 19.92] +367.22 +18.361 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 290608, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.898369789123535, 'TIME_S_1KI': 0.037501960679415344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 329.4187112140655, 'W': 22.471646126593484, 'J_1KI': 1.1335500440939874, 'W_1KI': 0.07732631629753305, 'W_D': 4.110646126593483, 'J_D': 60.259214730024304, 'W_D_1KI': 0.014144986120800127, 'J_D_1KI': 4.867376713923955e-05} diff --git a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_5e-05.json index 8ad2558..68fb904 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 1, "ITERATIONS": 154350, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.289572715759277, "TIME_S_1KI": 0.0666638983852237, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 297.04585779190063, "W": 21.84801552939114, "J_1KI": 1.9244953533650835, "W_1KI": 0.14154852950690727, "W_D": 3.3270155293911436, "J_D": 45.23413947987558, "W_D_1KI": 0.0215550082888963, "J_D_1KI": 0.0001396501994745468} +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 149910, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.487581491470337, "TIME_S_1KI": 0.06995918545440823, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 308.13872550964356, "W": 22.64443234695719, "J_1KI": 2.0554914649432563, "W_1KI": 0.15105351442170092, "W_D": 4.297432346957187, "J_D": 58.478185986995705, "W_D_1KI": 0.02866674902913206, "J_D_1KI": 0.00019122639603183283} diff --git 
a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_5e-05.output index f253283..4ad6b06 100644 --- a/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_1core/altra_1_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,13 +1,51 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014810323715209961} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014495134353637695} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([2527, 1057, 2629, ..., 4578, 222, 4522]), + values=tensor([0.8818, 0.3903, 0.1399, ..., 0.1466, 0.3063, 0.0529]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.6522, 0.5583, 0.7059, ..., 0.6700, 0.4586, 0.5971]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.014495134353637695 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 72438 -ss 5000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 5.073688983917236} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1249, 1250]), + col_indices=tensor([2776, 3385, 4122, ..., 2958, 1775, 4424]), + values=tensor([0.3033, 0.9980, 0.9637, ..., 0.4776, 0.5824, 0.9810]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.2995, 0.4250, 0.8077, ..., 0.7167, 0.6502, 0.4555]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 5.073688983917236 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 149910 -ss 5000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.487581491470337} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([4902, 4751, 573, ..., 1409, 1871, 577]), - values=tensor([0.0874, 0.7756, 0.4965, ..., 0.1251, 0.3364, 0.3476]), + col_indices=tensor([4220, 2132, 4487, ..., 2448, 3646, 2184]), + values=tensor([0.6318, 0.3725, 0.8565, ..., 0.5203, 0.4829, 0.8123]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.4221, 0.7918, 0.4416, ..., 0.8475, 0.7362, 0.1103]) +tensor([0.5845, 0.5120, 0.6003, ..., 0.5543, 0.5465, 0.9228]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 0.014810323715209961 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 70896 -ss 5000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.822846412658691} +Time: 10.487581491470337 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1250, 1250]), - col_indices=tensor([1594, 2931, 2652, ..., 4428, 449, 1795]), - values=tensor([0.3058, 0.1710, 0.0965, ..., 0.7799, 0.8373, 0.5140]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([4220, 2132, 4487, ..., 2448, 3646, 2184]), + values=tensor([0.6318, 0.3725, 0.8565, ..., 0.5203, 0.4829, 0.8123]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.0899, 0.4612, 0.1283, ..., 0.7452, 0.2953, 0.1670]) +tensor([0.5845, 0.5120, 0.6003, ..., 0.5543, 0.5465, 0.9228]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,48 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 4.822846412658691 seconds +Time: 10.487581491470337 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 154350 -ss 5000 -sd 5e-05 -c 1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.289572715759277} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1250, 1250]), - col_indices=tensor([ 621, 1968, 1113, ..., 1968, 726, 3393]), - values=tensor([0.9316, 0.3440, 0.3874, ..., 0.4845, 0.3520, 0.3225]), - size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.4702, 0.8122, 0.0166, ..., 0.1291, 0.0008, 0.5220]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250 -Density: 5e-05 -Time: 10.289572715759277 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1250, 1250]), - col_indices=tensor([ 621, 1968, 1113, ..., 1968, 726, 3393]), - values=tensor([0.9316, 0.3440, 0.3874, ..., 0.4845, 0.3520, 0.3225]), - size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.4702, 0.8122, 0.0166, ..., 0.1291, 0.0008, 0.5220]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250 -Density: 5e-05 -Time: 10.289572715759277 seconds - -[20.4, 20.44, 20.52, 20.68, 20.64, 20.48, 20.24, 20.36, 20.2, 20.16] -[20.36, 20.44, 21.32, 23.28, 23.28, 24.96, 25.6, 26.08, 24.88, 23.92, 23.64, 23.6, 23.6] -13.59601092338562 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 154350, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.289572715759277, 'TIME_S_1KI': 0.0666638983852237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 297.04585779190063, 'W': 21.84801552939114} -[20.4, 20.44, 20.52, 20.68, 20.64, 20.48, 20.24, 20.36, 20.2, 20.16, 20.52, 20.64, 20.96, 21.2, 21.24, 21.04, 20.76, 20.36, 20.08, 20.08] -370.41999999999996 -18.520999999999997 -{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 154350, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.289572715759277, 'TIME_S_1KI': 0.0666638983852237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 297.04585779190063, 'W': 21.84801552939114, 'J_1KI': 1.9244953533650835, 'W_1KI': 0.14154852950690727, 'W_D': 3.3270155293911436, 'J_D': 45.23413947987558, 'W_D_1KI': 0.0215550082888963, 'J_D_1KI': 0.0001396501994745468} +[20.2, 20.44, 20.36, 20.28, 20.4, 20.4, 20.28, 20.28, 20.52, 20.8] +[20.88, 20.88, 21.44, 25.44, 27.32, 28.04, 28.68, 25.96, 24.48, 23.4, 23.16, 23.2, 23.2] +13.607703685760498 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 149910, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.487581491470337, 'TIME_S_1KI': 0.06995918545440823, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 308.13872550964356, 'W': 22.64443234695719} +[20.2, 20.44, 20.36, 20.28, 20.4, 20.4, 20.28, 20.28, 20.52, 20.8, 20.4, 20.4, 20.48, 20.64, 20.52, 20.44, 20.4, 20.2, 20.16, 20.08] +366.94000000000005 +18.347 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 149910, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.487581491470337, 'TIME_S_1KI': 0.06995918545440823, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 308.13872550964356, 'W': 22.64443234695719, 'J_1KI': 2.0554914649432563, 'W_1KI': 0.15105351442170092, 'W_D': 4.297432346957187, 'J_D': 58.478185986995705, 'W_D_1KI': 0.02866674902913206, 'J_D_1KI': 0.00019122639603183283} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..6e8d15a --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 362, 
"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.458970785140991, "TIME_S_1KI": 28.892184489339755, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 843.6801159381868, "W": 64.68, "J_1KI": 2330.608055077864, "W_1KI": 178.6740331491713, "W_D": 29.0225, "J_D": 378.5668856650591, "W_D_1KI": 80.17265193370166, "J_D_1KI": 221.4714141814963} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..a4e45b0 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.8928701877593994} + +tensor(indices=tensor([[58791, 73131, 88126, ..., 5351, 19697, 4548], + [86236, 31656, 15988, ..., 30767, 69117, 58161]]), + values=tensor([0.1204, 0.6176, 0.3154, ..., 0.2086, 0.4659, 0.5503]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.7281, 0.2344, 0.6036, ..., 0.9404, 0.0725, 0.5355]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 2.8928701877593994 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '362', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.458970785140991} + +tensor(indices=tensor([[ 2973, 76526, 18432, ..., 12133, 71602, 59722], + [76574, 54783, 34116, ..., 41113, 64908, 71871]]), + values=tensor([0.0072, 0.9730, 0.8159, ..., 0.7707, 0.6456, 0.2969]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.6834, 0.2829, 0.7273, ..., 0.6780, 0.1842, 0.8230]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.458970785140991 seconds + +tensor(indices=tensor([[ 2973, 76526, 18432, ..., 12133, 71602, 59722], + [76574, 54783, 34116, ..., 41113, 64908, 71871]]), + values=tensor([0.0072, 0.9730, 0.8159, ..., 0.7707, 0.6456, 0.2969]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.6834, 0.2829, 0.7273, ..., 0.6780, 0.1842, 0.8230]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.458970785140991 seconds + +[39.79, 39.54, 39.38, 38.96, 39.24, 38.89, 39.23, 38.89, 39.37, 45.8] +[64.68] +13.043910264968872 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 362, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 
'TIME_S': 10.458970785140991, 'TIME_S_1KI': 28.892184489339755, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 843.6801159381868, 'W': 64.68} +[39.79, 39.54, 39.38, 38.96, 39.24, 38.89, 39.23, 38.89, 39.37, 45.8, 40.25, 39.47, 38.85, 38.99, 38.81, 38.8, 44.08, 38.81, 39.36, 39.12] +713.1500000000001 +35.657500000000006 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 362, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.458970785140991, 'TIME_S_1KI': 28.892184489339755, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 843.6801159381868, 'W': 64.68, 'J_1KI': 2330.608055077864, 'W_1KI': 178.6740331491713, 'W_D': 29.0225, 'J_D': 378.5668856650591, 'W_D_1KI': 80.17265193370166, 'J_D_1KI': 221.4714141814963} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..203ccfb --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 28.68237566947937, "TIME_S_1KI": 286.8237566947937, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2208.85256472826, "W": 65.45, "J_1KI": 22088.525647282597, "W_1KI": 654.5000000000001, "W_D": 30.167249999999996, "J_D": 1018.1055390878319, "W_D_1KI": 301.67249999999996, "J_D_1KI": 3016.7249999999995} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..0c536cd --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 28.68237566947937} + +tensor(indices=tensor([[45866, 21818, 90078, ..., 48525, 51910, 25231], + [24824, 8406, 28560, ..., 76405, 29596, 55525]]), + values=tensor([0.1606, 0.0532, 0.2422, ..., 0.0519, 0.4291, 0.6123]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.0660, 0.8386, 0.5601, ..., 0.6176, 0.3323, 0.3593]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 28.68237566947937 seconds + +tensor(indices=tensor([[45866, 21818, 90078, ..., 48525, 51910, 25231], + [24824, 8406, 28560, ..., 76405, 29596, 55525]]), + values=tensor([0.1606, 0.0532, 0.2422, ..., 0.0519, 0.4291, 0.6123]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.0660, 0.8386, 0.5601, ..., 0.6176, 0.3323, 0.3593]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 28.68237566947937 
seconds + +[39.81, 38.87, 38.84, 38.85, 39.23, 39.4, 39.34, 39.42, 38.82, 38.83] +[65.45] +33.74870228767395 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 28.68237566947937, 'TIME_S_1KI': 286.8237566947937, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2208.85256472826, 'W': 65.45} +[39.81, 38.87, 38.84, 38.85, 39.23, 39.4, 39.34, 39.42, 38.82, 38.83, 40.66, 39.41, 39.39, 38.85, 39.95, 38.82, 38.87, 38.78, 39.65, 39.03] +705.6550000000001 +35.28275000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 28.68237566947937, 'TIME_S_1KI': 286.8237566947937, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2208.85256472826, 'W': 65.45, 'J_1KI': 22088.525647282597, 'W_1KI': 654.5000000000001, 'W_D': 30.167249999999996, 'J_D': 1018.1055390878319, 'W_D_1KI': 301.67249999999996, 'J_D_1KI': 3016.7249999999995} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..91927fd --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 284.509868144989, "TIME_S_1KI": 2845.09868144989, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 20913.089103269576, "W": 66.06, "J_1KI": 209130.89103269577, "W_1KI": 660.6, "W_D": 30.540000000000006, "J_D": 9668.267351102832, "W_D_1KI": 305.40000000000003, "J_D_1KI": 3054.0000000000005} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..dee42a0 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 284.509868144989} + +tensor(indices=tensor([[49763, 43101, 66675, ..., 73270, 45201, 46163], + [45697, 43367, 24277, ..., 76754, 39903, 69552]]), + values=tensor([0.7589, 0.0177, 0.8296, ..., 0.2563, 0.6450, 0.3604]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.2227, 0.1834, 0.9727, ..., 0.3699, 0.2605, 0.0936]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 284.509868144989 seconds + +tensor(indices=tensor([[49763, 43101, 66675, ..., 73270, 45201, 46163], + [45697, 43367, 24277, ..., 76754, 39903, 69552]]), + values=tensor([0.7589, 0.0177, 
0.8296, ..., 0.2563, 0.6450, 0.3604]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.2227, 0.1834, 0.9727, ..., 0.3699, 0.2605, 0.0936]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 284.509868144989 seconds + +[39.94, 39.61, 39.25, 39.04, 39.12, 39.14, 39.52, 39.44, 39.18, 39.41] +[66.06] +316.57718896865845 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 284.509868144989, 'TIME_S_1KI': 2845.09868144989, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 20913.089103269576, 'W': 66.06} +[39.94, 39.61, 39.25, 39.04, 39.12, 39.14, 39.52, 39.44, 39.18, 39.41, 39.84, 39.36, 39.77, 39.23, 39.29, 39.06, 39.22, 39.22, 39.1, 44.51] +710.3999999999999 +35.519999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 284.509868144989, 'TIME_S_1KI': 2845.09868144989, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 20913.089103269576, 'W': 66.06, 'J_1KI': 209130.89103269577, 'W_1KI': 660.6, 'W_D': 30.540000000000006, 'J_D': 9668.267351102832, 'W_D_1KI': 305.40000000000003, 'J_D_1KI': 3054.0000000000005} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..d09a424 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3487, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.00940465927124, "TIME_S_1KI": 2.8704917290711904, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 827.4998007631302, "W": 65.34, "J_1KI": 237.30995146634075, "W_1KI": 18.73817034700316, "W_D": 29.525999999999996, "J_D": 373.9326464238166, "W_D_1KI": 8.467450530542013, "J_D_1KI": 2.428290946527678} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..b265bb6 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.30111074447631836} + +tensor(indices=tensor([[18461, 89769, 72704, ..., 33431, 94383, 61001], + [57293, 15299, 90790, ..., 74426, 46108, 18253]]), + values=tensor([0.5589, 0.4623, 0.4281, ..., 0.9456, 0.3660, 0.9670]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.5652, 0.6228, 0.3883, ..., 0.3322, 0.6181, 
0.9927]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.30111074447631836 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '3487', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.00940465927124} + +tensor(indices=tensor([[10858, 46480, 87461, ..., 11643, 342, 27140], + [39828, 51027, 15315, ..., 19594, 99205, 1718]]), + values=tensor([0.6737, 0.1822, 0.5864, ..., 0.3526, 0.3959, 0.8007]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.7414, 0.0742, 0.8578, ..., 0.9670, 0.0406, 0.5119]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.00940465927124 seconds + +tensor(indices=tensor([[10858, 46480, 87461, ..., 11643, 342, 27140], + [39828, 51027, 15315, ..., 19594, 99205, 1718]]), + values=tensor([0.6737, 0.1822, 0.5864, ..., 0.3526, 0.3959, 0.8007]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.7414, 0.0742, 0.8578, ..., 0.9670, 0.0406, 0.5119]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.00940465927124 seconds + +[40.43, 44.41, 39.66, 39.22, 40.07, 39.09, 39.64, 39.6, 41.64, 39.44] +[65.34] +12.664520978927612 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3487, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.00940465927124, 'TIME_S_1KI': 2.8704917290711904, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 827.4998007631302, 'W': 65.34} +[40.43, 44.41, 39.66, 39.22, 40.07, 39.09, 39.64, 39.6, 41.64, 39.44, 40.18, 39.67, 39.32, 39.13, 39.23, 38.99, 39.14, 38.96, 39.0, 38.97] +716.2800000000001 +35.81400000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3487, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.00940465927124, 'TIME_S_1KI': 2.8704917290711904, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 827.4998007631302, 'W': 65.34, 'J_1KI': 237.30995146634075, 'W_1KI': 18.73817034700316, 'W_D': 29.525999999999996, 'J_D': 373.9326464238166, 'W_D_1KI': 8.467450530542013, 'J_D_1KI': 2.428290946527678} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..deac0f8 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 725, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.478241443634033, "TIME_S_1KI": 14.452746818805561, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 850.4818459033967, "W": 64.84, 
"J_1KI": 1173.0784081426161, "W_1KI": 89.43448275862069, "W_D": 29.61025, "J_D": 388.38649101883175, "W_D_1KI": 40.84172413793104, "J_D_1KI": 56.333412604042806} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..dcc17d6 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,57 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.4476914405822754} + +tensor(indices=tensor([[ 1323, 82459, 78654, ..., 47070, 24642, 34894], + [76031, 87894, 99380, ..., 88344, 80962, 74135]]), + values=tensor([1.5908e-01, 7.3565e-01, 6.5693e-02, ..., + 4.5422e-01, 9.5700e-01, 2.0786e-04]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.6217, 0.2257, 0.6077, ..., 0.4263, 0.5923, 0.1812]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 1.4476914405822754 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '725', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.478241443634033} + +tensor(indices=tensor([[48966, 88452, 71005, ..., 56745, 41106, 90252], + [37609, 64658, 39125, ..., 62895, 52127, 83752]]), + values=tensor([0.4982, 0.4313, 0.9448, ..., 0.3000, 0.8823, 0.1923]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.9999, 0.4709, 0.5420, ..., 0.9110, 0.0907, 0.6410]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.478241443634033 seconds + +tensor(indices=tensor([[48966, 88452, 71005, ..., 56745, 41106, 90252], + [37609, 64658, 39125, ..., 62895, 52127, 83752]]), + values=tensor([0.4982, 0.4313, 0.9448, ..., 0.3000, 0.8823, 0.1923]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.9999, 0.4709, 0.5420, ..., 0.9110, 0.0907, 0.6410]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.478241443634033 seconds + +[40.05, 38.91, 39.17, 39.11, 39.3, 38.85, 39.08, 39.28, 39.0, 39.22] +[64.84] +13.116623163223267 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 725, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.478241443634033, 'TIME_S_1KI': 14.452746818805561, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 850.4818459033967, 'W': 64.84} +[40.05, 38.91, 39.17, 39.11, 39.3, 38.85, 39.08, 39.28, 39.0, 39.22, 39.98, 39.34, 38.82, 39.59, 39.46, 38.82, 38.83, 38.87, 39.15, 38.78] +704.595 +35.22975 +{'CPU': 'Epyc 7313P', 
'CORES': 1, 'ITERATIONS': 725, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.478241443634033, 'TIME_S_1KI': 14.452746818805561, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 850.4818459033967, 'W': 64.84, 'J_1KI': 1173.0784081426161, 'W_1KI': 89.43448275862069, 'W_D': 29.61025, 'J_D': 388.38649101883175, 'W_D_1KI': 40.84172413793104, 'J_D_1KI': 56.333412604042806} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..9f03841 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 36067, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.477728366851807, "TIME_S_1KI": 0.2905073437450247, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 840.6016425848006, "W": 64.38, "J_1KI": 23.30666932610976, "W_1KI": 1.7850112291013944, "W_D": 28.784, "J_D": 375.8291034507751, "W_D_1KI": 0.7980702581307012, "J_D_1KI": 0.022127436663174124} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..f3ec4bc --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0383303165435791} + +tensor(indices=tensor([[5508, 4954, 7848, ..., 1758, 1526, 9240], + [9365, 6354, 5695, ..., 2433, 9489, 8702]]), + values=tensor([0.2436, 0.9410, 0.7512, ..., 0.2766, 0.4734, 0.1148]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.9681, 0.6426, 0.3908, ..., 0.5177, 0.7553, 0.7925]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.0383303165435791 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '27393', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.974592924118042} + +tensor(indices=tensor([[3459, 6969, 3123, ..., 3646, 360, 941], + [5660, 9378, 7356, ..., 6372, 1791, 9998]]), + values=tensor([0.0512, 0.8025, 0.3889, ..., 0.8369, 0.2822, 0.1098]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.6394, 0.1321, 0.3352, ..., 0.5313, 0.1208, 0.4764]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 7.974592924118042 
seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '36067', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.477728366851807} + +tensor(indices=tensor([[3941, 9783, 13, ..., 5091, 7816, 3367], + [8532, 3611, 3948, ..., 9236, 2711, 6400]]), + values=tensor([0.9384, 0.6012, 0.2153, ..., 0.4457, 0.6290, 0.6323]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.4401, 0.6623, 0.5998, ..., 0.5574, 0.8413, 0.6844]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.477728366851807 seconds + +tensor(indices=tensor([[3941, 9783, 13, ..., 5091, 7816, 3367], + [8532, 3611, 3948, ..., 9236, 2711, 6400]]), + values=tensor([0.9384, 0.6012, 0.2153, ..., 0.4457, 0.6290, 0.6323]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.4401, 0.6623, 0.5998, ..., 0.5574, 0.8413, 0.6844]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.477728366851807 seconds + +[41.01, 44.13, 39.31, 39.11, 39.65, 39.59, 38.95, 38.99, 39.06, 38.91] +[64.38] +13.056875467300415 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 36067, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.477728366851807, 'TIME_S_1KI': 0.2905073437450247, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 840.6016425848006, 'W': 64.38} +[41.01, 44.13, 39.31, 39.11, 39.65, 39.59, 38.95, 38.99, 39.06, 38.91, 40.03, 39.11, 39.28, 38.92, 38.87, 40.75, 38.78, 38.74, 39.36, 38.69] +711.92 +35.596 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 36067, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.477728366851807, 'TIME_S_1KI': 0.2905073437450247, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 840.6016425848006, 'W': 64.38, 'J_1KI': 23.30666932610976, 'W_1KI': 1.7850112291013944, 'W_D': 28.784, 'J_D': 375.8291034507751, 'W_D_1KI': 0.7980702581307012, 'J_D_1KI': 0.022127436663174124} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..7b702e9 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3599, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.264557123184204, "TIME_S_1KI": 2.8520581059139216, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 829.6974464750291, "W": 64.54, "J_1KI": 230.53555056266435, "W_1KI": 17.932759099749934, "W_D": 29.417500000000004, "J_D": 378.17825583636767, "W_D_1KI": 8.17379827729925, "J_D_1KI": 2.2711303910250766} diff --git 
a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..5aa9fc1 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.29174256324768066} + +tensor(indices=tensor([[6288, 3372, 4127, ..., 36, 9481, 4676], + [4919, 3466, 863, ..., 2434, 7688, 6024]]), + values=tensor([0.3090, 0.3400, 0.3463, ..., 0.1742, 0.0674, 0.0263]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.5970, 0.5896, 0.3645, ..., 0.4602, 0.6694, 0.4884]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.29174256324768066 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '3599', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.264557123184204} + +tensor(indices=tensor([[6373, 5559, 6720, ..., 5251, 3148, 7094], + [7749, 6579, 7674, ..., 4170, 7096, 7811]]), + values=tensor([0.5260, 0.0922, 0.8759, ..., 0.2980, 0.1359, 0.9599]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.8352, 0.3170, 0.7847, ..., 0.1098, 0.1418, 0.4915]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.264557123184204 seconds + +tensor(indices=tensor([[6373, 5559, 6720, ..., 5251, 3148, 7094], + [7749, 6579, 7674, ..., 4170, 7096, 7811]]), + values=tensor([0.5260, 0.0922, 0.8759, ..., 0.2980, 0.1359, 0.9599]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.8352, 0.3170, 0.7847, ..., 0.1098, 0.1418, 0.4915]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.264557123184204 seconds + +[40.16, 38.71, 39.17, 38.71, 38.88, 39.28, 38.8, 39.08, 39.92, 38.69] +[64.54] +12.85555386543274 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3599, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.264557123184204, 'TIME_S_1KI': 2.8520581059139216, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 829.6974464750291, 'W': 64.54} +[40.16, 38.71, 39.17, 38.71, 38.88, 39.28, 38.8, 39.08, 39.92, 38.69, 39.39, 38.68, 38.97, 38.8, 38.71, 39.55, 38.75, 39.17, 38.81, 38.68] +702.45 +35.1225 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3599, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.264557123184204, 'TIME_S_1KI': 2.8520581059139216, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 829.6974464750291, 'W': 64.54, 'J_1KI': 230.53555056266435, 'W_1KI': 17.932759099749934, 'W_D': 29.417500000000004, 'J_D': 378.17825583636767, 'W_D_1KI': 8.17379827729925, 'J_D_1KI': 2.2711303910250766} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..2d11147 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 371, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.468514919281006, "TIME_S_1KI": 28.217021345770906, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 854.4845187759399, "W": 65.08, "J_1KI": 2303.19277298097, "W_1KI": 175.4177897574124, "W_D": 29.298500000000004, "J_D": 384.68215539884574, "W_D_1KI": 78.97169811320757, "J_D_1KI": 212.86171998169158} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..69bffce --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.8267459869384766} + +tensor(indices=tensor([[3930, 8806, 9327, ..., 5195, 1526, 4660], + [3982, 6239, 5158, ..., 3670, 9954, 6480]]), + values=tensor([0.6288, 0.0296, 0.0961, ..., 0.5582, 0.9231, 0.2059]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.6082, 0.5103, 0.6693, ..., 0.9040, 0.1518, 0.8293]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 2.8267459869384766 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '371', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.468514919281006} + +tensor(indices=tensor([[6062, 9740, 9229, ..., 9890, 7193, 6521], + [9996, 6071, 7958, ..., 9778, 7594, 8240]]), + values=tensor([0.9383, 0.9158, 0.0509, ..., 0.4043, 0.6405, 0.0666]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.1391, 0.6160, 0.5361, ..., 0.5237, 0.6048, 0.5134]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.468514919281006 seconds + +tensor(indices=tensor([[6062, 9740, 9229, ..., 9890, 7193, 6521], + [9996, 6071, 7958, ..., 9778, 7594, 8240]]), + values=tensor([0.9383, 0.9158, 0.0509, ..., 0.4043, 0.6405, 0.0666]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.1391, 
0.6160, 0.5361, ..., 0.5237, 0.6048, 0.5134]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.468514919281006 seconds + +[39.44, 38.73, 41.74, 38.8, 39.49, 39.05, 39.41, 39.31, 39.48, 38.65] +[65.08] +13.129755973815918 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 371, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.468514919281006, 'TIME_S_1KI': 28.217021345770906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 854.4845187759399, 'W': 65.08} +[39.44, 38.73, 41.74, 38.8, 39.49, 39.05, 39.41, 39.31, 39.48, 38.65, 40.17, 39.46, 38.9, 39.07, 39.08, 39.41, 39.2, 41.05, 44.75, 39.14] +715.6299999999999 +35.781499999999994 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 371, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.468514919281006, 'TIME_S_1KI': 28.217021345770906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 854.4845187759399, 'W': 65.08, 'J_1KI': 2303.19277298097, 'W_1KI': 175.4177897574124, 'W_D': 29.298500000000004, 'J_D': 384.68215539884574, 'W_D_1KI': 78.97169811320757, 'J_D_1KI': 212.86171998169158} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..2c63f6c --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 14.091460704803467, "TIME_S_1KI": 140.91460704803467, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1163.8158720517158, "W": 65.47, "J_1KI": 11638.158720517158, "W_1KI": 654.6999999999999, "W_D": 29.9765, "J_D": 532.8719488095045, "W_D_1KI": 299.765, "J_D_1KI": 2997.6499999999996} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..1de9227 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 14.091460704803467} + +tensor(indices=tensor([[2157, 7429, 9727, ..., 8434, 2981, 8787], + [ 242, 1980, 5351, ..., 7163, 7777, 1816]]), + values=tensor([0.4268, 0.3765, 0.1440, ..., 0.7224, 0.4911, 0.3147]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.5452, 0.6131, 0.5579, ..., 0.2678, 0.6085, 0.3336]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 14.091460704803467 seconds + 
+tensor(indices=tensor([[2157, 7429, 9727, ..., 8434, 2981, 8787], + [ 242, 1980, 5351, ..., 7163, 7777, 1816]]), + values=tensor([0.4268, 0.3765, 0.1440, ..., 0.7224, 0.4911, 0.3147]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.5452, 0.6131, 0.5579, ..., 0.2678, 0.6085, 0.3336]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 14.091460704803467 seconds + +[45.1, 38.94, 39.27, 39.74, 39.33, 39.47, 40.55, 38.92, 38.72, 38.73] +[65.47] +17.776323080062866 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 14.091460704803467, 'TIME_S_1KI': 140.91460704803467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1163.8158720517158, 'W': 65.47} +[45.1, 38.94, 39.27, 39.74, 39.33, 39.47, 40.55, 38.92, 38.72, 38.73, 39.37, 39.13, 39.13, 39.17, 39.28, 38.89, 39.23, 39.09, 39.88, 39.06] +709.87 +35.4935 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 14.091460704803467, 'TIME_S_1KI': 140.91460704803467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1163.8158720517158, 'W': 65.47, 'J_1KI': 11638.158720517158, 'W_1KI': 654.6999999999999, 'W_D': 29.9765, 'J_D': 532.8719488095045, 'W_D_1KI': 299.765, 'J_D_1KI': 2997.6499999999996} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..a223447 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 28.189528226852417, "TIME_S_1KI": 281.89528226852417, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2132.68843079567, "W": 64.84, "J_1KI": 21326.884307956698, "W_1KI": 648.4000000000001, "W_D": 29.309250000000006, "J_D": 964.0268104611041, "W_D_1KI": 293.09250000000003, "J_D_1KI": 2930.925} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..a64905d --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 28.189528226852417} + +tensor(indices=tensor([[4879, 6456, 1697, ..., 7477, 315, 5814], + [8632, 4652, 5768, ..., 5606, 5685, 6107]]), + values=tensor([0.0577, 0.9344, 0.4248, ..., 0.4499, 0.0236, 0.5317]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) 
+tensor([0.6774, 0.4585, 0.3637, ..., 0.9140, 0.9408, 0.9074]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 28.189528226852417 seconds + +tensor(indices=tensor([[4879, 6456, 1697, ..., 7477, 315, 5814], + [8632, 4652, 5768, ..., 5606, 5685, 6107]]), + values=tensor([0.0577, 0.9344, 0.4248, ..., 0.4499, 0.0236, 0.5317]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.6774, 0.4585, 0.3637, ..., 0.9140, 0.9408, 0.9074]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 28.189528226852417 seconds + +[39.61, 38.81, 39.41, 39.19, 39.04, 38.87, 38.86, 38.78, 39.21, 39.11] +[64.84] +32.891555070877075 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 28.189528226852417, 'TIME_S_1KI': 281.89528226852417, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2132.68843079567, 'W': 64.84} +[39.61, 38.81, 39.41, 39.19, 39.04, 38.87, 38.86, 38.78, 39.21, 39.11, 40.41, 39.36, 39.26, 39.24, 39.1, 40.38, 44.3, 38.79, 38.81, 39.28] +710.615 +35.53075 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 28.189528226852417, 'TIME_S_1KI': 281.89528226852417, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2132.68843079567, 'W': 64.84, 'J_1KI': 21326.884307956698, 'W_1KI': 648.4000000000001, 'W_D': 29.309250000000006, 'J_D': 964.0268104611041, 'W_D_1KI': 293.09250000000003, 'J_D_1KI': 2930.925} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..670bc2f --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 56.27469801902771, "TIME_S_1KI": 562.7469801902771, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4137.226250839234, "W": 65.54, "J_1KI": 41372.26250839234, "W_1KI": 655.4000000000001, "W_D": 30.26400000000001, "J_D": 1910.4213496398932, "W_D_1KI": 302.6400000000001, "J_D_1KI": 3026.400000000001} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..a04fa89 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 56.27469801902771} + 
+tensor(indices=tensor([[7603, 8913, 1745, ..., 4026, 8108, 42], + [1311, 2084, 8297, ..., 9481, 6695, 6568]]), + values=tensor([0.4159, 0.0061, 0.1611, ..., 0.5379, 0.5740, 0.1044]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.4953, 0.3875, 0.5134, ..., 0.2435, 0.0630, 0.8237]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 56.27469801902771 seconds + +tensor(indices=tensor([[7603, 8913, 1745, ..., 4026, 8108, 42], + [1311, 2084, 8297, ..., 9481, 6695, 6568]]), + values=tensor([0.4159, 0.0061, 0.1611, ..., 0.5379, 0.5740, 0.1044]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.4953, 0.3875, 0.5134, ..., 0.2435, 0.0630, 0.8237]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 56.27469801902771 seconds + +[40.43, 38.88, 39.15, 38.91, 39.03, 38.82, 39.2, 38.83, 39.14, 38.83] +[65.54] +63.12520980834961 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 56.27469801902771, 'TIME_S_1KI': 562.7469801902771, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4137.226250839234, 'W': 65.54} +[40.43, 38.88, 39.15, 38.91, 39.03, 38.82, 39.2, 38.83, 39.14, 38.83, 40.36, 39.4, 39.24, 39.15, 39.03, 39.19, 39.59, 39.31, 39.17, 39.34] +705.52 +35.275999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 56.27469801902771, 'TIME_S_1KI': 562.7469801902771, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4137.226250839234, 'W': 65.54, 'J_1KI': 41372.26250839234, 'W_1KI': 655.4000000000001, 'W_D': 30.26400000000001, 'J_D': 1910.4213496398932, 'W_D_1KI': 302.6400000000001, 'J_D_1KI': 3026.400000000001} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..61ee2ab --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 84.63426041603088, "TIME_S_1KI": 846.3426041603088, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6117.8857078742985, "W": 65.64, "J_1KI": 61178.857078742985, "W_1KI": 656.4, "W_D": 29.80825, "J_D": 2778.236847223401, "W_D_1KI": 298.08250000000004, "J_D_1KI": 2980.8250000000003} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..fc29758 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', 
'0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 84.63426041603088} + +tensor(indices=tensor([[6557, 5717, 2260, ..., 2998, 2062, 946], + [9077, 2479, 8415, ..., 3704, 3826, 7433]]), + values=tensor([0.5832, 0.8244, 0.3182, ..., 0.2377, 0.1983, 0.3613]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.6571, 0.6648, 0.3232, ..., 0.9276, 0.4539, 0.4033]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 84.63426041603088 seconds + +tensor(indices=tensor([[6557, 5717, 2260, ..., 2998, 2062, 946], + [9077, 2479, 8415, ..., 3704, 3826, 7433]]), + values=tensor([0.5832, 0.8244, 0.3182, ..., 0.2377, 0.1983, 0.3613]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.6571, 0.6648, 0.3232, ..., 0.9276, 0.4539, 0.4033]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 84.63426041603088 seconds + +[40.16, 39.1, 39.67, 39.4, 41.14, 39.1, 39.19, 39.16, 39.19, 39.17] +[65.64] +93.20362138748169 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 84.63426041603088, 'TIME_S_1KI': 846.3426041603088, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6117.8857078742985, 'W': 65.64} +[40.16, 39.1, 39.67, 39.4, 41.14, 39.1, 39.19, 39.16, 39.19, 39.17, 39.8, 39.66, 39.22, 44.55, 39.26, 39.1, 40.04, 39.7, 39.81, 39.56] +716.635 +35.83175 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 84.63426041603088, 'TIME_S_1KI': 846.3426041603088, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6117.8857078742985, 'W': 65.64, 'J_1KI': 61178.857078742985, 'W_1KI': 656.4, 'W_D': 29.80825, 'J_D': 2778.236847223401, 'W_D_1KI': 298.08250000000004, 'J_D_1KI': 2980.8250000000003} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..a0e49dd --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 113.74189758300781, "TIME_S_1KI": 1137.4189758300781, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 8411.817435126306, "W": 68.18, "J_1KI": 84118.17435126306, "W_1KI": 681.8000000000001, "W_D": 32.47800000000001, "J_D": 4007.0256183342944, "W_D_1KI": 324.7800000000001, "J_D_1KI": 3247.8000000000006} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..18d4d04 --- /dev/null +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 113.74189758300781} + +tensor(indices=tensor([[ 98, 5717, 9207, ..., 2831, 9478, 5788], + [6304, 7772, 6308, ..., 7248, 5124, 5480]]), + values=tensor([0.1634, 0.1665, 0.1616, ..., 0.5220, 0.6003, 0.7750]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.2208, 0.2901, 0.3618, ..., 0.1796, 0.2169, 0.5409]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 113.74189758300781 seconds + +tensor(indices=tensor([[ 98, 5717, 9207, ..., 2831, 9478, 5788], + [6304, 7772, 6308, ..., 7248, 5124, 5480]]), + values=tensor([0.1634, 0.1665, 0.1616, ..., 0.5220, 0.6003, 0.7750]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.2208, 0.2901, 0.3618, ..., 0.1796, 0.2169, 0.5409]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 113.74189758300781 seconds + +[39.8, 39.22, 39.1, 39.56, 39.3, 39.32, 39.71, 39.69, 40.77, 44.07] +[68.18] +123.37661242485046 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 113.74189758300781, 'TIME_S_1KI': 1137.4189758300781, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8411.817435126306, 'W': 68.18} +[39.8, 39.22, 39.1, 39.56, 39.3, 39.32, 39.71, 39.69, 40.77, 44.07, 40.11, 40.13, 39.2, 39.2, 39.71, 39.54, 39.46, 39.15, 39.45, 39.08] +714.04 +35.702 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 113.74189758300781, 'TIME_S_1KI': 1137.4189758300781, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8411.817435126306, 'W': 68.18, 'J_1KI': 84118.17435126306, 'W_1KI': 681.8000000000001, 'W_D': 32.47800000000001, 'J_D': 4007.0256183342944, 'W_D_1KI': 324.7800000000001, 'J_D_1KI': 3247.8000000000006} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..876dfc3 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 140.71093130111694, "TIME_S_1KI": 1407.1093130111694, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 10166.688220214843, "W": 65.6, "J_1KI": 101666.88220214842, "W_1KI": 655.9999999999999, "W_D": 30.132499999999993, "J_D": 4669.934951152801, "W_D_1KI": 301.32499999999993, "J_D_1KI": 3013.249999999999} 
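Note on the derived fields in these JSON records: from the raw numbers printed in each .output file, the derived metrics appear to follow directly from the run power W, the measured wall time, and the mean of the twenty idle baseline samples printed before the totals (for example, W_D is W minus the idle mean, and J_D multiplies that by the wall time). A minimal sketch of that relationship, assuming those field meanings; the helper name derive_metrics is hypothetical and not part of spmv.py:

    def derive_metrics(record, baseline_samples, elapsed_s):
        # record: raw fields from the run (ITERATIONS, W, J, ...)
        # baseline_samples: the 20 idle power readings printed before the totals
        # elapsed_s: the measured wall time printed alongside [W]
        per_ki = record["ITERATIONS"] / 1000.0           # iterations in thousands
        idle_w = sum(baseline_samples) / len(baseline_samples)
        out = dict(record)
        out["J_1KI"] = record["J"] / per_ki              # energy per 1000 iterations
        out["W_1KI"] = record["W"] / per_ki              # power per 1000 iterations
        out["W_D"] = record["W"] - idle_w                # power above idle
        out["J_D"] = out["W_D"] * elapsed_s              # energy above idle
        out["W_D_1KI"] = out["W_D"] / per_ki
        out["J_D_1KI"] = out["J_D"] / per_ki
        return out

For the 10000/0.5 record above, W = 65.6, the idle mean is 35.4675, and the wall time is 154.98 s, which reproduces W_D = 30.1325 and J_D = 4669.93 as recorded.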
diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..7da6c28 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 140.71093130111694} + +tensor(indices=tensor([[9534, 8557, 4354, ..., 3037, 7198, 1930], + [5851, 8260, 2146, ..., 8789, 6190, 8909]]), + values=tensor([0.0336, 0.5969, 0.5898, ..., 0.6856, 0.1994, 0.9040]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.6607, 0.7149, 0.2750, ..., 0.7398, 0.0558, 0.8929]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 140.71093130111694 seconds + +tensor(indices=tensor([[9534, 8557, 4354, ..., 3037, 7198, 1930], + [5851, 8260, 2146, ..., 8789, 6190, 8909]]), + values=tensor([0.0336, 0.5969, 0.5898, ..., 0.6856, 0.1994, 0.9040]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.6607, 0.7149, 0.2750, ..., 0.7398, 0.0558, 0.8929]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 140.71093130111694 seconds + +[39.87, 39.52, 39.22, 39.19, 39.33, 39.15, 39.09, 39.71, 39.26, 39.43] +[65.6] +154.9800033569336 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 140.71093130111694, 'TIME_S_1KI': 1407.1093130111694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10166.688220214843, 'W': 65.6} +[39.87, 39.52, 39.22, 39.19, 39.33, 39.15, 39.09, 39.71, 39.26, 39.43, 40.26, 39.69, 39.67, 39.11, 40.01, 39.38, 39.21, 39.19, 39.16, 39.36] +709.35 +35.4675 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 140.71093130111694, 'TIME_S_1KI': 1407.1093130111694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10166.688220214843, 'W': 65.6, 'J_1KI': 101666.88220214842, 'W_1KI': 655.9999999999999, 'W_D': 30.132499999999993, 'J_D': 4669.934951152801, 'W_D_1KI': 301.32499999999993, 'J_D_1KI': 3013.249999999999} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..0f8e038 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 312424, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.427929401397705, "TIME_S_1KI": 
0.03337749149040312, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 847.5212089419365, "W": 64.75, "J_1KI": 2.712727603967482, "W_1KI": 0.20725040329808211, "W_D": 29.04875, "J_D": 380.2228836795687, "W_D_1KI": 0.09297861239853532, "J_D_1KI": 0.0002976039369527799} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..9a8df2c --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,962 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.012271881103515625} + +tensor(indices=tensor([[2756, 4882, 9842, ..., 8323, 1339, 6368], + [5276, 199, 9672, ..., 1948, 9889, 3256]]), + values=tensor([0.1607, 0.4255, 0.2308, 0.0319, 0.2557, 0.1810, 0.3600, + 0.8485, 0.0770, 0.5369, 0.5081, 0.3881, 0.7737, 0.4097, + 0.9640, 0.4979, 0.4886, 0.6258, 0.1961, 0.6184, 0.0454, + 0.1229, 0.5176, 0.1365, 0.8265, 0.8776, 0.9798, 0.2358, + 0.3690, 0.3150, 0.7526, 0.3646, 0.3163, 0.8677, 0.7024, + 0.6032, 0.4241, 0.7106, 0.5636, 0.7747, 0.6506, 0.7566, + 0.2741, 0.1627, 0.4661, 0.8652, 0.0154, 0.7478, 0.0122, + 0.2094, 0.7493, 0.6911, 0.4690, 0.7143, 0.5680, 0.8663, + 0.5141, 0.7789, 0.4327, 0.4415, 0.7176, 0.0820, 0.3918, + 0.7178, 0.1685, 0.2023, 0.7212, 0.9602, 0.3480, 0.9810, + 0.1300, 0.2395, 0.3950, 0.2278, 0.3419, 0.5169, 0.1650, + 0.0437, 0.4720, 0.8686, 0.8053, 0.5829, 0.5528, 0.3374, + 0.0571, 0.0259, 0.7191, 0.3284, 0.2032, 0.6847, 0.5746, + 0.4952, 0.3663, 0.1467, 0.3931, 0.4853, 0.0643, 0.0855, + 0.1945, 0.0289, 0.3961, 0.6626, 0.7022, 0.0863, 0.5676, + 0.7152, 0.7572, 0.2951, 0.2123, 0.8443, 0.8576, 0.3672, + 0.6814, 0.7596, 0.3362, 0.7240, 0.4019, 0.4246, 0.5176, + 0.7017, 0.9445, 0.7045, 0.0120, 0.6049, 0.5427, 0.9939, + 0.3251, 0.7102, 0.5994, 0.8382, 0.8092, 0.0528, 0.2165, + 0.9553, 0.8759, 0.0886, 0.0045, 0.6458, 0.5372, 0.4692, + 0.9533, 0.8550, 0.5894, 0.1632, 0.8400, 0.4227, 0.8348, + 0.7136, 0.7198, 0.3396, 0.2454, 0.7925, 0.6606, 0.8555, + 0.3309, 0.3333, 0.9584, 0.3307, 0.2443, 0.1003, 0.7094, + 0.1077, 0.5035, 0.3591, 0.2996, 0.5748, 0.8286, 0.4871, + 0.6011, 0.7341, 0.2229, 0.2947, 0.9016, 0.8705, 0.4411, + 0.4414, 0.8047, 0.8857, 0.6392, 0.3759, 0.8511, 0.2783, + 0.9471, 0.5587, 0.7993, 0.6300, 0.5223, 0.6936, 0.8768, + 0.2029, 0.2784, 0.3890, 0.7139, 0.0708, 0.1179, 0.8587, + 0.9214, 0.7692, 0.6427, 0.5234, 0.5238, 0.0618, 0.9566, + 0.1126, 0.5329, 0.3081, 0.3919, 0.1669, 0.0206, 0.8532, + 0.4811, 0.0114, 0.1700, 0.8313, 0.3549, 0.4837, 0.7706, + 0.2124, 0.3200, 0.9223, 0.4219, 0.7598, 0.1784, 0.7579, + 0.8491, 0.0996, 0.6023, 0.1103, 0.0903, 0.6534, 0.8391, + 0.2047, 0.3688, 0.2028, 0.6968, 0.2977, 0.8891, 0.4815, + 0.3079, 0.5056, 0.8682, 0.4909, 0.8789, 0.8755, 0.9596, + 0.8297, 0.8512, 0.0202, 0.0850, 0.9120, 0.9187, 0.8459, + 0.7973, 0.7424, 0.0238, 0.4486, 0.5594, 0.8942, 0.4727, + 0.6433, 0.4299, 0.3245, 0.7324, 0.6416, 0.4595, 0.4421, + 0.6815, 0.4284, 0.9015, 0.2014, 0.8636, 0.5639, 0.4418, + 0.6945, 0.1293, 0.5261, 0.5092, 0.4795, 0.5738, 0.8723, + 0.8676, 0.9796, 0.3865, 0.4116, 0.1540, 0.8904, 0.5397, + 0.2850, 
0.3729, 0.3706, 0.7333, 0.1524, 0.9154, 0.9806, + 0.8096, 0.6008, 0.9000, 0.0077, 0.7951, 0.4462, 0.7705, + 0.6059, 0.1409, 0.4403, 0.6112, 0.7580, 0.5039, 0.5974, + 0.8213, 0.5247, 0.3266, 0.8168, 0.8016, 0.9799, 0.7268, + 0.3155, 0.2308, 0.6493, 0.8596, 0.7468, 0.8027, 0.2879, + 0.1386, 0.7160, 0.9401, 0.3205, 0.0772, 0.9720, 0.2625, + 0.3250, 0.9983, 0.6909, 0.7360, 0.7295, 0.6319, 0.2069, + 0.4301, 0.9858, 0.3065, 0.6384, 0.6441, 0.6766, 0.7009, + 0.5083, 0.2472, 0.1305, 0.2271, 0.6804, 0.9059, 0.5011, + 0.1277, 0.4333, 0.7160, 0.8907, 0.0323, 0.3838, 0.9910, + 0.0056, 0.9735, 0.6565, 0.3185, 0.8054, 0.7199, 0.7518, + 0.5746, 0.0962, 0.1968, 0.5029, 0.0845, 0.8158, 0.6571, + 0.1221, 0.6321, 0.7428, 0.1106, 0.2405, 0.7540, 0.9740, + 0.3071, 0.9477, 0.0089, 0.8687, 0.3516, 0.1141, 0.7734, + 0.7651, 0.6707, 0.5971, 0.6571, 0.7594, 0.4549, 0.1847, + 0.6256, 0.2247, 0.4874, 0.4825, 0.2916, 0.1486, 0.9902, + 0.7001, 0.8870, 0.5298, 0.5332, 0.7315, 0.2750, 0.3474, + 0.7452, 0.4488, 0.8999, 0.4601, 0.2522, 0.6835, 0.0614, + 0.8060, 0.9693, 0.0976, 0.3122, 0.8782, 0.9864, 0.5756, + 0.1643, 0.6142, 0.1028, 0.6057, 0.0786, 0.9053, 0.6607, + 0.7170, 0.7916, 0.2526, 0.3153, 0.5681, 0.2495, 0.2332, + 0.4335, 0.0299, 0.4469, 0.7547, 0.1583, 0.9232, 0.9149, + 0.4286, 0.8710, 0.7141, 0.7216, 0.7871, 0.9302, 0.1370, + 0.0236, 0.7087, 0.6246, 0.1104, 0.6263, 0.6587, 0.7047, + 0.5427, 0.1591, 0.2497, 0.1999, 0.5319, 0.5734, 0.2149, + 0.4992, 0.3806, 0.8264, 0.4541, 0.4913, 0.0245, 0.0322, + 0.7190, 0.4058, 0.4808, 0.5386, 0.3161, 0.8660, 0.8983, + 0.8829, 0.6630, 0.6983, 0.7664, 0.9579, 0.6447, 0.6135, + 0.0314, 0.2526, 0.9270, 0.4699, 0.6674, 0.7464, 0.7034, + 0.5706, 0.7461, 0.6638, 0.2904, 0.8556, 0.3128, 0.5569, + 0.0758, 0.7974, 0.4416, 0.2323, 0.2980, 0.9237, 0.1942, + 0.7527, 0.5415, 0.1187, 0.3218, 0.1619, 0.0986, 0.9354, + 0.7200, 0.3217, 0.3146, 0.0413, 0.5409, 0.3740, 0.0703, + 0.6617, 0.5875, 0.2763, 0.9102, 0.9184, 0.5827, 0.7160, + 0.0650, 0.5848, 0.4261, 0.9585, 0.0456, 0.9761, 0.3364, + 0.5484, 0.8493, 0.0522, 0.7424, 0.4071, 0.0397, 0.2566, + 0.1291, 0.4959, 0.4203, 0.1201, 0.1498, 0.6357, 0.7458, + 0.0120, 0.2912, 0.3678, 0.7341, 0.4848, 0.7826, 0.6421, + 0.0796, 0.0711, 0.4725, 0.3240, 0.9447, 0.5393, 0.6241, + 0.6412, 0.5019, 0.2534, 0.2520, 0.8440, 0.6067, 0.9634, + 0.1093, 0.2032, 0.2835, 0.6239, 0.0882, 0.3315, 0.9473, + 0.6289, 0.1143, 0.1429, 0.0478, 0.5693, 0.9399, 0.7085, + 0.8366, 0.1639, 0.5074, 0.4183, 0.0349, 0.9849, 0.5848, + 0.4556, 0.0947, 0.1247, 0.7421, 0.3046, 0.2864, 0.7442, + 0.1371, 0.3479, 0.1142, 0.6285, 0.0072, 0.2747, 0.4076, + 0.8915, 0.9235, 0.5281, 0.5207, 0.9193, 0.0912, 0.5365, + 0.1004, 0.6475, 0.1988, 0.8945, 0.7470, 0.5459, 0.0053, + 0.5308, 0.3300, 0.6268, 0.0549, 0.7517, 0.1455, 0.4011, + 0.4484, 0.7043, 0.7843, 0.8921, 0.2618, 0.4778, 0.4794, + 0.8035, 0.8087, 0.3969, 0.6756, 0.7945, 0.4608, 0.7977, + 0.1304, 0.1743, 0.1294, 0.9399, 0.9879, 0.3962, 0.4778, + 0.3422, 0.7353, 0.1053, 0.9708, 0.7150, 0.4507, 0.6397, + 0.0394, 0.0621, 0.5407, 0.3132, 0.4968, 0.5910, 0.2195, + 0.8289, 0.1799, 0.6150, 0.2738, 0.9712, 0.7785, 0.9385, + 0.7131, 0.7066, 0.3016, 0.0835, 0.9165, 0.2105, 0.9055, + 0.1948, 0.5073, 0.5755, 0.0624, 0.6056, 0.5850, 0.0843, + 0.8118, 0.8429, 0.5751, 0.2378, 0.5403, 0.4904, 0.2056, + 0.6165, 0.3730, 0.5482, 0.9502, 0.2397, 0.9517, 0.4864, + 0.6614, 0.3239, 0.1118, 0.0160, 0.9664, 0.6261, 0.4673, + 0.5460, 0.9821, 0.5056, 0.1840, 0.3454, 0.5369, 0.9136, + 0.0408, 0.3247, 0.1345, 0.6447, 0.8657, 0.2718, 0.6883, + 0.5948, 0.6508, 0.6435, 
0.4673, 0.5347, 0.2400, 0.4559, + 0.3122, 0.4453, 0.3176, 0.4509, 0.2046, 0.1908, 0.8404, + 0.3054, 0.1104, 0.9874, 0.6161, 0.5629, 0.6662, 0.5407, + 0.3999, 0.2526, 0.0087, 0.7169, 0.7428, 0.5725, 0.6507, + 0.0670, 0.1863, 0.6994, 0.5142, 0.7578, 0.7673, 0.9706, + 0.2610, 0.6416, 0.6814, 0.1156, 0.1463, 0.1422, 0.6075, + 0.5796, 0.6929, 0.4034, 0.0472, 0.8229, 0.3337, 0.9366, + 0.3789, 0.2947, 0.8473, 0.7697, 0.7974, 0.4922, 0.8882, + 0.1522, 0.4457, 0.1847, 0.5677, 0.5294, 0.8316, 0.0440, + 0.6691, 0.4978, 0.9974, 0.6442, 0.2512, 0.3235, 0.7069, + 0.2940, 0.5223, 0.7068, 0.6868, 0.0989, 0.5687, 0.1748, + 0.5699, 0.1830, 0.9238, 0.7839, 0.0045, 0.7931, 0.0980, + 0.4733, 0.3508, 0.1134, 0.0944, 0.3663, 0.9498, 0.1183, + 0.4825, 0.3668, 0.1828, 0.6051, 0.9410, 0.1905, 0.3574, + 0.3028, 0.3308, 0.1387, 0.2815, 0.1371, 0.6691, 0.1891, + 0.1224, 0.7366, 0.6198, 0.4240, 0.6925, 0.0568, 0.4276, + 0.1940, 0.6196, 0.0838, 0.1883, 0.5894, 0.5055, 0.1661, + 0.0283, 0.7782, 0.8504, 0.7139, 0.9995, 0.7152, 0.9088, + 0.6909, 0.2523, 0.6702, 0.1894, 0.6420, 0.7525, 0.7473, + 0.7141, 0.4906, 0.3007, 0.2505, 0.1328, 0.8914, 0.3072, + 0.7015, 0.2813, 0.9328, 0.6074, 0.9905, 0.9514, 0.9899, + 0.4610, 0.9130, 0.7459, 0.6310, 0.6177, 0.4541, 0.4819, + 0.5369, 0.2701, 0.4007, 0.7336, 0.8670, 0.6834, 0.8540, + 0.0574, 0.5799, 0.8048, 0.4633, 0.7742, 0.8654, 0.5720, + 0.1063, 0.2976, 0.2200, 0.6805, 0.9116, 0.5686, 0.8291, + 0.4942, 0.2188, 0.4414, 0.9520, 0.6674, 0.9147, 0.8972, + 0.4777, 0.3990, 0.3234, 0.3824, 0.8564, 0.1623, 0.1606, + 0.0326, 0.4223, 0.8640, 0.0316, 0.8638, 0.7484, 0.5661, + 0.7082, 0.8849, 0.1541, 0.1558, 0.8258, 0.8073, 0.3263, + 0.4381, 0.4817, 0.4301, 0.3318, 0.0540, 0.6532, 0.4916, + 0.5613, 0.6931, 0.7594, 0.7384, 0.3044, 0.4981, 0.4201, + 0.6026, 0.0529, 0.7364, 0.8673, 0.0177, 0.7763, 0.0018, + 0.9604, 0.8513, 0.9818, 0.9536, 0.4834, 0.2606, 0.5345, + 0.9220, 0.4008, 0.3025, 0.3589, 0.2738, 0.4775, 0.4651, + 0.4262, 0.6860, 0.8632, 0.2353, 0.2117, 0.4376, 0.7188, + 0.3471, 0.6828, 0.0942, 0.5819, 0.2720, 0.1821, 0.6072, + 0.8960, 0.5741, 0.1672, 0.8671, 0.7131, 0.1829, 0.6414, + 0.7090, 0.6681, 0.8180, 0.9062, 0.7401, 0.0626, 0.8391, + 0.3040, 0.8288, 0.2170, 0.4000, 0.1752, 0.3638, 0.7822, + 0.5678, 0.8378, 0.6541, 0.4836, 0.5219, 0.1807, 0.1450, + 0.2166, 0.1339, 0.7422, 0.4888, 0.7013, 0.9282]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.1677, 0.5877, 0.4588, ..., 0.0348, 0.6139, 0.2897]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.012271881103515625 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '85561', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.875548839569092} + +tensor(indices=tensor([[7024, 6142, 7921, ..., 3447, 8833, 3202], + [7123, 8363, 9772, ..., 8908, 1219, 4063]]), + values=tensor([6.9196e-01, 8.0436e-01, 5.1047e-01, 1.3028e-01, + 7.7425e-01, 5.9251e-01, 6.9477e-01, 8.3009e-01, + 9.4936e-01, 4.4730e-01, 9.3789e-02, 2.4985e-02, + 7.3052e-01, 8.6249e-01, 6.0613e-01, 7.3496e-01, + 4.9158e-02, 5.9129e-01, 3.0900e-01, 2.1017e-01, + 5.8069e-01, 5.5345e-01, 4.6627e-01, 1.4187e-01, + 3.3515e-01, 8.9475e-01, 9.5676e-01, 1.1035e-01, + 5.0082e-01, 
4.8742e-01, 4.1444e-02, 9.9341e-02, + 6.9544e-01, 5.0191e-01, 2.7789e-01, 9.9935e-01, + 2.4732e-01, 8.3885e-01, 3.1776e-01, 3.6576e-01, + 2.4485e-01, 6.5632e-01, 2.6868e-01, 7.3629e-01, + 7.8122e-01, 3.3239e-01, 7.9836e-01, 8.3461e-01, + 9.0915e-01, 2.6881e-01, 1.7371e-01, 8.9995e-01, + 5.5517e-03, 4.1438e-01, 2.6407e-01, 4.3057e-01, + 6.8941e-01, 8.5675e-01, 7.5601e-01, 8.9765e-01, + 3.3854e-01, 4.0167e-01, 6.1365e-01, 4.5158e-01, + 2.9842e-01, 3.1356e-01, 3.7918e-01, 4.7194e-01, + 9.3677e-01, 3.4189e-01, 1.2740e-01, 8.3192e-01, + 8.5666e-01, 8.2185e-01, 5.6062e-01, 7.8027e-01, + 5.8590e-01, 7.7630e-01, 1.8746e-01, 8.5625e-01, + 6.6627e-01, 3.8185e-01, 9.6025e-01, 4.4707e-01, + 3.3645e-01, 9.8035e-01, 9.2345e-01, 6.5445e-01, + 3.4593e-01, 8.5672e-01, 3.0947e-01, 3.0590e-01, + 4.5382e-01, 6.7784e-03, 3.0370e-01, 4.7620e-01, + 7.3401e-01, 1.2975e-01, 4.3203e-01, 8.5002e-01, + 8.3726e-01, 2.2902e-01, 8.5140e-01, 7.7023e-01, + 1.7861e-01, 7.6516e-01, 1.1762e-01, 9.1410e-01, + 5.8975e-01, 8.4454e-01, 4.6587e-01, 4.7307e-01, + 7.4837e-01, 1.8732e-01, 7.8859e-01, 8.7273e-01, + 2.5010e-01, 4.5507e-01, 4.4196e-01, 4.2649e-01, + 1.5094e-01, 3.1374e-01, 4.7760e-02, 3.1837e-01, + 5.2156e-01, 6.7041e-01, 8.5977e-01, 1.9092e-01, + 3.1399e-01, 4.8592e-01, 4.8142e-01, 9.4344e-01, + 3.3952e-02, 5.5703e-03, 7.4859e-01, 6.3661e-01, + 3.3776e-01, 9.0383e-02, 4.1680e-01, 4.6034e-01, + 1.6077e-01, 7.0239e-02, 5.0257e-01, 3.2003e-01, + 3.0896e-01, 4.7679e-02, 6.7752e-01, 2.5934e-01, + 4.3523e-01, 6.2948e-01, 4.5116e-01, 5.3781e-01, + 5.7688e-01, 2.6334e-01, 5.4071e-01, 8.1856e-01, + 6.3240e-01, 2.3168e-01, 4.2887e-01, 6.9857e-01, + 4.3931e-02, 1.6073e-01, 4.9227e-01, 5.6662e-01, + 6.0976e-01, 6.8671e-01, 8.0104e-01, 8.4630e-01, + 2.4452e-01, 1.7994e-01, 4.6184e-01, 3.6466e-01, + 2.9619e-02, 3.1508e-01, 3.1448e-01, 2.8735e-02, + 4.1604e-01, 4.7280e-01, 2.3339e-01, 9.8691e-01, + 9.3424e-01, 8.8052e-01, 5.5880e-01, 5.1752e-01, + 8.5972e-01, 5.0859e-01, 7.1713e-01, 9.5339e-01, + 3.3220e-01, 1.2314e-01, 2.1415e-01, 1.4051e-01, + 9.0208e-01, 9.2825e-01, 1.0282e-01, 1.1682e-01, + 4.8003e-01, 8.6824e-01, 6.1499e-01, 5.3774e-01, + 2.1109e-01, 3.8955e-01, 1.4717e-02, 9.9073e-01, + 7.2828e-01, 7.8731e-01, 7.4965e-01, 3.2305e-01, + 6.5898e-01, 3.6281e-01, 4.4701e-01, 3.1660e-01, + 9.4862e-01, 7.3073e-01, 1.7453e-01, 3.6247e-01, + 7.3580e-01, 9.9837e-01, 4.5070e-01, 6.6941e-01, + 6.7395e-01, 5.1014e-01, 1.8681e-01, 3.6547e-01, + 5.3999e-01, 3.0583e-02, 7.6806e-01, 4.3753e-01, + 8.5057e-01, 4.4687e-01, 1.0239e-01, 8.7465e-01, + 7.5788e-01, 8.1523e-01, 6.2422e-01, 5.2933e-01, + 9.0279e-01, 5.8716e-01, 1.5174e-01, 5.8118e-02, + 6.7598e-01, 8.2576e-02, 1.6003e-01, 2.0977e-01, + 1.0895e-01, 9.8201e-02, 2.7153e-01, 5.4635e-01, + 6.2627e-01, 7.0854e-01, 4.9083e-01, 1.3865e-01, + 7.8063e-01, 6.2811e-01, 1.9922e-01, 5.0664e-01, + 6.3939e-02, 3.1431e-01, 1.7496e-01, 2.3613e-02, + 1.7490e-01, 5.2571e-01, 3.7472e-01, 1.9783e-01, + 5.2057e-01, 6.0135e-02, 7.7489e-01, 4.1101e-01, + 4.4054e-01, 5.1553e-01, 8.5272e-01, 3.9144e-01, + 7.3156e-01, 1.3128e-01, 3.5321e-01, 3.9546e-01, + 4.7348e-02, 9.8307e-01, 6.2707e-01, 7.9877e-01, + 9.1938e-01, 1.9215e-01, 7.8100e-01, 1.7815e-01, + 8.5235e-01, 8.3300e-01, 3.8593e-01, 4.4356e-01, + 5.7720e-01, 1.4098e-01, 7.3532e-01, 5.1164e-01, + 2.5387e-03, 2.3563e-01, 1.1058e-01, 9.1007e-01, + 4.4411e-01, 9.6612e-01, 6.9225e-01, 6.3002e-01, + 2.8941e-01, 1.4658e-01, 1.8227e-01, 1.6806e-01, + 6.7807e-01, 7.2992e-01, 3.6696e-01, 5.4175e-01, + 1.7957e-01, 8.2480e-01, 2.9918e-01, 6.8101e-01, + 9.3289e-01, 
7.3034e-01, 2.6242e-01, 9.5173e-01, + 3.6440e-01, 4.5099e-01, 4.0746e-02, 3.2111e-01, + 5.6430e-01, 2.8833e-01, 8.7190e-01, 5.0395e-01, + 8.9938e-01, 1.9415e-02, 3.5264e-01, 7.3839e-01, + 1.5017e-01, 3.7734e-01, 9.0242e-01, 5.3040e-01, + 8.7677e-01, 1.0919e-01, 5.4139e-01, 5.3978e-01, + 5.1195e-01, 4.7004e-01, 3.9302e-01, 4.8868e-01, + 9.8249e-01, 1.5804e-02, 8.7507e-01, 8.7778e-01, + 3.0173e-01, 1.6193e-01, 6.6415e-01, 2.5329e-01, + 7.4140e-02, 2.6731e-01, 7.0555e-01, 9.6455e-01, + 3.7034e-01, 4.5235e-01, 1.4375e-01, 6.6933e-01, + 4.1806e-01, 5.7726e-01, 5.2122e-01, 6.1109e-01, + 2.6210e-01, 3.4186e-01, 6.4262e-01, 9.3231e-01, + 7.3358e-01, 1.0161e-01, 9.9285e-01, 3.3351e-01, + 8.0084e-01, 6.8185e-01, 1.8455e-01, 9.7903e-01, + 9.9742e-01, 5.5823e-01, 1.7529e-01, 5.2753e-01, + 9.5901e-01, 3.6525e-01, 1.3503e-01, 3.0870e-02, + 4.2042e-01, 1.1791e-01, 6.8008e-01, 7.4037e-01, + 7.7537e-02, 9.4323e-01, 8.0903e-01, 9.3490e-01, + 8.5899e-01, 3.2447e-01, 1.3606e-01, 2.3822e-01, + 1.9725e-01, 9.5001e-01, 4.0909e-01, 7.4570e-01, + 5.2812e-01, 6.6707e-01, 4.9704e-01, 4.4856e-01, + 5.3632e-02, 3.6619e-01, 4.9879e-01, 6.4130e-01, + 2.6024e-01, 6.4106e-01, 4.6444e-01, 3.2491e-01, + 1.5673e-01, 7.2040e-01, 4.7005e-01, 9.1712e-01, + 7.2951e-01, 5.3762e-01, 6.4629e-01, 5.5280e-01, + 2.0312e-02, 2.8737e-01, 4.4292e-01, 2.2227e-01, + 1.7902e-02, 6.1376e-01, 4.3465e-02, 7.9726e-01, + 3.0342e-01, 7.0388e-01, 4.6079e-02, 1.8693e-01, + 6.8661e-01, 2.7026e-01, 9.6692e-01, 8.2834e-01, + 3.3510e-01, 7.3209e-01, 6.6945e-01, 1.1354e-01, + 2.3871e-01, 5.0894e-01, 4.9809e-01, 2.2687e-01, + 9.0442e-02, 2.8269e-01, 3.0194e-01, 2.5807e-01, + 8.7667e-01, 3.2540e-01, 4.3307e-01, 2.3714e-01, + 2.6528e-01, 6.8447e-01, 2.0942e-01, 8.2948e-01, + 2.2193e-02, 7.0258e-01, 2.3101e-01, 9.3440e-01, + 9.6340e-02, 9.9150e-01, 7.1212e-01, 9.6079e-01, + 8.7980e-01, 6.4114e-01, 9.1749e-01, 3.6695e-01, + 2.6676e-01, 9.5986e-01, 9.6373e-01, 6.6751e-01, + 5.1999e-01, 9.3926e-01, 5.7946e-01, 3.3468e-01, + 6.7161e-02, 4.6663e-01, 5.9798e-01, 7.4845e-01, + 7.1666e-01, 1.1821e-01, 6.4577e-01, 7.2910e-01, + 5.5190e-01, 5.1593e-01, 9.7222e-01, 5.6894e-01, + 1.5923e-03, 5.0146e-01, 6.2893e-01, 1.6590e-01, + 7.0879e-01, 7.4975e-01, 4.8317e-01, 8.3040e-01, + 9.4447e-01, 1.0880e-01, 1.0029e-01, 2.1280e-01, + 2.4416e-01, 9.1756e-01, 5.1176e-01, 9.3317e-01, + 1.6091e-01, 5.3700e-01, 1.9406e-01, 9.2449e-02, + 8.4750e-01, 1.1721e-01, 1.8757e-01, 2.2682e-01, + 8.6137e-01, 2.2386e-01, 5.3733e-01, 6.3809e-01, + 7.1361e-02, 1.1678e-01, 3.3800e-01, 7.6503e-01, + 3.3262e-01, 3.3345e-01, 4.5233e-01, 5.2631e-01, + 8.2306e-01, 5.2372e-02, 8.7945e-02, 4.5779e-01, + 5.3258e-01, 8.1742e-01, 5.2705e-01, 2.6141e-01, + 7.5349e-01, 1.0899e-01, 6.5800e-01, 1.0455e-01, + 1.4973e-01, 8.8427e-03, 2.3985e-01, 8.0680e-01, + 9.7605e-01, 5.9110e-01, 4.4371e-01, 3.2931e-01, + 8.8646e-01, 7.9616e-02, 8.5138e-01, 4.5214e-01, + 1.9802e-01, 9.9863e-01, 5.8628e-01, 9.3439e-01, + 2.3215e-01, 6.0438e-01, 6.4026e-01, 1.9876e-01, + 1.7695e-01, 7.1448e-01, 1.3388e-01, 7.3805e-01, + 6.7613e-01, 7.4863e-01, 3.0695e-02, 5.8459e-01, + 4.8058e-01, 8.2853e-01, 4.6562e-01, 5.5266e-01, + 1.6483e-01, 1.3867e-01, 5.1493e-01, 6.6036e-01, + 4.4111e-01, 6.5010e-01, 4.2129e-01, 7.0601e-01, + 6.0578e-01, 5.0681e-01, 9.0442e-02, 7.7955e-02, + 1.9791e-01, 3.5879e-01, 6.1644e-01, 6.4126e-01, + 1.8453e-01, 5.9205e-04, 5.6852e-01, 1.7663e-01, + 3.1774e-01, 1.8355e-01, 6.4098e-01, 6.0936e-01, + 5.2037e-01, 9.5357e-01, 5.7463e-01, 3.3888e-01, + 1.6775e-01, 2.2707e-01, 5.4334e-01, 4.4711e-02, + 7.4311e-01, 
8.8912e-01, 4.9614e-01, 5.4084e-02, + 8.9709e-01, 6.3132e-01, 3.3867e-01, 9.2974e-01, + 4.2930e-01, 5.8600e-02, 7.8234e-01, 9.2250e-01, + 3.7131e-01, 7.6520e-01, 1.6093e-01, 8.9499e-01, + 4.7975e-01, 5.0009e-01, 1.0165e-01, 1.0014e-01, + 4.9023e-01, 1.4570e-02, 5.5654e-01, 2.2645e-01, + 2.6290e-01, 7.9478e-01, 2.2838e-01, 3.1290e-01, + 5.1141e-01, 5.5144e-01, 1.7210e-01, 8.8685e-01, + 9.9224e-01, 6.4132e-01, 8.9167e-01, 7.8584e-02, + 7.0288e-01, 5.5634e-01, 5.8874e-01, 8.2665e-01, + 9.7921e-01, 4.6719e-01, 1.6618e-01, 3.2881e-01, + 9.8405e-01, 4.0267e-01, 2.6180e-01, 2.2271e-01, + 1.6742e-01, 6.6281e-01, 2.0637e-01, 4.0364e-01, + 7.7116e-01, 2.7803e-01, 2.9427e-01, 2.6071e-01, + 9.0750e-02, 3.4500e-01, 2.2510e-01, 2.7472e-01, + 4.0072e-01, 7.1835e-01, 8.1323e-02, 7.4477e-01, + 8.3328e-01, 8.4874e-01, 1.8867e-01, 4.2543e-02, + 3.3127e-02, 9.1939e-02, 5.7643e-01, 5.5420e-01, + 9.9696e-01, 4.1925e-01, 9.3954e-01, 5.3403e-01, + 3.0529e-01, 8.6930e-01, 3.5892e-01, 9.0284e-01, + 1.4399e-01, 1.4322e-01, 7.2158e-01, 8.4896e-01, + 4.7178e-01, 4.5546e-02, 9.6806e-02, 9.5011e-01, + 4.7595e-01, 5.6782e-01, 1.4282e-01, 5.9352e-01, + 2.7984e-01, 4.9366e-01, 4.6396e-02, 4.3370e-01, + 3.6821e-01, 1.3949e-01, 7.4999e-01, 7.5608e-01, + 1.0481e-01, 5.1664e-01, 5.3604e-01, 8.7895e-01, + 1.0114e-01, 3.2039e-02, 1.1627e-01, 9.6672e-01, + 4.9634e-01, 4.1169e-01, 5.5755e-01, 8.8358e-01, + 6.6452e-01, 8.4399e-01, 3.9278e-01, 3.6227e-01, + 4.2955e-01, 3.5446e-01, 6.3436e-01, 8.9731e-01, + 5.5924e-01, 3.8936e-01, 8.6380e-01, 1.9892e-01, + 6.5146e-02, 5.3081e-01, 3.2680e-01, 1.4012e-01, + 2.9632e-01, 7.5454e-02, 5.2659e-01, 3.1668e-01, + 1.0237e-01, 4.1139e-01, 5.1052e-01, 7.7160e-01, + 5.0193e-01, 9.8969e-01, 9.5285e-01, 8.5397e-02, + 6.8589e-01, 7.6030e-01, 3.5753e-01, 2.7165e-01, + 1.2316e-01, 1.0817e-01, 1.1726e-01, 9.7922e-01, + 8.2305e-01, 5.9954e-01, 5.8706e-01, 5.8253e-02, + 7.9081e-01, 8.1024e-01, 4.9934e-01, 6.8651e-02, + 7.0913e-01, 1.1477e-01, 6.1484e-02, 1.1932e-01, + 9.8969e-01, 8.7806e-01, 1.5254e-01, 6.7058e-02, + 9.0135e-01, 7.6217e-02, 3.3121e-01, 8.1214e-02, + 7.4706e-02, 7.1575e-01, 8.4372e-01, 2.5870e-01, + 5.9593e-01, 6.4690e-01, 5.3905e-01, 7.5709e-01, + 1.8446e-01, 3.5266e-01, 6.9873e-01, 4.7999e-01, + 5.8519e-01, 6.6730e-01, 5.1117e-01, 3.8247e-01, + 9.3257e-01, 5.7630e-01, 9.6497e-01, 6.1447e-01, + 1.8637e-01, 8.4084e-01, 3.1229e-01, 1.0227e-01, + 2.3015e-01, 3.1743e-01, 9.1090e-02, 1.3426e-01, + 8.4811e-01, 4.6629e-01, 2.0736e-01, 1.0738e-01, + 9.6971e-01, 9.2302e-01, 1.6267e-01, 8.5379e-01, + 6.6515e-01, 7.9655e-01, 3.5900e-01, 2.5932e-02, + 2.2775e-02, 2.5987e-02, 7.5412e-01, 7.2228e-01, + 6.6636e-01, 6.6831e-02, 6.7418e-01, 3.0985e-01, + 8.1370e-01, 7.4535e-01, 7.1986e-01, 4.6765e-01, + 2.9425e-01, 1.3017e-01, 8.1472e-01, 3.1347e-01, + 7.1719e-01, 9.7721e-01, 8.7854e-02, 6.7120e-01, + 3.4708e-01, 2.3886e-01, 4.4420e-01, 2.6273e-01, + 4.0237e-01, 7.2243e-01, 5.1597e-01, 3.0752e-01, + 8.1608e-01, 3.8917e-01, 3.5975e-01, 8.4742e-01, + 7.3463e-01, 7.8594e-01, 7.3949e-02, 8.8138e-01, + 2.1707e-01, 6.1304e-01, 7.4044e-01, 4.2973e-01, + 8.7339e-01, 5.6359e-01, 7.3362e-01, 1.6726e-01, + 2.6978e-01, 5.5803e-01, 7.5261e-01, 3.1359e-02, + 3.2683e-01, 9.9868e-01, 9.3624e-01, 6.3551e-01, + 5.7852e-01, 2.1813e-01, 5.4454e-02, 1.9997e-01, + 6.1249e-01, 6.8371e-01, 6.9643e-01, 9.4732e-01, + 3.8041e-01, 5.9647e-01, 3.3183e-01, 4.5485e-01, + 4.7719e-01, 7.4415e-02, 7.0036e-01, 3.2270e-01, + 5.7362e-01, 2.4392e-01, 7.2995e-01, 9.4655e-01, + 1.9627e-01, 2.6180e-01, 6.5919e-01, 8.4280e-01, + 2.1734e-01, 
7.8651e-01, 2.9611e-01, 7.6014e-01, + 7.0846e-01, 5.4520e-01, 6.7922e-01, 6.9295e-01, + 3.9309e-02, 4.5211e-01, 3.1003e-01, 7.0088e-01, + 9.0334e-01, 8.0175e-01, 6.4128e-01, 8.0164e-01, + 2.6784e-01, 8.9872e-01, 8.6822e-01, 4.0301e-01, + 3.6901e-01, 5.4581e-01, 4.8183e-01, 1.0058e-01, + 6.3060e-01, 4.3921e-01, 3.4726e-01, 3.6669e-01, + 2.6920e-01, 9.2540e-01, 2.5721e-01, 5.8661e-01, + 5.3743e-01, 6.6448e-01, 5.5880e-01, 8.8954e-01, + 7.2366e-01, 2.8285e-01, 1.6440e-01, 1.2871e-01, + 7.9845e-01, 8.4586e-01, 9.1893e-01, 2.5816e-01, + 6.0931e-01, 9.7285e-01, 6.9184e-01, 1.8452e-01, + 9.0443e-01, 8.1548e-01, 7.7685e-01, 6.7494e-01, + 8.3961e-02, 6.2718e-01, 7.2737e-01, 6.9276e-01, + 7.0018e-01, 2.3448e-02, 5.9454e-01, 4.8897e-01, + 1.9076e-01, 5.3699e-01, 9.4450e-01, 3.9936e-01, + 2.4520e-01, 3.5704e-02, 1.5969e-01, 6.1027e-01, + 4.8326e-01, 9.4585e-01, 3.7229e-01, 1.6744e-01, + 7.9399e-01, 4.8609e-01, 8.1747e-01, 5.1065e-02, + 4.6443e-01, 8.3569e-01, 4.5854e-01, 4.7087e-01, + 4.5160e-01, 9.8820e-01, 8.8266e-02, 4.2600e-01, + 7.5258e-01, 6.4648e-02, 8.1411e-01, 9.7808e-01, + 8.7756e-02, 7.3898e-01, 9.0543e-02, 8.3554e-01, + 7.8121e-01, 5.2426e-01, 7.4824e-01, 4.3053e-01, + 8.4677e-01, 2.3587e-01, 6.5147e-01, 6.6529e-01, + 4.1636e-01, 9.6878e-02, 2.8498e-01, 8.8935e-01, + 3.7627e-01, 9.1246e-01, 5.2558e-01, 6.4803e-01, + 3.5124e-01, 5.4250e-01, 9.7112e-01, 2.4606e-01, + 6.4566e-01, 3.4014e-01, 4.7485e-01, 2.1755e-01, + 8.4602e-01, 1.5409e-01, 9.7822e-01, 9.3563e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.6427, 0.8591, 0.8328, ..., 0.3752, 0.0346, 0.6716]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 2.875548839569092 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '312424', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.427929401397705} + +tensor(indices=tensor([[2119, 370, 4867, ..., 6157, 4801, 3156], + [7170, 1669, 8220, ..., 3023, 1288, 8467]]), + values=tensor([1.8312e-01, 7.3021e-01, 7.6464e-01, 4.8412e-01, + 1.5609e-01, 6.6732e-01, 9.7368e-02, 3.7180e-01, + 2.3391e-01, 2.3905e-01, 1.7829e-01, 9.9389e-01, + 7.6105e-01, 9.2245e-01, 5.5538e-01, 2.8142e-01, + 6.7317e-01, 7.3850e-01, 3.4643e-01, 1.1757e-01, + 7.2606e-02, 8.9541e-01, 1.4152e-01, 3.7652e-01, + 6.2180e-01, 4.3944e-01, 8.2046e-01, 2.0405e-03, + 6.9751e-01, 3.8474e-01, 4.9488e-01, 4.6700e-01, + 4.0551e-01, 7.5934e-01, 2.1113e-01, 9.1893e-01, + 4.2057e-03, 7.8424e-02, 2.8094e-01, 3.0915e-01, + 2.0910e-01, 3.3698e-01, 1.4164e-01, 3.5863e-01, + 3.5687e-01, 7.2615e-02, 1.3379e-01, 9.2263e-01, + 2.4811e-01, 5.2390e-01, 6.9887e-02, 4.9367e-02, + 7.2391e-01, 5.0464e-01, 5.7919e-01, 2.9308e-01, + 2.6529e-01, 6.1911e-01, 6.5888e-01, 2.0729e-01, + 1.1878e-01, 6.0158e-01, 8.5799e-01, 8.9306e-01, + 3.3516e-01, 8.5090e-02, 6.1078e-01, 6.4668e-01, + 3.6377e-02, 4.1522e-01, 2.8105e-01, 6.9552e-01, + 5.8841e-01, 5.8434e-01, 8.3145e-01, 9.6718e-01, + 6.6380e-02, 7.9427e-01, 9.0060e-02, 3.9442e-01, + 2.5314e-01, 7.7662e-01, 6.8006e-01, 4.4329e-02, + 3.5769e-02, 4.4221e-01, 6.6089e-01, 7.7878e-01, + 5.2209e-01, 4.2534e-02, 9.6788e-01, 1.6671e-02, + 9.6923e-01, 5.6144e-01, 1.5801e-01, 3.5811e-01, + 3.7047e-01, 9.1746e-01, 
1.2856e-01, 9.5044e-01, + 1.2196e-01, 8.0309e-01, 4.3474e-01, 6.5979e-01, + 6.1392e-01, 8.3567e-01, 5.6500e-01, 4.4357e-01, + 7.9144e-01, 6.1372e-01, 4.4152e-01, 9.8351e-01, + 2.0071e-01, 3.0790e-01, 9.6430e-01, 9.7225e-02, + 3.0209e-01, 1.9537e-01, 2.6178e-01, 1.5584e-01, + 9.0782e-01, 4.7460e-01, 7.6292e-01, 9.2549e-01, + 8.3405e-01, 6.1262e-01, 3.5385e-01, 7.8330e-01, + 8.6584e-01, 5.7228e-01, 6.4037e-01, 1.7740e-01, + 9.5735e-01, 7.6071e-02, 8.9185e-01, 8.1914e-02, + 8.8697e-01, 5.6774e-01, 7.5438e-01, 2.4131e-01, + 2.5978e-01, 4.9244e-02, 7.6644e-01, 8.8563e-01, + 3.6335e-01, 6.4613e-01, 7.9835e-01, 3.2914e-01, + 4.3924e-02, 4.9992e-01, 1.3715e-01, 6.8403e-01, + 4.9517e-02, 3.6644e-02, 1.8893e-01, 1.0566e-01, + 3.4005e-01, 4.0650e-01, 3.1296e-01, 9.3438e-01, + 8.8639e-01, 5.1614e-01, 7.7426e-01, 3.1588e-01, + 3.8492e-01, 7.5576e-01, 8.3543e-01, 4.7571e-01, + 2.6191e-01, 3.2880e-02, 9.9007e-01, 7.6323e-01, + 8.5492e-01, 1.3013e-01, 6.6154e-01, 6.8446e-01, + 2.9868e-01, 7.1734e-02, 7.7075e-01, 2.4576e-01, + 6.9270e-01, 3.0929e-02, 5.8579e-01, 8.2459e-01, + 4.5646e-01, 2.8382e-01, 5.4818e-02, 8.4626e-01, + 7.7507e-01, 4.0388e-01, 5.3286e-01, 2.1961e-01, + 1.2604e-01, 9.1162e-01, 7.0215e-01, 1.6616e-01, + 3.4670e-01, 6.2482e-01, 4.6684e-01, 7.7805e-01, + 8.1569e-02, 7.0206e-01, 4.4895e-01, 9.7662e-01, + 9.6874e-01, 1.1363e-01, 4.1492e-01, 5.5514e-01, + 7.8131e-01, 8.1923e-01, 6.5862e-01, 4.5091e-01, + 2.8494e-02, 6.5861e-01, 1.5656e-01, 5.5375e-01, + 7.9710e-01, 6.8132e-01, 3.1976e-01, 3.3919e-01, + 6.3549e-01, 2.0704e-01, 7.6000e-01, 6.8972e-01, + 7.8630e-02, 6.6751e-01, 3.6432e-01, 1.8579e-01, + 4.4543e-01, 8.7083e-01, 4.2237e-01, 4.0057e-01, + 5.5765e-01, 9.5658e-02, 8.0082e-01, 9.3254e-01, + 8.7075e-01, 2.6062e-01, 7.4787e-01, 2.1407e-01, + 8.6307e-01, 3.7104e-01, 2.5075e-01, 2.5788e-01, + 3.5617e-01, 4.3311e-01, 6.5418e-01, 6.5210e-01, + 7.7688e-01, 7.7941e-01, 7.9153e-01, 5.3662e-01, + 2.2429e-01, 6.1689e-01, 5.6867e-01, 9.5297e-01, + 9.2783e-01, 2.0067e-01, 5.1906e-01, 7.1936e-01, + 9.0912e-01, 1.3329e-01, 1.0597e-01, 1.7989e-01, + 6.1609e-01, 4.6894e-02, 3.1246e-01, 8.7829e-01, + 2.2490e-01, 2.4823e-01, 9.9724e-03, 2.2425e-01, + 3.8862e-02, 2.5865e-01, 1.8847e-01, 5.8074e-01, + 8.6945e-01, 6.4165e-01, 6.8864e-01, 3.3133e-01, + 5.8246e-02, 2.9207e-01, 1.8777e-01, 2.7906e-01, + 5.2277e-01, 3.4218e-01, 3.2118e-01, 6.8783e-01, + 7.8539e-01, 7.2070e-01, 1.1307e-01, 4.7345e-01, + 2.0686e-01, 9.8983e-01, 3.8739e-01, 8.0375e-01, + 8.1521e-01, 6.2756e-01, 3.8942e-01, 1.5027e-02, + 8.0894e-01, 5.0622e-01, 1.8098e-01, 6.1361e-01, + 5.3930e-01, 9.8839e-02, 1.1099e-01, 8.5349e-01, + 8.0089e-01, 5.3257e-01, 4.0307e-01, 1.3745e-01, + 9.1004e-01, 1.5636e-01, 6.7634e-02, 4.3564e-02, + 1.5075e-01, 3.9366e-01, 5.5652e-01, 9.7021e-01, + 4.1157e-01, 3.6200e-01, 9.3667e-01, 9.9134e-01, + 4.7518e-01, 6.1808e-01, 5.6326e-01, 4.1457e-01, + 9.9247e-01, 8.2613e-01, 3.7293e-01, 5.1540e-02, + 5.9348e-01, 9.0873e-01, 6.2265e-01, 6.3758e-01, + 3.0655e-01, 1.7375e-01, 8.2881e-01, 9.9916e-01, + 6.2225e-01, 2.6628e-01, 9.6161e-01, 7.9303e-01, + 9.3582e-01, 8.9691e-01, 2.6329e-01, 8.6303e-01, + 5.8593e-01, 9.2131e-01, 7.4435e-01, 8.7190e-02, + 2.1794e-01, 4.6484e-01, 6.2854e-01, 2.5546e-01, + 5.8537e-01, 1.6982e-01, 2.2333e-01, 9.6681e-01, + 7.2005e-01, 9.3399e-01, 7.3319e-01, 8.2680e-01, + 8.1516e-01, 3.5205e-01, 7.8966e-01, 4.7854e-01, + 4.7408e-01, 4.4599e-01, 3.4353e-01, 7.1637e-01, + 4.0998e-01, 8.5288e-01, 1.2011e-01, 5.5029e-01, + 4.3544e-01, 5.2361e-01, 6.9647e-01, 9.9043e-01, + 7.3445e-01, 2.3838e-01, 
4.1416e-01, 9.9105e-01, + 5.7867e-01, 5.8376e-01, 1.4777e-01, 5.9303e-01, + 2.0332e-01, 4.6805e-01, 6.7640e-01, 9.9999e-01, + 5.9218e-01, 6.4634e-01, 8.0104e-01, 6.8721e-01, + 5.2765e-02, 8.5161e-01, 7.9137e-01, 6.3561e-01, + 3.3495e-01, 9.1585e-01, 5.6677e-01, 9.6334e-01, + 1.7727e-01, 3.8575e-01, 5.6272e-01, 6.3699e-02, + 1.5808e-01, 5.2135e-02, 9.5833e-01, 8.2802e-01, + 3.6595e-01, 9.1926e-01, 5.4352e-01, 4.9316e-01, + 4.8384e-01, 7.2923e-01, 2.5552e-01, 4.8518e-01, + 9.7772e-01, 1.2488e-01, 2.7264e-01, 8.3253e-01, + 4.1428e-01, 3.5722e-01, 7.9530e-01, 4.9734e-01, + 8.6275e-03, 6.0638e-01, 4.8099e-01, 5.1299e-01, + 8.0771e-01, 3.7747e-01, 9.6382e-01, 3.1199e-02, + 1.3195e-01, 2.0493e-01, 5.4053e-01, 6.7797e-01, + 1.3970e-01, 5.3726e-01, 7.3474e-01, 8.8990e-01, + 7.3988e-01, 4.1938e-01, 1.9905e-01, 9.5666e-01, + 5.7635e-01, 8.8509e-01, 1.8238e-01, 5.2725e-01, + 2.7315e-01, 3.7634e-01, 9.9295e-01, 5.3940e-01, + 9.6960e-01, 1.2952e-01, 8.9433e-01, 5.4080e-01, + 9.8904e-01, 5.6085e-01, 2.9492e-01, 2.8775e-02, + 6.6257e-01, 1.0013e-01, 7.7299e-01, 5.9370e-01, + 4.6844e-01, 4.4246e-01, 4.8230e-01, 4.4025e-01, + 1.2932e-01, 5.0158e-01, 3.4561e-01, 6.3800e-01, + 7.6644e-01, 8.4960e-01, 7.3786e-01, 3.8484e-01, + 5.8584e-01, 9.6796e-01, 6.5105e-01, 2.5321e-01, + 1.7734e-01, 6.3052e-01, 4.3696e-01, 3.6457e-01, + 2.7272e-01, 5.8435e-01, 2.1206e-02, 5.4384e-02, + 9.4271e-01, 8.9365e-01, 5.3990e-01, 8.1847e-01, + 7.0901e-01, 5.3044e-01, 6.1877e-01, 1.1962e-01, + 4.5914e-02, 9.2967e-01, 5.4143e-01, 7.4890e-01, + 9.8228e-01, 9.3711e-01, 3.4617e-01, 1.9038e-02, + 3.9740e-01, 8.3132e-01, 4.2363e-01, 5.4495e-01, + 9.8543e-01, 8.2898e-01, 5.8470e-01, 8.8143e-01, + 4.0867e-01, 6.6392e-01, 5.4874e-01, 7.0118e-01, + 7.2307e-02, 5.7554e-01, 4.0564e-01, 1.0870e-03, + 2.0096e-01, 3.8055e-01, 7.4717e-01, 8.1397e-01, + 3.2829e-01, 8.0787e-01, 9.4238e-01, 5.4680e-01, + 5.2848e-01, 1.9310e-01, 7.2819e-01, 4.3255e-01, + 7.0849e-01, 2.4386e-01, 6.8791e-01, 2.0154e-01, + 6.0846e-01, 3.3334e-01, 2.2221e-01, 9.4985e-01, + 2.0216e-01, 4.5104e-01, 2.3064e-01, 9.4746e-01, + 2.5555e-01, 2.2402e-02, 5.7584e-01, 8.9413e-01, + 5.9529e-01, 2.5323e-01, 8.1748e-01, 8.5593e-01, + 3.8348e-01, 3.7183e-01, 1.8799e-02, 7.4252e-01, + 1.7736e-01, 4.6820e-01, 4.0454e-02, 4.7313e-01, + 5.7518e-01, 3.2170e-01, 4.9098e-01, 8.4657e-01, + 3.3057e-01, 8.0814e-01, 9.2674e-04, 4.3379e-02, + 6.0298e-01, 3.3700e-01, 7.3865e-01, 6.4367e-01, + 9.0927e-02, 2.6707e-01, 4.2173e-01, 5.8261e-01, + 2.4476e-01, 1.4488e-01, 6.1556e-01, 5.8658e-01, + 4.4925e-01, 1.6846e-01, 6.6856e-02, 6.6485e-02, + 3.8473e-01, 6.4202e-01, 2.7416e-01, 3.2795e-01, + 9.6223e-01, 2.7541e-01, 4.8494e-01, 7.3838e-01, + 4.4147e-01, 9.5682e-01, 9.1638e-03, 6.5319e-01, + 5.9508e-01, 1.7638e-01, 6.1482e-01, 9.9942e-01, + 9.8085e-01, 8.0579e-01, 8.6558e-01, 5.5512e-01, + 9.6847e-01, 7.5073e-01, 2.0441e-01, 1.4840e-01, + 5.4172e-02, 9.3348e-01, 7.2162e-01, 5.9162e-01, + 4.7312e-01, 6.0415e-01, 9.9107e-01, 8.0605e-01, + 4.3986e-02, 6.8727e-01, 3.2505e-01, 1.9134e-01, + 1.7638e-01, 2.7051e-01, 4.5466e-01, 1.1341e-01, + 9.4784e-01, 7.0837e-02, 8.5058e-01, 6.3147e-01, + 3.5592e-01, 2.9015e-01, 1.1883e-01, 7.4326e-01, + 2.3049e-01, 9.5652e-01, 7.5182e-01, 6.6144e-01, + 4.6311e-01, 8.6375e-01, 8.9287e-01, 8.0653e-01, + 7.7421e-01, 2.2481e-01, 6.0946e-01, 1.7692e-01, + 7.1021e-01, 2.8033e-01, 8.8487e-02, 2.1556e-01, + 7.0209e-01, 9.0661e-01, 2.6746e-01, 1.9315e-01, + 9.4691e-01, 4.4041e-01, 4.6893e-01, 8.4396e-01, + 3.8921e-02, 3.7440e-01, 6.5789e-01, 6.9847e-02, + 3.1750e-01, 8.5309e-01, 
1.0239e-01, 6.1780e-01, + 5.1002e-01, 5.5388e-01, 4.0862e-01, 1.2231e-01, + 9.4250e-01, 4.1529e-01, 2.5207e-01, 8.8937e-01, + 2.7785e-01, 5.2685e-01, 4.6369e-01, 2.7030e-01, + 6.7074e-02, 2.2412e-02, 4.1220e-01, 7.6231e-01, + 8.5684e-01, 8.8004e-01, 3.5003e-01, 9.0402e-01, + 3.3650e-02, 2.2938e-02, 6.8991e-01, 5.5282e-01, + 4.5545e-02, 2.1309e-01, 8.5304e-01, 8.8407e-01, + 6.8302e-01, 8.8960e-01, 7.3879e-01, 8.3454e-01, + 5.1934e-02, 6.3727e-01, 1.2223e-02, 1.9264e-01, + 7.3369e-01, 2.5143e-01, 6.4867e-01, 8.3002e-01, + 4.2205e-01, 5.1059e-02, 2.4172e-01, 6.9796e-01, + 5.0497e-01, 8.8670e-01, 8.8775e-01, 8.8076e-01, + 7.0083e-01, 3.4764e-01, 4.9336e-02, 1.5666e-01, + 2.7809e-02, 6.6684e-01, 4.8474e-02, 8.7248e-01, + 5.2152e-01, 4.1336e-01, 5.1172e-01, 7.4374e-01, + 6.3950e-01, 2.8102e-01, 7.0600e-01, 5.1464e-01, + 7.5093e-01, 6.4640e-01, 7.7618e-01, 9.7835e-01, + 7.3232e-01, 2.1637e-01, 2.8803e-01, 3.3177e-01, + 8.4665e-01, 3.5767e-01, 2.8235e-01, 6.0362e-01, + 4.6255e-01, 9.9400e-01, 3.6439e-01, 7.3426e-01, + 3.8993e-01, 8.2240e-01, 4.9244e-01, 8.6970e-01, + 9.2178e-01, 8.5348e-01, 7.1373e-01, 2.7922e-01, + 9.6329e-01, 4.9357e-01, 2.4284e-01, 4.6679e-01, + 3.0943e-01, 2.9822e-01, 2.5399e-01, 1.7784e-01, + 9.8823e-01, 5.2753e-01, 9.9058e-01, 7.5403e-01, + 4.3623e-02, 1.1892e-01, 1.8293e-01, 3.7756e-01, + 5.4637e-01, 5.7908e-01, 2.2365e-01, 7.3399e-01, + 8.2036e-01, 7.2816e-01, 7.7561e-01, 6.9415e-01, + 1.8483e-01, 5.5960e-02, 2.8699e-01, 9.9353e-01, + 1.7746e-01, 3.1593e-01, 5.9801e-01, 2.5089e-01, + 4.5685e-01, 7.8008e-01, 8.7662e-01, 8.3813e-02, + 1.8744e-01, 2.9216e-01, 9.4158e-02, 3.3458e-01, + 9.8502e-01, 4.9704e-01, 4.3369e-01, 3.9710e-01, + 6.1640e-01, 3.9325e-02, 1.3934e-01, 5.7092e-01, + 1.3933e-01, 4.9986e-01, 2.1964e-01, 8.1009e-01, + 7.0122e-01, 6.2106e-01, 6.6871e-02, 9.5679e-02, + 8.7816e-01, 1.4902e-01, 2.4698e-01, 5.6937e-01, + 4.1964e-01, 6.9421e-01, 7.2362e-01, 6.1623e-01, + 6.5641e-01, 9.0935e-01, 7.8322e-01, 5.5046e-01, + 9.0309e-01, 5.7276e-01, 3.4251e-02, 6.5240e-01, + 1.8413e-01, 6.4886e-01, 9.3211e-01, 8.1506e-01, + 1.2750e-02, 6.8833e-01, 8.6864e-02, 1.7997e-01, + 9.3671e-01, 7.3373e-02, 3.0836e-02, 8.8256e-01, + 4.2791e-01, 8.5767e-01, 6.3049e-01, 3.2487e-01, + 2.1938e-01, 9.9209e-01, 8.9164e-01, 1.5945e-01, + 8.0405e-01, 2.9757e-01, 6.4339e-01, 6.2081e-01, + 1.6219e-02, 8.0818e-01, 1.5974e-01, 2.2533e-01, + 6.4414e-02, 5.5729e-01, 5.0836e-01, 9.3804e-01, + 7.9662e-01, 7.3919e-01, 6.5223e-01, 5.7788e-01, + 3.1509e-01, 5.8571e-01, 3.3393e-01, 5.2047e-02, + 8.3915e-01, 7.6763e-01, 3.7666e-01, 3.1968e-01, + 7.9699e-01, 8.4183e-01, 6.2446e-01, 6.6666e-01, + 5.9732e-01, 6.2257e-01, 3.4286e-01, 7.6894e-01, + 6.4692e-01, 7.1011e-01, 5.5085e-01, 1.4733e-01, + 4.6545e-01, 7.0454e-01, 7.3621e-01, 9.5135e-01, + 8.8825e-01, 7.7723e-01, 5.5275e-02, 4.6903e-01, + 4.6193e-01, 4.0243e-01, 5.6782e-01, 9.0760e-01, + 2.0523e-01, 8.0415e-01, 6.6916e-01, 9.6048e-01, + 4.3196e-01, 8.1134e-02, 8.2374e-01, 7.1872e-02, + 3.6603e-01, 8.7181e-01, 9.8142e-01, 7.7029e-01, + 6.6115e-01, 9.8621e-01, 5.8343e-01, 9.7103e-01, + 1.6017e-01, 5.3701e-02, 9.7492e-01, 1.1451e-01, + 6.3745e-03, 9.6125e-01, 4.3112e-01, 1.0879e-01, + 8.5477e-01, 5.7303e-01, 1.0164e-02, 1.8420e-01, + 8.8972e-02, 9.1317e-01, 1.3149e-01, 4.9524e-01, + 1.7149e-01, 5.4635e-01, 2.0335e-01, 1.8077e-01, + 8.1957e-01, 9.6727e-01, 4.4750e-01, 9.8684e-01, + 6.5860e-01, 5.0957e-01, 1.8542e-01, 7.0739e-01, + 6.8969e-01, 8.3943e-02, 5.1050e-01, 7.9317e-01, + 4.8507e-01, 6.0141e-01, 7.2267e-01, 2.6164e-01, + 2.2171e-01, 2.5745e-01, 
6.2151e-01, 1.0765e-01, + 7.0520e-01, 9.4738e-01, 8.3515e-01, 9.1969e-01, + 5.2043e-01, 5.6068e-01, 9.6095e-01, 7.2309e-01, + 4.1503e-01, 7.3997e-01, 8.1054e-01, 9.1857e-01, + 3.1579e-01, 7.0653e-01, 2.0555e-01, 8.3081e-01, + 3.2284e-02, 7.8978e-01, 8.2729e-01, 3.9369e-01, + 5.0487e-01, 9.8492e-01, 9.7628e-01, 5.2118e-02, + 5.3673e-01, 8.4445e-01, 6.1678e-01, 1.3465e-01, + 2.8502e-01, 9.6391e-01, 1.9580e-01, 9.4046e-01, + 3.9735e-01, 7.3862e-01, 1.1849e-01, 8.5405e-02, + 6.8968e-01, 1.9438e-01, 9.9961e-01, 7.9560e-01, + 8.7678e-01, 4.9365e-01, 5.7739e-01, 8.2626e-01, + 7.7587e-01, 2.5610e-01, 8.9258e-01, 1.6790e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8014, 0.3668, 0.4669, ..., 0.4909, 0.1083, 0.8957]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.427929401397705 seconds + +tensor(indices=tensor([[2119, 370, 4867, ..., 6157, 4801, 3156], + [7170, 1669, 8220, ..., 3023, 1288, 8467]]), + values=tensor([1.8312e-01, 7.3021e-01, 7.6464e-01, 4.8412e-01, + 1.5609e-01, 6.6732e-01, 9.7368e-02, 3.7180e-01, + 2.3391e-01, 2.3905e-01, 1.7829e-01, 9.9389e-01, + 7.6105e-01, 9.2245e-01, 5.5538e-01, 2.8142e-01, + 6.7317e-01, 7.3850e-01, 3.4643e-01, 1.1757e-01, + 7.2606e-02, 8.9541e-01, 1.4152e-01, 3.7652e-01, + 6.2180e-01, 4.3944e-01, 8.2046e-01, 2.0405e-03, + 6.9751e-01, 3.8474e-01, 4.9488e-01, 4.6700e-01, + 4.0551e-01, 7.5934e-01, 2.1113e-01, 9.1893e-01, + 4.2057e-03, 7.8424e-02, 2.8094e-01, 3.0915e-01, + 2.0910e-01, 3.3698e-01, 1.4164e-01, 3.5863e-01, + 3.5687e-01, 7.2615e-02, 1.3379e-01, 9.2263e-01, + 2.4811e-01, 5.2390e-01, 6.9887e-02, 4.9367e-02, + 7.2391e-01, 5.0464e-01, 5.7919e-01, 2.9308e-01, + 2.6529e-01, 6.1911e-01, 6.5888e-01, 2.0729e-01, + 1.1878e-01, 6.0158e-01, 8.5799e-01, 8.9306e-01, + 3.3516e-01, 8.5090e-02, 6.1078e-01, 6.4668e-01, + 3.6377e-02, 4.1522e-01, 2.8105e-01, 6.9552e-01, + 5.8841e-01, 5.8434e-01, 8.3145e-01, 9.6718e-01, + 6.6380e-02, 7.9427e-01, 9.0060e-02, 3.9442e-01, + 2.5314e-01, 7.7662e-01, 6.8006e-01, 4.4329e-02, + 3.5769e-02, 4.4221e-01, 6.6089e-01, 7.7878e-01, + 5.2209e-01, 4.2534e-02, 9.6788e-01, 1.6671e-02, + 9.6923e-01, 5.6144e-01, 1.5801e-01, 3.5811e-01, + 3.7047e-01, 9.1746e-01, 1.2856e-01, 9.5044e-01, + 1.2196e-01, 8.0309e-01, 4.3474e-01, 6.5979e-01, + 6.1392e-01, 8.3567e-01, 5.6500e-01, 4.4357e-01, + 7.9144e-01, 6.1372e-01, 4.4152e-01, 9.8351e-01, + 2.0071e-01, 3.0790e-01, 9.6430e-01, 9.7225e-02, + 3.0209e-01, 1.9537e-01, 2.6178e-01, 1.5584e-01, + 9.0782e-01, 4.7460e-01, 7.6292e-01, 9.2549e-01, + 8.3405e-01, 6.1262e-01, 3.5385e-01, 7.8330e-01, + 8.6584e-01, 5.7228e-01, 6.4037e-01, 1.7740e-01, + 9.5735e-01, 7.6071e-02, 8.9185e-01, 8.1914e-02, + 8.8697e-01, 5.6774e-01, 7.5438e-01, 2.4131e-01, + 2.5978e-01, 4.9244e-02, 7.6644e-01, 8.8563e-01, + 3.6335e-01, 6.4613e-01, 7.9835e-01, 3.2914e-01, + 4.3924e-02, 4.9992e-01, 1.3715e-01, 6.8403e-01, + 4.9517e-02, 3.6644e-02, 1.8893e-01, 1.0566e-01, + 3.4005e-01, 4.0650e-01, 3.1296e-01, 9.3438e-01, + 8.8639e-01, 5.1614e-01, 7.7426e-01, 3.1588e-01, + 3.8492e-01, 7.5576e-01, 8.3543e-01, 4.7571e-01, + 2.6191e-01, 3.2880e-02, 9.9007e-01, 7.6323e-01, + 8.5492e-01, 1.3013e-01, 6.6154e-01, 6.8446e-01, + 2.9868e-01, 7.1734e-02, 7.7075e-01, 2.4576e-01, + 6.9270e-01, 3.0929e-02, 5.8579e-01, 8.2459e-01, + 4.5646e-01, 2.8382e-01, 5.4818e-02, 8.4626e-01, + 7.7507e-01, 4.0388e-01, 5.3286e-01, 2.1961e-01, + 1.2604e-01, 9.1162e-01, 7.0215e-01, 1.6616e-01, + 3.4670e-01, 6.2482e-01, 4.6684e-01, 7.7805e-01, + 
8.1569e-02, 7.0206e-01, 4.4895e-01, 9.7662e-01, + 9.6874e-01, 1.1363e-01, 4.1492e-01, 5.5514e-01, + 7.8131e-01, 8.1923e-01, 6.5862e-01, 4.5091e-01, + 2.8494e-02, 6.5861e-01, 1.5656e-01, 5.5375e-01, + 7.9710e-01, 6.8132e-01, 3.1976e-01, 3.3919e-01, + 6.3549e-01, 2.0704e-01, 7.6000e-01, 6.8972e-01, + 7.8630e-02, 6.6751e-01, 3.6432e-01, 1.8579e-01, + 4.4543e-01, 8.7083e-01, 4.2237e-01, 4.0057e-01, + 5.5765e-01, 9.5658e-02, 8.0082e-01, 9.3254e-01, + 8.7075e-01, 2.6062e-01, 7.4787e-01, 2.1407e-01, + 8.6307e-01, 3.7104e-01, 2.5075e-01, 2.5788e-01, + 3.5617e-01, 4.3311e-01, 6.5418e-01, 6.5210e-01, + 7.7688e-01, 7.7941e-01, 7.9153e-01, 5.3662e-01, + 2.2429e-01, 6.1689e-01, 5.6867e-01, 9.5297e-01, + 9.2783e-01, 2.0067e-01, 5.1906e-01, 7.1936e-01, + 9.0912e-01, 1.3329e-01, 1.0597e-01, 1.7989e-01, + 6.1609e-01, 4.6894e-02, 3.1246e-01, 8.7829e-01, + 2.2490e-01, 2.4823e-01, 9.9724e-03, 2.2425e-01, + 3.8862e-02, 2.5865e-01, 1.8847e-01, 5.8074e-01, + 8.6945e-01, 6.4165e-01, 6.8864e-01, 3.3133e-01, + 5.8246e-02, 2.9207e-01, 1.8777e-01, 2.7906e-01, + 5.2277e-01, 3.4218e-01, 3.2118e-01, 6.8783e-01, + 7.8539e-01, 7.2070e-01, 1.1307e-01, 4.7345e-01, + 2.0686e-01, 9.8983e-01, 3.8739e-01, 8.0375e-01, + 8.1521e-01, 6.2756e-01, 3.8942e-01, 1.5027e-02, + 8.0894e-01, 5.0622e-01, 1.8098e-01, 6.1361e-01, + 5.3930e-01, 9.8839e-02, 1.1099e-01, 8.5349e-01, + 8.0089e-01, 5.3257e-01, 4.0307e-01, 1.3745e-01, + 9.1004e-01, 1.5636e-01, 6.7634e-02, 4.3564e-02, + 1.5075e-01, 3.9366e-01, 5.5652e-01, 9.7021e-01, + 4.1157e-01, 3.6200e-01, 9.3667e-01, 9.9134e-01, + 4.7518e-01, 6.1808e-01, 5.6326e-01, 4.1457e-01, + 9.9247e-01, 8.2613e-01, 3.7293e-01, 5.1540e-02, + 5.9348e-01, 9.0873e-01, 6.2265e-01, 6.3758e-01, + 3.0655e-01, 1.7375e-01, 8.2881e-01, 9.9916e-01, + 6.2225e-01, 2.6628e-01, 9.6161e-01, 7.9303e-01, + 9.3582e-01, 8.9691e-01, 2.6329e-01, 8.6303e-01, + 5.8593e-01, 9.2131e-01, 7.4435e-01, 8.7190e-02, + 2.1794e-01, 4.6484e-01, 6.2854e-01, 2.5546e-01, + 5.8537e-01, 1.6982e-01, 2.2333e-01, 9.6681e-01, + 7.2005e-01, 9.3399e-01, 7.3319e-01, 8.2680e-01, + 8.1516e-01, 3.5205e-01, 7.8966e-01, 4.7854e-01, + 4.7408e-01, 4.4599e-01, 3.4353e-01, 7.1637e-01, + 4.0998e-01, 8.5288e-01, 1.2011e-01, 5.5029e-01, + 4.3544e-01, 5.2361e-01, 6.9647e-01, 9.9043e-01, + 7.3445e-01, 2.3838e-01, 4.1416e-01, 9.9105e-01, + 5.7867e-01, 5.8376e-01, 1.4777e-01, 5.9303e-01, + 2.0332e-01, 4.6805e-01, 6.7640e-01, 9.9999e-01, + 5.9218e-01, 6.4634e-01, 8.0104e-01, 6.8721e-01, + 5.2765e-02, 8.5161e-01, 7.9137e-01, 6.3561e-01, + 3.3495e-01, 9.1585e-01, 5.6677e-01, 9.6334e-01, + 1.7727e-01, 3.8575e-01, 5.6272e-01, 6.3699e-02, + 1.5808e-01, 5.2135e-02, 9.5833e-01, 8.2802e-01, + 3.6595e-01, 9.1926e-01, 5.4352e-01, 4.9316e-01, + 4.8384e-01, 7.2923e-01, 2.5552e-01, 4.8518e-01, + 9.7772e-01, 1.2488e-01, 2.7264e-01, 8.3253e-01, + 4.1428e-01, 3.5722e-01, 7.9530e-01, 4.9734e-01, + 8.6275e-03, 6.0638e-01, 4.8099e-01, 5.1299e-01, + 8.0771e-01, 3.7747e-01, 9.6382e-01, 3.1199e-02, + 1.3195e-01, 2.0493e-01, 5.4053e-01, 6.7797e-01, + 1.3970e-01, 5.3726e-01, 7.3474e-01, 8.8990e-01, + 7.3988e-01, 4.1938e-01, 1.9905e-01, 9.5666e-01, + 5.7635e-01, 8.8509e-01, 1.8238e-01, 5.2725e-01, + 2.7315e-01, 3.7634e-01, 9.9295e-01, 5.3940e-01, + 9.6960e-01, 1.2952e-01, 8.9433e-01, 5.4080e-01, + 9.8904e-01, 5.6085e-01, 2.9492e-01, 2.8775e-02, + 6.6257e-01, 1.0013e-01, 7.7299e-01, 5.9370e-01, + 4.6844e-01, 4.4246e-01, 4.8230e-01, 4.4025e-01, + 1.2932e-01, 5.0158e-01, 3.4561e-01, 6.3800e-01, + 7.6644e-01, 8.4960e-01, 7.3786e-01, 3.8484e-01, + 5.8584e-01, 9.6796e-01, 6.5105e-01, 2.5321e-01, + 
1.7734e-01, 6.3052e-01, 4.3696e-01, 3.6457e-01, + 2.7272e-01, 5.8435e-01, 2.1206e-02, 5.4384e-02, + 9.4271e-01, 8.9365e-01, 5.3990e-01, 8.1847e-01, + 7.0901e-01, 5.3044e-01, 6.1877e-01, 1.1962e-01, + 4.5914e-02, 9.2967e-01, 5.4143e-01, 7.4890e-01, + 9.8228e-01, 9.3711e-01, 3.4617e-01, 1.9038e-02, + 3.9740e-01, 8.3132e-01, 4.2363e-01, 5.4495e-01, + 9.8543e-01, 8.2898e-01, 5.8470e-01, 8.8143e-01, + 4.0867e-01, 6.6392e-01, 5.4874e-01, 7.0118e-01, + 7.2307e-02, 5.7554e-01, 4.0564e-01, 1.0870e-03, + 2.0096e-01, 3.8055e-01, 7.4717e-01, 8.1397e-01, + 3.2829e-01, 8.0787e-01, 9.4238e-01, 5.4680e-01, + 5.2848e-01, 1.9310e-01, 7.2819e-01, 4.3255e-01, + 7.0849e-01, 2.4386e-01, 6.8791e-01, 2.0154e-01, + 6.0846e-01, 3.3334e-01, 2.2221e-01, 9.4985e-01, + 2.0216e-01, 4.5104e-01, 2.3064e-01, 9.4746e-01, + 2.5555e-01, 2.2402e-02, 5.7584e-01, 8.9413e-01, + 5.9529e-01, 2.5323e-01, 8.1748e-01, 8.5593e-01, + 3.8348e-01, 3.7183e-01, 1.8799e-02, 7.4252e-01, + 1.7736e-01, 4.6820e-01, 4.0454e-02, 4.7313e-01, + 5.7518e-01, 3.2170e-01, 4.9098e-01, 8.4657e-01, + 3.3057e-01, 8.0814e-01, 9.2674e-04, 4.3379e-02, + 6.0298e-01, 3.3700e-01, 7.3865e-01, 6.4367e-01, + 9.0927e-02, 2.6707e-01, 4.2173e-01, 5.8261e-01, + 2.4476e-01, 1.4488e-01, 6.1556e-01, 5.8658e-01, + 4.4925e-01, 1.6846e-01, 6.6856e-02, 6.6485e-02, + 3.8473e-01, 6.4202e-01, 2.7416e-01, 3.2795e-01, + 9.6223e-01, 2.7541e-01, 4.8494e-01, 7.3838e-01, + 4.4147e-01, 9.5682e-01, 9.1638e-03, 6.5319e-01, + 5.9508e-01, 1.7638e-01, 6.1482e-01, 9.9942e-01, + 9.8085e-01, 8.0579e-01, 8.6558e-01, 5.5512e-01, + 9.6847e-01, 7.5073e-01, 2.0441e-01, 1.4840e-01, + 5.4172e-02, 9.3348e-01, 7.2162e-01, 5.9162e-01, + 4.7312e-01, 6.0415e-01, 9.9107e-01, 8.0605e-01, + 4.3986e-02, 6.8727e-01, 3.2505e-01, 1.9134e-01, + 1.7638e-01, 2.7051e-01, 4.5466e-01, 1.1341e-01, + 9.4784e-01, 7.0837e-02, 8.5058e-01, 6.3147e-01, + 3.5592e-01, 2.9015e-01, 1.1883e-01, 7.4326e-01, + 2.3049e-01, 9.5652e-01, 7.5182e-01, 6.6144e-01, + 4.6311e-01, 8.6375e-01, 8.9287e-01, 8.0653e-01, + 7.7421e-01, 2.2481e-01, 6.0946e-01, 1.7692e-01, + 7.1021e-01, 2.8033e-01, 8.8487e-02, 2.1556e-01, + 7.0209e-01, 9.0661e-01, 2.6746e-01, 1.9315e-01, + 9.4691e-01, 4.4041e-01, 4.6893e-01, 8.4396e-01, + 3.8921e-02, 3.7440e-01, 6.5789e-01, 6.9847e-02, + 3.1750e-01, 8.5309e-01, 1.0239e-01, 6.1780e-01, + 5.1002e-01, 5.5388e-01, 4.0862e-01, 1.2231e-01, + 9.4250e-01, 4.1529e-01, 2.5207e-01, 8.8937e-01, + 2.7785e-01, 5.2685e-01, 4.6369e-01, 2.7030e-01, + 6.7074e-02, 2.2412e-02, 4.1220e-01, 7.6231e-01, + 8.5684e-01, 8.8004e-01, 3.5003e-01, 9.0402e-01, + 3.3650e-02, 2.2938e-02, 6.8991e-01, 5.5282e-01, + 4.5545e-02, 2.1309e-01, 8.5304e-01, 8.8407e-01, + 6.8302e-01, 8.8960e-01, 7.3879e-01, 8.3454e-01, + 5.1934e-02, 6.3727e-01, 1.2223e-02, 1.9264e-01, + 7.3369e-01, 2.5143e-01, 6.4867e-01, 8.3002e-01, + 4.2205e-01, 5.1059e-02, 2.4172e-01, 6.9796e-01, + 5.0497e-01, 8.8670e-01, 8.8775e-01, 8.8076e-01, + 7.0083e-01, 3.4764e-01, 4.9336e-02, 1.5666e-01, + 2.7809e-02, 6.6684e-01, 4.8474e-02, 8.7248e-01, + 5.2152e-01, 4.1336e-01, 5.1172e-01, 7.4374e-01, + 6.3950e-01, 2.8102e-01, 7.0600e-01, 5.1464e-01, + 7.5093e-01, 6.4640e-01, 7.7618e-01, 9.7835e-01, + 7.3232e-01, 2.1637e-01, 2.8803e-01, 3.3177e-01, + 8.4665e-01, 3.5767e-01, 2.8235e-01, 6.0362e-01, + 4.6255e-01, 9.9400e-01, 3.6439e-01, 7.3426e-01, + 3.8993e-01, 8.2240e-01, 4.9244e-01, 8.6970e-01, + 9.2178e-01, 8.5348e-01, 7.1373e-01, 2.7922e-01, + 9.6329e-01, 4.9357e-01, 2.4284e-01, 4.6679e-01, + 3.0943e-01, 2.9822e-01, 2.5399e-01, 1.7784e-01, + 9.8823e-01, 5.2753e-01, 9.9058e-01, 7.5403e-01, + 
4.3623e-02, 1.1892e-01, 1.8293e-01, 3.7756e-01, + 5.4637e-01, 5.7908e-01, 2.2365e-01, 7.3399e-01, + 8.2036e-01, 7.2816e-01, 7.7561e-01, 6.9415e-01, + 1.8483e-01, 5.5960e-02, 2.8699e-01, 9.9353e-01, + 1.7746e-01, 3.1593e-01, 5.9801e-01, 2.5089e-01, + 4.5685e-01, 7.8008e-01, 8.7662e-01, 8.3813e-02, + 1.8744e-01, 2.9216e-01, 9.4158e-02, 3.3458e-01, + 9.8502e-01, 4.9704e-01, 4.3369e-01, 3.9710e-01, + 6.1640e-01, 3.9325e-02, 1.3934e-01, 5.7092e-01, + 1.3933e-01, 4.9986e-01, 2.1964e-01, 8.1009e-01, + 7.0122e-01, 6.2106e-01, 6.6871e-02, 9.5679e-02, + 8.7816e-01, 1.4902e-01, 2.4698e-01, 5.6937e-01, + 4.1964e-01, 6.9421e-01, 7.2362e-01, 6.1623e-01, + 6.5641e-01, 9.0935e-01, 7.8322e-01, 5.5046e-01, + 9.0309e-01, 5.7276e-01, 3.4251e-02, 6.5240e-01, + 1.8413e-01, 6.4886e-01, 9.3211e-01, 8.1506e-01, + 1.2750e-02, 6.8833e-01, 8.6864e-02, 1.7997e-01, + 9.3671e-01, 7.3373e-02, 3.0836e-02, 8.8256e-01, + 4.2791e-01, 8.5767e-01, 6.3049e-01, 3.2487e-01, + 2.1938e-01, 9.9209e-01, 8.9164e-01, 1.5945e-01, + 8.0405e-01, 2.9757e-01, 6.4339e-01, 6.2081e-01, + 1.6219e-02, 8.0818e-01, 1.5974e-01, 2.2533e-01, + 6.4414e-02, 5.5729e-01, 5.0836e-01, 9.3804e-01, + 7.9662e-01, 7.3919e-01, 6.5223e-01, 5.7788e-01, + 3.1509e-01, 5.8571e-01, 3.3393e-01, 5.2047e-02, + 8.3915e-01, 7.6763e-01, 3.7666e-01, 3.1968e-01, + 7.9699e-01, 8.4183e-01, 6.2446e-01, 6.6666e-01, + 5.9732e-01, 6.2257e-01, 3.4286e-01, 7.6894e-01, + 6.4692e-01, 7.1011e-01, 5.5085e-01, 1.4733e-01, + 4.6545e-01, 7.0454e-01, 7.3621e-01, 9.5135e-01, + 8.8825e-01, 7.7723e-01, 5.5275e-02, 4.6903e-01, + 4.6193e-01, 4.0243e-01, 5.6782e-01, 9.0760e-01, + 2.0523e-01, 8.0415e-01, 6.6916e-01, 9.6048e-01, + 4.3196e-01, 8.1134e-02, 8.2374e-01, 7.1872e-02, + 3.6603e-01, 8.7181e-01, 9.8142e-01, 7.7029e-01, + 6.6115e-01, 9.8621e-01, 5.8343e-01, 9.7103e-01, + 1.6017e-01, 5.3701e-02, 9.7492e-01, 1.1451e-01, + 6.3745e-03, 9.6125e-01, 4.3112e-01, 1.0879e-01, + 8.5477e-01, 5.7303e-01, 1.0164e-02, 1.8420e-01, + 8.8972e-02, 9.1317e-01, 1.3149e-01, 4.9524e-01, + 1.7149e-01, 5.4635e-01, 2.0335e-01, 1.8077e-01, + 8.1957e-01, 9.6727e-01, 4.4750e-01, 9.8684e-01, + 6.5860e-01, 5.0957e-01, 1.8542e-01, 7.0739e-01, + 6.8969e-01, 8.3943e-02, 5.1050e-01, 7.9317e-01, + 4.8507e-01, 6.0141e-01, 7.2267e-01, 2.6164e-01, + 2.2171e-01, 2.5745e-01, 6.2151e-01, 1.0765e-01, + 7.0520e-01, 9.4738e-01, 8.3515e-01, 9.1969e-01, + 5.2043e-01, 5.6068e-01, 9.6095e-01, 7.2309e-01, + 4.1503e-01, 7.3997e-01, 8.1054e-01, 9.1857e-01, + 3.1579e-01, 7.0653e-01, 2.0555e-01, 8.3081e-01, + 3.2284e-02, 7.8978e-01, 8.2729e-01, 3.9369e-01, + 5.0487e-01, 9.8492e-01, 9.7628e-01, 5.2118e-02, + 5.3673e-01, 8.4445e-01, 6.1678e-01, 1.3465e-01, + 2.8502e-01, 9.6391e-01, 1.9580e-01, 9.4046e-01, + 3.9735e-01, 7.3862e-01, 1.1849e-01, 8.5405e-02, + 6.8968e-01, 1.9438e-01, 9.9961e-01, 7.9560e-01, + 8.7678e-01, 4.9365e-01, 5.7739e-01, 8.2626e-01, + 7.7587e-01, 2.5610e-01, 8.9258e-01, 1.6790e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8014, 0.3668, 0.4669, ..., 0.4909, 0.1083, 0.8957]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.427929401397705 seconds + +[39.45, 38.96, 38.99, 38.83, 39.31, 38.79, 38.97, 38.87, 39.36, 44.44] +[64.75] +13.089130640029907 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 312424, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 
10.427929401397705, 'TIME_S_1KI': 0.03337749149040312, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 847.5212089419365, 'W': 64.75} +[39.45, 38.96, 38.99, 38.83, 39.31, 38.79, 38.97, 38.87, 39.36, 44.44, 40.34, 39.69, 39.35, 39.24, 38.98, 40.28, 44.43, 38.93, 39.34, 39.18] +714.025 +35.70125 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 312424, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.427929401397705, 'TIME_S_1KI': 0.03337749149040312, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 847.5212089419365, 'W': 64.75, 'J_1KI': 2.712727603967482, 'W_1KI': 0.20725040329808211, 'W_D': 29.04875, 'J_D': 380.2228836795687, 'W_D_1KI': 0.09297861239853532, 'J_D_1KI': 0.0002976039369527799} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..e7e344a --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 70776, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.457204341888428, "TIME_S_1KI": 0.14775071128473533, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 843.2878910303115, "W": 64.57, "J_1KI": 11.914884862528421, "W_1KI": 0.9123149090087034, "W_D": 28.59675, "J_D": 373.4751896828413, "W_D_1KI": 0.4040458630044083, "J_D_1KI": 0.0057087976574602736} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..91bc8a2 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.02408146858215332} + +tensor(indices=tensor([[4932, 9298, 6973, ..., 430, 6814, 307], + [6790, 8296, 5102, ..., 7681, 8109, 9670]]), + values=tensor([0.4381, 0.0121, 0.9377, ..., 0.2212, 0.9658, 0.2274]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.2308, 0.5790, 0.9172, ..., 0.4919, 0.9399, 0.7488]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.02408146858215332 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '43601', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.468353033065796} + +tensor(indices=tensor([[3074, 7371, 7778, ..., 4885, 8899, 4794], + [1726, 2812, 228, ..., 25, 3403, 1703]]), + values=tensor([0.7489, 0.8834, 0.3612, ..., 
0.9061, 0.2816, 0.9175]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.6264, 0.1482, 0.9244, ..., 0.3300, 0.2380, 0.1920]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 6.468353033065796 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '70776', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.457204341888428} + +tensor(indices=tensor([[3104, 6962, 9843, ..., 4567, 9740, 7799], + [9633, 7942, 505, ..., 8548, 2941, 2205]]), + values=tensor([0.4154, 0.6190, 0.5636, ..., 0.2939, 0.9495, 0.1419]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.7357, 0.9952, 0.5558, ..., 0.9509, 0.9770, 0.0200]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.457204341888428 seconds + +tensor(indices=tensor([[3104, 6962, 9843, ..., 4567, 9740, 7799], + [9633, 7942, 505, ..., 8548, 2941, 2205]]), + values=tensor([0.4154, 0.6190, 0.5636, ..., 0.2939, 0.9495, 0.1419]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.7357, 0.9952, 0.5558, ..., 0.9509, 0.9770, 0.0200]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.457204341888428 seconds + +[39.5, 38.79, 39.07, 39.36, 38.94, 38.88, 39.65, 38.88, 38.89, 39.19] +[64.57] +13.060057163238525 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 70776, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.457204341888428, 'TIME_S_1KI': 0.14775071128473533, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 843.2878910303115, 'W': 64.57} +[39.5, 38.79, 39.07, 39.36, 38.94, 38.88, 39.65, 38.88, 38.89, 39.19, 40.38, 38.72, 38.98, 40.44, 39.33, 39.2, 39.2, 46.73, 45.51, 38.72] +719.4649999999999 +35.97324999999999 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 70776, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.457204341888428, 'TIME_S_1KI': 0.14775071128473533, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 843.2878910303115, 'W': 64.57, 'J_1KI': 11.914884862528421, 'W_1KI': 0.9123149090087034, 'W_D': 28.59675, 'J_D': 373.4751896828413, 'W_D_1KI': 0.4040458630044083, 'J_D_1KI': 0.0057087976574602736} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..3691946 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 72.77176570892334, "TIME_S_1KI": 727.7176570892334, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5415.086123878956, "W": 66.39, "J_1KI": 54150.86123878956, "W_1KI": 663.9000000000001, "W_D": 30.805999999999997, "J_D": 2512.684788856983, "W_D_1KI": 308.06, "J_D_1KI": 3080.6} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..a712061 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 72.77176570892334} + +tensor(indices=tensor([[383027, 94170, 43500, ..., 29758, 479102, 296851], + [ 55023, 425996, 250514, ..., 10155, 107705, 236258]]), + values=tensor([0.9267, 0.7735, 0.7473, ..., 0.4349, 0.6155, 0.4772]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.9528, 0.9072, 0.4061, ..., 0.4081, 0.9695, 0.1860]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 72.77176570892334 seconds + +tensor(indices=tensor([[383027, 94170, 43500, ..., 29758, 479102, 296851], + [ 55023, 425996, 250514, ..., 10155, 107705, 236258]]), + values=tensor([0.9267, 0.7735, 0.7473, ..., 0.4349, 0.6155, 0.4772]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.9528, 0.9072, 0.4061, ..., 0.4081, 0.9695, 0.1860]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 72.77176570892334 seconds + +[40.17, 39.38, 39.11, 38.96, 39.04, 44.04, 39.25, 39.26, 39.3, 39.34] +[66.39] +81.56478571891785 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 72.77176570892334, 'TIME_S_1KI': 727.7176570892334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5415.086123878956, 'W': 66.39} +[40.17, 39.38, 39.11, 38.96, 39.04, 44.04, 39.25, 39.26, 39.3, 39.34, 39.73, 39.19, 39.15, 39.24, 39.54, 39.15, 39.26, 39.09, 39.21, 39.78] +711.6800000000001 +35.584 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 72.77176570892334, 'TIME_S_1KI': 727.7176570892334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5415.086123878956, 'W': 66.39, 'J_1KI': 54150.86123878956, 'W_1KI': 663.9000000000001, 'W_D': 30.805999999999997, 'J_D': 2512.684788856983, 'W_D_1KI': 308.06, 'J_D_1KI': 3080.6} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..25d52a3 --- /dev/null +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 144, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.343156576156616, "TIME_S_1KI": 71.82747622330983, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 898.3143911361694, "W": 66.0, "J_1KI": 6238.294382890065, "W_1KI": 458.3333333333333, "W_D": 30.563499999999998, "J_D": 415.9944226286411, "W_D_1KI": 212.24652777777774, "J_D_1KI": 1473.934220679012} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..a49e42f --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.243755578994751} + +tensor(indices=tensor([[392883, 389952, 43038, ..., 37204, 277384, 167621], + [413800, 310428, 14419, ..., 218609, 208934, 192212]]), + values=tensor([0.5173, 0.7213, 0.1626, ..., 0.4468, 0.9916, 0.0128]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8074, 0.9449, 0.8857, ..., 0.1362, 0.0986, 0.9338]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 7.243755578994751 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '144', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.343156576156616} + +tensor(indices=tensor([[460766, 337807, 111851, ..., 109182, 112026, 11067], + [114142, 59210, 33297, ..., 9847, 113785, 116539]]), + values=tensor([0.6946, 0.1631, 0.5529, ..., 0.1461, 0.9045, 0.0094]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8284, 0.3490, 0.1919, ..., 0.0430, 0.5901, 0.9358]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.343156576156616 seconds + +tensor(indices=tensor([[460766, 337807, 111851, ..., 109182, 112026, 11067], + [114142, 59210, 33297, ..., 9847, 113785, 116539]]), + values=tensor([0.6946, 0.1631, 0.5529, ..., 0.1461, 0.9045, 0.0094]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8284, 0.3490, 0.1919, ..., 0.0430, 0.5901, 0.9358]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.343156576156616 seconds + +[39.92, 39.06, 39.15, 39.12, 39.02, 38.96, 39.19, 39.17, 39.47, 39.34] +[66.0] +13.61082410812378 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 144, 
'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.343156576156616, 'TIME_S_1KI': 71.82747622330983, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 898.3143911361694, 'W': 66.0} +[39.92, 39.06, 39.15, 39.12, 39.02, 38.96, 39.19, 39.17, 39.47, 39.34, 40.87, 39.47, 39.7, 39.02, 39.69, 39.51, 39.22, 39.9, 39.52, 38.99] +708.73 +35.4365 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 144, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.343156576156616, 'TIME_S_1KI': 71.82747622330983, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 898.3143911361694, 'W': 66.0, 'J_1KI': 6238.294382890065, 'W_1KI': 458.3333333333333, 'W_D': 30.563499999999998, 'J_D': 415.9944226286411, 'W_D_1KI': 212.24652777777774, 'J_D_1KI': 1473.934220679012} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..98aaf1d --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 36.535229444503784, "TIME_S_1KI": 365.35229444503784, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2768.6764058446884, "W": 65.67, "J_1KI": 27686.764058446883, "W_1KI": 656.7, "W_D": 30.268, "J_D": 1276.1123412837983, "W_D_1KI": 302.68, "J_D_1KI": 3026.8} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..5d3f407 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 36.535229444503784} + +tensor(indices=tensor([[ 28780, 448515, 209957, ..., 395303, 388002, 7101], + [326931, 461876, 24331, ..., 28691, 286378, 494195]]), + values=tensor([0.1987, 0.3163, 0.7606, ..., 0.9775, 0.1028, 0.7403]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.8846, 0.8398, 0.9503, ..., 0.8732, 0.7810, 0.6747]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 36.535229444503784 seconds + +tensor(indices=tensor([[ 28780, 448515, 209957, ..., 395303, 388002, 7101], + [326931, 461876, 24331, ..., 28691, 286378, 494195]]), + values=tensor([0.1987, 0.3163, 0.7606, ..., 0.9775, 0.1028, 0.7403]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.8846, 0.8398, 0.9503, ..., 0.8732, 0.7810, 0.6747]) +Matrix Type: 
synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 36.535229444503784 seconds + +[40.58, 38.97, 39.2, 38.89, 39.69, 39.52, 39.06, 39.03, 39.54, 38.88] +[65.67] +42.16044473648071 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 36.535229444503784, 'TIME_S_1KI': 365.35229444503784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2768.6764058446884, 'W': 65.67} +[40.58, 38.97, 39.2, 38.89, 39.69, 39.52, 39.06, 39.03, 39.54, 38.88, 40.19, 38.96, 39.61, 38.9, 39.81, 39.28, 39.57, 39.31, 39.38, 38.99] +708.0400000000001 +35.402 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 36.535229444503784, 'TIME_S_1KI': 365.35229444503784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2768.6764058446884, 'W': 65.67, 'J_1KI': 27686.764058446883, 'W_1KI': 656.7, 'W_D': 30.268, 'J_D': 1276.1123412837983, 'W_D_1KI': 302.68, 'J_D_1KI': 3026.8} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..6a5f3f6 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1440, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.317506074905396, "TIME_S_1KI": 7.164934774239859, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 833.2424128532409, "W": 64.58, "J_1KI": 578.6405644814173, "W_1KI": 44.84722222222222, "W_D": 28.4645, "J_D": 367.2627541136742, "W_D_1KI": 19.76701388888889, "J_D_1KI": 13.727092978395063} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..5d1c601 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.7289695739746094} + +tensor(indices=tensor([[13766, 26242, 27182, ..., 24235, 13294, 42105], + [12071, 8759, 14727, ..., 7382, 3241, 7199]]), + values=tensor([0.8519, 0.2537, 0.8740, ..., 0.6708, 0.1150, 0.0931]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.3666, 0.6080, 0.6148, ..., 0.7941, 0.1226, 0.7371]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.7289695739746094 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 
'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '1440', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.317506074905396} + +tensor(indices=tensor([[44216, 46438, 25886, ..., 41275, 14498, 34138], + [17463, 42643, 10452, ..., 34048, 9249, 39324]]), + values=tensor([0.3012, 0.5275, 0.0414, ..., 0.4737, 0.3661, 0.8374]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.8037, 0.4582, 0.5466, ..., 0.7763, 0.9592, 0.7507]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.317506074905396 seconds + +tensor(indices=tensor([[44216, 46438, 25886, ..., 41275, 14498, 34138], + [17463, 42643, 10452, ..., 34048, 9249, 39324]]), + values=tensor([0.3012, 0.5275, 0.0414, ..., 0.4737, 0.3661, 0.8374]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.8037, 0.4582, 0.5466, ..., 0.7763, 0.9592, 0.7507]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.317506074905396 seconds + +[39.47, 38.75, 38.93, 39.14, 38.85, 38.89, 40.83, 39.32, 39.44, 39.61] +[64.58] +12.902483940124512 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1440, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.317506074905396, 'TIME_S_1KI': 7.164934774239859, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.2424128532409, 'W': 64.58} +[39.47, 38.75, 38.93, 39.14, 38.85, 38.89, 40.83, 39.32, 39.44, 39.61, 40.11, 39.01, 39.07, 40.03, 39.76, 44.33, 48.43, 39.23, 39.27, 38.87] +722.31 +36.1155 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1440, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.317506074905396, 'TIME_S_1KI': 7.164934774239859, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.2424128532409, 'W': 64.58, 'J_1KI': 578.6405644814173, 'W_1KI': 44.84722222222222, 'W_D': 28.4645, 'J_D': 367.2627541136742, 'W_D_1KI': 19.76701388888889, 'J_D_1KI': 13.727092978395063} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..2c3e35e --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 147, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.514722108840942, "TIME_S_1KI": 71.52872182884995, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 890.2784266066552, "W": 65.51, "J_1KI": 6056.315827256159, "W_1KI": 445.6462585034014, "W_D": 29.79825000000001, "J_D": 404.9570924382807, "W_D_1KI": 202.70918367346945, "J_D_1KI": 1378.9740385950302} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.001.output 
b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..dbfe5c7 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.110639810562134} + +tensor(indices=tensor([[ 9158, 5384, 28070, ..., 10393, 44342, 42811], + [35976, 30919, 37000, ..., 31536, 48404, 46365]]), + values=tensor([0.7001, 0.3349, 0.9291, ..., 0.9365, 0.4506, 0.2153]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8697, 0.3332, 0.4985, ..., 0.0048, 0.1944, 0.4180]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 7.110639810562134 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '147', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.514722108840942} + +tensor(indices=tensor([[32156, 6201, 21177, ..., 23614, 45977, 38246], + [ 6418, 28353, 9141, ..., 30010, 31348, 34593]]), + values=tensor([0.6613, 0.1377, 0.5651, ..., 0.0013, 0.4189, 0.3306]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1855, 0.3509, 0.5872, ..., 0.6173, 0.2095, 0.8671]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.514722108840942 seconds + +tensor(indices=tensor([[32156, 6201, 21177, ..., 23614, 45977, 38246], + [ 6418, 28353, 9141, ..., 30010, 31348, 34593]]), + values=tensor([0.6613, 0.1377, 0.5651, ..., 0.0013, 0.4189, 0.3306]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1855, 0.3509, 0.5872, ..., 0.6173, 0.2095, 0.8671]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.514722108840942 seconds + +[39.9, 39.44, 39.26, 39.38, 44.86, 38.87, 39.05, 39.45, 39.04, 39.02] +[65.51] +13.589962244033813 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.514722108840942, 'TIME_S_1KI': 71.52872182884995, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 890.2784266066552, 'W': 65.51} +[39.9, 39.44, 39.26, 39.38, 44.86, 38.87, 39.05, 39.45, 39.04, 39.02, 46.65, 38.92, 38.83, 39.4, 39.59, 39.25, 39.04, 38.72, 38.99, 38.72] +714.2349999999999 +35.711749999999995 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.514722108840942, 'TIME_S_1KI': 71.52872182884995, 'BASELINE_TIME_S': 10, 
'BASELINE_DELAY_S': 10, 'J': 890.2784266066552, 'W': 65.51, 'J_1KI': 6056.315827256159, 'W_1KI': 445.6462585034014, 'W_D': 29.79825000000001, 'J_D': 404.9570924382807, 'W_D_1KI': 202.70918367346945, 'J_D_1KI': 1378.9740385950302} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..61832df --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 71.53878498077393, "TIME_S_1KI": 715.3878498077393, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5276.530365753174, "W": 65.56, "J_1KI": 52765.30365753174, "W_1KI": 655.6, "W_D": 30.018, "J_D": 2415.9684032821656, "W_D_1KI": 300.18, "J_D_1KI": 3001.8} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..fb12be8 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 71.53878498077393} + +tensor(indices=tensor([[ 7582, 21142, 45177, ..., 36583, 23749, 23702], + [16570, 15472, 8583, ..., 35580, 11844, 49612]]), + values=tensor([0.3677, 0.9369, 0.1965, ..., 0.4990, 0.0078, 0.8724]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6103, 0.1288, 0.0790, ..., 0.2199, 0.1644, 0.1402]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 71.53878498077393 seconds + +tensor(indices=tensor([[ 7582, 21142, 45177, ..., 36583, 23749, 23702], + [16570, 15472, 8583, ..., 35580, 11844, 49612]]), + values=tensor([0.3677, 0.9369, 0.1965, ..., 0.4990, 0.0078, 0.8724]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6103, 0.1288, 0.0790, ..., 0.2199, 0.1644, 0.1402]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 71.53878498077393 seconds + +[39.66, 38.98, 39.2, 39.07, 39.17, 40.42, 39.46, 39.01, 39.44, 39.54] +[65.56] +80.48398971557617 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 71.53878498077393, 'TIME_S_1KI': 715.3878498077393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5276.530365753174, 'W': 65.56} +[39.66, 38.98, 39.2, 39.07, 39.17, 40.42, 39.46, 39.01, 39.44, 39.54, 40.62, 38.93, 39.79, 39.36, 39.57, 38.94, 39.28, 39.35, 39.0, 43.92] +710.84 +35.542 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 
'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 71.53878498077393, 'TIME_S_1KI': 715.3878498077393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5276.530365753174, 'W': 65.56, 'J_1KI': 52765.30365753174, 'W_1KI': 655.6, 'W_D': 30.018, 'J_D': 2415.9684032821656, 'W_D_1KI': 300.18, 'J_D_1KI': 3001.8} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..fce4712 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 354.20170974731445, "TIME_S_1KI": 3542.0170974731445, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 25468.79347042322, "W": 65.91, "J_1KI": 254687.93470423223, "W_1KI": 659.1, "W_D": 30.048999999999992, "J_D": 11611.466772762058, "W_D_1KI": 300.4899999999999, "J_D_1KI": 3004.8999999999987} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..c1394b0 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 354.20170974731445} + +tensor(indices=tensor([[ 5190, 24176, 40930, ..., 25141, 1372, 29701], + [39595, 998, 37630, ..., 16655, 7016, 36431]]), + values=tensor([0.6184, 0.4680, 0.3500, ..., 0.6189, 0.9422, 0.6126]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.9523, 0.3083, 0.5384, ..., 0.9147, 0.7122, 0.9310]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 354.20170974731445 seconds + +tensor(indices=tensor([[ 5190, 24176, 40930, ..., 25141, 1372, 29701], + [39595, 998, 37630, ..., 16655, 7016, 36431]]), + values=tensor([0.6184, 0.4680, 0.3500, ..., 0.6189, 0.9422, 0.6126]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.9523, 0.3083, 0.5384, ..., 0.9147, 0.7122, 0.9310]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 354.20170974731445 seconds + +[39.97, 39.37, 39.22, 39.2, 39.28, 39.16, 39.87, 39.15, 39.89, 40.01] +[65.91] +386.41774344444275 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 354.20170974731445, 'TIME_S_1KI': 3542.0170974731445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25468.79347042322, 'W': 65.91} 
+[39.97, 39.37, 39.22, 39.2, 39.28, 39.16, 39.87, 39.15, 39.89, 40.01, 40.81, 39.47, 39.38, 39.49, 39.34, 39.23, 41.49, 44.46, 39.18, 39.29] +717.22 +35.861000000000004 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 354.20170974731445, 'TIME_S_1KI': 3542.0170974731445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25468.79347042322, 'W': 65.91, 'J_1KI': 254687.93470423223, 'W_1KI': 659.1, 'W_D': 30.048999999999992, 'J_D': 11611.466772762058, 'W_D_1KI': 300.4899999999999, 'J_D_1KI': 3004.8999999999987} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..6bfdaad --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 14405, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.491212368011475, "TIME_S_1KI": 0.7283035312746599, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 852.1216920471192, "W": 65.56, "J_1KI": 59.15457771934184, "W_1KI": 4.5511975008677545, "W_D": 29.49575, "J_D": 383.3735265130997, "W_D_1KI": 2.0476049982644917, "J_D_1KI": 0.14214543549215494} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..6dd2d76 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0822446346282959} + +tensor(indices=tensor([[48386, 4020, 24438, ..., 6501, 6070, 3245], + [49566, 49143, 46387, ..., 3885, 23564, 24071]]), + values=tensor([0.2772, 0.5588, 0.7335, ..., 0.6183, 0.1500, 0.0330]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0762, 0.9958, 0.4590, ..., 0.2990, 0.9545, 0.9892]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.0822446346282959 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '12766', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.305252075195312} + +tensor(indices=tensor([[14514, 28000, 49911, ..., 39102, 7756, 21973], + [27241, 33897, 29722, ..., 41051, 5051, 4355]]), + values=tensor([0.4995, 0.5919, 0.2222, ..., 0.5966, 0.8995, 0.9459]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.7936, 0.2535, 
0.8351, ..., 0.8311, 0.5838, 0.8259]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.305252075195312 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '14405', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.491212368011475} + +tensor(indices=tensor([[31198, 14169, 45092, ..., 21148, 8175, 12344], + [24427, 4130, 35536, ..., 40833, 1839, 39405]]), + values=tensor([0.1904, 0.6721, 0.4889, ..., 0.0560, 0.4757, 0.0075]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6866, 0.3545, 0.9085, ..., 0.4801, 0.5089, 0.9857]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.491212368011475 seconds + +tensor(indices=tensor([[31198, 14169, 45092, ..., 21148, 8175, 12344], + [24427, 4130, 35536, ..., 40833, 1839, 39405]]), + values=tensor([0.1904, 0.6721, 0.4889, ..., 0.0560, 0.4757, 0.0075]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6866, 0.3545, 0.9085, ..., 0.4801, 0.5089, 0.9857]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.491212368011475 seconds + +[39.98, 39.52, 39.88, 39.68, 39.27, 39.03, 39.15, 39.45, 40.72, 44.45] +[65.56] +12.99758529663086 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 14405, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.491212368011475, 'TIME_S_1KI': 0.7283035312746599, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.1216920471192, 'W': 65.56} +[39.98, 39.52, 39.88, 39.68, 39.27, 39.03, 39.15, 39.45, 40.72, 44.45, 39.71, 39.97, 40.57, 40.69, 39.19, 39.42, 44.31, 39.04, 39.62, 39.41] +721.285 +36.06425 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 14405, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.491212368011475, 'TIME_S_1KI': 0.7283035312746599, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.1216920471192, 'W': 65.56, 'J_1KI': 59.15457771934184, 'W_1KI': 4.5511975008677545, 'W_D': 29.49575, 'J_D': 383.3735265130997, 'W_D_1KI': 2.0476049982644917, 'J_D_1KI': 0.14214543549215494} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..ce8f9d0 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 2862, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.286558151245117, "TIME_S_1KI": 3.594185238031138, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 833.4476539707184, "W": 64.98, "J_1KI": 291.2116191372182, 
"W_1KI": 22.70440251572327, "W_D": 29.459500000000006, "J_D": 377.853972947836, "W_D_1KI": 10.29332634521314, "J_D_1KI": 3.596550085678945} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..f7938cc --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.3668184280395508} + +tensor(indices=tensor([[46239, 36094, 34321, ..., 10491, 7077, 48151], + [12421, 11798, 33183, ..., 8318, 34431, 4060]]), + values=tensor([0.7506, 0.9130, 0.0960, ..., 0.3533, 0.4749, 0.3283]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.2934, 0.3487, 0.2815, ..., 0.0979, 0.8436, 0.6695]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.3668184280395508 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '2862', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.286558151245117} + +tensor(indices=tensor([[39088, 4587, 40146, ..., 29224, 42913, 12463], + [31480, 36110, 14356, ..., 5110, 31511, 21462]]), + values=tensor([0.6802, 0.1231, 0.7556, ..., 0.6509, 0.8179, 0.7630]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.7982, 0.4298, 0.5100, ..., 0.6001, 0.7429, 0.9348]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.286558151245117 seconds + +tensor(indices=tensor([[39088, 4587, 40146, ..., 29224, 42913, 12463], + [31480, 36110, 14356, ..., 5110, 31511, 21462]]), + values=tensor([0.6802, 0.1231, 0.7556, ..., 0.6509, 0.8179, 0.7630]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.7982, 0.4298, 0.5100, ..., 0.6001, 0.7429, 0.9348]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.286558151245117 seconds + +[45.53, 39.41, 39.11, 40.12, 39.06, 39.05, 38.98, 39.29, 39.09, 39.05] +[64.98] +12.826218128204346 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2862, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.286558151245117, 'TIME_S_1KI': 3.594185238031138, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.4476539707184, 'W': 64.98} +[45.53, 39.41, 39.11, 40.12, 39.06, 39.05, 38.98, 39.29, 39.09, 39.05, 40.04, 38.84, 39.34, 38.88, 40.87, 39.25, 39.32, 39.22, 38.88, 38.78] +710.41 +35.5205 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 2862, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 
'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.286558151245117, 'TIME_S_1KI': 3.594185238031138, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.4476539707184, 'W': 64.98, 'J_1KI': 291.2116191372182, 'W_1KI': 22.70440251572327, 'W_D': 29.459500000000006, 'J_D': 377.853972947836, 'W_D_1KI': 10.29332634521314, 'J_D_1KI': 3.596550085678945} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..98f55da --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 138978, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.458280324935913, "TIME_S_1KI": 0.07525133708166698, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 843.08068202734, "W": 64.65, "J_1KI": 6.066288779715783, "W_1KI": 0.4651815395242413, "W_D": 29.213250000000002, "J_D": 380.96097036713365, "W_D_1KI": 0.2102005353365281, "J_D_1KI": 0.0015124734514565478} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..2672d91 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.016300678253173828} + +tensor(indices=tensor([[2033, 2815, 2817, ..., 1164, 2382, 2625], + [1302, 4952, 1344, ..., 4063, 2176, 58]]), + values=tensor([0.5861, 0.6837, 0.0303, ..., 0.5378, 0.4611, 0.7393]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.9792, 0.9319, 0.9624, ..., 0.2415, 0.4938, 0.0096]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.016300678253173828 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '64414', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.866543531417847} + +tensor(indices=tensor([[ 324, 735, 2309, ..., 3758, 189, 1615], + [3378, 2298, 4650, ..., 4118, 3827, 2779]]), + values=tensor([0.7884, 0.4302, 0.0499, ..., 0.2987, 0.1020, 0.3160]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8973, 0.1521, 0.5315, ..., 0.5998, 0.2695, 0.3504]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 4.866543531417847 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 
'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '138978', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.458280324935913} + +tensor(indices=tensor([[1581, 4068, 2315, ..., 3028, 3999, 522], + [1726, 264, 3901, ..., 3558, 2872, 2367]]), + values=tensor([0.5933, 0.6975, 0.0415, ..., 0.6285, 0.1181, 0.4583]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.2972, 0.9910, 0.1714, ..., 0.0533, 0.8781, 0.0841]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.458280324935913 seconds + +tensor(indices=tensor([[1581, 4068, 2315, ..., 3028, 3999, 522], + [1726, 264, 3901, ..., 3558, 2872, 2367]]), + values=tensor([0.5933, 0.6975, 0.0415, ..., 0.6285, 0.1181, 0.4583]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.2972, 0.9910, 0.1714, ..., 0.0533, 0.8781, 0.0841]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.458280324935913 seconds + +[40.1, 39.57, 39.21, 41.07, 39.32, 38.98, 39.43, 39.29, 38.94, 39.1] +[64.65] +13.040691137313843 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 138978, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.458280324935913, 'TIME_S_1KI': 0.07525133708166698, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 843.08068202734, 'W': 64.65} +[40.1, 39.57, 39.21, 41.07, 39.32, 38.98, 39.43, 39.29, 38.94, 39.1, 39.62, 39.49, 39.31, 39.23, 39.4, 38.89, 38.98, 39.21, 39.16, 39.69] +708.7350000000001 +35.43675 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 138978, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.458280324935913, 'TIME_S_1KI': 0.07525133708166698, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 843.08068202734, 'W': 64.65, 'J_1KI': 6.066288779715783, 'W_1KI': 0.4651815395242413, 'W_D': 29.213250000000002, 'J_D': 380.96097036713365, 'W_D_1KI': 0.2102005353365281, 'J_D_1KI': 0.0015124734514565478} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..e2a7bf3 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 14812, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.602743148803711, "TIME_S_1KI": 0.7158211685662781, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 857.8070358514785, "W": 65.02, "J_1KI": 57.91297838586812, "W_1KI": 4.389684039967594, "W_D": 29.03, "J_D": 382.9919755578041, "W_D_1KI": 1.9598973805022954, "J_D_1KI": 0.1323182136444974} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 
index 0000000..dd5917a --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.08086681365966797} + +tensor(indices=tensor([[4649, 3811, 1637, ..., 2739, 1581, 755], + [ 680, 821, 2679, ..., 1986, 4392, 1004]]), + values=tensor([0.7047, 0.8031, 0.7519, ..., 0.9929, 0.0562, 0.5387]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6453, 0.7496, 0.8540, ..., 0.7718, 0.4404, 0.1354]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.08086681365966797 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '12984', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.204148292541504} + +tensor(indices=tensor([[ 997, 3041, 4339, ..., 4819, 2435, 2156], + [3456, 4517, 2194, ..., 631, 4765, 4983]]), + values=tensor([0.4386, 0.6129, 0.0714, ..., 0.0873, 0.9368, 0.6174]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.8099, 0.9240, 0.6528, ..., 0.6788, 0.7124, 0.5124]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.204148292541504 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '14812', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.602743148803711} + +tensor(indices=tensor([[4045, 3595, 1545, ..., 270, 1428, 263], + [1792, 4067, 3231, ..., 4282, 4513, 2856]]), + values=tensor([0.6216, 0.5282, 0.4071, ..., 0.4382, 0.4998, 0.5853]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.3831, 0.7653, 0.0818, ..., 0.0213, 0.9390, 0.8185]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.602743148803711 seconds + +tensor(indices=tensor([[4045, 3595, 1545, ..., 270, 1428, 263], + [1792, 4067, 3231, ..., 4282, 4513, 2856]]), + values=tensor([0.6216, 0.5282, 0.4071, ..., 0.4382, 0.4998, 0.5853]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.3831, 0.7653, 0.0818, ..., 0.0213, 0.9390, 0.8185]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.602743148803711 seconds + +[39.7, 39.13, 39.83, 39.72, 40.65, 45.24, 38.97, 39.32, 39.29, 39.02] +[65.02] +13.19297194480896 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 14812, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 
'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.602743148803711, 'TIME_S_1KI': 0.7158211685662781, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 857.8070358514785, 'W': 65.02} +[39.7, 39.13, 39.83, 39.72, 40.65, 45.24, 38.97, 39.32, 39.29, 39.02, 39.62, 38.83, 44.69, 38.92, 39.31, 39.52, 39.2, 39.09, 38.87, 40.1] +719.8 +35.989999999999995 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 14812, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.602743148803711, 'TIME_S_1KI': 0.7158211685662781, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 857.8070358514785, 'W': 65.02, 'J_1KI': 57.91297838586812, 'W_1KI': 4.389684039967594, 'W_D': 29.03, 'J_D': 382.9919755578041, 'W_D_1KI': 1.9598973805022954, 'J_D_1KI': 0.1323182136444974} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..ec96c42 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1457, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.253655672073364, "TIME_S_1KI": 7.03751247225351, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 831.5186310982704, "W": 64.61, "J_1KI": 570.7059925176873, "W_1KI": 44.34454358270418, "W_D": 29.4585, "J_D": 379.12539226448536, "W_D_1KI": 20.21859986273164, "J_D_1KI": 13.876870187187125} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..8ae1484 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.7203061580657959} + +tensor(indices=tensor([[4419, 1088, 2352, ..., 1413, 1959, 923], + [3828, 206, 2698, ..., 842, 3533, 1103]]), + values=tensor([0.2855, 0.9074, 0.7484, ..., 0.0872, 0.4690, 0.5414]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.6494, 0.2180, 0.6537, ..., 0.0978, 0.8612, 0.1811]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.7203061580657959 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '1457', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.253655672073364} + +tensor(indices=tensor([[3551, 883, 2590, ..., 1068, 602, 3317], + [4244, 2533, 1335, ..., 434, 2815, 3157]]), + values=tensor([0.2984, 0.1618, 0.0751, 
..., 0.1293, 0.5353, 0.1898]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.2569, 0.9115, 0.4511, ..., 0.6738, 0.9714, 0.7792]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.253655672073364 seconds + +tensor(indices=tensor([[3551, 883, 2590, ..., 1068, 602, 3317], + [4244, 2533, 1335, ..., 434, 2815, 3157]]), + values=tensor([0.2984, 0.1618, 0.0751, ..., 0.1293, 0.5353, 0.1898]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.2569, 0.9115, 0.4511, ..., 0.6738, 0.9714, 0.7792]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.253655672073364 seconds + +[39.52, 39.03, 38.86, 38.9, 38.82, 38.78, 38.79, 38.85, 39.02, 38.76] +[64.61] +12.869813203811646 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1457, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.253655672073364, 'TIME_S_1KI': 7.03751247225351, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 831.5186310982704, 'W': 64.61} +[39.52, 39.03, 38.86, 38.9, 38.82, 38.78, 38.79, 38.85, 39.02, 38.76, 40.82, 38.87, 38.94, 38.85, 38.95, 39.02, 39.44, 38.8, 39.95, 39.22] +703.03 +35.1515 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1457, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.253655672073364, 'TIME_S_1KI': 7.03751247225351, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 831.5186310982704, 'W': 64.61, 'J_1KI': 570.7059925176873, 'W_1KI': 44.34454358270418, 'W_D': 29.4585, 'J_D': 379.12539226448536, 'W_D_1KI': 20.21859986273164, 'J_D_1KI': 13.876870187187125} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..577e414 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 293, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.322140216827393, "TIME_S_1KI": 35.22914749770442, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 852.4280810117722, "W": 64.7, "J_1KI": 2909.3108566954684, "W_1KI": 220.81911262798636, "W_D": 28.65025, "J_D": 377.46951511603595, "W_D_1KI": 97.78242320819112, "J_D_1KI": 333.7284068538946} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..9b2e914 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 
0.05, "TIME_S": 3.5736217498779297} + +tensor(indices=tensor([[2962, 790, 3138, ..., 1357, 1108, 4179], + [1582, 481, 1943, ..., 776, 3799, 1730]]), + values=tensor([0.9539, 0.3082, 0.7622, ..., 0.2058, 0.3546, 0.4987]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.4906, 0.6198, 0.2201, ..., 0.2278, 0.7825, 0.6852]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 3.5736217498779297 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '293', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.322140216827393} + +tensor(indices=tensor([[ 486, 4954, 1279, ..., 4850, 4509, 1596], + [2141, 295, 2462, ..., 4897, 2942, 2431]]), + values=tensor([0.7447, 0.2340, 0.0322, ..., 0.3467, 0.6540, 0.8689]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.1965, 0.5661, 0.8973, ..., 0.8800, 0.9944, 0.5712]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.322140216827393 seconds + +tensor(indices=tensor([[ 486, 4954, 1279, ..., 4850, 4509, 1596], + [2141, 295, 2462, ..., 4897, 2942, 2431]]), + values=tensor([0.7447, 0.2340, 0.0322, ..., 0.3467, 0.6540, 0.8689]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.1965, 0.5661, 0.8973, ..., 0.8800, 0.9944, 0.5712]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.322140216827393 seconds + +[39.61, 38.9, 55.14, 39.38, 39.08, 39.38, 39.24, 38.96, 39.07, 38.87] +[64.7] +13.175086259841919 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 293, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.322140216827393, 'TIME_S_1KI': 35.22914749770442, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.4280810117722, 'W': 64.7} +[39.61, 38.9, 55.14, 39.38, 39.08, 39.38, 39.24, 38.96, 39.07, 38.87, 39.52, 38.84, 38.78, 38.91, 38.84, 39.33, 39.26, 39.94, 38.78, 40.33] +720.9950000000001 +36.04975 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 293, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.322140216827393, 'TIME_S_1KI': 35.22914749770442, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.4280810117722, 'W': 64.7, 'J_1KI': 2909.3108566954684, 'W_1KI': 220.81911262798636, 'W_D': 28.65025, 'J_D': 377.46951511603595, 'W_D_1KI': 97.78242320819112, 'J_D_1KI': 333.7284068538946} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..895c0c1 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 148, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, 
"MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.523432970046997, "TIME_S_1KI": 71.10427682464187, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 874.4558725833892, "W": 64.86, "J_1KI": 5908.48562556344, "W_1KI": 438.2432432432433, "W_D": 29.656499999999994, "J_D": 399.83503831744184, "W_D_1KI": 200.38175675675672, "J_D_1KI": 1353.930788897005} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..8de306d --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 7.055205345153809} + +tensor(indices=tensor([[ 669, 1688, 3903, ..., 301, 2622, 3927], + [ 917, 3411, 1684, ..., 2081, 4061, 1926]]), + values=tensor([0.2736, 0.7239, 0.2585, ..., 0.2919, 0.3728, 0.4655]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4009, 0.9007, 0.5186, ..., 0.3142, 0.7117, 0.4731]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 7.055205345153809 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '148', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.523432970046997} + +tensor(indices=tensor([[ 309, 267, 876, ..., 3188, 857, 4038], + [3693, 573, 4232, ..., 4444, 2306, 4988]]), + values=tensor([0.3778, 0.6566, 0.7350, ..., 0.6010, 0.4035, 0.6872]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.0419, 0.9227, 0.3733, ..., 0.4618, 0.0955, 0.8597]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.523432970046997 seconds + +tensor(indices=tensor([[ 309, 267, 876, ..., 3188, 857, 4038], + [3693, 573, 4232, ..., 4444, 2306, 4988]]), + values=tensor([0.3778, 0.6566, 0.7350, ..., 0.6010, 0.4035, 0.6872]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.0419, 0.9227, 0.3733, ..., 0.4618, 0.0955, 0.8597]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.523432970046997 seconds + +[40.32, 39.13, 39.5, 39.94, 39.0, 38.81, 38.87, 38.77, 39.18, 38.92] +[64.86] +13.482205867767334 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 148, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.523432970046997, 'TIME_S_1KI': 71.10427682464187, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 874.4558725833892, 'W': 64.86} +[40.32, 39.13, 39.5, 39.94, 39.0, 38.81, 38.87, 38.77, 39.18, 38.92, 39.62, 39.11, 39.06, 39.15, 38.9, 38.79, 38.9, 
38.7, 39.01, 39.64] +704.07 +35.203500000000005 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 148, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.523432970046997, 'TIME_S_1KI': 71.10427682464187, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 874.4558725833892, 'W': 64.86, 'J_1KI': 5908.48562556344, 'W_1KI': 438.2432432432433, 'W_D': 29.656499999999994, 'J_D': 399.83503831744184, 'W_D_1KI': 200.38175675675672, 'J_D_1KI': 1353.930788897005} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..3b66878 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 14.084239721298218, "TIME_S_1KI": 140.84239721298218, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1156.0599614834784, "W": 64.99, "J_1KI": 11560.599614834784, "W_1KI": 649.9, "W_D": 29.552499999999995, "J_D": 525.6879829472302, "W_D_1KI": 295.5249999999999, "J_D_1KI": 2955.249999999999} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..f07ad96 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 14.084239721298218} + +tensor(indices=tensor([[2995, 405, 706, ..., 3534, 1772, 2619], + [2314, 1216, 254, ..., 1103, 1569, 2146]]), + values=tensor([0.0577, 0.4996, 0.1005, ..., 0.6746, 0.9735, 0.0426]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.5947, 0.9912, 0.9587, ..., 0.3281, 0.5278, 0.1699]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 14.084239721298218 seconds + +tensor(indices=tensor([[2995, 405, 706, ..., 3534, 1772, 2619], + [2314, 1216, 254, ..., 1103, 1569, 2146]]), + values=tensor([0.0577, 0.4996, 0.1005, ..., 0.6746, 0.9735, 0.0426]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.5947, 0.9912, 0.9587, ..., 0.3281, 0.5278, 0.1699]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 14.084239721298218 seconds + +[40.05, 39.2, 39.37, 38.78, 38.88, 38.69, 38.86, 39.1, 39.36, 39.06] +[64.99] +17.78827452659607 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 14.084239721298218, 'TIME_S_1KI': 140.84239721298218, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1156.0599614834784, 'W': 64.99} +[40.05, 39.2, 39.37, 38.78, 38.88, 38.69, 38.86, 39.1, 39.36, 39.06, 39.59, 38.79, 38.95, 38.79, 39.17, 39.23, 39.16, 44.39, 39.27, 38.82] +708.75 +35.4375 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 14.084239721298218, 'TIME_S_1KI': 140.84239721298218, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1156.0599614834784, 'W': 64.99, 'J_1KI': 11560.599614834784, 'W_1KI': 649.9, 'W_D': 29.552499999999995, 'J_D': 525.6879829472302, 'W_D_1KI': 295.5249999999999, 'J_D_1KI': 2955.249999999999} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..0be25ea --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 21.10159134864807, "TIME_S_1KI": 211.0159134864807, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1652.6831939053536, "W": 65.39, "J_1KI": 16526.831939053536, "W_1KI": 653.9000000000001, "W_D": 29.835250000000002, "J_D": 754.0635611097217, "W_D_1KI": 298.3525, "J_D_1KI": 2983.525} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..1051597 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 21.10159134864807} + +tensor(indices=tensor([[2826, 152, 3489, ..., 2323, 1023, 332], + [ 230, 3021, 1127, ..., 753, 1305, 4821]]), + values=tensor([0.6067, 0.6391, 0.4435, ..., 0.9765, 0.8575, 0.4087]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.2429, 0.9350, 0.6268, ..., 0.5029, 0.5029, 0.5969]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 21.10159134864807 seconds + +tensor(indices=tensor([[2826, 152, 3489, ..., 2323, 1023, 332], + [ 230, 3021, 1127, ..., 753, 1305, 4821]]), + values=tensor([0.6067, 0.6391, 0.4435, ..., 0.9765, 0.8575, 0.4087]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.2429, 0.9350, 0.6268, ..., 0.5029, 0.5029, 0.5969]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 21.10159134864807 seconds + +[39.51, 38.75, 38.83, 38.95, 38.92, 38.88, 38.93, 39.29, 40.27, 38.76] +[65.39] +25.274249792099 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': 
[5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 21.10159134864807, 'TIME_S_1KI': 211.0159134864807, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1652.6831939053536, 'W': 65.39} +[39.51, 38.75, 38.83, 38.95, 38.92, 38.88, 38.93, 39.29, 40.27, 38.76, 39.94, 39.28, 39.17, 38.89, 39.2, 38.96, 40.21, 44.57, 39.41, 38.96] +711.095 +35.55475 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 21.10159134864807, 'TIME_S_1KI': 211.0159134864807, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1652.6831939053536, 'W': 65.39, 'J_1KI': 16526.831939053536, 'W_1KI': 653.9000000000001, 'W_D': 29.835250000000002, 'J_D': 754.0635611097217, 'W_D_1KI': 298.3525, 'J_D_1KI': 2983.525} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..629775e --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 28.182029962539673, "TIME_S_1KI": 281.82029962539673, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2137.3513273501394, "W": 65.19, "J_1KI": 21373.513273501394, "W_1KI": 651.9, "W_D": 29.083749999999995, "J_D": 953.554098279178, "W_D_1KI": 290.8375, "J_D_1KI": 2908.375} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..c5c5ef5 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 28.182029962539673} + +tensor(indices=tensor([[3321, 4385, 2586, ..., 2930, 4100, 2158], + [1802, 1344, 1295, ..., 3953, 473, 2788]]), + values=tensor([0.0323, 0.9382, 0.5443, ..., 0.0239, 0.6426, 0.9379]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.7540, 0.7351, 0.1003, ..., 0.5551, 0.3837, 0.7572]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 28.182029962539673 seconds + +tensor(indices=tensor([[3321, 4385, 2586, ..., 2930, 4100, 2158], + [1802, 1344, 1295, ..., 3953, 473, 2788]]), + values=tensor([0.0323, 0.9382, 0.5443, ..., 0.0239, 0.6426, 0.9379]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.7540, 0.7351, 0.1003, ..., 0.5551, 0.3837, 0.7572]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 28.182029962539673 seconds + +[39.8, 38.95, 39.61, 45.22, 49.41, 39.1, 39.59, 38.86, 
39.7, 39.57] +[65.19] +32.78649067878723 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 28.182029962539673, 'TIME_S_1KI': 281.82029962539673, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2137.3513273501394, 'W': 65.19} +[39.8, 38.95, 39.61, 45.22, 49.41, 39.1, 39.59, 38.86, 39.7, 39.57, 39.56, 38.91, 39.07, 38.88, 39.14, 38.85, 39.27, 39.37, 39.1, 39.26] +722.125 +36.10625 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 28.182029962539673, 'TIME_S_1KI': 281.82029962539673, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2137.3513273501394, 'W': 65.19, 'J_1KI': 21373.513273501394, 'W_1KI': 651.9, 'W_D': 29.083749999999995, 'J_D': 953.554098279178, 'W_D_1KI': 290.8375, 'J_D_1KI': 2908.375} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..637a4a1 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 35.5884153842926, "TIME_S_1KI": 355.884153842926, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2621.104042739868, "W": 64.96, "J_1KI": 26211.04042739868, "W_1KI": 649.5999999999999, "W_D": 29.726, "J_D": 1199.42947620821, "W_D_1KI": 297.26, "J_D_1KI": 2972.6} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..5ba1765 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 35.5884153842926} + +tensor(indices=tensor([[1376, 4959, 2220, ..., 1740, 594, 3921], + [2076, 4888, 393, ..., 591, 2057, 771]]), + values=tensor([0.4463, 0.3695, 0.8491, ..., 0.5015, 0.2890, 0.6757]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.7940, 0.0782, 0.3674, ..., 0.7831, 0.0641, 0.5356]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 35.5884153842926 seconds + +tensor(indices=tensor([[1376, 4959, 2220, ..., 1740, 594, 3921], + [2076, 4888, 393, ..., 591, 2057, 771]]), + values=tensor([0.4463, 0.3695, 0.8491, ..., 0.5015, 0.2890, 0.6757]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.7940, 0.0782, 0.3674, ..., 0.7831, 0.0641, 0.5356]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 
+Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 35.5884153842926 seconds + +[39.55, 39.28, 38.86, 39.09, 38.9, 38.86, 39.75, 39.25, 39.29, 39.19] +[64.96] +40.34950804710388 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 35.5884153842926, 'TIME_S_1KI': 355.884153842926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2621.104042739868, 'W': 64.96} +[39.55, 39.28, 38.86, 39.09, 38.9, 38.86, 39.75, 39.25, 39.29, 39.19, 39.46, 38.85, 39.07, 38.88, 38.96, 39.86, 39.28, 39.07, 38.9, 38.86] +704.68 +35.233999999999995 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 35.5884153842926, 'TIME_S_1KI': 355.884153842926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2621.104042739868, 'W': 64.96, 'J_1KI': 26211.04042739868, 'W_1KI': 649.5999999999999, 'W_D': 29.726, 'J_D': 1199.42947620821, 'W_D_1KI': 297.26, 'J_D_1KI': 2972.6} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..91a48e8 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 904172, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.473444938659668, "TIME_S_1KI": 0.011583465246280207, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 839.7985753679276, "W": 65.14, "J_1KI": 0.9288040056183199, "W_1KI": 0.07204381467242958, "W_D": 29.481250000000003, "J_D": 380.078473289311, "W_D_1KI": 0.0326057984542764, "J_D_1KI": 3.6061499863163644e-05} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..adf1553 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,456 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.010354042053222656} + +tensor(indices=tensor([[1603, 2949, 4696, 4335, 1584, 4763, 3698, 365, 4843, + 43, 2747, 2544, 2995, 2357, 3659, 2467, 2875, 1941, + 562, 3574, 331, 1292, 2849, 1742, 1542, 4827, 3248, + 3498, 1068, 4943, 3246, 1221, 2910, 4713, 735, 3220, + 1550, 80, 2322, 2416, 2760, 1002, 11, 1286, 3721, + 4395, 3743, 538, 1871, 3411, 3474, 3308, 3144, 3013, + 3596, 3184, 2134, 3316, 1443, 3269, 3263, 1911, 263, + 4187, 2295, 2848, 2042, 666, 4313, 756, 4123, 3090, + 1416, 4827, 3465, 3314, 4, 1756, 46, 2552, 1565, + 2890, 2007, 3934, 1828, 1838, 1932, 2850, 3462, 3018, + 4265, 2009, 4539, 771, 3290, 4602, 1782, 201, 2227, + 4909, 225, 4266, 3034, 3023, 3751, 3862, 2588, 4986, + 
1263, 570, 151, 2102, 371, 613, 399, 2620, 3291, + 4799, 2434, 1650, 1600, 688, 56, 325, 750, 3713, + 3002, 1969, 3315, 4447, 3093, 40, 2965, 3324, 4095, + 558, 670, 1949, 2117, 1535, 2778, 4608, 1213, 4974, + 4516, 3122, 1598, 274, 3276, 2570, 4067, 4217, 4177, + 4957, 4157, 3371, 3043, 4966, 2786, 2184, 2827, 1541, + 4595, 1547, 4084, 4386, 1084, 3308, 2946, 1531, 3201, + 4426, 1605, 235, 702, 4388, 2234, 1723, 1284, 3783, + 4688, 3700, 4968, 2774, 1985, 3850, 1699, 2349, 347, + 4913, 3253, 2292, 789, 4861, 3938, 3863, 2419, 3648, + 1360, 4692, 3614, 341, 3062, 4336, 283, 931, 4621, + 1039, 1116, 2346, 3451, 1594, 2313, 3096, 2271, 81, + 2630, 1875, 1672, 2227, 3155, 4812, 3916, 1751, 2613, + 1904, 2081, 608, 2991, 2611, 4927, 4643, 1082, 489, + 1114, 1704, 2789, 309, 1817, 668, 4047, 1103, 4868, + 1721, 2484, 2461, 1697, 941, 770, 2115], + [ 425, 3833, 4267, 258, 1667, 536, 317, 4057, 1968, + 3705, 3272, 136, 2833, 779, 2812, 1006, 4714, 3499, + 3456, 1176, 4909, 2217, 101, 971, 2764, 3551, 1791, + 4888, 3832, 3290, 4523, 1291, 1635, 2311, 4438, 4840, + 219, 1603, 3207, 3253, 1065, 2419, 2439, 1752, 630, + 1405, 1501, 4929, 2237, 1597, 3415, 1124, 99, 2927, + 3197, 4567, 1429, 4166, 2934, 1882, 123, 2445, 335, + 3573, 3874, 3728, 4234, 840, 3132, 3339, 1062, 640, + 946, 4212, 4094, 2200, 658, 345, 4247, 4356, 1463, + 4360, 148, 4209, 2815, 4329, 608, 1874, 3811, 4568, + 3394, 1796, 4232, 3421, 1590, 4941, 981, 742, 3906, + 1118, 1393, 1785, 77, 2162, 3561, 4398, 25, 784, + 2182, 1076, 4604, 4157, 4246, 2756, 2345, 3949, 2951, + 4407, 1883, 2071, 1996, 3080, 97, 2161, 196, 1436, + 3238, 1542, 4599, 2224, 3256, 1325, 4649, 4262, 511, + 4110, 1720, 4489, 2118, 814, 2449, 2777, 4780, 2232, + 1882, 2967, 1021, 1853, 1965, 4076, 3648, 2712, 4271, + 1819, 4769, 1251, 4529, 1660, 2895, 784, 4941, 908, + 3350, 2366, 2727, 4362, 4585, 1775, 1321, 1546, 4049, + 2770, 3762, 3687, 167, 365, 4862, 3053, 1125, 2497, + 2162, 797, 819, 2389, 1001, 767, 4442, 2780, 3048, + 2825, 3554, 2397, 1257, 1787, 4925, 482, 1624, 4513, + 4693, 3051, 1323, 2675, 4492, 1308, 213, 1412, 3382, + 2676, 2218, 1786, 3339, 2573, 1644, 2493, 103, 337, + 4219, 2185, 2428, 4492, 2503, 125, 1342, 2761, 1111, + 4674, 1565, 3974, 533, 4171, 2357, 3316, 3713, 1807, + 46, 4972, 1369, 927, 1797, 418, 3331, 1101, 2139, + 3286, 429, 4733, 1650, 892, 4157, 4819]]), + values=tensor([5.8821e-02, 7.2877e-01, 8.6256e-01, 4.7990e-01, + 9.6415e-01, 8.6847e-02, 4.3990e-01, 6.4151e-01, + 4.3814e-01, 7.2219e-02, 7.3349e-01, 4.5085e-01, + 2.3147e-01, 9.4994e-01, 8.5773e-01, 5.8984e-01, + 6.6476e-01, 4.3313e-01, 7.7211e-01, 3.2369e-01, + 5.5827e-01, 6.1638e-01, 5.9970e-02, 9.1889e-01, + 7.7084e-01, 1.3860e-01, 4.5617e-01, 7.2404e-01, + 5.5478e-01, 7.2454e-01, 6.3040e-01, 8.2171e-01, + 1.7074e-01, 3.8624e-01, 5.8001e-01, 4.2516e-01, + 3.2033e-02, 7.2873e-01, 2.4802e-01, 6.5435e-02, + 5.2867e-01, 7.9558e-02, 7.8474e-02, 8.7211e-01, + 5.7938e-01, 7.9947e-01, 4.5941e-01, 3.4268e-01, + 7.2318e-01, 7.8450e-01, 6.6355e-01, 5.3281e-01, + 5.7218e-01, 9.6615e-01, 8.4047e-01, 3.9325e-01, + 3.1042e-02, 1.4304e-01, 3.2910e-01, 6.2197e-01, + 6.8746e-01, 1.3247e-01, 4.7949e-01, 8.1996e-02, + 9.1754e-01, 5.6972e-01, 9.4374e-01, 7.9968e-01, + 9.8468e-02, 4.1599e-01, 5.5299e-01, 3.8108e-01, + 6.2635e-01, 6.0242e-01, 9.3384e-01, 2.9018e-01, + 7.7298e-01, 7.6908e-01, 3.9001e-01, 5.6070e-01, + 5.7814e-01, 8.7663e-01, 6.7451e-01, 6.2109e-01, + 7.8072e-01, 2.5228e-01, 7.9124e-02, 3.5598e-01, + 7.2237e-01, 1.1667e-01, 5.3536e-01, 8.5915e-01, + 3.3326e-01, 9.5557e-01, 
6.4124e-01, 2.3127e-01, + 5.0876e-01, 3.2360e-01, 1.8264e-01, 4.4381e-01, + 3.1297e-01, 5.9294e-01, 7.7004e-01, 3.6743e-01, + 5.5192e-01, 3.3826e-01, 8.2609e-01, 9.4822e-01, + 8.3040e-01, 7.1493e-01, 2.0937e-01, 3.9542e-01, + 4.6478e-01, 2.6336e-01, 5.3011e-01, 7.0245e-01, + 1.6665e-01, 7.9013e-03, 4.1032e-01, 1.2554e-01, + 6.9721e-01, 2.3792e-01, 8.3459e-01, 7.2520e-02, + 4.2206e-02, 2.7216e-01, 7.3532e-01, 2.1989e-01, + 9.3151e-01, 5.4765e-01, 1.5481e-01, 2.8522e-01, + 9.0225e-01, 6.7932e-01, 6.7205e-01, 9.0367e-02, + 2.6025e-01, 2.6801e-01, 2.9794e-01, 9.7466e-01, + 2.3099e-02, 9.1017e-01, 2.9781e-02, 3.3622e-01, + 6.4011e-01, 1.9705e-01, 8.4920e-02, 7.6412e-01, + 5.6014e-01, 1.1270e-01, 1.4449e-01, 4.5982e-01, + 4.4161e-03, 9.8934e-01, 2.1218e-02, 6.4924e-01, + 1.3505e-01, 1.4206e-01, 1.7495e-01, 9.5293e-01, + 6.0917e-01, 8.6237e-01, 7.4702e-01, 5.5015e-02, + 8.6071e-01, 1.9189e-01, 6.5521e-01, 2.5991e-01, + 7.9563e-02, 8.6973e-02, 8.6937e-01, 6.6764e-02, + 5.8737e-01, 4.9001e-01, 1.7851e-01, 8.6918e-01, + 9.9335e-01, 9.3983e-01, 7.0431e-01, 9.9576e-01, + 9.8150e-02, 6.2950e-01, 7.0282e-01, 4.3005e-01, + 9.3674e-01, 4.8207e-01, 7.5158e-01, 4.9534e-01, + 7.8962e-01, 1.0405e-01, 2.2288e-01, 4.4022e-01, + 4.5067e-01, 4.6502e-01, 1.6218e-01, 6.8974e-02, + 6.6675e-02, 9.0392e-01, 2.2613e-01, 3.9165e-01, + 6.1431e-01, 2.4227e-01, 1.6047e-02, 4.9994e-02, + 2.7464e-01, 4.4304e-01, 2.4886e-01, 1.7759e-01, + 9.2744e-01, 7.6114e-01, 9.2714e-01, 8.2017e-01, + 7.5619e-01, 5.3200e-02, 6.0223e-01, 5.6476e-01, + 6.6509e-01, 8.0844e-01, 9.3994e-01, 1.6134e-01, + 6.0150e-01, 5.7204e-01, 4.6568e-01, 7.0598e-04, + 6.1898e-01, 8.9304e-01, 6.8533e-01, 4.8979e-01, + 1.1007e-01, 3.6850e-01, 9.5254e-01, 2.1422e-01, + 8.2153e-01, 7.4369e-01, 9.1635e-01, 8.3779e-01, + 1.1480e-01, 4.4598e-01, 2.8227e-01, 7.9265e-01, + 9.5768e-01, 6.3216e-01, 1.1463e-01, 8.2642e-02, + 6.0614e-01, 8.9561e-01, 5.3237e-01, 8.3212e-01, + 5.7920e-01, 5.1976e-01]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.2459, 0.7524, 0.3197, ..., 0.4178, 0.7536, 0.5140]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.010354042053222656 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '101409', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.1776456832885742} + +tensor(indices=tensor([[4567, 4509, 1294, 4951, 4164, 2515, 2232, 2798, 1969, + 4280, 2475, 2239, 4646, 3139, 969, 1329, 126, 2636, + 3366, 4875, 4552, 236, 2526, 1121, 4082, 3211, 3162, + 2211, 1015, 749, 2336, 371, 1990, 2815, 1455, 2389, + 2888, 3923, 2427, 2927, 4121, 4035, 4137, 746, 3281, + 1277, 4816, 2715, 2966, 2270, 3641, 1354, 4734, 1719, + 2011, 1910, 4156, 4172, 4163, 2574, 2551, 3522, 282, + 1217, 2471, 391, 35, 3998, 2943, 2308, 4529, 1979, + 158, 319, 2744, 2921, 161, 2039, 1159, 3037, 81, + 1713, 562, 944, 213, 1272, 2904, 384, 2320, 179, + 543, 1112, 3308, 1003, 4139, 2335, 3546, 2546, 4859, + 477, 1780, 207, 242, 2189, 3401, 4507, 2045, 239, + 3432, 171, 3886, 2930, 2756, 4497, 2980, 4388, 2899, + 760, 4033, 1347, 883, 2025, 16, 4472, 3322, 516, + 4844, 3447, 440, 727, 3215, 3864, 2269, 3813, 810, + 957, 2957, 4769, 874, 3146, 1150, 2500, 666, 2671, + 3368, 51, 4277, 3579, 
2561, 2525, 1707, 2625, 1796, + 4076, 2221, 1222, 2865, 4059, 3409, 4256, 14, 804, + 1551, 4372, 4500, 4398, 3603, 822, 1253, 4306, 976, + 1159, 1899, 2241, 2004, 2751, 330, 4044, 2281, 4425, + 1243, 2635, 1619, 3463, 229, 3884, 2973, 3161, 701, + 1492, 2221, 4315, 4395, 670, 1953, 1964, 3344, 3967, + 1339, 139, 1020, 3081, 3707, 2088, 914, 990, 4431, + 2404, 1460, 1953, 4178, 3225, 3855, 3899, 1467, 2765, + 4804, 4639, 2464, 3362, 2518, 794, 1237, 1718, 3488, + 1443, 934, 3216, 2400, 261, 3993, 492, 1805, 3497, + 855, 1048, 895, 1926, 2985, 2800, 1311, 2361, 379, + 4620, 2170, 1637, 1412, 2988, 4786, 261], + [3763, 271, 3832, 3949, 808, 853, 5, 125, 3229, + 3982, 582, 4681, 1974, 3202, 4903, 3178, 2715, 433, + 1284, 4300, 4482, 381, 1013, 745, 340, 3963, 85, + 44, 3255, 3647, 3728, 1938, 2072, 3954, 1580, 4856, + 1650, 521, 1596, 1159, 2396, 2516, 4525, 2014, 2513, + 4591, 4832, 4549, 713, 4544, 4177, 117, 4657, 843, + 3756, 4046, 4721, 3432, 2202, 776, 793, 2892, 371, + 4194, 3936, 3575, 3372, 3883, 3764, 4221, 1030, 3099, + 4766, 3649, 680, 1640, 582, 284, 2833, 776, 4074, + 3378, 2470, 1452, 3002, 412, 1757, 4236, 2961, 3004, + 1130, 3382, 2940, 2805, 4581, 3972, 3715, 4425, 4993, + 4255, 4742, 2819, 3894, 2055, 3149, 1283, 3328, 2612, + 3094, 1101, 287, 2549, 2470, 4103, 3154, 3651, 373, + 4522, 3998, 4926, 1203, 1588, 1753, 2588, 1759, 2691, + 1663, 1468, 505, 312, 505, 593, 4683, 1707, 2666, + 1391, 3374, 4584, 2854, 473, 1229, 2079, 4144, 3767, + 4107, 1852, 3190, 4491, 1082, 1132, 4165, 4549, 4538, + 1288, 1312, 1004, 228, 32, 2176, 628, 2628, 4045, + 481, 4406, 2521, 1646, 4989, 990, 2305, 1710, 1475, + 1214, 1473, 3454, 3337, 3884, 1021, 2383, 3978, 1125, + 4610, 3127, 1230, 918, 1978, 3156, 3014, 3365, 4920, + 4025, 3207, 1669, 1713, 4877, 3110, 2083, 1591, 1004, + 3927, 2181, 610, 1182, 813, 4000, 1859, 2859, 1608, + 2141, 4709, 3316, 2970, 584, 4904, 4806, 1338, 3776, + 1292, 64, 3549, 56, 4947, 1904, 4252, 4280, 4543, + 2674, 426, 450, 3178, 3874, 2413, 564, 4544, 817, + 767, 2397, 2799, 4559, 2961, 528, 1329, 2195, 1497, + 863, 4641, 100, 143, 3326, 1695, 701]]), + values=tensor([0.2392, 0.9119, 0.4062, 0.5190, 0.4821, 0.0610, 0.9592, + 0.1536, 0.2535, 0.9810, 0.2809, 0.3942, 0.7336, 0.4933, + 0.0949, 0.6264, 0.9395, 0.9773, 0.6165, 0.2323, 0.3165, + 0.3006, 0.0779, 0.9140, 0.9754, 0.7870, 0.3070, 0.7892, + 0.1046, 0.6732, 0.5493, 0.4220, 0.4787, 0.5540, 0.1893, + 0.9815, 0.6207, 0.1811, 0.6356, 0.8509, 0.0446, 0.3714, + 0.8728, 0.4223, 0.1128, 0.0517, 0.4691, 0.8326, 0.9354, + 0.6919, 0.1210, 0.8508, 0.0330, 0.3340, 0.0613, 0.6850, + 0.5073, 0.3125, 0.6065, 0.6729, 0.9617, 0.7711, 0.2555, + 0.5224, 0.7602, 0.1278, 0.3644, 0.4523, 0.5818, 0.2735, + 0.9397, 0.8974, 0.1178, 0.6492, 0.8042, 0.1127, 0.1135, + 0.4220, 0.4739, 0.0328, 0.1174, 0.5396, 0.5931, 0.5423, + 0.2423, 0.3893, 0.6323, 0.6509, 0.4958, 0.7631, 0.4839, + 0.4913, 0.3739, 0.0699, 0.7795, 0.0888, 0.4786, 0.4111, + 0.0273, 0.0710, 0.6162, 0.2638, 0.9543, 0.2882, 0.5006, + 0.9168, 0.0455, 0.6476, 0.7352, 0.2421, 0.9543, 0.9159, + 0.0924, 0.1731, 0.4680, 0.9540, 0.6762, 0.6079, 0.2004, + 0.5460, 0.3224, 0.4460, 0.6368, 0.2968, 0.7477, 0.1648, + 0.6923, 0.4222, 0.2172, 0.8694, 0.2537, 0.2780, 0.4603, + 0.9922, 0.2288, 0.2006, 0.6682, 0.6024, 0.9103, 0.2968, + 0.1685, 0.7895, 0.4008, 0.3128, 0.4151, 0.1729, 0.7136, + 0.2399, 0.6242, 0.1418, 0.4114, 0.0854, 0.7012, 0.1266, + 0.1263, 0.9055, 0.2471, 0.6617, 0.7676, 0.5070, 0.9057, + 0.3361, 0.7219, 0.7151, 0.0066, 0.3739, 0.7592, 0.8983, + 0.7234, 0.9573, 0.8337, 
0.7375, 0.2027, 0.9803, 0.5916, + 0.7801, 0.5939, 0.0812, 0.4447, 0.7456, 0.6059, 0.8777, + 0.5599, 0.6185, 0.1675, 0.5279, 0.8239, 0.1571, 0.0390, + 0.8342, 0.9277, 0.9851, 0.5140, 0.6550, 0.5465, 0.3275, + 0.0391, 0.2305, 0.8353, 0.2662, 0.4157, 0.1351, 0.5811, + 0.7984, 0.2608, 0.8386, 0.7933, 0.5607, 0.5645, 0.8051, + 0.3449, 0.8885, 0.7558, 0.4042, 0.7243, 0.4760, 0.0781, + 0.0496, 0.1508, 0.4084, 0.6241, 0.9203, 0.8717, 0.2717, + 0.6603, 0.1094, 0.9357, 0.5633, 0.5403, 0.4104, 0.2156, + 0.2450, 0.8410, 0.6731, 0.6959, 0.6772, 0.4322, 0.6387, + 0.0841, 0.5455, 0.0092, 0.6620, 0.0458, 0.9126, 0.2442, + 0.8228, 0.8536, 0.8916, 0.7682, 0.3605]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.3125, 0.6824, 0.5650, ..., 0.9591, 0.3610, 0.4308]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.1776456832885742 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '904172', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.473444938659668} + +tensor(indices=tensor([[1068, 4605, 4249, 3022, 490, 4086, 2031, 1698, 431, + 1175, 4388, 4781, 1088, 816, 728, 2463, 1179, 3774, + 4142, 403, 414, 2400, 2391, 1374, 4767, 15, 243, + 3610, 489, 3817, 4435, 523, 3900, 2264, 1958, 1649, + 3694, 2103, 3491, 3023, 810, 1794, 422, 4380, 4277, + 2556, 2805, 2034, 2932, 3276, 4483, 2802, 3224, 4928, + 4569, 4465, 2182, 3700, 4626, 1982, 3705, 2181, 4921, + 2072, 487, 2925, 924, 3094, 890, 2026, 1111, 3985, + 4318, 272, 1278, 4763, 4852, 1973, 1452, 2278, 954, + 4141, 3710, 46, 2871, 1203, 871, 3499, 2145, 4034, + 1196, 1458, 3556, 3257, 4552, 3914, 2670, 3041, 2490, + 1448, 2926, 3866, 764, 1298, 3005, 4352, 4134, 1999, + 1638, 991, 3191, 4737, 3653, 4957, 455, 4318, 212, + 1642, 3923, 3262, 104, 1383, 2552, 3097, 3636, 920, + 452, 592, 2392, 3027, 2120, 939, 832, 1134, 417, + 4958, 1289, 503, 2395, 1044, 3549, 640, 2396, 3348, + 4871, 2946, 4303, 2553, 4181, 4202, 3, 4107, 1547, + 2561, 4689, 3081, 665, 3871, 2279, 4867, 3682, 1031, + 1140, 2967, 2719, 183, 311, 4747, 4473, 2472, 4698, + 3288, 3011, 3332, 4568, 3330, 3258, 493, 1812, 747, + 537, 1971, 4067, 1504, 4266, 2099, 272, 4516, 3709, + 3181, 3104, 1114, 1324, 617, 1835, 1316, 290, 2865, + 2133, 768, 4596, 1303, 2357, 2219, 3691, 1070, 1361, + 3000, 3730, 3405, 2652, 1632, 4135, 4505, 2110, 278, + 4484, 3208, 848, 3330, 4954, 1239, 3989, 488, 1119, + 2152, 273, 1223, 1820, 4430, 2158, 4322, 4110, 1907, + 2089, 1319, 1135, 1916, 1856, 1949, 2151, 2156, 623, + 1727, 210, 2330, 4084, 690, 4678, 703], + [ 855, 4517, 4041, 3550, 447, 330, 3828, 820, 4159, + 2862, 1318, 4114, 1809, 4302, 4955, 1189, 873, 183, + 443, 2038, 1975, 1085, 1751, 3431, 267, 3324, 1417, + 3750, 2157, 3141, 3314, 1385, 4690, 3649, 597, 2043, + 4336, 295, 143, 2057, 882, 3359, 3805, 1977, 1233, + 3680, 2930, 4275, 957, 4697, 2722, 3651, 1713, 928, + 1736, 2513, 2240, 336, 2380, 2513, 4193, 1722, 4294, + 2744, 3404, 4080, 4473, 2309, 4372, 3412, 4639, 4422, + 2071, 1739, 675, 2069, 2877, 887, 2944, 1132, 3259, + 1203, 1992, 754, 2342, 1930, 3901, 1629, 3080, 1923, + 522, 1788, 1879, 3016, 4766, 2653, 4232, 4888, 3074, + 1625, 3702, 198, 3887, 2973, 2735, 1786, 4434, 1918, + 2729, 1752, 2218, 2042, 
3098, 2255, 3864, 4917, 2698, + 1273, 518, 2730, 1357, 2981, 286, 1259, 2345, 4035, + 2858, 2670, 2658, 4111, 3665, 672, 2874, 1363, 1628, + 247, 4570, 4133, 2830, 2725, 2274, 4815, 4569, 4635, + 2554, 2419, 4973, 3355, 884, 147, 1375, 3776, 4653, + 3838, 1725, 3722, 2830, 3133, 1070, 1155, 2808, 488, + 4269, 1207, 52, 3939, 3973, 2819, 2693, 2828, 3491, + 1859, 296, 262, 904, 4967, 646, 1590, 429, 1638, + 3431, 104, 3826, 588, 781, 3485, 2749, 2750, 3222, + 1115, 1903, 4123, 1808, 4116, 1768, 3988, 349, 1145, + 2100, 1321, 1700, 2969, 2661, 303, 2263, 3748, 800, + 4682, 1767, 15, 744, 3772, 1998, 2268, 1755, 3317, + 103, 4403, 2406, 225, 3566, 2567, 1031, 3425, 1929, + 4917, 4763, 1416, 3701, 3675, 2233, 3269, 1094, 4143, + 4261, 13, 4462, 1024, 3861, 1080, 4217, 4661, 2231, + 3046, 3201, 962, 4085, 2116, 4064, 4225]]), + values=tensor([0.0969, 0.9696, 0.4863, 0.5775, 0.5153, 0.6090, 0.2232, + 0.1235, 0.7624, 0.6849, 0.1871, 0.9798, 0.3225, 0.9082, + 0.5749, 0.8325, 0.8077, 0.2076, 0.2205, 0.3135, 0.9016, + 0.4071, 0.4029, 0.9984, 0.0332, 0.9320, 0.4739, 0.1303, + 0.0338, 0.7202, 0.3844, 0.0133, 0.1754, 0.8108, 0.0434, + 0.7641, 0.5971, 0.8331, 0.4633, 0.4635, 0.6170, 0.3254, + 0.9355, 0.1270, 0.3493, 0.2632, 0.6609, 0.1943, 0.2143, + 0.7971, 0.1336, 0.6556, 0.8691, 0.5663, 0.7858, 0.1666, + 0.1075, 0.8104, 0.3512, 0.4083, 0.6914, 0.3116, 0.1566, + 0.4165, 0.7022, 0.4872, 0.2927, 0.8959, 0.5258, 0.5602, + 0.1244, 0.0753, 0.1455, 0.5189, 0.6781, 0.9198, 0.5378, + 0.3765, 0.8221, 0.9749, 0.8036, 0.7468, 0.1959, 0.5385, + 0.4103, 0.6119, 0.4250, 0.9707, 0.6088, 0.0329, 0.1117, + 0.8626, 0.8100, 0.8899, 0.0022, 0.6901, 0.9957, 0.4347, + 0.9679, 0.1291, 0.5270, 0.5468, 0.8700, 0.6871, 0.2119, + 0.7839, 0.5795, 0.4700, 0.4003, 0.1187, 0.5453, 0.7341, + 0.2561, 0.4480, 0.9785, 0.1000, 0.7675, 0.0094, 0.3072, + 0.4307, 0.2125, 0.8084, 0.3457, 0.0662, 0.7637, 0.8387, + 0.3128, 0.2311, 0.4264, 0.3172, 0.5268, 0.4733, 0.8350, + 0.7184, 0.4629, 0.5406, 0.3718, 0.2610, 0.5113, 0.8587, + 0.1007, 0.5958, 0.8824, 0.6888, 0.0320, 0.8953, 0.2968, + 0.3991, 0.4757, 0.2754, 0.5168, 0.7530, 0.7652, 0.7422, + 0.9288, 0.1274, 0.7465, 0.5787, 0.3838, 0.1203, 0.5466, + 0.3771, 0.3582, 0.5724, 0.0263, 0.1913, 0.4990, 0.8701, + 0.3258, 0.2866, 0.6643, 0.4819, 0.9032, 0.1817, 0.5537, + 0.7857, 0.4698, 0.9341, 0.6297, 0.7237, 0.9581, 0.2294, + 0.7410, 0.8136, 0.7895, 0.7463, 0.6784, 0.6740, 0.2672, + 0.2458, 0.2839, 0.5887, 0.5619, 0.3782, 0.9625, 0.2938, + 0.8381, 0.6910, 0.0535, 0.5779, 0.9358, 0.9292, 0.2289, + 0.9541, 0.8123, 0.1785, 0.2759, 0.9658, 0.5868, 0.2623, + 0.2484, 0.7792, 0.9078, 0.3283, 0.4956, 0.4941, 0.4122, + 0.1233, 0.9259, 0.0669, 0.4967, 0.5240, 0.4274, 0.4746, + 0.7490, 0.1508, 0.4805, 0.2981, 0.2983, 0.2382, 0.9951, + 0.9914, 0.7717, 0.2467, 0.8169, 0.6649, 0.8060, 0.2746, + 0.3619, 0.7712, 0.1675, 0.1290, 0.3804, 0.9021, 0.4784, + 0.4155, 0.5611, 0.9918, 0.9062, 0.6267]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.8891, 0.1750, 0.3464, ..., 0.7080, 0.1369, 0.7419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.473444938659668 seconds + +tensor(indices=tensor([[1068, 4605, 4249, 3022, 490, 4086, 2031, 1698, 431, + 1175, 4388, 4781, 1088, 816, 728, 2463, 1179, 3774, + 4142, 403, 414, 2400, 2391, 1374, 4767, 15, 243, + 3610, 489, 3817, 4435, 523, 3900, 2264, 1958, 1649, + 3694, 2103, 3491, 3023, 810, 1794, 422, 4380, 4277, + 2556, 2805, 2034, 2932, 3276, 4483, 2802, 3224, 4928, + 
4569, 4465, 2182, 3700, 4626, 1982, 3705, 2181, 4921, + 2072, 487, 2925, 924, 3094, 890, 2026, 1111, 3985, + 4318, 272, 1278, 4763, 4852, 1973, 1452, 2278, 954, + 4141, 3710, 46, 2871, 1203, 871, 3499, 2145, 4034, + 1196, 1458, 3556, 3257, 4552, 3914, 2670, 3041, 2490, + 1448, 2926, 3866, 764, 1298, 3005, 4352, 4134, 1999, + 1638, 991, 3191, 4737, 3653, 4957, 455, 4318, 212, + 1642, 3923, 3262, 104, 1383, 2552, 3097, 3636, 920, + 452, 592, 2392, 3027, 2120, 939, 832, 1134, 417, + 4958, 1289, 503, 2395, 1044, 3549, 640, 2396, 3348, + 4871, 2946, 4303, 2553, 4181, 4202, 3, 4107, 1547, + 2561, 4689, 3081, 665, 3871, 2279, 4867, 3682, 1031, + 1140, 2967, 2719, 183, 311, 4747, 4473, 2472, 4698, + 3288, 3011, 3332, 4568, 3330, 3258, 493, 1812, 747, + 537, 1971, 4067, 1504, 4266, 2099, 272, 4516, 3709, + 3181, 3104, 1114, 1324, 617, 1835, 1316, 290, 2865, + 2133, 768, 4596, 1303, 2357, 2219, 3691, 1070, 1361, + 3000, 3730, 3405, 2652, 1632, 4135, 4505, 2110, 278, + 4484, 3208, 848, 3330, 4954, 1239, 3989, 488, 1119, + 2152, 273, 1223, 1820, 4430, 2158, 4322, 4110, 1907, + 2089, 1319, 1135, 1916, 1856, 1949, 2151, 2156, 623, + 1727, 210, 2330, 4084, 690, 4678, 703], + [ 855, 4517, 4041, 3550, 447, 330, 3828, 820, 4159, + 2862, 1318, 4114, 1809, 4302, 4955, 1189, 873, 183, + 443, 2038, 1975, 1085, 1751, 3431, 267, 3324, 1417, + 3750, 2157, 3141, 3314, 1385, 4690, 3649, 597, 2043, + 4336, 295, 143, 2057, 882, 3359, 3805, 1977, 1233, + 3680, 2930, 4275, 957, 4697, 2722, 3651, 1713, 928, + 1736, 2513, 2240, 336, 2380, 2513, 4193, 1722, 4294, + 2744, 3404, 4080, 4473, 2309, 4372, 3412, 4639, 4422, + 2071, 1739, 675, 2069, 2877, 887, 2944, 1132, 3259, + 1203, 1992, 754, 2342, 1930, 3901, 1629, 3080, 1923, + 522, 1788, 1879, 3016, 4766, 2653, 4232, 4888, 3074, + 1625, 3702, 198, 3887, 2973, 2735, 1786, 4434, 1918, + 2729, 1752, 2218, 2042, 3098, 2255, 3864, 4917, 2698, + 1273, 518, 2730, 1357, 2981, 286, 1259, 2345, 4035, + 2858, 2670, 2658, 4111, 3665, 672, 2874, 1363, 1628, + 247, 4570, 4133, 2830, 2725, 2274, 4815, 4569, 4635, + 2554, 2419, 4973, 3355, 884, 147, 1375, 3776, 4653, + 3838, 1725, 3722, 2830, 3133, 1070, 1155, 2808, 488, + 4269, 1207, 52, 3939, 3973, 2819, 2693, 2828, 3491, + 1859, 296, 262, 904, 4967, 646, 1590, 429, 1638, + 3431, 104, 3826, 588, 781, 3485, 2749, 2750, 3222, + 1115, 1903, 4123, 1808, 4116, 1768, 3988, 349, 1145, + 2100, 1321, 1700, 2969, 2661, 303, 2263, 3748, 800, + 4682, 1767, 15, 744, 3772, 1998, 2268, 1755, 3317, + 103, 4403, 2406, 225, 3566, 2567, 1031, 3425, 1929, + 4917, 4763, 1416, 3701, 3675, 2233, 3269, 1094, 4143, + 4261, 13, 4462, 1024, 3861, 1080, 4217, 4661, 2231, + 3046, 3201, 962, 4085, 2116, 4064, 4225]]), + values=tensor([0.0969, 0.9696, 0.4863, 0.5775, 0.5153, 0.6090, 0.2232, + 0.1235, 0.7624, 0.6849, 0.1871, 0.9798, 0.3225, 0.9082, + 0.5749, 0.8325, 0.8077, 0.2076, 0.2205, 0.3135, 0.9016, + 0.4071, 0.4029, 0.9984, 0.0332, 0.9320, 0.4739, 0.1303, + 0.0338, 0.7202, 0.3844, 0.0133, 0.1754, 0.8108, 0.0434, + 0.7641, 0.5971, 0.8331, 0.4633, 0.4635, 0.6170, 0.3254, + 0.9355, 0.1270, 0.3493, 0.2632, 0.6609, 0.1943, 0.2143, + 0.7971, 0.1336, 0.6556, 0.8691, 0.5663, 0.7858, 0.1666, + 0.1075, 0.8104, 0.3512, 0.4083, 0.6914, 0.3116, 0.1566, + 0.4165, 0.7022, 0.4872, 0.2927, 0.8959, 0.5258, 0.5602, + 0.1244, 0.0753, 0.1455, 0.5189, 0.6781, 0.9198, 0.5378, + 0.3765, 0.8221, 0.9749, 0.8036, 0.7468, 0.1959, 0.5385, + 0.4103, 0.6119, 0.4250, 0.9707, 0.6088, 0.0329, 0.1117, + 0.8626, 0.8100, 0.8899, 0.0022, 0.6901, 0.9957, 0.4347, + 0.9679, 0.1291, 0.5270, 0.5468, 
0.8700, 0.6871, 0.2119, + 0.7839, 0.5795, 0.4700, 0.4003, 0.1187, 0.5453, 0.7341, + 0.2561, 0.4480, 0.9785, 0.1000, 0.7675, 0.0094, 0.3072, + 0.4307, 0.2125, 0.8084, 0.3457, 0.0662, 0.7637, 0.8387, + 0.3128, 0.2311, 0.4264, 0.3172, 0.5268, 0.4733, 0.8350, + 0.7184, 0.4629, 0.5406, 0.3718, 0.2610, 0.5113, 0.8587, + 0.1007, 0.5958, 0.8824, 0.6888, 0.0320, 0.8953, 0.2968, + 0.3991, 0.4757, 0.2754, 0.5168, 0.7530, 0.7652, 0.7422, + 0.9288, 0.1274, 0.7465, 0.5787, 0.3838, 0.1203, 0.5466, + 0.3771, 0.3582, 0.5724, 0.0263, 0.1913, 0.4990, 0.8701, + 0.3258, 0.2866, 0.6643, 0.4819, 0.9032, 0.1817, 0.5537, + 0.7857, 0.4698, 0.9341, 0.6297, 0.7237, 0.9581, 0.2294, + 0.7410, 0.8136, 0.7895, 0.7463, 0.6784, 0.6740, 0.2672, + 0.2458, 0.2839, 0.5887, 0.5619, 0.3782, 0.9625, 0.2938, + 0.8381, 0.6910, 0.0535, 0.5779, 0.9358, 0.9292, 0.2289, + 0.9541, 0.8123, 0.1785, 0.2759, 0.9658, 0.5868, 0.2623, + 0.2484, 0.7792, 0.9078, 0.3283, 0.4956, 0.4941, 0.4122, + 0.1233, 0.9259, 0.0669, 0.4967, 0.5240, 0.4274, 0.4746, + 0.7490, 0.1508, 0.4805, 0.2981, 0.2983, 0.2382, 0.9951, + 0.9914, 0.7717, 0.2467, 0.8169, 0.6649, 0.8060, 0.2746, + 0.3619, 0.7712, 0.1675, 0.1290, 0.3804, 0.9021, 0.4784, + 0.4155, 0.5611, 0.9918, 0.9062, 0.6267]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.8891, 0.1750, 0.3464, ..., 0.7080, 0.1369, 0.7419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.473444938659668 seconds + +[40.92, 39.3, 39.57, 39.31, 39.4, 39.33, 39.89, 39.42, 39.99, 39.52] +[65.14] +12.892210245132446 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 904172, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.473444938659668, 'TIME_S_1KI': 0.011583465246280207, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 839.7985753679276, 'W': 65.14} +[40.92, 39.3, 39.57, 39.31, 39.4, 39.33, 39.89, 39.42, 39.99, 39.52, 39.82, 40.55, 39.58, 39.22, 39.2, 39.13, 40.07, 39.75, 39.17, 40.33] +713.175 +35.65875 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 904172, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.473444938659668, 'TIME_S_1KI': 0.011583465246280207, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 839.7985753679276, 'W': 65.14, 'J_1KI': 0.9288040056183199, 'W_1KI': 0.07204381467242958, 'W_D': 29.481250000000003, 'J_D': 380.078473289311, 'W_D_1KI': 0.0326057984542764, 'J_D_1KI': 3.6061499863163644e-05} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..e559329 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 262660, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.607843160629272, "TIME_S_1KI": 0.04038621472865786, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 846.3253393292428, "W": 64.87, "J_1KI": 3.222132564262708, "W_1KI": 0.24697327343333586, "W_D": 29.246500000000005, "J_D": 381.5639592522383, "W_D_1KI": 
0.11134736922256912, "J_D_1KI": 0.00042392206359007507} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..bab70c0 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.013487577438354492} + +tensor(indices=tensor([[3458, 3296, 222, ..., 1444, 2251, 3846], + [4299, 1410, 1932, ..., 3706, 1279, 2032]]), + values=tensor([0.4478, 0.3309, 0.1032, ..., 0.1760, 0.8945, 0.2442]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.6830, 0.5755, 0.7439, ..., 0.3425, 0.0047, 0.9929]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.013487577438354492 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '77849', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.1120543479919434} + +tensor(indices=tensor([[1587, 3871, 1326, ..., 3783, 1439, 1937], + [1157, 3622, 4030, ..., 4369, 2134, 1156]]), + values=tensor([0.3704, 0.8984, 0.6075, ..., 0.7516, 0.5164, 0.7911]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.4942, 0.1126, 0.2663, ..., 0.4271, 0.4829, 0.3873]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 3.1120543479919434 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '262660', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.607843160629272} + +tensor(indices=tensor([[1848, 1441, 3244, ..., 995, 3529, 801], + [2000, 3321, 1860, ..., 4284, 416, 3750]]), + values=tensor([0.8242, 0.0835, 0.8195, ..., 0.4255, 0.6275, 0.4442]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.2894, 0.0717, 0.9555, ..., 0.7076, 0.4099, 0.6837]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.607843160629272 seconds + +tensor(indices=tensor([[1848, 1441, 3244, ..., 995, 3529, 801], + [2000, 3321, 1860, ..., 4284, 416, 3750]]), + values=tensor([0.8242, 0.0835, 0.8195, ..., 0.4255, 0.6275, 0.4442]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.2894, 0.0717, 0.9555, ..., 0.7076, 0.4099, 0.6837]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.607843160629272 seconds + +[39.83, 39.12, 39.36, 39.7, 39.56, 
39.75, 39.63, 39.61, 39.2, 39.05] +[64.87] +13.046482801437378 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 262660, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.607843160629272, 'TIME_S_1KI': 0.04038621472865786, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 846.3253393292428, 'W': 64.87} +[39.83, 39.12, 39.36, 39.7, 39.56, 39.75, 39.63, 39.61, 39.2, 39.05, 39.72, 38.94, 39.56, 39.07, 39.16, 38.92, 39.18, 39.41, 40.79, 44.42] +712.47 +35.6235 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 262660, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.607843160629272, 'TIME_S_1KI': 0.04038621472865786, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 846.3253393292428, 'W': 64.87, 'J_1KI': 3.222132564262708, 'W_1KI': 0.24697327343333586, 'W_D': 29.246500000000005, 'J_D': 381.5639592522383, 'W_D_1KI': 0.11134736922256912, 'J_D_1KI': 0.00042392206359007507} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json index 57ab5e0..b51440d 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 6238, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.769784212112427, "TIME_S_1KI": 1.7264803161449866, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 915.7679015994072, "W": 67.11, "J_1KI": 146.8047293362307, "W_1KI": 10.75825585123437, "W_D": 31.7095, "J_D": 432.70067465007304, "W_D_1KI": 5.083279897403013, "J_D_1KI": 0.8148893711771422} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 5996, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.181211709976196, "TIME_S_1KI": 1.6980006187418606, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 899.0054618740082, "W": 66.87, "J_1KI": 149.93419977885395, "W_1KI": 11.152434956637759, "W_D": 31.423750000000005, "J_D": 422.4633300817014, "W_D_1KI": 5.240785523682455, "J_D_1KI": 0.8740469519150192} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output index fe560b9..f9c7f08 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.18373441696166992} +{"MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.18697094917297363} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 18, ..., 999979, - 999991, 1000000]), - col_indices=tensor([ 9419, 17690, 19775, ..., 65904, 78906, 97730]), - values=tensor([0.1002, 0.0063, 0.1334, ..., 0.8477, 0.2339, 0.2955]), +tensor(crow_indices=tensor([ 0, 11, 24, ..., 999982, + 999994, 1000000]), + col_indices=tensor([ 9983, 15946, 21045, ..., 77894, 88648, 89497]), + values=tensor([0.1086, 0.7102, 0.2104, ..., 0.2072, 0.6130, 0.1097]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9261, 0.9567, 0.5751, ..., 0.3199, 0.0262, 0.3042]) +tensor([0.1361, 0.7050, 0.5435, ..., 0.1985, 0.6376, 0.3340]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,39 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 0.18373441696166992 seconds +Time: 0.18697094917297363 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5714', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.617395401000977} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5615', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.832083702087402} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 15, ..., 999976, - 999989, 1000000]), - col_indices=tensor([12342, 20602, 31374, ..., 83399, 88988, 97850]), - values=tensor([0.3594, 0.1684, 0.5761, ..., 0.4601, 0.3694, 0.1608]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.5848, 0.1566, 0.7046, ..., 0.5634, 0.8550, 0.2097]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 9.617395401000977 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6238', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.769784212112427} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 15, ..., 999972, +tensor(crow_indices=tensor([ 0, 15, 27, ..., 999979, 999990, 1000000]), - col_indices=tensor([ 1977, 7363, 16479, ..., 91067, 93957, 95744]), - values=tensor([0.8934, 0.4616, 0.7140, ..., 0.3224, 0.7140, 0.9696]), + col_indices=tensor([ 2643, 3189, 5240, ..., 68705, 72961, 97726]), + values=tensor([0.7160, 0.7950, 0.9929, ..., 0.3041, 0.4414, 0.2180]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0584, 0.0097, 0.6336, ..., 0.7366, 0.8575, 0.7006]) +tensor([0.1777, 0.1946, 0.6010, ..., 0.0098, 0.9741, 0.9528]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.769784212112427 seconds +Time: 9.832083702087402 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5996', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.181211709976196} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 15, ..., 999972, - 999990, 1000000]), - col_indices=tensor([ 1977, 7363, 16479, ..., 91067, 93957, 95744]), - values=tensor([0.8934, 0.4616, 0.7140, ..., 0.3224, 0.7140, 0.9696]), +tensor(crow_indices=tensor([ 0, 7, 18, ..., 999987, + 999992, 1000000]), + col_indices=tensor([23061, 33848, 40530, ..., 69911, 92654, 99136]), + values=tensor([0.3534, 0.8273, 0.6908, ..., 0.2268, 0.3390, 0.9257]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0584, 0.0097, 0.6336, ..., 0.7366, 0.8575, 0.7006]) +tensor([0.2997, 0.5907, 0.9368, ..., 0.3220, 0.5731, 0.8724]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +56,30 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.769784212112427 seconds +Time: 10.181211709976196 seconds -[39.95, 40.61, 39.69, 39.43, 39.55, 38.89, 39.33, 39.12, 39.11, 38.94] -[67.11] -13.645774126052856 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.769784212112427, 'TIME_S_1KI': 1.7264803161449866, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 915.7679015994072, 'W': 67.11} -[39.95, 40.61, 39.69, 39.43, 39.55, 38.89, 39.33, 39.12, 39.11, 38.94, 39.57, 38.89, 39.02, 39.08, 39.5, 39.31, 39.6, 39.05, 39.01, 39.18] -708.01 -35.4005 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.769784212112427, 'TIME_S_1KI': 1.7264803161449866, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 915.7679015994072, 'W': 67.11, 'J_1KI': 146.8047293362307, 'W_1KI': 10.75825585123437, 'W_D': 31.7095, 'J_D': 432.70067465007304, 'W_D_1KI': 5.083279897403013, 'J_D_1KI': 0.8148893711771422} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 18, ..., 999987, + 999992, 1000000]), + col_indices=tensor([23061, 33848, 40530, ..., 69911, 92654, 99136]), + values=tensor([0.3534, 0.8273, 0.6908, ..., 0.2268, 0.3390, 0.9257]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2997, 0.5907, 0.9368, ..., 0.3220, 0.5731, 0.8724]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.181211709976196 seconds + +[40.21, 39.43, 39.39, 39.55, 39.46, 39.42, 38.94, 39.03, 39.08, 39.01] +[66.87] +13.444077491760254 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 5996, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.181211709976196, 'TIME_S_1KI': 1.6980006187418606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 899.0054618740082, 'W': 66.87} +[40.21, 39.43, 39.39, 39.55, 39.46, 39.42, 38.94, 39.03, 39.08, 39.01, 39.76, 41.07, 39.06, 38.89, 39.56, 39.31, 39.45, 39.12, 38.97, 39.41] +708.925 +35.44625 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 5996, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.181211709976196, 'TIME_S_1KI': 1.6980006187418606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 899.0054618740082, 'W': 66.87, 'J_1KI': 149.93419977885395, 'W_1KI': 11.152434956637759, 'W_D': 31.423750000000005, 'J_D': 422.4633300817014, 'W_D_1KI': 5.240785523682455, 'J_D_1KI': 0.8740469519150192} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json index a6d9713..d2b5b2f 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 631, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.591248035430908, "TIME_S_1KI": 16.78486217976372, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1175.510341129303, "W": 76.44, "J_1KI": 1862.932394816645, "W_1KI": 121.14104595879556, "W_D": 40.61775, "J_D": 624.6282726112604, "W_D_1KI": 64.37044374009508, "J_D_1KI": 102.0133815215453} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 646, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.64788269996643, "TIME_S_1KI": 16.482790557223577, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1248.8031814670564, "W": 77.37, "J_1KI": 1933.1318598561245, "W_1KI": 119.76780185758516, "W_D": 41.77675000000001, "J_D": 674.3044889667035, "W_D_1KI": 64.66989164086688, "J_D_1KI": 100.10819139453076} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output index 4526ab4..6e9f9fd 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 1.6621592044830322} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 1.6243762969970703} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 87, 180, ..., 9999810, - 9999900, 10000000]), - col_indices=tensor([ 1316, 2180, 2488, ..., 99391, 99679, 99852]), - values=tensor([0.4838, 0.8512, 0.8260, ..., 0.7772, 0.9919, 0.5400]), +tensor(crow_indices=tensor([ 0, 105, 209, ..., 9999808, + 9999917, 10000000]), + col_indices=tensor([ 518, 1117, 2607, ..., 97815, 97945, 98368]), + values=tensor([0.8433, 0.9852, 0.6731, ..., 0.0306, 0.9795, 0.2346]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7127, 0.0015, 0.2736, ..., 0.7345, 0.7377, 0.4477]) +tensor([0.4117, 0.6083, 0.5875, ..., 0.6399, 0.8751, 0.5908]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 1.6621592044830322 seconds +Time: 1.6243762969970703 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '631', '-ss', '100000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.591248035430908} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '646', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.64788269996643} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 104, 198, ..., 9999801, - 9999900, 10000000]), - col_indices=tensor([ 1720, 2057, 4608, ..., 98148, 99667, 99757]), - values=tensor([0.5091, 0.6981, 0.1321, ..., 0.4342, 0.6647, 0.6565]), +tensor(crow_indices=tensor([ 0, 95, 203, ..., 9999789, + 9999904, 10000000]), + col_indices=tensor([ 253, 1563, 3273, ..., 97045, 97783, 98258]), + values=tensor([0.1838, 0.7315, 0.7723, ..., 0.9826, 0.1322, 0.0476]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3567, 0.4501, 0.1430, ..., 0.3086, 0.4387, 0.0746]) +tensor([0.6150, 0.7553, 0.5500, ..., 0.1787, 0.2534, 0.8601]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.591248035430908 seconds +Time: 10.64788269996643 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 104, 198, ..., 9999801, - 9999900, 10000000]), - col_indices=tensor([ 1720, 2057, 4608, ..., 98148, 99667, 99757]), - values=tensor([0.5091, 0.6981, 0.1321, ..., 0.4342, 0.6647, 0.6565]), +tensor(crow_indices=tensor([ 0, 95, 203, ..., 9999789, + 9999904, 10000000]), + col_indices=tensor([ 253, 1563, 3273, ..., 97045, 97783, 98258]), + values=tensor([0.1838, 0.7315, 0.7723, ..., 0.9826, 0.1322, 0.0476]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3567, 0.4501, 0.1430, ..., 0.3086, 0.4387, 0.0746]) +tensor([0.6150, 0.7553, 0.5500, ..., 0.1787, 0.2534, 0.8601]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.591248035430908 seconds +Time: 10.64788269996643 seconds -[39.78, 39.0, 39.24, 38.88, 39.37, 39.28, 39.34, 38.84, 41.87, 41.9] -[76.44] -15.378209590911865 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 631, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.591248035430908, 'TIME_S_1KI': 16.78486217976372, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1175.510341129303, 'W': 76.44} -[39.78, 39.0, 39.24, 38.88, 39.37, 39.28, 39.34, 38.84, 41.87, 41.9, 39.74, 39.44, 39.02, 44.31, 40.1, 38.93, 39.71, 39.12, 39.62, 39.33] -716.4449999999999 -35.82225 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 631, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.591248035430908, 'TIME_S_1KI': 16.78486217976372, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1175.510341129303, 'W': 76.44, 'J_1KI': 1862.932394816645, 'W_1KI': 121.14104595879556, 'W_D': 40.61775, 'J_D': 624.6282726112604, 'W_D_1KI': 64.37044374009508, 'J_D_1KI': 102.0133815215453} +[39.61, 39.03, 39.15, 39.24, 38.96, 39.2, 39.13, 39.19, 39.04, 39.38] +[77.37] +16.140664100646973 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 646, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 
100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.64788269996643, 'TIME_S_1KI': 16.482790557223577, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1248.8031814670564, 'W': 77.37} +[39.61, 39.03, 39.15, 39.24, 38.96, 39.2, 39.13, 39.19, 39.04, 39.38, 39.64, 40.35, 39.48, 38.96, 38.99, 39.05, 44.37, 39.04, 39.56, 39.62] +711.865 +35.59325 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 646, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.64788269996643, 'TIME_S_1KI': 16.482790557223577, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1248.8031814670564, 'W': 77.37, 'J_1KI': 1933.1318598561245, 'W_1KI': 119.76780185758516, 'W_D': 41.77675000000001, 'J_D': 674.3044889667035, 'W_D_1KI': 64.66989164086688, 'J_D_1KI': 100.10819139453076} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..c4d6a14 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 15.186817169189453, "TIME_S_1KI": 151.86817169189453, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3281.6506419181824, "W": 75.41, "J_1KI": 32816.506419181824, "W_1KI": 754.1, "W_D": 39.74225, "J_D": 1729.4812388777732, "W_D_1KI": 397.4225, "J_D_1KI": 3974.225} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..12a82de --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 15.186817169189453} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1054, 2106, ..., + 99998025, 99998968, 100000000]), + col_indices=tensor([ 33, 76, 94, ..., 99342, 99803, 99929]), + values=tensor([0.0074, 0.7661, 0.5151, ..., 0.3205, 0.9444, 0.4858]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.3776, 0.7244, 0.4309, ..., 0.7745, 0.1365, 0.6548]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 15.186817169189453 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1054, 2106, ..., + 99998025, 99998968, 100000000]), + col_indices=tensor([ 33, 76, 94, ..., 99342, 99803, 99929]), + values=tensor([0.0074, 0.7661, 0.5151, ..., 0.3205, 0.9444, 0.4858]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.3776, 0.7244, 0.4309, ..., 0.7745, 0.1365, 0.6548]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 15.186817169189453 seconds + +[39.84, 39.14, 44.16, 39.53, 39.81, 39.87, 39.21, 39.12, 39.4, 39.04] +[75.41] +43.517446517944336 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 15.186817169189453, 'TIME_S_1KI': 151.86817169189453, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3281.6506419181824, 'W': 75.41} +[39.84, 39.14, 44.16, 39.53, 39.81, 39.87, 39.21, 39.12, 39.4, 39.04, 40.31, 39.84, 39.22, 39.29, 39.14, 39.05, 39.21, 39.02, 39.08, 39.34] +713.3549999999999 +35.66775 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 15.186817169189453, 'TIME_S_1KI': 151.86817169189453, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3281.6506419181824, 'W': 75.41, 'J_1KI': 32816.506419181824, 'W_1KI': 754.1, 'W_D': 39.74225, 'J_D': 1729.4812388777732, 'W_D_1KI': 397.4225, 'J_D_1KI': 3974.225} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json index 87f4ecf..098fbe2 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 12301, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.471284627914429, "TIME_S_1KI": 0.8512547457860685, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 857.4546840524674, "W": 65.29, "J_1KI": 69.70609576883729, "W_1KI": 5.307698561092595, "W_D": 29.40700000000001, "J_D": 
386.2026327757837, "W_D_1KI": 2.3906186488903347, "J_D_1KI": 0.19434343946755017} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 12317, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.456599712371826, "TIME_S_1KI": 0.8489567031234737, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 856.4664033508301, "W": 65.12, "J_1KI": 69.53530919467647, "W_1KI": 5.287001704960624, "W_D": 29.49900000000001, "J_D": 387.9745459527971, "W_D_1KI": 2.39498254445076, "J_D_1KI": 0.1944452824917399} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output index e82259e..4658af1 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.09777641296386719} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.09817862510681152} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 99999, 99999, +tensor(crow_indices=tensor([ 0, 2, 3, ..., 99998, 99998, 100000]), - col_indices=tensor([10415, 34481, 41161, ..., 69185, 8793, 68858]), - values=tensor([0.7697, 0.4410, 0.3075, ..., 0.8657, 0.1828, 0.6667]), + col_indices=tensor([53095, 59802, 83624, ..., 47814, 38432, 88674]), + values=tensor([0.4970, 0.8457, 0.0545, ..., 0.0462, 0.7885, 0.8704]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2533, 0.9138, 0.2717, ..., 0.2019, 0.7103, 0.0862]) +tensor([0.9011, 0.7106, 0.3845, ..., 0.4694, 0.1392, 0.4036]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.09777641296386719 seconds +Time: 0.09817862510681152 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '10738', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.165135860443115} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '10694', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.116411209106445} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 99996, 99996, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99998, 99999, 100000]), - col_indices=tensor([17140, 55127, 70380, ..., 9005, 21920, 77148]), - values=tensor([0.4913, 0.5196, 0.1867, ..., 0.0903, 0.8718, 0.1023]), + col_indices=tensor([41985, 49475, 77186, ..., 66378, 39934, 68538]), + values=tensor([0.1520, 0.5404, 0.6250, ..., 0.3363, 0.9694, 0.5844]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0174, 0.3477, 0.7027, ..., 0.9312, 0.2138, 0.3974]) +tensor([0.4130, 0.6319, 0.4955, ..., 0.6021, 0.2338, 0.9434]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 9.165135860443115 seconds +Time: 9.116411209106445 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12301', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.471284627914429} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12317', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.456599712371826} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 99999, 100000, +tensor(crow_indices=tensor([ 0, 0, 3, ..., 99997, 99998, 100000]), - col_indices=tensor([46597, 403, 54918, ..., 58141, 94085, 20979]), - values=tensor([0.5040, 0.7325, 0.7996, ..., 0.9839, 0.2631, 0.4936]), + col_indices=tensor([13901, 18701, 64861, ..., 8515, 17451, 56478]), + values=tensor([0.6004, 0.7779, 0.2413, ..., 0.1569, 0.9416, 0.1782]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2071, 0.7418, 0.9347, ..., 0.4731, 0.1489, 0.5724]) +tensor([0.5455, 0.6312, 0.2012, ..., 0.1711, 0.1722, 0.6250]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.471284627914429 seconds +Time: 10.456599712371826 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 5, ..., 99999, 100000, +tensor(crow_indices=tensor([ 0, 0, 3, ..., 99997, 99998, 100000]), - col_indices=tensor([46597, 403, 54918, ..., 58141, 94085, 20979]), - values=tensor([0.5040, 0.7325, 0.7996, ..., 0.9839, 0.2631, 0.4936]), + col_indices=tensor([13901, 18701, 64861, ..., 8515, 17451, 56478]), + values=tensor([0.6004, 0.7779, 0.2413, ..., 0.1569, 0.9416, 0.1782]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2071, 0.7418, 0.9347, ..., 0.4731, 0.1489, 0.5724]) +tensor([0.5455, 0.6312, 0.2012, ..., 0.1711, 0.1722, 0.6250]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.471284627914429 seconds +Time: 10.456599712371826 seconds -[39.92, 39.64, 39.53, 39.59, 39.42, 39.38, 39.39, 39.02, 44.4, 39.0] -[65.29] -13.13301706314087 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12301, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.471284627914429, 'TIME_S_1KI': 0.8512547457860685, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 857.4546840524674, 'W': 65.29} -[39.92, 39.64, 39.53, 39.59, 39.42, 39.38, 39.39, 39.02, 44.4, 39.0, 39.77, 38.96, 39.1, 39.29, 38.97, 44.08, 39.5, 39.1, 39.5, 38.89] -717.66 -35.882999999999996 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12301, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.471284627914429, 'TIME_S_1KI': 0.8512547457860685, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 857.4546840524674, 'W': 65.29, 'J_1KI': 69.70609576883729, 'W_1KI': 5.307698561092595, 'W_D': 29.40700000000001, 'J_D': 386.2026327757837, 'W_D_1KI': 2.3906186488903347, 'J_D_1KI': 0.19434343946755017} +[39.71, 39.49, 39.44, 39.4, 39.16, 39.02, 39.36, 39.0, 39.17, 39.0] +[65.12] +13.152125358581543 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12317, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.456599712371826, 'TIME_S_1KI': 0.8489567031234737, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.4664033508301, 'W': 65.12} +[39.71, 39.49, 39.44, 39.4, 39.16, 39.02, 39.36, 39.0, 39.17, 39.0, 39.89, 39.65, 38.99, 40.22, 39.01, 38.97, 39.04, 44.06, 39.51, 39.26] +712.4199999999998 +35.620999999999995 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12317, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.456599712371826, 'TIME_S_1KI': 0.8489567031234737, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.4664033508301, 'W': 65.12, 'J_1KI': 69.53530919467647, 'W_1KI': 5.287001704960624, 'W_D': 29.49900000000001, 'J_D': 387.9745459527971, 'W_D_1KI': 2.39498254445076, 'J_D_1KI': 0.1944452824917399} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.json index 96abffc..2035104 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 7670, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.499921083450317, "TIME_S_1KI": 1.3689597240482814, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 873.9648419952392, "W": 65.94, "J_1KI": 113.94587248960094, "W_1KI": 8.597131681877444, "W_D": 30.4285, "J_D": 403.29753100776674, "W_D_1KI": 3.9672099087353327, "J_D_1KI": 0.5172372762366796} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 7457, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.19743800163269, "TIME_S_1KI": 1.3674987262481815, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 852.7737856292725, "W": 65.84, "J_1KI": 114.35882870179329, "W_1KI": 8.829287917393053, "W_D": 30.381500000000003, "J_D": 393.5076969637871, "W_D_1KI": 4.074225559876627, "J_D_1KI": 0.5463625532890742} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.output index 7c108ed..1570d36 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.15105009078979492} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.1507415771484375} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 7, ..., 499997, 499999, +tensor(crow_indices=tensor([ 0, 4, 8, ..., 499987, 499993, 500000]), - col_indices=tensor([ 4363, 49954, 63940, ..., 740, 19551, 36085]), - values=tensor([0.7532, 0.6946, 0.3669, ..., 0.0744, 0.6590, 0.6868]), + col_indices=tensor([ 7791, 16626, 25734, ..., 44751, 53769, 83372]), + values=tensor([0.4260, 0.8528, 0.7551, ..., 0.4583, 0.2926, 0.1239]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.0672, 0.6383, 0.6761, ..., 0.1188, 0.9489, 0.0863]) +tensor([0.3576, 0.1829, 0.1325, ..., 0.8642, 0.2263, 0.3102]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 0.15105009078979492 seconds +Time: 0.1507415771484375 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6951', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.51450252532959} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6965', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.80711817741394} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 13, ..., 499996, 499997, +tensor(crow_indices=tensor([ 0, 5, 10, ..., 499992, 499995, 500000]), - col_indices=tensor([ 4260, 42899, 54575, ..., 5425, 31756, 61151]), - values=tensor([0.4952, 0.8247, 0.2969, ..., 0.2331, 0.9267, 0.2319]), + col_indices=tensor([16269, 59598, 69161, ..., 42613, 67997, 86249]), + values=tensor([0.8036, 0.6259, 0.8288, ..., 0.8919, 0.7503, 0.7598]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.7470, 0.2926, 0.2731, ..., 0.9830, 0.8295, 0.9958]) +tensor([0.9828, 0.5323, 0.9879, ..., 0.1523, 0.5476, 0.9879]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 9.51450252532959 seconds +Time: 9.80711817741394 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '7670', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.499921083450317} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '7457', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.19743800163269} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 18, ..., 499989, 499996, +tensor(crow_indices=tensor([ 0, 2, 7, ..., 499986, 499995, 500000]), - col_indices=tensor([16754, 23077, 28797, ..., 22620, 46442, 72952]), - values=tensor([0.6737, 0.8129, 0.9335, ..., 0.4581, 0.1021, 0.2391]), + col_indices=tensor([11558, 50648, 13976, ..., 64781, 65283, 76485]), + values=tensor([0.0367, 0.1968, 0.7333, ..., 0.2503, 0.6323, 0.5776]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.9975, 0.9245, 0.4309, ..., 0.4303, 0.6144, 0.3183]) +tensor([0.0500, 0.8091, 0.0490, ..., 0.0151, 0.5519, 0.7523]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.499921083450317 seconds +Time: 10.19743800163269 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 18, ..., 499989, 499996, +tensor(crow_indices=tensor([ 0, 2, 7, ..., 499986, 499995, 500000]), - col_indices=tensor([16754, 23077, 28797, ..., 22620, 46442, 72952]), - values=tensor([0.6737, 0.8129, 0.9335, ..., 0.4581, 0.1021, 0.2391]), + col_indices=tensor([11558, 50648, 13976, ..., 64781, 65283, 76485]), + values=tensor([0.0367, 0.1968, 0.7333, ..., 0.2503, 0.6323, 0.5776]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.9975, 0.9245, 0.4309, ..., 0.4303, 0.6144, 0.3183]) +tensor([0.0500, 0.8091, 0.0490, ..., 0.0151, 0.5519, 0.7523]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.499921083450317 seconds +Time: 10.19743800163269 seconds -[39.66, 38.93, 40.25, 39.53, 39.7, 39.41, 40.1, 39.55, 39.0, 39.05] -[65.94] -13.25394058227539 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7670, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.499921083450317, 'TIME_S_1KI': 1.3689597240482814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 873.9648419952392, 'W': 65.94} -[39.66, 38.93, 40.25, 39.53, 39.7, 39.41, 40.1, 39.55, 39.0, 39.05, 39.72, 39.51, 39.22, 39.24, 39.32, 39.23, 40.03, 39.22, 39.23, 39.09] -710.23 -35.5115 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7670, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.499921083450317, 'TIME_S_1KI': 1.3689597240482814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 873.9648419952392, 'W': 65.94, 'J_1KI': 113.94587248960094, 'W_1KI': 8.597131681877444, 'W_D': 30.4285, 'J_D': 403.29753100776674, 'W_D_1KI': 3.9672099087353327, 'J_D_1KI': 0.5172372762366796} +[40.05, 39.37, 39.66, 39.38, 39.07, 40.14, 39.07, 39.02, 40.82, 38.87] +[65.84] +12.952214241027832 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7457, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.19743800163269, 'TIME_S_1KI': 1.3674987262481815, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.7737856292725, 'W': 65.84} +[40.05, 39.37, 39.66, 39.38, 39.07, 40.14, 39.07, 39.02, 40.82, 38.87, 39.59, 39.1, 39.82, 38.89, 39.48, 38.91, 39.49, 39.19, 38.92, 39.17] +709.1700000000001 +35.4585 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7457, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.19743800163269, 'TIME_S_1KI': 1.3674987262481815, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.7737856292725, 'W': 65.84, 'J_1KI': 114.35882870179329, 'W_1KI': 8.829287917393053, 'W_D': 30.381500000000003, 'J_D': 393.5076969637871, 'W_D_1KI': 4.074225559876627, 'J_D_1KI': 0.5463625532890742} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json index a0173f2..f3e1ac9 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 237172, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.292231321334839, "TIME_S_1KI": 0.04339564249293693, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 862.8344623732567, "W": 65.91, "J_1KI": 3.638011495342016, "W_1KI": 0.2778995834246875, "W_D": 30.572500000000005, "J_D": 400.22768321812157, "W_D_1KI": 0.12890433946671614, "J_D_1KI": 0.0005435057235538602} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 237686, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.452977180480957, "TIME_S_1KI": 0.043978093705481, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 865.2581436538696, "W": 66.08, "J_1KI": 3.640341221838348, "W_1KI": 0.2780138502057336, "W_D": 29.752000000000002, "J_D": 389.57567024803166, "W_D_1KI": 0.12517354829480912, "J_D_1KI": 0.0005266340814974762} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output index 104c518..50b6048 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.016118526458740234} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.016066312789916992} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 3, ..., 9995, 9998, 10000]), - col_indices=tensor([3770, 7218, 7901, ..., 7147, 2189, 2422]), - values=tensor([0.0682, 0.4925, 0.4932, ..., 0.9859, 0.2682, 0.5675]), +tensor(crow_indices=tensor([ 0, 3, 3, ..., 9997, 9999, 10000]), + col_indices=tensor([1468, 1838, 1987, ..., 60, 1530, 9720]), + values=tensor([0.8272, 0.8780, 0.7097, ..., 0.5113, 0.4922, 0.1493]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1703, 0.4753, 0.7272, ..., 0.9852, 0.8357, 0.1698]) +tensor([0.2034, 0.3353, 0.9918, ..., 0.5313, 0.4103, 0.3120]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.016118526458740234 seconds +Time: 0.016066312789916992 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65142', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.8839359283447266} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65354', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.8870651721954346} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 9999, 10000, 10000]), - col_indices=tensor([6160, 1315, 448, ..., 9882, 6598, 7658]), - values=tensor([0.4764, 0.2622, 0.7017, ..., 0.9860, 0.1866, 0.7529]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 9997, 9998, 10000]), + col_indices=tensor([6857, 6728, 4748, ..., 94, 5167, 5766]), + values=tensor([0.3641, 0.8896, 0.0675, ..., 0.3656, 0.2913, 0.3223]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.4338, 0.9515, 0.6308, ..., 0.9365, 0.1556, 0.4912]) +tensor([0.3076, 0.2027, 0.0704, ..., 0.9202, 0.3518, 0.8650]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 2.8839359283447266 seconds +Time: 2.8870651721954346 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '237172', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.292231321334839} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '237686', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.452977180480957} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 9999, 10000, 10000]), - col_indices=tensor([1278, 7265, 6993, ..., 9863, 6468, 3133]), - values=tensor([0.6288, 0.8682, 0.0748, ..., 0.3062, 0.2031, 0.3525]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 10000, 10000, 10000]), + col_indices=tensor([6668, 9602, 2407, ..., 1580, 8139, 9959]), + values=tensor([0.7282, 0.8866, 0.9389, ..., 0.3308, 0.0411, 0.7608]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1078, 0.8244, 0.8698, ..., 0.0830, 0.2322, 0.6518]) +tensor([0.6674, 0.2718, 0.8109, ..., 0.1456, 0.2717, 0.0651]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.292231321334839 seconds +Time: 10.452977180480957 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 9999, 10000, 10000]), - col_indices=tensor([1278, 7265, 6993, ..., 9863, 6468, 3133]), - values=tensor([0.6288, 0.8682, 0.0748, ..., 0.3062, 0.2031, 0.3525]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 10000, 10000, 10000]), + col_indices=tensor([6668, 9602, 2407, ..., 1580, 8139, 9959]), + values=tensor([0.7282, 0.8866, 0.9389, ..., 0.3308, 0.0411, 0.7608]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1078, 0.8244, 0.8698, ..., 0.0830, 0.2322, 0.6518]) +tensor([0.6674, 0.2718, 0.8109, ..., 0.1456, 0.2717, 0.0651]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.292231321334839 seconds +Time: 10.452977180480957 seconds -[40.27, 39.31, 39.28, 38.86, 41.16, 39.32, 39.02, 39.09, 38.81, 39.11] -[65.91] -13.091100931167603 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 237172, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.292231321334839, 'TIME_S_1KI': 0.04339564249293693, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 862.8344623732567, 'W': 65.91} -[40.27, 39.31, 39.28, 38.86, 41.16, 39.32, 39.02, 39.09, 38.81, 39.11, 39.9, 39.01, 39.35, 39.15, 39.0, 38.86, 39.09, 38.89, 38.89, 40.04] -706.7499999999999 -35.33749999999999 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 237172, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.292231321334839, 'TIME_S_1KI': 0.04339564249293693, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 862.8344623732567, 'W': 65.91, 'J_1KI': 3.638011495342016, 'W_1KI': 0.2778995834246875, 'W_D': 30.572500000000005, 'J_D': 400.22768321812157, 'W_D_1KI': 0.12890433946671614, 'J_D_1KI': 0.0005435057235538602} +[40.22, 39.25, 40.45, 42.27, 43.15, 39.86, 45.08, 39.41, 39.35, 39.02] +[66.08] +13.0941002368927 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 237686, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.452977180480957, 'TIME_S_1KI': 0.043978093705481, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 865.2581436538696, 'W': 66.08} +[40.22, 39.25, 40.45, 42.27, 43.15, 39.86, 45.08, 39.41, 39.35, 39.02, 39.91, 38.89, 39.02, 44.29, 39.06, 39.59, 39.64, 38.87, 39.01, 39.59] +726.56 +36.327999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 237686, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.452977180480957, 'TIME_S_1KI': 0.043978093705481, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 865.2581436538696, 'W': 66.08, 'J_1KI': 3.640341221838348, 'W_1KI': 0.2780138502057336, 'W_D': 29.752000000000002, 'J_D': 389.57567024803166, 'W_D_1KI': 0.12517354829480912, 'J_D_1KI': 0.0005266340814974762} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json index deb9985..dec33aa 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 75716, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.424914360046387, "TIME_S_1KI": 0.1376844307682179, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 940.2084917426108, "W": 66.49, "J_1KI": 12.41756685169067, "W_1KI": 0.8781499286808601, "W_D": 31.17374999999999, "J_D": 440.81552819162596, "W_D_1KI": 0.4117194516350572, "J_D_1KI": 0.005437680960894093} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 68823, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.113624572753906, "TIME_S_1KI": 0.1469512310238424, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 874.6530011487007, "W": 66.63, "J_1KI": 12.708731109493929, "W_1KI": 0.9681356523255307, "W_D": 31.388249999999992, "J_D": 412.0340246631502, "W_D_1KI": 0.45607209799049725, "J_D_1KI": 0.006626739578200562} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output index 169706c..d5877e6 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02692556381225586} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02646350860595703} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 22, ..., 99984, 99990, +tensor(crow_indices=tensor([ 0, 10, 29, ..., 99979, 99986, 100000]), - col_indices=tensor([ 947, 1869, 5338, ..., 6268, 7050, 7942]), - values=tensor([0.2237, 0.7540, 0.0617, ..., 0.6862, 0.3906, 0.7890]), + col_indices=tensor([ 215, 5321, 5688, ..., 8927, 9338, 9590]), + values=tensor([0.5862, 0.8692, 0.4231, ..., 0.9338, 0.8910, 0.1386]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6838, 0.4222, 0.9597, ..., 0.5474, 0.0680, 0.5394]) +tensor([0.2934, 0.9397, 0.3873, ..., 0.8200, 0.8199, 0.8958]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.02692556381225586 seconds +Time: 0.02646350860595703 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '38996', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.952186584472656} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '39677', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 6.0533013343811035} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 19, ..., 99978, 99990, +tensor(crow_indices=tensor([ 0, 14, 24, ..., 99988, 99991, 100000]), - col_indices=tensor([ 7, 556, 703, ..., 8117, 8865, 9056]), - values=tensor([0.2495, 0.4435, 0.2550, ..., 0.5409, 0.7823, 0.3947]), + col_indices=tensor([ 978, 1764, 2285, ..., 6145, 6169, 8642]), + values=tensor([0.7268, 0.3214, 0.9698, ..., 0.0439, 0.1225, 0.5111]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8603, 0.6651, 0.2785, ..., 0.2036, 0.7755, 0.1415]) +tensor([0.3012, 0.3329, 0.3234, ..., 0.5299, 0.8683, 0.4760]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 5.952186584472656 seconds +Time: 6.0533013343811035 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '68791', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.539567232131958} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '68823', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.113624572753906} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 19, ..., 99987, 99995, +tensor(crow_indices=tensor([ 0, 9, 16, ..., 99982, 99993, 100000]), - col_indices=tensor([ 696, 997, 2062, ..., 1211, 1590, 9690]), - values=tensor([0.0377, 0.1568, 0.2160, ..., 0.8237, 0.6309, 0.0587]), + col_indices=tensor([1859, 2396, 2947, ..., 5703, 6678, 8489]), + values=tensor([0.4084, 0.9747, 0.1669, ..., 0.3937, 0.7409, 0.3744]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0823, 0.1873, 0.3356, ..., 0.2591, 0.5771, 0.7059]) +tensor([0.3062, 0.5448, 0.1896, ..., 0.4888, 0.7194, 0.5362]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,19 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 9.539567232131958 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75716', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.424914360046387} +Time: 10.113624572753906 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 17, ..., 99986, 99994, +tensor(crow_indices=tensor([ 0, 9, 16, ..., 99982, 99993, 100000]), - col_indices=tensor([1284, 3776, 5103, ..., 6955, 7171, 8445]), - values=tensor([0.9684, 0.2053, 0.3935, ..., 0.8592, 0.0314, 0.3677]), + col_indices=tensor([1859, 2396, 2947, ..., 5703, 6678, 8489]), + values=tensor([0.4084, 0.9747, 0.1669, ..., 0.3937, 0.7409, 0.3744]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.4306, 0.9725, 0.6597, ..., 0.5969, 0.7821, 0.5134]) +tensor([0.3062, 0.5448, 0.1896, ..., 0.4888, 0.7194, 0.5362]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -76,30 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.424914360046387 seconds +Time: 10.113624572753906 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 17, ..., 99986, 99994, - 100000]), - col_indices=tensor([1284, 3776, 5103, ..., 6955, 7171, 8445]), - values=tensor([0.9684, 0.2053, 0.3935, ..., 0.8592, 0.0314, 0.3677]), - size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.4306, 0.9725, 0.6597, ..., 0.5969, 0.7821, 0.5134]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 100000 -Density: 0.001 -Time: 10.424914360046387 seconds - -[40.22, 38.81, 38.98, 39.63, 38.95, 39.23, 38.84, 39.29, 39.31, 38.77] -[66.49] -14.140599966049194 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75716, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.424914360046387, 'TIME_S_1KI': 0.1376844307682179, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 940.2084917426108, 'W': 66.49} -[40.22, 38.81, 38.98, 39.63, 38.95, 39.23, 38.84, 39.29, 39.31, 38.77, 41.54, 39.08, 39.06, 38.87, 39.0, 38.96, 39.2, 38.82, 39.06, 41.94] -706.325 -35.316250000000004 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75716, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.424914360046387, 'TIME_S_1KI': 0.1376844307682179, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 940.2084917426108, 'W': 66.49, 'J_1KI': 12.41756685169067, 'W_1KI': 0.8781499286808601, 'W_D': 31.17374999999999, 'J_D': 440.81552819162596, 'W_D_1KI': 0.4117194516350572, 'J_D_1KI': 0.005437680960894093} +[39.41, 38.89, 39.07, 39.08, 38.94, 39.24, 38.94, 39.57, 39.65, 39.28] +[66.63] +13.127014875411987 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 68823, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.113624572753906, 'TIME_S_1KI': 0.1469512310238424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 874.6530011487007, 'W': 66.63} +[39.41, 38.89, 39.07, 39.08, 38.94, 39.24, 38.94, 39.57, 
39.65, 39.28, 39.77, 39.15, 39.52, 38.76, 38.91, 38.99, 38.81, 39.14, 39.28, 39.33] +704.835 +35.24175 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 68823, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.113624572753906, 'TIME_S_1KI': 0.1469512310238424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 874.6530011487007, 'W': 66.63, 'J_1KI': 12.708731109493929, 'W_1KI': 0.9681356523255307, 'W_D': 31.388249999999992, 'J_D': 412.0340246631502, 'W_D_1KI': 0.45607209799049725, 'J_D_1KI': 0.006626739578200562} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json index db6444c..1feaf62 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 10206, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.48923373222351, "TIME_S_1KI": 1.0277516884404774, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 888.1208229660987, "W": 67.21, "J_1KI": 87.01948098825189, "W_1KI": 6.585341955712326, "W_D": 31.27349999999999, "J_D": 413.25169702470293, "W_D_1KI": 3.0642269253380356, "J_D_1KI": 0.30023779397785966} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 10198, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.494524955749512, "TIME_S_1KI": 1.0290767754216035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 885.1674857521058, "W": 66.76, "J_1KI": 86.79814529830415, "W_1KI": 6.546381643459502, "W_D": 31.493750000000006, "J_D": 417.5740488976241, "W_D_1KI": 3.0882280839380276, "J_D_1KI": 0.30282683702079105} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output index 191d2ac..6557113 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.1162419319152832} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.11575102806091309} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 100, 185, ..., 999774, - 999893, 1000000]), - col_indices=tensor([ 36, 100, 149, ..., 9802, 9836, 9872]), - values=tensor([0.2938, 0.2320, 0.9118, ..., 0.8681, 0.8272, 0.2716]), +tensor(crow_indices=tensor([ 0, 102, 219, ..., 999795, + 999887, 1000000]), + col_indices=tensor([ 239, 334, 368, ..., 9762, 9953, 9977]), + values=tensor([0.6967, 0.7485, 0.2350, ..., 0.4077, 0.3876, 0.6539]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9985, 0.1887, 0.5488, ..., 0.6608, 0.9222, 0.7055]) +tensor([0.7350, 0.4858, 0.9581, ..., 0.0149, 0.9326, 0.8066]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 0.1162419319152832 seconds +Time: 0.11575102806091309 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '9032', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.29158329963684} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '9071', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.338836908340454} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 95, 205, ..., 999782, - 999891, 1000000]), - col_indices=tensor([ 54, 212, 264, ..., 9693, 9804, 9961]), - values=tensor([0.9421, 0.7916, 0.1774, ..., 0.7420, 0.5713, 0.3525]), +tensor(crow_indices=tensor([ 0, 87, 197, ..., 999796, + 999896, 1000000]), + col_indices=tensor([ 36, 195, 517, ..., 9721, 9955, 9963]), + values=tensor([0.4399, 0.3851, 0.1811, ..., 0.7063, 0.8034, 0.6024]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0899, 0.7410, 0.9990, ..., 0.5022, 0.0295, 0.8248]) +tensor([0.8618, 0.7147, 0.8393, ..., 0.8094, 0.9600, 0.5578]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 9.29158329963684 seconds +Time: 9.338836908340454 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '10206', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.48923373222351} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '10198', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.494524955749512} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 108, 196, ..., 999774, - 999884, 1000000]), - col_indices=tensor([ 16, 259, 309, ..., 9528, 9603, 9788]), - values=tensor([0.1649, 0.9890, 0.6907, ..., 0.8956, 0.0145, 0.7596]), +tensor(crow_indices=tensor([ 0, 107, 223, ..., 999793, + 999918, 1000000]), + col_indices=tensor([ 283, 671, 673, ..., 9768, 9909, 9973]), + values=tensor([0.9594, 0.8276, 0.7039, ..., 0.5970, 0.0478, 0.2843]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.7287, 0.8351, 0.4943, ..., 0.5583, 0.1274, 0.9823]) +tensor([0.1523, 0.7589, 0.9241, ..., 0.2822, 0.2741, 0.3703]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.48923373222351 seconds +Time: 10.494524955749512 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 108, 196, ..., 999774, - 999884, 1000000]), - col_indices=tensor([ 16, 259, 309, ..., 9528, 9603, 9788]), - values=tensor([0.1649, 0.9890, 0.6907, ..., 0.8956, 0.0145, 0.7596]), +tensor(crow_indices=tensor([ 0, 107, 223, ..., 999793, + 999918, 1000000]), + col_indices=tensor([ 283, 671, 673, ..., 9768, 9909, 9973]), + values=tensor([0.9594, 0.8276, 0.7039, ..., 0.5970, 0.0478, 0.2843]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.7287, 0.8351, 0.4943, ..., 0.5583, 0.1274, 0.9823]) +tensor([0.1523, 0.7589, 0.9241, ..., 0.2822, 0.2741, 0.3703]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.48923373222351 seconds +Time: 10.494524955749512 seconds -[39.9, 39.07, 39.52, 38.94, 39.35, 44.79, 39.52, 39.98, 39.47, 39.28] -[67.21] -13.214117288589478 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10206, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.48923373222351, 'TIME_S_1KI': 1.0277516884404774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.1208229660987, 'W': 67.21} -[39.9, 39.07, 39.52, 38.94, 39.35, 44.79, 39.52, 39.98, 39.47, 39.28, 40.29, 39.27, 43.98, 38.92, 39.16, 39.39, 39.32, 39.34, 39.08, 39.79] -718.73 -35.9365 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10206, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.48923373222351, 'TIME_S_1KI': 1.0277516884404774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.1208229660987, 'W': 67.21, 'J_1KI': 87.01948098825189, 'W_1KI': 6.585341955712326, 'W_D': 31.27349999999999, 'J_D': 413.25169702470293, 'W_D_1KI': 3.0642269253380356, 'J_D_1KI': 0.30023779397785966} +[40.0, 39.04, 39.52, 39.47, 39.38, 38.92, 39.0, 38.85, 38.88, 38.94] +[66.76] +13.258949756622314 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10198, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.494524955749512, 'TIME_S_1KI': 1.0290767754216035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 885.1674857521058, 'W': 66.76} +[40.0, 39.04, 39.52, 39.47, 39.38, 38.92, 39.0, 38.85, 38.88, 38.94, 40.44, 40.88, 39.28, 38.74, 38.95, 38.82, 38.85, 38.83, 38.87, 38.71] +705.325 +35.26625 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10198, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.494524955749512, 'TIME_S_1KI': 1.0290767754216035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 885.1674857521058, 'W': 66.76, 'J_1KI': 86.79814529830415, 'W_1KI': 6.546381643459502, 'W_D': 31.493750000000006, 'J_D': 417.5740488976241, 'W_D_1KI': 3.0882280839380276, 'J_D_1KI': 0.30282683702079105} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json index f94ac11..7714234 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1725, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.28298306465149, "TIME_S_1KI": 5.961149602696516, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1049.8662384581567, "W": 75.33, "J_1KI": 608.6181092511052, "W_1KI": 43.66956521739131, "W_D": 39.905, "J_D": 556.151762188673, "W_D_1KI": 23.133333333333336, "J_D_1KI": 13.410628019323674} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1726, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.259830713272095, "TIME_S_1KI": 5.944281989149533, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1045.9503919887543, "W": 74.94, "J_1KI": 605.9967508625459, "W_1KI": 43.418308227114714, "W_D": 39.623, "J_D": 553.0249850783348, "W_D_1KI": 22.956546929316335, "J_D_1KI": 13.300432751631712} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output index bf02e51..365d68e 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.6085023880004883} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.608109712600708} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 508, 930, ..., 4999014, - 4999519, 5000000]), - col_indices=tensor([ 33, 44, 68, ..., 9921, 9984, 9990]), - values=tensor([0.7535, 0.2308, 0.9086, ..., 0.5781, 0.9835, 0.5048]), +tensor(crow_indices=tensor([ 0, 492, 1019, ..., 4999012, + 4999504, 5000000]), + col_indices=tensor([ 5, 7, 101, ..., 9970, 9981, 9995]), + values=tensor([0.7093, 0.2660, 0.4595, ..., 0.3475, 0.1260, 0.7746]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1644, 0.2567, 0.4067, ..., 0.0618, 0.3860, 0.0437]) +tensor([0.0228, 0.9901, 0.9071, ..., 0.5110, 0.0509, 0.6618]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 0.6085023880004883 seconds +Time: 0.608109712600708 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1725', '-ss', '10000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.28298306465149} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1726', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.259830713272095} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 453, 934, ..., 4998993, - 4999474, 5000000]), - col_indices=tensor([ 76, 82, 85, ..., 9960, 9963, 9989]), - values=tensor([0.2757, 0.2788, 0.5904, ..., 0.0782, 0.3342, 0.9799]), +tensor(crow_indices=tensor([ 0, 494, 1017, ..., 4999008, + 4999500, 5000000]), + col_indices=tensor([ 10, 13, 31, ..., 9910, 9981, 9994]), + values=tensor([0.9741, 0.2683, 0.9596, ..., 0.6425, 0.6809, 0.5687]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0884, 0.4732, 0.8375, ..., 0.9901, 0.5525, 0.7748]) +tensor([0.4952, 0.1609, 0.9854, ..., 0.4391, 0.0150, 0.6256]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.28298306465149 seconds +Time: 10.259830713272095 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 453, 934, ..., 4998993, - 4999474, 5000000]), - col_indices=tensor([ 76, 82, 85, ..., 9960, 9963, 9989]), - values=tensor([0.2757, 0.2788, 0.5904, ..., 0.0782, 0.3342, 0.9799]), +tensor(crow_indices=tensor([ 0, 494, 1017, ..., 4999008, + 4999500, 5000000]), + col_indices=tensor([ 10, 13, 31, ..., 9910, 9981, 9994]), + values=tensor([0.9741, 0.2683, 0.9596, ..., 0.6425, 0.6809, 0.5687]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0884, 0.4732, 0.8375, ..., 0.9901, 0.5525, 0.7748]) +tensor([0.4952, 0.1609, 0.9854, ..., 0.4391, 0.0150, 0.6256]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.28298306465149 seconds +Time: 10.259830713272095 seconds -[40.86, 38.91, 38.99, 39.35, 38.92, 38.87, 39.32, 39.52, 39.41, 41.56] -[75.33] -13.936894178390503 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1725, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.28298306465149, 'TIME_S_1KI': 5.961149602696516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.8662384581567, 'W': 75.33} -[40.86, 38.91, 38.99, 39.35, 38.92, 38.87, 39.32, 39.52, 39.41, 41.56, 39.65, 38.96, 39.21, 38.88, 39.0, 40.01, 39.33, 39.71, 39.42, 39.31] -708.5 -35.425 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1725, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.28298306465149, 'TIME_S_1KI': 5.961149602696516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.8662384581567, 'W': 75.33, 'J_1KI': 608.6181092511052, 'W_1KI': 43.66956521739131, 'W_D': 39.905, 'J_D': 556.151762188673, 'W_D_1KI': 23.133333333333336, 'J_D_1KI': 13.410628019323674} +[39.52, 39.03, 40.16, 38.88, 39.0, 38.96, 38.96, 39.27, 39.19, 39.27] +[74.94] +13.957170963287354 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1726, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.259830713272095, 'TIME_S_1KI': 5.944281989149533, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1045.9503919887543, 'W': 74.94} +[39.52, 39.03, 40.16, 38.88, 39.0, 38.96, 38.96, 39.27, 39.19, 39.27, 40.28, 39.41, 39.21, 39.32, 38.99, 38.96, 38.82, 40.16, 38.91, 39.15] +706.34 +35.317 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1726, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.259830713272095, 'TIME_S_1KI': 5.944281989149533, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1045.9503919887543, 'W': 74.94, 'J_1KI': 605.9967508625459, 'W_1KI': 43.418308227114714, 'W_D': 39.623, 'J_D': 553.0249850783348, 'W_D_1KI': 22.956546929316335, 'J_D_1KI': 13.300432751631712} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json index 02b08b6..e2a1c88 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 700, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.428780555725098, "TIME_S_1KI": 14.89825793675014, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1164.5532444572448, "W": 76.66, "J_1KI": 1663.6474920817784, "W_1KI": 109.5142857142857, "W_D": 40.888999999999996, "J_D": 621.1507645788192, "W_D_1KI": 58.412857142857135, "J_D_1KI": 83.44693877551019} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 699, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.388232707977295, "TIME_S_1KI": 14.861563244602712, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1164.917454071045, "W": 77.04, "J_1KI": 1666.5485752089344, "W_1KI": 110.21459227467811, "W_D": 41.29950000000001, "J_D": 624.4873882970811, "W_D_1KI": 59.08369098712448, "J_D_1KI": 84.52602430203787} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output index f0881c0..6ddcbaf 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 1.498870611190796} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 1.5013470649719238} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 987, 1979, ..., 9997999, - 9999011, 10000000]), - col_indices=tensor([ 3, 7, 20, ..., 9954, 9962, 9986]), - values=tensor([0.9369, 0.1464, 0.7342, ..., 0.7208, 0.8895, 0.6454]), +tensor(crow_indices=tensor([ 0, 1032, 2050, ..., 9997979, + 9998966, 10000000]), + col_indices=tensor([ 2, 6, 9, ..., 9983, 9984, 9996]), + values=tensor([0.6315, 0.3704, 0.1854, ..., 0.7561, 0.4530, 0.9211]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7880, 0.5272, 0.7128, ..., 0.1762, 0.3407, 0.4321]) +tensor([0.8676, 0.2588, 0.7478, ..., 0.7227, 0.1480, 0.9741]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 1.498870611190796 seconds +Time: 1.5013470649719238 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '700', '-ss', '10000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.428780555725098} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '699', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.388232707977295} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 995, 2027, ..., 9997983, - 9998981, 10000000]), - col_indices=tensor([ 16, 21, 33, ..., 9977, 9983, 9988]), - values=tensor([0.3684, 0.6722, 0.7880, ..., 0.5048, 0.0966, 0.9792]), +tensor(crow_indices=tensor([ 0, 1042, 2068, ..., 9998016, + 9999002, 10000000]), + col_indices=tensor([ 11, 14, 17, ..., 9959, 9962, 9989]), + values=tensor([0.7592, 0.3608, 0.3582, ..., 0.3496, 0.2524, 0.3186]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4422, 0.8800, 0.2165, ..., 0.4558, 0.6103, 0.1393]) +tensor([0.2502, 0.4370, 0.1745, ..., 0.0865, 0.5324, 0.5450]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.428780555725098 seconds +Time: 10.388232707977295 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 995, 2027, ..., 9997983, - 9998981, 10000000]), - col_indices=tensor([ 16, 21, 33, ..., 9977, 9983, 9988]), - values=tensor([0.3684, 0.6722, 0.7880, ..., 0.5048, 0.0966, 0.9792]), +tensor(crow_indices=tensor([ 0, 1042, 2068, ..., 9998016, + 9999002, 10000000]), + col_indices=tensor([ 11, 14, 17, ..., 9959, 9962, 9989]), + values=tensor([0.7592, 0.3608, 0.3582, ..., 0.3496, 0.2524, 0.3186]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4422, 0.8800, 0.2165, ..., 0.4558, 0.6103, 0.1393]) +tensor([0.2502, 0.4370, 0.1745, ..., 0.0865, 0.5324, 0.5450]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.428780555725098 seconds +Time: 10.388232707977295 seconds -[40.18, 38.97, 39.04, 39.1, 38.99, 38.9, 38.95, 39.03, 44.22, 39.22] -[76.66] -15.191145896911621 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 700, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.428780555725098, 'TIME_S_1KI': 14.89825793675014, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1164.5532444572448, 'W': 76.66} -[40.18, 38.97, 39.04, 39.1, 38.99, 38.9, 38.95, 39.03, 44.22, 39.22, 40.59, 39.18, 39.25, 44.66, 38.99, 39.16, 38.92, 38.8, 39.17, 40.19] -715.42 -35.771 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 700, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.428780555725098, 'TIME_S_1KI': 14.89825793675014, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1164.5532444572448, 'W': 76.66, 'J_1KI': 1663.6474920817784, 'W_1KI': 109.5142857142857, 'W_D': 40.888999999999996, 'J_D': 621.1507645788192, 'W_D_1KI': 58.412857142857135, 'J_D_1KI': 83.44693877551019} +[39.54, 44.4, 39.76, 39.67, 39.33, 39.67, 39.46, 40.09, 39.15, 38.9] +[77.04] +15.120943069458008 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 699, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.388232707977295, 'TIME_S_1KI': 14.861563244602712, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1164.917454071045, 'W': 77.04} +[39.54, 44.4, 39.76, 39.67, 39.33, 39.67, 39.46, 40.09, 39.15, 38.9, 39.79, 39.54, 39.0, 39.44, 39.38, 39.32, 39.61, 38.94, 39.49, 38.89] +714.81 +35.7405 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 699, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.388232707977295, 'TIME_S_1KI': 14.861563244602712, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1164.917454071045, 'W': 77.04, 'J_1KI': 1666.5485752089344, 'W_1KI': 110.21459227467811, 'W_D': 41.29950000000001, 'J_D': 624.4873882970811, 'W_D_1KI': 59.08369098712448, 'J_D_1KI': 84.52602430203787} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.json index a0b1b52..f376b37 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 343, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.229193925857544, "TIME_S_1KI": 29.822722815911206, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1277.0280195808411, "W": 76.08, "J_1KI": 3723.1137597109073, "W_1KI": 221.8075801749271, "W_D": 40.66575, "J_D": 682.5880939441324, "W_D_1KI": 118.55903790087464, "J_D_1KI": 345.65317172266657} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 347, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.493296146392822, "TIME_S_1KI": 30.24004653139142, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1291.474730873108, "W": 76.2, "J_1KI": 3721.8291955997347, "W_1KI": 219.59654178674353, "W_D": 39.95825, "J_D": 677.2318919279576, "W_D_1KI": 115.15345821325649, "J_D_1KI": 331.8543464358977} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.output index 9529016..2154422 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 3.0540103912353516} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 3.020411968231201} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2046, 4073, ..., 19996013, - 19997969, 20000000]), - col_indices=tensor([ 3, 8, 17, ..., 9981, 9984, 9987]), - values=tensor([0.5017, 0.4094, 0.1287, ..., 0.4741, 0.2195, 0.3916]), +tensor(crow_indices=tensor([ 0, 2001, 4067, ..., 19995976, + 19997958, 20000000]), + col_indices=tensor([ 14, 17, 24, ..., 9980, 9989, 9991]), + values=tensor([0.6447, 0.5588, 0.5080, ..., 0.9506, 0.2098, 0.9501]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.6824, 0.3340, 0.1820, ..., 0.2779, 0.3641, 0.6445]) +tensor([0.6065, 0.5137, 0.6769, ..., 0.0627, 0.1758, 0.0050]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 3.0540103912353516 seconds +Time: 3.020411968231201 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '343', '-ss', '10000', '-sd', '0.2', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.229193925857544} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '347', '-ss', '10000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.493296146392822} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1942, 3919, ..., 19996087, - 19998013, 20000000]), - col_indices=tensor([ 9, 10, 17, ..., 9985, 9988, 9989]), - values=tensor([0.3594, 0.3340, 0.0020, ..., 0.8034, 0.9201, 0.1838]), +tensor(crow_indices=tensor([ 0, 2004, 4078, ..., 19996056, + 19998039, 20000000]), + col_indices=tensor([ 0, 5, 15, ..., 9986, 9987, 9997]), + values=tensor([0.9556, 0.4662, 0.8624, ..., 0.4531, 0.2100, 0.2819]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.0654, 0.1078, 0.4601, ..., 0.8409, 0.3729, 0.1721]) +tensor([0.7299, 0.4019, 0.2401, ..., 0.0700, 0.4893, 0.3168]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.229193925857544 seconds +Time: 10.493296146392822 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1942, 3919, ..., 19996087, - 19998013, 20000000]), - col_indices=tensor([ 9, 10, 17, ..., 9985, 9988, 9989]), - values=tensor([0.3594, 0.3340, 0.0020, ..., 0.8034, 0.9201, 0.1838]), +tensor(crow_indices=tensor([ 0, 2004, 4078, ..., 19996056, + 19998039, 20000000]), + col_indices=tensor([ 0, 5, 15, ..., 9986, 9987, 9997]), + values=tensor([0.9556, 0.4662, 0.8624, ..., 0.4531, 0.2100, 0.2819]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.0654, 0.1078, 0.4601, ..., 0.8409, 0.3729, 0.1721]) +tensor([0.7299, 0.4019, 0.2401, ..., 0.0700, 0.4893, 0.3168]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.229193925857544 seconds +Time: 10.493296146392822 seconds -[39.53, 39.95, 39.42, 39.3, 39.16, 38.94, 39.07, 38.8, 39.46, 38.91] -[76.08] -16.78533148765564 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 343, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.229193925857544, 'TIME_S_1KI': 29.822722815911206, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1277.0280195808411, 'W': 76.08} -[39.53, 39.95, 39.42, 39.3, 39.16, 38.94, 39.07, 38.8, 39.46, 38.91, 39.85, 38.96, 39.07, 39.05, 39.38, 38.91, 39.06, 39.15, 39.29, 44.34] -708.285 -35.414249999999996 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 343, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.229193925857544, 'TIME_S_1KI': 29.822722815911206, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1277.0280195808411, 'W': 76.08, 'J_1KI': 3723.1137597109073, 'W_1KI': 221.8075801749271, 'W_D': 40.66575, 'J_D': 682.5880939441324, 'W_D_1KI': 118.55903790087464, 'J_D_1KI': 345.65317172266657} +[40.29, 39.04, 38.94, 40.03, 39.28, 39.25, 38.97, 39.24, 43.24, 53.53] +[76.2] +16.948487281799316 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 347, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.493296146392822, 'TIME_S_1KI': 30.24004653139142, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1291.474730873108, 'W': 76.2} +[40.29, 39.04, 38.94, 40.03, 39.28, 39.25, 38.97, 39.24, 43.24, 53.53, 39.99, 41.6, 41.99, 38.96, 39.44, 39.27, 39.32, 39.27, 40.66, 38.86] +724.835 +36.24175 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 347, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.493296146392822, 'TIME_S_1KI': 30.24004653139142, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1291.474730873108, 'W': 76.2, 'J_1KI': 3721.8291955997347, 'W_1KI': 219.59654178674353, 'W_D': 39.95825, 'J_D': 677.2318919279576, 'W_D_1KI': 115.15345821325649, 'J_D_1KI': 331.8543464358977} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.json index a1f4a32..6cb2bfc 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.json 
+++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 233, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.295273542404175, "TIME_S_1KI": 44.18572335795783, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1458.0208019256593, "W": 76.48, "J_1KI": 6257.6000082646315, "W_1KI": 328.2403433476395, "W_D": 40.760999999999996, "J_D": 777.0709454405307, "W_D_1KI": 174.9399141630901, "J_D_1KI": 750.8150822450219} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 232, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.506343603134155, "TIME_S_1KI": 45.28596380661274, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1468.0002479553223, "W": 76.65, "J_1KI": 6327.587275669493, "W_1KI": 330.3879310344828, "W_D": 40.9075, "J_D": 783.4601453781128, "W_D_1KI": 176.32543103448276, "J_D_1KI": 760.0234096313912} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.output index d790296..2622466 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 4.5049638748168945} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 4.508720636367798} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3029, 6057, ..., 29993789, - 29996881, 30000000]), - col_indices=tensor([ 0, 1, 2, ..., 9988, 9991, 9998]), - values=tensor([0.8599, 0.6300, 0.6697, ..., 0.0214, 0.0757, 0.9206]), +tensor(crow_indices=tensor([ 0, 3017, 5967, ..., 29993882, + 29996942, 30000000]), + col_indices=tensor([ 5, 6, 9, ..., 9995, 9996, 9997]), + values=tensor([0.9031, 0.7519, 0.5389, ..., 0.9658, 0.5505, 0.5403]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.5404, 0.3446, 0.4295, ..., 0.2969, 0.5137, 0.1316]) +tensor([0.1266, 0.8827, 0.7696, ..., 0.4615, 0.0898, 0.8625]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 4.5049638748168945 seconds +Time: 4.508720636367798 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '233', '-ss', '10000', '-sd', '0.3', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.295273542404175} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '232', '-ss', '10000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.506343603134155} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2988, 6007, ..., 29993964, - 29997046, 30000000]), - col_indices=tensor([ 9, 16, 24, ..., 9996, 9997, 9999]), - values=tensor([0.2433, 0.7720, 0.0178, ..., 0.3342, 0.8303, 0.6867]), +tensor(crow_indices=tensor([ 0, 3001, 5941, ..., 29994005, + 29996945, 30000000]), + col_indices=tensor([ 0, 12, 13, ..., 9992, 9998, 9999]), + values=tensor([0.9006, 0.0187, 0.2224, ..., 0.0969, 0.3958, 0.1522]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.4151, 0.6857, 0.4615, ..., 0.0665, 0.4824, 0.1217]) +tensor([0.4529, 0.0055, 0.3353, ..., 0.7911, 0.9251, 0.0376]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 10.295273542404175 seconds +Time: 10.506343603134155 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2988, 6007, ..., 29993964, - 29997046, 30000000]), - col_indices=tensor([ 9, 16, 24, ..., 9996, 9997, 9999]), - values=tensor([0.2433, 0.7720, 0.0178, ..., 0.3342, 0.8303, 0.6867]), +tensor(crow_indices=tensor([ 0, 3001, 5941, ..., 29994005, + 29996945, 30000000]), + col_indices=tensor([ 0, 12, 13, ..., 9992, 9998, 9999]), + values=tensor([0.9006, 0.0187, 0.2224, ..., 0.0969, 0.3958, 0.1522]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.4151, 0.6857, 0.4615, ..., 0.0665, 0.4824, 0.1217]) +tensor([0.4529, 0.0055, 0.3353, ..., 0.7911, 0.9251, 0.0376]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 10.295273542404175 seconds +Time: 10.506343603134155 seconds -[40.6, 39.0, 39.72, 40.47, 39.9, 39.66, 39.13, 38.97, 39.1, 39.62] -[76.48] -19.064079523086548 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 233, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.295273542404175, 'TIME_S_1KI': 44.18572335795783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1458.0208019256593, 'W': 76.48} -[40.6, 39.0, 39.72, 40.47, 39.9, 39.66, 39.13, 38.97, 39.1, 39.62, 40.89, 39.12, 39.47, 39.11, 38.93, 39.6, 38.9, 43.73, 39.55, 38.93] -714.3800000000001 -35.71900000000001 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 233, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.295273542404175, 'TIME_S_1KI': 44.18572335795783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1458.0208019256593, 'W': 76.48, 'J_1KI': 6257.6000082646315, 'W_1KI': 328.2403433476395, 'W_D': 40.760999999999996, 'J_D': 777.0709454405307, 'W_D_1KI': 174.9399141630901, 'J_D_1KI': 750.8150822450219} +[42.42, 39.55, 39.56, 39.28, 39.37, 39.05, 39.32, 38.9, 38.94, 44.02] +[76.65] +19.151992797851562 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 232, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.506343603134155, 'TIME_S_1KI': 45.28596380661274, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1468.0002479553223, 'W': 76.65} +[42.42, 39.55, 39.56, 39.28, 39.37, 39.05, 39.32, 38.9, 38.94, 44.02, 44.66, 39.3, 39.73, 39.13, 39.41, 39.83, 39.61, 39.68, 39.17, 38.94] +714.8500000000001 +35.74250000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 232, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.506343603134155, 'TIME_S_1KI': 45.28596380661274, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1468.0002479553223, 'W': 76.65, 'J_1KI': 6327.587275669493, 'W_1KI': 330.3879310344828, 'W_D': 40.9075, 'J_D': 783.4601453781128, 'W_D_1KI': 176.32543103448276, 'J_D_1KI': 760.0234096313912} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..bd3a181 --- /dev/null +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 176, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.287486791610718, "TIME_S_1KI": 58.45162949778817, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1621.081920976639, "W": 75.64, "J_1KI": 9210.692732821812, "W_1KI": 429.77272727272725, "W_D": 39.956, "J_D": 856.3187365751268, "W_D_1KI": 227.02272727272728, "J_D_1KI": 1289.9018595041323} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..898ee39 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 5.9488725662231445} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3981, 8065, ..., 39992085, + 39996040, 40000000]), + col_indices=tensor([ 0, 1, 2, ..., 9994, 9997, 9999]), + values=tensor([0.2873, 0.1638, 0.8406, ..., 0.3659, 0.7817, 0.0229]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.4990, 0.3865, 0.7076, ..., 0.9531, 0.9187, 0.3072]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 5.9488725662231445 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '176', '-ss', '10000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.287486791610718} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4082, 8140, ..., 39991887, + 39995972, 40000000]), + col_indices=tensor([ 0, 1, 6, ..., 9993, 9996, 9997]), + values=tensor([0.9803, 0.3049, 0.7804, ..., 0.2695, 0.0229, 0.9383]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.4744, 0.9806, 0.3276, ..., 0.7259, 0.8906, 0.4456]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.287486791610718 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4082, 8140, ..., 39991887, + 39995972, 40000000]), + col_indices=tensor([ 0, 1, 6, ..., 9993, 9996, 9997]), + values=tensor([0.9803, 0.3049, 0.7804, ..., 0.2695, 0.0229, 0.9383]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.4744, 0.9806, 0.3276, ..., 0.7259, 0.8906, 0.4456]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.287486791610718 seconds + +[39.92, 39.12, 45.04, 39.04, 40.25, 39.54, 39.53, 39.2, 39.47, 39.49] +[75.64] +21.431543111801147 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 176, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.287486791610718, 'TIME_S_1KI': 58.45162949778817, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1621.081920976639, 'W': 75.64} +[39.92, 39.12, 45.04, 39.04, 40.25, 39.54, 39.53, 39.2, 39.47, 39.49, 40.02, 39.33, 39.41, 39.17, 38.95, 39.09, 39.39, 38.98, 39.01, 38.89] +713.68 +35.684 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 176, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.287486791610718, 'TIME_S_1KI': 58.45162949778817, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1621.081920976639, 'W': 75.64, 'J_1KI': 9210.692732821812, 'W_1KI': 429.77272727272725, 'W_D': 39.956, 'J_D': 856.3187365751268, 'W_D_1KI': 227.02272727272728, 'J_D_1KI': 1289.9018595041323} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..c5c872d --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 143, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.688217878341675, "TIME_S_1KI": 74.74278236602571, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2030.4548524188997, "W": 75.51, "J_1KI": 14198.984981950349, "W_1KI": 528.0419580419581, "W_D": 40.12375000000001, "J_D": 1078.9228298866751, "W_D_1KI": 280.5856643356644, "J_D_1KI": 1962.1375128368138} diff --git 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..495a024 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 7.325762033462524} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4990, 9987, ..., 49990040, + 49995005, 50000000]), + col_indices=tensor([ 0, 1, 2, ..., 9991, 9992, 9996]), + values=tensor([0.4301, 0.9179, 0.0090, ..., 0.3709, 0.0037, 0.9527]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.4939, 0.2110, 0.5925, ..., 0.4889, 0.3078, 0.7014]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 7.325762033462524 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '143', '-ss', '10000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.688217878341675} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5014, 10078, ..., 49989929, + 49994973, 50000000]), + col_indices=tensor([ 1, 2, 4, ..., 9993, 9995, 9998]), + values=tensor([0.6119, 0.3838, 0.8264, ..., 0.1450, 0.7688, 0.5541]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.4407, 0.3516, 0.4034, ..., 0.4996, 0.2834, 0.2999]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.688217878341675 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5014, 10078, ..., 49989929, + 49994973, 50000000]), + col_indices=tensor([ 1, 2, 4, ..., 9993, 9995, 9998]), + values=tensor([0.6119, 0.3838, 0.8264, ..., 0.1450, 0.7688, 0.5541]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.4407, 0.3516, 0.4034, ..., 0.4996, 0.2834, 0.2999]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.688217878341675 seconds + +[39.74, 39.17, 39.93, 38.97, 39.15, 39.1, 39.19, 39.33, 39.49, 39.47] +[75.51] +26.889880180358887 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 143, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.688217878341675, 'TIME_S_1KI': 74.74278236602571, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2030.4548524188997, 'W': 75.51} +[39.74, 39.17, 39.93, 38.97, 39.15, 39.1, 39.19, 39.33, 39.49, 39.47, 39.67, 39.11, 39.41, 38.97, 39.33, 39.11, 39.98, 39.12, 39.44, 38.97] +707.7249999999999 +35.38625 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 143, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.688217878341675, 'TIME_S_1KI': 74.74278236602571, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2030.4548524188997, 'W': 75.51, 'J_1KI': 14198.984981950349, 'W_1KI': 528.0419580419581, 'W_D': 40.12375000000001, 'J_D': 1078.9228298866751, 'W_D_1KI': 280.5856643356644, 'J_D_1KI': 1962.1375128368138} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json index 9c3e1bc..f0b8d48 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 362205, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.420795440673828, "TIME_S_1KI": 0.02877043508696409, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 858.7538476467132, "W": 65.88, "J_1KI": 2.3709055580312617, "W_1KI": 0.18188594856503915, "W_D": 30.515249999999995, "J_D": 397.7700113752484, "W_D_1KI": 0.08424856089783408, "J_D_1KI": 0.00023259911071860987} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 363895, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.603893041610718, "TIME_S_1KI": 0.02913998005361634, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.6058946228027, "W": 65.88, "J_1KI": 2.351243887997369, "W_1KI": 0.18104123442201733, "W_D": 29.987000000000002, "J_D": 389.45133518600466, "W_D_1KI": 0.08240563898926888, "J_D_1KI": 0.00022645444149897327} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output index 3b7f750..0a09de2 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,1131 +1,373 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.01423192024230957} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 998, 999, 1000]), - col_indices=tensor([3651, 8143, 8284, 1201, 8802, 9084, 518, 1318, 7113, - 4198, 6659, 361, 3967, 2631, 475, 1422, 3709, 9745, - 1114, 731, 1484, 190, 4372, 6889, 5946, 9134, 7399, - 9315, 9547, 5191, 558, 5996, 7786, 36, 2608, 6971, - 3588, 9206, 3929, 9738, 5532, 8672, 3550, 556, 7458, - 8249, 9648, 4644, 9311, 9352, 38, 6820, 8314, 7776, - 4648, 2648, 7188, 7862, 9766, 7529, 130, 2138, 9531, - 8955, 7529, 5567, 4237, 5643, 2920, 8945, 2985, 4202, - 7221, 9523, 9145, 9414, 1727, 482, 6337, 9385, 8259, - 3509, 9326, 4737, 9125, 4925, 237, 7538, 8759, 1847, - 9447, 2922, 470, 1647, 9673, 9620, 4380, 489, 1206, - 7536, 7237, 8859, 8031, 2617, 1541, 3066, 2051, 3249, - 799, 2618, 3289, 7373, 6080, 4605, 5686, 1742, 849, - 6896, 7211, 3112, 7256, 3784, 1106, 2254, 6134, 5896, - 1197, 668, 4080, 9988, 7197, 5551, 2494, 6286, 1000, - 4996, 8332, 1100, 2758, 798, 3892, 3478, 2185, 7918, - 2934, 778, 141, 2782, 3364, 3405, 1218, 2274, 7259, - 5489, 6646, 2341, 6847, 7023, 9242, 3864, 2758, 6683, - 8891, 1342, 9608, 1869, 4064, 5757, 8557, 505, 8921, - 9349, 4634, 352, 1820, 5776, 3014, 8861, 7895, 3485, - 5721, 197, 176, 117, 3954, 4225, 7039, 5248, 2463, - 2095, 11, 1760, 9171, 5730, 5601, 9323, 4622, 2426, - 3559, 8535, 6749, 1931, 7698, 9928, 8891, 5123, 8390, - 8697, 6602, 5844, 2162, 8439, 3522, 3358, 3931, 2301, - 4057, 5775, 5263, 8135, 9212, 9531, 3252, 7052, 6494, - 758, 7356, 4215, 9693, 8845, 7141, 2982, 531, 2113, - 4038, 615, 6178, 4213, 9543, 1003, 8259, 9546, 7414, - 7919, 4200, 52, 4324, 4762, 7129, 7642, 6073, 4560, - 2253, 6444, 669, 7596, 1750, 485, 1814, 6910, 8509, - 4617, 1203, 1628, 8037, 4709, 5873, 8080, 203, 987, - 8514, 9228, 3095, 6122, 3762, 801, 3892, 4470, 2052, - 5864, 5657, 6101, 8630, 7401, 6959, 6944, 6101, 4154, - 3180, 3057, 9407, 723, 3850, 364, 3889, 6447, 9732, - 3930, 7935, 4825, 3256, 5086, 2856, 8438, 4603, 8740, - 9625, 3540, 9399, 5097, 4034, 5917, 531, 7347, 5038, - 6274, 647, 1800, 1181, 8789, 5526, 6301, 3891, 6331, - 1478, 6263, 4830, 6993, 7924, 6574, 5359, 4883, 4837, - 5042, 947, 5442, 5733, 3374, 4555, 3375, 3285, 5832, - 9049, 3233, 166, 4580, 1532, 151, 2765, 7817, 8639, - 4125, 8484, 1732, 5071, 6465, 5679, 8488, 8603, 3469, - 9261, 9776, 846, 5701, 7438, 7977, 8568, 2276, 939, - 9123, 3558, 5796, 1651, 3499, 7225, 1382, 979, 1521, - 7794, 7875, 9433, 8620, 9143, 8536, 3539, 3123, 9572, - 8887, 6698, 648, 1852, 3737, 6106, 5365, 7996, 7696, - 6420, 4967, 723, 9089, 9304, 2947, 4078, 3384, 510, - 6462, 9968, 9761, 
6304, 6398, 8470, 7323, 4206, 7644, - 801, 8103, 2105, 1747, 4824, 1219, 9728, 7888, 9670, - 2176, 6904, 3033, 9611, 7207, 9012, 1271, 657, 371, - 9123, 6710, 4619, 7211, 2155, 4610, 6025, 285, 9331, - 4113, 554, 5803, 1639, 6961, 1648, 415, 906, 7866, - 1924, 3252, 3636, 5550, 6715, 7435, 8064, 1106, 274, - 3628, 1664, 6161, 5478, 8830, 6266, 1459, 7707, 6348, - 9645, 9206, 4977, 7028, 6265, 4870, 2230, 9839, 9967, - 347, 6592, 8862, 5370, 9685, 8059, 7554, 1703, 4535, - 4005, 4350, 4668, 1143, 4889, 1870, 2588, 240, 675, - 71, 5834, 2880, 4006, 8645, 5427, 9818, 4307, 738, - 3961, 7964, 5288, 9417, 1982, 329, 8068, 4072, 5879, - 6097, 8617, 7503, 1410, 1823, 7077, 6135, 2488, 3328, - 274, 921, 2060, 2645, 3571, 1043, 6316, 8871, 8412, - 5119, 7069, 2665, 85, 34, 8574, 2590, 637, 2202, - 6952, 7619, 3016, 9668, 7862, 6958, 6039, 6333, 7215, - 5464, 8477, 6548, 5285, 7311, 2003, 1410, 2913, 1526, - 9521, 8698, 6223, 3986, 3657, 6207, 7801, 5709, 3647, - 3252, 1035, 9604, 1181, 7037, 159, 4177, 5759, 4872, - 9240, 5252, 3872, 9571, 6044, 7719, 7565, 7149, 3588, - 6562, 3557, 6068, 2546, 6115, 1355, 6668, 4906, 8986, - 6500, 7496, 630, 8871, 9721, 5188, 8498, 7741, 5019, - 628, 8555, 587, 5660, 9280, 7630, 6539, 5875, 2329, - 5734, 8187, 7487, 1969, 563, 9588, 8397, 4526, 9056, - 9279, 1452, 7145, 4509, 8675, 9423, 4903, 6061, 3078, - 9854, 658, 2800, 8777, 3256, 6502, 655, 4202, 2340, - 7264, 2793, 9667, 7007, 1984, 2597, 8329, 973, 2964, - 5928, 5694, 7, 458, 1678, 8136, 7463, 5577, 1552, - 8802, 7791, 3267, 9658, 7328, 8612, 1072, 8025, 9476, - 1953, 9826, 5398, 8217, 3320, 6354, 9186, 4280, 429, - 8543, 1826, 2844, 6067, 725, 1315, 8628, 7368, 7864, - 4348, 880, 8731, 1212, 8814, 5248, 5301, 5948, 6407, - 5261, 1408, 3246, 3306, 9042, 2193, 3541, 9568, 5425, - 6999, 7594, 1750, 1807, 6659, 2171, 9504, 1409, 9150, - 8663, 1696, 1010, 8263, 1112, 7827, 8063, 2910, 334, - 16, 3900, 2459, 926, 9369, 1805, 4344, 9836, 4115, - 1369, 4988, 1627, 5156, 2353, 7777, 5894, 8446, 4883, - 2998, 1145, 6802, 3712, 7048, 8242, 4573, 1952, 9161, - 2948, 1393, 5629, 6077, 6217, 7741, 5490, 4760, 4928, - 1111, 4030, 3358, 6226, 9638, 8878, 3960, 3811, 7376, - 3518, 4285, 2019, 3229, 8382, 4467, 8984, 5949, 7869, - 9506, 3931, 1371, 3956, 1352, 8445, 4708, 282, 7584, - 1810, 9323, 6419, 249, 2071, 7240, 9768, 3331, 8772, - 8183, 1613, 8842, 2926, 7357, 3452, 3763, 4133, 7805, - 1885, 7240, 2702, 9042, 9269, 3584, 5278, 8194, 3343, - 7179, 8945, 3898, 5523, 3442, 7945, 2587, 5458, 9537, - 6622, 2944, 1110, 771, 236, 8935, 9666, 139, 7612, - 4134, 3984, 5023, 4683, 9275, 111, 7428, 7669, 7747, - 755, 2779, 3468, 8498, 3541, 7482, 944, 4464, 8516, - 463, 5427, 8364, 7748, 7401, 6892, 9346, 1909, 9249, - 86, 7583, 3063, 6943, 626, 6091, 2519, 1737, 8751, - 9124, 1009, 3316, 5731, 5829, 4431, 2023, 2450, 6131, - 8424, 4861, 7933, 8078, 1779, 8877, 9139, 4928, 6951, - 3919, 3482, 9094, 3852, 5471, 7856, 3392, 1994, 9752, - 3756, 1817, 2591, 2817, 8894, 629, 9298, 5223, 8822, - 1201, 4189, 9730, 9744, 8034, 9431, 7558, 9291, 4944, - 6796, 6751, 6082, 5976, 5338, 8944, 6438, 5985, 1828, - 2836, 465, 3960, 4405, 5510, 5454, 8633, 6603, 9533, - 8571, 4583, 7144, 6663, 4393, 2927, 2954, 8837, 5885, - 2961, 5939, 1827, 723, 7500, 2196, 8973, 455, 8932, - 7063, 3686, 5260, 2893, 3247, 6085, 381, 1076, 6381, - 5199, 7305, 6086, 9206, 3702, 9000, 1702, 2385, 6122, - 7104, 8300, 3094, 3205, 8369, 2427, 9735, 4152, 6605, - 5060, 8896, 8622, 7660, 6402, 2450, 225, 677, 5702, - 9898, 1760, 5944, 2917, 4615, 3267, 4326, 273, 
948, - 9292, 3919, 1215, 2725, 3011, 184, 7139, 7428, 6341, - 6075]), - values=tensor([4.0893e-01, 7.4647e-01, 4.0914e-01, 3.5864e-01, - 4.8555e-01, 1.8092e-04, 3.0123e-01, 4.6002e-01, - 6.8513e-01, 7.7433e-01, 1.1833e-01, 7.7674e-01, - 2.1230e-02, 6.5005e-01, 2.5378e-03, 1.4380e-02, - 4.7469e-01, 7.3133e-01, 3.5139e-01, 9.5308e-01, - 8.5012e-01, 6.7265e-01, 3.0657e-01, 6.1900e-01, - 5.5589e-01, 9.4629e-01, 3.6042e-01, 3.1773e-02, - 5.5135e-01, 5.7262e-01, 8.6726e-01, 2.2990e-01, - 2.7172e-01, 8.1676e-01, 9.8110e-01, 6.1687e-02, - 9.1464e-01, 8.3395e-01, 5.0196e-02, 2.3382e-01, - 3.5840e-01, 6.2133e-01, 1.0935e-01, 1.9079e-01, - 9.0813e-01, 6.0670e-01, 9.4361e-01, 4.0716e-01, - 2.6226e-01, 2.9494e-01, 4.5161e-01, 1.5794e-02, - 5.5005e-02, 2.7524e-01, 8.4708e-02, 2.7124e-01, - 5.9039e-01, 4.4167e-01, 3.5090e-02, 7.8814e-02, - 3.3788e-01, 3.3405e-01, 9.6722e-01, 2.9590e-01, - 8.2668e-01, 3.8429e-01, 1.1459e-01, 7.2456e-01, - 6.3513e-01, 5.5357e-01, 7.5112e-02, 4.6214e-01, - 8.2042e-01, 8.8648e-01, 5.7232e-01, 5.0082e-01, - 7.2914e-01, 4.7976e-01, 1.9845e-01, 7.8635e-01, - 8.4682e-02, 8.8008e-01, 3.3425e-01, 5.1799e-01, - 4.9607e-01, 3.9452e-03, 5.5016e-02, 3.1020e-01, - 8.9128e-02, 4.3443e-01, 2.9563e-01, 7.3845e-01, - 7.2493e-01, 1.5083e-01, 7.4143e-01, 2.7680e-01, - 8.7421e-01, 5.4248e-02, 4.9964e-01, 8.0841e-01, - 8.4502e-01, 3.6483e-01, 8.3624e-02, 7.4983e-01, - 1.2633e-01, 9.9425e-02, 4.8657e-02, 4.9780e-01, - 4.6150e-01, 6.6866e-01, 7.5273e-01, 6.5112e-01, - 2.2094e-01, 9.3158e-02, 2.0877e-01, 1.4861e-01, - 3.6390e-01, 5.0828e-01, 1.5597e-01, 4.6034e-01, - 9.3777e-02, 6.9770e-01, 6.5803e-02, 9.5397e-01, - 3.0578e-01, 9.4048e-01, 5.9922e-01, 7.7196e-01, - 9.7704e-01, 2.2835e-01, 9.9383e-01, 8.6800e-01, - 8.8874e-01, 7.2989e-01, 4.9534e-01, 1.1077e-01, - 1.9545e-01, 9.8031e-01, 7.0316e-01, 1.0654e-01, - 4.6569e-01, 9.8063e-01, 6.9036e-01, 6.1855e-01, - 1.7249e-01, 1.4035e-01, 6.4122e-01, 4.2890e-01, - 4.4597e-01, 2.0114e-01, 6.2076e-01, 2.6296e-01, - 5.1906e-01, 4.6291e-02, 4.3527e-01, 8.6879e-01, - 2.7018e-02, 6.7597e-01, 9.9164e-01, 2.6552e-01, - 7.9984e-01, 7.7352e-01, 1.7076e-01, 4.2917e-01, - 1.6896e-01, 9.6141e-02, 1.5324e-01, 2.2314e-01, - 1.1977e-01, 9.3202e-01, 8.6227e-01, 7.1184e-01, - 4.1014e-01, 5.2121e-01, 4.3074e-01, 1.5812e-01, - 6.7409e-02, 7.7385e-01, 9.2572e-01, 3.7243e-01, - 7.7639e-01, 1.3124e-01, 9.6748e-01, 9.9585e-01, - 9.4879e-02, 4.2174e-01, 1.0964e-01, 7.7620e-01, - 4.2591e-01, 1.0445e-01, 4.8496e-01, 8.8067e-01, - 4.7605e-01, 4.0209e-02, 2.9210e-01, 9.7444e-01, - 2.6426e-01, 7.9845e-01, 8.6184e-01, 2.8088e-01, - 4.7154e-01, 7.8332e-01, 3.3323e-01, 8.9045e-01, - 5.3742e-01, 5.4121e-01, 3.5738e-01, 2.8552e-01, - 6.6960e-01, 7.9798e-02, 9.5883e-02, 6.4858e-01, - 6.1198e-01, 3.9265e-01, 6.7901e-01, 7.7712e-01, - 7.3831e-01, 3.8690e-01, 2.8814e-01, 1.5900e-01, - 7.4089e-01, 3.3823e-02, 6.0486e-02, 1.9784e-02, - 7.5396e-01, 6.7253e-01, 2.0684e-01, 3.3255e-01, - 5.8123e-01, 9.0516e-01, 8.4270e-02, 5.9737e-01, - 9.1544e-01, 3.8624e-01, 8.6974e-01, 5.9614e-01, - 3.4793e-01, 2.2719e-01, 5.0042e-01, 3.1822e-01, - 1.4557e-02, 3.8869e-02, 2.4391e-01, 2.0417e-01, - 6.8119e-01, 9.6452e-03, 8.2586e-01, 3.7726e-01, - 7.5797e-01, 7.3036e-01, 1.0288e-01, 5.9685e-01, - 4.1494e-01, 6.9899e-01, 3.6873e-01, 8.5351e-01, - 8.1078e-01, 8.1658e-01, 2.2052e-01, 1.4704e-01, - 1.1582e-01, 2.2602e-01, 9.1009e-01, 7.8758e-01, - 2.0662e-01, 9.7393e-01, 1.5186e-01, 2.0325e-01, - 5.2871e-01, 7.1085e-01, 7.7983e-01, 3.6386e-01, - 8.1438e-03, 2.8844e-01, 9.8254e-01, 9.2260e-01, - 9.3691e-01, 
4.4583e-01, 6.9386e-01, 7.4592e-01, - 8.5702e-01, 8.7501e-01, 8.6228e-01, 6.8437e-02, - 8.8003e-01, 5.9655e-02, 5.5984e-01, 5.3624e-01, - 5.1137e-01, 6.6329e-02, 4.5292e-01, 9.3699e-01, - 9.3817e-01, 9.4024e-01, 8.9176e-01, 1.0596e-01, - 8.7853e-01, 8.9555e-01, 9.5549e-01, 9.6768e-01, - 6.0145e-01, 5.1318e-01, 9.7239e-01, 8.2053e-01, - 1.5019e-01, 9.3464e-01, 7.3976e-01, 1.5624e-01, - 5.8975e-01, 6.6958e-01, 4.4599e-01, 6.0277e-02, - 8.8260e-01, 9.3177e-01, 8.0332e-01, 9.8789e-01, - 4.1076e-01, 2.3903e-01, 3.9403e-01, 8.9861e-01, - 6.0636e-01, 5.1990e-01, 8.5455e-01, 3.3246e-01, - 7.3782e-01, 1.4832e-01, 5.5070e-01, 4.8315e-01, - 4.3169e-01, 2.1798e-01, 1.4116e-02, 1.8134e-01, - 8.1250e-02, 3.4893e-01, 1.9401e-01, 9.6046e-01, - 3.8487e-01, 4.5091e-01, 4.4411e-01, 6.2029e-02, - 5.7398e-01, 8.6131e-01, 8.9893e-02, 1.7085e-01, - 8.5289e-01, 3.7000e-01, 2.3685e-01, 2.6378e-01, - 9.1867e-01, 6.1129e-01, 8.6874e-01, 1.9254e-01, - 8.3643e-02, 6.8821e-01, 2.7187e-01, 4.6686e-01, - 9.7703e-01, 5.0208e-01, 2.6366e-01, 6.2374e-01, - 2.3604e-01, 9.9467e-01, 8.0512e-01, 8.0824e-02, - 1.8891e-01, 1.0550e-01, 5.5150e-01, 1.2517e-01, - 2.7982e-01, 2.4216e-01, 4.1069e-01, 6.8340e-01, - 7.3819e-02, 9.1765e-01, 5.7174e-01, 9.9094e-01, - 6.6466e-01, 3.5731e-01, 1.8235e-01, 4.5610e-01, - 8.7234e-01, 5.0580e-01, 2.8747e-01, 8.1041e-01, - 4.7505e-01, 2.0576e-01, 8.9010e-01, 8.4393e-01, - 4.7564e-01, 9.1611e-01, 3.3871e-01, 5.8593e-01, - 9.1005e-01, 4.6075e-01, 2.2976e-01, 1.6047e-01, - 6.2572e-02, 8.8079e-01, 7.9591e-01, 3.7074e-02, - 2.5624e-01, 5.3032e-01, 3.1496e-01, 5.5436e-01, - 5.2931e-01, 9.5389e-01, 6.3221e-01, 1.7242e-01, - 8.3925e-01, 7.5080e-01, 8.9636e-01, 7.4764e-01, - 9.0250e-01, 8.2000e-01, 2.9637e-01, 6.3107e-01, - 5.1566e-01, 9.1943e-01, 4.8464e-01, 2.2422e-01, - 7.0865e-01, 3.7071e-03, 8.8154e-01, 8.4300e-01, - 5.9617e-01, 7.8834e-01, 9.3422e-02, 6.2256e-01, - 8.3740e-01, 8.4745e-01, 2.3007e-02, 5.3812e-01, - 2.3587e-01, 2.9263e-01, 8.1439e-01, 2.4432e-01, - 3.1496e-01, 6.8237e-01, 9.9900e-01, 5.1112e-01, - 6.1402e-02, 2.0358e-01, 6.3097e-01, 1.7750e-01, - 3.5024e-01, 3.0596e-02, 7.3322e-01, 8.2818e-01, - 6.9285e-01, 1.8523e-01, 8.6152e-01, 9.3401e-01, - 4.5793e-01, 4.1794e-01, 4.9425e-01, 6.8516e-01, - 6.5776e-01, 6.9317e-01, 3.2227e-02, 2.8983e-01, - 8.6778e-01, 8.3223e-01, 6.3141e-01, 4.1697e-01, - 1.5997e-01, 2.8398e-02, 3.6903e-01, 7.4846e-01, - 9.4236e-01, 6.7992e-01, 7.9570e-01, 7.0022e-01, - 3.9911e-01, 5.4460e-01, 5.3406e-01, 6.1707e-02, - 3.4846e-01, 7.8485e-03, 5.2423e-01, 1.8395e-01, - 1.2384e-02, 2.1221e-01, 9.8422e-01, 2.7750e-02, - 7.8006e-02, 8.5223e-01, 7.3279e-01, 7.5232e-01, - 9.8682e-02, 5.9891e-01, 7.2723e-01, 7.2170e-01, - 6.7640e-01, 3.1676e-01, 7.2952e-01, 8.8475e-01, - 1.0979e-01, 1.0927e-01, 3.1374e-01, 3.8162e-01, - 6.8310e-01, 8.9795e-01, 6.9303e-01, 2.0847e-01, - 7.0549e-01, 4.6013e-01, 1.0482e-02, 8.2480e-02, - 6.3437e-01, 2.2931e-01, 5.8909e-01, 1.5036e-01, - 9.5013e-02, 3.0604e-02, 3.4294e-01, 3.0982e-01, - 3.3888e-01, 7.3004e-01, 2.7535e-01, 4.6383e-01, - 3.7714e-01, 1.6771e-01, 2.8789e-01, 6.4774e-01, - 8.7569e-01, 5.8565e-01, 3.7890e-01, 1.7734e-01, - 8.5514e-01, 6.2505e-01, 2.0834e-01, 4.7834e-01, - 4.1297e-01, 1.9860e-01, 2.9647e-02, 4.3259e-01, - 2.8413e-01, 5.6185e-01, 8.2575e-02, 6.8264e-02, - 7.9173e-01, 9.4058e-01, 9.9665e-01, 1.5687e-01, - 9.0528e-01, 8.6377e-01, 6.8574e-01, 7.6422e-02, - 8.4351e-01, 4.1954e-01, 5.1337e-02, 9.5963e-02, - 7.5659e-01, 8.6958e-01, 7.4293e-02, 8.5173e-01, - 2.3423e-01, 3.5272e-03, 4.5855e-01, 2.5929e-01, - 1.8317e-01, 
3.9174e-01, 1.9440e-01, 7.1044e-01, - 7.0894e-01, 8.5999e-02, 8.3721e-01, 6.8479e-01, - 6.4997e-01, 5.8657e-01, 5.1681e-01, 3.9751e-01, - 4.2349e-01, 2.1141e-01, 3.0925e-01, 3.4787e-02, - 3.7440e-01, 3.1224e-01, 2.9507e-01, 4.5275e-01, - 4.4228e-01, 9.2088e-01, 7.0840e-01, 8.1934e-02, - 8.9574e-01, 2.9883e-01, 9.7423e-01, 8.2101e-01, - 6.5546e-02, 3.5597e-01, 7.5741e-01, 3.4616e-01, - 8.3611e-01, 8.8683e-01, 2.5757e-01, 6.8378e-01, - 7.9689e-01, 9.6721e-01, 4.2741e-01, 3.3442e-02, - 8.7813e-01, 6.5055e-01, 1.7699e-01, 9.4707e-01, - 9.8286e-01, 5.1212e-01, 7.7437e-01, 5.3901e-01, - 8.9561e-01, 1.0947e-02, 3.2047e-01, 7.3343e-01, - 2.7138e-01, 9.5091e-01, 2.0941e-01, 5.3765e-01, - 6.1969e-01, 9.5062e-01, 9.8183e-01, 9.1239e-01, - 4.0820e-02, 7.5090e-01, 5.9426e-01, 3.6137e-01, - 5.3664e-01, 2.4273e-01, 7.0358e-01, 8.6205e-01, - 9.1349e-01, 3.4262e-01, 4.5848e-01, 3.6446e-01, - 6.4786e-01, 8.8286e-01, 4.6119e-01, 2.3044e-01, - 3.5263e-01, 6.8361e-01, 4.7663e-01, 5.6685e-03, - 8.3359e-01, 3.5310e-01, 8.9949e-02, 5.1295e-01, - 8.6279e-01, 7.2924e-01, 7.0823e-01, 4.7497e-01, - 9.8642e-01, 9.6950e-01, 3.1822e-01, 8.9707e-01, - 8.2130e-02, 8.3490e-01, 2.5776e-01, 5.1683e-01, - 1.1497e-01, 8.3218e-01, 8.6213e-01, 3.2832e-03, - 7.0292e-01, 2.8831e-01, 4.4333e-01, 7.2009e-01, - 4.0793e-01, 4.5162e-01, 3.6693e-01, 1.2147e-01, - 7.6792e-01, 1.5089e-01, 6.2167e-01, 3.5245e-01, - 9.3500e-01, 7.9017e-01, 9.1171e-01, 7.0950e-01, - 7.5701e-01, 8.5804e-01, 4.4625e-01, 6.2964e-01, - 4.0825e-01, 4.8145e-01, 3.6250e-01, 2.1616e-01, - 5.0502e-01, 2.4223e-02, 4.9976e-01, 9.0505e-01, - 9.3497e-01, 8.6822e-01, 8.5824e-01, 1.7667e-01, - 2.2820e-01, 3.3221e-01, 9.1383e-01, 1.5961e-01, - 2.3015e-01, 5.6818e-01, 9.9248e-01, 9.5778e-01, - 4.5782e-01, 3.0766e-01, 4.2287e-01, 6.0590e-01, - 3.1194e-01, 6.2603e-01, 1.2652e-01, 6.6990e-01, - 2.7961e-01, 8.6904e-01, 6.2781e-01, 8.5423e-01, - 2.5994e-01, 3.5572e-01, 8.9677e-01, 6.9735e-02, - 8.7326e-01, 2.3486e-01, 6.7756e-01, 9.1506e-01, - 6.6235e-01, 6.3638e-01, 6.2212e-01, 4.2214e-01, - 9.6628e-01, 4.7979e-01, 5.5599e-01, 6.6975e-01, - 9.1770e-01, 8.4921e-01, 7.8228e-01, 4.9541e-01, - 2.4676e-01, 5.8229e-01, 6.3363e-01, 5.8999e-01, - 5.5398e-01, 1.7297e-01, 2.6442e-01, 6.6954e-01, - 4.0705e-01, 1.5092e-01, 8.6820e-01, 8.0155e-01, - 9.6246e-01, 5.4199e-02, 6.1116e-01, 1.0232e-01, - 8.6891e-01, 2.4082e-01, 5.6454e-01, 2.4664e-01, - 3.0002e-01, 3.8365e-01, 4.3945e-01, 7.7554e-01, - 3.5987e-01, 4.1034e-01, 1.7631e-01, 4.3247e-01, - 4.9655e-01, 9.4554e-01, 5.2332e-01, 9.9976e-01, - 2.3445e-01, 4.2321e-01, 1.2031e-01, 6.9179e-01, - 9.5785e-01, 9.2308e-01, 2.4116e-01, 9.9876e-01, - 2.0342e-01, 8.9867e-01, 3.4996e-01, 2.3225e-01, - 2.8198e-01, 2.7293e-01, 7.0256e-02, 8.3430e-01, - 9.7407e-01, 2.1501e-02, 5.6137e-02, 8.3712e-01, - 2.1155e-01, 2.0680e-01, 5.8653e-01, 7.0146e-01, - 2.4499e-01, 6.1291e-01, 6.7401e-02, 5.2936e-02, - 5.5710e-01, 3.6277e-01, 6.4019e-01, 8.5335e-01, - 4.7761e-01, 2.3988e-01, 5.6900e-01, 2.0262e-01, - 4.5640e-01, 4.8010e-01, 8.7735e-01, 9.6990e-01, - 1.2396e-02, 7.7949e-01, 7.6676e-01, 2.7081e-01, - 5.2024e-01, 2.6366e-01, 7.9351e-01, 7.7967e-01, - 5.1343e-01, 8.6651e-01, 7.1801e-01, 5.4343e-01, - 5.7478e-01, 6.1938e-01, 7.5548e-02, 9.0783e-01, - 5.9722e-01, 7.1908e-01, 2.7312e-01, 4.9747e-01, - 1.7016e-01, 7.4263e-01, 9.3484e-01, 2.5565e-01, - 7.6354e-01, 6.9252e-01, 8.9558e-01, 5.9991e-01, - 7.4127e-01, 2.4916e-01, 3.2608e-01, 1.1447e-01, - 2.9080e-01, 2.2015e-01, 2.7586e-01, 9.3624e-01, - 5.0901e-01, 9.2773e-01, 2.3779e-01, 7.2414e-01, - 6.2115e-01, 
8.8543e-03, 9.5559e-01, 5.5237e-01, - 5.9702e-02, 3.0996e-02, 2.7452e-01, 9.3476e-01, - 9.2334e-01, 1.0158e-01, 8.1141e-01, 4.2710e-01, - 9.4909e-02, 3.4917e-01, 1.6636e-01, 7.4557e-01, - 8.8926e-01, 4.1535e-01, 8.2911e-01, 1.4004e-01, - 1.3374e-02, 1.4100e-01, 9.0306e-01, 4.8827e-02, - 7.7186e-01, 6.3321e-01, 1.9462e-01, 3.9827e-01, - 5.9589e-01, 5.0163e-01, 5.5641e-01, 3.2488e-01, - 6.6930e-01, 5.1964e-01, 3.1040e-01, 8.3101e-01, - 9.1606e-01, 8.0023e-01, 3.2635e-01, 6.2952e-01, - 7.4512e-01, 2.9163e-01, 3.0956e-01, 5.7898e-01, - 3.8483e-01, 3.5295e-01, 7.1281e-02, 9.3899e-02, - 7.5473e-01, 5.6119e-01, 7.2374e-01, 3.3493e-01, - 2.1353e-01, 4.1028e-01, 4.3380e-01, 4.9443e-01, - 8.3914e-01, 4.0980e-01, 2.2353e-01, 4.6856e-01, - 9.8532e-01, 4.1826e-01, 7.2795e-01, 8.8187e-01, - 2.2991e-01, 5.6937e-01, 8.7997e-01, 9.8732e-01, - 8.9366e-01, 3.8819e-01, 8.6363e-01, 1.1832e-01, - 3.8550e-01, 7.5854e-01, 5.3971e-01, 4.0107e-01, - 7.1399e-02, 7.0022e-01, 3.9042e-01, 4.6769e-01, - 4.1568e-01, 1.8784e-03, 5.3264e-01, 3.9975e-01, - 7.9966e-01, 1.8820e-01, 1.7925e-02, 3.9911e-01, - 9.2246e-01, 1.7928e-01, 8.4736e-01, 7.0479e-01, - 5.7742e-01, 3.6414e-01, 6.4537e-01, 6.3918e-01, - 9.7580e-01, 2.1688e-01, 5.4784e-01, 7.4740e-01, - 1.3211e-01, 3.6429e-02, 4.4621e-01, 6.6190e-01, - 5.3119e-01, 2.5916e-01, 1.5296e-03, 5.4547e-01, - 7.1935e-01, 1.0792e-01, 5.1658e-02, 8.1958e-02, - 4.1507e-01, 8.1530e-01, 1.1213e-01, 1.5070e-01, - 6.7624e-01, 8.1166e-01, 8.6140e-01, 2.2385e-01, - 9.3921e-01, 4.0510e-01, 8.1991e-01, 8.7893e-01, - 2.8937e-01, 1.6184e-01, 2.8851e-01, 7.3226e-01, - 9.5694e-01, 6.4905e-01, 2.8654e-01, 3.4884e-01, - 9.3195e-01, 2.5569e-01, 8.9239e-01, 9.7412e-02, - 4.6401e-01, 6.8605e-01, 1.7844e-01, 1.5800e-01, - 6.8510e-01, 6.7166e-01, 3.4981e-02, 3.1457e-02, - 2.0280e-01, 5.6851e-01, 1.3432e-01, 8.7812e-01, - 7.2954e-01, 8.1625e-01, 3.7929e-01, 7.5942e-01, - 5.3153e-01, 5.4974e-01, 9.8914e-01, 7.2178e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.1442, 0.3479, 0.0736, ..., 0.7713, 0.5566, 0.1675]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 0.01423192024230957 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '73777', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.1387269496917725} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([5508, 9575, 656, 2627, 8566, 5820, 3018, 3373, 4414, - 3055, 6937, 1153, 3250, 1389, 6514, 3345, 2855, 7135, - 4792, 7175, 808, 4068, 3395, 4430, 2729, 8023, 6573, - 5208, 6115, 4870, 2959, 9004, 6154, 6036, 7804, 4772, - 5201, 296, 3325, 6381, 5380, 9121, 1445, 4957, 6867, - 4006, 2284, 5865, 3974, 9462, 3299, 49, 9461, 6670, - 3714, 3027, 4310, 2400, 5954, 7235, 2580, 2868, 7198, - 3736, 5562, 9005, 8912, 2276, 8194, 5812, 8468, 2983, - 6818, 255, 9224, 6925, 9166, 298, 3685, 1181, 2606, - 9590, 7743, 2755, 3440, 9622, 8827, 767, 6647, 6657, - 9003, 6161, 3158, 3383, 8984, 4164, 6155, 1581, 6250, - 3148, 5225, 7492, 1641, 3667, 1192, 2607, 7402, 6138, - 2597, 2358, 274, 9224, 7866, 4138, 5342, 38, 3457, - 5561, 5899, 2231, 5345, 7211, 5666, 1005, 6429, 1717, - 4864, 6032, 2544, 6728, 2581, 9245, 4695, 381, 3082, - 2086, 6195, 4752, 4134, 1810, 9562, 3721, 268, 4151, - 5614, 5819, 1155, 8820, 5775, 7686, 2525, 6174, 5674, - 9202, 279, 7651, 9040, 5, 6561, 3605, 6968, 2146, - 953, 377, 4363, 187, 7768, 751, 9638, 1330, 4286, - 1751, 8364, 6205, 8982, 8713, 8415, 8469, 6147, 491, - 5083, 2289, 8302, 8307, 1182, 2477, 2676, 5713, 9967, - 4103, 6320, 780, 3861, 5582, 5133, 3823, 1563, 4798, - 4512, 4446, 7915, 4568, 903, 1523, 3572, 5001, 1933, - 1949, 8734, 4705, 2155, 9618, 715, 6214, 4699, 997, - 7101, 2851, 9068, 1466, 7697, 4478, 7542, 2752, 2412, - 18, 2463, 5978, 4225, 6611, 4731, 5581, 7116, 7970, - 2865, 8155, 4666, 604, 7169, 7837, 4336, 6912, 697, - 5910, 8813, 5358, 8366, 9544, 501, 4615, 2803, 6919, - 2746, 1371, 4480, 7793, 6743, 6203, 1713, 865, 1494, - 9168, 9742, 4247, 3057, 8065, 4346, 2523, 6892, 1113, - 3827, 763, 132, 1453, 4262, 294, 8688, 9365, 3282, - 8252, 7662, 5148, 6621, 3594, 8378, 4159, 6261, 4499, - 7194, 6189, 9208, 1850, 3329, 2636, 8760, 3724, 3020, - 3329, 3956, 184, 9442, 8259, 7422, 5104, 240, 5838, - 1954, 7486, 8601, 3925, 7339, 2015, 1974, 3758, 3482, - 8722, 5421, 9520, 4546, 5331, 6717, 5812, 7975, 7442, - 7940, 4158, 7967, 6116, 3811, 7786, 1220, 4785, 6185, - 579, 3103, 7158, 8394, 2983, 6440, 5229, 2484, 1111, - 16, 2051, 1935, 293, 2058, 4920, 7225, 9659, 5108, - 639, 1031, 4323, 8725, 2666, 7298, 3414, 5109, 6513, - 2574, 266, 919, 1740, 8088, 5280, 894, 6663, 1130, - 7446, 4099, 20, 7217, 3332, 8885, 3112, 2296, 5865, - 56, 4230, 7201, 4767, 2688, 1411, 9682, 7533, 7713, - 5376, 8528, 19, 1667, 3225, 7209, 9374, 9673, 8929, - 5186, 3393, 5754, 458, 7524, 5689, 1857, 584, 2192, - 9671, 8149, 7375, 3650, 7201, 7617, 524, 5283, 3542, - 8092, 5244, 7073, 8028, 4271, 9656, 7882, 874, 2907, - 6918, 3419, 5521, 9656, 2461, 9839, 5975, 5084, 1139, - 6395, 805, 1410, 4457, 9849, 8263, 514, 713, 4473, - 3973, 1009, 6909, 7592, 3285, 3898, 5360, 2525, 440, - 9369, 1527, 3851, 9222, 3247, 3317, 4058, 7353, 2516, - 4234, 7642, 6022, 9914, 5180, 6543, 8560, 1441, 6117, - 2991, 8437, 3934, 9954, 7373, 1482, 1468, 7800, 5928, - 9144, 2690, 4445, 134, 7037, 9825, 1077, 7704, 5017, - 7739, 1905, 2423, 2516, 175, 6651, 6001, 6897, 6784, - 4971, 6198, 365, 1638, 7617, 6571, 7493, 3203, 704, - 2950, 2519, 5939, 5997, 931, 7748, 1950, 8185, 5176, - 890, 3070, 2791, 6438, 8457, 5669, 4411, 1482, 7050, - 601, 287, 5291, 4620, 2195, 1746, 4986, 8569, 3581, - 9031, 6202, 4210, 8431, 5885, 6652, 8914, 5202, 5034, - 816, 9352, 3322, 4967, 2768, 6719, 1660, 5297, 984, - 7606, 4288, 353, 7241, 1058, 9337, 9042, 97, 7482, - 3656, 4781, 
9768, 5551, 9382, 6204, 3873, 3658, 9543, - 7766, 9333, 6514, 8310, 3838, 9481, 1887, 7793, 7682, - 9769, 4432, 4077, 5553, 8666, 5009, 9686, 1897, 5687, - 3382, 5919, 6551, 985, 8716, 2117, 7814, 5439, 7352, - 9154, 1069, 2145, 3994, 2450, 9754, 2935, 404, 138, - 6862, 4476, 5505, 2562, 2599, 2713, 4755, 6459, 7992, - 5580, 8294, 5009, 2485, 3945, 804, 8407, 3840, 3255, - 3135, 998, 5023, 8897, 4666, 1858, 4752, 135, 5372, - 1971, 2696, 7255, 1370, 8484, 4531, 4478, 5553, 2961, - 5403, 8082, 2803, 4478, 8089, 2563, 8344, 6470, 7823, - 5312, 5620, 5527, 4242, 8621, 2330, 4398, 642, 8991, - 7171, 9790, 5285, 6026, 3822, 3235, 4680, 6765, 3912, - 6543, 1450, 8818, 7558, 3261, 2113, 2373, 6919, 1131, - 6346, 3897, 1491, 1956, 4891, 7597, 996, 4074, 4722, - 2917, 5715, 8477, 9008, 7897, 4755, 7127, 2468, 4303, - 8306, 3974, 8907, 6722, 8711, 5733, 5028, 9135, 1866, - 7880, 3122, 2881, 8959, 8975, 55, 2311, 518, 7346, - 3155, 8803, 9740, 1957, 1812, 9667, 6289, 8136, 3531, - 1701, 495, 6279, 3572, 1162, 849, 217, 4374, 1639, - 1008, 5290, 4005, 9949, 9499, 7223, 3926, 7792, 354, - 8743, 429, 9875, 4421, 8853, 4992, 7345, 2313, 5947, - 2043, 7821, 7045, 3713, 1991, 9737, 2501, 6974, 8470, - 9355, 1042, 5178, 8527, 3387, 490, 4642, 8954, 1137, - 8655, 3394, 6784, 7051, 1774, 6167, 9750, 8404, 3082, - 6552, 4440, 522, 3508, 6337, 1418, 9452, 7117, 6648, - 8517, 766, 124, 6291, 1485, 2849, 7280, 459, 6691, - 7248, 5962, 9416, 2853, 3078, 5487, 3279, 8934, 9426, - 8788, 3551, 5452, 6149, 8603, 4993, 498, 4502, 2852, - 8524, 8859, 7620, 8162, 97, 6066, 6387, 8989, 817, - 1568, 246, 5600, 6289, 8988, 5342, 8448, 5649, 3692, - 688, 5698, 9881, 5150, 1629, 4058, 8276, 7736, 2593, - 1359, 4844, 9564, 7059, 7297, 9003, 798, 1319, 4119, - 2381, 7125, 4085, 273, 5623, 8157, 7485, 7765, 726, - 6395, 7157, 2290, 1726, 6046, 982, 9604, 2732, 8470, - 7237, 619, 3919, 7222, 540, 9760, 4380, 7356, 6016, - 7784, 4321, 395, 6423, 8568, 6214, 7974, 3353, 8129, - 6464, 9392, 9492, 3759, 472, 9308, 9344, 5127, 7423, - 6806, 1474, 2823, 5706, 1764, 6599, 9778, 8657, 7022, - 3614, 4485, 2452, 7525, 3149, 8601, 9645, 4269, 1503, - 6069, 2741, 3306, 863, 1229, 9656, 4386, 1768, 867, - 4819, 8547, 4244, 4948, 4190, 762, 9133, 131, 6780, - 6079, 8208, 5847, 9316, 7287, 8495, 2586, 1656, 4465, - 8814, 4625, 11, 2919, 3804, 7982, 6935, 9826, 7505, - 1259, 1040, 140, 9789, 2693, 5742, 2306, 7537, 8992, - 8296, 9987, 103, 8057, 643, 1848, 2330, 3674, 7402, - 1282, 4176, 8432, 9278, 6798, 342, 6639, 8590, 2414, - 6534, 9031, 318, 5748, 4899, 3462, 8454, 2064, 8941, - 716, 5685, 1233, 1844, 1555, 318, 7269, 8269, 8949, - 7099, 9893, 538, 3010, 6057, 7431, 5445, 2794, 2504, - 5841]), - values=tensor([3.2633e-01, 7.9191e-01, 3.6473e-01, 5.2553e-02, - 5.7875e-01, 8.9363e-01, 9.6431e-01, 4.0938e-02, - 1.5024e-01, 9.3457e-02, 8.5666e-01, 6.6021e-01, - 1.9759e-01, 8.0939e-01, 7.4204e-01, 2.8880e-01, - 7.9046e-01, 7.8851e-01, 7.0371e-01, 1.3685e-01, - 5.0481e-01, 3.6903e-01, 9.2692e-01, 8.9307e-02, - 4.0071e-01, 8.5389e-02, 6.8778e-01, 9.1185e-01, - 3.9447e-01, 4.1056e-01, 6.9100e-03, 4.4205e-01, - 8.9282e-01, 9.3322e-01, 8.3638e-01, 1.9546e-01, - 4.4549e-01, 8.8377e-01, 8.9362e-01, 6.5160e-01, - 3.2815e-02, 6.3599e-01, 2.9856e-01, 3.8738e-01, - 5.7386e-01, 2.8263e-01, 5.0030e-01, 3.7536e-01, - 2.7007e-01, 9.5128e-01, 6.1301e-01, 9.8955e-01, - 4.8124e-01, 8.0247e-01, 9.1091e-01, 7.1646e-01, - 8.6645e-01, 9.2664e-03, 3.1727e-01, 1.0464e-01, - 7.6432e-01, 3.2572e-01, 6.4872e-01, 4.6614e-01, - 6.3000e-01, 9.5522e-01, 3.9473e-01, 
3.8621e-01, - 5.6019e-01, 1.1553e-01, 5.3397e-01, 4.0184e-01, - 6.5600e-01, 7.2218e-01, 5.3125e-01, 6.7148e-01, - 1.5411e-01, 2.4424e-01, 6.2084e-03, 1.7181e-01, - 5.5933e-01, 8.9801e-02, 3.3998e-01, 1.1723e-01, - 3.0450e-01, 3.8459e-01, 9.8748e-01, 9.0307e-01, - 8.4006e-01, 7.4864e-01, 3.1639e-01, 2.1710e-01, - 7.3644e-01, 3.8356e-01, 3.0116e-01, 3.5797e-02, - 6.7960e-01, 3.8349e-01, 8.9489e-01, 6.5540e-01, - 7.5185e-02, 8.8098e-02, 8.4915e-01, 4.5570e-01, - 4.1326e-01, 2.2841e-01, 5.7832e-01, 9.8572e-01, - 2.1038e-01, 3.1384e-01, 6.5102e-01, 7.8064e-02, - 8.1578e-01, 9.6599e-01, 4.3348e-01, 4.0471e-01, - 6.6705e-01, 9.9374e-01, 9.3593e-01, 2.7792e-01, - 7.6408e-01, 7.1910e-01, 3.0730e-01, 9.2789e-02, - 5.0816e-01, 2.0054e-01, 9.0756e-01, 4.2139e-02, - 2.2042e-02, 3.8699e-01, 5.8026e-01, 3.2550e-01, - 1.6808e-01, 8.3002e-01, 4.0081e-01, 9.5268e-01, - 5.0592e-01, 6.5938e-01, 1.9963e-01, 4.0528e-01, - 2.7731e-02, 1.9693e-01, 6.1060e-01, 6.3866e-01, - 5.3688e-01, 5.2347e-01, 2.5349e-01, 2.9615e-01, - 9.7016e-01, 3.4954e-01, 2.6613e-01, 3.3523e-02, - 1.5584e-01, 8.3051e-02, 6.0614e-01, 8.6788e-01, - 6.0641e-01, 3.2123e-01, 3.1342e-01, 8.0290e-01, - 9.8955e-01, 3.0445e-01, 7.7405e-01, 4.4438e-01, - 2.4247e-01, 1.0812e-01, 3.8171e-02, 1.6700e-01, - 9.9278e-01, 1.6124e-01, 1.4018e-01, 9.9672e-01, - 1.7920e-01, 8.7088e-01, 9.3567e-01, 9.0612e-01, - 7.3360e-01, 8.6982e-01, 1.8853e-01, 2.8631e-02, - 3.1908e-01, 5.4943e-01, 5.4554e-01, 5.7148e-01, - 8.7372e-01, 8.1331e-01, 3.5535e-01, 6.6365e-02, - 3.7900e-01, 4.4060e-01, 6.9755e-01, 8.4770e-02, - 2.6765e-01, 7.1295e-01, 5.9500e-01, 9.3206e-01, - 2.8045e-01, 9.7087e-01, 2.9626e-01, 3.4970e-01, - 9.7291e-01, 3.3104e-01, 7.5134e-02, 4.9646e-01, - 7.3787e-01, 9.4535e-01, 9.9298e-01, 7.8352e-02, - 7.1369e-01, 5.2407e-01, 3.4781e-01, 2.9479e-02, - 7.9036e-01, 3.2159e-02, 2.9663e-01, 4.0057e-01, - 7.5187e-01, 7.2985e-02, 9.9586e-01, 3.5315e-01, - 7.0513e-01, 8.3438e-01, 1.1850e-01, 6.1954e-01, - 7.4807e-01, 4.7444e-01, 1.4806e-01, 4.6297e-01, - 8.2386e-01, 3.7491e-01, 5.2912e-01, 4.8493e-01, - 9.8747e-01, 8.2071e-01, 6.0411e-01, 2.6614e-01, - 4.7854e-01, 7.0928e-01, 4.7195e-01, 1.2463e-01, - 6.5685e-01, 3.5210e-01, 3.3489e-01, 3.4371e-01, - 5.6556e-01, 5.8336e-02, 2.5168e-01, 1.1237e-03, - 6.1377e-01, 1.4862e-01, 4.7916e-01, 9.8286e-01, - 7.2047e-01, 6.1947e-01, 2.5084e-02, 8.6610e-01, - 3.6497e-01, 7.1297e-01, 4.6956e-01, 3.6140e-01, - 7.0052e-01, 5.8768e-01, 8.9326e-01, 5.2583e-01, - 9.7503e-01, 5.9070e-01, 9.4841e-01, 7.0689e-01, - 4.8720e-01, 1.2759e-01, 5.2594e-01, 5.1674e-01, - 2.2215e-01, 4.8116e-01, 5.4493e-01, 1.6438e-01, - 1.6212e-01, 1.4592e-01, 9.3453e-01, 5.4120e-01, - 9.9377e-01, 9.5104e-01, 3.6132e-01, 5.3946e-01, - 9.9391e-01, 4.6287e-01, 2.8328e-01, 6.1212e-01, - 4.1816e-01, 7.1117e-01, 7.2036e-01, 4.2976e-02, - 2.1797e-01, 6.2665e-01, 4.2489e-02, 6.7964e-01, - 4.1152e-01, 2.0657e-01, 7.4251e-01, 6.2519e-01, - 4.6706e-01, 6.5784e-01, 1.7252e-01, 8.8152e-01, - 1.6930e-01, 9.4893e-01, 8.8209e-01, 8.7479e-01, - 4.1704e-01, 8.7646e-01, 6.5490e-01, 5.1932e-01, - 8.4152e-01, 6.8650e-01, 9.4596e-01, 6.7239e-01, - 7.6111e-01, 8.3939e-01, 8.3015e-01, 8.0600e-02, - 5.4688e-01, 4.2004e-01, 3.5995e-01, 8.7290e-01, - 2.9848e-01, 4.0104e-01, 7.6500e-01, 7.5102e-01, - 2.0463e-01, 2.7033e-01, 9.7413e-01, 5.9293e-01, - 4.1711e-01, 6.1095e-01, 2.3521e-01, 7.4961e-01, - 7.3623e-01, 8.2256e-01, 6.1390e-01, 5.1919e-01, - 2.8273e-01, 4.5435e-01, 8.5019e-02, 9.4253e-01, - 3.1807e-02, 5.6156e-01, 4.3673e-01, 2.3393e-01, - 4.5771e-01, 1.4899e-01, 8.7682e-01, 
9.2175e-01, - 7.3314e-01, 1.1596e-01, 2.1438e-01, 1.1876e-01, - 2.7871e-01, 6.2895e-01, 1.0399e-01, 6.5021e-01, - 9.7906e-01, 6.0746e-01, 1.0704e-01, 8.5925e-01, - 2.2433e-01, 4.9315e-01, 2.7459e-01, 7.3299e-01, - 2.6631e-01, 4.4259e-01, 1.6375e-01, 8.4103e-01, - 6.8640e-02, 9.0139e-01, 3.6033e-01, 8.9148e-01, - 1.4670e-01, 2.1681e-01, 2.2662e-01, 6.6090e-02, - 2.7846e-01, 7.6443e-01, 6.3337e-01, 1.4853e-01, - 3.1082e-01, 4.9872e-01, 8.4014e-01, 9.8999e-01, - 6.7688e-01, 1.6975e-01, 3.7138e-01, 8.1020e-01, - 7.0790e-02, 1.3523e-01, 3.7468e-02, 7.9415e-01, - 2.7812e-01, 1.2167e-01, 4.3224e-01, 5.5136e-01, - 4.0724e-02, 2.6502e-01, 6.1419e-01, 8.7909e-01, - 9.5819e-01, 9.5194e-01, 2.6001e-01, 2.3975e-01, - 6.7454e-01, 5.7007e-01, 5.3707e-01, 8.1178e-01, - 7.9154e-01, 2.2539e-01, 7.8655e-01, 6.5846e-01, - 2.6535e-01, 9.7806e-01, 7.1020e-01, 8.3252e-01, - 9.6863e-01, 7.5960e-01, 5.7156e-01, 2.7669e-01, - 6.3128e-01, 7.7289e-01, 7.1228e-01, 7.7482e-01, - 1.6904e-01, 8.1997e-01, 9.7624e-01, 5.6560e-01, - 2.7663e-01, 2.3951e-01, 4.4379e-01, 2.8076e-02, - 4.4129e-02, 7.9282e-01, 3.5166e-01, 7.1038e-01, - 4.7806e-01, 8.0129e-01, 1.8759e-02, 2.9831e-01, - 9.6097e-01, 7.5028e-01, 6.7067e-01, 2.5064e-01, - 7.3552e-02, 2.2102e-01, 4.0798e-02, 7.3754e-01, - 5.2514e-01, 1.2280e-01, 7.7926e-01, 5.8105e-01, - 5.6506e-01, 3.0079e-01, 5.7893e-01, 4.5377e-01, - 4.5091e-02, 6.1516e-01, 8.0451e-01, 8.1548e-01, - 4.9070e-01, 9.4442e-01, 6.7333e-01, 4.6352e-01, - 9.4087e-01, 6.6036e-01, 7.7251e-01, 4.8492e-01, - 5.5056e-01, 6.1609e-01, 4.1240e-01, 7.6930e-01, - 1.5580e-01, 5.5480e-01, 6.6083e-01, 7.9676e-01, - 2.1381e-01, 9.8321e-01, 6.0074e-01, 2.8389e-01, - 8.3180e-01, 8.8109e-01, 1.2639e-01, 6.2125e-01, - 7.0439e-01, 9.1146e-01, 5.1574e-01, 9.8774e-01, - 8.2024e-01, 2.4417e-01, 8.8016e-01, 8.1631e-01, - 1.4643e-01, 9.8658e-01, 2.7646e-01, 1.2864e-01, - 2.9087e-02, 1.4354e-01, 6.0524e-01, 2.8677e-01, - 1.4899e-01, 4.6367e-01, 5.1784e-01, 9.8821e-01, - 4.8069e-01, 8.3426e-01, 1.7525e-01, 5.0974e-01, - 3.0878e-01, 2.9609e-01, 6.8677e-01, 2.0869e-01, - 3.2086e-01, 3.7111e-01, 6.1587e-02, 7.4378e-02, - 4.2695e-01, 1.4850e-01, 5.4335e-02, 8.4394e-01, - 8.0973e-01, 4.7836e-01, 8.4671e-01, 9.7012e-01, - 1.0626e-01, 4.9016e-01, 4.0803e-01, 9.7115e-01, - 7.5995e-01, 9.5447e-01, 8.6611e-01, 7.0402e-01, - 7.9939e-01, 2.3092e-01, 4.4970e-01, 3.9381e-01, - 1.5460e-01, 9.1987e-01, 4.3357e-01, 7.2170e-01, - 9.1074e-01, 9.8918e-01, 2.9317e-01, 2.4967e-01, - 6.4585e-01, 8.0204e-01, 4.6814e-01, 3.0193e-01, - 8.1103e-01, 1.4968e-01, 8.8709e-01, 5.7762e-01, - 8.7050e-01, 1.3270e-01, 3.7713e-01, 5.9660e-01, - 6.3653e-01, 2.9283e-01, 2.1496e-01, 8.4861e-01, - 5.2665e-01, 4.4147e-01, 5.6040e-01, 2.4280e-01, - 1.0891e-01, 1.3062e-01, 9.9818e-01, 3.2711e-02, - 7.8364e-01, 8.8951e-01, 1.1528e-01, 9.1362e-01, - 1.8879e-01, 4.1744e-01, 9.0491e-01, 2.1200e-01, - 5.8240e-02, 1.4926e-01, 2.8456e-01, 5.7208e-01, - 3.9875e-01, 3.0948e-01, 8.5836e-02, 2.0035e-01, - 7.1400e-01, 7.8279e-01, 6.8278e-01, 6.6938e-01, - 9.4297e-01, 1.8496e-01, 1.5518e-02, 1.0389e-01, - 4.1335e-01, 9.0194e-02, 5.3872e-01, 7.5585e-01, - 3.4338e-01, 1.1621e-01, 7.9511e-01, 5.9093e-01, - 6.0436e-01, 3.4533e-01, 6.2464e-01, 6.2137e-01, - 2.6066e-01, 2.1593e-01, 5.7001e-01, 2.9622e-01, - 8.6756e-01, 4.5066e-01, 6.3181e-01, 7.8115e-01, - 1.1513e-01, 3.3336e-01, 5.9404e-01, 5.5227e-01, - 4.8272e-01, 4.5789e-01, 5.2434e-01, 8.7910e-01, - 3.6237e-01, 3.6341e-01, 2.2645e-01, 2.1768e-01, - 4.5579e-01, 3.4246e-01, 7.0649e-01, 2.9172e-01, - 1.5275e-01, 9.9131e-01, 6.6686e-01, 
3.1529e-01, - 9.5275e-01, 5.2154e-01, 6.0311e-01, 6.9413e-01, - 5.2394e-01, 2.9542e-01, 6.9079e-01, 3.5982e-01, - 2.0311e-01, 2.2891e-01, 8.3397e-01, 7.9481e-01, - 7.7724e-01, 6.7393e-01, 5.1568e-01, 8.6496e-01, - 8.7170e-01, 9.9110e-02, 8.4462e-01, 8.3862e-01, - 8.1703e-01, 7.4599e-01, 8.0526e-01, 4.4458e-02, - 9.9751e-01, 7.3077e-01, 4.6991e-01, 7.3467e-01, - 9.3161e-02, 9.5937e-01, 6.5874e-01, 6.4726e-01, - 1.5051e-01, 5.7551e-01, 7.2428e-01, 9.3155e-01, - 6.2676e-01, 9.6143e-01, 8.6169e-01, 4.6347e-01, - 8.7308e-01, 1.8969e-02, 8.3502e-01, 8.9361e-01, - 1.9404e-01, 4.3472e-02, 2.0120e-01, 6.9356e-01, - 9.4889e-01, 5.6077e-01, 8.6433e-01, 8.7016e-01, - 7.3158e-01, 4.4803e-01, 2.3998e-01, 9.7238e-01, - 4.3879e-01, 2.9965e-01, 8.3148e-01, 2.4330e-01, - 6.7838e-01, 6.1669e-01, 7.2623e-01, 1.2354e-01, - 2.2972e-01, 2.7296e-01, 9.8873e-01, 7.5405e-01, - 3.4953e-01, 2.5789e-01, 6.3716e-01, 8.4804e-01, - 1.8777e-01, 5.7827e-01, 4.9683e-01, 8.6743e-01, - 2.8557e-01, 7.6619e-01, 2.2211e-01, 6.8093e-01, - 9.5156e-01, 6.3191e-01, 8.7661e-01, 9.1812e-02, - 8.0288e-01, 9.5932e-01, 8.4664e-01, 5.6317e-01, - 4.1155e-01, 5.6613e-01, 6.7922e-01, 5.0367e-01, - 7.7000e-01, 2.3487e-01, 6.5953e-01, 2.6442e-01, - 4.8671e-01, 1.3645e-01, 3.1847e-01, 3.7867e-01, - 6.8019e-01, 6.2500e-01, 8.7404e-01, 7.5376e-01, - 1.0881e-01, 2.1589e-01, 4.1967e-01, 2.7998e-01, - 9.8292e-01, 1.8108e-01, 4.3913e-01, 1.4646e-01, - 6.9060e-01, 4.8902e-01, 7.7270e-01, 6.7527e-01, - 4.5794e-01, 6.8861e-01, 8.4179e-01, 8.6936e-01, - 8.8470e-01, 8.3332e-01, 4.5104e-01, 2.2109e-01, - 6.7705e-01, 3.9307e-02, 1.4542e-01, 7.5689e-01, - 7.3681e-01, 1.4316e-01, 1.3469e-01, 9.2693e-01, - 7.9835e-02, 4.8747e-01, 2.3991e-01, 5.0674e-01, - 8.1572e-01, 8.9171e-01, 9.4262e-01, 4.4242e-02, - 4.8771e-01, 2.2083e-01, 7.7910e-01, 6.0918e-01, - 7.4097e-02, 1.2246e-01, 6.0510e-01, 9.1036e-01, - 5.6118e-01, 1.6158e-01, 6.7291e-01, 1.8197e-01, - 1.7103e-01, 9.5862e-01, 4.6520e-01, 1.2359e-01, - 3.6908e-01, 9.8844e-01, 3.6924e-03, 4.3265e-01, - 8.5218e-01, 1.4758e-01, 1.7384e-01, 6.5237e-02, - 8.2651e-01, 8.8829e-01, 2.6356e-01, 4.3625e-01, - 2.5112e-01, 9.8859e-01, 7.9307e-01, 6.3628e-01, - 8.3428e-01, 1.7687e-02, 2.6174e-01, 9.5803e-01, - 1.4430e-01, 2.1325e-01, 1.4008e-01, 6.7486e-01, - 2.8336e-01, 1.6215e-01, 9.0703e-01, 8.7796e-01, - 3.4117e-01, 5.4612e-01, 1.4746e-01, 4.1956e-01, - 2.7025e-02, 7.3708e-02, 8.1368e-01, 5.0288e-01, - 5.6119e-01, 4.8670e-01, 5.7631e-01, 3.1818e-01, - 6.6127e-01, 7.7339e-01, 7.0503e-02, 9.2309e-01, - 3.9171e-01, 7.5288e-02, 3.6269e-01, 4.9846e-01, - 8.1495e-01, 9.4113e-01, 4.7373e-02, 3.0034e-01, - 2.2161e-01, 4.3905e-01, 3.3523e-01, 8.3970e-01, - 2.1837e-01, 8.3822e-01, 9.9775e-01, 5.5403e-01, - 8.9486e-01, 1.4148e-01, 9.0127e-01, 9.7059e-02, - 3.2741e-01, 1.0701e-01, 2.7984e-01, 7.7278e-01, - 6.8926e-01, 2.8243e-01, 3.0207e-01, 6.5028e-02, - 2.4311e-01, 2.9351e-01, 7.3131e-01, 4.2415e-01, - 7.4454e-01, 6.9421e-01, 2.5342e-01, 1.5780e-01, - 2.9231e-01, 1.4667e-01, 6.9582e-02, 8.6692e-01, - 5.4682e-01, 4.6227e-01, 4.6657e-01, 1.0327e-01, - 1.8106e-01, 7.5880e-01, 3.2925e-01, 1.2240e-01, - 3.1478e-01, 9.1416e-01, 9.6930e-03, 1.3241e-01, - 8.0456e-01, 7.4784e-01, 6.9926e-02, 8.4918e-01, - 8.2454e-01, 8.7979e-01, 1.6012e-01, 3.4099e-01, - 1.3158e-01, 5.1930e-01, 3.2542e-01, 8.0769e-01, - 1.1181e-02, 9.1890e-02, 7.2786e-01, 6.7172e-02, - 8.3525e-02, 7.9476e-01, 6.4096e-01, 2.8445e-01, - 3.8206e-01, 4.6326e-01, 4.8276e-01, 2.3710e-01, - 5.7913e-01, 5.4204e-01, 7.5509e-01, 5.9955e-01, - 1.1200e-01, 9.5213e-04, 9.0864e-01, 
6.2336e-01, - 4.3307e-01, 3.6198e-01, 9.8456e-01, 2.8949e-01, - 4.7497e-01, 5.7509e-01, 3.3429e-01, 7.0479e-01, - 9.6075e-01, 9.2558e-01, 3.6179e-01, 8.2591e-01, - 5.9758e-01, 5.3473e-01, 2.7530e-02, 9.6342e-02, - 9.3529e-02, 1.6467e-01, 8.9391e-01, 8.5922e-01, - 1.4915e-01, 1.8769e-01, 1.0016e-01, 8.4777e-02, - 3.6040e-01, 8.2450e-01, 3.4686e-01, 8.2301e-01, - 8.9691e-02, 4.3378e-01, 1.3710e-01, 4.0908e-01, - 6.9494e-01, 9.9675e-01, 9.8566e-01, 1.3164e-02, - 6.9583e-01, 8.3204e-01, 2.1108e-01, 8.1028e-01, - 6.7781e-01, 2.9978e-01, 3.4866e-01, 2.9128e-01, - 8.2911e-01, 3.9288e-01, 9.7244e-01, 5.5754e-01, - 8.6365e-01, 1.9935e-01, 4.2606e-01, 6.5710e-01, - 1.1321e-01, 8.9438e-01, 5.3287e-02, 6.7747e-01, - 1.3725e-01, 9.6097e-01, 6.1965e-01, 7.3747e-01, - 1.9823e-01, 8.4099e-01, 9.3609e-01, 2.6566e-01, - 2.6173e-01, 6.9673e-01, 9.6055e-01, 7.2272e-03, - 8.9838e-01, 5.5549e-01, 4.9628e-01, 2.5939e-01, - 2.4806e-01, 5.9743e-01, 7.2404e-01, 5.7379e-01, - 9.2371e-01, 9.8965e-02, 1.7327e-01, 7.3460e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.0752, 0.0265, 0.5369, ..., 0.1368, 0.6161, 0.4463]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 2.1387269496917725 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '362205', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.420795440673828} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.014298200607299805} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 367, 5928, 4913, 9662, 9837, 1139, 7195, 709, 1186, - 80, 7429, 8354, 3731, 4238, 1204, 8286, 5350, 6518, - 6581, 5481, 8412, 8567, 4483, 270, 6809, 6052, 9047, - 8886, 6265, 2683, 8840, 6468, 5705, 4463, 7370, 8923, - 7324, 752, 3820, 476, 1511, 1152, 7156, 6378, 1601, - 6251, 5183, 964, 1260, 884, 6109, 4781, 1936, 3415, - 7691, 4888, 5674, 6459, 8140, 8932, 2864, 6432, 1356, - 2665, 9855, 2360, 1289, 3774, 7837, 576, 7725, 1293, - 907, 2542, 7673, 5326, 1656, 1077, 464, 7656, 5438, - 524, 4581, 8099, 2687, 4078, 5342, 5240, 9155, 9342, - 4985, 2318, 5185, 4603, 8415, 3279, 2083, 7925, 8071, - 9166, 9303, 9469, 5789, 7778, 8645, 7134, 7594, 2178, - 218, 3344, 3951, 8834, 4013, 7845, 8331, 9692, 3137, - 7730, 5023, 8474, 4718, 38, 726, 535, 2913, 4635, - 2716, 3591, 9066, 2660, 3345, 7905, 2026, 1175, 9827, - 6627, 4714, 9765, 1301, 2421, 949, 3573, 4881, 5681, - 2117, 1844, 8550, 9611, 6234, 1475, 3891, 4140, 4791, - 497, 5133, 1193, 8459, 1053, 2110, 5799, 6433, 6363, - 6452, 1314, 1357, 383, 1853, 7981, 8433, 7415, 6026, - 6286, 7644, 2079, 2595, 9578, 6356, 4227, 8298, 2069, - 2653, 5142, 7404, 4079, 1966, 7533, 6062, 6415, 8225, - 8783, 5739, 3815, 1201, 4896, 5097, 5808, 8945, 5520, - 6206, 511, 7347, 334, 3119, 1570, 7713, 2906, 3536, - 6334, 7998, 298, 3942, 6558, 2963, 4842, 2263, 5657, - 6492, 6276, 9075, 5298, 2267, 9620, 5320, 1568, 8760, - 7490, 9058, 1722, 5460, 3464, 1842, 2377, 9510, 1053, - 3322, 9012, 5664, 8967, 8790, 9286, 1946, 3141, 8192, - 6659, 3495, 4958, 4102, 7004, 6177, 5317, 6593, 7640, - 6307, 8035, 9650, 8087, 4138, 5288, 54, 277, 9498, - 8689, 2955, 8232, 1954, 321, 4627, 4226, 4258, 7653, - 5234, 2531, 412, 5196, 6249, 3047, 8171, 9428, 3213, - 9763, 1374, 9103, 7900, 6626, 2118, 7765, 3728, 6106, - 4954, 4753, 8313, 550, 6888, 8896, 9965, 3061, 2947, - 8278, 5879, 5273, 5453, 181, 2437, 4117, 7283, 5591, - 8602, 8473, 5720, 1866, 7656, 8837, 3905, 7472, 7158, - 3023, 2882, 9989, 8681, 648, 6274, 6847, 3312, 5656, - 7680, 6098, 4463, 9954, 2825, 6574, 7011, 1957, 6137, - 6725, 3576, 5559, 9535, 4552, 2200, 3109, 9611, 9451, - 1309, 2875, 3230, 5628, 9907, 8010, 6405, 222, 8761, - 2120, 9374, 4858, 4514, 5442, 9330, 8281, 8858, 4407, - 6543, 35, 3825, 9633, 1009, 2627, 3906, 1955, 7496, - 3563, 3539, 8202, 7400, 7587, 7952, 2607, 3232, 9574, - 748, 9829, 4992, 1810, 7308, 6501, 6785, 7235, 1137, - 8724, 2405, 7482, 6759, 4652, 5655, 4530, 6784, 9512, - 4088, 6049, 9230, 7810, 9658, 5737, 1295, 738, 747, - 7672, 2503, 5907, 2220, 201, 5120, 7515, 3259, 3879, - 3039, 9321, 8605, 9482, 4181, 7348, 9537, 587, 6394, - 3984, 306, 1193, 3228, 778, 134, 9046, 7121, 4357, - 4190, 4050, 4701, 5483, 6713, 8937, 2483, 711, 8665, - 3008, 3841, 9929, 1156, 7864, 8900, 4657, 7749, 2947, - 9699, 2414, 4266, 6048, 8585, 5785, 2832, 7866, 2519, - 9463, 5405, 1165, 1777, 9786, 9696, 3031, 653, 1886, - 1959, 8043, 4843, 484, 4688, 1972, 2038, 6598, 7240, - 1462, 7254, 3087, 1879, 7554, 1558, 7001, 9016, 8895, - 5649, 4937, 9704, 4961, 7644, 8442, 2757, 2242, 9502, - 2976, 4095, 4676, 9126, 5855, 4163, 6430, 137, 106, - 3030, 655, 8531, 7089, 648, 854, 5910, 4435, 7738, - 4805, 6752, 444, 2066, 5929, 8674, 9769, 2865, 2752, - 5321, 130, 2489, 807, 9550, 1633, 7468, 9068, 9241, - 6808, 7365, 968, 8661, 9018, 3550, 6104, 2629, 7508, - 35, 8351, 2048, 4626, 2188, 1746, 7387, 3093, 1503, - 1452, 7638, 1336, 9281, 6296, 6140, 7102, 9783, 1071, - 
4115, 8592, 8292, 2729, 6495, 356, 8036, 1239, 4223, - 6109, 2038, 6313, 7535, 4790, 2693, 2125, 7954, 5865, - 3268, 2522, 6271, 2184, 4150, 1470, 3491, 6995, 9734, - 2067, 3839, 7338, 6815, 4421, 307, 289, 5776, 7623, - 5550, 9770, 6085, 800, 6471, 6365, 7940, 7253, 9023, - 3056, 1623, 7408, 4931, 2227, 3852, 1329, 3500, 1597, - 680, 4795, 1426, 733, 9436, 1001, 976, 8821, 8330, - 6704, 5168, 673, 3362, 4167, 8558, 5789, 6484, 7568, - 5585, 6431, 6874, 5521, 5213, 7970, 3020, 8092, 5025, - 6800, 7632, 7512, 3989, 4195, 1578, 2940, 3628, 3648, - 6431, 2736, 7676, 5158, 5635, 1844, 6595, 8947, 5903, - 51, 9169, 4098, 5691, 6814, 9766, 6514, 9970, 2239, - 2046, 3280, 2714, 5806, 2677, 82, 1113, 2963, 2180, - 2566, 2541, 3825, 9253, 8388, 6485, 870, 1684, 7333, - 793, 9898, 5870, 5155, 6286, 9563, 9068, 5713, 2343, - 1715, 3887, 723, 1944, 2953, 3319, 9854, 144, 718, - 6674, 4026, 8079, 7531, 4342, 6924, 6779, 6546, 7462, - 9962, 1432, 8607, 1875, 2805, 7948, 2976, 1046, 7036, - 4949, 7102, 477, 7850, 2174, 1961, 4920, 8186, 1308, - 179, 2095, 3644, 8483, 9153, 627, 2488, 890, 3123, - 4350, 5492, 1701, 1645, 3932, 3331, 2933, 6624, 7332, - 7665, 5288, 7681, 6550, 7545, 730, 5687, 802, 6864, - 7451, 2117, 4878, 4869, 1248, 1137, 1380, 8950, 5598, - 7429, 4233, 1060, 5569, 6689, 8608, 3567, 4558, 7535, - 9199, 2985, 2282, 235, 9129, 8582, 2072, 7435, 8445, - 9048, 7905, 3709, 7810, 5385, 5720, 2662, 2668, 7912, - 4854, 9556, 9550, 3132, 8229, 4856, 4161, 1914, 745, - 65, 7058, 9271, 7846, 8928, 5152, 5482, 8156, 4590, - 3430, 90, 6589, 2814, 3078, 1382, 8736, 5505, 2054, - 8404, 4829, 8221, 2458, 952, 6527, 2277, 8961, 5925, - 2066, 2154, 3256, 2466, 7237, 2076, 9446, 4670, 7006, - 2231, 8823, 7168, 5299, 1101, 7045, 7082, 8944, 4016, - 4074, 4258, 3864, 3141, 4450, 5115, 5389, 9949, 1172, - 9433, 1642, 1479, 3769, 9973, 4747, 4872, 389, 8601, - 9119, 8563, 2438, 6873, 1818, 9226, 937, 6705, 3733, - 2036, 669, 2029, 332, 7663, 8899, 3889, 9735, 9326, - 8050, 2621, 3330, 992, 9581, 1488, 8125, 6995, 2528, - 7140, 6221, 1417, 9802, 898, 1689, 3308, 303, 2075, - 6622, 9167, 6725, 8722, 8663, 7196, 3835, 4992, 1316, - 6283, 4758, 9632, 641, 8717, 8903, 403, 3496, 5247, - 9491, 1114, 4440, 9898, 4961, 6497, 4291, 3663, 5161, - 7340, 9289, 8727, 7951, 3216, 9826, 2744, 1742, 7964, - 1100, 4645, 9, 4874, 3732, 9378, 1625, 9820, 3610, - 1545, 6999, 6918, 1895, 3580, 9207, 2130, 5891, 4573, - 1060, 9808, 4116, 943, 4244, 5724, 4961, 3837, 8840, - 389, 1348, 833, 3514, 5170, 1683, 7789, 9831, 5974, - 9125, 5396, 7463, 6389, 1948, 8359, 6877, 7522, 8087, - 7219, 1695, 4668, 662, 3386, 4803, 1414, 7277, 1481, - 9053, 9285, 9508, 5533, 205, 8818, 4845, 7696, 8623, - 9000]), - values=tensor([9.0347e-01, 8.9097e-01, 3.9050e-01, 1.1761e-01, - 7.3236e-01, 1.8076e-01, 1.7435e-01, 3.1968e-01, - 6.2094e-02, 6.9707e-01, 9.2156e-01, 2.5833e-01, - 3.9175e-01, 8.0344e-02, 8.5392e-01, 7.0595e-02, - 6.3035e-01, 4.6849e-01, 4.1355e-01, 5.1765e-01, - 6.7505e-01, 7.7298e-01, 3.0196e-01, 2.7160e-01, - 4.9835e-01, 7.7842e-01, 7.9100e-01, 6.1241e-01, - 9.2766e-01, 8.1158e-01, 9.2979e-01, 2.0087e-01, - 6.3444e-01, 1.1919e-01, 2.9666e-01, 8.1344e-01, - 2.3658e-01, 7.2770e-01, 3.4239e-01, 6.0915e-01, - 3.8756e-01, 4.8989e-01, 5.2955e-01, 9.2633e-01, - 7.9699e-01, 9.0118e-01, 6.9847e-02, 3.2137e-02, - 8.8024e-01, 6.0500e-01, 4.6450e-01, 8.5960e-02, - 5.6130e-01, 3.3831e-01, 9.3616e-01, 8.2763e-01, - 2.4098e-01, 8.9643e-01, 9.8468e-01, 8.8817e-01, - 4.6338e-01, 1.1147e-02, 8.1814e-01, 9.2206e-02, - 1.0005e-01, 6.2906e-01, 
6.9146e-01, 1.5603e-01, - 3.1235e-01, 6.2561e-01, 3.7711e-01, 6.5022e-01, - 1.2930e-01, 6.7901e-02, 7.2399e-01, 7.8736e-01, - 8.8706e-02, 7.0933e-01, 9.0665e-01, 1.0525e-01, - 7.0947e-01, 7.6785e-01, 7.4226e-01, 9.8559e-01, - 9.2459e-01, 1.7157e-01, 7.4478e-01, 7.9596e-01, - 7.2144e-01, 1.9958e-01, 6.1043e-01, 2.7464e-01, - 5.8302e-01, 9.7091e-01, 4.8610e-01, 8.4624e-01, - 8.7645e-01, 7.2595e-01, 7.4275e-01, 2.6952e-01, - 8.8491e-01, 6.7085e-02, 7.5574e-01, 6.1435e-01, - 1.3665e-01, 8.8809e-01, 2.2500e-01, 7.6147e-01, - 5.8642e-01, 8.3791e-01, 3.2752e-01, 4.9244e-01, - 6.5160e-01, 6.2237e-01, 5.6666e-01, 1.2589e-01, - 5.4657e-01, 1.6810e-01, 1.1783e-01, 3.3742e-01, - 6.8221e-01, 1.2268e-02, 4.7183e-01, 6.4275e-01, - 3.3058e-01, 6.2514e-01, 2.7607e-01, 5.2090e-01, - 3.9798e-01, 7.4704e-02, 3.1431e-01, 2.3156e-01, - 2.4983e-01, 7.9235e-01, 6.1773e-02, 4.1696e-01, - 2.9905e-01, 6.8152e-01, 3.4211e-01, 6.0393e-01, - 3.6186e-02, 8.0620e-01, 9.8874e-01, 4.6252e-01, - 8.4231e-01, 9.1653e-01, 4.6135e-01, 2.8152e-01, - 9.7319e-01, 4.3848e-01, 5.7170e-01, 4.9901e-01, - 7.4233e-01, 2.6089e-01, 8.3401e-01, 2.3202e-01, - 8.4769e-01, 8.7678e-01, 7.1420e-01, 8.9378e-01, - 5.6124e-01, 4.0637e-01, 3.8693e-01, 8.0881e-02, - 2.6688e-01, 5.6776e-01, 4.3004e-01, 4.7054e-01, - 2.0392e-01, 7.2817e-01, 6.6755e-01, 5.1867e-01, - 5.2670e-01, 1.4618e-01, 7.3183e-01, 8.6973e-01, - 1.9126e-01, 4.9009e-01, 2.5587e-01, 4.0650e-01, - 9.8806e-01, 9.6733e-01, 1.1171e-01, 7.9880e-01, - 6.7184e-01, 7.4651e-02, 1.7680e-01, 9.4485e-01, - 8.4713e-01, 1.4694e-01, 9.3410e-01, 6.8790e-01, - 6.7995e-01, 3.4766e-01, 3.3209e-01, 8.5193e-01, - 5.7959e-01, 2.9187e-01, 3.0229e-01, 9.7462e-01, - 7.8153e-01, 2.9900e-01, 3.0648e-01, 9.4053e-01, - 7.5039e-01, 7.3664e-01, 2.3912e-01, 5.6769e-01, - 9.2225e-01, 1.2617e-01, 4.4870e-01, 5.2352e-01, - 4.9831e-01, 4.2978e-01, 5.8379e-03, 5.1101e-01, - 1.1850e-02, 4.3076e-02, 2.0400e-01, 4.3425e-02, - 2.0565e-01, 2.5026e-01, 5.8630e-01, 9.9979e-01, - 5.5005e-01, 2.8369e-01, 7.0988e-01, 9.9716e-01, - 1.0843e-01, 5.2002e-01, 8.1336e-01, 4.4712e-01, - 1.7801e-01, 4.1042e-01, 8.1718e-01, 7.5846e-01, - 6.4002e-01, 8.1764e-01, 9.2236e-01, 2.7385e-01, - 2.2693e-01, 1.3388e-01, 2.7481e-01, 2.7276e-01, - 6.3817e-01, 8.1593e-01, 7.1721e-01, 1.0760e-01, - 3.2614e-01, 4.3753e-03, 9.9770e-01, 9.1052e-01, - 1.4963e-01, 1.3781e-01, 1.1034e-01, 1.1895e-02, - 8.2173e-01, 2.6975e-01, 3.5686e-01, 5.8749e-01, - 1.5253e-01, 6.6786e-01, 9.1749e-01, 8.5611e-01, - 5.9711e-01, 8.9352e-01, 7.2370e-01, 4.2727e-01, - 4.7201e-01, 6.1878e-01, 1.3255e-01, 4.0640e-01, - 6.1650e-01, 9.0122e-01, 3.3783e-01, 1.2667e-01, - 5.3203e-01, 7.8517e-01, 4.7198e-01, 8.0031e-01, - 9.8415e-02, 6.3035e-01, 8.4415e-01, 4.0094e-01, - 3.8505e-01, 5.1696e-01, 9.5335e-01, 7.6890e-01, - 5.9772e-01, 8.7886e-01, 7.4086e-01, 8.4781e-01, - 1.6493e-01, 3.3683e-01, 7.1960e-03, 2.9042e-01, - 8.4813e-01, 1.8192e-01, 6.0102e-01, 4.2567e-01, - 9.3317e-02, 3.8724e-01, 3.5787e-01, 9.9992e-01, - 1.2443e-01, 8.2890e-01, 2.8195e-01, 6.3051e-01, - 6.0894e-01, 9.5920e-01, 6.4943e-01, 6.5476e-01, - 6.7692e-01, 5.7763e-01, 7.8022e-01, 4.1886e-01, - 5.0205e-01, 2.5778e-01, 9.0479e-01, 8.7006e-01, - 6.1226e-01, 6.4647e-01, 1.5596e-01, 7.0047e-01, - 6.7722e-01, 5.4595e-01, 4.9292e-01, 9.0526e-01, - 4.8174e-02, 8.1995e-02, 7.1448e-01, 2.7007e-02, - 6.2586e-01, 6.7855e-01, 4.6902e-01, 3.6021e-01, - 3.9907e-01, 1.8609e-02, 6.0745e-01, 2.4758e-01, - 6.2019e-02, 9.3085e-01, 3.9521e-01, 8.9117e-01, - 5.9487e-01, 4.2672e-02, 7.4925e-01, 8.7766e-02, - 5.8346e-01, 6.5657e-01, 
8.0155e-01, 4.5151e-01, - 5.5583e-01, 7.1994e-01, 6.1938e-01, 4.9424e-01, - 8.4852e-01, 2.5577e-01, 7.5244e-01, 5.8561e-01, - 1.6622e-01, 5.3017e-01, 7.0377e-01, 1.6795e-01, - 6.1195e-01, 5.0235e-01, 4.0852e-01, 2.4062e-01, - 3.7706e-01, 7.1399e-01, 1.5932e-01, 2.0246e-01, - 9.3118e-01, 3.6824e-01, 9.2308e-01, 4.1388e-01, - 3.9074e-01, 2.1232e-02, 4.8230e-01, 3.1334e-02, - 7.6588e-01, 8.9020e-01, 9.5571e-02, 9.3996e-01, - 3.3502e-01, 1.6456e-02, 4.8873e-01, 9.0152e-01, - 6.0836e-01, 6.2449e-02, 2.3911e-01, 2.5569e-01, - 1.9099e-01, 6.5319e-01, 3.4599e-01, 9.4871e-01, - 1.5293e-01, 9.6291e-01, 5.9772e-01, 9.2359e-01, - 6.8691e-01, 6.3677e-01, 7.7151e-01, 5.4282e-01, - 9.3692e-01, 3.1526e-01, 9.7688e-01, 8.3385e-01, - 2.2972e-01, 6.2720e-01, 2.5945e-01, 2.4371e-01, - 1.9518e-01, 9.3585e-01, 5.0154e-01, 4.9345e-01, - 9.2069e-01, 2.8370e-01, 2.0941e-01, 2.7517e-01, - 3.3759e-01, 3.1978e-01, 9.0873e-01, 7.9081e-01, - 6.9871e-01, 2.8844e-01, 7.5941e-01, 4.7964e-01, - 4.1805e-01, 7.2289e-01, 7.1452e-01, 2.2869e-01, - 3.5827e-01, 5.0632e-01, 5.1573e-01, 6.2442e-01, - 5.5513e-01, 3.8608e-01, 6.5218e-02, 2.8971e-01, - 9.3603e-01, 9.9599e-01, 2.9308e-02, 5.0704e-01, - 3.3920e-01, 1.3835e-01, 1.3900e-01, 8.3661e-01, - 8.7030e-02, 1.9115e-01, 8.1969e-01, 2.0766e-01, - 2.0634e-04, 3.2996e-03, 3.5546e-01, 6.9885e-01, - 8.1732e-01, 4.6117e-01, 9.2421e-01, 7.9629e-01, - 9.9088e-01, 9.9274e-01, 1.1080e-01, 1.7902e-01, - 7.9706e-01, 1.8505e-01, 7.4092e-01, 7.3209e-01, - 1.2144e-01, 5.6902e-01, 9.3325e-01, 2.2999e-01, - 6.3010e-01, 9.2148e-01, 2.9383e-01, 4.2274e-01, - 1.2992e-01, 1.8436e-01, 7.7452e-01, 9.5314e-03, - 8.7521e-01, 3.7291e-01, 3.4776e-01, 9.8447e-01, - 9.2182e-01, 6.2103e-01, 7.8077e-01, 6.7523e-01, - 1.6309e-01, 9.9005e-01, 7.0418e-01, 7.5393e-01, - 4.9572e-02, 5.3018e-01, 5.0127e-02, 9.4383e-01, - 9.3893e-01, 2.9396e-01, 1.7931e-01, 3.2387e-01, - 5.3937e-01, 4.4591e-01, 4.1519e-01, 6.4965e-01, - 9.0437e-01, 8.3040e-01, 1.4310e-01, 3.5145e-02, - 4.6746e-01, 5.6639e-01, 4.2557e-01, 8.3449e-01, - 7.7874e-01, 8.0354e-03, 7.7620e-01, 6.3805e-01, - 9.9677e-01, 2.9228e-01, 7.5318e-01, 7.8356e-01, - 9.8216e-01, 6.5817e-01, 3.5696e-01, 2.7232e-01, - 3.7271e-01, 2.1862e-01, 6.4841e-01, 3.4376e-01, - 6.0935e-01, 9.5134e-02, 6.6915e-01, 8.1621e-01, - 4.0272e-01, 6.1882e-01, 5.0284e-02, 8.7929e-01, - 4.2125e-01, 7.6634e-01, 2.5193e-01, 1.3171e-01, - 5.5583e-01, 1.3959e-01, 1.7579e-01, 7.2551e-01, - 6.6240e-01, 1.9258e-01, 3.6612e-01, 1.8147e-01, - 5.7841e-01, 7.8278e-01, 4.2669e-02, 6.6255e-01, - 2.8767e-01, 8.0007e-02, 3.6674e-04, 3.9335e-01, - 2.1417e-01, 2.2919e-01, 1.2683e-01, 9.0001e-01, - 9.9290e-01, 4.5741e-01, 2.3197e-01, 1.8836e-01, - 9.6565e-01, 8.0740e-01, 1.9854e-01, 5.3042e-02, - 8.1469e-01, 4.9040e-01, 7.2177e-01, 6.4045e-01, - 4.9810e-01, 2.5103e-02, 4.7475e-01, 4.7844e-02, - 6.0969e-01, 9.1481e-01, 4.4597e-01, 1.1494e-01, - 4.9666e-01, 6.1275e-01, 8.7093e-01, 1.3668e-01, - 9.5248e-01, 5.9063e-01, 4.9499e-01, 9.1388e-01, - 4.0836e-01, 9.3011e-01, 8.1737e-01, 5.9702e-01, - 9.9251e-01, 9.3234e-01, 7.8850e-01, 5.3497e-01, - 5.6422e-01, 3.4891e-01, 7.9452e-01, 8.5710e-01, - 9.6578e-01, 5.9804e-01, 4.3277e-01, 7.3290e-01, - 1.0449e-01, 4.8889e-01, 9.8684e-01, 3.1549e-02, - 8.2245e-02, 9.4666e-01, 1.7503e-01, 1.3455e-01, - 8.0620e-01, 9.4127e-01, 6.0203e-01, 2.4925e-01, - 2.1869e-01, 3.0844e-02, 4.0776e-01, 2.7968e-01, - 7.2045e-01, 4.5107e-02, 7.6360e-01, 6.9114e-01, - 3.5384e-01, 2.6205e-01, 4.0814e-01, 4.1925e-01, - 9.5840e-01, 2.7948e-01, 9.9578e-01, 5.4533e-01, - 4.9157e-01, 1.7568e-01, 
4.9298e-01, 1.2249e-01, - 4.9184e-01, 5.8221e-01, 3.4826e-01, 2.3718e-01, - 4.0414e-02, 9.8825e-01, 6.8724e-01, 4.1618e-01, - 3.5580e-01, 9.8542e-01, 6.1434e-01, 1.5506e-01, - 1.8635e-01, 3.4507e-01, 8.7226e-01, 6.4723e-01, - 1.1927e-01, 6.1913e-02, 5.8115e-01, 5.0795e-02, - 6.9421e-01, 8.5543e-01, 4.6361e-01, 1.7225e-01, - 7.1720e-01, 5.2724e-01, 2.7571e-01, 7.8026e-01, - 9.7859e-01, 3.3684e-01, 9.4074e-01, 8.4877e-01, - 1.6576e-02, 3.2110e-01, 8.1119e-01, 4.4604e-02, - 7.5553e-01, 8.9513e-01, 2.5486e-01, 5.6611e-01, - 1.5194e-01, 8.0984e-01, 1.1473e-02, 5.0922e-02, - 1.9459e-01, 2.4450e-01, 5.2269e-01, 8.8459e-01, - 1.5980e-01, 5.1724e-01, 3.6058e-01, 3.8147e-01, - 8.0778e-02, 6.8899e-01, 9.3518e-01, 3.9577e-01, - 6.8067e-01, 2.9545e-01, 2.1034e-01, 8.0864e-01, - 3.0905e-01, 3.9012e-01, 5.0504e-01, 3.5331e-01, - 2.3990e-01, 1.9663e-01, 4.7629e-02, 8.8145e-01, - 1.6688e-01, 3.2309e-01, 6.0635e-01, 6.9179e-01, - 8.4762e-01, 4.8298e-01, 4.2062e-01, 8.8411e-01, - 3.1172e-01, 4.7812e-02, 1.8446e-01, 3.6828e-01, - 2.3400e-01, 8.2280e-02, 7.9717e-01, 4.5737e-02, - 6.3521e-01, 7.6517e-01, 5.7003e-01, 8.6854e-01, - 6.3527e-01, 5.4238e-01, 6.1423e-02, 9.0375e-01, - 9.5888e-01, 3.1839e-01, 2.4367e-01, 6.4739e-01, - 8.1586e-01, 2.4076e-01, 9.7343e-01, 4.9856e-01, - 7.2246e-01, 5.0023e-01, 1.2692e-01, 7.4359e-01, - 3.5270e-01, 2.8465e-01, 3.0118e-01, 3.5307e-01, - 6.2379e-01, 7.1186e-01, 6.6474e-01, 8.4095e-02, - 8.8565e-01, 2.9464e-01, 5.4755e-01, 9.0701e-01, - 8.9197e-01, 1.8235e-01, 9.3370e-01, 8.6788e-01, - 1.0571e-01, 3.0684e-01, 2.0394e-01, 6.1322e-01, - 7.6393e-01, 7.5679e-01, 2.3964e-01, 2.2054e-01, - 4.5451e-01, 3.5051e-01, 7.3550e-02, 9.3935e-01, - 2.5262e-01, 9.1953e-01, 2.4572e-01, 1.1299e-01, - 5.8933e-01, 2.5009e-01, 8.9652e-01, 6.5729e-02, - 1.7446e-01, 3.5451e-01, 5.3832e-01, 6.5479e-01, - 6.3736e-01, 1.1466e-01, 4.7192e-01, 9.4751e-01, - 4.2249e-01, 8.0013e-01, 8.3662e-01, 2.7945e-01, - 4.9136e-01, 2.0754e-01, 1.5234e-01, 2.6470e-01, - 9.8611e-01, 2.0121e-01, 6.4605e-01, 1.4810e-01, - 3.8203e-01, 1.0221e-02, 6.4883e-01, 3.7738e-01, - 6.6558e-01, 1.0409e-01, 3.3837e-01, 4.1054e-01, - 2.3450e-01, 5.9670e-01, 6.3347e-01, 9.3841e-01, - 7.1202e-01, 8.2758e-02, 3.3401e-01, 4.6725e-01, - 5.5433e-01, 4.1060e-01, 4.8108e-01, 2.6006e-02, - 5.0602e-01, 7.5691e-01, 8.3228e-01, 5.5461e-01, - 9.7723e-01, 9.5142e-02, 4.8502e-01, 4.7276e-01, - 4.6702e-01, 6.8685e-02, 4.7602e-01, 9.8285e-01, - 8.7376e-02, 7.2154e-01, 6.4366e-02, 8.0105e-01, - 6.7920e-01, 3.4169e-01, 9.9690e-01, 8.6900e-02, - 6.8423e-01, 2.2307e-01, 6.6827e-01, 6.2007e-01, - 7.2812e-01, 8.1208e-01, 8.1304e-01, 4.9537e-02, - 2.2654e-01, 1.0913e-01, 1.8736e-01, 7.4063e-01, - 7.0233e-01, 5.2413e-01, 9.2392e-01, 4.3601e-01, - 8.7513e-01, 9.5555e-02, 5.5401e-01, 4.9938e-02, - 6.6611e-02, 5.3894e-01, 7.4381e-01, 8.2785e-01, - 4.4283e-01, 2.1861e-01, 8.1992e-02, 5.3650e-01, - 2.7083e-01, 9.6992e-02, 6.5700e-01, 3.6738e-01, - 5.3582e-02, 6.8961e-01, 5.1692e-01, 4.7811e-01, - 9.5507e-01, 1.1629e-01, 4.4608e-01, 9.1741e-01, - 6.7667e-01, 2.8148e-02, 7.4903e-01, 7.8503e-01, - 4.6236e-02, 4.6999e-02, 1.9415e-01, 5.8769e-01, - 3.9665e-01, 2.8180e-01, 6.6891e-01, 5.2471e-01, - 5.6718e-01, 3.6965e-01, 4.8691e-01, 1.2448e-01, - 3.7830e-01, 7.1508e-01, 5.3843e-01, 1.9964e-01, - 4.6846e-01, 3.0749e-01, 5.0821e-01, 4.7264e-01, - 6.5869e-01, 7.5655e-01, 6.6608e-01, 4.4097e-01, - 8.8246e-01, 5.5178e-01, 3.1991e-01, 6.1838e-01, - 5.4250e-01, 3.8407e-01, 6.9734e-01, 5.6089e-01, - 7.7507e-01, 1.5344e-01, 8.1394e-01, 3.8479e-02, - 5.6357e-02, 3.7774e-01, 
7.2734e-01, 7.5830e-02, - 9.5355e-01, 4.6254e-01, 5.1318e-01, 7.7011e-01, - 2.9491e-01, 1.3797e-01, 8.7797e-01, 8.0879e-01, - 5.8383e-01, 1.2735e-01, 2.6792e-01, 7.5423e-01, - 8.5687e-01, 5.6856e-01, 3.0846e-01, 7.2150e-01, - 2.0158e-01, 4.4285e-01, 4.3074e-01, 8.2410e-01, - 5.6984e-01, 2.3044e-01, 8.7317e-01, 5.4302e-01, - 1.6661e-01, 1.5551e-01, 5.3661e-01, 9.8902e-01, - 6.2303e-01, 5.7449e-01, 9.7497e-01, 6.9276e-01, - 2.8973e-01, 2.2008e-01, 7.4155e-02, 6.3535e-01, - 3.2883e-01, 7.0117e-01, 3.0143e-01, 5.7505e-01, - 3.4680e-01, 2.7660e-01, 7.0338e-02, 7.8984e-01, - 6.7461e-01, 2.5361e-01, 8.3933e-01, 7.1929e-01, - 1.4013e-01, 9.9655e-01, 2.4267e-01, 9.3212e-01, - 4.6070e-01, 2.2070e-01, 6.6336e-01, 1.3432e-01, - 5.3597e-01, 5.1768e-01, 7.6964e-01, 9.9864e-01, - 5.3829e-01, 3.1592e-01, 9.3386e-01, 5.8600e-01, - 1.2704e-01, 5.0213e-01, 6.2221e-02, 1.0695e-01, - 2.6995e-01, 2.6387e-01, 9.3927e-01, 2.7555e-01, - 3.1073e-01, 1.1755e-01, 8.1059e-01, 3.6864e-01, - 2.6251e-01, 5.7401e-01, 2.8597e-02, 8.6585e-02]), + col_indices=tensor([8051, 1599, 2283, 8734, 5484, 8052, 1033, 5062, 5616, + 5419, 4308, 2079, 1440, 2127, 2168, 2073, 8984, 8673, + 65, 5048, 6264, 7272, 6134, 8402, 3839, 8826, 5348, + 6807, 8674, 2105, 8060, 5095, 5686, 8441, 79, 1539, + 1131, 7284, 3402, 3347, 9857, 2432, 7845, 2429, 7006, + 6227, 1560, 8467, 4313, 1162, 5895, 7313, 4937, 4397, + 3837, 3830, 1729, 1190, 6513, 1234, 2132, 7931, 4390, + 4136, 4699, 7536, 6619, 8386, 3972, 2096, 7193, 5905, + 4821, 870, 2387, 1709, 4526, 2114, 4874, 6742, 1708, + 3159, 4703, 1183, 3547, 7638, 4183, 2668, 3001, 7510, + 5464, 7713, 562, 1506, 9874, 6437, 7593, 834, 7110, + 3375, 4891, 1244, 7904, 2138, 9927, 1991, 9239, 4040, + 4172, 1928, 916, 731, 2170, 249, 3190, 8648, 5281, + 6305, 1686, 4194, 1, 5784, 8930, 9971, 8188, 6621, + 8222, 2810, 1461, 5246, 8556, 6518, 8182, 8352, 9858, + 273, 4159, 5138, 5706, 2495, 9838, 7148, 1130, 8668, + 8388, 622, 6745, 4309, 2349, 3486, 888, 8283, 2072, + 5856, 2103, 1346, 5852, 701, 4726, 8808, 1594, 4024, + 4082, 7135, 4755, 1653, 8944, 8504, 8847, 6264, 3780, + 2927, 297, 6290, 656, 729, 7747, 6463, 1525, 4911, + 3006, 5176, 538, 2279, 385, 5149, 8464, 9845, 6083, + 8814, 4227, 5205, 2447, 8450, 9052, 5559, 5231, 7304, + 8233, 6683, 6868, 5783, 6558, 8483, 4854, 7923, 954, + 938, 525, 7045, 2144, 6194, 2647, 4645, 7611, 3288, + 2153, 5779, 1447, 6300, 4717, 5019, 9757, 6339, 3977, + 4770, 3421, 2888, 8192, 7859, 4577, 8805, 677, 4510, + 7639, 521, 388, 2010, 6516, 1202, 783, 8076, 2646, + 760, 8224, 3447, 6002, 5344, 6480, 5828, 336, 2895, + 13, 7586, 3619, 8972, 1197, 2559, 5346, 8874, 5682, + 660, 482, 4840, 5153, 1135, 7446, 196, 677, 5194, + 5938, 5797, 7781, 6375, 8349, 7901, 1095, 7355, 6427, + 3347, 1849, 6901, 5297, 8488, 1543, 5876, 9185, 629, + 33, 1517, 7164, 6519, 450, 2120, 2223, 3793, 964, + 9938, 1921, 2286, 7967, 1968, 679, 3401, 8526, 9771, + 5796, 8406, 7919, 7987, 2006, 6388, 6423, 7431, 8279, + 4159, 305, 2452, 4214, 443, 4427, 2190, 8502, 3787, + 5166, 172, 7528, 4257, 4199, 3783, 7109, 3249, 4606, + 4264, 5740, 3240, 5758, 4350, 5046, 1539, 9051, 4952, + 5398, 22, 1575, 2066, 207, 4834, 2222, 2319, 3120, + 2747, 6792, 4636, 779, 9871, 3552, 7370, 7793, 1567, + 9808, 5267, 8882, 9148, 5093, 4408, 7784, 3293, 5675, + 3254, 5525, 6213, 278, 5012, 4916, 9363, 4183, 6270, + 4518, 1481, 8525, 5001, 6826, 9492, 7624, 2038, 6497, + 9047, 8398, 9361, 8087, 881, 8522, 2519, 5074, 8419, + 217, 9296, 802, 4840, 2559, 2965, 6803, 167, 3457, + 2336, 9772, 8558, 1148, 5439, 266, 4256, 
1379, 6798, + 2616, 1227, 5990, 8680, 4360, 4876, 8500, 6178, 6464, + 7336, 1522, 6605, 5844, 3275, 7752, 3506, 1202, 9892, + 2231, 5421, 7184, 308, 248, 6872, 7167, 7885, 7565, + 9435, 2723, 513, 9959, 2219, 6515, 9856, 1943, 6801, + 7200, 9268, 4596, 8167, 1169, 2135, 2211, 4460, 7759, + 8575, 5051, 8970, 309, 6997, 5495, 5440, 1032, 1964, + 2040, 3239, 684, 5282, 7092, 2495, 2869, 4243, 7778, + 531, 4186, 8217, 7081, 1126, 2452, 7935, 3224, 2968, + 9372, 1467, 3041, 2259, 8597, 1105, 7054, 2197, 8149, + 5673, 2589, 2690, 1915, 4945, 668, 6490, 8806, 248, + 2809, 9624, 7401, 4804, 5320, 44, 8568, 3474, 8997, + 2983, 2312, 3273, 7936, 4061, 2835, 4939, 4917, 4011, + 5316, 6478, 4057, 2854, 3058, 3956, 5156, 5692, 936, + 6241, 8319, 529, 8414, 7974, 9239, 4999, 5972, 2439, + 4952, 9186, 9225, 242, 8021, 259, 2302, 750, 5222, + 3471, 5759, 2520, 7029, 5143, 4716, 72, 5586, 9384, + 1436, 4253, 9605, 6159, 5557, 2005, 9761, 4496, 1051, + 5211, 7597, 1872, 4616, 6903, 6056, 2866, 9516, 3311, + 7552, 9960, 133, 3682, 6603, 5666, 8620, 5250, 9928, + 3490, 8135, 9229, 7804, 37, 7465, 1582, 7349, 1091, + 5463, 4058, 4039, 3064, 7654, 3274, 5442, 8428, 3579, + 7433, 1314, 2695, 7219, 9829, 2092, 4512, 9734, 2444, + 36, 762, 2697, 6512, 1992, 9121, 1348, 765, 8176, + 7843, 2145, 5132, 3353, 5248, 7710, 8705, 4345, 6592, + 5349, 1262, 5154, 9347, 2070, 7862, 5373, 7495, 8291, + 231, 8719, 4145, 7666, 9077, 9022, 6563, 3652, 261, + 4641, 8713, 9955, 1791, 9044, 2961, 1243, 7643, 6732, + 1860, 8189, 4178, 4770, 679, 9009, 3086, 7941, 9751, + 5161, 2879, 7608, 8566, 4943, 8286, 6113, 4593, 4588, + 5310, 4825, 5945, 5149, 8663, 1183, 1722, 1585, 9139, + 8443, 9839, 8134, 9365, 3821, 9850, 1532, 275, 5690, + 8733, 6144, 1786, 5783, 1871, 8203, 4474, 7691, 5240, + 6995, 5957, 205, 4804, 9579, 5066, 1193, 8788, 7288, + 7592, 4525, 9584, 7411, 6700, 5867, 7709, 4671, 3006, + 5332, 9892, 5274, 3520, 5568, 4861, 5692, 8644, 2505, + 1365, 4748, 8064, 2182, 3684, 1646, 8597, 1026, 8659, + 6415, 2306, 8932, 3144, 6045, 6213, 8158, 8103, 1537, + 1154, 4342, 9949, 3426, 3204, 603, 5268, 1396, 7293, + 9055, 3225, 3149, 6852, 8430, 5854, 2296, 566, 794, + 7736, 6515, 8291, 6121, 9037, 8474, 5891, 6370, 2670, + 3311, 9603, 8497, 9438, 1422, 3980, 2277, 594, 705, + 4061, 2333, 6053, 5594, 5868, 7921, 3938, 9298, 766, + 4773, 1528, 3789, 9383, 8161, 9765, 7628, 8908, 8343, + 599, 201, 4397, 8656, 6053, 5021, 2351, 4778, 5354, + 9889, 5019, 4778, 5765, 7425, 1061, 286, 3532, 3056, + 1198, 6689, 4359, 7388, 5795, 279, 2105, 697, 3185, + 449, 3494, 5630, 9637, 4111, 243, 5258, 9262, 691, + 1104, 7477, 7882, 2463, 4760, 311, 7595, 6156, 7453, + 276, 694, 4930, 2823, 9327, 8480, 1337, 3682, 9846, + 1689, 6320, 4619, 4472, 9593, 9440, 9469, 5078, 9842, + 9731, 6058, 216, 3955, 1361, 7905, 8512, 4680, 1474, + 5441, 1624, 5236, 4575, 538, 3112, 2936, 5311, 1761, + 1107, 2214, 7876, 5040, 9281, 1160, 2647, 8089, 5072, + 9356, 4834, 1087, 8788, 3235, 8923, 2268, 7908, 1428, + 4082, 4709, 7970, 1648, 9122, 160, 5668, 2050, 598, + 1384, 5846, 6720, 8155, 1851, 2728, 6735, 807, 7063, + 2333, 6469, 1518, 9863, 2194, 5201, 3124, 3870, 7766, + 8406, 1473, 8814, 880, 7675, 5453, 2801, 6076, 9823, + 3146, 6262, 2825, 8067, 7204, 7565, 6031, 176, 5203, + 8487, 9078, 604, 5724, 9880, 9519, 1772, 4587, 6166, + 8523, 1583, 5777, 8578, 8238, 2626, 3684, 2707, 1257, + 377, 8616, 3606, 7442, 4317, 9543, 1088, 9073, 1503, + 1185, 9370, 443, 323, 8295, 7046, 2687, 5328, 2859, + 7380, 1784, 3768, 8399, 4787, 5875, 2373, 8607, 2143, + 509, 5009, 
6263, 9687, 4783, 8208, 5068, 5676, 61, + 4125]), + values=tensor([4.5946e-01, 7.3258e-01, 3.2183e-01, 3.9526e-02, + 3.4486e-01, 6.5710e-01, 2.3905e-01, 5.2858e-01, + 9.3491e-02, 9.1034e-01, 7.9315e-01, 5.5576e-01, + 9.6631e-01, 1.2445e-02, 7.6340e-02, 8.1172e-01, + 6.7984e-01, 2.2947e-02, 1.9133e-01, 9.5081e-01, + 5.4037e-01, 6.3600e-01, 4.7019e-01, 1.9762e-01, + 5.7647e-01, 2.0710e-01, 9.1872e-01, 5.2648e-01, + 9.7910e-01, 8.9766e-01, 6.9052e-01, 9.7145e-01, + 3.7885e-03, 4.9172e-01, 6.9473e-02, 8.5740e-01, + 5.3512e-01, 1.8649e-01, 9.0594e-01, 5.2295e-03, + 1.4189e-01, 1.3839e-01, 9.9568e-01, 1.8600e-01, + 2.9584e-01, 7.7434e-01, 5.6470e-01, 2.9708e-01, + 4.6194e-01, 9.9959e-01, 9.4283e-01, 4.0618e-02, + 4.4777e-01, 7.5608e-01, 4.5770e-01, 3.2517e-01, + 3.4050e-01, 1.4186e-01, 8.2982e-01, 1.3928e-01, + 7.8422e-01, 5.5421e-01, 7.7509e-01, 9.2245e-01, + 4.5293e-01, 8.1497e-01, 9.6544e-01, 4.6762e-01, + 5.7978e-01, 5.4383e-03, 2.7289e-01, 4.7900e-01, + 4.0078e-01, 8.7468e-01, 9.5529e-01, 5.4730e-01, + 7.7029e-01, 3.6633e-01, 6.4382e-01, 4.5555e-01, + 4.5106e-01, 2.8151e-01, 1.2667e-01, 5.8796e-04, + 1.2104e-01, 1.4784e-01, 3.6453e-02, 3.1444e-01, + 1.0992e-01, 1.6510e-01, 2.9114e-01, 5.5407e-01, + 8.4147e-01, 7.8012e-01, 4.5377e-01, 5.6989e-01, + 8.9604e-01, 4.7816e-01, 1.9104e-01, 6.1345e-01, + 2.5116e-02, 2.4165e-01, 2.2395e-01, 7.4087e-01, + 7.8879e-01, 4.9546e-02, 3.5004e-01, 6.8395e-01, + 6.8123e-01, 7.1124e-01, 6.9444e-01, 7.6560e-01, + 2.4237e-01, 6.2525e-01, 2.8653e-01, 2.7820e-01, + 5.0228e-01, 2.9317e-01, 9.4579e-01, 2.9115e-01, + 4.7597e-01, 5.2354e-01, 1.3772e-01, 2.3282e-01, + 4.5452e-01, 5.4296e-01, 1.9746e-01, 4.9147e-01, + 5.6899e-01, 5.6414e-02, 8.6244e-01, 3.2564e-01, + 1.0659e-01, 7.5965e-01, 9.6472e-01, 7.7750e-01, + 7.2205e-01, 8.0626e-01, 1.6637e-01, 8.5951e-01, + 6.4409e-01, 1.9179e-01, 8.9400e-01, 4.4630e-01, + 5.4902e-01, 3.4189e-01, 9.3649e-01, 5.4304e-01, + 2.6871e-01, 9.9609e-01, 9.7055e-01, 7.8210e-02, + 2.6589e-01, 3.9284e-01, 6.4817e-01, 9.8209e-01, + 9.1302e-01, 8.8956e-01, 7.8342e-01, 6.8394e-02, + 1.9243e-01, 1.8916e-01, 2.0395e-02, 8.9983e-01, + 1.0572e-01, 7.2123e-03, 9.0943e-01, 1.2798e-01, + 4.0205e-01, 6.8312e-01, 6.3117e-01, 7.7124e-01, + 1.5012e-01, 7.0230e-01, 7.6793e-01, 5.5521e-01, + 9.6351e-01, 8.3238e-01, 6.5948e-01, 4.1603e-01, + 8.1212e-01, 1.1518e-01, 9.3599e-01, 9.1141e-01, + 4.0160e-01, 1.1031e-01, 6.8201e-01, 7.9750e-02, + 1.5131e-01, 3.0507e-01, 2.3572e-01, 2.6086e-01, + 5.9391e-01, 1.6987e-01, 6.1560e-01, 1.6026e-01, + 6.8909e-01, 4.2929e-01, 1.2648e-01, 2.4778e-01, + 8.8063e-01, 3.8808e-01, 9.1145e-01, 5.9211e-01, + 2.1313e-01, 8.7309e-01, 4.9041e-01, 5.4458e-01, + 6.0568e-02, 1.5273e-01, 2.1917e-01, 8.9171e-01, + 7.6842e-01, 6.8100e-01, 5.1339e-01, 8.5539e-01, + 7.9014e-01, 3.4106e-01, 9.8054e-01, 8.8725e-01, + 4.8515e-01, 6.3120e-01, 2.8359e-01, 4.4387e-01, + 9.8333e-01, 1.4442e-01, 9.4899e-01, 1.3298e-01, + 1.3725e-01, 1.5417e-01, 4.0333e-01, 7.0661e-01, + 2.9406e-01, 3.3407e-01, 5.4939e-01, 3.7469e-01, + 8.6847e-02, 1.8606e-02, 2.6364e-01, 8.6625e-01, + 4.2538e-01, 7.9018e-02, 8.3385e-02, 8.7432e-01, + 6.9022e-02, 5.6012e-01, 5.5396e-01, 8.6056e-02, + 2.7493e-01, 4.6580e-01, 5.3973e-01, 9.3417e-01, + 2.1754e-01, 5.4306e-01, 7.2273e-01, 7.0728e-01, + 4.0171e-01, 4.4226e-01, 7.5257e-01, 5.5960e-01, + 6.4955e-01, 2.6446e-01, 6.4609e-01, 6.0967e-01, + 8.2189e-01, 2.7222e-01, 8.0485e-01, 8.2585e-01, + 8.8918e-01, 1.1833e-01, 5.3330e-02, 8.2071e-01, + 8.6565e-01, 8.7917e-02, 6.7075e-01, 8.3902e-01, + 2.8517e-01, 1.6814e-01, 8.1921e-01, 
7.7657e-01, + 5.3350e-01, 7.1235e-01, 1.6008e-01, 4.5675e-01, + 7.6785e-01, 9.7056e-01, 6.5610e-02, 6.9723e-01, + 1.6378e-01, 6.6450e-01, 9.6388e-01, 5.1290e-01, + 9.3293e-01, 3.6661e-02, 2.4928e-01, 9.5874e-01, + 5.3259e-01, 7.7645e-01, 3.1687e-01, 7.5902e-01, + 3.5244e-01, 1.3920e-01, 9.5771e-01, 9.6623e-01, + 8.5304e-01, 7.2087e-01, 7.8457e-02, 6.7961e-01, + 3.5508e-01, 1.9919e-01, 6.6934e-01, 2.0658e-01, + 4.1045e-01, 2.0825e-01, 5.8313e-01, 7.6872e-01, + 7.0118e-01, 6.8197e-01, 1.6025e-01, 9.4576e-01, + 5.6369e-01, 3.4351e-01, 9.5288e-01, 6.1789e-01, + 3.7849e-01, 8.1046e-01, 7.2634e-01, 6.2178e-01, + 9.0132e-01, 4.2453e-01, 2.4989e-01, 7.5913e-01, + 4.4483e-01, 6.8101e-01, 8.9971e-01, 6.1591e-01, + 4.2701e-01, 8.1809e-01, 8.7847e-01, 6.3719e-01, + 1.9189e-01, 3.1449e-01, 9.5051e-01, 3.9168e-01, + 6.5981e-01, 3.0546e-01, 5.7815e-01, 3.7363e-01, + 9.6624e-01, 9.9249e-01, 5.7548e-02, 5.0491e-01, + 6.7925e-02, 1.7760e-01, 1.6120e-01, 8.2780e-01, + 9.4055e-01, 9.9393e-01, 7.2739e-01, 5.3731e-01, + 4.4199e-01, 9.3566e-01, 9.5553e-01, 8.0232e-01, + 7.2174e-01, 1.6622e-01, 4.9114e-01, 1.5145e-01, + 7.0499e-01, 3.4392e-02, 9.7948e-02, 9.7156e-01, + 4.2698e-01, 8.0234e-01, 9.9610e-01, 2.6332e-01, + 1.1864e-01, 9.1626e-01, 3.5272e-01, 7.0439e-01, + 7.1790e-01, 1.2066e-01, 7.9893e-01, 5.9123e-01, + 6.5508e-01, 6.6859e-01, 1.2754e-01, 9.6390e-01, + 7.4954e-01, 3.6069e-01, 8.7178e-01, 6.7993e-01, + 7.0596e-01, 4.9511e-01, 1.3482e-01, 5.7532e-01, + 7.9822e-01, 3.3260e-01, 7.2901e-01, 8.1016e-01, + 7.4999e-01, 5.2826e-01, 3.7745e-02, 7.3389e-01, + 8.8118e-01, 4.5679e-01, 8.3933e-01, 7.1986e-01, + 1.1778e-02, 7.4856e-01, 9.0986e-01, 3.2757e-01, + 8.7433e-01, 8.0210e-01, 4.4928e-01, 9.8257e-01, + 4.5077e-01, 2.4935e-01, 8.2088e-01, 5.1509e-01, + 9.7175e-01, 6.1798e-01, 3.3494e-01, 5.5720e-01, + 7.1021e-01, 1.2820e-02, 6.0110e-01, 5.1120e-01, + 9.4152e-01, 7.0604e-01, 9.2219e-01, 4.7789e-01, + 2.7184e-02, 9.2364e-02, 1.1378e-01, 8.1903e-02, + 5.2633e-01, 2.2004e-01, 9.4270e-01, 1.5715e-01, + 3.3034e-01, 2.0276e-01, 3.3300e-01, 1.7171e-01, + 5.7790e-01, 1.0489e-01, 9.4021e-01, 6.4008e-01, + 2.6507e-01, 9.8424e-01, 8.3900e-01, 1.0492e-01, + 1.0784e-01, 2.0314e-01, 8.4012e-01, 7.5386e-01, + 8.3103e-01, 6.8133e-01, 1.6198e-01, 6.9745e-01, + 6.5175e-01, 9.7458e-01, 1.6988e-01, 3.2166e-01, + 5.5597e-02, 1.3726e-01, 6.4758e-02, 3.5470e-01, + 4.2157e-02, 3.3140e-01, 2.1755e-01, 8.2344e-01, + 1.1482e-01, 6.6558e-02, 5.0030e-01, 2.6661e-01, + 1.8860e-01, 6.9338e-01, 1.1130e-01, 6.6327e-01, + 2.0672e-02, 9.0766e-03, 5.8228e-01, 6.2774e-01, + 8.7535e-01, 9.4230e-01, 2.5974e-01, 4.4662e-01, + 7.3300e-01, 3.8342e-01, 5.4622e-01, 7.5800e-01, + 1.9445e-01, 1.5889e-01, 4.6194e-01, 1.4057e-01, + 9.8568e-01, 5.8372e-01, 7.4958e-01, 8.0369e-01, + 7.0447e-01, 4.0060e-01, 9.3250e-01, 8.4231e-01, + 4.6412e-01, 8.2441e-02, 3.2440e-01, 9.0483e-01, + 1.4459e-01, 9.3026e-02, 8.0500e-01, 8.6367e-01, + 4.9204e-02, 7.1383e-01, 2.1298e-01, 3.2170e-01, + 6.8371e-01, 2.2845e-01, 9.5039e-01, 8.1321e-01, + 7.9303e-01, 3.9379e-01, 6.2018e-01, 9.3211e-01, + 5.3589e-01, 9.6541e-01, 3.5987e-01, 7.8068e-01, + 3.0999e-02, 8.4578e-02, 2.0643e-01, 7.5566e-01, + 3.2397e-01, 2.6396e-01, 2.4513e-01, 7.2995e-01, + 4.7132e-01, 6.7096e-01, 1.4194e-01, 3.5065e-01, + 5.1518e-01, 2.5044e-01, 4.7192e-01, 3.6179e-01, + 7.1193e-01, 9.4911e-01, 4.9323e-01, 9.6909e-01, + 9.6317e-01, 9.0990e-01, 2.1094e-01, 6.7174e-01, + 4.1568e-01, 4.8016e-01, 8.9218e-01, 5.7976e-02, + 8.8155e-02, 6.3326e-01, 6.3312e-01, 4.8292e-02, + 2.2983e-01, 2.8920e-01, 8.0512e-01, 
7.5347e-01, + 2.5479e-01, 2.4410e-01, 3.1450e-01, 5.9742e-01, + 8.2775e-01, 8.2614e-01, 5.4439e-01, 9.9145e-02, + 7.4325e-01, 6.4359e-01, 5.2221e-01, 2.5014e-01, + 8.4686e-01, 7.8366e-01, 7.6215e-02, 1.5888e-01, + 2.5617e-01, 4.9452e-01, 4.6140e-02, 6.3227e-01, + 5.1908e-02, 4.9987e-02, 9.4102e-01, 9.0959e-01, + 4.0255e-01, 9.4050e-01, 2.2150e-01, 6.7272e-01, + 6.2590e-01, 8.7205e-01, 4.1010e-01, 6.7177e-01, + 9.4497e-01, 8.2790e-01, 7.8311e-01, 3.3823e-01, + 9.3136e-02, 7.0745e-01, 4.6701e-01, 5.0318e-01, + 4.1895e-01, 9.2063e-01, 6.7091e-01, 2.7620e-02, + 1.5263e-01, 3.2904e-01, 9.1053e-01, 9.5389e-01, + 3.0124e-02, 4.5331e-01, 3.9720e-01, 4.3489e-01, + 7.2088e-01, 8.3089e-01, 2.6341e-01, 7.9846e-01, + 4.2539e-01, 7.3847e-01, 8.7127e-01, 4.8591e-02, + 3.7103e-01, 1.8029e-01, 2.5845e-01, 7.8917e-01, + 1.8798e-01, 6.5831e-01, 1.4427e-01, 2.2715e-01, + 9.4881e-01, 4.4926e-01, 5.7396e-01, 6.9662e-01, + 2.6348e-01, 9.8925e-01, 2.0588e-01, 6.6288e-01, + 2.6554e-01, 8.7900e-01, 6.7183e-01, 2.0798e-01, + 9.5432e-01, 3.6738e-01, 6.6895e-01, 8.0690e-01, + 6.4573e-01, 9.7508e-01, 2.7844e-03, 2.5202e-01, + 6.5055e-01, 6.0839e-01, 5.0221e-01, 2.5209e-02, + 4.8274e-01, 8.9783e-01, 2.8949e-01, 1.0603e-02, + 5.2770e-02, 4.1803e-01, 1.4730e-02, 3.4437e-01, + 9.2051e-01, 2.5583e-01, 4.0650e-01, 8.5277e-01, + 7.7383e-01, 8.2424e-01, 3.1569e-01, 3.1378e-01, + 2.0269e-01, 7.4843e-01, 1.9357e-01, 8.0908e-01, + 7.7065e-02, 6.7807e-01, 6.5812e-01, 5.5638e-01, + 7.9630e-01, 8.5545e-01, 1.9897e-01, 8.0426e-01, + 3.1084e-01, 3.6259e-01, 7.1234e-01, 8.3327e-01, + 4.5506e-01, 9.3000e-01, 6.7289e-01, 1.8639e-01, + 7.0558e-01, 4.0226e-01, 6.6117e-01, 9.9290e-01, + 5.3513e-01, 5.7954e-01, 5.4816e-01, 9.0527e-01, + 7.7139e-01, 1.3691e-01, 2.6024e-01, 6.3332e-01, + 2.9416e-01, 5.8457e-01, 6.0563e-01, 4.2957e-01, + 5.7718e-01, 3.7601e-01, 2.2875e-01, 9.1269e-01, + 9.1746e-01, 4.1408e-01, 9.1193e-01, 9.1425e-01, + 4.5774e-01, 4.9530e-01, 8.2413e-01, 9.9173e-01, + 6.8800e-01, 1.8364e-01, 3.8060e-01, 7.9923e-02, + 1.7811e-01, 5.6239e-01, 8.8698e-01, 7.9505e-01, + 3.7647e-01, 2.9195e-01, 5.0363e-01, 1.4056e-01, + 3.2292e-01, 1.7051e-01, 2.5814e-01, 8.9064e-01, + 2.6058e-01, 1.6169e-01, 2.2478e-01, 2.8157e-01, + 3.5113e-01, 1.5661e-01, 8.9999e-01, 2.8839e-01, + 5.1515e-01, 5.4275e-01, 7.4938e-01, 4.0159e-02, + 9.8022e-01, 4.4690e-01, 2.5247e-01, 9.0833e-01, + 1.5324e-01, 2.9941e-01, 9.2181e-01, 1.7705e-01, + 7.3821e-01, 8.4347e-01, 4.5861e-01, 5.5850e-01, + 7.9816e-01, 4.0628e-01, 9.1788e-01, 6.8374e-02, + 4.8899e-01, 4.1792e-01, 1.7257e-01, 6.2230e-01, + 8.6558e-01, 3.0861e-02, 9.7305e-01, 7.1257e-01, + 7.3503e-01, 8.6909e-01, 6.1037e-01, 6.8669e-01, + 3.4771e-01, 9.1813e-01, 5.1253e-01, 1.6881e-01, + 9.8483e-01, 1.0620e-01, 6.5109e-01, 1.1000e-01, + 7.7248e-01, 6.8324e-01, 8.7085e-01, 2.3414e-02, + 8.7830e-01, 9.1900e-01, 3.1486e-01, 2.9535e-01, + 2.1099e-01, 8.4478e-01, 4.7439e-01, 2.3826e-01, + 2.3681e-01, 1.2168e-01, 9.2802e-01, 2.0984e-01, + 6.4891e-01, 6.3908e-01, 8.9262e-01, 1.4968e-01, + 6.9662e-01, 7.4814e-01, 7.8693e-01, 7.0368e-01, + 9.3425e-01, 5.4257e-01, 9.2885e-02, 7.5309e-01, + 4.5689e-01, 4.9504e-03, 8.8225e-01, 1.1832e-01, + 6.2659e-01, 7.5979e-01, 7.3064e-01, 7.8875e-01, + 4.0771e-01, 4.3700e-01, 5.9836e-01, 1.0206e-01, + 2.2863e-01, 4.9592e-01, 2.8892e-01, 8.1112e-01, + 7.7690e-01, 1.9089e-01, 3.8180e-01, 2.1344e-01, + 5.8525e-01, 4.7594e-01, 1.7654e-01, 1.0847e-02, + 2.4480e-01, 4.5246e-01, 3.2157e-01, 3.6559e-01, + 2.0346e-01, 5.8418e-01, 6.1346e-01, 9.4715e-01, + 6.0443e-01, 7.6449e-01, 8.1714e-01, 
5.8462e-01, + 6.8969e-01, 5.3948e-01, 2.4833e-01, 2.8639e-01, + 6.4782e-01, 2.4754e-01, 4.2240e-01, 2.4084e-01, + 7.4270e-01, 3.0256e-01, 5.9779e-01, 8.4729e-01, + 2.3248e-01, 2.3190e-01, 4.9423e-01, 5.2510e-01, + 9.6706e-01, 1.6290e-01, 5.2968e-01, 7.3626e-01, + 4.4457e-01, 2.9375e-01, 3.7210e-01, 9.0677e-01, + 5.7758e-01, 4.3943e-02, 2.8827e-01, 7.3699e-01, + 7.8900e-01, 1.3989e-01, 7.2221e-01, 7.4366e-01, + 8.2160e-02, 9.6025e-01, 1.2139e-01, 5.0101e-01, + 8.0285e-01, 3.7037e-01, 9.1244e-01, 6.5398e-01, + 9.5521e-01, 2.9762e-01, 3.1879e-01, 7.2641e-01, + 1.5692e-01, 3.0369e-02, 3.4538e-02, 6.6245e-01, + 7.8304e-04, 1.5035e-01, 7.8481e-01, 9.1845e-01, + 7.7045e-01, 6.3589e-01, 3.7783e-03, 8.9417e-01, + 9.3511e-01, 6.6147e-02, 7.0792e-01, 7.8880e-01, + 4.2755e-01, 6.5761e-01, 2.2248e-01, 2.3505e-01, + 4.1693e-01, 1.8980e-01, 8.8345e-01, 9.4755e-01, + 6.9967e-01, 4.4861e-01, 4.8924e-01, 6.2220e-01, + 7.5948e-01, 9.0785e-01, 3.7362e-01, 7.3051e-01, + 4.1557e-01, 8.7165e-01, 7.5585e-01, 7.5145e-01, + 6.4869e-01, 6.4520e-01, 3.1785e-01, 1.8676e-01, + 9.6770e-01, 9.3301e-01, 4.6712e-01, 6.1378e-01, + 1.9663e-01, 1.9982e-02, 5.3737e-01, 2.9947e-01, + 9.2451e-02, 2.8147e-01, 2.1212e-01, 1.2678e-01, + 5.1366e-01, 3.6929e-01, 1.1249e-01, 5.2368e-01, + 9.6393e-01, 5.7740e-01, 8.3447e-01, 5.4799e-01, + 6.0984e-01, 5.8767e-01, 5.0594e-01, 1.5881e-02, + 8.4183e-01, 7.4645e-01, 1.0322e-01, 1.6353e-02, + 2.3554e-01, 5.4241e-01, 8.7490e-01, 3.3487e-01, + 2.0529e-01, 3.3168e-01, 9.8445e-01, 8.3994e-02, + 6.4003e-01, 6.1293e-01, 1.6847e-01, 6.9255e-04, + 3.3691e-01, 6.9188e-01, 8.5657e-01, 4.1035e-01, + 7.8136e-01, 6.6133e-01, 9.7211e-01, 6.7721e-01, + 3.6265e-01, 9.3234e-01, 1.0505e-03, 3.9732e-01, + 8.5010e-01, 1.8507e-01, 5.1799e-01, 4.1238e-01, + 2.6565e-01, 9.9569e-01, 6.1941e-01, 9.8384e-01, + 3.6682e-02, 7.7732e-01, 2.0425e-02, 9.9938e-01, + 4.5646e-01, 9.1031e-01, 7.1849e-01, 8.5889e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8046, 0.6398, 0.4516, ..., 0.6060, 0.1172, 0.9615]) +tensor([0.5663, 0.3065, 0.2094, ..., 0.8818, 0.3165, 0.1028]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1133,375 +375,757 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.420795440673828 seconds +Time: 0.014298200607299805 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '73435', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.1189255714416504} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 998, 1000, 1000]), + col_indices=tensor([3268, 1940, 9160, 6136, 8886, 1331, 7256, 398, 9383, + 5681, 803, 1559, 1818, 3192, 9874, 65, 7479, 2315, + 2903, 3596, 5048, 3628, 7348, 4304, 9283, 4781, 7437, + 4056, 4610, 5614, 7716, 3739, 6721, 2191, 8794, 4463, + 6935, 9878, 7811, 9187, 1993, 7954, 3335, 6582, 3663, + 1121, 8189, 4084, 9956, 7160, 874, 4786, 1477, 7610, + 7097, 9889, 3836, 3893, 7633, 128, 7550, 5741, 3839, + 4519, 2663, 9337, 8323, 9408, 7375, 4288, 6352, 6202, + 9890, 8118, 5248, 3739, 9260, 2426, 3045, 7017, 971, + 4695, 1157, 4174, 6436, 4508, 8338, 3240, 8059, 2621, + 8345, 2343, 7040, 8135, 125, 6433, 8450, 8364, 1051, + 3457, 1279, 1388, 4128, 4971, 5197, 5228, 7503, 1925, + 763, 9509, 6035, 5380, 597, 6393, 9931, 6518, 2735, + 7231, 3318, 5293, 3927, 3445, 1075, 367, 3174, 195, + 1996, 2769, 9250, 691, 3630, 9739, 908, 2572, 2664, + 2100, 1114, 3373, 7706, 7879, 9727, 8611, 1586, 7543, + 3790, 7462, 7540, 705, 9659, 3986, 3457, 3486, 4426, + 383, 2123, 3735, 2581, 4316, 9178, 5079, 2535, 6210, + 2222, 4374, 600, 1575, 6606, 5402, 6779, 1015, 1279, + 1724, 6724, 1757, 2234, 4001, 8179, 3237, 2616, 1804, + 6575, 2682, 4999, 4302, 3238, 8305, 7964, 6185, 9632, + 9789, 6758, 1146, 4914, 3905, 2672, 9386, 9446, 3582, + 900, 1298, 6335, 9266, 7522, 2454, 5500, 5676, 6583, + 3935, 8098, 3515, 6539, 1304, 5034, 6247, 2136, 8941, + 4122, 424, 174, 618, 1440, 4257, 2566, 3656, 3363, + 2240, 5917, 7528, 3291, 8763, 5069, 4941, 2278, 4749, + 3096, 7302, 2167, 9462, 9948, 4827, 5715, 239, 8772, + 9199, 8152, 552, 1988, 5938, 398, 6261, 9614, 1837, + 4305, 7252, 9944, 8131, 7744, 8734, 6553, 1655, 3024, + 8085, 5563, 2935, 9137, 9488, 4476, 4205, 7276, 9930, + 3357, 1023, 7834, 3926, 3280, 8750, 1545, 7404, 8367, + 4943, 8607, 8458, 5678, 1222, 3730, 706, 4731, 1163, + 320, 4009, 7473, 1567, 1055, 4036, 6544, 6382, 204, + 1361, 2163, 3890, 2862, 8465, 1419, 9414, 4969, 3658, + 6454, 4370, 37, 2212, 2676, 5807, 4148, 510, 7387, + 1776, 5840, 8839, 5008, 5833, 4278, 2878, 4844, 4630, + 617, 5182, 6850, 6822, 5141, 9641, 6568, 9304, 1983, + 7486, 5378, 1822, 413, 7045, 2009, 3009, 8773, 959, + 6807, 9360, 5082, 6491, 1897, 6650, 4757, 533, 344, + 5192, 2871, 5058, 5444, 4572, 5417, 1330, 9639, 9449, + 1473, 8411, 8576, 6192, 9945, 5662, 7117, 6824, 4401, + 1860, 9456, 4337, 2978, 5128, 5256, 3155, 7131, 8544, + 2429, 8968, 7918, 5423, 4799, 7403, 338, 8835, 9407, + 2497, 2011, 2493, 8745, 9251, 8515, 6051, 5503, 1807, + 5000, 9688, 1322, 6634, 3825, 8405, 6570, 3816, 2533, + 8788, 7423, 9584, 1832, 4772, 3798, 3403, 5581, 1140, + 4675, 2481, 5556, 914, 4353, 6871, 7675, 6635, 8790, + 6983, 3305, 6346, 1336, 4571, 3237, 8436, 4414, 8551, + 6151, 7185, 7310, 5371, 157, 5025, 8219, 2945, 7198, + 6036, 9787, 8995, 3504, 8748, 4912, 4161, 6291, 5388, + 1745, 1510, 4406, 1304, 6964, 248, 3303, 4655, 2505, + 6167, 9780, 1140, 3264, 2704, 2116, 9971, 8994, 2393, + 2804, 1835, 3160, 249, 8753, 8359, 8671, 9254, 5894, + 3929, 4252, 7286, 6136, 3253, 3824, 3841, 7110, 8226, + 1965, 8178, 9190, 751, 1181, 2030, 5606, 9155, 4372, + 3194, 292, 7077, 1313, 4485, 4505, 8944, 115, 4695, + 342, 3872, 6103, 1967, 7811, 9649, 1632, 428, 1847, + 5759, 9620, 1459, 4592, 4933, 25, 2464, 7148, 3529, + 7508, 2703, 523, 576, 6615, 6339, 2198, 7143, 1096, + 4033, 7799, 3115, 8619, 1669, 3842, 7780, 3872, 1537, + 6200, 3198, 3766, 2038, 3023, 2904, 8822, 9803, 9557, + 242, 6897, 6356, 1612, 4712, 7627, 9766, 262, 
9623, + 1531, 276, 4360, 893, 6446, 6185, 7486, 253, 4211, + 1937, 9955, 5382, 2471, 3344, 7986, 1688, 9725, 4805, + 1868, 1291, 2758, 327, 1907, 8580, 162, 925, 5259, + 8589, 7015, 8350, 5154, 4607, 5667, 5558, 1325, 3664, + 3947, 6702, 9397, 3654, 3687, 349, 2919, 5677, 9827, + 6150, 6774, 570, 7973, 5873, 7700, 8930, 9760, 3582, + 7999, 5738, 4091, 7876, 7108, 5131, 1771, 9771, 8127, + 9711, 3774, 372, 9212, 6448, 4580, 8382, 2471, 9822, + 990, 9059, 513, 5144, 9403, 7663, 2636, 7305, 9035, + 3497, 1841, 7908, 3399, 146, 9194, 1492, 770, 657, + 6876, 7849, 668, 8642, 2279, 7031, 6351, 4883, 8455, + 5436, 9117, 6881, 3725, 3467, 4046, 368, 9328, 4883, + 2924, 1147, 2862, 84, 8921, 1609, 2925, 4064, 4863, + 7000, 2035, 5698, 6432, 4481, 1529, 2540, 4184, 1709, + 3185, 1060, 6790, 9180, 5802, 6745, 6522, 5342, 9625, + 6740, 474, 6842, 1650, 5301, 7468, 750, 6962, 9418, + 3668, 6567, 7209, 845, 72, 495, 86, 2113, 5281, + 8331, 529, 1137, 3934, 8009, 8752, 1166, 9430, 4435, + 7841, 5183, 1398, 7826, 6299, 6092, 4278, 6580, 3096, + 4473, 3176, 5798, 2270, 3991, 3522, 9461, 4361, 1796, + 7404, 1841, 287, 1150, 4044, 5561, 8977, 9909, 2170, + 1056, 5159, 5181, 4614, 4406, 5188, 6425, 684, 926, + 920, 6360, 6402, 890, 3305, 46, 9892, 5427, 5552, + 4347, 2969, 9960, 3545, 3292, 2596, 5597, 8752, 7064, + 9545, 6949, 3527, 7658, 1663, 1951, 9314, 6489, 6198, + 2302, 4459, 7626, 4094, 5297, 6637, 8478, 3661, 7258, + 3131, 1422, 4854, 6448, 5101, 1856, 1144, 1858, 8836, + 851, 5975, 1812, 2130, 9716, 8283, 9804, 9993, 9144, + 4299, 8514, 5473, 4269, 4934, 2514, 4452, 8870, 4257, + 7150, 4011, 9323, 6703, 1759, 1163, 2136, 6169, 1188, + 3650, 3246, 2339, 8674, 1310, 1895, 9912, 4621, 8612, + 2392, 7390, 8986, 1492, 6984, 919, 4793, 3006, 5961, + 4221, 1822, 7210, 9020, 4217, 7896, 1138, 1169, 2083, + 5638, 3858, 5852, 6107, 5878, 999, 2469, 3322, 6116, + 7488, 5854, 4509, 5090, 9400, 7364, 7318, 9391, 4847, + 1201, 7501, 363, 8177, 5495, 3447, 6083, 4716, 6248, + 2768, 1235, 8486, 4105, 8220, 642, 3644, 4828, 5827, + 9485, 6238, 2816, 4837, 2806, 1687, 5184, 3861, 2932, + 2418, 7069, 5201, 4484, 5050, 2772, 478, 4144, 1335, + 8595, 8822, 7318, 4866, 3108, 8013, 5153, 2943, 9979, + 3612, 2782, 6634, 5688, 2931, 1375, 6209, 2377, 6742, + 658, 2942, 3062, 8469, 4976, 2039, 9, 136, 4842, + 8338, 1302, 259, 6934, 488, 3818, 3939, 8893, 6826, + 8549, 7590, 7376, 6486, 7829, 2202, 8049, 6007, 2855, + 1631, 5879, 9514, 4595, 439, 2995, 6833, 603, 8390, + 7722, 493, 1173, 8199, 717, 1284, 5191, 6693, 4542, + 7265, 6747, 2981, 940, 7850, 5004, 8944, 3354, 6826, + 6720, 1090, 2311, 6851, 325, 6619, 2584, 9210, 1453, + 9454, 1843, 3783, 5355, 4268, 1096, 1816, 6488, 6820, + 8247]), + values=tensor([2.8771e-01, 1.4146e-01, 3.3980e-01, 2.0699e-01, + 6.6479e-01, 8.7997e-01, 4.8208e-01, 3.5458e-01, + 9.7353e-01, 6.6956e-01, 2.3096e-01, 5.5680e-01, + 3.4247e-01, 1.7007e-01, 2.1541e-03, 3.9348e-01, + 9.5240e-01, 5.5976e-01, 1.5912e-01, 1.4070e-01, + 1.6239e-01, 9.4578e-01, 1.5357e-01, 9.4102e-01, + 4.4400e-01, 2.2040e-01, 9.2526e-01, 6.0042e-01, + 7.6582e-01, 5.4786e-01, 6.5662e-01, 5.6715e-01, + 2.9880e-01, 4.6745e-01, 9.3415e-01, 5.8096e-01, + 8.6327e-01, 6.9869e-01, 6.5835e-01, 6.4535e-01, + 6.2778e-01, 1.6472e-01, 2.8004e-02, 1.7888e-02, + 7.2884e-02, 2.1358e-01, 9.6633e-01, 6.6467e-01, + 2.1565e-01, 4.4754e-01, 9.9484e-01, 5.5072e-01, + 8.1900e-01, 4.2134e-02, 7.0535e-01, 6.1291e-01, + 4.3428e-01, 4.9188e-01, 6.7058e-01, 9.1543e-01, + 3.0886e-01, 4.4728e-03, 7.6408e-01, 4.1722e-01, + 1.3722e-01, 4.5449e-01, 
2.0729e-03, 6.7034e-01, + 4.6296e-01, 5.5903e-01, 6.1197e-02, 7.0296e-01, + 6.8854e-01, 3.5088e-01, 4.2963e-01, 5.5173e-01, + 6.3102e-04, 1.1059e-01, 6.8836e-01, 3.5375e-01, + 7.6796e-01, 5.6891e-01, 2.5369e-01, 4.9706e-01, + 8.0259e-01, 5.6293e-01, 2.5210e-01, 9.4266e-01, + 7.0771e-01, 9.3561e-01, 7.4522e-01, 8.4698e-01, + 3.2018e-02, 8.8825e-01, 6.2998e-01, 6.0379e-01, + 7.6188e-01, 9.4184e-01, 3.7892e-01, 7.7530e-01, + 5.9389e-01, 4.8456e-01, 8.3619e-01, 9.1961e-02, + 1.6769e-01, 4.6103e-01, 8.8645e-01, 9.3003e-01, + 2.2913e-01, 9.1094e-01, 6.8476e-01, 1.3704e-01, + 2.1042e-01, 7.6366e-02, 5.3556e-01, 1.0431e-01, + 4.0504e-01, 5.1333e-01, 1.2988e-01, 5.2197e-01, + 2.4381e-01, 2.2984e-01, 7.3713e-01, 3.3760e-01, + 2.0907e-01, 2.9819e-01, 5.9905e-02, 9.8499e-01, + 7.2529e-01, 1.8408e-01, 3.8270e-02, 5.3548e-01, + 9.0192e-01, 6.8138e-01, 9.8476e-01, 2.9891e-02, + 6.2650e-01, 7.3836e-01, 4.5217e-02, 6.8032e-01, + 3.7985e-01, 9.0190e-01, 1.6987e-01, 6.1601e-01, + 8.7321e-02, 7.5144e-01, 3.7708e-01, 2.0647e-01, + 4.8949e-01, 9.9654e-02, 7.0983e-01, 8.8309e-01, + 6.8068e-01, 6.8711e-02, 6.8924e-01, 9.1975e-01, + 9.2360e-01, 2.7118e-01, 5.9334e-01, 8.3664e-01, + 4.5618e-01, 7.7160e-01, 2.2790e-02, 3.6023e-01, + 6.5545e-01, 9.5792e-02, 7.5679e-01, 5.5168e-01, + 8.7319e-01, 8.5217e-02, 6.9991e-01, 3.9318e-01, + 3.0290e-01, 4.7091e-01, 7.0556e-01, 7.9052e-01, + 3.4701e-01, 5.3246e-01, 2.4776e-01, 6.6375e-01, + 5.9447e-01, 6.0241e-01, 3.0224e-01, 7.4881e-01, + 5.7550e-01, 5.8852e-01, 7.7761e-01, 6.3659e-01, + 7.5931e-01, 4.5312e-01, 9.2616e-01, 1.7815e-01, + 4.2034e-01, 4.5928e-01, 7.1985e-01, 7.9142e-01, + 2.7640e-01, 8.3220e-01, 6.5118e-01, 7.0504e-01, + 9.3481e-01, 7.0830e-03, 7.3917e-01, 9.4999e-01, + 7.3897e-01, 6.4052e-01, 8.3689e-01, 9.6680e-01, + 7.3740e-01, 2.7651e-01, 8.4853e-01, 7.5955e-01, + 6.9371e-01, 6.1349e-01, 4.6405e-01, 7.1233e-01, + 3.7113e-01, 7.6436e-01, 6.1209e-01, 6.6404e-01, + 2.3869e-01, 9.5679e-01, 9.8044e-01, 8.6933e-01, + 5.5240e-01, 4.0929e-01, 9.6984e-01, 9.2004e-01, + 6.5006e-01, 8.4987e-01, 3.5598e-01, 9.2081e-02, + 4.6330e-01, 7.7637e-01, 6.2457e-01, 8.6199e-02, + 3.0338e-01, 8.6043e-01, 3.7249e-01, 5.8744e-01, + 7.6158e-01, 8.0705e-01, 9.1742e-01, 5.8421e-01, + 5.5991e-01, 6.1925e-02, 7.2077e-01, 6.6231e-01, + 4.9117e-01, 5.0214e-01, 5.3608e-01, 2.8868e-03, + 8.1461e-01, 2.0794e-01, 7.6814e-01, 7.5160e-01, + 4.4580e-01, 3.1629e-01, 6.3557e-01, 5.0266e-01, + 6.2446e-01, 9.6051e-01, 3.9841e-01, 1.5208e-01, + 2.3771e-01, 9.4959e-02, 4.9265e-01, 1.7307e-01, + 8.3254e-01, 7.5480e-01, 2.9656e-01, 3.5691e-01, + 7.2978e-01, 5.7813e-01, 4.2929e-01, 2.3109e-01, + 5.5352e-01, 6.7058e-01, 1.8004e-01, 8.2964e-01, + 6.9941e-02, 7.4008e-01, 8.4540e-01, 3.8840e-01, + 7.9913e-01, 8.1805e-01, 3.8920e-01, 4.5836e-01, + 7.3680e-01, 7.3437e-02, 8.2151e-01, 9.3348e-01, + 7.5021e-01, 6.4281e-01, 1.8925e-01, 8.9742e-01, + 3.2118e-01, 7.3161e-01, 7.7009e-01, 7.4708e-01, + 7.6841e-01, 6.4245e-02, 4.2297e-01, 3.6313e-01, + 1.7006e-01, 4.3440e-01, 6.6754e-01, 1.1697e-01, + 8.2822e-01, 6.7026e-01, 5.3814e-01, 5.8097e-01, + 5.2692e-01, 3.0927e-01, 2.3656e-01, 8.7066e-01, + 5.6606e-01, 5.1278e-01, 7.7672e-01, 9.5526e-01, + 4.9047e-01, 4.2553e-01, 1.5800e-01, 6.8206e-01, + 9.5823e-01, 6.2284e-01, 6.4640e-01, 6.4180e-01, + 3.4369e-01, 1.9667e-01, 9.4106e-01, 2.1876e-01, + 3.2615e-01, 2.6718e-01, 3.9116e-01, 6.3412e-01, + 3.8696e-01, 4.9824e-01, 6.7892e-01, 1.8264e-01, + 2.1348e-01, 2.4676e-01, 4.9286e-01, 4.1194e-01, + 3.5164e-01, 3.2022e-01, 8.0737e-01, 4.4490e-01, + 1.0765e-01, 5.7311e-01, 
4.1094e-01, 8.9379e-01, + 3.7616e-01, 3.7524e-01, 1.7013e-02, 9.5976e-02, + 9.0425e-01, 1.6898e-01, 2.7642e-01, 9.0879e-01, + 5.5926e-01, 1.7484e-01, 1.9853e-01, 6.7293e-02, + 3.5545e-01, 7.9452e-01, 3.8019e-01, 4.0962e-01, + 1.6883e-01, 6.3159e-01, 2.2293e-02, 3.2277e-01, + 4.5160e-01, 3.4846e-01, 6.1317e-01, 9.5602e-01, + 4.1983e-01, 9.5741e-01, 4.7632e-01, 5.7069e-01, + 2.4079e-01, 2.8159e-01, 5.8270e-02, 9.3235e-01, + 3.7123e-01, 1.8458e-01, 8.9656e-01, 8.2934e-01, + 7.9209e-01, 4.4782e-01, 4.7014e-01, 9.2884e-01, + 8.9346e-01, 8.4331e-02, 9.9995e-01, 9.4903e-01, + 6.2718e-01, 3.1964e-02, 7.0362e-01, 1.3593e-01, + 1.5451e-01, 9.9286e-01, 6.5606e-01, 5.4446e-01, + 9.8344e-01, 7.4858e-01, 3.0437e-01, 6.6062e-01, + 7.4720e-01, 5.1366e-01, 1.3449e-01, 6.9077e-01, + 1.2953e-02, 1.8465e-01, 2.4472e-01, 1.6732e-01, + 9.7346e-01, 5.5261e-02, 4.5593e-01, 5.1773e-01, + 1.7951e-01, 4.9106e-01, 6.8869e-01, 4.1446e-01, + 5.5650e-01, 6.2265e-01, 9.2800e-01, 4.3258e-01, + 4.6481e-02, 2.5815e-01, 1.9770e-01, 8.2542e-01, + 7.5356e-01, 9.1710e-01, 2.3308e-01, 2.0851e-01, + 4.7031e-01, 3.8971e-01, 4.9964e-02, 2.9163e-01, + 7.6756e-01, 4.9656e-01, 4.6233e-01, 4.3042e-01, + 9.2027e-02, 9.6889e-01, 7.1009e-01, 7.0162e-02, + 3.2849e-01, 1.1314e-01, 5.6355e-01, 4.5674e-01, + 1.8215e-01, 2.5535e-01, 2.9481e-01, 5.2178e-01, + 9.1171e-01, 6.9847e-01, 3.0863e-01, 2.8530e-01, + 9.3616e-02, 2.1208e-01, 5.2590e-01, 4.9204e-02, + 6.7779e-01, 1.9054e-01, 8.4387e-01, 4.8308e-01, + 5.1858e-01, 1.8257e-02, 4.7010e-01, 2.1653e-01, + 9.5204e-01, 4.5071e-01, 3.7608e-01, 1.3002e-01, + 8.6825e-01, 8.1825e-01, 3.3266e-01, 4.6354e-01, + 9.7147e-01, 9.9710e-01, 7.2262e-01, 6.8738e-01, + 5.6822e-01, 3.5336e-01, 2.5369e-01, 9.8490e-01, + 1.2975e-01, 5.6405e-01, 9.4141e-01, 4.8221e-01, + 2.5048e-01, 2.4717e-01, 9.7158e-01, 6.4051e-01, + 9.0209e-01, 3.9581e-01, 7.1582e-01, 9.1393e-01, + 1.6458e-01, 2.0256e-01, 7.3513e-01, 6.9918e-01, + 1.1739e-01, 4.2867e-01, 8.4670e-01, 2.5064e-01, + 6.6719e-01, 8.4350e-02, 7.0928e-01, 9.9279e-01, + 9.1193e-01, 5.0379e-01, 3.6458e-01, 5.9821e-01, + 9.9872e-01, 4.0942e-01, 1.4277e-01, 4.2265e-01, + 6.5909e-01, 4.1942e-01, 2.4087e-01, 1.0853e-02, + 7.4267e-01, 7.9345e-02, 3.6486e-01, 4.8944e-01, + 3.0601e-01, 3.8405e-01, 5.5448e-02, 7.5146e-01, + 2.6776e-01, 7.8276e-01, 2.9865e-01, 2.5064e-01, + 6.1097e-02, 7.2722e-01, 9.8842e-01, 6.3221e-02, + 2.7551e-01, 9.6716e-01, 7.6175e-02, 7.0192e-01, + 3.4733e-01, 2.5121e-01, 8.0727e-01, 6.4874e-01, + 6.7123e-01, 1.2114e-01, 6.4184e-01, 2.7679e-01, + 8.7813e-01, 6.7859e-01, 8.7411e-01, 3.6975e-01, + 9.0168e-01, 9.4552e-01, 6.3061e-01, 5.2806e-01, + 6.1769e-01, 6.0053e-01, 2.2439e-01, 8.9122e-01, + 5.4754e-01, 7.3651e-01, 8.1088e-01, 9.8040e-01, + 3.0299e-02, 4.6106e-02, 7.6622e-01, 4.3897e-01, + 2.1239e-01, 9.9818e-01, 9.6928e-01, 7.8117e-02, + 1.4361e-01, 5.9817e-01, 2.3090e-01, 4.6211e-01, + 5.8341e-02, 5.7111e-01, 5.6530e-01, 4.7273e-01, + 7.8533e-01, 6.7542e-01, 6.1897e-01, 3.4197e-01, + 2.7038e-01, 2.1650e-01, 1.7095e-01, 3.2617e-01, + 7.7115e-01, 5.9187e-01, 5.6690e-01, 5.2913e-02, + 8.3899e-01, 3.8008e-01, 8.9469e-01, 2.4459e-01, + 4.8857e-01, 1.0111e-01, 4.4750e-01, 3.2032e-01, + 5.7385e-01, 4.7948e-01, 6.8175e-01, 2.2119e-01, + 9.5918e-01, 2.5774e-02, 9.9294e-01, 6.9966e-01, + 9.0720e-01, 2.6637e-01, 4.1012e-01, 1.9018e-01, + 5.5632e-01, 3.8646e-01, 6.7813e-01, 4.9615e-01, + 6.6772e-01, 3.0066e-01, 8.8418e-01, 1.4931e-01, + 4.1799e-01, 3.5348e-01, 1.7049e-01, 9.8076e-02, + 5.9850e-01, 7.7942e-01, 6.2314e-01, 4.9653e-01, + 1.0044e-02, 1.5259e-01, 
5.9254e-01, 8.1919e-01, + 3.6718e-01, 9.9112e-01, 4.4607e-01, 2.2125e-02, + 6.5601e-01, 9.3147e-01, 3.3083e-01, 3.7329e-01, + 3.9710e-01, 3.2957e-01, 2.1002e-01, 8.8129e-01, + 5.6113e-01, 8.9744e-01, 1.4077e-01, 9.5118e-01, + 8.1320e-01, 6.5388e-01, 1.3600e-01, 6.6715e-01, + 5.4440e-01, 4.1271e-01, 1.0135e-01, 1.0372e-01, + 9.5882e-02, 3.6572e-01, 9.7288e-01, 4.2360e-01, + 5.8470e-01, 5.1595e-01, 2.7936e-02, 5.1129e-01, + 4.4788e-01, 8.3745e-01, 5.3835e-01, 3.7081e-02, + 4.6294e-01, 5.9312e-01, 3.7678e-01, 4.0783e-01, + 8.2926e-01, 9.8819e-01, 7.5637e-01, 1.0288e-01, + 1.0409e-01, 4.3534e-01, 7.0186e-01, 7.7372e-01, + 3.1881e-01, 3.7023e-01, 4.2949e-01, 7.4031e-03, + 7.4680e-02, 8.8881e-01, 7.9458e-01, 4.9785e-01, + 5.9413e-01, 3.9841e-01, 1.5480e-01, 8.1195e-01, + 1.3920e-02, 2.0513e-01, 3.8289e-01, 1.6912e-01, + 4.2587e-01, 9.6457e-01, 3.0490e-01, 8.6240e-01, + 2.7247e-01, 8.2377e-01, 4.7853e-01, 7.2678e-01, + 1.5140e-01, 9.9396e-01, 3.8000e-01, 7.3078e-01, + 8.1370e-01, 3.3911e-01, 9.5184e-01, 9.9797e-01, + 5.5417e-01, 9.5113e-01, 3.1177e-01, 3.5599e-01, + 6.2754e-01, 7.8860e-01, 6.3492e-01, 5.8439e-01, + 8.0656e-01, 7.0469e-01, 8.0529e-02, 8.6315e-01, + 1.0352e-01, 9.2122e-01, 3.7143e-01, 9.8695e-01, + 9.6353e-01, 6.4594e-01, 8.1990e-01, 2.8509e-01, + 9.3885e-01, 4.1737e-01, 3.9365e-01, 9.6196e-01, + 3.6973e-02, 9.9429e-01, 4.5776e-01, 7.5388e-01, + 2.2114e-01, 1.5209e-01, 6.2461e-01, 1.5314e-01, + 3.7414e-01, 7.4478e-01, 3.8079e-01, 4.2514e-01, + 9.4067e-01, 8.4756e-01, 6.9115e-01, 1.6953e-01, + 4.0282e-01, 9.4198e-02, 3.0556e-01, 6.1409e-01, + 5.7797e-01, 1.2103e-01, 1.5533e-02, 9.3777e-01, + 6.7901e-01, 2.2422e-01, 2.8111e-01, 6.6430e-02, + 6.1363e-01, 9.1234e-01, 6.0708e-01, 2.9924e-01, + 2.0154e-01, 5.8786e-02, 3.2974e-01, 6.9191e-01, + 2.6891e-01, 1.6783e-01, 8.4956e-01, 3.0398e-01, + 7.6334e-01, 5.8308e-01, 4.7019e-01, 6.2333e-01, + 5.8547e-01, 9.2055e-01, 7.4340e-01, 8.1279e-01, + 9.9055e-01, 7.3614e-02, 1.8842e-01, 1.7972e-01, + 3.2375e-01, 1.1725e-01, 1.2493e-01, 3.6958e-01, + 4.4883e-01, 1.8019e-01, 6.8769e-01, 7.8749e-01, + 8.4573e-01, 7.7622e-01, 2.5664e-01, 9.9911e-01, + 3.9163e-01, 3.0836e-01, 3.2564e-01, 6.8481e-02, + 9.6194e-01, 2.9230e-02, 1.8780e-01, 7.8867e-01, + 9.4618e-01, 3.2351e-01, 9.0648e-01, 5.9182e-01, + 1.7703e-02, 7.3278e-02, 7.3218e-01, 1.2123e-01, + 8.3010e-01, 6.7020e-01, 9.2472e-01, 4.5988e-01, + 4.5813e-01, 9.0051e-01, 4.8831e-01, 8.9234e-01, + 4.8068e-01, 4.8808e-01, 6.8430e-01, 8.8869e-01, + 4.2253e-01, 1.5206e-02, 5.3149e-01, 1.4403e-01, + 3.4749e-01, 5.8780e-01, 3.9069e-01, 2.5935e-01, + 8.6621e-01, 8.7761e-01, 4.7673e-01, 6.7192e-01, + 2.8080e-01, 8.9602e-01, 8.5936e-01, 7.1708e-01, + 8.3511e-01, 4.1220e-01, 4.6892e-01, 4.1250e-01, + 2.6419e-01, 1.5667e-01, 4.7562e-01, 7.1202e-01, + 1.0849e-01, 9.1356e-01, 2.8669e-01, 9.9792e-01, + 7.5273e-01, 4.4966e-01, 5.5509e-01, 8.6049e-01, + 8.7889e-01, 8.9251e-01, 9.8641e-02, 7.8106e-01, + 9.3096e-01, 3.2210e-01, 6.6811e-01, 1.1537e-01, + 9.8736e-02, 4.4812e-01, 5.6658e-01, 5.3650e-01, + 2.6873e-01, 3.7077e-01, 8.1482e-01, 7.5572e-01, + 9.1491e-01, 9.1228e-01, 7.8596e-01, 7.6474e-01, + 3.4040e-01, 3.9685e-01, 2.8244e-02, 4.9794e-01, + 8.1154e-01, 7.3329e-01, 8.3308e-01, 8.6496e-01, + 4.5457e-01, 5.9193e-01, 2.2779e-01, 5.2058e-01, + 6.4221e-01, 3.8018e-01, 3.6945e-01, 9.7664e-01, + 4.5093e-01, 3.8167e-01, 2.4021e-01, 4.6584e-01, + 4.6607e-01, 5.9031e-01, 9.4514e-01, 2.8137e-01, + 8.1706e-01, 5.7682e-01, 3.5024e-01, 2.1810e-01, + 7.9739e-01, 3.5521e-01, 3.7166e-01, 8.5125e-01, + 3.2176e-01, 3.3764e-01, 
3.4843e-01, 5.7515e-01, + 7.4040e-01, 3.5724e-01, 3.3978e-01, 4.7715e-02, + 6.1276e-01, 7.2790e-02, 5.6510e-01, 1.3865e-01, + 5.6848e-01, 4.4510e-01, 2.0762e-01, 2.7685e-01, + 7.7521e-01, 6.1391e-01, 5.6374e-01, 8.5850e-01, + 2.1283e-01, 5.1543e-01, 1.2434e-01, 5.3456e-01, + 9.5277e-01, 1.3562e-01, 6.4514e-01, 6.1473e-01, + 5.3539e-01, 4.4355e-02, 8.4593e-01, 7.3084e-01, + 1.7425e-01, 7.1106e-01, 5.8012e-01, 9.2605e-01, + 3.3297e-01, 2.9434e-01, 6.2419e-01, 4.3100e-01, + 1.1256e-01, 2.5574e-01, 7.3243e-01, 6.8691e-01, + 3.4481e-01, 2.3702e-01, 2.6038e-01, 4.3581e-01, + 7.7715e-01, 2.6721e-01, 9.2404e-01, 4.7068e-01, + 3.0041e-01, 2.2246e-01, 4.2911e-01, 1.7288e-01, + 4.5293e-01, 8.5632e-01, 4.3717e-01, 5.7097e-01, + 2.7609e-01, 6.8311e-04, 8.6710e-01, 6.6874e-01, + 9.4404e-01, 3.4636e-01, 1.3715e-01, 2.7887e-01, + 6.0571e-01, 8.7055e-01, 6.3735e-01, 4.8338e-01, + 9.4789e-01, 2.2165e-01, 7.9746e-01, 5.8354e-01, + 8.7350e-02, 8.7468e-01, 6.0758e-03, 7.6367e-01, + 2.6460e-01, 9.0713e-01, 5.2305e-02, 2.2998e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.0103, 0.5434, 0.7885, ..., 0.2340, 0.5166, 0.1015]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 2.1189255714416504 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '363895', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.603893041610718} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([ 367, 5928, 4913, 9662, 9837, 1139, 7195, 709, 1186, - 80, 7429, 8354, 3731, 4238, 1204, 8286, 5350, 6518, - 6581, 5481, 8412, 8567, 4483, 270, 6809, 6052, 9047, - 8886, 6265, 2683, 8840, 6468, 5705, 4463, 7370, 8923, - 7324, 752, 3820, 476, 1511, 1152, 7156, 6378, 1601, - 6251, 5183, 964, 1260, 884, 6109, 4781, 1936, 3415, - 7691, 4888, 5674, 6459, 8140, 8932, 2864, 6432, 1356, - 2665, 9855, 2360, 1289, 3774, 7837, 576, 7725, 1293, - 907, 2542, 7673, 5326, 1656, 1077, 464, 7656, 5438, - 524, 4581, 8099, 2687, 4078, 5342, 5240, 9155, 9342, - 4985, 2318, 5185, 4603, 8415, 3279, 2083, 7925, 8071, - 9166, 9303, 9469, 5789, 7778, 8645, 7134, 7594, 2178, - 218, 3344, 3951, 8834, 4013, 7845, 8331, 9692, 3137, - 7730, 5023, 8474, 4718, 38, 726, 535, 2913, 4635, - 2716, 3591, 9066, 2660, 3345, 7905, 2026, 1175, 9827, - 6627, 4714, 9765, 1301, 2421, 949, 3573, 4881, 5681, - 2117, 1844, 8550, 9611, 6234, 1475, 3891, 4140, 4791, - 497, 5133, 1193, 8459, 1053, 2110, 5799, 6433, 6363, - 6452, 1314, 1357, 383, 1853, 7981, 8433, 7415, 6026, - 6286, 7644, 2079, 2595, 9578, 6356, 4227, 8298, 2069, - 2653, 5142, 7404, 4079, 1966, 7533, 6062, 6415, 8225, - 8783, 5739, 3815, 1201, 4896, 5097, 5808, 8945, 5520, - 6206, 511, 7347, 334, 3119, 1570, 7713, 2906, 3536, - 6334, 7998, 298, 3942, 6558, 2963, 4842, 2263, 5657, - 6492, 6276, 9075, 5298, 2267, 9620, 5320, 1568, 8760, - 7490, 9058, 1722, 5460, 3464, 1842, 2377, 9510, 1053, - 3322, 9012, 5664, 8967, 8790, 9286, 1946, 3141, 8192, - 6659, 3495, 4958, 4102, 7004, 6177, 5317, 6593, 7640, - 6307, 8035, 9650, 8087, 4138, 5288, 54, 277, 9498, - 8689, 2955, 8232, 1954, 321, 4627, 4226, 4258, 7653, - 5234, 2531, 412, 5196, 6249, 3047, 8171, 9428, 3213, - 9763, 1374, 9103, 7900, 6626, 2118, 7765, 3728, 6106, - 4954, 4753, 8313, 550, 6888, 8896, 9965, 3061, 2947, - 8278, 5879, 5273, 5453, 181, 2437, 4117, 7283, 5591, - 8602, 8473, 5720, 1866, 7656, 8837, 3905, 7472, 7158, - 3023, 2882, 9989, 8681, 648, 6274, 6847, 3312, 5656, - 7680, 6098, 4463, 9954, 2825, 6574, 7011, 1957, 6137, - 6725, 3576, 5559, 9535, 4552, 2200, 3109, 9611, 9451, - 1309, 2875, 3230, 5628, 9907, 8010, 6405, 222, 8761, - 2120, 9374, 4858, 4514, 5442, 9330, 8281, 8858, 4407, - 6543, 35, 3825, 9633, 1009, 2627, 3906, 1955, 7496, - 3563, 3539, 8202, 7400, 7587, 7952, 2607, 3232, 9574, - 748, 9829, 4992, 1810, 7308, 6501, 6785, 7235, 1137, - 8724, 2405, 7482, 6759, 4652, 5655, 4530, 6784, 9512, - 4088, 6049, 9230, 7810, 9658, 5737, 1295, 738, 747, - 7672, 2503, 5907, 2220, 201, 5120, 7515, 3259, 3879, - 3039, 9321, 8605, 9482, 4181, 7348, 9537, 587, 6394, - 3984, 306, 1193, 3228, 778, 134, 9046, 7121, 4357, - 4190, 4050, 4701, 5483, 6713, 8937, 2483, 711, 8665, - 3008, 3841, 9929, 1156, 7864, 8900, 4657, 7749, 2947, - 9699, 2414, 4266, 6048, 8585, 5785, 2832, 7866, 2519, - 9463, 5405, 1165, 1777, 9786, 9696, 3031, 653, 1886, - 1959, 8043, 4843, 484, 4688, 1972, 2038, 6598, 7240, - 1462, 7254, 3087, 1879, 7554, 1558, 7001, 9016, 8895, - 5649, 4937, 9704, 4961, 7644, 8442, 2757, 2242, 9502, - 2976, 4095, 4676, 9126, 5855, 4163, 6430, 137, 106, - 3030, 655, 8531, 7089, 648, 854, 5910, 4435, 7738, - 4805, 6752, 444, 2066, 5929, 8674, 9769, 2865, 2752, - 5321, 130, 2489, 807, 9550, 1633, 7468, 9068, 9241, - 6808, 7365, 968, 8661, 9018, 3550, 6104, 2629, 7508, - 35, 8351, 2048, 4626, 2188, 1746, 7387, 3093, 1503, - 1452, 7638, 1336, 9281, 6296, 6140, 7102, 9783, 1071, - 
4115, 8592, 8292, 2729, 6495, 356, 8036, 1239, 4223, - 6109, 2038, 6313, 7535, 4790, 2693, 2125, 7954, 5865, - 3268, 2522, 6271, 2184, 4150, 1470, 3491, 6995, 9734, - 2067, 3839, 7338, 6815, 4421, 307, 289, 5776, 7623, - 5550, 9770, 6085, 800, 6471, 6365, 7940, 7253, 9023, - 3056, 1623, 7408, 4931, 2227, 3852, 1329, 3500, 1597, - 680, 4795, 1426, 733, 9436, 1001, 976, 8821, 8330, - 6704, 5168, 673, 3362, 4167, 8558, 5789, 6484, 7568, - 5585, 6431, 6874, 5521, 5213, 7970, 3020, 8092, 5025, - 6800, 7632, 7512, 3989, 4195, 1578, 2940, 3628, 3648, - 6431, 2736, 7676, 5158, 5635, 1844, 6595, 8947, 5903, - 51, 9169, 4098, 5691, 6814, 9766, 6514, 9970, 2239, - 2046, 3280, 2714, 5806, 2677, 82, 1113, 2963, 2180, - 2566, 2541, 3825, 9253, 8388, 6485, 870, 1684, 7333, - 793, 9898, 5870, 5155, 6286, 9563, 9068, 5713, 2343, - 1715, 3887, 723, 1944, 2953, 3319, 9854, 144, 718, - 6674, 4026, 8079, 7531, 4342, 6924, 6779, 6546, 7462, - 9962, 1432, 8607, 1875, 2805, 7948, 2976, 1046, 7036, - 4949, 7102, 477, 7850, 2174, 1961, 4920, 8186, 1308, - 179, 2095, 3644, 8483, 9153, 627, 2488, 890, 3123, - 4350, 5492, 1701, 1645, 3932, 3331, 2933, 6624, 7332, - 7665, 5288, 7681, 6550, 7545, 730, 5687, 802, 6864, - 7451, 2117, 4878, 4869, 1248, 1137, 1380, 8950, 5598, - 7429, 4233, 1060, 5569, 6689, 8608, 3567, 4558, 7535, - 9199, 2985, 2282, 235, 9129, 8582, 2072, 7435, 8445, - 9048, 7905, 3709, 7810, 5385, 5720, 2662, 2668, 7912, - 4854, 9556, 9550, 3132, 8229, 4856, 4161, 1914, 745, - 65, 7058, 9271, 7846, 8928, 5152, 5482, 8156, 4590, - 3430, 90, 6589, 2814, 3078, 1382, 8736, 5505, 2054, - 8404, 4829, 8221, 2458, 952, 6527, 2277, 8961, 5925, - 2066, 2154, 3256, 2466, 7237, 2076, 9446, 4670, 7006, - 2231, 8823, 7168, 5299, 1101, 7045, 7082, 8944, 4016, - 4074, 4258, 3864, 3141, 4450, 5115, 5389, 9949, 1172, - 9433, 1642, 1479, 3769, 9973, 4747, 4872, 389, 8601, - 9119, 8563, 2438, 6873, 1818, 9226, 937, 6705, 3733, - 2036, 669, 2029, 332, 7663, 8899, 3889, 9735, 9326, - 8050, 2621, 3330, 992, 9581, 1488, 8125, 6995, 2528, - 7140, 6221, 1417, 9802, 898, 1689, 3308, 303, 2075, - 6622, 9167, 6725, 8722, 8663, 7196, 3835, 4992, 1316, - 6283, 4758, 9632, 641, 8717, 8903, 403, 3496, 5247, - 9491, 1114, 4440, 9898, 4961, 6497, 4291, 3663, 5161, - 7340, 9289, 8727, 7951, 3216, 9826, 2744, 1742, 7964, - 1100, 4645, 9, 4874, 3732, 9378, 1625, 9820, 3610, - 1545, 6999, 6918, 1895, 3580, 9207, 2130, 5891, 4573, - 1060, 9808, 4116, 943, 4244, 5724, 4961, 3837, 8840, - 389, 1348, 833, 3514, 5170, 1683, 7789, 9831, 5974, - 9125, 5396, 7463, 6389, 1948, 8359, 6877, 7522, 8087, - 7219, 1695, 4668, 662, 3386, 4803, 1414, 7277, 1481, - 9053, 9285, 9508, 5533, 205, 8818, 4845, 7696, 8623, - 9000]), - values=tensor([9.0347e-01, 8.9097e-01, 3.9050e-01, 1.1761e-01, - 7.3236e-01, 1.8076e-01, 1.7435e-01, 3.1968e-01, - 6.2094e-02, 6.9707e-01, 9.2156e-01, 2.5833e-01, - 3.9175e-01, 8.0344e-02, 8.5392e-01, 7.0595e-02, - 6.3035e-01, 4.6849e-01, 4.1355e-01, 5.1765e-01, - 6.7505e-01, 7.7298e-01, 3.0196e-01, 2.7160e-01, - 4.9835e-01, 7.7842e-01, 7.9100e-01, 6.1241e-01, - 9.2766e-01, 8.1158e-01, 9.2979e-01, 2.0087e-01, - 6.3444e-01, 1.1919e-01, 2.9666e-01, 8.1344e-01, - 2.3658e-01, 7.2770e-01, 3.4239e-01, 6.0915e-01, - 3.8756e-01, 4.8989e-01, 5.2955e-01, 9.2633e-01, - 7.9699e-01, 9.0118e-01, 6.9847e-02, 3.2137e-02, - 8.8024e-01, 6.0500e-01, 4.6450e-01, 8.5960e-02, - 5.6130e-01, 3.3831e-01, 9.3616e-01, 8.2763e-01, - 2.4098e-01, 8.9643e-01, 9.8468e-01, 8.8817e-01, - 4.6338e-01, 1.1147e-02, 8.1814e-01, 9.2206e-02, - 1.0005e-01, 6.2906e-01, 
6.9146e-01, 1.5603e-01, - 3.1235e-01, 6.2561e-01, 3.7711e-01, 6.5022e-01, - 1.2930e-01, 6.7901e-02, 7.2399e-01, 7.8736e-01, - 8.8706e-02, 7.0933e-01, 9.0665e-01, 1.0525e-01, - 7.0947e-01, 7.6785e-01, 7.4226e-01, 9.8559e-01, - 9.2459e-01, 1.7157e-01, 7.4478e-01, 7.9596e-01, - 7.2144e-01, 1.9958e-01, 6.1043e-01, 2.7464e-01, - 5.8302e-01, 9.7091e-01, 4.8610e-01, 8.4624e-01, - 8.7645e-01, 7.2595e-01, 7.4275e-01, 2.6952e-01, - 8.8491e-01, 6.7085e-02, 7.5574e-01, 6.1435e-01, - 1.3665e-01, 8.8809e-01, 2.2500e-01, 7.6147e-01, - 5.8642e-01, 8.3791e-01, 3.2752e-01, 4.9244e-01, - 6.5160e-01, 6.2237e-01, 5.6666e-01, 1.2589e-01, - 5.4657e-01, 1.6810e-01, 1.1783e-01, 3.3742e-01, - 6.8221e-01, 1.2268e-02, 4.7183e-01, 6.4275e-01, - 3.3058e-01, 6.2514e-01, 2.7607e-01, 5.2090e-01, - 3.9798e-01, 7.4704e-02, 3.1431e-01, 2.3156e-01, - 2.4983e-01, 7.9235e-01, 6.1773e-02, 4.1696e-01, - 2.9905e-01, 6.8152e-01, 3.4211e-01, 6.0393e-01, - 3.6186e-02, 8.0620e-01, 9.8874e-01, 4.6252e-01, - 8.4231e-01, 9.1653e-01, 4.6135e-01, 2.8152e-01, - 9.7319e-01, 4.3848e-01, 5.7170e-01, 4.9901e-01, - 7.4233e-01, 2.6089e-01, 8.3401e-01, 2.3202e-01, - 8.4769e-01, 8.7678e-01, 7.1420e-01, 8.9378e-01, - 5.6124e-01, 4.0637e-01, 3.8693e-01, 8.0881e-02, - 2.6688e-01, 5.6776e-01, 4.3004e-01, 4.7054e-01, - 2.0392e-01, 7.2817e-01, 6.6755e-01, 5.1867e-01, - 5.2670e-01, 1.4618e-01, 7.3183e-01, 8.6973e-01, - 1.9126e-01, 4.9009e-01, 2.5587e-01, 4.0650e-01, - 9.8806e-01, 9.6733e-01, 1.1171e-01, 7.9880e-01, - 6.7184e-01, 7.4651e-02, 1.7680e-01, 9.4485e-01, - 8.4713e-01, 1.4694e-01, 9.3410e-01, 6.8790e-01, - 6.7995e-01, 3.4766e-01, 3.3209e-01, 8.5193e-01, - 5.7959e-01, 2.9187e-01, 3.0229e-01, 9.7462e-01, - 7.8153e-01, 2.9900e-01, 3.0648e-01, 9.4053e-01, - 7.5039e-01, 7.3664e-01, 2.3912e-01, 5.6769e-01, - 9.2225e-01, 1.2617e-01, 4.4870e-01, 5.2352e-01, - 4.9831e-01, 4.2978e-01, 5.8379e-03, 5.1101e-01, - 1.1850e-02, 4.3076e-02, 2.0400e-01, 4.3425e-02, - 2.0565e-01, 2.5026e-01, 5.8630e-01, 9.9979e-01, - 5.5005e-01, 2.8369e-01, 7.0988e-01, 9.9716e-01, - 1.0843e-01, 5.2002e-01, 8.1336e-01, 4.4712e-01, - 1.7801e-01, 4.1042e-01, 8.1718e-01, 7.5846e-01, - 6.4002e-01, 8.1764e-01, 9.2236e-01, 2.7385e-01, - 2.2693e-01, 1.3388e-01, 2.7481e-01, 2.7276e-01, - 6.3817e-01, 8.1593e-01, 7.1721e-01, 1.0760e-01, - 3.2614e-01, 4.3753e-03, 9.9770e-01, 9.1052e-01, - 1.4963e-01, 1.3781e-01, 1.1034e-01, 1.1895e-02, - 8.2173e-01, 2.6975e-01, 3.5686e-01, 5.8749e-01, - 1.5253e-01, 6.6786e-01, 9.1749e-01, 8.5611e-01, - 5.9711e-01, 8.9352e-01, 7.2370e-01, 4.2727e-01, - 4.7201e-01, 6.1878e-01, 1.3255e-01, 4.0640e-01, - 6.1650e-01, 9.0122e-01, 3.3783e-01, 1.2667e-01, - 5.3203e-01, 7.8517e-01, 4.7198e-01, 8.0031e-01, - 9.8415e-02, 6.3035e-01, 8.4415e-01, 4.0094e-01, - 3.8505e-01, 5.1696e-01, 9.5335e-01, 7.6890e-01, - 5.9772e-01, 8.7886e-01, 7.4086e-01, 8.4781e-01, - 1.6493e-01, 3.3683e-01, 7.1960e-03, 2.9042e-01, - 8.4813e-01, 1.8192e-01, 6.0102e-01, 4.2567e-01, - 9.3317e-02, 3.8724e-01, 3.5787e-01, 9.9992e-01, - 1.2443e-01, 8.2890e-01, 2.8195e-01, 6.3051e-01, - 6.0894e-01, 9.5920e-01, 6.4943e-01, 6.5476e-01, - 6.7692e-01, 5.7763e-01, 7.8022e-01, 4.1886e-01, - 5.0205e-01, 2.5778e-01, 9.0479e-01, 8.7006e-01, - 6.1226e-01, 6.4647e-01, 1.5596e-01, 7.0047e-01, - 6.7722e-01, 5.4595e-01, 4.9292e-01, 9.0526e-01, - 4.8174e-02, 8.1995e-02, 7.1448e-01, 2.7007e-02, - 6.2586e-01, 6.7855e-01, 4.6902e-01, 3.6021e-01, - 3.9907e-01, 1.8609e-02, 6.0745e-01, 2.4758e-01, - 6.2019e-02, 9.3085e-01, 3.9521e-01, 8.9117e-01, - 5.9487e-01, 4.2672e-02, 7.4925e-01, 8.7766e-02, - 5.8346e-01, 6.5657e-01, 
8.0155e-01, 4.5151e-01, - 5.5583e-01, 7.1994e-01, 6.1938e-01, 4.9424e-01, - 8.4852e-01, 2.5577e-01, 7.5244e-01, 5.8561e-01, - 1.6622e-01, 5.3017e-01, 7.0377e-01, 1.6795e-01, - 6.1195e-01, 5.0235e-01, 4.0852e-01, 2.4062e-01, - 3.7706e-01, 7.1399e-01, 1.5932e-01, 2.0246e-01, - 9.3118e-01, 3.6824e-01, 9.2308e-01, 4.1388e-01, - 3.9074e-01, 2.1232e-02, 4.8230e-01, 3.1334e-02, - 7.6588e-01, 8.9020e-01, 9.5571e-02, 9.3996e-01, - 3.3502e-01, 1.6456e-02, 4.8873e-01, 9.0152e-01, - 6.0836e-01, 6.2449e-02, 2.3911e-01, 2.5569e-01, - 1.9099e-01, 6.5319e-01, 3.4599e-01, 9.4871e-01, - 1.5293e-01, 9.6291e-01, 5.9772e-01, 9.2359e-01, - 6.8691e-01, 6.3677e-01, 7.7151e-01, 5.4282e-01, - 9.3692e-01, 3.1526e-01, 9.7688e-01, 8.3385e-01, - 2.2972e-01, 6.2720e-01, 2.5945e-01, 2.4371e-01, - 1.9518e-01, 9.3585e-01, 5.0154e-01, 4.9345e-01, - 9.2069e-01, 2.8370e-01, 2.0941e-01, 2.7517e-01, - 3.3759e-01, 3.1978e-01, 9.0873e-01, 7.9081e-01, - 6.9871e-01, 2.8844e-01, 7.5941e-01, 4.7964e-01, - 4.1805e-01, 7.2289e-01, 7.1452e-01, 2.2869e-01, - 3.5827e-01, 5.0632e-01, 5.1573e-01, 6.2442e-01, - 5.5513e-01, 3.8608e-01, 6.5218e-02, 2.8971e-01, - 9.3603e-01, 9.9599e-01, 2.9308e-02, 5.0704e-01, - 3.3920e-01, 1.3835e-01, 1.3900e-01, 8.3661e-01, - 8.7030e-02, 1.9115e-01, 8.1969e-01, 2.0766e-01, - 2.0634e-04, 3.2996e-03, 3.5546e-01, 6.9885e-01, - 8.1732e-01, 4.6117e-01, 9.2421e-01, 7.9629e-01, - 9.9088e-01, 9.9274e-01, 1.1080e-01, 1.7902e-01, - 7.9706e-01, 1.8505e-01, 7.4092e-01, 7.3209e-01, - 1.2144e-01, 5.6902e-01, 9.3325e-01, 2.2999e-01, - 6.3010e-01, 9.2148e-01, 2.9383e-01, 4.2274e-01, - 1.2992e-01, 1.8436e-01, 7.7452e-01, 9.5314e-03, - 8.7521e-01, 3.7291e-01, 3.4776e-01, 9.8447e-01, - 9.2182e-01, 6.2103e-01, 7.8077e-01, 6.7523e-01, - 1.6309e-01, 9.9005e-01, 7.0418e-01, 7.5393e-01, - 4.9572e-02, 5.3018e-01, 5.0127e-02, 9.4383e-01, - 9.3893e-01, 2.9396e-01, 1.7931e-01, 3.2387e-01, - 5.3937e-01, 4.4591e-01, 4.1519e-01, 6.4965e-01, - 9.0437e-01, 8.3040e-01, 1.4310e-01, 3.5145e-02, - 4.6746e-01, 5.6639e-01, 4.2557e-01, 8.3449e-01, - 7.7874e-01, 8.0354e-03, 7.7620e-01, 6.3805e-01, - 9.9677e-01, 2.9228e-01, 7.5318e-01, 7.8356e-01, - 9.8216e-01, 6.5817e-01, 3.5696e-01, 2.7232e-01, - 3.7271e-01, 2.1862e-01, 6.4841e-01, 3.4376e-01, - 6.0935e-01, 9.5134e-02, 6.6915e-01, 8.1621e-01, - 4.0272e-01, 6.1882e-01, 5.0284e-02, 8.7929e-01, - 4.2125e-01, 7.6634e-01, 2.5193e-01, 1.3171e-01, - 5.5583e-01, 1.3959e-01, 1.7579e-01, 7.2551e-01, - 6.6240e-01, 1.9258e-01, 3.6612e-01, 1.8147e-01, - 5.7841e-01, 7.8278e-01, 4.2669e-02, 6.6255e-01, - 2.8767e-01, 8.0007e-02, 3.6674e-04, 3.9335e-01, - 2.1417e-01, 2.2919e-01, 1.2683e-01, 9.0001e-01, - 9.9290e-01, 4.5741e-01, 2.3197e-01, 1.8836e-01, - 9.6565e-01, 8.0740e-01, 1.9854e-01, 5.3042e-02, - 8.1469e-01, 4.9040e-01, 7.2177e-01, 6.4045e-01, - 4.9810e-01, 2.5103e-02, 4.7475e-01, 4.7844e-02, - 6.0969e-01, 9.1481e-01, 4.4597e-01, 1.1494e-01, - 4.9666e-01, 6.1275e-01, 8.7093e-01, 1.3668e-01, - 9.5248e-01, 5.9063e-01, 4.9499e-01, 9.1388e-01, - 4.0836e-01, 9.3011e-01, 8.1737e-01, 5.9702e-01, - 9.9251e-01, 9.3234e-01, 7.8850e-01, 5.3497e-01, - 5.6422e-01, 3.4891e-01, 7.9452e-01, 8.5710e-01, - 9.6578e-01, 5.9804e-01, 4.3277e-01, 7.3290e-01, - 1.0449e-01, 4.8889e-01, 9.8684e-01, 3.1549e-02, - 8.2245e-02, 9.4666e-01, 1.7503e-01, 1.3455e-01, - 8.0620e-01, 9.4127e-01, 6.0203e-01, 2.4925e-01, - 2.1869e-01, 3.0844e-02, 4.0776e-01, 2.7968e-01, - 7.2045e-01, 4.5107e-02, 7.6360e-01, 6.9114e-01, - 3.5384e-01, 2.6205e-01, 4.0814e-01, 4.1925e-01, - 9.5840e-01, 2.7948e-01, 9.9578e-01, 5.4533e-01, - 4.9157e-01, 1.7568e-01, 
4.9298e-01, 1.2249e-01, - 4.9184e-01, 5.8221e-01, 3.4826e-01, 2.3718e-01, - 4.0414e-02, 9.8825e-01, 6.8724e-01, 4.1618e-01, - 3.5580e-01, 9.8542e-01, 6.1434e-01, 1.5506e-01, - 1.8635e-01, 3.4507e-01, 8.7226e-01, 6.4723e-01, - 1.1927e-01, 6.1913e-02, 5.8115e-01, 5.0795e-02, - 6.9421e-01, 8.5543e-01, 4.6361e-01, 1.7225e-01, - 7.1720e-01, 5.2724e-01, 2.7571e-01, 7.8026e-01, - 9.7859e-01, 3.3684e-01, 9.4074e-01, 8.4877e-01, - 1.6576e-02, 3.2110e-01, 8.1119e-01, 4.4604e-02, - 7.5553e-01, 8.9513e-01, 2.5486e-01, 5.6611e-01, - 1.5194e-01, 8.0984e-01, 1.1473e-02, 5.0922e-02, - 1.9459e-01, 2.4450e-01, 5.2269e-01, 8.8459e-01, - 1.5980e-01, 5.1724e-01, 3.6058e-01, 3.8147e-01, - 8.0778e-02, 6.8899e-01, 9.3518e-01, 3.9577e-01, - 6.8067e-01, 2.9545e-01, 2.1034e-01, 8.0864e-01, - 3.0905e-01, 3.9012e-01, 5.0504e-01, 3.5331e-01, - 2.3990e-01, 1.9663e-01, 4.7629e-02, 8.8145e-01, - 1.6688e-01, 3.2309e-01, 6.0635e-01, 6.9179e-01, - 8.4762e-01, 4.8298e-01, 4.2062e-01, 8.8411e-01, - 3.1172e-01, 4.7812e-02, 1.8446e-01, 3.6828e-01, - 2.3400e-01, 8.2280e-02, 7.9717e-01, 4.5737e-02, - 6.3521e-01, 7.6517e-01, 5.7003e-01, 8.6854e-01, - 6.3527e-01, 5.4238e-01, 6.1423e-02, 9.0375e-01, - 9.5888e-01, 3.1839e-01, 2.4367e-01, 6.4739e-01, - 8.1586e-01, 2.4076e-01, 9.7343e-01, 4.9856e-01, - 7.2246e-01, 5.0023e-01, 1.2692e-01, 7.4359e-01, - 3.5270e-01, 2.8465e-01, 3.0118e-01, 3.5307e-01, - 6.2379e-01, 7.1186e-01, 6.6474e-01, 8.4095e-02, - 8.8565e-01, 2.9464e-01, 5.4755e-01, 9.0701e-01, - 8.9197e-01, 1.8235e-01, 9.3370e-01, 8.6788e-01, - 1.0571e-01, 3.0684e-01, 2.0394e-01, 6.1322e-01, - 7.6393e-01, 7.5679e-01, 2.3964e-01, 2.2054e-01, - 4.5451e-01, 3.5051e-01, 7.3550e-02, 9.3935e-01, - 2.5262e-01, 9.1953e-01, 2.4572e-01, 1.1299e-01, - 5.8933e-01, 2.5009e-01, 8.9652e-01, 6.5729e-02, - 1.7446e-01, 3.5451e-01, 5.3832e-01, 6.5479e-01, - 6.3736e-01, 1.1466e-01, 4.7192e-01, 9.4751e-01, - 4.2249e-01, 8.0013e-01, 8.3662e-01, 2.7945e-01, - 4.9136e-01, 2.0754e-01, 1.5234e-01, 2.6470e-01, - 9.8611e-01, 2.0121e-01, 6.4605e-01, 1.4810e-01, - 3.8203e-01, 1.0221e-02, 6.4883e-01, 3.7738e-01, - 6.6558e-01, 1.0409e-01, 3.3837e-01, 4.1054e-01, - 2.3450e-01, 5.9670e-01, 6.3347e-01, 9.3841e-01, - 7.1202e-01, 8.2758e-02, 3.3401e-01, 4.6725e-01, - 5.5433e-01, 4.1060e-01, 4.8108e-01, 2.6006e-02, - 5.0602e-01, 7.5691e-01, 8.3228e-01, 5.5461e-01, - 9.7723e-01, 9.5142e-02, 4.8502e-01, 4.7276e-01, - 4.6702e-01, 6.8685e-02, 4.7602e-01, 9.8285e-01, - 8.7376e-02, 7.2154e-01, 6.4366e-02, 8.0105e-01, - 6.7920e-01, 3.4169e-01, 9.9690e-01, 8.6900e-02, - 6.8423e-01, 2.2307e-01, 6.6827e-01, 6.2007e-01, - 7.2812e-01, 8.1208e-01, 8.1304e-01, 4.9537e-02, - 2.2654e-01, 1.0913e-01, 1.8736e-01, 7.4063e-01, - 7.0233e-01, 5.2413e-01, 9.2392e-01, 4.3601e-01, - 8.7513e-01, 9.5555e-02, 5.5401e-01, 4.9938e-02, - 6.6611e-02, 5.3894e-01, 7.4381e-01, 8.2785e-01, - 4.4283e-01, 2.1861e-01, 8.1992e-02, 5.3650e-01, - 2.7083e-01, 9.6992e-02, 6.5700e-01, 3.6738e-01, - 5.3582e-02, 6.8961e-01, 5.1692e-01, 4.7811e-01, - 9.5507e-01, 1.1629e-01, 4.4608e-01, 9.1741e-01, - 6.7667e-01, 2.8148e-02, 7.4903e-01, 7.8503e-01, - 4.6236e-02, 4.6999e-02, 1.9415e-01, 5.8769e-01, - 3.9665e-01, 2.8180e-01, 6.6891e-01, 5.2471e-01, - 5.6718e-01, 3.6965e-01, 4.8691e-01, 1.2448e-01, - 3.7830e-01, 7.1508e-01, 5.3843e-01, 1.9964e-01, - 4.6846e-01, 3.0749e-01, 5.0821e-01, 4.7264e-01, - 6.5869e-01, 7.5655e-01, 6.6608e-01, 4.4097e-01, - 8.8246e-01, 5.5178e-01, 3.1991e-01, 6.1838e-01, - 5.4250e-01, 3.8407e-01, 6.9734e-01, 5.6089e-01, - 7.7507e-01, 1.5344e-01, 8.1394e-01, 3.8479e-02, - 5.6357e-02, 3.7774e-01, 
7.2734e-01, 7.5830e-02, - 9.5355e-01, 4.6254e-01, 5.1318e-01, 7.7011e-01, - 2.9491e-01, 1.3797e-01, 8.7797e-01, 8.0879e-01, - 5.8383e-01, 1.2735e-01, 2.6792e-01, 7.5423e-01, - 8.5687e-01, 5.6856e-01, 3.0846e-01, 7.2150e-01, - 2.0158e-01, 4.4285e-01, 4.3074e-01, 8.2410e-01, - 5.6984e-01, 2.3044e-01, 8.7317e-01, 5.4302e-01, - 1.6661e-01, 1.5551e-01, 5.3661e-01, 9.8902e-01, - 6.2303e-01, 5.7449e-01, 9.7497e-01, 6.9276e-01, - 2.8973e-01, 2.2008e-01, 7.4155e-02, 6.3535e-01, - 3.2883e-01, 7.0117e-01, 3.0143e-01, 5.7505e-01, - 3.4680e-01, 2.7660e-01, 7.0338e-02, 7.8984e-01, - 6.7461e-01, 2.5361e-01, 8.3933e-01, 7.1929e-01, - 1.4013e-01, 9.9655e-01, 2.4267e-01, 9.3212e-01, - 4.6070e-01, 2.2070e-01, 6.6336e-01, 1.3432e-01, - 5.3597e-01, 5.1768e-01, 7.6964e-01, 9.9864e-01, - 5.3829e-01, 3.1592e-01, 9.3386e-01, 5.8600e-01, - 1.2704e-01, 5.0213e-01, 6.2221e-02, 1.0695e-01, - 2.6995e-01, 2.6387e-01, 9.3927e-01, 2.7555e-01, - 3.1073e-01, 1.1755e-01, 8.1059e-01, 3.6864e-01, - 2.6251e-01, 5.7401e-01, 2.8597e-02, 8.6585e-02]), + col_indices=tensor([2855, 8075, 7906, 7079, 944, 2249, 7337, 6716, 5649, + 377, 1658, 3417, 2253, 9016, 4792, 9243, 4028, 3365, + 5636, 2493, 3531, 9602, 5688, 8382, 1488, 4162, 6549, + 1428, 4206, 5248, 4698, 1740, 8876, 8760, 3544, 7864, + 5228, 289, 9460, 7197, 7389, 6190, 936, 2054, 3346, + 2597, 101, 3972, 3004, 8739, 6279, 825, 1589, 5343, + 8530, 2342, 6089, 4162, 6620, 1732, 9085, 1404, 7304, + 5034, 5919, 1230, 7001, 792, 7424, 3978, 4213, 3908, + 4044, 9935, 8144, 5622, 676, 8333, 8732, 4632, 7182, + 2716, 4419, 6085, 2334, 2976, 5334, 1366, 5321, 586, + 7543, 373, 7633, 4456, 8426, 2534, 5462, 4700, 5413, + 7895, 7777, 2822, 4836, 8015, 7334, 5053, 8887, 8662, + 3863, 1071, 1544, 2431, 3837, 3478, 3516, 8664, 8789, + 405, 5182, 3142, 62, 7890, 2146, 4939, 1750, 5579, + 7653, 8950, 59, 1651, 8572, 6675, 1530, 8681, 5127, + 4269, 1652, 1755, 2088, 3760, 939, 9076, 5171, 9725, + 1031, 802, 5365, 1238, 1758, 6773, 1843, 3988, 7438, + 6072, 9396, 7492, 7553, 52, 6947, 7724, 1560, 2595, + 3494, 8515, 8023, 3134, 2618, 5420, 1809, 7341, 472, + 7435, 4688, 921, 2491, 2580, 3977, 779, 1261, 514, + 9701, 3477, 3085, 1329, 438, 7281, 7897, 6166, 7273, + 9576, 9508, 753, 2981, 2172, 9032, 7928, 1556, 3144, + 6041, 2790, 7391, 4042, 7823, 4956, 7598, 7793, 1181, + 9879, 7631, 7457, 6781, 6834, 5456, 3018, 9370, 5904, + 5355, 2080, 5982, 4725, 6270, 2632, 2770, 4437, 6876, + 8719, 3282, 1082, 9173, 793, 7534, 1078, 5657, 7766, + 9840, 6532, 7020, 3090, 5658, 105, 1641, 6909, 9713, + 8763, 1921, 940, 678, 6378, 4711, 8329, 9482, 4061, + 9128, 1128, 3371, 8009, 8766, 1798, 3254, 6369, 4693, + 2081, 730, 8764, 67, 5625, 5865, 700, 7222, 3992, + 2212, 9855, 7568, 5632, 8493, 9842, 1682, 6065, 4055, + 9643, 7177, 1787, 8411, 2540, 2409, 5763, 199, 4839, + 1404, 9641, 4437, 6266, 773, 3108, 9701, 7560, 9727, + 2235, 8228, 7983, 5417, 117, 8365, 8165, 1705, 8058, + 986, 6403, 8188, 9119, 1574, 3152, 6141, 2504, 1296, + 4946, 2167, 1814, 5040, 9116, 5092, 2452, 5634, 499, + 9181, 4909, 9768, 8608, 7445, 6821, 8628, 9271, 5610, + 6992, 9045, 3556, 5252, 3074, 3330, 5586, 4485, 2582, + 7686, 2316, 8360, 7049, 2697, 222, 4577, 7118, 5261, + 5971, 8322, 5850, 6098, 1863, 9633, 6049, 6948, 8889, + 5404, 6096, 7780, 5257, 7447, 7381, 5118, 8887, 6231, + 245, 2336, 3017, 4179, 3979, 2631, 1783, 6423, 2802, + 2658, 102, 5954, 6803, 3271, 3616, 9398, 6377, 4583, + 7305, 8316, 3676, 4870, 288, 755, 5007, 937, 8696, + 3036, 9167, 359, 6067, 5130, 3884, 464, 632, 7245, + 6279, 4372, 5874, 999, 2544, 
4911, 7533, 8150, 7685, + 3541, 5174, 6678, 5939, 6369, 5170, 2115, 7812, 1759, + 6344, 86, 7901, 2354, 7970, 8016, 237, 5721, 8736, + 2288, 7311, 6314, 3422, 6291, 9763, 9859, 5803, 5829, + 898, 668, 6849, 9924, 80, 6861, 3489, 3186, 1906, + 8855, 3305, 8006, 990, 7225, 9409, 5768, 1861, 1996, + 7514, 3727, 6932, 4862, 4262, 1137, 2798, 6504, 4576, + 2132, 7101, 4162, 7815, 9813, 3319, 6065, 4737, 7952, + 4333, 5261, 1557, 5493, 3913, 6392, 9231, 860, 8184, + 9857, 8052, 3336, 9827, 1290, 9873, 9951, 3165, 15, + 7, 1909, 5689, 7732, 8756, 969, 1747, 3394, 9140, + 1482, 1923, 7122, 898, 2727, 3070, 3207, 4121, 561, + 198, 2520, 3623, 2753, 4291, 3278, 7341, 9658, 2765, + 724, 826, 7084, 2470, 4526, 7945, 2563, 3545, 4265, + 4216, 9562, 2161, 2660, 7695, 4411, 1180, 6972, 1810, + 3333, 6580, 9961, 4060, 2272, 8725, 2807, 6078, 8633, + 7349, 3307, 5110, 2006, 1827, 9517, 6677, 9240, 1477, + 2277, 2743, 6630, 6426, 6019, 5545, 9008, 4236, 912, + 5563, 7452, 2953, 5214, 7149, 9267, 2673, 6622, 3225, + 2638, 722, 5660, 9705, 8687, 6197, 6046, 1904, 8390, + 5654, 9600, 2115, 9368, 2171, 8326, 5284, 1261, 5224, + 9625, 1480, 6575, 884, 9478, 8438, 2055, 7393, 2672, + 9393, 2808, 4824, 9278, 6743, 3565, 5231, 6778, 7054, + 4769, 9383, 251, 3139, 8763, 8187, 2842, 407, 6397, + 566, 2884, 6481, 18, 7304, 1733, 8006, 7494, 4733, + 7721, 9037, 8098, 6400, 2422, 5667, 7892, 304, 6887, + 568, 3746, 5565, 4230, 918, 1042, 8175, 7862, 758, + 4938, 3672, 5974, 7563, 5414, 9583, 726, 5007, 9857, + 2024, 5897, 770, 3980, 6057, 4116, 8389, 1776, 2697, + 765, 8332, 7035, 8553, 4909, 8424, 7643, 6410, 1658, + 6738, 5281, 1388, 8059, 8630, 1195, 684, 7397, 2148, + 4121, 1776, 4393, 656, 9057, 5906, 4688, 4370, 6805, + 7518, 9928, 1096, 441, 5715, 264, 8790, 110, 6055, + 3307, 9718, 4189, 3405, 468, 1770, 2222, 9919, 5220, + 3581, 4468, 4869, 8564, 8235, 8595, 4652, 7135, 9790, + 3383, 9322, 8462, 5263, 8949, 6138, 1807, 5159, 9401, + 9798, 2395, 2081, 2498, 3456, 2092, 7540, 1340, 331, + 5905, 1591, 1450, 3391, 3596, 2688, 3235, 9753, 8148, + 387, 491, 2434, 6329, 3003, 609, 4635, 4998, 3472, + 1264, 5092, 353, 9769, 9646, 2230, 7257, 1130, 2575, + 6958, 2558, 9465, 485, 8470, 1264, 3436, 7800, 2166, + 2456, 1001, 3559, 1386, 51, 7186, 5601, 4138, 2064, + 5444, 1353, 9189, 7281, 7851, 5375, 608, 731, 8590, + 8514, 3100, 3564, 9506, 9562, 7449, 4131, 284, 2307, + 5159, 1059, 1935, 7840, 5413, 7301, 4546, 1314, 8222, + 4872, 5446, 7341, 883, 4149, 2153, 7772, 8906, 7694, + 4717, 7720, 333, 5775, 6239, 9106, 1055, 9977, 8591, + 2194, 2271, 5786, 6648, 3854, 690, 5148, 890, 9108, + 6456, 7918, 7448, 4979, 8637, 1742, 3986, 9252, 2556, + 5889, 4075, 5817, 3627, 8530, 7561, 3415, 882, 865, + 9802, 6120, 548, 1324, 7217, 9729, 9519, 5035, 5636, + 1349, 9031, 3654, 8939, 8600, 9314, 4810, 3479, 2674, + 7843, 5030, 4532, 1359, 4663, 1201, 3971, 2582, 7804, + 9797, 6537, 5858, 7718, 5744, 5649, 625, 9575, 1874, + 7077, 647, 3063, 2385, 4559, 8596, 6618, 8032, 6127, + 6903, 1070, 5458, 7218, 7838, 8246, 6146, 3594, 2785, + 6250, 5995, 2210, 7881, 1418, 9360, 1591, 1363, 7516, + 8813, 2412, 1352, 5190, 5324, 8852, 9231, 9500, 2706, + 5599, 606, 4761, 767, 3258, 4668, 2695, 1968, 7813, + 7111, 9196, 7803, 7120, 3806, 7798, 2066, 7912, 8130, + 4563, 7674, 2293, 8752, 9993, 8667, 859, 995, 3668, + 381, 518, 9663, 2627, 5710, 4280, 4639, 593, 3787, + 4665, 2454, 3471, 9319, 919, 7281, 5545, 1571, 5275, + 5533, 4893, 5993, 8071, 6470, 466, 6074, 6139, 4863, + 9666, 1697, 961, 2733, 4607, 6856, 1199, 1540, 258, + 3102, 7579, 
5169, 1867, 3539, 4869, 8301, 9635, 5071, + 663]), + values=tensor([7.1914e-01, 3.5699e-01, 1.8881e-01, 8.7099e-02, + 4.5318e-01, 9.1357e-01, 4.4773e-01, 6.6474e-01, + 8.3212e-02, 7.4502e-02, 2.7086e-01, 3.9903e-01, + 2.9410e-01, 6.8387e-01, 4.3190e-01, 2.3595e-01, + 1.3214e-01, 4.4705e-01, 5.2584e-01, 1.8393e-02, + 7.3158e-01, 8.9623e-02, 4.9286e-01, 6.6167e-01, + 4.4999e-01, 4.8491e-01, 3.8263e-03, 9.5463e-01, + 1.0969e-01, 1.7069e-01, 5.8460e-01, 1.6835e-01, + 2.0678e-01, 7.1476e-01, 1.5848e-01, 1.7599e-01, + 2.1607e-01, 7.8517e-02, 9.9109e-01, 4.9069e-01, + 7.9988e-01, 2.0882e-01, 2.6426e-01, 3.7107e-01, + 5.5086e-01, 2.7204e-01, 7.8563e-01, 3.9895e-01, + 4.7662e-01, 9.6919e-01, 6.4261e-02, 7.0318e-01, + 2.5080e-01, 7.7692e-01, 9.3246e-02, 2.0470e-01, + 9.1463e-01, 6.6510e-01, 1.0866e-01, 5.4425e-01, + 1.6861e-01, 3.3410e-01, 9.2130e-01, 4.5900e-01, + 4.9683e-01, 1.8400e-01, 1.3129e-01, 1.7994e-01, + 6.4029e-01, 4.5216e-01, 4.8907e-01, 2.0154e-01, + 2.0586e-01, 1.4555e-01, 9.4648e-01, 1.9162e-02, + 7.1885e-01, 2.8774e-01, 6.0571e-02, 7.6133e-01, + 5.0497e-01, 5.3375e-01, 8.8635e-01, 5.9771e-01, + 4.9873e-01, 7.4838e-01, 3.2578e-01, 8.3205e-02, + 2.3410e-01, 6.7108e-01, 6.0135e-01, 7.3169e-01, + 6.6353e-03, 8.5411e-01, 4.6467e-01, 6.1844e-02, + 9.2139e-01, 2.6591e-01, 4.3405e-01, 7.7844e-01, + 5.1991e-01, 6.6140e-01, 5.6448e-01, 1.9481e-01, + 5.6659e-03, 1.4261e-01, 3.7154e-01, 3.6401e-01, + 9.1567e-01, 8.2958e-01, 2.5659e-01, 8.6633e-01, + 9.8368e-02, 3.7465e-02, 1.1452e-01, 9.5572e-01, + 3.8882e-01, 8.7412e-02, 2.5241e-01, 1.3202e-01, + 4.7227e-01, 3.6926e-01, 2.4299e-01, 4.2681e-01, + 4.1625e-01, 8.0675e-01, 4.2739e-01, 5.6265e-01, + 5.8468e-01, 1.1299e-01, 5.4796e-01, 2.2326e-01, + 7.4426e-01, 2.5862e-01, 5.5255e-01, 4.2732e-01, + 5.1427e-01, 1.9244e-01, 8.4336e-01, 2.9593e-01, + 6.3001e-01, 2.9445e-02, 8.3252e-01, 2.7049e-02, + 5.5433e-01, 6.0664e-01, 8.2623e-01, 1.5857e-01, + 8.5901e-01, 2.1526e-01, 2.0539e-01, 4.1157e-01, + 1.5656e-01, 9.9551e-01, 8.1868e-01, 1.1786e-01, + 1.7518e-01, 4.8472e-01, 2.5889e-01, 3.6018e-01, + 3.8010e-01, 5.1461e-01, 8.8916e-01, 2.5142e-01, + 1.3914e-01, 5.8754e-01, 9.8202e-01, 6.0581e-01, + 7.7158e-01, 5.5242e-01, 1.2099e-01, 3.2467e-01, + 8.7514e-01, 8.4057e-01, 1.6976e-01, 5.7975e-01, + 8.7454e-01, 2.2008e-01, 4.4294e-01, 2.4808e-01, + 5.5570e-01, 7.7962e-01, 8.1085e-01, 6.4276e-01, + 4.2107e-01, 4.7643e-01, 4.2677e-01, 5.2699e-01, + 6.7358e-01, 4.1115e-02, 4.8068e-01, 8.9137e-01, + 5.9085e-01, 6.7059e-01, 8.6843e-01, 4.2584e-01, + 2.9715e-01, 6.8883e-01, 7.1645e-01, 4.9335e-01, + 1.5832e-01, 6.9841e-01, 4.8571e-01, 3.2816e-01, + 4.6945e-01, 1.3934e-01, 3.3396e-01, 4.6245e-01, + 4.6554e-01, 6.3637e-01, 2.4725e-01, 5.5364e-01, + 7.2063e-01, 5.6409e-01, 7.9662e-01, 4.5035e-01, + 8.1682e-01, 1.5079e-01, 1.5431e-01, 5.4861e-01, + 2.9258e-01, 8.4324e-01, 9.6444e-01, 4.1461e-01, + 4.2655e-01, 5.9708e-01, 6.6145e-01, 4.8123e-01, + 4.2681e-01, 6.9354e-01, 6.7733e-01, 1.9849e-01, + 5.9965e-01, 7.6633e-01, 6.5048e-01, 6.7667e-02, + 7.8467e-01, 2.5662e-01, 4.4563e-01, 2.5713e-01, + 7.2342e-01, 4.4453e-01, 4.7064e-01, 6.3518e-01, + 1.9396e-02, 2.3510e-01, 3.5305e-01, 1.9507e-01, + 1.2109e-01, 4.9662e-01, 8.9207e-01, 3.7461e-01, + 4.1028e-01, 3.1744e-01, 9.1684e-01, 2.2548e-01, + 4.4755e-01, 6.3841e-02, 8.0879e-02, 1.6013e-01, + 8.1916e-01, 9.5243e-01, 8.5692e-01, 2.9368e-01, + 1.6866e-01, 3.1215e-01, 2.2075e-01, 4.7427e-01, + 2.9583e-01, 4.8480e-01, 3.0389e-01, 4.7854e-01, + 9.3331e-01, 2.0131e-01, 6.4723e-01, 5.9956e-01, + 7.7562e-01, 1.1072e-01, 2.7641e-01, 
4.9538e-01, + 8.7955e-01, 2.8402e-01, 1.2436e-01, 8.4258e-01, + 5.5335e-01, 7.6934e-01, 1.0470e-01, 6.8446e-02, + 2.1320e-01, 1.1518e-01, 5.8377e-01, 7.1969e-01, + 1.8347e-02, 9.4135e-01, 2.2230e-01, 8.1285e-02, + 8.0022e-01, 8.0920e-01, 5.9028e-01, 1.7670e-01, + 1.7888e-02, 7.4633e-02, 6.7857e-01, 3.8349e-01, + 2.8117e-01, 9.9463e-01, 1.8073e-01, 5.0053e-01, + 4.1624e-01, 8.9193e-02, 2.6037e-01, 2.9977e-01, + 4.4692e-01, 6.2605e-02, 7.6860e-01, 4.9747e-03, + 5.5906e-01, 9.6162e-01, 2.7556e-01, 9.0166e-01, + 9.4515e-01, 2.8663e-01, 3.4439e-01, 9.1566e-01, + 1.7708e-01, 3.7951e-01, 5.3351e-01, 4.9896e-02, + 7.3733e-01, 1.8349e-02, 1.9235e-02, 1.6484e-02, + 5.8013e-01, 6.5098e-01, 1.7865e-01, 5.3873e-01, + 8.7322e-01, 4.4347e-01, 3.4325e-01, 8.1726e-01, + 1.2185e-01, 5.7874e-01, 1.7520e-01, 4.4937e-01, + 2.3280e-01, 3.9215e-01, 8.1236e-01, 4.2231e-01, + 7.8036e-01, 2.3107e-01, 5.7078e-01, 9.2965e-01, + 4.6720e-01, 3.7843e-01, 9.8572e-01, 7.4716e-02, + 4.2113e-01, 3.5345e-01, 2.4394e-02, 2.9640e-01, + 1.4247e-01, 3.9066e-01, 8.1707e-01, 9.2895e-01, + 8.3942e-01, 7.2112e-01, 9.4111e-03, 2.5797e-01, + 8.9230e-01, 4.6861e-01, 7.6778e-01, 9.3537e-02, + 6.7004e-01, 7.2295e-01, 7.4820e-01, 3.4341e-01, + 7.5489e-02, 4.3837e-01, 2.4889e-01, 9.0173e-01, + 2.4648e-01, 6.4814e-01, 5.9827e-01, 1.3787e-01, + 5.5651e-01, 2.8835e-01, 2.1513e-01, 8.4082e-01, + 9.8978e-01, 1.4380e-01, 8.4743e-01, 8.3186e-01, + 5.0587e-01, 9.5075e-01, 2.6789e-01, 7.9162e-01, + 1.0086e-01, 6.9314e-01, 3.8463e-01, 2.5693e-01, + 1.2958e-01, 6.7064e-02, 7.4356e-01, 7.1345e-01, + 3.3977e-01, 9.1597e-01, 4.3269e-01, 7.5182e-01, + 1.0214e-01, 4.8837e-01, 2.1073e-01, 7.3617e-01, + 7.5067e-01, 9.4559e-01, 1.1332e-01, 7.3871e-01, + 2.2181e-01, 1.5506e-01, 1.8569e-01, 6.9054e-01, + 5.0839e-01, 6.5259e-01, 1.2274e-01, 6.1245e-01, + 2.0467e-01, 2.4814e-01, 3.7083e-01, 3.8862e-01, + 4.9577e-01, 6.6978e-01, 5.0736e-01, 1.9597e-01, + 6.3460e-01, 8.3869e-01, 4.5106e-01, 3.7500e-01, + 9.5105e-02, 8.0765e-01, 7.8259e-01, 7.0514e-01, + 9.7148e-01, 6.3857e-01, 2.3327e-02, 8.3506e-01, + 4.4786e-01, 4.1722e-01, 7.7105e-02, 9.1627e-01, + 5.2201e-01, 6.8986e-01, 4.0576e-01, 1.4139e-01, + 7.5719e-01, 6.7978e-01, 4.3543e-01, 1.6270e-01, + 3.2703e-01, 6.2939e-01, 7.9716e-01, 6.0758e-02, + 4.8901e-01, 8.0568e-01, 3.2122e-01, 7.3112e-01, + 4.7776e-01, 3.4454e-01, 1.2217e-01, 8.7845e-01, + 7.3334e-01, 1.7767e-02, 9.8526e-02, 6.5994e-01, + 4.5432e-01, 5.5027e-01, 6.1022e-01, 5.4396e-01, + 4.6217e-01, 8.8228e-01, 9.7661e-01, 7.7544e-01, + 3.9460e-01, 7.9046e-01, 5.6485e-02, 4.3661e-01, + 9.9155e-01, 7.0178e-01, 8.2688e-01, 6.5376e-01, + 4.8909e-01, 9.7590e-01, 8.4990e-01, 8.4102e-01, + 8.7281e-01, 5.5305e-01, 7.4668e-01, 8.2128e-01, + 5.5457e-01, 5.9761e-01, 6.7722e-01, 4.2857e-01, + 8.3414e-01, 1.7303e-02, 6.5313e-01, 8.5054e-01, + 1.8488e-02, 7.8342e-01, 2.3044e-01, 3.1307e-01, + 9.7133e-02, 2.7489e-01, 7.5709e-01, 2.4391e-02, + 6.6253e-01, 2.7630e-01, 5.0632e-01, 6.6936e-01, + 3.1545e-01, 8.8089e-01, 9.2220e-01, 9.5829e-01, + 5.7617e-01, 8.8071e-01, 2.5377e-01, 9.8168e-01, + 1.3733e-01, 5.9002e-01, 9.2556e-01, 2.3471e-01, + 4.4713e-01, 7.8736e-01, 3.0287e-02, 3.7479e-01, + 2.3371e-01, 2.3312e-01, 2.0577e-01, 4.9982e-01, + 8.9310e-01, 8.8877e-01, 6.6160e-01, 3.9360e-01, + 7.3268e-02, 3.5365e-01, 5.4502e-01, 1.2996e-01, + 6.8004e-02, 9.2458e-01, 6.7526e-01, 1.9083e-02, + 7.2648e-01, 2.3662e-01, 6.1134e-01, 5.8670e-01, + 7.9140e-01, 7.5557e-01, 6.9229e-02, 2.4708e-01, + 2.9978e-01, 9.0461e-01, 5.4313e-01, 3.0432e-01, + 3.8062e-01, 1.4898e-02, 9.2129e-01, 
9.4985e-01, + 5.8134e-01, 6.1743e-01, 4.2089e-01, 4.0719e-01, + 7.5168e-01, 6.8000e-01, 7.6022e-01, 9.1390e-01, + 4.5525e-01, 2.3888e-01, 4.5804e-01, 1.5248e-01, + 5.6439e-01, 2.8046e-01, 6.7805e-01, 6.9097e-01, + 5.9275e-01, 5.9135e-01, 1.5522e-01, 1.0470e-01, + 8.4527e-01, 7.0721e-01, 1.7353e-01, 6.9803e-01, + 5.3798e-01, 7.3068e-01, 7.0521e-01, 7.5732e-01, + 8.2180e-01, 1.3041e-01, 5.0990e-01, 3.9364e-02, + 2.6640e-01, 3.2966e-01, 4.4012e-01, 6.6674e-01, + 2.3991e-01, 2.1136e-01, 1.2814e-01, 3.1776e-01, + 3.3583e-01, 2.9175e-01, 1.2930e-02, 5.6292e-01, + 5.5763e-01, 9.9996e-02, 8.6286e-01, 7.6298e-01, + 4.7048e-01, 4.5382e-01, 4.5726e-01, 5.1212e-02, + 5.5617e-01, 1.1455e-01, 9.1890e-01, 3.4407e-01, + 9.0745e-01, 7.3912e-01, 7.3913e-01, 7.8929e-01, + 7.2846e-01, 4.9965e-01, 2.1896e-01, 4.0960e-01, + 3.4976e-02, 6.2852e-01, 8.7510e-01, 2.6167e-01, + 6.9754e-01, 4.9384e-01, 5.9221e-01, 1.6923e-01, + 8.2387e-01, 4.1482e-01, 8.7866e-01, 3.1878e-01, + 4.3075e-01, 9.6728e-01, 6.3768e-01, 7.9501e-01, + 8.2309e-01, 8.2726e-01, 4.0740e-01, 7.3097e-01, + 8.3429e-01, 3.6572e-02, 6.4949e-01, 3.4151e-01, + 3.4415e-01, 1.8339e-01, 2.8646e-01, 7.6318e-01, + 1.9286e-01, 8.7404e-02, 8.3365e-01, 3.8940e-01, + 1.4313e-01, 3.2858e-01, 2.0825e-01, 1.3225e-01, + 4.7153e-01, 5.9345e-04, 6.4316e-03, 3.5093e-01, + 7.4859e-01, 2.5398e-02, 1.7122e-01, 3.6310e-01, + 9.7341e-02, 5.4702e-01, 6.5922e-01, 8.2479e-01, + 1.5754e-01, 6.6982e-01, 2.9553e-01, 5.8182e-01, + 6.0767e-01, 2.2587e-01, 5.9177e-01, 3.5056e-02, + 7.7132e-01, 3.3632e-02, 7.9357e-01, 3.4986e-01, + 5.1546e-01, 6.0746e-01, 8.7129e-02, 1.6569e-02, + 9.0926e-01, 7.2044e-01, 9.3011e-01, 2.2800e-01, + 4.0772e-01, 1.7022e-01, 9.6123e-03, 9.2862e-02, + 7.1853e-02, 6.3655e-02, 9.4267e-01, 2.1552e-01, + 7.6811e-01, 1.9484e-01, 4.9845e-02, 9.7880e-01, + 5.4652e-01, 6.2794e-02, 1.0333e-01, 7.9616e-01, + 7.2635e-01, 5.2013e-01, 5.4113e-01, 9.9067e-01, + 2.2398e-01, 7.6978e-01, 1.1021e-01, 7.6641e-01, + 6.0339e-01, 7.0817e-01, 4.9845e-01, 6.3660e-01, + 4.1097e-01, 4.3331e-01, 6.1036e-01, 1.2769e-01, + 9.6678e-01, 7.9162e-01, 1.1026e-01, 4.5378e-01, + 8.0408e-01, 7.2058e-02, 1.1862e-01, 5.0710e-01, + 3.1342e-01, 3.7765e-01, 3.0979e-01, 7.1625e-01, + 3.5012e-02, 6.9599e-01, 2.3626e-01, 2.7274e-01, + 8.7590e-01, 4.4684e-01, 7.7476e-01, 5.0375e-01, + 3.7941e-02, 5.8674e-02, 2.6070e-01, 7.9297e-01, + 7.0623e-01, 3.3070e-01, 5.7109e-01, 1.2253e-01, + 4.1667e-01, 9.1573e-02, 4.2418e-01, 7.7002e-01, + 7.6640e-01, 5.4212e-01, 2.8612e-01, 9.0739e-01, + 1.5197e-02, 7.8391e-01, 9.9961e-01, 8.4224e-01, + 4.5078e-02, 1.7178e-01, 5.7976e-01, 2.5597e-01, + 3.7693e-01, 7.1487e-01, 9.1756e-01, 9.4329e-01, + 3.4253e-01, 9.8948e-02, 6.1691e-01, 9.7617e-01, + 2.1028e-01, 7.5394e-02, 2.7003e-02, 4.9118e-01, + 4.5413e-01, 4.5744e-01, 9.0629e-02, 3.9792e-02, + 4.8959e-01, 2.5653e-03, 2.4848e-01, 1.5578e-01, + 4.8467e-01, 1.6759e-01, 3.4517e-01, 7.1360e-01, + 6.6365e-01, 8.6784e-01, 3.2600e-02, 5.1813e-01, + 9.7823e-01, 7.1991e-01, 4.8388e-01, 2.9200e-01, + 8.8445e-01, 4.6157e-01, 8.8424e-01, 1.6935e-02, + 9.5479e-01, 2.4626e-01, 3.2601e-01, 7.5883e-01, + 5.4595e-01, 7.4329e-01, 2.7699e-01, 2.6074e-02, + 5.7500e-02, 4.7050e-01, 9.2467e-01, 3.9975e-01, + 2.3830e-03, 8.9591e-02, 6.9714e-01, 9.9067e-01, + 7.3989e-01, 6.1702e-01, 7.4358e-02, 5.0241e-01, + 2.6956e-01, 5.2080e-01, 5.8244e-01, 3.8613e-01, + 7.5686e-01, 1.4862e-01, 4.2541e-01, 4.0442e-01, + 8.6544e-01, 6.5414e-01, 6.0983e-01, 7.9228e-01, + 3.3659e-01, 7.4508e-01, 7.8118e-02, 8.0894e-01, + 4.3755e-01, 9.6531e-01, 2.7971e-01, 
1.6120e-01, + 5.6311e-01, 5.2545e-01, 7.0385e-01, 6.7045e-01, + 7.3439e-01, 7.2749e-01, 8.4642e-01, 6.6381e-01, + 5.4125e-01, 9.6001e-01, 8.5499e-01, 3.4083e-01, + 2.9676e-01, 8.8005e-01, 5.5461e-01, 5.2255e-01, + 9.3939e-01, 3.7880e-01, 8.1994e-01, 9.8494e-01, + 5.2830e-01, 7.1390e-01, 9.6245e-01, 2.9338e-01, + 5.6169e-01, 4.6562e-01, 6.2625e-02, 6.0261e-02, + 9.7711e-01, 2.6813e-01, 5.6608e-01, 4.6741e-01, + 4.7599e-01, 5.1983e-01, 8.9555e-02, 7.8497e-01, + 2.6173e-01, 7.2181e-01, 3.4515e-01, 6.2851e-01, + 9.5570e-01, 8.7618e-01, 8.6800e-01, 7.7736e-02, + 9.9029e-02, 6.6877e-01, 4.5136e-01, 4.8328e-01, + 2.1995e-01, 5.3975e-01, 3.0857e-01, 2.9108e-01, + 7.3244e-01, 9.4763e-01, 5.5523e-01, 5.3691e-01, + 7.5862e-01, 8.8397e-01, 9.9768e-01, 8.5263e-01, + 9.2202e-01, 8.9888e-01, 7.2998e-01, 6.1755e-01, + 3.5453e-01, 1.6461e-02, 5.6335e-01, 2.6537e-01, + 6.4050e-01, 7.1585e-01, 5.1312e-01, 2.6271e-01, + 2.6827e-01, 3.0050e-01, 2.6003e-01, 6.8870e-01, + 7.9350e-01, 1.0130e-01, 3.5592e-01, 2.7398e-01, + 3.6442e-01, 5.2514e-01, 5.7656e-01, 9.2620e-02, + 5.0659e-03, 8.0383e-01, 5.7759e-02, 6.4201e-01, + 9.8499e-01, 9.0458e-01, 7.2209e-01, 6.9173e-01, + 1.9235e-01, 1.3523e-01, 2.5251e-01, 4.3643e-01, + 8.3314e-01, 4.1119e-01, 3.5737e-01, 1.4247e-01, + 4.8132e-01, 5.8598e-01, 2.4472e-01, 3.7236e-01, + 7.5418e-01, 7.1316e-01, 6.7213e-01, 3.2274e-01, + 2.1236e-01, 1.6570e-01, 6.9203e-01, 5.6668e-01, + 4.3002e-01, 5.8654e-01, 1.9247e-01, 2.0992e-01, + 6.1953e-01, 8.9656e-01, 1.5362e-01, 4.5501e-01, + 1.1305e-01, 1.5899e-01, 7.4418e-01, 8.8016e-01, + 4.7632e-01, 9.5522e-01, 4.7101e-01, 2.8447e-01, + 3.8704e-01, 8.4621e-01, 5.5945e-01, 5.1588e-02, + 7.6581e-02, 1.0814e-01, 6.1450e-01, 2.6132e-02, + 6.9289e-01, 1.3583e-01, 9.2203e-01, 2.6151e-02, + 2.8007e-01, 5.5882e-01, 6.1010e-02, 2.0485e-01, + 7.9007e-01, 1.2710e-01, 5.2246e-01, 1.4720e-01, + 9.0733e-02, 4.9059e-01, 5.5002e-01, 6.9097e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8046, 0.6398, 0.4516, ..., 0.6060, 0.1172, 0.9615]) +tensor([0.6225, 0.9544, 0.4026, ..., 0.0448, 0.5411, 0.3638]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1509,13 +1133,389 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.420795440673828 seconds +Time: 10.603893041610718 seconds -[40.1, 39.18, 39.44, 38.84, 39.26, 38.78, 38.88, 39.27, 39.5, 39.18] +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([2855, 8075, 7906, 7079, 944, 2249, 7337, 6716, 5649, + 377, 1658, 3417, 2253, 9016, 4792, 9243, 4028, 3365, + 5636, 2493, 3531, 9602, 5688, 8382, 1488, 4162, 6549, + 1428, 4206, 5248, 4698, 1740, 8876, 8760, 3544, 7864, + 5228, 289, 9460, 7197, 7389, 6190, 936, 2054, 3346, + 2597, 101, 3972, 3004, 8739, 6279, 825, 1589, 5343, + 8530, 2342, 6089, 4162, 6620, 1732, 9085, 1404, 7304, + 5034, 5919, 1230, 7001, 792, 7424, 3978, 4213, 3908, + 4044, 9935, 8144, 5622, 676, 8333, 8732, 4632, 7182, + 2716, 4419, 6085, 2334, 2976, 5334, 1366, 5321, 586, + 7543, 373, 7633, 4456, 8426, 2534, 5462, 4700, 5413, + 7895, 7777, 2822, 4836, 8015, 7334, 5053, 8887, 8662, + 3863, 1071, 1544, 2431, 3837, 3478, 3516, 8664, 8789, + 405, 5182, 3142, 62, 7890, 2146, 4939, 1750, 5579, + 7653, 8950, 59, 1651, 8572, 6675, 1530, 8681, 5127, + 4269, 1652, 1755, 2088, 3760, 939, 9076, 5171, 9725, + 1031, 802, 5365, 1238, 1758, 6773, 1843, 3988, 7438, + 6072, 9396, 7492, 7553, 52, 6947, 7724, 1560, 2595, + 3494, 8515, 8023, 3134, 2618, 5420, 1809, 7341, 472, + 7435, 4688, 921, 2491, 2580, 3977, 779, 1261, 514, + 9701, 3477, 3085, 1329, 438, 7281, 7897, 6166, 7273, + 9576, 9508, 753, 2981, 2172, 9032, 7928, 1556, 3144, + 6041, 2790, 7391, 4042, 7823, 4956, 7598, 7793, 1181, + 9879, 7631, 7457, 6781, 6834, 5456, 3018, 9370, 5904, + 5355, 2080, 5982, 4725, 6270, 2632, 2770, 4437, 6876, + 8719, 3282, 1082, 9173, 793, 7534, 1078, 5657, 7766, + 9840, 6532, 7020, 3090, 5658, 105, 1641, 6909, 9713, + 8763, 1921, 940, 678, 6378, 4711, 8329, 9482, 4061, + 9128, 1128, 3371, 8009, 8766, 1798, 3254, 6369, 4693, + 2081, 730, 8764, 67, 5625, 5865, 700, 7222, 3992, + 2212, 9855, 7568, 5632, 8493, 9842, 1682, 6065, 4055, + 9643, 7177, 1787, 8411, 2540, 2409, 5763, 199, 4839, + 1404, 9641, 4437, 6266, 773, 3108, 9701, 7560, 9727, + 2235, 8228, 7983, 5417, 117, 8365, 8165, 1705, 8058, + 986, 6403, 8188, 9119, 1574, 3152, 6141, 2504, 1296, + 4946, 2167, 1814, 5040, 9116, 5092, 2452, 5634, 499, + 9181, 4909, 9768, 8608, 7445, 6821, 8628, 9271, 5610, + 6992, 9045, 3556, 5252, 3074, 3330, 5586, 4485, 2582, + 7686, 2316, 8360, 7049, 2697, 222, 4577, 7118, 5261, + 5971, 8322, 5850, 6098, 1863, 9633, 6049, 6948, 8889, + 5404, 6096, 7780, 5257, 7447, 7381, 5118, 8887, 6231, + 245, 2336, 3017, 4179, 3979, 2631, 1783, 6423, 2802, + 2658, 102, 5954, 6803, 3271, 3616, 9398, 6377, 4583, + 7305, 8316, 3676, 4870, 288, 755, 5007, 937, 8696, + 3036, 9167, 359, 6067, 5130, 3884, 464, 632, 7245, + 6279, 4372, 5874, 999, 2544, 4911, 7533, 8150, 7685, + 3541, 5174, 6678, 5939, 6369, 5170, 2115, 7812, 1759, + 6344, 86, 7901, 2354, 7970, 8016, 237, 5721, 8736, + 2288, 7311, 6314, 3422, 6291, 9763, 9859, 5803, 5829, + 898, 668, 6849, 9924, 80, 6861, 3489, 3186, 1906, + 8855, 3305, 8006, 990, 7225, 9409, 5768, 1861, 1996, + 7514, 3727, 6932, 4862, 4262, 1137, 2798, 6504, 4576, + 2132, 7101, 4162, 7815, 9813, 3319, 6065, 4737, 7952, + 4333, 5261, 1557, 5493, 3913, 6392, 9231, 860, 8184, + 9857, 8052, 3336, 9827, 1290, 9873, 9951, 3165, 15, + 7, 1909, 5689, 7732, 8756, 969, 1747, 3394, 9140, + 1482, 1923, 7122, 898, 2727, 3070, 3207, 4121, 561, + 198, 2520, 3623, 2753, 4291, 3278, 7341, 9658, 2765, + 724, 826, 7084, 2470, 4526, 7945, 2563, 3545, 4265, + 4216, 9562, 2161, 2660, 7695, 4411, 1180, 6972, 1810, + 3333, 6580, 9961, 4060, 2272, 8725, 2807, 6078, 8633, + 7349, 3307, 5110, 2006, 1827, 9517, 6677, 9240, 1477, + 2277, 
2743, 6630, 6426, 6019, 5545, 9008, 4236, 912, + 5563, 7452, 2953, 5214, 7149, 9267, 2673, 6622, 3225, + 2638, 722, 5660, 9705, 8687, 6197, 6046, 1904, 8390, + 5654, 9600, 2115, 9368, 2171, 8326, 5284, 1261, 5224, + 9625, 1480, 6575, 884, 9478, 8438, 2055, 7393, 2672, + 9393, 2808, 4824, 9278, 6743, 3565, 5231, 6778, 7054, + 4769, 9383, 251, 3139, 8763, 8187, 2842, 407, 6397, + 566, 2884, 6481, 18, 7304, 1733, 8006, 7494, 4733, + 7721, 9037, 8098, 6400, 2422, 5667, 7892, 304, 6887, + 568, 3746, 5565, 4230, 918, 1042, 8175, 7862, 758, + 4938, 3672, 5974, 7563, 5414, 9583, 726, 5007, 9857, + 2024, 5897, 770, 3980, 6057, 4116, 8389, 1776, 2697, + 765, 8332, 7035, 8553, 4909, 8424, 7643, 6410, 1658, + 6738, 5281, 1388, 8059, 8630, 1195, 684, 7397, 2148, + 4121, 1776, 4393, 656, 9057, 5906, 4688, 4370, 6805, + 7518, 9928, 1096, 441, 5715, 264, 8790, 110, 6055, + 3307, 9718, 4189, 3405, 468, 1770, 2222, 9919, 5220, + 3581, 4468, 4869, 8564, 8235, 8595, 4652, 7135, 9790, + 3383, 9322, 8462, 5263, 8949, 6138, 1807, 5159, 9401, + 9798, 2395, 2081, 2498, 3456, 2092, 7540, 1340, 331, + 5905, 1591, 1450, 3391, 3596, 2688, 3235, 9753, 8148, + 387, 491, 2434, 6329, 3003, 609, 4635, 4998, 3472, + 1264, 5092, 353, 9769, 9646, 2230, 7257, 1130, 2575, + 6958, 2558, 9465, 485, 8470, 1264, 3436, 7800, 2166, + 2456, 1001, 3559, 1386, 51, 7186, 5601, 4138, 2064, + 5444, 1353, 9189, 7281, 7851, 5375, 608, 731, 8590, + 8514, 3100, 3564, 9506, 9562, 7449, 4131, 284, 2307, + 5159, 1059, 1935, 7840, 5413, 7301, 4546, 1314, 8222, + 4872, 5446, 7341, 883, 4149, 2153, 7772, 8906, 7694, + 4717, 7720, 333, 5775, 6239, 9106, 1055, 9977, 8591, + 2194, 2271, 5786, 6648, 3854, 690, 5148, 890, 9108, + 6456, 7918, 7448, 4979, 8637, 1742, 3986, 9252, 2556, + 5889, 4075, 5817, 3627, 8530, 7561, 3415, 882, 865, + 9802, 6120, 548, 1324, 7217, 9729, 9519, 5035, 5636, + 1349, 9031, 3654, 8939, 8600, 9314, 4810, 3479, 2674, + 7843, 5030, 4532, 1359, 4663, 1201, 3971, 2582, 7804, + 9797, 6537, 5858, 7718, 5744, 5649, 625, 9575, 1874, + 7077, 647, 3063, 2385, 4559, 8596, 6618, 8032, 6127, + 6903, 1070, 5458, 7218, 7838, 8246, 6146, 3594, 2785, + 6250, 5995, 2210, 7881, 1418, 9360, 1591, 1363, 7516, + 8813, 2412, 1352, 5190, 5324, 8852, 9231, 9500, 2706, + 5599, 606, 4761, 767, 3258, 4668, 2695, 1968, 7813, + 7111, 9196, 7803, 7120, 3806, 7798, 2066, 7912, 8130, + 4563, 7674, 2293, 8752, 9993, 8667, 859, 995, 3668, + 381, 518, 9663, 2627, 5710, 4280, 4639, 593, 3787, + 4665, 2454, 3471, 9319, 919, 7281, 5545, 1571, 5275, + 5533, 4893, 5993, 8071, 6470, 466, 6074, 6139, 4863, + 9666, 1697, 961, 2733, 4607, 6856, 1199, 1540, 258, + 3102, 7579, 5169, 1867, 3539, 4869, 8301, 9635, 5071, + 663]), + values=tensor([7.1914e-01, 3.5699e-01, 1.8881e-01, 8.7099e-02, + 4.5318e-01, 9.1357e-01, 4.4773e-01, 6.6474e-01, + 8.3212e-02, 7.4502e-02, 2.7086e-01, 3.9903e-01, + 2.9410e-01, 6.8387e-01, 4.3190e-01, 2.3595e-01, + 1.3214e-01, 4.4705e-01, 5.2584e-01, 1.8393e-02, + 7.3158e-01, 8.9623e-02, 4.9286e-01, 6.6167e-01, + 4.4999e-01, 4.8491e-01, 3.8263e-03, 9.5463e-01, + 1.0969e-01, 1.7069e-01, 5.8460e-01, 1.6835e-01, + 2.0678e-01, 7.1476e-01, 1.5848e-01, 1.7599e-01, + 2.1607e-01, 7.8517e-02, 9.9109e-01, 4.9069e-01, + 7.9988e-01, 2.0882e-01, 2.6426e-01, 3.7107e-01, + 5.5086e-01, 2.7204e-01, 7.8563e-01, 3.9895e-01, + 4.7662e-01, 9.6919e-01, 6.4261e-02, 7.0318e-01, + 2.5080e-01, 7.7692e-01, 9.3246e-02, 2.0470e-01, + 9.1463e-01, 6.6510e-01, 1.0866e-01, 5.4425e-01, + 1.6861e-01, 3.3410e-01, 9.2130e-01, 4.5900e-01, + 4.9683e-01, 1.8400e-01, 1.3129e-01, 
1.7994e-01, + 6.4029e-01, 4.5216e-01, 4.8907e-01, 2.0154e-01, + 2.0586e-01, 1.4555e-01, 9.4648e-01, 1.9162e-02, + 7.1885e-01, 2.8774e-01, 6.0571e-02, 7.6133e-01, + 5.0497e-01, 5.3375e-01, 8.8635e-01, 5.9771e-01, + 4.9873e-01, 7.4838e-01, 3.2578e-01, 8.3205e-02, + 2.3410e-01, 6.7108e-01, 6.0135e-01, 7.3169e-01, + 6.6353e-03, 8.5411e-01, 4.6467e-01, 6.1844e-02, + 9.2139e-01, 2.6591e-01, 4.3405e-01, 7.7844e-01, + 5.1991e-01, 6.6140e-01, 5.6448e-01, 1.9481e-01, + 5.6659e-03, 1.4261e-01, 3.7154e-01, 3.6401e-01, + 9.1567e-01, 8.2958e-01, 2.5659e-01, 8.6633e-01, + 9.8368e-02, 3.7465e-02, 1.1452e-01, 9.5572e-01, + 3.8882e-01, 8.7412e-02, 2.5241e-01, 1.3202e-01, + 4.7227e-01, 3.6926e-01, 2.4299e-01, 4.2681e-01, + 4.1625e-01, 8.0675e-01, 4.2739e-01, 5.6265e-01, + 5.8468e-01, 1.1299e-01, 5.4796e-01, 2.2326e-01, + 7.4426e-01, 2.5862e-01, 5.5255e-01, 4.2732e-01, + 5.1427e-01, 1.9244e-01, 8.4336e-01, 2.9593e-01, + 6.3001e-01, 2.9445e-02, 8.3252e-01, 2.7049e-02, + 5.5433e-01, 6.0664e-01, 8.2623e-01, 1.5857e-01, + 8.5901e-01, 2.1526e-01, 2.0539e-01, 4.1157e-01, + 1.5656e-01, 9.9551e-01, 8.1868e-01, 1.1786e-01, + 1.7518e-01, 4.8472e-01, 2.5889e-01, 3.6018e-01, + 3.8010e-01, 5.1461e-01, 8.8916e-01, 2.5142e-01, + 1.3914e-01, 5.8754e-01, 9.8202e-01, 6.0581e-01, + 7.7158e-01, 5.5242e-01, 1.2099e-01, 3.2467e-01, + 8.7514e-01, 8.4057e-01, 1.6976e-01, 5.7975e-01, + 8.7454e-01, 2.2008e-01, 4.4294e-01, 2.4808e-01, + 5.5570e-01, 7.7962e-01, 8.1085e-01, 6.4276e-01, + 4.2107e-01, 4.7643e-01, 4.2677e-01, 5.2699e-01, + 6.7358e-01, 4.1115e-02, 4.8068e-01, 8.9137e-01, + 5.9085e-01, 6.7059e-01, 8.6843e-01, 4.2584e-01, + 2.9715e-01, 6.8883e-01, 7.1645e-01, 4.9335e-01, + 1.5832e-01, 6.9841e-01, 4.8571e-01, 3.2816e-01, + 4.6945e-01, 1.3934e-01, 3.3396e-01, 4.6245e-01, + 4.6554e-01, 6.3637e-01, 2.4725e-01, 5.5364e-01, + 7.2063e-01, 5.6409e-01, 7.9662e-01, 4.5035e-01, + 8.1682e-01, 1.5079e-01, 1.5431e-01, 5.4861e-01, + 2.9258e-01, 8.4324e-01, 9.6444e-01, 4.1461e-01, + 4.2655e-01, 5.9708e-01, 6.6145e-01, 4.8123e-01, + 4.2681e-01, 6.9354e-01, 6.7733e-01, 1.9849e-01, + 5.9965e-01, 7.6633e-01, 6.5048e-01, 6.7667e-02, + 7.8467e-01, 2.5662e-01, 4.4563e-01, 2.5713e-01, + 7.2342e-01, 4.4453e-01, 4.7064e-01, 6.3518e-01, + 1.9396e-02, 2.3510e-01, 3.5305e-01, 1.9507e-01, + 1.2109e-01, 4.9662e-01, 8.9207e-01, 3.7461e-01, + 4.1028e-01, 3.1744e-01, 9.1684e-01, 2.2548e-01, + 4.4755e-01, 6.3841e-02, 8.0879e-02, 1.6013e-01, + 8.1916e-01, 9.5243e-01, 8.5692e-01, 2.9368e-01, + 1.6866e-01, 3.1215e-01, 2.2075e-01, 4.7427e-01, + 2.9583e-01, 4.8480e-01, 3.0389e-01, 4.7854e-01, + 9.3331e-01, 2.0131e-01, 6.4723e-01, 5.9956e-01, + 7.7562e-01, 1.1072e-01, 2.7641e-01, 4.9538e-01, + 8.7955e-01, 2.8402e-01, 1.2436e-01, 8.4258e-01, + 5.5335e-01, 7.6934e-01, 1.0470e-01, 6.8446e-02, + 2.1320e-01, 1.1518e-01, 5.8377e-01, 7.1969e-01, + 1.8347e-02, 9.4135e-01, 2.2230e-01, 8.1285e-02, + 8.0022e-01, 8.0920e-01, 5.9028e-01, 1.7670e-01, + 1.7888e-02, 7.4633e-02, 6.7857e-01, 3.8349e-01, + 2.8117e-01, 9.9463e-01, 1.8073e-01, 5.0053e-01, + 4.1624e-01, 8.9193e-02, 2.6037e-01, 2.9977e-01, + 4.4692e-01, 6.2605e-02, 7.6860e-01, 4.9747e-03, + 5.5906e-01, 9.6162e-01, 2.7556e-01, 9.0166e-01, + 9.4515e-01, 2.8663e-01, 3.4439e-01, 9.1566e-01, + 1.7708e-01, 3.7951e-01, 5.3351e-01, 4.9896e-02, + 7.3733e-01, 1.8349e-02, 1.9235e-02, 1.6484e-02, + 5.8013e-01, 6.5098e-01, 1.7865e-01, 5.3873e-01, + 8.7322e-01, 4.4347e-01, 3.4325e-01, 8.1726e-01, + 1.2185e-01, 5.7874e-01, 1.7520e-01, 4.4937e-01, + 2.3280e-01, 3.9215e-01, 8.1236e-01, 4.2231e-01, + 7.8036e-01, 2.3107e-01, 5.7078e-01, 
9.2965e-01, + 4.6720e-01, 3.7843e-01, 9.8572e-01, 7.4716e-02, + 4.2113e-01, 3.5345e-01, 2.4394e-02, 2.9640e-01, + 1.4247e-01, 3.9066e-01, 8.1707e-01, 9.2895e-01, + 8.3942e-01, 7.2112e-01, 9.4111e-03, 2.5797e-01, + 8.9230e-01, 4.6861e-01, 7.6778e-01, 9.3537e-02, + 6.7004e-01, 7.2295e-01, 7.4820e-01, 3.4341e-01, + 7.5489e-02, 4.3837e-01, 2.4889e-01, 9.0173e-01, + 2.4648e-01, 6.4814e-01, 5.9827e-01, 1.3787e-01, + 5.5651e-01, 2.8835e-01, 2.1513e-01, 8.4082e-01, + 9.8978e-01, 1.4380e-01, 8.4743e-01, 8.3186e-01, + 5.0587e-01, 9.5075e-01, 2.6789e-01, 7.9162e-01, + 1.0086e-01, 6.9314e-01, 3.8463e-01, 2.5693e-01, + 1.2958e-01, 6.7064e-02, 7.4356e-01, 7.1345e-01, + 3.3977e-01, 9.1597e-01, 4.3269e-01, 7.5182e-01, + 1.0214e-01, 4.8837e-01, 2.1073e-01, 7.3617e-01, + 7.5067e-01, 9.4559e-01, 1.1332e-01, 7.3871e-01, + 2.2181e-01, 1.5506e-01, 1.8569e-01, 6.9054e-01, + 5.0839e-01, 6.5259e-01, 1.2274e-01, 6.1245e-01, + 2.0467e-01, 2.4814e-01, 3.7083e-01, 3.8862e-01, + 4.9577e-01, 6.6978e-01, 5.0736e-01, 1.9597e-01, + 6.3460e-01, 8.3869e-01, 4.5106e-01, 3.7500e-01, + 9.5105e-02, 8.0765e-01, 7.8259e-01, 7.0514e-01, + 9.7148e-01, 6.3857e-01, 2.3327e-02, 8.3506e-01, + 4.4786e-01, 4.1722e-01, 7.7105e-02, 9.1627e-01, + 5.2201e-01, 6.8986e-01, 4.0576e-01, 1.4139e-01, + 7.5719e-01, 6.7978e-01, 4.3543e-01, 1.6270e-01, + 3.2703e-01, 6.2939e-01, 7.9716e-01, 6.0758e-02, + 4.8901e-01, 8.0568e-01, 3.2122e-01, 7.3112e-01, + 4.7776e-01, 3.4454e-01, 1.2217e-01, 8.7845e-01, + 7.3334e-01, 1.7767e-02, 9.8526e-02, 6.5994e-01, + 4.5432e-01, 5.5027e-01, 6.1022e-01, 5.4396e-01, + 4.6217e-01, 8.8228e-01, 9.7661e-01, 7.7544e-01, + 3.9460e-01, 7.9046e-01, 5.6485e-02, 4.3661e-01, + 9.9155e-01, 7.0178e-01, 8.2688e-01, 6.5376e-01, + 4.8909e-01, 9.7590e-01, 8.4990e-01, 8.4102e-01, + 8.7281e-01, 5.5305e-01, 7.4668e-01, 8.2128e-01, + 5.5457e-01, 5.9761e-01, 6.7722e-01, 4.2857e-01, + 8.3414e-01, 1.7303e-02, 6.5313e-01, 8.5054e-01, + 1.8488e-02, 7.8342e-01, 2.3044e-01, 3.1307e-01, + 9.7133e-02, 2.7489e-01, 7.5709e-01, 2.4391e-02, + 6.6253e-01, 2.7630e-01, 5.0632e-01, 6.6936e-01, + 3.1545e-01, 8.8089e-01, 9.2220e-01, 9.5829e-01, + 5.7617e-01, 8.8071e-01, 2.5377e-01, 9.8168e-01, + 1.3733e-01, 5.9002e-01, 9.2556e-01, 2.3471e-01, + 4.4713e-01, 7.8736e-01, 3.0287e-02, 3.7479e-01, + 2.3371e-01, 2.3312e-01, 2.0577e-01, 4.9982e-01, + 8.9310e-01, 8.8877e-01, 6.6160e-01, 3.9360e-01, + 7.3268e-02, 3.5365e-01, 5.4502e-01, 1.2996e-01, + 6.8004e-02, 9.2458e-01, 6.7526e-01, 1.9083e-02, + 7.2648e-01, 2.3662e-01, 6.1134e-01, 5.8670e-01, + 7.9140e-01, 7.5557e-01, 6.9229e-02, 2.4708e-01, + 2.9978e-01, 9.0461e-01, 5.4313e-01, 3.0432e-01, + 3.8062e-01, 1.4898e-02, 9.2129e-01, 9.4985e-01, + 5.8134e-01, 6.1743e-01, 4.2089e-01, 4.0719e-01, + 7.5168e-01, 6.8000e-01, 7.6022e-01, 9.1390e-01, + 4.5525e-01, 2.3888e-01, 4.5804e-01, 1.5248e-01, + 5.6439e-01, 2.8046e-01, 6.7805e-01, 6.9097e-01, + 5.9275e-01, 5.9135e-01, 1.5522e-01, 1.0470e-01, + 8.4527e-01, 7.0721e-01, 1.7353e-01, 6.9803e-01, + 5.3798e-01, 7.3068e-01, 7.0521e-01, 7.5732e-01, + 8.2180e-01, 1.3041e-01, 5.0990e-01, 3.9364e-02, + 2.6640e-01, 3.2966e-01, 4.4012e-01, 6.6674e-01, + 2.3991e-01, 2.1136e-01, 1.2814e-01, 3.1776e-01, + 3.3583e-01, 2.9175e-01, 1.2930e-02, 5.6292e-01, + 5.5763e-01, 9.9996e-02, 8.6286e-01, 7.6298e-01, + 4.7048e-01, 4.5382e-01, 4.5726e-01, 5.1212e-02, + 5.5617e-01, 1.1455e-01, 9.1890e-01, 3.4407e-01, + 9.0745e-01, 7.3912e-01, 7.3913e-01, 7.8929e-01, + 7.2846e-01, 4.9965e-01, 2.1896e-01, 4.0960e-01, + 3.4976e-02, 6.2852e-01, 8.7510e-01, 2.6167e-01, + 6.9754e-01, 4.9384e-01, 5.9221e-01, 
1.6923e-01, + 8.2387e-01, 4.1482e-01, 8.7866e-01, 3.1878e-01, + 4.3075e-01, 9.6728e-01, 6.3768e-01, 7.9501e-01, + 8.2309e-01, 8.2726e-01, 4.0740e-01, 7.3097e-01, + 8.3429e-01, 3.6572e-02, 6.4949e-01, 3.4151e-01, + 3.4415e-01, 1.8339e-01, 2.8646e-01, 7.6318e-01, + 1.9286e-01, 8.7404e-02, 8.3365e-01, 3.8940e-01, + 1.4313e-01, 3.2858e-01, 2.0825e-01, 1.3225e-01, + 4.7153e-01, 5.9345e-04, 6.4316e-03, 3.5093e-01, + 7.4859e-01, 2.5398e-02, 1.7122e-01, 3.6310e-01, + 9.7341e-02, 5.4702e-01, 6.5922e-01, 8.2479e-01, + 1.5754e-01, 6.6982e-01, 2.9553e-01, 5.8182e-01, + 6.0767e-01, 2.2587e-01, 5.9177e-01, 3.5056e-02, + 7.7132e-01, 3.3632e-02, 7.9357e-01, 3.4986e-01, + 5.1546e-01, 6.0746e-01, 8.7129e-02, 1.6569e-02, + 9.0926e-01, 7.2044e-01, 9.3011e-01, 2.2800e-01, + 4.0772e-01, 1.7022e-01, 9.6123e-03, 9.2862e-02, + 7.1853e-02, 6.3655e-02, 9.4267e-01, 2.1552e-01, + 7.6811e-01, 1.9484e-01, 4.9845e-02, 9.7880e-01, + 5.4652e-01, 6.2794e-02, 1.0333e-01, 7.9616e-01, + 7.2635e-01, 5.2013e-01, 5.4113e-01, 9.9067e-01, + 2.2398e-01, 7.6978e-01, 1.1021e-01, 7.6641e-01, + 6.0339e-01, 7.0817e-01, 4.9845e-01, 6.3660e-01, + 4.1097e-01, 4.3331e-01, 6.1036e-01, 1.2769e-01, + 9.6678e-01, 7.9162e-01, 1.1026e-01, 4.5378e-01, + 8.0408e-01, 7.2058e-02, 1.1862e-01, 5.0710e-01, + 3.1342e-01, 3.7765e-01, 3.0979e-01, 7.1625e-01, + 3.5012e-02, 6.9599e-01, 2.3626e-01, 2.7274e-01, + 8.7590e-01, 4.4684e-01, 7.7476e-01, 5.0375e-01, + 3.7941e-02, 5.8674e-02, 2.6070e-01, 7.9297e-01, + 7.0623e-01, 3.3070e-01, 5.7109e-01, 1.2253e-01, + 4.1667e-01, 9.1573e-02, 4.2418e-01, 7.7002e-01, + 7.6640e-01, 5.4212e-01, 2.8612e-01, 9.0739e-01, + 1.5197e-02, 7.8391e-01, 9.9961e-01, 8.4224e-01, + 4.5078e-02, 1.7178e-01, 5.7976e-01, 2.5597e-01, + 3.7693e-01, 7.1487e-01, 9.1756e-01, 9.4329e-01, + 3.4253e-01, 9.8948e-02, 6.1691e-01, 9.7617e-01, + 2.1028e-01, 7.5394e-02, 2.7003e-02, 4.9118e-01, + 4.5413e-01, 4.5744e-01, 9.0629e-02, 3.9792e-02, + 4.8959e-01, 2.5653e-03, 2.4848e-01, 1.5578e-01, + 4.8467e-01, 1.6759e-01, 3.4517e-01, 7.1360e-01, + 6.6365e-01, 8.6784e-01, 3.2600e-02, 5.1813e-01, + 9.7823e-01, 7.1991e-01, 4.8388e-01, 2.9200e-01, + 8.8445e-01, 4.6157e-01, 8.8424e-01, 1.6935e-02, + 9.5479e-01, 2.4626e-01, 3.2601e-01, 7.5883e-01, + 5.4595e-01, 7.4329e-01, 2.7699e-01, 2.6074e-02, + 5.7500e-02, 4.7050e-01, 9.2467e-01, 3.9975e-01, + 2.3830e-03, 8.9591e-02, 6.9714e-01, 9.9067e-01, + 7.3989e-01, 6.1702e-01, 7.4358e-02, 5.0241e-01, + 2.6956e-01, 5.2080e-01, 5.8244e-01, 3.8613e-01, + 7.5686e-01, 1.4862e-01, 4.2541e-01, 4.0442e-01, + 8.6544e-01, 6.5414e-01, 6.0983e-01, 7.9228e-01, + 3.3659e-01, 7.4508e-01, 7.8118e-02, 8.0894e-01, + 4.3755e-01, 9.6531e-01, 2.7971e-01, 1.6120e-01, + 5.6311e-01, 5.2545e-01, 7.0385e-01, 6.7045e-01, + 7.3439e-01, 7.2749e-01, 8.4642e-01, 6.6381e-01, + 5.4125e-01, 9.6001e-01, 8.5499e-01, 3.4083e-01, + 2.9676e-01, 8.8005e-01, 5.5461e-01, 5.2255e-01, + 9.3939e-01, 3.7880e-01, 8.1994e-01, 9.8494e-01, + 5.2830e-01, 7.1390e-01, 9.6245e-01, 2.9338e-01, + 5.6169e-01, 4.6562e-01, 6.2625e-02, 6.0261e-02, + 9.7711e-01, 2.6813e-01, 5.6608e-01, 4.6741e-01, + 4.7599e-01, 5.1983e-01, 8.9555e-02, 7.8497e-01, + 2.6173e-01, 7.2181e-01, 3.4515e-01, 6.2851e-01, + 9.5570e-01, 8.7618e-01, 8.6800e-01, 7.7736e-02, + 9.9029e-02, 6.6877e-01, 4.5136e-01, 4.8328e-01, + 2.1995e-01, 5.3975e-01, 3.0857e-01, 2.9108e-01, + 7.3244e-01, 9.4763e-01, 5.5523e-01, 5.3691e-01, + 7.5862e-01, 8.8397e-01, 9.9768e-01, 8.5263e-01, + 9.2202e-01, 8.9888e-01, 7.2998e-01, 6.1755e-01, + 3.5453e-01, 1.6461e-02, 5.6335e-01, 2.6537e-01, + 6.4050e-01, 7.1585e-01, 5.1312e-01, 
2.6271e-01, + 2.6827e-01, 3.0050e-01, 2.6003e-01, 6.8870e-01, + 7.9350e-01, 1.0130e-01, 3.5592e-01, 2.7398e-01, + 3.6442e-01, 5.2514e-01, 5.7656e-01, 9.2620e-02, + 5.0659e-03, 8.0383e-01, 5.7759e-02, 6.4201e-01, + 9.8499e-01, 9.0458e-01, 7.2209e-01, 6.9173e-01, + 1.9235e-01, 1.3523e-01, 2.5251e-01, 4.3643e-01, + 8.3314e-01, 4.1119e-01, 3.5737e-01, 1.4247e-01, + 4.8132e-01, 5.8598e-01, 2.4472e-01, 3.7236e-01, + 7.5418e-01, 7.1316e-01, 6.7213e-01, 3.2274e-01, + 2.1236e-01, 1.6570e-01, 6.9203e-01, 5.6668e-01, + 4.3002e-01, 5.8654e-01, 1.9247e-01, 2.0992e-01, + 6.1953e-01, 8.9656e-01, 1.5362e-01, 4.5501e-01, + 1.1305e-01, 1.5899e-01, 7.4418e-01, 8.8016e-01, + 4.7632e-01, 9.5522e-01, 4.7101e-01, 2.8447e-01, + 3.8704e-01, 8.4621e-01, 5.5945e-01, 5.1588e-02, + 7.6581e-02, 1.0814e-01, 6.1450e-01, 2.6132e-02, + 6.9289e-01, 1.3583e-01, 9.2203e-01, 2.6151e-02, + 2.8007e-01, 5.5882e-01, 6.1010e-02, 2.0485e-01, + 7.9007e-01, 1.2710e-01, 5.2246e-01, 1.4720e-01, + 9.0733e-02, 4.9059e-01, 5.5002e-01, 6.9097e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.6225, 0.9544, 0.4026, ..., 0.0448, 0.5411, 0.3638]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.603893041610718 seconds + +[40.44, 38.96, 39.19, 39.04, 39.08, 44.06, 39.06, 39.22, 39.35, 39.22] [65.88] -13.035122156143188 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 362205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.420795440673828, 'TIME_S_1KI': 0.02877043508696409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 858.7538476467132, 'W': 65.88} -[40.1, 39.18, 39.44, 38.84, 39.26, 38.78, 38.88, 39.27, 39.5, 39.18, 39.55, 39.36, 39.34, 38.83, 39.02, 39.04, 38.92, 39.01, 41.73, 38.96] -707.295 -35.36475 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 362205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.420795440673828, 'TIME_S_1KI': 0.02877043508696409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 858.7538476467132, 'W': 65.88, 'J_1KI': 2.3709055580312617, 'W_1KI': 0.18188594856503915, 'W_D': 30.515249999999995, 'J_D': 397.7700113752484, 'W_D_1KI': 0.08424856089783408, 'J_D_1KI': 0.00023259911071860987} +12.98733901977539 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 363895, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.603893041610718, 'TIME_S_1KI': 0.02913998005361634, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.6058946228027, 'W': 65.88} +[40.44, 38.96, 39.19, 39.04, 39.08, 44.06, 39.06, 39.22, 39.35, 39.22, 40.11, 39.4, 44.83, 38.97, 39.52, 39.43, 38.91, 39.25, 40.22, 38.97] +717.8599999999999 +35.892999999999994 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 363895, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.603893041610718, 'TIME_S_1KI': 0.02913998005361634, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.6058946228027, 'W': 65.88, 'J_1KI': 2.351243887997369, 'W_1KI': 0.18104123442201733, 'W_D': 
29.987000000000002, 'J_D': 389.45133518600466, 'W_D_1KI': 0.08240563898926888, 'J_D_1KI': 0.00022645444149897327} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.json index 44b0019..4e28c71 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 288650, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.542811155319214, "TIME_S_1KI": 0.036524549299564224, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 872.9266104698181, "W": 66.0, "J_1KI": 3.0241697920312425, "W_1KI": 0.22865061493157804, "W_D": 30.442750000000004, "J_D": 402.64070561939485, "W_D_1KI": 0.10546596223800452, "J_D_1KI": 0.00036537662303136846} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 288310, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.569395542144775, "TIME_S_1KI": 0.03665982984337961, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 854.3347172141076, "W": 65.79, "J_1KI": 2.9632503805421515, "W_1KI": 0.22819187679927858, "W_D": 30.430750000000003, "J_D": 395.167140839994, "W_D_1KI": 0.10554871492490722, "J_D_1KI": 0.0003660945334012252} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.output index ec44bfd..917d1d1 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014984130859375} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01482248306274414} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 5000, 5000, 5000]), - col_indices=tensor([5455, 6096, 7620, ..., 8334, 1515, 9556]), - values=tensor([0.3295, 0.9699, 0.1085, ..., 0.1358, 0.2338, 0.9968]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 4998, 4999, 5000]), + col_indices=tensor([9316, 7684, 6114, ..., 4797, 7259, 5250]), + values=tensor([0.2419, 0.8047, 0.2870, ..., 0.0548, 0.5069, 0.3709]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.0525, 0.2160, 0.5197, ..., 0.9729, 0.0490, 0.2973]) +tensor([0.9532, 0.7093, 0.0307, ..., 0.3681, 0.3122, 0.7966]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.014984130859375 seconds +Time: 0.01482248306274414 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '70074', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.549025535583496} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '70838', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.579850435256958} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), - col_indices=tensor([3016, 6372, 3284, ..., 9865, 6936, 5486]), - values=tensor([0.2981, 0.0450, 0.6145, ..., 0.3998, 0.9695, 0.2536]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4999, 5000, 5000]), + col_indices=tensor([1876, 3469, 1702, ..., 5400, 8275, 8211]), + values=tensor([0.2455, 0.2245, 0.5061, ..., 0.2422, 0.9251, 0.2949]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.7165, 0.5810, 0.9668, ..., 0.2745, 0.2690, 0.0815]) +tensor([0.1745, 0.7196, 0.3176, ..., 0.6620, 0.3214, 0.1312]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 2.549025535583496 seconds +Time: 2.579850435256958 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '288650', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.542811155319214} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '288310', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.569395542144775} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), - col_indices=tensor([4532, 8082, 1862, ..., 2662, 2473, 4062]), - values=tensor([0.2290, 0.0977, 0.7273, ..., 0.3334, 0.1586, 0.6128]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 4998, 5000]), + col_indices=tensor([1328, 8485, 8629, ..., 6703, 7772, 8456]), + values=tensor([0.6066, 0.9936, 0.2970, ..., 0.6397, 0.3202, 0.6133]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.0256, 0.2861, 0.7976, ..., 0.1212, 0.6310, 0.3680]) +tensor([0.1682, 0.1407, 0.6835, ..., 0.2839, 0.3386, 0.4473]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.542811155319214 seconds +Time: 10.569395542144775 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), - col_indices=tensor([4532, 8082, 1862, ..., 2662, 2473, 4062]), - values=tensor([0.2290, 0.0977, 0.7273, ..., 0.3334, 0.1586, 0.6128]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 4998, 5000]), + col_indices=tensor([1328, 8485, 8629, ..., 6703, 7772, 8456]), + values=tensor([0.6066, 0.9936, 0.2970, ..., 0.6397, 0.3202, 0.6133]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.0256, 0.2861, 0.7976, ..., 0.1212, 0.6310, 0.3680]) +tensor([0.1682, 0.1407, 0.6835, ..., 0.2839, 0.3386, 0.4473]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.542811155319214 seconds +Time: 10.569395542144775 seconds -[40.67, 38.88, 39.03, 38.97, 39.55, 38.85, 39.41, 38.82, 39.41, 38.86] -[66.0] -13.226160764694214 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 288650, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.542811155319214, 'TIME_S_1KI': 0.036524549299564224, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 872.9266104698181, 'W': 66.0} -[40.67, 38.88, 39.03, 38.97, 39.55, 38.85, 39.41, 38.82, 39.41, 38.86, 39.72, 38.82, 38.97, 39.27, 39.27, 39.76, 38.89, 38.89, 45.01, 39.44] -711.145 -35.557249999999996 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 288650, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.542811155319214, 'TIME_S_1KI': 0.036524549299564224, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 872.9266104698181, 'W': 66.0, 'J_1KI': 3.0241697920312425, 'W_1KI': 0.22865061493157804, 'W_D': 30.442750000000004, 'J_D': 402.64070561939485, 'W_D_1KI': 0.10546596223800452, 'J_D_1KI': 0.00036537662303136846} +[40.32, 38.86, 38.98, 39.06, 39.07, 38.89, 40.67, 39.24, 39.32, 39.29] +[65.79] +12.985783815383911 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 288310, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.569395542144775, 'TIME_S_1KI': 0.03665982984337961, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 854.3347172141076, 'W': 65.79} +[40.32, 38.86, 38.98, 39.06, 39.07, 38.89, 40.67, 39.24, 39.32, 39.29, 40.79, 39.23, 38.81, 40.07, 38.85, 38.94, 39.21, 39.01, 39.18, 39.19] +707.1850000000001 +35.35925 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 288310, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.569395542144775, 'TIME_S_1KI': 0.03665982984337961, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 854.3347172141076, 'W': 65.79, 'J_1KI': 2.9632503805421515, 'W_1KI': 0.22819187679927858, 'W_D': 30.430750000000003, 'J_D': 395.167140839994, 'W_D_1KI': 0.10554871492490722, 'J_D_1KI': 0.0003660945334012252} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.json index c479701..ca5cee1 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 203, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.8138906955719, "TIME_S_1KI": 53.27039751513251, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1609.7031775331498, "W": 81.98, "J_1KI": 7929.572303118964, "W_1KI": 403.8423645320197, "W_D": 30.52650000000002, "J_D": 599.3974633930925, "W_D_1KI": 150.37684729064048, "J_D_1KI": 740.7726467519235} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 196, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.51739501953125, "TIME_S_1KI": 53.66017867107781, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1496.0578999996185, "W": 76.6, "J_1KI": 7632.9484693858085, "W_1KI": 390.81632653061223, "W_D": 40.777249999999995, "J_D": 796.4115796704888, "W_D_1KI": 208.047193877551, "J_D_1KI": 1061.4652748854642} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.output index e1438f6..23fce00 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.424488306045532} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.346543788909912} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 44, 99, ..., 24999917, - 24999946, 25000000]), - col_indices=tensor([ 4827, 10869, 14232, ..., 471243, 483745, - 496563]), - values=tensor([0.8207, 0.6147, 0.2995, ..., 0.3197, 0.5880, 0.5650]), +tensor(crow_indices=tensor([ 0, 44, 102, ..., 24999894, + 24999947, 25000000]), + col_indices=tensor([ 15535, 26957, 66973, ..., 491167, 491900, + 494241]), + values=tensor([0.1063, 0.6792, 0.3672, ..., 0.6022, 0.3872, 0.3278]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.7008, 0.9045, 0.7559, ..., 0.2377, 0.3193, 0.3380]) +tensor([0.6784, 0.4374, 0.6493, ..., 0.2948, 0.2498, 0.9677]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 5.424488306045532 seconds +Time: 5.346543788909912 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '193', '-ss', '500000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.982287168502808} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '196', '-ss', '500000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.51739501953125} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 53, 103, ..., 24999914, - 24999954, 25000000]), - col_indices=tensor([ 956, 25275, 30712, ..., 470941, 489379, - 489461]), - values=tensor([0.2897, 0.9352, 0.3996, ..., 0.3187, 0.8556, 0.7054]), +tensor(crow_indices=tensor([ 0, 41, 93, ..., 24999884, + 24999939, 25000000]), + col_indices=tensor([ 6721, 21503, 60987, ..., 477439, 477750, + 498940]), + values=tensor([0.1736, 0.2933, 0.3189, ..., 0.5685, 0.2838, 0.4704]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.6888, 0.9226, 0.2376, ..., 0.2155, 0.1168, 0.1817]) +tensor([0.1626, 0.9151, 0.1900, ..., 0.6685, 0.4369, 0.9470]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,20 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 9.982287168502808 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '203', '-ss', '500000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.8138906955719} +Time: 10.51739501953125 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 106, ..., 24999892, - 24999946, 25000000]), - col_indices=tensor([ 18694, 24514, 28811, ..., 477104, 482132, - 483877]), - values=tensor([0.0999, 0.2209, 0.5662, ..., 0.8643, 0.1918, 0.8434]), +tensor(crow_indices=tensor([ 0, 41, 93, ..., 24999884, + 24999939, 25000000]), + col_indices=tensor([ 6721, 21503, 60987, ..., 477439, 477750, + 498940]), + values=tensor([0.1736, 0.2933, 0.3189, ..., 0.5685, 0.2838, 0.4704]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.7305, 0.0261, 0.0866, ..., 0.4657, 0.9743, 0.1757]) +tensor([0.1626, 0.9151, 0.1900, ..., 0.6685, 0.4369, 0.9470]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -59,31 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.8138906955719 seconds +Time: 10.51739501953125 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 106, ..., 24999892, - 24999946, 25000000]), - col_indices=tensor([ 18694, 24514, 28811, ..., 477104, 482132, - 483877]), - values=tensor([0.0999, 0.2209, 0.5662, ..., 0.8643, 0.1918, 0.8434]), - size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.7305, 0.0261, 0.0866, ..., 0.4657, 0.9743, 0.1757]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 25000000 -Density: 0.0001 -Time: 10.8138906955719 seconds - -[48.25, 66.08, 64.62, 65.48, 60.76, 53.27, 65.63, 72.69, 68.45, 64.01] -[81.98] -19.635315656661987 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 203, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.8138906955719, 'TIME_S_1KI': 53.27039751513251, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1609.7031775331498, 'W': 81.98} -[48.25, 66.08, 64.62, 65.48, 60.76, 53.27, 65.63, 72.69, 68.45, 64.01, 67.95, 64.4, 68.38, 60.78, 45.98, 39.37, 40.58, 39.5, 41.17, 43.65] -1029.0699999999997 -51.453499999999984 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 203, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.8138906955719, 'TIME_S_1KI': 53.27039751513251, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1609.7031775331498, 'W': 81.98, 'J_1KI': 7929.572303118964, 'W_1KI': 403.8423645320197, 'W_D': 30.52650000000002, 'J_D': 599.3974633930925, 'W_D_1KI': 150.37684729064048, 'J_D_1KI': 740.7726467519235} +[40.69, 39.06, 39.39, 44.33, 39.17, 39.34, 39.99, 39.57, 39.73, 39.44] +[76.6] +19.530781984329224 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 196, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 
'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.51739501953125, 'TIME_S_1KI': 53.66017867107781, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1496.0578999996185, 'W': 76.6} +[40.69, 39.06, 39.39, 44.33, 39.17, 39.34, 39.99, 39.57, 39.73, 39.44, 39.7, 39.13, 39.59, 39.43, 39.58, 39.07, 40.03, 39.42, 39.65, 40.12] +716.4549999999999 +35.82275 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 196, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.51739501953125, 'TIME_S_1KI': 53.66017867107781, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1496.0578999996185, 'W': 76.6, 'J_1KI': 7632.9484693858085, 'W_1KI': 390.81632653061223, 'W_D': 40.777249999999995, 'J_D': 796.4115796704888, 'W_D_1KI': 208.047193877551, 'J_D_1KI': 1061.4652748854642} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json index 9445831..212baaa 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1357, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.301345348358154, "TIME_S_1KI": 7.591264073955898, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 974.2936608052254, "W": 72.47, "J_1KI": 717.9761686110725, "W_1KI": 53.404568901989684, "W_D": 37.03175, "J_D": 497.85841415101294, "W_D_1KI": 27.28942520265291, "J_D_1KI": 20.11011437188866} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1357, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.248650312423706, "TIME_S_1KI": 7.5524320651611685, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1022.5026794672012, "W": 76.5, "J_1KI": 753.5023430119389, "W_1KI": 56.37435519528371, "W_D": 40.9355, "J_D": 547.1458618997335, "W_D_1KI": 30.166175386882827, "J_D_1KI": 22.23004818488049} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output index d835f69..72e126c 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.7734920978546143} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.7732758522033691} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 2499990, +tensor(crow_indices=tensor([ 0, 2, 8, ..., 2499985, 2499995, 2500000]), - col_indices=tensor([ 99860, 161360, 168008, ..., 375780, 443860, - 468048]), - values=tensor([0.7731, 0.7975, 0.7314, ..., 0.7653, 0.4860, 0.2739]), + col_indices=tensor([165838, 337341, 38617, ..., 154134, 161638, + 492399]), + values=tensor([0.6433, 0.5779, 0.8791, ..., 0.7984, 0.6408, 0.2732]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.9501, 0.6169, 0.8449, ..., 0.9228, 0.9726, 0.5004]) +tensor([0.3555, 0.2811, 0.1161, ..., 0.1603, 0.0646, 0.8128]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 0.7734920978546143 seconds +Time: 0.7732758522033691 seconds ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1357', '-ss', '500000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.301345348358154} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.248650312423706} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 10, ..., 2499991, - 2499994, 2500000]), - col_indices=tensor([238281, 305722, 262347, ..., 326599, 364388, - 410788]), - values=tensor([0.2261, 0.8621, 0.1222, ..., 0.7643, 0.7262, 0.6796]), +tensor(crow_indices=tensor([ 0, 7, 10, ..., 2499987, + 2499995, 2500000]), + col_indices=tensor([ 15665, 113136, 205303, ..., 435688, 452170, + 496606]), + values=tensor([0.2643, 0.1016, 0.1243, ..., 0.0819, 0.7361, 0.8109]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7756, 0.5142, 0.7476, ..., 0.1970, 0.9731, 0.3396]) +tensor([0.7385, 0.0080, 0.1289, ..., 0.5753, 0.7010, 0.8136]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.301345348358154 seconds +Time: 10.248650312423706 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 10, ..., 2499991, - 2499994, 2500000]), - col_indices=tensor([238281, 305722, 262347, ..., 326599, 364388, - 410788]), - values=tensor([0.2261, 0.8621, 0.1222, ..., 0.7643, 0.7262, 0.6796]), +tensor(crow_indices=tensor([ 0, 7, 10, ..., 2499987, + 2499995, 2500000]), + col_indices=tensor([ 15665, 113136, 205303, ..., 435688, 452170, + 496606]), + values=tensor([0.2643, 0.1016, 0.1243, ..., 0.0819, 0.7361, 0.8109]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7756, 0.5142, 0.7476, ..., 0.1970, 0.9731, 0.3396]) +tensor([0.7385, 0.0080, 0.1289, ..., 0.5753, 0.7010, 0.8136]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.301345348358154 seconds +Time: 10.248650312423706 seconds -[40.45, 39.15, 39.07, 39.07, 39.08, 39.31, 39.47, 39.12, 39.58, 39.08] -[72.47] -13.444096326828003 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.301345348358154, 'TIME_S_1KI': 7.591264073955898, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 974.2936608052254, 'W': 72.47} -[40.45, 39.15, 39.07, 39.07, 39.08, 39.31, 39.47, 39.12, 39.58, 39.08, 39.87, 38.95, 39.89, 38.93, 39.13, 39.06, 39.33, 39.5, 40.16, 40.53] -708.7649999999999 -35.43825 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.301345348358154, 'TIME_S_1KI': 7.591264073955898, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 974.2936608052254, 'W': 72.47, 'J_1KI': 717.9761686110725, 'W_1KI': 53.404568901989684, 'W_D': 37.03175, 'J_D': 497.85841415101294, 'W_D_1KI': 27.28942520265291, 'J_D_1KI': 20.11011437188866} +[40.95, 39.1, 39.07, 39.06, 39.74, 39.13, 39.28, 40.31, 39.91, 39.48] +[76.5] +13.366048097610474 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.248650312423706, 'TIME_S_1KI': 7.5524320651611685, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1022.5026794672012, 'W': 76.5} +[40.95, 39.1, 39.07, 39.06, 39.74, 39.13, 39.28, 40.31, 39.91, 39.48, 40.65, 39.55, 39.56, 40.76, 39.09, 39.0, 39.34, 39.02, 39.1, 39.46] +711.2900000000001 +35.5645 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.248650312423706, 'TIME_S_1KI': 7.5524320651611685, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1022.5026794672012, 'W': 76.5, 'J_1KI': 753.5023430119389, 'W_1KI': 56.37435519528371, 'W_D': 40.9355, 'J_D': 547.1458618997335, 'W_D_1KI': 30.166175386882827, 'J_D_1KI': 22.23004818488049} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.json index 016557b..e33888d 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 374, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.481700897216797, "TIME_S_1KI": 28.025938227852397, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1245.1822829723358, "W": 76.84, "J_1KI": 3329.3643929741597, "W_1KI": 205.45454545454547, "W_D": 40.990750000000006, "J_D": 664.2498134532572, "W_D_1KI": 109.60093582887701, "J_D_1KI": 293.0506305584947} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 367, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.611295223236084, "TIME_S_1KI": 28.91361096249614, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1245.9440638375281, "W": 76.77, "J_1KI": 3394.9429532357717, "W_1KI": 209.1825613079019, "W_D": 41.03874999999999, "J_D": 666.041252439916, "W_D_1KI": 111.82220708446864, "J_D_1KI": 304.69266235550043} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.output index 9fc6dc1..57a96f1 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.8034374713897705} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.85378098487854} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 31, 56, ..., 12499945, - 12499972, 12500000]), - col_indices=tensor([ 16534, 21956, 27589, ..., 400032, 455487, - 480702]), - values=tensor([0.5221, 0.3710, 0.3411, ..., 0.2701, 0.3669, 0.2928]), +tensor(crow_indices=tensor([ 0, 27, 47, ..., 12499954, + 12499979, 12500000]), + col_indices=tensor([ 3727, 4076, 5662, ..., 478918, 481556, + 488053]), + values=tensor([0.7295, 0.5074, 0.0514, ..., 0.7151, 0.3203, 0.1737]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.5983, 0.4656, 0.7235, ..., 0.5590, 0.7340, 0.5167]) +tensor([0.3494, 0.9842, 0.6199, ..., 0.2555, 0.1047, 0.7043]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 2.8034374713897705 seconds +Time: 2.85378098487854 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '374', '-ss', '500000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.481700897216797} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '367', '-ss', '500000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.611295223236084} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 20, 47, ..., 12499941, - 12499971, 12500000]), - col_indices=tensor([ 37298, 48174, 79945, ..., 425979, 429124, - 477898]), - values=tensor([0.8892, 0.8073, 0.3867, ..., 0.6750, 0.3130, 0.8587]), +tensor(crow_indices=tensor([ 0, 22, 47, ..., 12499952, + 12499976, 12500000]), + col_indices=tensor([ 40600, 52241, 60280, ..., 462159, 480272, + 492405]), + values=tensor([0.1478, 0.5447, 0.8572, ..., 0.8924, 0.4722, 0.1649]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.8772, 0.7830, 0.9014, ..., 0.3941, 0.0151, 0.6871]) +tensor([0.6153, 0.4540, 0.3734, ..., 0.2866, 0.0219, 0.2153]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.481700897216797 seconds +Time: 10.611295223236084 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 20, 47, ..., 12499941, - 12499971, 12500000]), - col_indices=tensor([ 37298, 48174, 79945, ..., 425979, 429124, - 477898]), - values=tensor([0.8892, 0.8073, 0.3867, ..., 0.6750, 0.3130, 0.8587]), +tensor(crow_indices=tensor([ 0, 22, 47, ..., 12499952, + 12499976, 12500000]), + col_indices=tensor([ 40600, 52241, 60280, ..., 462159, 480272, + 492405]), + values=tensor([0.1478, 0.5447, 0.8572, ..., 0.8924, 0.4722, 0.1649]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.8772, 0.7830, 0.9014, ..., 0.3941, 0.0151, 0.6871]) +tensor([0.6153, 0.4540, 0.3734, ..., 0.2866, 0.0219, 0.2153]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.481700897216797 seconds +Time: 10.611295223236084 seconds -[39.77, 39.61, 39.09, 39.46, 39.43, 39.2, 39.07, 38.94, 44.28, 39.72] -[76.84] -16.20487093925476 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 374, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.481700897216797, 'TIME_S_1KI': 28.025938227852397, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.1822829723358, 'W': 76.84} -[39.77, 39.61, 39.09, 39.46, 39.43, 39.2, 39.07, 38.94, 44.28, 39.72, 40.11, 44.49, 39.49, 38.95, 39.21, 39.06, 39.21, 38.96, 39.02, 39.43] -716.9849999999999 -35.84925 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 374, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.481700897216797, 'TIME_S_1KI': 28.025938227852397, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.1822829723358, 'W': 76.84, 'J_1KI': 3329.3643929741597, 'W_1KI': 205.45454545454547, 'W_D': 40.990750000000006, 'J_D': 664.2498134532572, 'W_D_1KI': 109.60093582887701, 'J_D_1KI': 293.0506305584947} +[39.76, 39.03, 39.59, 44.84, 39.46, 39.92, 39.47, 39.26, 39.26, 38.97] +[76.77] +16.229569673538208 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.611295223236084, 'TIME_S_1KI': 28.91361096249614, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.9440638375281, 'W': 76.77} +[39.76, 39.03, 39.59, 44.84, 39.46, 39.92, 39.47, 39.26, 39.26, 38.97, 39.57, 39.12, 39.61, 41.28, 39.15, 38.89, 39.25, 38.84, 38.93, 39.15] +714.625 +35.73125 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 367, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.611295223236084, 'TIME_S_1KI': 28.91361096249614, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.9440638375281, 'W': 76.77, 'J_1KI': 3394.9429532357717, 'W_1KI': 209.1825613079019, 'W_D': 41.03874999999999, 'J_D': 666.041252439916, 'W_D_1KI': 111.82220708446864, 'J_D_1KI': 304.69266235550043} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json index 
e80dbeb..131a7d9 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 15655, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.832781791687012, "TIME_S_1KI": 0.691969453317599, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 866.9169475746155, "W": 65.84, "J_1KI": 55.37636202967841, "W_1KI": 4.205685084637497, "W_D": 30.269999999999996, "J_D": 398.5658566689491, "W_D_1KI": 1.933567550303417, "J_D_1KI": 0.12351118175045782} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 15623, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.467512369155884, "TIME_S_1KI": 0.6700065524646921, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 863.5877960205078, "W": 65.57, "J_1KI": 55.27669436219085, "W_1KI": 4.197017218203929, "W_D": 29.687999999999995, "J_D": 391.0049487304687, "W_D_1KI": 1.900275235230109, "J_D_1KI": 0.12163318410229208} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output index da8990f..90bfa00 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.08056378364562988} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.08077287673950195} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 7, ..., 249990, 249996, +tensor(crow_indices=tensor([ 0, 1, 5, ..., 249990, 249993, 250000]), - col_indices=tensor([28795, 30379, 41102, ..., 5633, 6424, 22447]), - values=tensor([0.9841, 0.4564, 0.4138, ..., 0.4352, 0.7831, 0.6427]), + col_indices=tensor([49667, 2847, 11171, ..., 33210, 37627, 43823]), + values=tensor([0.9038, 0.9262, 0.9323, ..., 0.5485, 0.0953, 0.5119]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.6096, 0.2856, 0.5951, ..., 0.5564, 0.0665, 0.9869]) +tensor([0.9974, 0.7759, 0.0777, ..., 0.5009, 0.8013, 0.6672]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.08056378364562988 seconds +Time: 0.08077287673950195 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '13033', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.741129398345947} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12999', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.735981464385986} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 13, ..., 249987, 249991, +tensor(crow_indices=tensor([ 0, 8, 10, ..., 249991, 249996, 250000]), - col_indices=tensor([ 370, 1086, 2786, ..., 43615, 44396, 45243]), - values=tensor([0.9664, 0.8693, 0.7422, ..., 0.8293, 0.8225, 0.1476]), + col_indices=tensor([ 1241, 1316, 11333, ..., 39099, 41509, 45214]), + values=tensor([0.5750, 0.7297, 0.2943, ..., 0.4119, 0.3205, 0.0800]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.9116, 0.5607, 0.8635, ..., 0.8139, 0.6651, 0.4589]) +tensor([0.2599, 0.8475, 0.1715, ..., 0.6309, 0.7959, 0.3593]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 8.741129398345947 seconds +Time: 8.735981464385986 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15655', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.832781791687012} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15623', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.467512369155884} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 10, ..., 249991, 249998, +tensor(crow_indices=tensor([ 0, 6, 9, ..., 249988, 249995, 250000]), - col_indices=tensor([12466, 31687, 41380, ..., 43099, 30794, 44210]), - values=tensor([0.5535, 0.2801, 0.3869, ..., 0.8607, 0.0342, 0.7001]), + col_indices=tensor([16459, 17260, 17489, ..., 12154, 40898, 45606]), + values=tensor([0.4663, 0.9387, 0.6654, ..., 0.8623, 0.7205, 0.6266]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.1141, 0.0022, 0.7559, ..., 0.9683, 0.9705, 0.8203]) +tensor([0.6508, 0.5612, 0.5925, ..., 0.2779, 0.1556, 0.3568]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.832781791687012 seconds +Time: 10.467512369155884 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 10, ..., 249991, 249998, +tensor(crow_indices=tensor([ 0, 6, 9, ..., 249988, 249995, 250000]), - col_indices=tensor([12466, 31687, 41380, ..., 43099, 30794, 44210]), - values=tensor([0.5535, 0.2801, 0.3869, ..., 0.8607, 0.0342, 0.7001]), + col_indices=tensor([16459, 17260, 17489, ..., 12154, 40898, 45606]), + values=tensor([0.4663, 0.9387, 0.6654, ..., 0.8623, 0.7205, 0.6266]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.1141, 0.0022, 0.7559, ..., 0.9683, 0.9705, 0.8203]) +tensor([0.6508, 0.5612, 0.5925, ..., 0.2779, 0.1556, 0.3568]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.832781791687012 seconds +Time: 10.467512369155884 seconds -[39.71, 38.91, 39.08, 39.03, 39.04, 38.89, 39.49, 39.37, 39.35, 39.31] -[65.84] -13.167025327682495 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15655, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.832781791687012, 'TIME_S_1KI': 0.691969453317599, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 866.9169475746155, 'W': 65.84} -[39.71, 38.91, 39.08, 39.03, 39.04, 38.89, 39.49, 39.37, 39.35, 39.31, 40.75, 39.44, 39.59, 39.09, 39.39, 39.11, 38.98, 38.78, 44.38, 39.19] -711.4000000000001 -35.57000000000001 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15655, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.832781791687012, 'TIME_S_1KI': 0.691969453317599, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 866.9169475746155, 'W': 65.84, 'J_1KI': 55.37636202967841, 'W_1KI': 4.205685084637497, 'W_D': 30.269999999999996, 'J_D': 398.5658566689491, 'W_D_1KI': 1.933567550303417, 'J_D_1KI': 0.12351118175045782} +[39.73, 38.98, 39.07, 38.93, 44.53, 39.49, 39.88, 39.5, 39.09, 39.05] +[65.57] +13.17047119140625 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15623, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.467512369155884, 'TIME_S_1KI': 0.6700065524646921, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 863.5877960205078, 'W': 65.57} +[39.73, 38.98, 39.07, 38.93, 44.53, 39.49, 39.88, 39.5, 39.09, 39.05, 39.67, 44.12, 39.07, 39.4, 39.1, 38.88, 39.62, 39.01, 40.11, 39.27] +717.64 +35.882 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15623, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.467512369155884, 'TIME_S_1KI': 0.6700065524646921, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 863.5877960205078, 'W': 65.57, 'J_1KI': 55.27669436219085, 'W_1KI': 4.197017218203929, 'W_D': 29.687999999999995, 'J_D': 391.0049487304687, 'W_D_1KI': 1.900275235230109, 'J_D_1KI': 0.12163318410229208} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json index 4e67596..f14cabf 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3401, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.135668277740479, "TIME_S_1KI": 2.9802023751074618, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 929.8197621202468, "W": 70.57, "J_1KI": 273.39599003829665, "W_1KI": 20.74977947662452, "W_D": 35.09774999999999, "J_D": 462.44270307433595, "W_D_1KI": 10.319832402234633, "J_D_1KI": 3.0343523676079487} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3392, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.097672939300537, "TIME_S_1KI": 2.9769082957843565, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 929.1495970487595, "W": 70.5, "J_1KI": 273.92381988465786, "W_1KI": 20.784198113207548, "W_D": 35.409, "J_D": 466.6703274028301, "W_D_1KI": 10.438974056603774, "J_D_1KI": 3.0775277289515843} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output index 73dd96f..dcd3c8d 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.3087193965911865} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.3095407485961914} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 47, 100, ..., 2499908, +tensor(crow_indices=tensor([ 0, 45, 100, ..., 2499911, 2499955, 2500000]), - col_indices=tensor([ 1811, 3820, 5210, ..., 47398, 47518, 48036]), - values=tensor([0.8154, 0.8090, 0.3024, ..., 0.4722, 0.9116, 0.7561]), + col_indices=tensor([ 793, 1442, 1576, ..., 46694, 46780, 48162]), + values=tensor([0.1199, 0.8412, 0.1837, ..., 0.3360, 0.3672, 0.4593]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4260, 0.8795, 0.3202, ..., 0.3159, 0.0406, 0.9752]) +tensor([0.5589, 0.6592, 0.7865, ..., 0.1895, 0.4954, 0.2523]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 0.3087193965911865 seconds +Time: 0.3095407485961914 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3401', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.135668277740479} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3392', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.097672939300537} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 41, 88, ..., 2499883, - 2499951, 2500000]), - col_indices=tensor([ 638, 2365, 2400, ..., 44467, 46636, 49496]), - values=tensor([0.8518, 0.8769, 0.3572, ..., 0.1360, 0.1673, 0.1097]), +tensor(crow_indices=tensor([ 0, 53, 117, ..., 2499918, + 2499954, 2500000]), + col_indices=tensor([ 468, 760, 1747, ..., 45524, 47546, 48668]), + values=tensor([0.6628, 0.6341, 0.7211, ..., 0.0837, 0.7250, 0.8485]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0408, 0.6133, 0.1624, ..., 0.7272, 0.2583, 0.9038]) +tensor([0.8535, 0.5776, 0.2662, ..., 0.1591, 0.5920, 0.7540]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.135668277740479 seconds +Time: 10.097672939300537 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 41, 88, ..., 2499883, - 2499951, 2500000]), - col_indices=tensor([ 638, 2365, 2400, ..., 44467, 46636, 49496]), - values=tensor([0.8518, 0.8769, 0.3572, ..., 0.1360, 0.1673, 0.1097]), +tensor(crow_indices=tensor([ 0, 53, 117, ..., 2499918, + 2499954, 2500000]), + col_indices=tensor([ 468, 760, 1747, ..., 45524, 47546, 48668]), + values=tensor([0.6628, 0.6341, 0.7211, ..., 0.0837, 0.7250, 0.8485]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0408, 0.6133, 0.1624, ..., 0.7272, 0.2583, 0.9038]) +tensor([0.8535, 0.5776, 0.2662, ..., 0.1591, 0.5920, 0.7540]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.135668277740479 seconds +Time: 10.097672939300537 seconds -[40.86, 44.19, 38.96, 38.91, 39.18, 38.84, 39.26, 38.86, 39.41, 39.32] -[70.57] -13.17585039138794 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.135668277740479, 'TIME_S_1KI': 2.9802023751074618, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 929.8197621202468, 'W': 70.57} -[40.86, 44.19, 38.96, 38.91, 39.18, 38.84, 39.26, 38.86, 39.41, 39.32, 40.03, 39.02, 38.84, 38.98, 38.82, 40.3, 38.82, 38.8, 38.79, 38.72] -709.445 -35.47225 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.135668277740479, 'TIME_S_1KI': 2.9802023751074618, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 929.8197621202468, 'W': 70.57, 'J_1KI': 273.39599003829665, 'W_1KI': 20.74977947662452, 'W_D': 35.09774999999999, 'J_D': 462.44270307433595, 'W_D_1KI': 10.319832402234633, 'J_D_1KI': 3.0343523676079487} +[39.8, 38.8, 38.97, 38.81, 38.82, 39.18, 38.86, 39.35, 38.86, 38.73] +[70.5] +13.179426908493042 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3392, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.097672939300537, 'TIME_S_1KI': 2.9769082957843565, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 929.1495970487595, 'W': 70.5} +[39.8, 38.8, 38.97, 38.81, 38.82, 39.18, 38.86, 39.35, 38.86, 38.73, 39.46, 38.86, 38.88, 38.78, 39.03, 38.81, 39.0, 39.3, 38.88, 39.27] +701.82 +35.091 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3392, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.097672939300537, 'TIME_S_1KI': 2.9769082957843565, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 929.1495970487595, 'W': 70.5, 'J_1KI': 273.92381988465786, 'W_1KI': 20.784198113207548, 'W_D': 35.409, 'J_D': 466.6703274028301, 'W_D_1KI': 10.438974056603774, 'J_D_1KI': 3.0775277289515843} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.json index 05ef34d..a468b12 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 277, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.601917266845703, "TIME_S_1KI": 38.27406955539965, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1502.4378191399574, "W": 76.47, "J_1KI": 5423.963245992626, "W_1KI": 276.0649819494585, "W_D": 40.946749999999994, "J_D": 804.497786986649, "W_D_1KI": 147.82220216606495, "J_D_1KI": 533.6541594442779} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 275, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.616158246994019, "TIME_S_1KI": 38.60421180725098, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1499.0550417494774, "W": 76.47, "J_1KI": 5451.109242725372, "W_1KI": 278.07272727272726, "W_D": 41.14999999999999, "J_D": 806.6707855105399, "W_D_1KI": 149.6363636363636, "J_D_1KI": 544.1322314049586} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.output index e414a83..7563719 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.789712905883789} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.8045737743377686} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 551, 1060, ..., 24998953, - 24999494, 25000000]), - col_indices=tensor([ 18, 53, 90, ..., 49926, 49944, 49970]), - values=tensor([0.6546, 0.1735, 0.7966, ..., 0.3203, 0.8871, 0.0598]), +tensor(crow_indices=tensor([ 0, 499, 1030, ..., 24998983, + 24999514, 25000000]), + col_indices=tensor([ 49, 197, 201, ..., 49750, 49850, 49875]), + values=tensor([0.4470, 0.5814, 0.1175, ..., 0.1864, 0.8893, 0.5129]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.4855, 0.9619, 0.0930, ..., 0.6959, 0.6112, 0.3764]) +tensor([0.3234, 0.6125, 0.0098, ..., 0.2894, 0.7048, 0.9675]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 3.789712905883789 seconds +Time: 3.8045737743377686 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '277', '-ss', '50000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.601917266845703} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '275', '-ss', '50000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.616158246994019} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 523, 1012, ..., 24998990, - 24999509, 25000000]), - col_indices=tensor([ 171, 246, 332, ..., 49640, 49825, 49863]), - values=tensor([0.1620, 0.2511, 0.7784, ..., 0.0916, 0.2856, 0.2435]), +tensor(crow_indices=tensor([ 0, 461, 959, ..., 24999025, + 24999527, 25000000]), + col_indices=tensor([ 62, 115, 156, ..., 49765, 49783, 49784]), + values=tensor([0.8824, 0.1334, 0.7937, ..., 0.1865, 0.7051, 0.7927]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.9840, 0.8151, 0.6106, ..., 0.4542, 0.6992, 0.9833]) +tensor([0.8987, 0.8601, 0.5995, ..., 0.4986, 0.6962, 0.5259]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 10.601917266845703 seconds +Time: 10.616158246994019 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 523, 1012, ..., 24998990, - 24999509, 25000000]), - col_indices=tensor([ 171, 246, 332, ..., 49640, 49825, 49863]), - values=tensor([0.1620, 0.2511, 0.7784, ..., 0.0916, 0.2856, 0.2435]), +tensor(crow_indices=tensor([ 0, 461, 959, ..., 24999025, + 24999527, 25000000]), + col_indices=tensor([ 62, 115, 156, ..., 49765, 49783, 49784]), + values=tensor([0.8824, 0.1334, 0.7937, ..., 0.1865, 0.7051, 0.7927]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.9840, 0.8151, 0.6106, ..., 0.4542, 0.6992, 0.9833]) +tensor([0.8987, 0.8601, 0.5995, ..., 0.4986, 0.6962, 0.5259]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 10.601917266845703 seconds +Time: 10.616158246994019 seconds -[39.75, 38.88, 39.04, 38.94, 38.9, 44.6, 39.87, 39.01, 39.47, 38.87] +[39.75, 39.76, 39.16, 38.84, 39.46, 40.31, 39.13, 39.17, 38.94, 39.02] [76.47] -19.647414922714233 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 277, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.601917266845703, 'TIME_S_1KI': 38.27406955539965, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.4378191399574, 'W': 76.47} -[39.75, 38.88, 39.04, 38.94, 38.9, 44.6, 39.87, 39.01, 39.47, 38.87, 39.65, 39.46, 38.97, 39.37, 39.36, 38.88, 39.18, 38.94, 39.03, 38.86] -710.465 -35.523250000000004 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 277, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.601917266845703, 'TIME_S_1KI': 38.27406955539965, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.4378191399574, 'W': 76.47, 'J_1KI': 5423.963245992626, 'W_1KI': 276.0649819494585, 'W_D': 40.946749999999994, 'J_D': 804.497786986649, 'W_D_1KI': 147.82220216606495, 'J_D_1KI': 533.6541594442779} +19.60317826271057 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 275, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.616158246994019, 'TIME_S_1KI': 38.60421180725098, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1499.0550417494774, 'W': 76.47} +[39.75, 39.76, 39.16, 38.84, 39.46, 40.31, 39.13, 39.17, 38.94, 39.02, 39.62, 38.87, 39.31, 38.86, 39.4, 39.85, 38.89, 38.85, 38.98, 38.85] +706.4000000000001 +35.32000000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 275, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.616158246994019, 'TIME_S_1KI': 38.60421180725098, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1499.0550417494774, 'W': 76.47, 'J_1KI': 5451.109242725372, 'W_1KI': 278.07272727272726, 'W_D': 41.14999999999999, 'J_D': 806.6707855105399, 'W_D_1KI': 149.6363636363636, 'J_D_1KI': 544.1322314049586} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..c434506 --- /dev/null +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 18.401755332946777, "TIME_S_1KI": 184.01755332946777, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3785.108492267132, "W": 75.15, "J_1KI": 37851.08492267132, "W_1KI": 751.5, "W_D": 39.52975000000001, "J_D": 1991.009879204214, "W_D_1KI": 395.29750000000007, "J_D_1KI": 3952.975000000001} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..18ebdf4 --- /dev/null +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 18.401755332946777} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2445, 4988, ..., + 124994983, 124997456, 125000000]), + col_indices=tensor([ 8, 12, 48, ..., 49949, 49951, 49988]), + values=tensor([0.9557, 0.9594, 0.4511, ..., 0.5325, 0.7193, 0.5719]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.0493, 0.8291, 0.8551, ..., 0.3701, 0.3910, 0.0950]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 18.401755332946777 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2445, 4988, ..., + 124994983, 124997456, 125000000]), + col_indices=tensor([ 8, 12, 48, ..., 49949, 49951, 49988]), + values=tensor([0.9557, 0.9594, 0.4511, ..., 0.5325, 0.7193, 0.5719]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.0493, 0.8291, 0.8551, ..., 0.3701, 0.3910, 0.0950]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 18.401755332946777 seconds + +[40.13, 39.81, 39.2, 39.05, 39.17, 39.04, 39.25, 39.01, 39.25, 44.34] +[75.15] +50.36737847328186 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 18.401755332946777, 'TIME_S_1KI': 184.01755332946777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3785.108492267132, 'W': 75.15} +[40.13, 39.81, 39.2, 39.05, 39.17, 39.04, 39.25, 39.01, 39.25, 44.34, 41.29, 39.67, 39.65, 39.33, 39.15, 40.25, 39.42, 39.16, 39.57, 39.09] +712.405 +35.62025 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 18.401755332946777, 'TIME_S_1KI': 184.01755332946777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3785.108492267132, 'W': 75.15, 'J_1KI': 37851.08492267132, 'W_1KI': 751.5, 'W_D': 39.52975000000001, 'J_D': 1991.009879204214, 'W_D_1KI': 395.29750000000007, 'J_D_1KI': 3952.975000000001} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json index 41355a8..7be222f 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 35925, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.468754291534424, "TIME_S_1KI": 0.29140582579079816, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.3263425445557, "W": 65.12, "J_1KI": 23.808666459138642, "W_1KI": 1.8126652748782186, "W_D": 29.625000000000007, "J_D": 389.1130666136743, "W_D_1KI": 0.8246346555323593, "J_D_1KI": 0.022954339750378826} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 35826, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.541781902313232, "TIME_S_1KI": 0.2942494808885511, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 853.0015280151367, "W": 65.44, "J_1KI": 23.809566460535272, "W_1KI": 1.8266063752581922, "W_D": 30.0845, "J_D": 392.14737881374356, "W_D_1KI": 0.8397392954837268, "J_D_1KI": 0.023439381887001808} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output index 77e4f27..aa71314 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04216480255126953} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.050371646881103516} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24999, 25000]), - col_indices=tensor([ 8605, 8537, 29290, ..., 9179, 13978, 1469]), - values=tensor([0.2780, 0.8342, 0.8502, ..., 0.9082, 0.5496, 0.9536]), + col_indices=tensor([25964, 21843, 7312, ..., 22871, 36058, 42523]), + values=tensor([0.5989, 0.2502, 0.2611, ..., 0.6108, 0.6850, 0.5455]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7685, 0.9033, 0.7153, ..., 0.8654, 0.8274, 0.9503]) +tensor([0.4790, 0.5854, 0.5560, ..., 0.2153, 0.4221, 0.1522]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.04216480255126953 seconds +Time: 0.050371646881103516 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '24902', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.278246164321899} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20845', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.109277963638306} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([37031, 32096, 18727, ..., 44552, 41451, 6296]), - values=tensor([0.0751, 0.3287, 0.1662, ..., 0.5788, 0.0483, 0.4147]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24998, 24999, 25000]), + col_indices=tensor([14323, 781, 26094, ..., 16343, 47701, 45326]), + values=tensor([0.2851, 0.3280, 0.8584, ..., 0.1683, 0.6282, 0.2761]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5341, 0.4675, 0.3327, ..., 0.1193, 0.3106, 0.6128]) +tensor([0.5463, 0.7584, 0.3297, ..., 0.3885, 0.0995, 0.5723]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 7.278246164321899 seconds +Time: 6.109277963638306 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35925', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.468754291534424} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35826', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.541781902313232} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 24998, 24998, 25000]), - col_indices=tensor([44391, 10770, 45928, ..., 5594, 4079, 17032]), - values=tensor([0.6882, 0.4791, 0.4331, ..., 0.9470, 0.6037, 0.4941]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([10673, 29891, 46300, ..., 43219, 43038, 35784]), + values=tensor([0.4019, 0.4089, 0.6276, ..., 0.0249, 0.8752, 0.4213]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1889, 0.2559, 0.4696, ..., 0.5278, 0.4768, 0.4458]) +tensor([0.3571, 0.8012, 0.5494, ..., 0.5264, 0.9794, 0.7992]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.468754291534424 seconds +Time: 10.541781902313232 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 24998, 24998, 25000]), - col_indices=tensor([44391, 10770, 45928, ..., 5594, 4079, 17032]), - values=tensor([0.6882, 0.4791, 0.4331, ..., 0.9470, 0.6037, 0.4941]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([10673, 29891, 46300, ..., 43219, 43038, 35784]), + values=tensor([0.4019, 0.4089, 0.6276, ..., 0.0249, 0.8752, 0.4213]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1889, 0.2559, 0.4696, ..., 0.5278, 0.4768, 0.4458]) +tensor([0.3571, 0.8012, 0.5494, ..., 0.5264, 0.9794, 0.7992]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.468754291534424 seconds +Time: 10.541781902313232 seconds -[39.59, 39.87, 39.3, 39.3, 39.78, 38.89, 39.35, 40.52, 39.13, 38.83] -[65.12] -13.134618282318115 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35925, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.468754291534424, 'TIME_S_1KI': 0.29140582579079816, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.3263425445557, 'W': 65.12} -[39.59, 39.87, 39.3, 39.3, 39.78, 38.89, 39.35, 40.52, 39.13, 38.83, 39.59, 39.23, 39.11, 38.91, 39.77, 40.62, 39.49, 39.29, 38.91, 38.85] -709.9 -35.495 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35925, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.468754291534424, 'TIME_S_1KI': 0.29140582579079816, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.3263425445557, 'W': 65.12, 'J_1KI': 23.808666459138642, 'W_1KI': 1.8126652748782186, 'W_D': 29.625000000000007, 'J_D': 389.1130666136743, 'W_D_1KI': 0.8246346555323593, 'J_D_1KI': 0.022954339750378826} +[40.53, 39.16, 39.31, 39.02, 39.02, 38.84, 39.02, 39.47, 39.01, 39.03] +[65.44] +13.03486442565918 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35826, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.541781902313232, 'TIME_S_1KI': 0.2942494808885511, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 853.0015280151367, 'W': 65.44} +[40.53, 39.16, 39.31, 39.02, 39.02, 38.84, 39.02, 39.47, 39.01, 39.03, 40.83, 39.59, 39.16, 39.59, 39.02, 38.83, 38.9, 40.19, 39.35, 38.87] +707.11 +35.3555 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35826, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.541781902313232, 'TIME_S_1KI': 0.2942494808885511, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 853.0015280151367, 'W': 65.44, 'J_1KI': 23.809566460535272, 'W_1KI': 1.8266063752581922, 'W_D': 30.0845, 'J_D': 392.14737881374356, 'W_D_1KI': 0.8397392954837268, 'J_D_1KI': 0.023439381887001808} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.json index 54cfc66..caaa3d4 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 18331, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.45155644416809, "TIME_S_1KI": 0.570157462449844, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 852.8894222688675, "W": 65.62, "J_1KI": 46.52716285357414, "W_1KI": 3.5797283290600626, "W_D": 29.885000000000005, "J_D": 388.42731460690504, "W_D_1KI": 1.6302984016147513, "J_D_1KI": 0.08893668657545967} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 18383, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.474826574325562, "TIME_S_1KI": 0.569810508313418, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 856.8708778381347, "W": 65.6, "J_1KI": 46.612135007242266, "W_1KI": 3.568514388293532, "W_D": 30.095249999999993, "J_D": 393.105842778325, "W_D_1KI": 1.6371239732361418, "J_D_1KI": 0.0890564093584367} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.output index bef46d2..fb843e8 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.0693967342376709} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.06945943832397461} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 5, ..., 124992, 124996, +tensor(crow_indices=tensor([ 0, 0, 4, ..., 124995, 124998, 125000]), - col_indices=tensor([17438, 29688, 13553, ..., 36532, 44163, 44855]), - values=tensor([0.2242, 0.0224, 0.1461, ..., 0.6740, 0.5355, 0.8238]), + col_indices=tensor([ 8346, 9814, 28925, ..., 45142, 46623, 48893]), + values=tensor([0.3255, 0.7993, 0.2066, ..., 0.8340, 0.0401, 0.6472]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.2153, 0.3458, 0.3359, ..., 0.7412, 0.9011, 0.6249]) +tensor([0.4856, 0.9773, 0.6116, ..., 0.9065, 0.3370, 0.6933]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.0693967342376709 seconds +Time: 0.06945943832397461 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15130', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.666174411773682} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15116', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.63381552696228} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 124994, 124997, +tensor(crow_indices=tensor([ 0, 3, 6, ..., 124994, 124997, 125000]), - col_indices=tensor([17329, 36001, 38373, ..., 475, 21379, 35295]), - values=tensor([0.0803, 0.2135, 0.1853, ..., 0.6523, 0.5299, 0.1396]), + col_indices=tensor([ 9498, 22845, 30286, ..., 10404, 16253, 48049]), + values=tensor([0.7056, 0.4273, 0.3718, ..., 0.2477, 0.7014, 0.4740]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.4435, 0.4362, 0.5554, ..., 0.5263, 0.8506, 0.5178]) +tensor([0.1815, 0.9730, 0.6971, ..., 0.9915, 0.0180, 0.6275]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 8.666174411773682 seconds +Time: 8.63381552696228 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18331', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.45155644416809} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18383', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.474826574325562} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 124997, 124997, +tensor(crow_indices=tensor([ 0, 3, 5, ..., 124991, 124998, 125000]), - col_indices=tensor([14185, 16264, 7088, ..., 33383, 46641, 46645]), - values=tensor([0.3059, 0.0880, 0.9320, ..., 0.7602, 0.8512, 0.4645]), + col_indices=tensor([20459, 34680, 37330, ..., 46670, 18109, 44848]), + values=tensor([0.3676, 0.3673, 0.7663, ..., 0.4241, 0.5790, 0.3050]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.5025, 0.7968, 0.2806, ..., 0.1024, 0.8091, 0.6972]) +tensor([0.5597, 0.5275, 0.6777, ..., 0.9335, 0.3355, 0.0117]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.45155644416809 seconds +Time: 10.474826574325562 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 124997, 124997, +tensor(crow_indices=tensor([ 0, 3, 5, ..., 124991, 124998, 125000]), - col_indices=tensor([14185, 16264, 7088, ..., 33383, 46641, 46645]), - values=tensor([0.3059, 0.0880, 0.9320, ..., 0.7602, 0.8512, 0.4645]), + col_indices=tensor([20459, 34680, 37330, ..., 46670, 18109, 44848]), + values=tensor([0.3676, 0.3673, 0.7663, ..., 0.4241, 0.5790, 0.3050]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.5025, 0.7968, 0.2806, ..., 0.1024, 0.8091, 0.6972]) +tensor([0.5597, 0.5275, 0.6777, ..., 0.9335, 0.3355, 0.0117]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.45155644416809 seconds +Time: 10.474826574325562 seconds -[39.86, 38.92, 44.85, 39.01, 39.6, 39.39, 38.97, 39.8, 41.08, 39.48] -[65.62] -12.997400522232056 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 18331, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.45155644416809, 'TIME_S_1KI': 0.570157462449844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.8894222688675, 'W': 65.62} -[39.86, 38.92, 44.85, 39.01, 39.6, 39.39, 38.97, 39.8, 41.08, 39.48, 40.67, 39.02, 39.09, 39.05, 39.17, 38.87, 39.48, 39.3, 39.44, 39.31] -714.7 -35.735 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 18331, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.45155644416809, 'TIME_S_1KI': 0.570157462449844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.8894222688675, 'W': 65.62, 'J_1KI': 46.52716285357414, 'W_1KI': 3.5797283290600626, 'W_D': 29.885000000000005, 'J_D': 388.42731460690504, 'W_D_1KI': 1.6302984016147513, 'J_D_1KI': 0.08893668657545967} +[40.75, 38.89, 41.81, 39.09, 39.01, 39.1, 39.32, 39.89, 39.11, 38.97] +[65.6] +13.062056064605713 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 18383, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.474826574325562, 'TIME_S_1KI': 0.569810508313418, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.8708778381347, 'W': 65.6} +[40.75, 38.89, 41.81, 39.09, 39.01, 39.1, 39.32, 39.89, 39.11, 38.97, 39.62, 38.92, 38.94, 39.58, 39.13, 39.33, 39.44, 39.74, 39.35, 39.55] +710.095 +35.50475 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 18383, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.474826574325562, 'TIME_S_1KI': 0.569810508313418, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.8708778381347, 'W': 65.6, 'J_1KI': 46.612135007242266, 'W_1KI': 3.568514388293532, 'W_D': 30.095249999999993, 'J_D': 393.105842778325, 'W_D_1KI': 1.6371239732361418, 'J_D_1KI': 0.0890564093584367} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json index 2d402b4..e88993f 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json 
+++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 461205, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.546576023101807, "TIME_S_1KI": 0.022867436439548153, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 854.1005449390411, "W": 65.22, "J_1KI": 1.8518891706270337, "W_1KI": 0.1414121702930367, "W_D": 29.99125, "J_D": 392.75594861090184, "W_D_1KI": 0.065028024414306, "J_D_1KI": 0.0001409959224516343} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 466510, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.420932292938232, "TIME_S_1KI": 0.022338068407833127, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 867.0800234699249, "W": 65.24, "J_1KI": 1.8586525979505797, "W_1KI": 0.13984694861846475, "W_D": 30.02799999999999, "J_D": 399.09072570133196, "W_D_1KI": 0.06436732331568452, "J_D_1KI": 0.00013797629914832378} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output index a788520..9e46e33 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0131683349609375} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.013342857360839844} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), - col_indices=tensor([1743, 190, 2771, ..., 2075, 3388, 3957]), - values=tensor([0.1814, 0.3494, 0.7591, ..., 0.1503, 0.3935, 0.5274]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2500, 2500]), + col_indices=tensor([ 467, 3498, 4566, ..., 3900, 759, 3936]), + values=tensor([0.2546, 0.8323, 0.6213, ..., 0.2937, 0.3931, 0.2918]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.3288, 0.8511, 0.6239, ..., 0.9211, 0.6649, 0.5940]) +tensor([0.8048, 0.7031, 0.1236, ..., 0.7176, 0.5601, 0.1346]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.0131683349609375 seconds +Time: 0.013342857360839844 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '79736', '-ss', '5000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.8153021335601807} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '78693', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.771183967590332} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), - col_indices=tensor([3060, 1065, 3686, ..., 959, 268, 4999]), - values=tensor([0.4441, 0.5663, 0.4237, ..., 0.7927, 0.1815, 0.5098]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([4956, 1185, 2120, ..., 4420, 3904, 4456]), + values=tensor([0.8759, 0.0506, 0.8723, ..., 0.0545, 0.1125, 0.7584]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.5827, 0.3654, 0.8856, ..., 0.0793, 0.3634, 0.4632]) +tensor([0.0884, 0.5653, 0.4168, ..., 0.9870, 0.2343, 0.4597]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 1.8153021335601807 seconds +Time: 1.771183967590332 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '461205', '-ss', '5000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.546576023101807} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '466510', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.420932292938232} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), - col_indices=tensor([ 400, 983, 3289, ..., 2520, 735, 710]), - values=tensor([0.5782, 0.5847, 0.2189, ..., 0.6822, 0.4901, 0.5172]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([1055, 721, 2056, ..., 172, 3588, 4261]), + values=tensor([0.6923, 0.0689, 0.1686, ..., 0.9272, 0.3017, 0.9913]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.8041, 0.1477, 0.3593, ..., 0.2372, 0.8803, 0.6540]) +tensor([0.3960, 0.9874, 0.3648, ..., 0.2104, 0.2644, 0.1773]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.546576023101807 seconds +Time: 10.420932292938232 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), - col_indices=tensor([ 400, 983, 3289, ..., 2520, 735, 710]), - values=tensor([0.5782, 0.5847, 0.2189, ..., 0.6822, 0.4901, 0.5172]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([1055, 721, 2056, ..., 172, 3588, 4261]), + values=tensor([0.6923, 0.0689, 0.1686, ..., 0.9272, 0.3017, 0.9913]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.8041, 0.1477, 0.3593, ..., 0.2372, 0.8803, 0.6540]) +tensor([0.3960, 0.9874, 0.3648, ..., 0.2104, 0.2644, 0.1773]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.546576023101807 seconds +Time: 10.420932292938232 seconds -[42.91, 38.91, 39.2, 38.78, 38.91, 38.88, 39.21, 39.32, 39.19, 38.86] -[65.22] -13.09568452835083 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 461205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.546576023101807, 'TIME_S_1KI': 0.022867436439548153, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 854.1005449390411, 'W': 65.22} -[42.91, 38.91, 39.2, 38.78, 38.91, 38.88, 39.21, 39.32, 39.19, 38.86, 39.94, 38.72, 39.13, 38.84, 39.27, 38.74, 39.16, 38.84, 39.21, 38.82] -704.5749999999999 -35.22875 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 461205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.546576023101807, 'TIME_S_1KI': 0.022867436439548153, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 854.1005449390411, 'W': 65.22, 'J_1KI': 1.8518891706270337, 'W_1KI': 0.1414121702930367, 'W_D': 29.99125, 'J_D': 392.75594861090184, 'W_D_1KI': 0.065028024414306, 'J_D_1KI': 0.0001409959224516343} +[40.48, 39.11, 41.11, 39.09, 39.06, 38.72, 38.7, 38.64, 39.16, 39.15] +[65.24] +13.290619611740112 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 466510, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.420932292938232, 'TIME_S_1KI': 0.022338068407833127, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 867.0800234699249, 'W': 65.24} +[40.48, 39.11, 41.11, 39.09, 39.06, 38.72, 38.7, 38.64, 39.16, 39.15, 40.21, 39.12, 38.73, 38.98, 38.96, 38.81, 38.73, 38.84, 39.18, 38.76] +704.24 +35.212 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 466510, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.420932292938232, 'TIME_S_1KI': 0.022338068407833127, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 867.0800234699249, 'W': 65.24, 'J_1KI': 1.8586525979505797, 'W_1KI': 0.13984694861846475, 'W_D': 30.02799999999999, 'J_D': 399.09072570133196, 'W_D_1KI': 0.06436732331568452, 'J_D_1KI': 0.00013797629914832378} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json index 89f214d..22fb7e0 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 244335, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.417778253555298, "TIME_S_1KI": 0.04263727363478543, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.0210603523253, "W": 66.32, "J_1KI": 3.4993801966657467, "W_1KI": 0.27143061779933286, "W_D": 30.753, "J_D": 396.4786289055347, "W_D_1KI": 0.1258640800540242, "J_D_1KI": 0.0005151291466798624} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 246954, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.452702045440674, "TIME_S_1KI": 0.04232651443362194, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 872.7546772527694, "W": 66.74, "J_1KI": 3.534077914319142, "W_1KI": 0.2702527596232497, "W_D": 31.230999999999995, "J_D": 408.4057735283374, "W_D_1KI": 0.12646484770443076, "J_D_1KI": 0.0005120988026289542} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output index b5c1026..4a43c23 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.01594710350036621} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.016002416610717773} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 12, ..., 24992, 24999, 25000]), - col_indices=tensor([ 675, 1985, 2689, ..., 3047, 3313, 3022]), - values=tensor([0.4390, 0.7079, 0.1777, ..., 0.9145, 0.2520, 0.4929]), +tensor(crow_indices=tensor([ 0, 5, 11, ..., 24990, 24996, 25000]), + col_indices=tensor([ 911, 1463, 3145, ..., 1571, 1933, 2090]), + values=tensor([0.0181, 0.5592, 0.9261, ..., 0.7648, 0.3954, 0.5475]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8163, 0.8334, 0.7201, ..., 0.8897, 0.9022, 0.1765]) +tensor([0.4688, 0.8015, 0.1374, ..., 0.6427, 0.0862, 0.7007]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.01594710350036621 seconds +Time: 0.016002416610717773 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65842', '-ss', '5000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.8294758796691895} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65615', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.7898170948028564} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 9, ..., 24993, 24997, 25000]), - col_indices=tensor([ 33, 1908, 3594, ..., 299, 386, 4209]), - values=tensor([0.2193, 0.5619, 0.9883, ..., 0.1847, 0.2793, 0.9697]), +tensor(crow_indices=tensor([ 0, 6, 8, ..., 24994, 24996, 25000]), + col_indices=tensor([ 939, 1014, 1959, ..., 2032, 2340, 3396]), + values=tensor([0.4320, 0.5074, 0.9233, ..., 0.9013, 0.0461, 0.5547]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6444, 0.1854, 0.4415, ..., 0.6088, 0.2901, 0.0299]) +tensor([0.8806, 0.5054, 0.0674, ..., 0.2584, 0.6926, 0.9912]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 2.8294758796691895 seconds +Time: 2.7898170948028564 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '244335', '-ss', '5000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.417778253555298} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '246954', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.452702045440674} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 8, ..., 24990, 24997, 25000]), - col_indices=tensor([2557, 3235, 4228, ..., 486, 3364, 4712]), - values=tensor([0.2079, 0.9240, 0.7430, ..., 0.9071, 0.8940, 0.1396]), +tensor(crow_indices=tensor([ 0, 9, 13, ..., 24993, 24995, 25000]), + col_indices=tensor([ 74, 935, 1178, ..., 2445, 4655, 4967]), + values=tensor([0.5811, 0.4060, 0.5355, ..., 0.4068, 0.4273, 0.6206]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.3103, 0.6909, 0.1567, ..., 0.3064, 0.4398, 0.1480]) +tensor([0.7726, 0.0975, 0.1934, ..., 0.7258, 0.0094, 0.2425]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.417778253555298 seconds +Time: 10.452702045440674 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 8, ..., 24990, 24997, 25000]), - col_indices=tensor([2557, 3235, 4228, ..., 486, 3364, 4712]), - values=tensor([0.2079, 0.9240, 0.7430, ..., 0.9071, 0.8940, 0.1396]), +tensor(crow_indices=tensor([ 0, 9, 13, ..., 24993, 24995, 25000]), + col_indices=tensor([ 74, 935, 1178, ..., 2445, 4655, 4967]), + values=tensor([0.5811, 0.4060, 0.5355, ..., 0.4068, 0.4273, 0.6206]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.3103, 0.6909, 0.1567, ..., 0.3064, 0.4398, 0.1480]) +tensor([0.7726, 0.0975, 0.1934, ..., 0.7258, 0.0094, 0.2425]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.417778253555298 seconds +Time: 10.452702045440674 seconds -[45.51, 38.78, 39.51, 38.99, 39.48, 39.2, 39.32, 39.33, 39.41, 38.75] -[66.32] -12.892356157302856 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 244335, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.417778253555298, 'TIME_S_1KI': 0.04263727363478543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.0210603523253, 'W': 66.32} -[45.51, 38.78, 39.51, 38.99, 39.48, 39.2, 39.32, 39.33, 39.41, 38.75, 39.41, 38.8, 39.14, 39.28, 40.7, 39.8, 39.35, 39.07, 39.32, 40.05] -711.3399999999999 -35.56699999999999 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 244335, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.417778253555298, 'TIME_S_1KI': 0.04263727363478543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.0210603523253, 'W': 66.32, 'J_1KI': 3.4993801966657467, 'W_1KI': 0.27143061779933286, 'W_D': 30.753, 'J_D': 396.4786289055347, 'W_D_1KI': 0.1258640800540242, 'J_D_1KI': 0.0005151291466798624} +[39.42, 38.72, 38.82, 38.89, 39.34, 39.2, 40.59, 38.72, 39.01, 38.68] +[66.74] +13.076935529708862 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 246954, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.452702045440674, 'TIME_S_1KI': 0.04232651443362194, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 872.7546772527694, 'W': 66.74} +[39.42, 38.72, 38.82, 38.89, 39.34, 39.2, 40.59, 38.72, 39.01, 38.68, 39.74, 40.05, 39.22, 38.96, 38.83, 38.91, 39.32, 38.82, 44.18, 39.36] +710.1800000000001 +35.509 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 246954, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.452702045440674, 'TIME_S_1KI': 0.04232651443362194, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 872.7546772527694, 'W': 66.74, 'J_1KI': 3.534077914319142, 'W_1KI': 0.2702527596232497, 'W_D': 31.230999999999995, 'J_D': 408.4057735283374, 'W_D_1KI': 0.12646484770443076, 'J_D_1KI': 0.0005120988026289542} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json index d1e9eee..e3c6133 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 41575, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.472304821014404, "TIME_S_1KI": 0.2518894725439424, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 875.4065453624726, "W": 66.47, "J_1KI": 21.056080465723934, "W_1KI": 1.5987973541791942, "W_D": 30.839750000000002, "J_D": 406.1579510657788, "W_D_1KI": 0.7417859290438966, "J_D_1KI": 0.017842114949943394} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 41474, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.47754454612732, "TIME_S_1KI": 0.2526292266510903, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 870.6883107089997, "W": 66.37, "J_1KI": 20.99359383490861, "W_1KI": 1.600279693301828, "W_D": 30.931000000000004, "J_D": 405.7745990438462, "W_D_1KI": 0.7457925447268169, "J_D_1KI": 0.017982170630438755} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output index 9d6f728..b039ea4 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.03741025924682617} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.03760981559753418} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 50, 95, ..., 249917, 249960, +tensor(crow_indices=tensor([ 0, 44, 97, ..., 249905, 249953, 250000]), - col_indices=tensor([ 56, 131, 133, ..., 4645, 4665, 4841]), - values=tensor([0.2594, 0.3669, 0.3309, ..., 0.9204, 0.7750, 0.3008]), + col_indices=tensor([ 104, 120, 133, ..., 4848, 4973, 4994]), + values=tensor([0.9158, 0.4016, 0.6409, ..., 0.0985, 0.9307, 0.4768]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7286, 0.8651, 0.2961, ..., 0.7120, 0.4132, 0.7079]) +tensor([0.3635, 0.2613, 0.6667, ..., 0.7993, 0.8141, 0.7335]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.03741025924682617 seconds +Time: 0.03760981559753418 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28067', '-ss', '5000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.088342666625977} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27918', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.06800651550293} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 51, 92, ..., 249888, 249951, +tensor(crow_indices=tensor([ 0, 45, 97, ..., 249917, 249970, 250000]), - col_indices=tensor([ 195, 232, 275, ..., 4637, 4801, 4910]), - values=tensor([0.9933, 0.3255, 0.9817, ..., 0.2679, 0.2640, 0.0554]), + col_indices=tensor([ 188, 618, 867, ..., 4726, 4860, 4863]), + values=tensor([0.1821, 0.6463, 0.0831, ..., 0.8462, 0.9621, 0.9280]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.2297, 0.2173, 0.6945, ..., 0.5761, 0.5521, 0.3650]) +tensor([0.3413, 0.0178, 0.3377, ..., 0.2404, 0.9085, 0.3597]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 7.088342666625977 seconds +Time: 7.06800651550293 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '41575', '-ss', '5000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.472304821014404} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '41474', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.47754454612732} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 88, ..., 249902, 249950, +tensor(crow_indices=tensor([ 0, 49, 96, ..., 249910, 249953, 250000]), - col_indices=tensor([ 75, 83, 302, ..., 4746, 4941, 4952]), - values=tensor([0.9930, 0.3893, 0.6584, ..., 0.0382, 0.8338, 0.2904]), + col_indices=tensor([ 53, 94, 136, ..., 4853, 4896, 4944]), + values=tensor([0.5427, 0.3847, 0.9677, ..., 0.3094, 0.3612, 0.4453]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5807, 0.7893, 0.2250, ..., 0.2178, 0.8594, 0.2155]) +tensor([0.3280, 0.3953, 0.4391, ..., 0.5285, 0.6882, 0.9283]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.472304821014404 seconds +Time: 10.47754454612732 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 88, ..., 249902, 249950, +tensor(crow_indices=tensor([ 0, 49, 96, ..., 249910, 249953, 250000]), - col_indices=tensor([ 75, 83, 302, ..., 4746, 4941, 4952]), - values=tensor([0.9930, 0.3893, 0.6584, ..., 0.0382, 0.8338, 0.2904]), + col_indices=tensor([ 53, 94, 136, ..., 4853, 4896, 4944]), + values=tensor([0.5427, 0.3847, 0.9677, ..., 0.3094, 0.3612, 0.4453]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5807, 0.7893, 0.2250, ..., 0.2178, 0.8594, 0.2155]) +tensor([0.3280, 0.3953, 0.4391, ..., 0.5285, 0.6882, 0.9283]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.472304821014404 seconds +Time: 10.47754454612732 seconds -[39.83, 38.85, 39.09, 38.79, 39.53, 38.8, 39.37, 39.26, 39.29, 38.81] -[66.47] -13.169949531555176 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 41575, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.472304821014404, 'TIME_S_1KI': 0.2518894725439424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 875.4065453624726, 'W': 66.47} -[39.83, 38.85, 39.09, 38.79, 39.53, 38.8, 39.37, 39.26, 39.29, 38.81, 39.98, 38.99, 38.83, 39.07, 38.9, 38.8, 38.86, 39.29, 47.91, 39.33] -712.6049999999999 -35.63025 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 41575, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.472304821014404, 'TIME_S_1KI': 0.2518894725439424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 875.4065453624726, 'W': 66.47, 'J_1KI': 21.056080465723934, 'W_1KI': 1.5987973541791942, 'W_D': 30.839750000000002, 'J_D': 406.1579510657788, 'W_D_1KI': 0.7417859290438966, 'J_D_1KI': 0.017842114949943394} +[40.24, 38.82, 38.94, 38.82, 40.71, 39.34, 39.06, 40.43, 39.15, 39.41] +[66.37] +13.11870288848877 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 41474, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.47754454612732, 'TIME_S_1KI': 0.2526292266510903, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 870.6883107089997, 'W': 66.37} +[40.24, 38.82, 38.94, 38.82, 40.71, 39.34, 39.06, 40.43, 39.15, 39.41, 39.5, 39.09, 38.93, 39.1, 40.54, 38.88, 39.1, 39.52, 39.14, 39.27] +708.78 +35.439 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 41474, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.47754454612732, 'TIME_S_1KI': 0.2526292266510903, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 870.6883107089997, 'W': 66.37, 'J_1KI': 20.99359383490861, 'W_1KI': 1.600279693301828, 'W_D': 30.931000000000004, 'J_D': 405.7745990438462, 'W_D_1KI': 0.7457925447268169, 'J_D_1KI': 0.017982170630438755} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json index e407283..3843606 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 8178, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.511467218399048, "TIME_S_1KI": 1.2853347051111579, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 880.7755374526977, "W": 66.16, "J_1KI": 107.70060374818999, "W_1KI": 8.089997554414282, "W_D": 30.69249999999999, "J_D": 408.60343384623513, "W_D_1KI": 3.7530569821472226, "J_D_1KI": 0.4589211276775767} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 8146, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.451539278030396, "TIME_S_1KI": 1.283027164010606, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 878.7411267375946, "W": 66.37, "J_1KI": 107.87394141144054, "W_1KI": 8.147557083231035, "W_D": 30.001250000000006, "J_D": 397.2176017558576, "W_D_1KI": 3.682942548490057, "J_D_1KI": 0.45211668898724} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output index b9e58d8..5956044 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,54 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.14169001579284668} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.14194607734680176} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 246, 493, ..., 1249505, + 1249740, 1250000]), + col_indices=tensor([ 15, 19, 107, ..., 4903, 4917, 4933]), + values=tensor([0.8230, 0.2997, 0.5773, ..., 0.5953, 0.5313, 0.7363]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.8375, 0.6095, 0.7115, ..., 0.7001, 0.7437, 0.1414]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.14194607734680176 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '7397', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.53364109992981} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 264, 530, ..., 1249523, + 1249761, 1250000]), + col_indices=tensor([ 49, 51, 79, ..., 4928, 4960, 4997]), + values=tensor([0.7644, 0.4849, 0.3773, ..., 0.1775, 0.5516, 0.1427]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6007, 0.0753, 0.6199, ..., 0.2126, 0.6661, 0.0074]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 9.53364109992981 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '8146', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.451539278030396} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 264, 521, ..., 1249500, - 1249758, 1250000]), - col_indices=tensor([ 22, 35, 54, ..., 4954, 4963, 4982]), - values=tensor([0.3715, 0.6699, 0.1465, ..., 0.9132, 0.2376, 0.1878]), + 1249752, 1250000]), + col_indices=tensor([ 43, 45, 102, ..., 4936, 4979, 4982]), + values=tensor([0.0537, 0.1465, 0.6339, ..., 0.3428, 0.4774, 0.7151]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.3853, 0.7833, 0.5244, ..., 0.5756, 0.3818, 0.8103]) +tensor([0.1318, 0.3066, 0.8294, ..., 0.3003, 0.7098, 0.9249]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 0.14169001579284668 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '7410', '-ss', '5000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.513462543487549} +Time: 10.451539278030396 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 245, 477, ..., 1249530, - 1249759, 1250000]), - col_indices=tensor([ 15, 78, 164, ..., 4968, 4978, 4992]), - values=tensor([0.5947, 0.7215, 0.2123, ..., 0.7493, 0.6227, 0.0355]), +tensor(crow_indices=tensor([ 0, 264, 521, ..., 1249500, + 1249752, 1250000]), + col_indices=tensor([ 43, 45, 102, ..., 4936, 4979, 4982]), + values=tensor([0.0537, 0.1465, 0.6339, ..., 0.3428, 0.4774, 0.7151]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.4245, 0.9857, 0.9704, ..., 0.1643, 0.0459, 0.0545]) +tensor([0.1318, 0.3066, 0.8294, ..., 0.3003, 0.7098, 0.9249]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,50 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 9.513462543487549 seconds +Time: 10.451539278030396 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '8178', '-ss', '5000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.511467218399048} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 243, 531, ..., 1249480, - 1249728, 1250000]), - col_indices=tensor([ 10, 29, 31, ..., 4961, 4980, 4981]), - values=tensor([0.2878, 0.6436, 0.4714, ..., 0.0074, 0.1096, 0.3758]), - size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.4226, 0.4894, 0.7354, ..., 0.5546, 0.7888, 0.4627]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250000 -Density: 0.05 -Time: 10.511467218399048 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 243, 531, ..., 1249480, - 1249728, 1250000]), - col_indices=tensor([ 10, 29, 31, ..., 4961, 4980, 4981]), - values=tensor([0.2878, 0.6436, 0.4714, ..., 0.0074, 0.1096, 0.3758]), - size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.4226, 0.4894, 0.7354, ..., 0.5546, 0.7888, 0.4627]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250000 -Density: 0.05 -Time: 10.511467218399048 seconds - -[40.21, 39.14, 39.56, 39.02, 39.43, 41.29, 39.49, 39.07, 39.13, 39.19] -[66.16] -13.31281042098999 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 8178, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.511467218399048, 'TIME_S_1KI': 1.2853347051111579, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 880.7755374526977, 'W': 66.16} -[40.21, 39.14, 39.56, 39.02, 39.43, 41.29, 39.49, 39.07, 39.13, 39.19, 40.14, 39.04, 40.64, 38.87, 39.1, 38.93, 39.37, 39.03, 38.8, 39.34] -709.3500000000001 -35.46750000000001 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 8178, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.511467218399048, 'TIME_S_1KI': 1.2853347051111579, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 880.7755374526977, 'W': 66.16, 'J_1KI': 107.70060374818999, 'W_1KI': 8.089997554414282, 'W_D': 30.69249999999999, 'J_D': 408.60343384623513, 'W_D_1KI': 3.7530569821472226, 'J_D_1KI': 0.4589211276775767} +[54.34, 40.8, 39.08, 38.82, 38.81, 39.44, 40.48, 39.19, 39.4, 39.2] +[66.37] +13.240035057067871 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 8146, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.451539278030396, 'TIME_S_1KI': 1.283027164010606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 878.7411267375946, 'W': 66.37} +[54.34, 40.8, 39.08, 38.82, 38.81, 39.44, 40.48, 39.19, 39.4, 39.2, 39.48, 38.91, 38.85, 39.0, 38.87, 39.13, 38.77, 39.03, 39.22, 66.13] +727.375 +36.36875 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 8146, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.451539278030396, 'TIME_S_1KI': 
1.283027164010606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 878.7411267375946, 'W': 66.37, 'J_1KI': 107.87394141144054, 'W_1KI': 8.147557083231035, 'W_D': 30.001250000000006, 'J_D': 397.2176017558576, 'W_D_1KI': 3.682942548490057, 'J_D_1KI': 0.45211668898724} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json index cc784c7..d4894d2 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3463, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.092161417007446, "TIME_S_1KI": 2.9142828232767677, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 888.6800115585327, "W": 67.72, "J_1KI": 256.6214298465298, "W_1KI": 19.555298873808837, "W_D": 32.353, "J_D": 424.56385726451873, "W_D_1KI": 9.34247762056021, "J_D_1KI": 2.697798908622642} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3523, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.174333810806274, "TIME_S_1KI": 2.887974399888241, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 994.8108104133606, "W": 75.11, "J_1KI": 282.37604610086873, "W_1KI": 21.319897814362758, "W_D": 26.86425, "J_D": 355.8094303507805, "W_D_1KI": 7.625390292364462, "J_D_1KI": 2.1644593506569576} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output index 8f92a77..203e1f8 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.30318737030029297} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.2979764938354492} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 515, 1018, ..., 2498999, - 2499493, 2500000]), - col_indices=tensor([ 4, 21, 24, ..., 4955, 4957, 4967]), - values=tensor([0.5325, 0.9134, 0.0336, ..., 0.9679, 0.3618, 0.7033]), +tensor(crow_indices=tensor([ 0, 469, 956, ..., 2498994, + 2499509, 2500000]), + col_indices=tensor([ 5, 10, 14, ..., 4977, 4982, 4995]), + values=tensor([0.8447, 0.4119, 0.6745, ..., 0.6408, 0.0559, 0.2121]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8211, 0.4248, 0.8998, ..., 0.9656, 0.3613, 0.2992]) +tensor([0.8219, 0.5051, 0.6624, ..., 0.8040, 0.6012, 0.0437]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 0.30318737030029297 seconds +Time: 0.2979764938354492 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3463', '-ss', '5000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.092161417007446} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3523', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.174333810806274} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 498, 1000, ..., 2499016, - 2499523, 2500000]), - col_indices=tensor([ 2, 43, 50, ..., 4978, 4986, 4997]), - values=tensor([0.9352, 0.1649, 0.8628, ..., 0.6965, 0.7003, 0.5455]), +tensor(crow_indices=tensor([ 0, 464, 974, ..., 2499027, + 2499519, 2500000]), + col_indices=tensor([ 10, 16, 20, ..., 4979, 4983, 4992]), + values=tensor([0.3603, 0.6762, 0.3613, ..., 0.2093, 0.6480, 0.9505]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0499, 0.6757, 0.4614, ..., 0.0542, 0.7848, 0.0169]) +tensor([0.7173, 0.5165, 0.9096, ..., 0.8929, 0.9385, 0.9563]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.092161417007446 seconds +Time: 10.174333810806274 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 498, 1000, ..., 2499016, - 2499523, 2500000]), - col_indices=tensor([ 2, 43, 50, ..., 4978, 4986, 4997]), - values=tensor([0.9352, 0.1649, 0.8628, ..., 0.6965, 0.7003, 0.5455]), +tensor(crow_indices=tensor([ 0, 464, 974, ..., 2499027, + 2499519, 2500000]), + col_indices=tensor([ 10, 16, 20, ..., 4979, 4983, 4992]), + values=tensor([0.3603, 0.6762, 0.3613, ..., 0.2093, 0.6480, 0.9505]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0499, 0.6757, 0.4614, ..., 0.0542, 0.7848, 0.0169]) +tensor([0.7173, 0.5165, 0.9096, ..., 0.8929, 0.9385, 0.9563]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.092161417007446 seconds +Time: 10.174333810806274 seconds -[40.21, 38.8, 38.93, 38.71, 39.03, 38.9, 38.74, 39.19, 39.25, 38.74] -[67.72] -13.122859001159668 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3463, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.092161417007446, 'TIME_S_1KI': 2.9142828232767677, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.6800115585327, 'W': 67.72} -[40.21, 38.8, 38.93, 38.71, 39.03, 38.9, 38.74, 39.19, 39.25, 38.74, 40.21, 38.79, 39.15, 38.78, 38.85, 38.73, 44.44, 39.11, 38.88, 38.96] -707.3399999999999 -35.367 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3463, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.092161417007446, 'TIME_S_1KI': 2.9142828232767677, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.6800115585327, 'W': 67.72, 'J_1KI': 256.6214298465298, 'W_1KI': 19.555298873808837, 'W_D': 32.353, 'J_D': 424.56385726451873, 'W_D_1KI': 9.34247762056021, 'J_D_1KI': 2.697798908622642} +[64.94, 67.66, 66.1, 67.43, 68.81, 68.75, 66.05, 68.95, 70.43, 66.03] +[75.11] +13.244718551635742 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3523, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.174333810806274, 'TIME_S_1KI': 2.887974399888241, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 994.8108104133606, 'W': 75.11} +[64.94, 67.66, 66.1, 67.43, 68.81, 68.75, 66.05, 68.95, 70.43, 66.03, 40.1, 39.77, 40.59, 38.97, 39.63, 39.07, 38.99, 39.02, 39.56, 39.2] +964.915 +48.24575 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3523, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.174333810806274, 'TIME_S_1KI': 2.887974399888241, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 994.8108104133606, 'W': 75.11, 'J_1KI': 282.37604610086873, 'W_1KI': 21.319897814362758, 'W_D': 26.86425, 'J_D': 355.8094303507805, 'W_D_1KI': 7.625390292364462, 'J_D_1KI': 2.1644593506569576} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.json index b65b863..33229e8 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1741, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.298704862594604, "TIME_S_1KI": 5.915396245028492, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1027.8759766101837, "W": 74.46, "J_1KI": 590.3940129868946, "W_1KI": 42.76852383687535, "W_D": 39.113499999999995, "J_D": 539.9385846245289, "W_D_1KI": 22.466111430212518, "J_D_1KI": 12.904142119593635} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1742, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.279394626617432, "TIME_S_1KI": 5.900915399895196, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1034.989099354744, "W": 74.74, "J_1KI": 594.1384037627693, "W_1KI": 42.904707233065444, "W_D": 38.945249999999994, "J_D": 539.3083920477031, "W_D_1KI": 22.356630309988518, "J_D_1KI": 12.833886515492836} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.output index 987a685..57a2980 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.6030943393707275} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.6026511192321777} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1017, 2015, ..., 4997969, - 4998981, 5000000]), - col_indices=tensor([ 0, 31, 34, ..., 4984, 4989, 4995]), - values=tensor([0.9331, 0.7841, 0.5601, ..., 0.4749, 0.8668, 0.1575]), +tensor(crow_indices=tensor([ 0, 1041, 1982, ..., 4998017, + 4999003, 5000000]), + col_indices=tensor([ 5, 7, 9, ..., 4978, 4988, 4995]), + values=tensor([0.5465, 0.8152, 0.9813, ..., 0.3845, 0.7573, 0.1959]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.1492, 0.3030, 0.4894, ..., 0.2440, 0.4033, 0.0238]) +tensor([0.4559, 0.3153, 0.5904, ..., 0.7273, 0.1313, 0.8409]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 0.6030943393707275 seconds +Time: 0.6026511192321777 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1741', '-ss', '5000', '-sd', '0.2', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.298704862594604} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1742', '-ss', '5000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.279394626617432} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 961, 1935, ..., 4998003, - 4999013, 5000000]), - col_indices=tensor([ 0, 3, 5, ..., 4968, 4970, 4989]), - values=tensor([0.3710, 0.5603, 0.2176, ..., 0.5072, 0.2694, 0.8987]), +tensor(crow_indices=tensor([ 0, 1007, 2030, ..., 4998041, + 4999049, 5000000]), + col_indices=tensor([ 0, 1, 4, ..., 4982, 4988, 4994]), + values=tensor([0.5108, 0.8042, 0.8113, ..., 0.2803, 0.6803, 0.4820]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6029, 0.2962, 0.8345, ..., 0.0907, 0.1121, 0.2666]) +tensor([0.3972, 0.0450, 0.2251, ..., 0.8758, 0.6674, 0.2144]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.298704862594604 seconds +Time: 10.279394626617432 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 961, 1935, ..., 4998003, - 4999013, 5000000]), - col_indices=tensor([ 0, 3, 5, ..., 4968, 4970, 4989]), - values=tensor([0.3710, 0.5603, 0.2176, ..., 0.5072, 0.2694, 0.8987]), +tensor(crow_indices=tensor([ 0, 1007, 2030, ..., 4998041, + 4999049, 5000000]), + col_indices=tensor([ 0, 1, 4, ..., 4982, 4988, 4994]), + values=tensor([0.5108, 0.8042, 0.8113, ..., 0.2803, 0.6803, 0.4820]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6029, 0.2962, 0.8345, ..., 0.0907, 0.1121, 0.2666]) +tensor([0.3972, 0.0450, 0.2251, ..., 0.8758, 0.6674, 0.2144]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.298704862594604 seconds +Time: 10.279394626617432 seconds -[39.56, 39.38, 38.97, 38.88, 38.82, 39.21, 39.23, 39.16, 39.31, 39.25] -[74.46] -13.804404735565186 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1741, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.298704862594604, 'TIME_S_1KI': 5.915396245028492, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1027.8759766101837, 'W': 74.46} -[39.56, 39.38, 38.97, 38.88, 38.82, 39.21, 39.23, 39.16, 39.31, 39.25, 40.48, 40.05, 39.23, 38.85, 39.62, 39.32, 38.87, 39.04, 39.64, 39.41] -706.93 -35.3465 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1741, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.298704862594604, 'TIME_S_1KI': 5.915396245028492, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1027.8759766101837, 'W': 74.46, 'J_1KI': 590.3940129868946, 'W_1KI': 42.76852383687535, 'W_D': 39.113499999999995, 'J_D': 539.9385846245289, 'W_D_1KI': 22.466111430212518, 'J_D_1KI': 12.904142119593635} +[39.81, 39.1, 38.99, 40.23, 39.14, 39.0, 39.11, 38.99, 44.61, 38.94] +[74.74] +13.84786057472229 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1742, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.279394626617432, 'TIME_S_1KI': 5.900915399895196, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1034.989099354744, 'W': 74.74} +[39.81, 39.1, 38.99, 40.23, 39.14, 39.0, 39.11, 38.99, 44.61, 38.94, 39.65, 38.98, 38.94, 38.86, 44.2, 39.35, 39.05, 39.58, 38.86, 39.41] +715.895 +35.79475 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1742, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.279394626617432, 'TIME_S_1KI': 5.900915399895196, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1034.989099354744, 'W': 74.74, 'J_1KI': 594.1384037627693, 'W_1KI': 42.904707233065444, 'W_D': 38.945249999999994, 'J_D': 539.3083920477031, 'W_D_1KI': 22.356630309988518, 'J_D_1KI': 12.833886515492836} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.json index 20f95b6..f0630a3 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1166, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.334495306015015, "TIME_S_1KI": 8.8632035214537, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1095.1619911956786, "W": 75.64, "J_1KI": 939.2469907338582, "W_1KI": 64.8713550600343, "W_D": 40.43425, "J_D": 585.4316993985176, "W_D_1KI": 34.67774442538593, "J_D_1KI": 29.74077566499651} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1170, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.355679035186768, "TIME_S_1KI": 8.851007722381853, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1094.0326872682572, "W": 75.42, "J_1KI": 935.0706728788523, "W_1KI": 64.46153846153845, "W_D": 39.963750000000005, "J_D": 579.7089473059774, "W_D_1KI": 34.157051282051285, "J_D_1KI": 29.194060924830158} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.output index be3ca1f..eff6595 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.9000704288482666} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.8971807956695557} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1542, 2985, ..., 7497037, - 7498525, 7500000]), - col_indices=tensor([ 0, 1, 6, ..., 4988, 4994, 4997]), - values=tensor([0.4057, 0.2930, 0.9549, ..., 0.4574, 0.5414, 0.6416]), +tensor(crow_indices=tensor([ 0, 1483, 2970, ..., 7497043, + 7498532, 7500000]), + col_indices=tensor([ 0, 4, 8, ..., 4996, 4998, 4999]), + values=tensor([0.5744, 0.9844, 0.0965, ..., 0.2441, 0.2287, 0.5506]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.4314, 0.0098, 0.8060, ..., 0.6655, 0.0522, 0.0757]) +tensor([0.1028, 0.0300, 0.5873, ..., 0.0934, 0.0185, 0.3679]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 0.9000704288482666 seconds +Time: 0.8971807956695557 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1166', '-ss', '5000', '-sd', '0.3', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.334495306015015} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1170', '-ss', '5000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.355679035186768} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1525, 3027, ..., 7497015, - 7498498, 7500000]), - col_indices=tensor([ 12, 15, 18, ..., 4991, 4994, 4995]), - values=tensor([0.8648, 0.2387, 0.0206, ..., 0.7504, 0.0755, 0.9898]), +tensor(crow_indices=tensor([ 0, 1495, 2989, ..., 7497030, + 7498472, 7500000]), + col_indices=tensor([ 0, 9, 11, ..., 4987, 4988, 4996]), + values=tensor([0.5267, 0.7493, 0.0080, ..., 0.6762, 0.4849, 0.8981]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.0126, 0.2581, 0.5840, ..., 0.5862, 0.5778, 0.3525]) +tensor([0.2074, 0.3614, 0.1652, ..., 0.2488, 0.9353, 0.1679]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.334495306015015 seconds +Time: 10.355679035186768 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1525, 3027, ..., 7497015, - 7498498, 7500000]), - col_indices=tensor([ 12, 15, 18, ..., 4991, 4994, 4995]), - values=tensor([0.8648, 0.2387, 0.0206, ..., 0.7504, 0.0755, 0.9898]), +tensor(crow_indices=tensor([ 0, 1495, 2989, ..., 7497030, + 7498472, 7500000]), + col_indices=tensor([ 0, 9, 11, ..., 4987, 4988, 4996]), + values=tensor([0.5267, 0.7493, 0.0080, ..., 0.6762, 0.4849, 0.8981]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.0126, 0.2581, 0.5840, ..., 0.5862, 0.5778, 0.3525]) +tensor([0.2074, 0.3614, 0.1652, ..., 0.2488, 0.9353, 0.1679]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.334495306015015 seconds +Time: 10.355679035186768 seconds -[41.12, 38.83, 38.85, 38.96, 39.38, 38.75, 39.14, 38.88, 38.85, 38.77] -[75.64] -14.478609085083008 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1166, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.334495306015015, 'TIME_S_1KI': 8.8632035214537, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1095.1619911956786, 'W': 75.64} -[41.12, 38.83, 38.85, 38.96, 39.38, 38.75, 39.14, 38.88, 38.85, 38.77, 40.18, 38.79, 39.32, 38.91, 39.22, 38.92, 39.3, 39.28, 39.3, 38.8] -704.115 -35.20575 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1166, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.334495306015015, 'TIME_S_1KI': 8.8632035214537, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1095.1619911956786, 'W': 75.64, 'J_1KI': 939.2469907338582, 'W_1KI': 64.8713550600343, 'W_D': 40.43425, 'J_D': 585.4316993985176, 'W_D_1KI': 34.67774442538593, 'J_D_1KI': 29.74077566499651} +[41.13, 39.47, 39.12, 39.58, 39.41, 39.34, 39.14, 39.18, 39.03, 38.88] +[75.42] +14.505869626998901 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.355679035186768, 'TIME_S_1KI': 8.851007722381853, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1094.0326872682572, 'W': 75.42} +[41.13, 39.47, 39.12, 39.58, 39.41, 39.34, 39.14, 39.18, 39.03, 38.88, 39.75, 39.09, 39.44, 39.56, 39.53, 39.42, 39.86, 39.14, 39.21, 39.45] +709.125 +35.45625 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.355679035186768, 'TIME_S_1KI': 8.851007722381853, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1094.0326872682572, 'W': 75.42, 'J_1KI': 935.0706728788523, 'W_1KI': 64.46153846153845, 'W_D': 39.963750000000005, 'J_D': 579.7089473059774, 'W_D_1KI': 34.157051282051285, 'J_D_1KI': 29.194060924830158} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.json index 973144c..10a3a18 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 704, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.429930925369263, "TIME_S_1KI": 14.815242791717703, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1157.2951932907106, "W": 76.7, "J_1KI": 1643.8852177424865, "W_1KI": 108.94886363636364, "W_D": 41.2785, "J_D": 622.8345454530717, "W_D_1KI": 58.63423295454546, "J_D_1KI": 83.2872627195248} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 701, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.335088968276978, "TIME_S_1KI": 14.743350881992836, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1160.8654913902283, "W": 77.0, "J_1KI": 1656.0135397863455, "W_1KI": 109.84308131241085, "W_D": 40.2995, "J_D": 607.562322990656, "W_D_1KI": 57.488587731811705, "J_D_1KI": 82.00939762027348} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.output index 0f653eb..f7bd8e4 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 1.4912726879119873} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 1.496537685394287} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1951, 3944, ..., 9995965, - 9997988, 10000000]), - col_indices=tensor([ 6, 14, 15, ..., 4996, 4997, 4998]), - values=tensor([0.7071, 0.0905, 0.6037, ..., 0.4689, 0.3914, 0.3516]), +tensor(crow_indices=tensor([ 0, 2015, 4077, ..., 9996098, + 9998057, 10000000]), + col_indices=tensor([ 1, 12, 18, ..., 4995, 4997, 4998]), + values=tensor([0.7308, 0.3114, 0.6641, ..., 0.6125, 0.8831, 0.5870]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7487, 0.9804, 0.4485, ..., 0.3781, 0.3818, 0.0598]) +tensor([0.5034, 0.4347, 0.2307, ..., 0.8205, 0.5767, 0.4764]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 1.4912726879119873 seconds +Time: 1.496537685394287 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '704', '-ss', '5000', '-sd', '0.4', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.429930925369263} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '701', '-ss', '5000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.335088968276978} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1962, 3997, ..., 9995983, - 9998031, 10000000]), - col_indices=tensor([ 2, 3, 4, ..., 4989, 4991, 4999]), - values=tensor([0.3156, 0.5704, 0.4238, ..., 0.9413, 0.7746, 0.6098]), +tensor(crow_indices=tensor([ 0, 2022, 3993, ..., 9996030, + 9998001, 10000000]), + col_indices=tensor([ 3, 4, 6, ..., 4993, 4995, 4999]), + values=tensor([0.7874, 0.2302, 0.6902, ..., 0.9237, 0.4350, 0.1889]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3215, 0.3275, 0.6323, ..., 0.0024, 0.8893, 0.0654]) +tensor([0.0065, 0.1220, 0.5563, ..., 0.5954, 0.2879, 0.4390]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.429930925369263 seconds +Time: 10.335088968276978 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1962, 3997, ..., 9995983, - 9998031, 10000000]), - col_indices=tensor([ 2, 3, 4, ..., 4989, 4991, 4999]), - values=tensor([0.3156, 0.5704, 0.4238, ..., 0.9413, 0.7746, 0.6098]), +tensor(crow_indices=tensor([ 0, 2022, 3993, ..., 9996030, + 9998001, 10000000]), + col_indices=tensor([ 3, 4, 6, ..., 4993, 4995, 4999]), + values=tensor([0.7874, 0.2302, 0.6902, ..., 0.9237, 0.4350, 0.1889]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3215, 0.3275, 0.6323, ..., 0.0024, 0.8893, 0.0654]) +tensor([0.0065, 0.1220, 0.5563, ..., 0.5954, 0.2879, 0.4390]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.429930925369263 seconds +Time: 10.335088968276978 seconds -[40.15, 39.37, 39.68, 38.87, 39.22, 39.13, 41.47, 38.91, 39.0, 39.01] -[76.7] -15.088594436645508 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 704, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.429930925369263, 'TIME_S_1KI': 14.815242791717703, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1157.2951932907106, 'W': 76.7} -[40.15, 39.37, 39.68, 38.87, 39.22, 39.13, 41.47, 38.91, 39.0, 39.01, 40.04, 39.56, 38.96, 39.33, 38.96, 38.95, 39.21, 38.91, 38.9, 40.8] -708.4300000000001 -35.4215 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 704, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.429930925369263, 'TIME_S_1KI': 14.815242791717703, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1157.2951932907106, 'W': 76.7, 'J_1KI': 1643.8852177424865, 'W_1KI': 108.94886363636364, 'W_D': 41.2785, 'J_D': 622.8345454530717, 'W_D_1KI': 58.63423295454546, 'J_D_1KI': 83.2872627195248} +[47.96, 39.51, 39.1, 38.93, 39.13, 39.04, 39.5, 38.9, 39.29, 44.42] +[77.0] +15.076175212860107 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 701, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.335088968276978, 'TIME_S_1KI': 14.743350881992836, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1160.8654913902283, 'W': 77.0} +[47.96, 39.51, 39.1, 38.93, 39.13, 39.04, 39.5, 38.9, 39.29, 44.42, 40.01, 39.61, 39.18, 38.98, 52.5, 47.1, 39.47, 39.0, 39.03, 39.09] +734.01 +36.7005 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 701, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.335088968276978, 'TIME_S_1KI': 14.743350881992836, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1160.8654913902283, 'W': 77.0, 'J_1KI': 1656.0135397863455, 'W_1KI': 109.84308131241085, 'W_D': 40.2995, 'J_D': 607.562322990656, 'W_D_1KI': 57.488587731811705, 'J_D_1KI': 82.00939762027348} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.json index 70ebf3a..bce4b14 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.json +++ 
b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 569, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.483191013336182, "TIME_S_1KI": 18.42388578793705, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1197.8375442028046, "W": 76.6, "J_1KI": 2105.162643590166, "W_1KI": 134.62214411247803, "W_D": 41.0435, "J_D": 641.8204340142012, "W_D_1KI": 72.13268892794376, "J_D_1KI": 126.77098229867093} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 566, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.359947919845581, "TIME_S_1KI": 18.303794911387953, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1202.7482558536528, "W": 76.69, "J_1KI": 2124.996918469351, "W_1KI": 135.49469964664308, "W_D": 41.312749999999994, "J_D": 647.9180858914851, "W_D_1KI": 72.99072438162543, "J_D_1KI": 128.9588769993382} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.output index d4faa36..36b5688 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 1.8439099788665771} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 1.8520567417144775} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2531, 5020, ..., 12494917, - 12497450, 12500000]), - col_indices=tensor([ 0, 1, 9, ..., 4992, 4994, 4999]), - values=tensor([0.6676, 0.2754, 0.2712, ..., 0.4447, 0.2547, 0.8500]), +tensor(crow_indices=tensor([ 0, 2533, 5033, ..., 12494920, + 12497517, 12500000]), + col_indices=tensor([ 3, 11, 12, ..., 4993, 4996, 4999]), + values=tensor([0.5117, 0.6458, 0.5264, ..., 0.9665, 0.6027, 0.6828]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9338, 0.0413, 0.5968, ..., 0.6366, 0.2029, 0.7249]) +tensor([0.3100, 0.6010, 0.6518, ..., 0.6095, 0.4947, 0.0178]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,20 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 1.8439099788665771 seconds +Time: 1.8520567417144775 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '569', '-ss', '5000', '-sd', '0.5', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.483191013336182} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '566', '-ss', '5000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.359947919845581} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2491, 4957, ..., 12495037, - 12497545, 12500000]), - col_indices=tensor([ 0, 2, 5, ..., 4995, 4998, 4999]), - values=tensor([0.5758, 0.7291, 0.3910, ..., 0.8483, 0.9816, 0.9388]), +tensor(crow_indices=tensor([ 0, 2504, 4980, ..., 12495045, + 12497491, 12500000]), + col_indices=tensor([ 1, 4, 6, ..., 4991, 4992, 4994]), + values=tensor([0.0596, 0.1686, 0.0698, ..., 0.4257, 0.0613, 0.7539]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.0835, 0.8623, 0.0534, ..., 0.1116, 0.3605, 0.8512]) +tensor([5.7186e-01, 1.5043e-01, 1.3815e-01, ..., 6.7949e-01, 6.0593e-01, + 4.5055e-04]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +37,17 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.483191013336182 seconds +Time: 10.359947919845581 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2491, 4957, ..., 12495037, - 12497545, 12500000]), - col_indices=tensor([ 0, 2, 5, ..., 4995, 4998, 4999]), - values=tensor([0.5758, 0.7291, 0.3910, ..., 0.8483, 0.9816, 0.9388]), +tensor(crow_indices=tensor([ 0, 2504, 4980, ..., 12495045, + 12497491, 12500000]), + col_indices=tensor([ 1, 4, 6, ..., 4991, 4992, 4994]), + values=tensor([0.0596, 0.1686, 0.0698, ..., 0.4257, 0.0613, 0.7539]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.0835, 0.8623, 0.0534, ..., 0.1116, 0.3605, 0.8512]) +tensor([5.7186e-01, 1.5043e-01, 1.3815e-01, ..., 6.7949e-01, 6.0593e-01, + 4.5055e-04]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +55,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.483191013336182 seconds +Time: 10.359947919845581 seconds -[39.92, 38.82, 39.41, 39.19, 39.02, 39.24, 39.27, 38.82, 38.86, 38.89] -[76.6] -15.637565851211548 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 569, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.483191013336182, 'TIME_S_1KI': 18.42388578793705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1197.8375442028046, 'W': 76.6} -[39.92, 38.82, 39.41, 39.19, 39.02, 39.24, 39.27, 38.82, 38.86, 38.89, 40.74, 39.81, 39.32, 39.32, 39.28, 44.23, 39.36, 38.92, 39.11, 38.75] -711.1299999999999 -35.55649999999999 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 569, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.483191013336182, 'TIME_S_1KI': 18.42388578793705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1197.8375442028046, 'W': 76.6, 'J_1KI': 2105.162643590166, 'W_1KI': 134.62214411247803, 'W_D': 41.0435, 'J_D': 641.8204340142012, 'W_D_1KI': 72.13268892794376, 'J_D_1KI': 126.77098229867093} +[39.65, 38.86, 39.36, 38.85, 40.08, 38.82, 39.08, 40.17, 39.07, 38.83] +[76.69] +15.683247566223145 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.359947919845581, 'TIME_S_1KI': 18.303794911387953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1202.7482558536528, 'W': 76.69} +[39.65, 38.86, 39.36, 38.85, 40.08, 38.82, 39.08, 40.17, 39.07, 38.83, 39.68, 39.84, 39.36, 38.95, 38.9, 39.34, 39.9, 38.81, 39.44, 39.27] +707.5450000000001 +35.377250000000004 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.359947919845581, 'TIME_S_1KI': 18.303794911387953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1202.7482558536528, 'W': 76.69, 'J_1KI': 2124.996918469351, 'W_1KI': 135.49469964664308, 'W_D': 41.312749999999994, 'J_D': 647.9180858914851, 'W_D_1KI': 72.99072438162543, 'J_D_1KI': 128.9588769993382} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json index c3ed634..d7cbf34 100644 --- 
a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 569391, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.146198511123657, "TIME_S_1KI": 0.017819386873209546, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 837.5037681818008, "W": 65.05, "J_1KI": 1.4708763717406856, "W_1KI": 0.11424486864035434, "W_D": 29.76475, "J_D": 383.2143010605574, "W_D_1KI": 0.05227471105092985, "J_D_1KI": 9.180810910416543e-05} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 557047, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.404626369476318, "TIME_S_1KI": 0.018678184012258063, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 821.7697546434403, "W": 64.83, "J_1KI": 1.4752251688698446, "W_1KI": 0.11638156205849774, "W_D": 29.701, "J_D": 376.4828548922539, "W_D_1KI": 0.0533186607234219, "J_D_1KI": 9.571662844144552e-05} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output index 6482c19..23ac588 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,75 +1,75 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.025912761688232422} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.05718708038330078} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([2955, 4682, 3449, 1969, 3854, 251, 2319, 668, 498, - 3572, 3854, 2322, 4794, 541, 1204, 3498, 557, 2286, - 3599, 1633, 263, 572, 401, 2359, 4315, 911, 3580, - 647, 4452, 4876, 2180, 2875, 4244, 3344, 2209, 559, - 1303, 3655, 2944, 2633, 4973, 1628, 4101, 143, 4999, - 4517, 2519, 3173, 1189, 2853, 1263, 1640, 2218, 2187, - 500, 639, 1141, 4234, 755, 459, 531, 3206, 4041, - 4859, 2211, 4404, 1761, 2224, 3743, 1719, 4799, 2055, - 4619, 4316, 4729, 1778, 2613, 3964, 2169, 739, 4154, - 1973, 1793, 4308, 801, 3310, 342, 3095, 520, 3764, - 4097, 3032, 1897, 3411, 2128, 1565, 3684, 2810, 2598, - 1157, 2540, 3590, 4555, 145, 3631, 3914, 308, 84, - 3223, 2928, 1083, 1157, 2416, 2092, 1789, 4234, 2339, - 866, 1332, 582, 480, 407, 2460, 594, 1184, 1302, - 4554, 3210, 212, 2368, 3876, 3742, 2272, 1393, 406, - 80, 3371, 4169, 2118, 452, 2986, 932, 49, 3160, - 2327, 713, 2877, 3484, 2257, 182, 3066, 1789, 610, - 3257, 2119, 4757, 577, 310, 2782, 116, 2689, 2876, - 2666, 144, 4643, 3955, 517, 1439, 3012, 4289, 3484, - 1701, 3447, 1824, 1951, 3681, 4272, 2740, 4790, 1804, - 3833, 1146, 672, 4424, 3920, 2173, 2155, 2090, 4174, - 4084, 560, 3425, 3143, 3470, 1712, 3489, 1811, 1766, - 4872, 3288, 771, 4485, 3269, 2695, 4584, 535, 200, - 2606, 3547, 3737, 102, 696, 271, 2048, 2470, 1740, - 2219, 829, 4785, 1264, 2283, 4517, 1344, 1726, 844, - 2384, 4310, 42, 3459, 2017, 2199, 3093, 4339, 313, - 1332, 303, 3733, 2074, 4094, 4006, 429, 1830, 3447, - 3735, 3446, 1158, 2156, 2171, 2458, 989]), - values=tensor([0.1546, 0.0338, 0.3832, 0.6845, 0.5359, 0.2115, 0.5134, - 0.5236, 0.8368, 0.0286, 0.8691, 0.3848, 0.7412, 0.7376, - 0.0492, 0.2561, 0.4285, 0.6403, 0.4264, 0.9823, 0.2432, - 0.9496, 0.9648, 0.1994, 0.5806, 0.9987, 0.8946, 0.3796, - 0.9742, 0.5421, 0.7745, 0.0707, 0.4271, 0.7771, 0.6813, - 0.4916, 0.5119, 0.7062, 0.2752, 0.9391, 0.5947, 0.6220, - 0.1073, 0.5082, 0.8956, 0.6658, 0.1040, 0.0454, 0.1560, - 0.7150, 0.4739, 0.5494, 0.1706, 0.6723, 0.3127, 0.2460, - 0.1585, 0.6938, 0.6937, 0.2200, 0.6734, 0.5021, 0.7544, - 0.1720, 0.3861, 0.8179, 0.3810, 0.0801, 0.6567, 0.1205, - 0.2925, 0.1255, 0.7882, 0.3753, 0.2290, 0.4877, 0.9183, - 0.7718, 0.3047, 0.6194, 0.2851, 0.3419, 0.7256, 0.2356, - 0.5102, 0.4050, 0.2487, 0.5983, 0.2252, 0.4330, 0.1968, - 0.1306, 0.4006, 0.3092, 0.2645, 0.4635, 0.3587, 0.7536, - 0.7019, 0.0792, 0.1678, 0.7485, 0.9817, 0.9065, 0.9994, - 0.3911, 0.8833, 0.5411, 0.3679, 0.9711, 0.9853, 0.6437, - 0.7861, 0.5048, 0.2591, 0.7726, 0.6174, 0.1004, 0.1489, - 0.6017, 0.8836, 0.5571, 0.7423, 0.4796, 0.5887, 0.7010, - 0.6198, 0.7601, 0.9790, 0.0717, 0.8254, 0.1983, 0.6497, - 0.9092, 0.9977, 0.7279, 0.3767, 0.5564, 0.8901, 0.2896, - 0.4914, 0.7288, 0.7282, 0.1569, 0.7491, 0.2024, 0.5103, - 0.2006, 0.2616, 0.3680, 0.8784, 0.5095, 0.3100, 0.5468, - 0.7473, 0.8067, 0.1566, 0.7922, 0.7836, 0.0734, 0.2895, - 0.0658, 0.3948, 0.1442, 0.9166, 0.4276, 0.4709, 0.0305, - 0.5373, 0.3415, 0.0906, 0.7116, 0.3303, 0.4381, 0.5263, - 0.3654, 0.9325, 0.0370, 0.6385, 0.6709, 0.3766, 0.7549, - 0.4298, 0.9883, 0.6034, 0.5195, 0.8067, 0.8173, 0.5194, - 0.5329, 0.4999, 0.5357, 0.5843, 0.4652, 0.6589, 0.6127, - 0.9625, 0.8533, 0.0618, 0.2250, 0.1341, 0.3851, 0.9176, - 0.6106, 0.0281, 0.2538, 0.5580, 0.0137, 0.4927, 0.5743, - 0.6268, 0.5818, 0.7719, 0.1711, 0.1084, 0.6064, 0.1367, - 0.6312, 0.8778, 0.2960, 0.3372, 0.8224, 0.9699, 0.6070, - 0.2907, 0.4693, 0.5694, 0.7710, 0.6091, 0.5452, 
0.3569, - 0.0226, 0.4986, 0.6727, 0.5738, 0.8629, 0.9155, 0.9081, - 0.9105, 0.9222, 0.7776, 0.3699, 0.9402, 0.5035, 0.4769, - 0.4797, 0.1466, 0.6411, 0.6861, 0.6601]), + col_indices=tensor([3449, 1955, 2868, 1468, 3827, 2362, 3401, 1933, 1944, + 3520, 3549, 1812, 860, 250, 2888, 277, 2361, 3585, + 744, 561, 1238, 1123, 884, 2427, 794, 3047, 466, + 4682, 3653, 2076, 3118, 2632, 368, 3061, 2089, 1222, + 2965, 918, 170, 1048, 4511, 4163, 257, 2801, 3986, + 3835, 775, 1044, 4338, 2013, 3879, 2852, 1425, 1297, + 2403, 788, 4186, 1669, 4776, 4705, 3885, 4018, 3352, + 2146, 2158, 2078, 3194, 2003, 3693, 431, 1501, 4668, + 1519, 1981, 1016, 585, 4044, 4250, 3687, 1357, 1909, + 3559, 4876, 2763, 1797, 3616, 339, 3936, 5, 3745, + 2105, 1992, 41, 3542, 3703, 2369, 4941, 2160, 24, + 4557, 2114, 3304, 165, 3322, 3064, 1297, 2903, 530, + 2101, 3405, 4520, 4259, 2917, 4927, 1067, 4086, 580, + 1082, 3151, 2370, 1375, 194, 4330, 4293, 3802, 4088, + 3982, 646, 2936, 1384, 1462, 3574, 3129, 3873, 2482, + 841, 4734, 930, 4440, 1175, 4642, 3859, 2671, 3396, + 778, 318, 3464, 4974, 4716, 3949, 4132, 183, 903, + 2108, 1952, 1896, 2345, 1004, 1832, 1787, 623, 87, + 1354, 740, 4583, 3647, 2301, 4186, 975, 2791, 250, + 1626, 2133, 2115, 468, 2491, 813, 378, 43, 825, + 2792, 3608, 2818, 2094, 3026, 2392, 1652, 4988, 4624, + 1501, 2659, 3540, 706, 2961, 2895, 3472, 4914, 4295, + 1947, 593, 4846, 660, 538, 4330, 2168, 4711, 2114, + 384, 3453, 2439, 3147, 2761, 2530, 57, 2521, 1000, + 4869, 4178, 3418, 507, 1072, 2220, 4340, 4010, 4397, + 1461, 1413, 4671, 1469, 1785, 774, 3491, 3736, 933, + 371, 3054, 702, 2583, 922, 1484, 109, 1559, 3828, + 314, 4650, 2338, 4026, 2066, 363, 1244]), + values=tensor([0.9425, 0.9521, 0.0556, 0.6017, 0.2927, 0.6616, 0.3585, + 0.4070, 0.7686, 0.7208, 0.2651, 0.4236, 0.6353, 0.2259, + 0.1807, 0.8658, 0.5145, 0.0837, 0.6363, 0.5515, 0.9950, + 0.9332, 0.5727, 0.1890, 0.9419, 0.8780, 0.7061, 0.1908, + 0.3034, 0.5025, 0.4926, 0.4937, 0.9765, 0.2865, 0.3270, + 0.3515, 0.9179, 0.6704, 0.5496, 0.0872, 0.5685, 0.5742, + 0.2181, 0.9505, 0.8351, 0.7950, 0.8018, 0.0170, 0.0765, + 0.3986, 0.5911, 0.1001, 0.3613, 0.7140, 0.1100, 0.0698, + 0.5721, 0.9904, 0.6710, 0.9732, 0.8712, 0.7271, 0.6292, + 0.8982, 0.9228, 0.6494, 0.1191, 0.5216, 0.2869, 0.2640, + 0.6555, 0.3821, 0.0401, 0.5134, 0.8649, 0.0019, 0.3349, + 0.9164, 0.7466, 0.0937, 0.9396, 0.7034, 0.8723, 0.8977, + 0.1578, 0.6323, 0.8416, 0.5422, 0.9027, 0.0700, 0.3175, + 0.0951, 0.9736, 0.5994, 0.5178, 0.8272, 0.1449, 0.4638, + 0.8211, 0.6744, 0.1257, 0.7946, 0.1265, 0.4626, 0.4341, + 0.4690, 0.3190, 0.3779, 0.0504, 0.0855, 0.5792, 0.8575, + 0.4179, 0.5050, 0.6313, 0.9886, 0.9258, 0.2362, 0.6358, + 0.3474, 0.2155, 0.9009, 0.2378, 0.6894, 0.8011, 0.4992, + 0.1437, 0.3214, 0.9712, 0.7491, 0.2335, 0.4135, 0.8252, + 0.2337, 0.4845, 0.8431, 0.3798, 0.3048, 0.3709, 0.1944, + 0.1377, 0.3230, 0.8016, 0.5449, 0.5158, 0.6969, 0.3113, + 0.1127, 0.5762, 0.9504, 0.4152, 0.9679, 0.0045, 0.8544, + 0.0745, 0.7681, 0.4159, 0.0811, 0.6984, 0.1470, 0.5169, + 0.4047, 0.7210, 0.5534, 0.2132, 0.5322, 0.8210, 0.8677, + 0.4980, 0.7144, 0.8495, 0.6392, 0.2231, 0.3694, 0.8734, + 0.7644, 0.4268, 0.1343, 0.5825, 0.7685, 0.2513, 0.8403, + 0.8728, 0.3675, 0.7200, 0.7541, 0.8096, 0.6719, 0.6988, + 0.5208, 0.8688, 0.6226, 0.7903, 0.3447, 0.7321, 0.7214, + 0.5515, 0.1720, 0.8270, 0.5766, 0.3603, 0.7352, 0.4219, + 0.6201, 0.5008, 0.5434, 0.9333, 0.5556, 0.6923, 0.7482, + 0.0125, 0.7765, 0.5129, 0.1430, 0.6425, 0.8783, 0.0143, + 0.1840, 0.4954, 0.0539, 0.9183, 0.6777, 0.0801, 0.8137, 
+ 0.5435, 0.3644, 0.7674, 0.7580, 0.1924, 0.1251, 0.5532, + 0.1834, 0.9137, 0.8028, 0.3376, 0.6738, 0.5666, 0.0953, + 0.8386, 0.3636, 0.0669, 0.7853, 0.9872, 0.9951, 0.6260, + 0.7005, 0.5128, 0.8470, 0.9874, 0.9785]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.1758, 0.5990, 0.4260, ..., 0.6457, 0.1523, 0.4408]) +tensor([0.6607, 0.9449, 0.8972, ..., 0.8025, 0.1142, 0.5030]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -77,80 +77,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.025912761688232422 seconds +Time: 0.05718708038330078 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '40520', '-ss', '5000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.7472190856933594} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18360', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.34607481956481934} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([4979, 4303, 4566, 4762, 4549, 190, 2004, 2509, 751, - 928, 1907, 2345, 3485, 628, 2908, 2256, 1744, 3960, - 753, 741, 1691, 3236, 822, 110, 1716, 2413, 2905, - 1494, 3365, 3162, 4499, 3337, 4292, 2749, 1497, 1083, - 571, 2576, 1389, 2374, 4678, 2621, 3191, 2850, 4442, - 3038, 2838, 3147, 1692, 3297, 1369, 316, 44, 3096, - 1795, 1502, 4078, 1530, 382, 1493, 2347, 369, 4086, - 3463, 1734, 324, 4159, 222, 2163, 3883, 2624, 3179, - 1955, 2358, 4353, 3311, 2924, 323, 448, 10, 1850, - 3616, 4600, 4760, 641, 4296, 2902, 1005, 4495, 3758, - 2177, 1780, 1919, 576, 3690, 2414, 4915, 960, 2888, - 4117, 1616, 3504, 3045, 3420, 3788, 3250, 4185, 565, - 1378, 1958, 1079, 3142, 700, 3775, 2755, 2658, 4044, - 3937, 2159, 2450, 691, 2810, 2854, 3907, 15, 3162, - 4347, 4859, 392, 4662, 1450, 2377, 2601, 2733, 2929, - 1056, 3918, 1597, 324, 1218, 4516, 2665, 4847, 4488, - 1656, 2761, 652, 3181, 916, 67, 4641, 3028, 215, - 2829, 990, 4480, 3427, 3715, 1658, 1471, 2613, 915, - 4016, 2089, 4, 4230, 2407, 4012, 246, 2239, 359, - 1734, 4596, 4776, 1554, 2314, 1733, 3775, 4475, 4112, - 4261, 4903, 3913, 101, 2681, 3353, 525, 3858, 185, - 877, 1621, 4340, 4135, 463, 2930, 2999, 1174, 3548, - 1690, 2905, 3325, 3911, 711, 2787, 2453, 456, 3056, - 3218, 3802, 3733, 687, 2791, 1304, 4779, 2332, 3970, - 155, 2041, 2588, 2371, 3243, 3760, 181, 1764, 909, - 39, 3595, 2215, 3497, 2575, 3579, 257, 3215, 1268, - 660, 3425, 4720, 1730, 3977, 302, 2922, 4097, 2384, - 2222, 3777, 3527, 253, 2444, 2627, 880]), - values=tensor([0.6428, 0.9934, 0.2143, 0.2775, 0.8801, 0.9209, 0.7937, - 0.3950, 0.9558, 0.4336, 0.7173, 0.5611, 0.9142, 0.7220, - 0.9288, 0.0252, 0.1643, 0.3238, 0.3899, 0.9050, 
0.8167, - 0.0316, 0.4642, 0.2775, 0.6749, 0.3101, 0.4910, 0.2421, - 0.2427, 0.1619, 0.6985, 0.1109, 0.5934, 0.6019, 0.5879, - 0.2363, 0.8960, 0.8107, 0.6787, 0.6439, 0.2015, 0.5182, - 0.3918, 0.2235, 0.7280, 0.1655, 0.1523, 0.7307, 0.2277, - 0.0713, 0.3285, 0.3448, 0.9658, 0.6123, 0.3458, 0.8019, - 0.7467, 0.1797, 0.0926, 0.5584, 0.6018, 0.7448, 0.7637, - 0.6467, 0.4119, 0.4865, 0.3304, 0.8004, 0.4028, 0.3316, - 0.4346, 0.2111, 0.8264, 0.5751, 0.1845, 0.5351, 0.6490, - 0.1782, 0.3206, 0.0372, 0.4209, 0.6069, 0.2848, 0.1110, - 0.4496, 0.7402, 0.9814, 0.1676, 0.9158, 0.3694, 0.5027, - 0.9759, 0.3630, 0.0452, 0.6637, 0.4099, 0.7193, 0.9349, - 0.4031, 0.4884, 0.0588, 0.8852, 0.4143, 0.6287, 0.2603, - 0.0585, 0.6828, 0.0068, 0.4013, 0.8395, 0.5456, 0.8367, - 0.2039, 0.1976, 0.9018, 0.5362, 0.0977, 0.9421, 0.0954, - 0.0477, 0.2774, 0.5744, 0.7438, 0.6477, 0.2957, 0.5835, - 0.7384, 0.3101, 0.2011, 0.7432, 0.1000, 0.7629, 0.0287, - 0.1747, 0.7484, 0.6466, 0.2628, 0.6546, 0.9448, 0.6789, - 0.3641, 0.3340, 0.2888, 0.6351, 0.9463, 0.7934, 0.2568, - 0.0882, 0.7783, 0.9130, 0.2947, 0.2702, 0.1584, 0.1104, - 0.9967, 0.4045, 0.5340, 0.4029, 0.5998, 0.3784, 0.8814, - 0.3307, 0.4333, 0.0862, 0.8030, 0.7150, 0.6042, 0.4672, - 0.1420, 0.3126, 0.5565, 0.5063, 0.1371, 0.4110, 0.5830, - 0.3460, 0.8459, 0.9932, 0.3370, 0.3110, 0.0982, 0.6162, - 0.8866, 0.9938, 0.9137, 0.2599, 0.1340, 0.1968, 0.4654, - 0.2563, 0.6561, 0.7845, 0.2903, 0.6104, 0.9219, 0.6603, - 0.0736, 0.2080, 0.8730, 0.0269, 0.5148, 0.9185, 0.0914, - 0.7374, 0.6494, 0.0513, 0.2661, 0.6485, 0.3876, 0.3399, - 0.5727, 0.6526, 0.6545, 0.8063, 0.5866, 0.7739, 0.1262, - 0.2849, 0.1051, 0.1115, 0.2427, 0.0104, 0.2599, 0.3134, - 0.0451, 0.7262, 0.6349, 0.5852, 0.2103, 0.4468, 0.0131, - 0.9703, 0.2087, 0.9981, 0.0746, 0.6323, 0.0776, 0.3216, - 0.8062, 0.8168, 0.8982, 0.6078, 0.4816, 0.3037, 0.1198, - 0.4605, 0.7585, 0.3262, 0.3531, 0.6379]), + col_indices=tensor([ 154, 2999, 825, 1636, 3815, 3438, 2253, 1331, 3004, + 2336, 1340, 4444, 3755, 23, 1857, 4181, 841, 2960, + 2359, 1673, 3660, 260, 2136, 4201, 1017, 3614, 3389, + 385, 4401, 638, 83, 2706, 1092, 2185, 3545, 2662, + 3426, 2283, 2969, 750, 1942, 664, 799, 1871, 2827, + 1701, 2382, 1103, 158, 3456, 4664, 540, 854, 3544, + 2588, 1575, 14, 1354, 4055, 3154, 1056, 3487, 4617, + 2638, 1557, 2677, 1391, 1688, 1866, 1072, 1468, 805, + 1511, 2239, 1932, 4956, 935, 667, 4636, 1082, 3069, + 1553, 2866, 4473, 939, 181, 4233, 578, 4872, 2926, + 27, 4197, 1683, 1175, 354, 4990, 3247, 1192, 2543, + 1921, 1245, 402, 183, 2028, 4755, 4722, 4897, 4057, + 4199, 3692, 3801, 2338, 390, 2381, 2342, 4822, 355, + 3493, 4969, 2020, 2364, 2777, 1961, 3280, 3063, 594, + 4483, 3113, 3111, 2642, 2628, 3480, 1335, 2924, 4164, + 3476, 4323, 2942, 969, 4253, 4676, 2565, 4055, 1805, + 3141, 4413, 1505, 399, 694, 1702, 2868, 2397, 3921, + 4175, 347, 228, 3769, 3973, 4114, 2282, 1434, 1544, + 2128, 4394, 1947, 3625, 4144, 4385, 4087, 330, 1601, + 1008, 3146, 1436, 2736, 3298, 1643, 4255, 318, 4590, + 2208, 1346, 358, 1201, 607, 1359, 2529, 3833, 2234, + 3171, 4701, 3063, 1408, 2546, 3193, 2490, 311, 2714, + 2003, 1337, 3020, 24, 327, 944, 1452, 3291, 532, + 1242, 2200, 1562, 842, 129, 1344, 1375, 1282, 1802, + 1329, 4743, 4218, 2218, 1429, 794, 2896, 3394, 3419, + 2442, 4412, 429, 4681, 4197, 549, 2947, 1379, 3800, + 3844, 3545, 214, 2263, 3398, 2668, 505, 1351, 813, + 4155, 2188, 4068, 1098, 3436, 213, 4345]), + values=tensor([0.0441, 0.2021, 0.4197, 0.3787, 0.4994, 0.0669, 0.2390, + 0.2704, 0.9990, 0.1473, 0.2147, 0.3802, 0.2154, 
0.6864, + 0.4396, 0.6944, 0.3516, 0.8152, 0.9467, 0.2367, 0.8994, + 0.7397, 0.2450, 0.4427, 0.3643, 0.8919, 0.5574, 0.3650, + 0.8956, 0.5939, 0.0074, 0.4771, 0.9906, 0.7713, 0.2468, + 0.7858, 0.3719, 0.9869, 0.0300, 0.0194, 0.2997, 0.4790, + 0.9720, 0.4598, 0.6857, 0.6617, 0.0857, 0.9502, 0.0267, + 0.6026, 0.7878, 0.7836, 0.9094, 0.0831, 0.5794, 0.8258, + 0.6094, 0.6387, 0.9121, 0.8473, 0.1980, 0.4751, 0.0414, + 0.8247, 0.1787, 0.0097, 0.4571, 0.3982, 0.4922, 0.4891, + 0.8475, 0.0514, 0.7241, 0.0872, 0.4347, 0.9952, 0.9308, + 0.8944, 0.1767, 0.3621, 0.2017, 0.5881, 0.6965, 0.0167, + 0.8849, 0.0743, 0.7859, 0.2383, 0.2486, 0.0765, 0.8707, + 0.0956, 0.3222, 0.2548, 0.2929, 0.7001, 0.3399, 0.4249, + 0.4626, 0.5228, 0.7478, 0.6870, 0.3208, 0.9248, 0.6958, + 0.9957, 0.6015, 0.5089, 0.8528, 0.2222, 0.2908, 0.1025, + 0.2847, 0.7649, 0.1984, 0.9493, 0.1393, 0.0256, 0.1513, + 0.8366, 0.9367, 0.1692, 0.6894, 0.8653, 0.5545, 0.9328, + 0.6474, 0.7781, 0.9736, 0.0288, 0.5905, 0.0099, 0.0815, + 0.5213, 0.1370, 0.1267, 0.0048, 0.6358, 0.3658, 0.7032, + 0.8768, 0.0033, 0.4004, 0.4728, 0.8507, 0.7044, 0.3384, + 0.2356, 0.3620, 0.9012, 0.7812, 0.6493, 0.2764, 0.9638, + 0.9188, 0.7181, 0.0706, 0.6992, 0.7806, 0.6933, 0.8242, + 0.9719, 0.5933, 0.2614, 0.9070, 0.4305, 0.5793, 0.9157, + 0.0228, 0.2483, 0.0723, 0.4007, 0.6597, 0.9055, 0.1242, + 0.3842, 0.7644, 0.3749, 0.3884, 0.4720, 0.1943, 0.6080, + 0.9153, 0.2979, 0.6694, 0.3162, 0.5341, 0.5710, 0.6084, + 0.2810, 0.5920, 0.9053, 0.6371, 0.5180, 0.5371, 0.5702, + 0.1114, 0.7775, 0.2537, 0.6227, 0.7769, 0.9996, 0.7352, + 0.7778, 0.2235, 0.2246, 0.4832, 0.1362, 0.1938, 0.0802, + 0.7137, 0.9233, 0.8754, 0.8385, 0.5993, 0.0697, 0.3377, + 0.8404, 0.6026, 0.1994, 0.8080, 0.6060, 0.6730, 0.9376, + 0.2653, 0.4796, 0.9320, 0.1391, 0.3535, 0.4087, 0.4501, + 0.2984, 0.9140, 0.2307, 0.8809, 0.1999, 0.7800, 0.6899, + 0.2576, 0.6360, 0.9717, 0.9757, 0.5583, 0.8318, 0.2902, + 0.9368, 0.9864, 0.9211, 0.2216, 0.9786]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.7653, 0.7067, 0.4116, ..., 0.4300, 0.1070, 0.3611]) +tensor([0.5639, 0.0624, 0.1703, ..., 0.2915, 0.6003, 0.2642]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -158,80 +158,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.7472190856933594 seconds +Time: 0.34607481956481934 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '569391', '-ss', '5000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.146198511123657} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '557047', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.404626369476318} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 250, 250, 250]), - col_indices=tensor([ 989, 4634, 172, 296, 437, 988, 4701, 40, 3459, - 2902, 284, 2223, 3489, 379, 2336, 3854, 3801, 4913, - 1784, 189, 1121, 2886, 4344, 1044, 1507, 1629, 4228, - 950, 3157, 372, 4392, 249, 3355, 4991, 61, 3311, - 365, 3749, 2426, 4689, 420, 1130, 2303, 3276, 2058, - 3417, 2635, 1997, 4469, 149, 3640, 2817, 310, 1358, - 4005, 314, 2266, 836, 2231, 2153, 4590, 1083, 2752, - 2577, 2539, 3832, 170, 4839, 1662, 908, 3409, 160, - 1208, 2792, 1394, 3839, 404, 2657, 1041, 2651, 3377, - 3822, 3581, 2353, 3591, 2000, 4401, 4545, 4324, 3328, - 3711, 2291, 2328, 732, 536, 660, 2140, 2401, 566, - 1414, 1235, 4049, 1072, 4129, 3797, 3825, 3260, 1333, - 2653, 3617, 58, 3265, 1036, 4854, 392, 4867, 4701, - 3576, 771, 2398, 4330, 1034, 4559, 2708, 409, 3139, - 2114, 3463, 923, 763, 2766, 4868, 1142, 1459, 3024, - 2321, 1511, 1594, 1553, 98, 954, 4757, 1367, 2284, - 321, 4282, 4827, 311, 3480, 705, 1128, 255, 1664, - 653, 1381, 1987, 2729, 634, 2582, 4911, 3144, 1242, - 3821, 2906, 2900, 547, 195, 264, 1462, 3048, 2738, - 753, 4689, 302, 1125, 2387, 532, 854, 131, 4228, - 2001, 3802, 1432, 364, 2122, 3, 492, 481, 3737, - 3945, 2016, 4040, 4587, 3047, 678, 2781, 1044, 3568, - 1574, 3813, 2876, 1656, 4200, 1707, 1113, 3551, 4496, - 1942, 1480, 4429, 3975, 2412, 3934, 2906, 952, 4773, - 1043, 3314, 572, 4511, 1843, 4636, 1964, 2523, 457, - 3459, 2009, 1681, 9, 2459, 3710, 4173, 1493, 3773, - 2982, 4418, 4646, 1091, 541, 4902, 4735, 4604, 3735, - 3670, 955, 687, 2373, 4360, 1850, 1893]), - values=tensor([0.2345, 0.0289, 0.8583, 0.9123, 0.0874, 0.7501, 0.2033, - 0.8326, 0.8469, 0.1882, 0.3285, 0.3183, 0.8931, 0.0457, - 0.8868, 0.7189, 0.4379, 0.1462, 0.4719, 0.1691, 0.1099, - 0.8022, 0.0756, 0.2871, 0.6213, 0.4582, 0.2170, 0.3357, - 0.7252, 0.0149, 0.2470, 0.4898, 0.0035, 0.1331, 0.4871, - 0.7295, 0.2640, 0.3186, 0.3619, 0.0774, 0.2757, 0.9917, - 0.3749, 0.2825, 0.4846, 0.8782, 0.2242, 0.0584, 0.4269, - 0.3007, 0.5193, 0.9227, 0.9773, 0.6304, 0.0725, 0.4260, - 0.4518, 0.5456, 0.3019, 0.2067, 0.3845, 0.8768, 0.2863, - 0.4471, 0.0208, 0.9135, 0.0548, 0.1836, 0.9804, 0.3038, - 0.5045, 0.8119, 0.2476, 0.4867, 0.9780, 0.3338, 0.2853, - 0.7670, 0.4677, 0.5075, 0.3848, 0.5236, 0.0031, 0.3726, - 0.6233, 0.1936, 0.1739, 0.4139, 0.1871, 0.5920, 0.8457, - 0.8536, 0.8234, 0.3531, 0.8514, 0.1766, 0.5797, 0.3086, - 0.0545, 0.2101, 0.0864, 0.3338, 0.2356, 0.3200, 0.7401, - 0.4108, 0.5013, 0.5320, 0.4414, 0.7825, 0.0249, 0.2494, - 0.0429, 0.7080, 0.9162, 0.6423, 0.2821, 0.2742, 0.5289, - 0.2928, 0.0848, 0.8315, 0.7088, 0.8269, 0.3671, 0.5127, - 0.2282, 0.7407, 0.1379, 0.8288, 0.2763, 0.1471, 0.0918, - 0.7196, 0.6693, 0.6326, 0.9413, 0.1511, 0.6888, 0.3336, - 0.2545, 0.9984, 0.8005, 0.8337, 0.2430, 0.7476, 0.3204, - 0.0554, 0.5080, 0.0854, 0.1850, 0.7747, 0.5775, 0.2057, - 0.7868, 0.8337, 0.6964, 0.9562, 0.1725, 0.3223, 0.4786, - 0.5641, 0.5075, 0.5871, 0.6849, 0.6564, 0.2437, 0.1937, - 0.6389, 0.0952, 0.9817, 0.1000, 0.7393, 0.9387, 0.8443, - 0.9838, 0.1009, 0.7329, 0.9758, 0.9984, 0.0689, 0.6045, - 0.3081, 0.8442, 0.7079, 0.3197, 0.6314, 0.2885, 0.9946, - 0.0894, 0.3380, 0.0723, 0.8864, 0.2114, 0.6387, 0.7774, - 0.5705, 0.9374, 0.3114, 0.6458, 0.5623, 0.1687, 0.3946, - 0.8120, 0.4227, 0.8777, 0.4345, 0.8346, 0.0514, 0.7320, - 0.0137, 0.2630, 0.1970, 0.0196, 0.2035, 0.6052, 0.7403, - 0.6899, 0.2449, 0.2769, 0.3900, 0.8664, 0.9461, 0.5286, - 0.0997, 0.7438, 0.0400, 0.7885, 0.5277, 0.1693, 0.7534, - 
0.3649, 0.5259, 0.9420, 0.2968, 0.8974, 0.5468, 0.5308, - 0.9748, 0.7021, 0.7026, 0.1970, 0.7386, 0.9856, 0.8826, - 0.6766, 0.7905, 0.8999, 0.3805, 0.8437]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3535, 752, 4871, 285, 193, 3991, 181, 4103, 4994, + 4819, 778, 2802, 4285, 1915, 3561, 4335, 50, 3054, + 3873, 4370, 4522, 2186, 4742, 3995, 625, 30, 4180, + 3451, 1966, 2287, 4077, 2723, 1679, 2113, 4904, 2129, + 1034, 416, 2370, 4458, 2567, 1199, 1800, 4401, 1438, + 3875, 3659, 4779, 4705, 4866, 3850, 1810, 4124, 1884, + 90, 3943, 3202, 2197, 4269, 3069, 2905, 1163, 120, + 1599, 1021, 1929, 2781, 2917, 191, 173, 3508, 4451, + 867, 4130, 818, 2038, 1807, 60, 2279, 362, 1352, + 2983, 3060, 4061, 3783, 2530, 3122, 1387, 2365, 1264, + 3836, 3665, 551, 1008, 1253, 3694, 2443, 3292, 3911, + 2085, 2516, 1186, 3005, 3743, 302, 4892, 3208, 2118, + 3857, 3023, 441, 4190, 2955, 1688, 4134, 4280, 1065, + 2373, 2513, 2898, 1630, 4739, 4377, 735, 1709, 8, + 4149, 3261, 1198, 4464, 524, 1523, 3524, 4361, 2389, + 3440, 2909, 3425, 3957, 3833, 142, 3466, 4004, 363, + 2584, 1779, 4216, 3099, 4171, 1410, 1560, 4925, 598, + 600, 1688, 2981, 4335, 3575, 3990, 70, 651, 2774, + 2274, 2334, 500, 2649, 3505, 1527, 3170, 2669, 939, + 4369, 3117, 4409, 3921, 2913, 2672, 90, 189, 2416, + 4648, 2158, 4498, 2787, 2777, 427, 1647, 596, 32, + 3758, 3798, 1490, 2780, 536, 2521, 511, 3978, 4192, + 4339, 3544, 936, 2045, 1778, 4789, 1856, 262, 1089, + 642, 2366, 2554, 3581, 71, 2262, 2483, 4988, 827, + 4853, 4171, 1818, 4822, 2101, 1941, 2179, 2348, 1002, + 1769, 4513, 2538, 2796, 827, 3722, 2583, 3562, 2718, + 2215, 2863, 4777, 291, 3837, 1110, 4297, 2738, 986, + 4392, 938, 4212, 3392, 4029, 4006, 3789]), + values=tensor([0.2023, 0.8187, 0.5198, 0.7670, 0.2627, 0.6008, 0.5311, + 0.8539, 0.3635, 0.1994, 0.9519, 0.6280, 0.7439, 0.4916, + 0.4110, 0.6007, 0.0466, 0.2207, 0.4184, 0.4846, 0.2712, + 0.1303, 0.1180, 0.5130, 0.1621, 0.5385, 0.8464, 0.2856, + 0.7650, 0.3522, 0.2748, 0.6971, 0.5615, 0.4695, 0.8702, + 0.8957, 0.1425, 0.4572, 0.8699, 0.9647, 0.1193, 0.9814, + 0.0413, 0.8860, 0.2349, 0.6689, 0.4064, 0.8116, 0.1071, + 0.2195, 0.2890, 0.2579, 0.4438, 0.6245, 0.4398, 0.0869, + 0.9152, 0.4439, 0.6949, 0.2975, 0.8353, 0.1252, 0.8116, + 0.6012, 0.7192, 0.6124, 0.4150, 0.4552, 0.6692, 0.9519, + 0.5595, 0.9511, 0.4391, 0.3332, 0.2767, 0.5331, 0.4974, + 0.9181, 0.1964, 0.0743, 0.7761, 0.8892, 0.1605, 0.8005, + 0.1158, 0.7113, 0.0699, 0.9667, 0.0886, 0.6206, 0.4614, + 0.4981, 0.8561, 0.4178, 0.0614, 0.1711, 0.4511, 0.4612, + 0.6347, 0.7605, 0.6549, 0.8747, 0.7453, 0.5427, 0.0521, + 0.0769, 0.8320, 0.0535, 0.0986, 0.1164, 0.5041, 0.8307, + 0.5913, 0.3421, 0.3672, 0.0839, 0.4439, 0.3274, 0.8462, + 0.0753, 0.5743, 0.4336, 0.2315, 0.7502, 0.6125, 0.2294, + 0.7800, 0.5684, 0.9272, 0.6866, 0.0274, 0.6553, 0.6854, + 0.3119, 0.1113, 0.3305, 0.4940, 0.8530, 0.1185, 0.3421, + 0.1736, 0.3136, 0.5608, 0.3923, 0.7039, 0.8823, 0.9229, + 0.8681, 0.9040, 0.9731, 0.4768, 0.5091, 0.9307, 0.5945, + 0.1103, 0.4819, 0.8047, 0.4694, 0.2887, 0.2838, 0.3189, + 0.2238, 0.9533, 0.9758, 0.4073, 0.2681, 0.6063, 0.4492, + 0.2424, 0.0013, 0.4796, 0.4396, 0.3595, 0.0630, 0.3254, + 0.1162, 0.8933, 0.5119, 0.0665, 0.1748, 0.2380, 0.6503, + 0.4346, 0.4795, 0.2241, 0.5218, 0.9445, 0.0909, 0.0892, + 0.2052, 0.9134, 0.2381, 0.2170, 0.9650, 0.9711, 0.4432, + 0.0629, 0.4315, 0.8955, 0.2044, 0.2668, 0.3191, 0.8287, + 0.2249, 0.5210, 0.6771, 0.7730, 0.2266, 0.3403, 0.4586, + 0.1469, 0.2324, 0.6734, 0.0843, 0.5033, 0.0074, 0.1433, + 
0.7814, 0.0379, 0.8469, 0.8898, 0.1937, 0.5462, 0.0482, + 0.2123, 0.5237, 0.1975, 0.8515, 0.2026, 0.2206, 0.7011, + 0.4381, 0.0204, 0.6609, 0.3691, 0.3074, 0.6258, 0.3408, + 0.8391, 0.4305, 0.9402, 0.0381, 0.6451, 0.9800, 0.4677, + 0.3257, 0.3529, 0.6511, 0.2780, 0.8455]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.6631, 0.6256, 0.9086, ..., 0.3830, 0.1647, 0.1472]) +tensor([0.3989, 0.3808, 0.9151, ..., 0.1528, 0.1783, 0.6798]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -239,77 +239,77 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.146198511123657 seconds +Time: 10.404626369476318 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 250, 250, 250]), - col_indices=tensor([ 989, 4634, 172, 296, 437, 988, 4701, 40, 3459, - 2902, 284, 2223, 3489, 379, 2336, 3854, 3801, 4913, - 1784, 189, 1121, 2886, 4344, 1044, 1507, 1629, 4228, - 950, 3157, 372, 4392, 249, 3355, 4991, 61, 3311, - 365, 3749, 2426, 4689, 420, 1130, 2303, 3276, 2058, - 3417, 2635, 1997, 4469, 149, 3640, 2817, 310, 1358, - 4005, 314, 2266, 836, 2231, 2153, 4590, 1083, 2752, - 2577, 2539, 3832, 170, 4839, 1662, 908, 3409, 160, - 1208, 2792, 1394, 3839, 404, 2657, 1041, 2651, 3377, - 3822, 3581, 2353, 3591, 2000, 4401, 4545, 4324, 3328, - 3711, 2291, 2328, 732, 536, 660, 2140, 2401, 566, - 1414, 1235, 4049, 1072, 4129, 3797, 3825, 3260, 1333, - 2653, 3617, 58, 3265, 1036, 4854, 392, 4867, 4701, - 3576, 771, 2398, 4330, 1034, 4559, 2708, 409, 3139, - 2114, 3463, 923, 763, 2766, 4868, 1142, 1459, 3024, - 2321, 1511, 1594, 1553, 98, 954, 4757, 1367, 2284, - 321, 4282, 4827, 311, 3480, 705, 1128, 255, 1664, - 653, 1381, 1987, 2729, 634, 2582, 4911, 3144, 1242, - 3821, 2906, 2900, 547, 195, 264, 1462, 3048, 2738, - 753, 4689, 302, 1125, 2387, 532, 854, 131, 4228, - 2001, 3802, 1432, 364, 2122, 3, 492, 481, 3737, - 3945, 2016, 4040, 4587, 3047, 678, 2781, 1044, 3568, - 1574, 3813, 2876, 1656, 4200, 1707, 1113, 3551, 4496, - 1942, 1480, 4429, 3975, 2412, 3934, 2906, 952, 4773, - 1043, 3314, 572, 4511, 1843, 4636, 1964, 2523, 457, - 3459, 2009, 1681, 9, 2459, 3710, 4173, 1493, 3773, - 2982, 4418, 4646, 1091, 541, 4902, 4735, 4604, 3735, - 3670, 955, 687, 2373, 4360, 1850, 1893]), - values=tensor([0.2345, 0.0289, 0.8583, 0.9123, 0.0874, 0.7501, 0.2033, - 0.8326, 0.8469, 0.1882, 0.3285, 0.3183, 0.8931, 0.0457, - 0.8868, 0.7189, 0.4379, 0.1462, 0.4719, 0.1691, 0.1099, - 0.8022, 0.0756, 0.2871, 0.6213, 0.4582, 0.2170, 0.3357, - 0.7252, 0.0149, 0.2470, 0.4898, 0.0035, 0.1331, 0.4871, - 0.7295, 0.2640, 0.3186, 0.3619, 0.0774, 0.2757, 0.9917, - 0.3749, 0.2825, 0.4846, 0.8782, 0.2242, 0.0584, 0.4269, - 0.3007, 0.5193, 0.9227, 0.9773, 0.6304, 0.0725, 0.4260, - 0.4518, 0.5456, 0.3019, 0.2067, 0.3845, 0.8768, 0.2863, - 0.4471, 0.0208, 0.9135, 0.0548, 0.1836, 0.9804, 0.3038, - 0.5045, 0.8119, 0.2476, 0.4867, 0.9780, 0.3338, 0.2853, - 0.7670, 0.4677, 0.5075, 0.3848, 0.5236, 0.0031, 0.3726, - 0.6233, 0.1936, 0.1739, 0.4139, 0.1871, 0.5920, 0.8457, - 0.8536, 0.8234, 0.3531, 0.8514, 0.1766, 0.5797, 0.3086, - 0.0545, 0.2101, 0.0864, 0.3338, 0.2356, 0.3200, 0.7401, - 0.4108, 0.5013, 0.5320, 0.4414, 0.7825, 0.0249, 0.2494, - 
0.0429, 0.7080, 0.9162, 0.6423, 0.2821, 0.2742, 0.5289, - 0.2928, 0.0848, 0.8315, 0.7088, 0.8269, 0.3671, 0.5127, - 0.2282, 0.7407, 0.1379, 0.8288, 0.2763, 0.1471, 0.0918, - 0.7196, 0.6693, 0.6326, 0.9413, 0.1511, 0.6888, 0.3336, - 0.2545, 0.9984, 0.8005, 0.8337, 0.2430, 0.7476, 0.3204, - 0.0554, 0.5080, 0.0854, 0.1850, 0.7747, 0.5775, 0.2057, - 0.7868, 0.8337, 0.6964, 0.9562, 0.1725, 0.3223, 0.4786, - 0.5641, 0.5075, 0.5871, 0.6849, 0.6564, 0.2437, 0.1937, - 0.6389, 0.0952, 0.9817, 0.1000, 0.7393, 0.9387, 0.8443, - 0.9838, 0.1009, 0.7329, 0.9758, 0.9984, 0.0689, 0.6045, - 0.3081, 0.8442, 0.7079, 0.3197, 0.6314, 0.2885, 0.9946, - 0.0894, 0.3380, 0.0723, 0.8864, 0.2114, 0.6387, 0.7774, - 0.5705, 0.9374, 0.3114, 0.6458, 0.5623, 0.1687, 0.3946, - 0.8120, 0.4227, 0.8777, 0.4345, 0.8346, 0.0514, 0.7320, - 0.0137, 0.2630, 0.1970, 0.0196, 0.2035, 0.6052, 0.7403, - 0.6899, 0.2449, 0.2769, 0.3900, 0.8664, 0.9461, 0.5286, - 0.0997, 0.7438, 0.0400, 0.7885, 0.5277, 0.1693, 0.7534, - 0.3649, 0.5259, 0.9420, 0.2968, 0.8974, 0.5468, 0.5308, - 0.9748, 0.7021, 0.7026, 0.1970, 0.7386, 0.9856, 0.8826, - 0.6766, 0.7905, 0.8999, 0.3805, 0.8437]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3535, 752, 4871, 285, 193, 3991, 181, 4103, 4994, + 4819, 778, 2802, 4285, 1915, 3561, 4335, 50, 3054, + 3873, 4370, 4522, 2186, 4742, 3995, 625, 30, 4180, + 3451, 1966, 2287, 4077, 2723, 1679, 2113, 4904, 2129, + 1034, 416, 2370, 4458, 2567, 1199, 1800, 4401, 1438, + 3875, 3659, 4779, 4705, 4866, 3850, 1810, 4124, 1884, + 90, 3943, 3202, 2197, 4269, 3069, 2905, 1163, 120, + 1599, 1021, 1929, 2781, 2917, 191, 173, 3508, 4451, + 867, 4130, 818, 2038, 1807, 60, 2279, 362, 1352, + 2983, 3060, 4061, 3783, 2530, 3122, 1387, 2365, 1264, + 3836, 3665, 551, 1008, 1253, 3694, 2443, 3292, 3911, + 2085, 2516, 1186, 3005, 3743, 302, 4892, 3208, 2118, + 3857, 3023, 441, 4190, 2955, 1688, 4134, 4280, 1065, + 2373, 2513, 2898, 1630, 4739, 4377, 735, 1709, 8, + 4149, 3261, 1198, 4464, 524, 1523, 3524, 4361, 2389, + 3440, 2909, 3425, 3957, 3833, 142, 3466, 4004, 363, + 2584, 1779, 4216, 3099, 4171, 1410, 1560, 4925, 598, + 600, 1688, 2981, 4335, 3575, 3990, 70, 651, 2774, + 2274, 2334, 500, 2649, 3505, 1527, 3170, 2669, 939, + 4369, 3117, 4409, 3921, 2913, 2672, 90, 189, 2416, + 4648, 2158, 4498, 2787, 2777, 427, 1647, 596, 32, + 3758, 3798, 1490, 2780, 536, 2521, 511, 3978, 4192, + 4339, 3544, 936, 2045, 1778, 4789, 1856, 262, 1089, + 642, 2366, 2554, 3581, 71, 2262, 2483, 4988, 827, + 4853, 4171, 1818, 4822, 2101, 1941, 2179, 2348, 1002, + 1769, 4513, 2538, 2796, 827, 3722, 2583, 3562, 2718, + 2215, 2863, 4777, 291, 3837, 1110, 4297, 2738, 986, + 4392, 938, 4212, 3392, 4029, 4006, 3789]), + values=tensor([0.2023, 0.8187, 0.5198, 0.7670, 0.2627, 0.6008, 0.5311, + 0.8539, 0.3635, 0.1994, 0.9519, 0.6280, 0.7439, 0.4916, + 0.4110, 0.6007, 0.0466, 0.2207, 0.4184, 0.4846, 0.2712, + 0.1303, 0.1180, 0.5130, 0.1621, 0.5385, 0.8464, 0.2856, + 0.7650, 0.3522, 0.2748, 0.6971, 0.5615, 0.4695, 0.8702, + 0.8957, 0.1425, 0.4572, 0.8699, 0.9647, 0.1193, 0.9814, + 0.0413, 0.8860, 0.2349, 0.6689, 0.4064, 0.8116, 0.1071, + 0.2195, 0.2890, 0.2579, 0.4438, 0.6245, 0.4398, 0.0869, + 0.9152, 0.4439, 0.6949, 0.2975, 0.8353, 0.1252, 0.8116, + 0.6012, 0.7192, 0.6124, 0.4150, 0.4552, 0.6692, 0.9519, + 0.5595, 0.9511, 0.4391, 0.3332, 0.2767, 0.5331, 0.4974, + 0.9181, 0.1964, 0.0743, 0.7761, 0.8892, 0.1605, 0.8005, + 0.1158, 0.7113, 0.0699, 0.9667, 0.0886, 0.6206, 0.4614, + 0.4981, 0.8561, 0.4178, 0.0614, 0.1711, 0.4511, 0.4612, + 
0.6347, 0.7605, 0.6549, 0.8747, 0.7453, 0.5427, 0.0521, + 0.0769, 0.8320, 0.0535, 0.0986, 0.1164, 0.5041, 0.8307, + 0.5913, 0.3421, 0.3672, 0.0839, 0.4439, 0.3274, 0.8462, + 0.0753, 0.5743, 0.4336, 0.2315, 0.7502, 0.6125, 0.2294, + 0.7800, 0.5684, 0.9272, 0.6866, 0.0274, 0.6553, 0.6854, + 0.3119, 0.1113, 0.3305, 0.4940, 0.8530, 0.1185, 0.3421, + 0.1736, 0.3136, 0.5608, 0.3923, 0.7039, 0.8823, 0.9229, + 0.8681, 0.9040, 0.9731, 0.4768, 0.5091, 0.9307, 0.5945, + 0.1103, 0.4819, 0.8047, 0.4694, 0.2887, 0.2838, 0.3189, + 0.2238, 0.9533, 0.9758, 0.4073, 0.2681, 0.6063, 0.4492, + 0.2424, 0.0013, 0.4796, 0.4396, 0.3595, 0.0630, 0.3254, + 0.1162, 0.8933, 0.5119, 0.0665, 0.1748, 0.2380, 0.6503, + 0.4346, 0.4795, 0.2241, 0.5218, 0.9445, 0.0909, 0.0892, + 0.2052, 0.9134, 0.2381, 0.2170, 0.9650, 0.9711, 0.4432, + 0.0629, 0.4315, 0.8955, 0.2044, 0.2668, 0.3191, 0.8287, + 0.2249, 0.5210, 0.6771, 0.7730, 0.2266, 0.3403, 0.4586, + 0.1469, 0.2324, 0.6734, 0.0843, 0.5033, 0.0074, 0.1433, + 0.7814, 0.0379, 0.8469, 0.8898, 0.1937, 0.5462, 0.0482, + 0.2123, 0.5237, 0.1975, 0.8515, 0.2026, 0.2206, 0.7011, + 0.4381, 0.0204, 0.6609, 0.3691, 0.3074, 0.6258, 0.3408, + 0.8391, 0.4305, 0.9402, 0.0381, 0.6451, 0.9800, 0.4677, + 0.3257, 0.3529, 0.6511, 0.2780, 0.8455]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.6631, 0.6256, 0.9086, ..., 0.3830, 0.1647, 0.1472]) +tensor([0.3989, 0.3808, 0.9151, ..., 0.1528, 0.1783, 0.6798]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -317,13 +317,13 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.146198511123657 seconds +Time: 10.404626369476318 seconds -[39.27, 38.69, 38.63, 39.39, 38.61, 38.66, 38.59, 38.96, 38.73, 38.93] -[65.05] -12.874769687652588 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 569391, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.146198511123657, 'TIME_S_1KI': 0.017819386873209546, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 837.5037681818008, 'W': 65.05} -[39.27, 38.69, 38.63, 39.39, 38.61, 38.66, 38.59, 38.96, 38.73, 38.93, 39.57, 38.76, 39.0, 38.65, 39.12, 39.14, 39.15, 44.31, 38.75, 39.36] -705.7049999999999 -35.28525 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 569391, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.146198511123657, 'TIME_S_1KI': 0.017819386873209546, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 837.5037681818008, 'W': 65.05, 'J_1KI': 1.4708763717406856, 'W_1KI': 0.11424486864035434, 'W_D': 29.76475, 'J_D': 383.2143010605574, 'W_D_1KI': 0.05227471105092985, 'J_D_1KI': 9.180810910416543e-05} +[39.16, 38.54, 38.85, 38.84, 38.69, 38.52, 39.94, 38.95, 39.04, 38.48] +[64.83] +12.675763607025146 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 557047, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.404626369476318, 'TIME_S_1KI': 0.018678184012258063, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 821.7697546434403, 'W': 64.83} +[39.16, 38.54, 38.85, 38.84, 38.69, 38.52, 39.94, 38.95, 39.04, 38.48, 40.52, 39.13, 38.73, 38.89, 39.81, 38.78, 39.14, 39.03, 39.08, 39.08] +702.5799999999999 +35.129 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 557047, 
'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.404626369476318, 'TIME_S_1KI': 0.018678184012258063, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 821.7697546434403, 'W': 64.83, 'J_1KI': 1.4752251688698446, 'W_1KI': 0.11638156205849774, 'W_D': 29.701, 'J_D': 376.4828548922539, 'W_D_1KI': 0.0533186607234219, 'J_D_1KI': 9.571662844144552e-05} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.json index 1f26acc..a830ef2 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 520646, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.21049165725708, "TIME_S_1KI": 0.01961119773753583, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 853.8924551653862, "W": 65.17, "J_1KI": 1.6400634119255428, "W_1KI": 0.1251714216569416, "W_D": 29.995000000000005, "J_D": 393.0106520283223, "W_D_1KI": 0.05761112156820566, "J_D_1KI": 0.0001106531531370752} +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 519426, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.050918102264404, "TIME_S_1KI": 0.019350048134410686, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 851.1004844999312, "W": 65.07, "J_1KI": 1.638540397477083, "W_1KI": 0.12527289739058112, "W_D": 29.469499999999996, "J_D": 385.4542143533229, "W_D_1KI": 0.056734741811153074, "J_D_1KI": 0.00010922584123850764} diff --git a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.output index 65d6ab4..5930d2c 100644 --- a/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_1core/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,32 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.012744903564453125} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1249, 1250]), - col_indices=tensor([1621, 4974, 1997, ..., 3786, 4849, 461]), - values=tensor([0.2109, 0.3256, 0.0266, ..., 0.3581, 0.6264, 0.0778]), - size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.8357, 0.0383, 0.1188, ..., 0.4462, 0.9461, 0.1099]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250 -Density: 5e-05 -Time: 0.012744903564453125 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '82385', '-ss', '5000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.6614789962768555} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.013159751892089844} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([1213, 1571, 1960, ..., 2843, 4867, 4843]), - values=tensor([0.3029, 0.3061, 0.3000, ..., 0.6016, 0.9759, 0.4960]), + col_indices=tensor([4644, 3332, 1667, ..., 4027, 3585, 4089]), + values=tensor([0.3040, 0.1880, 0.0650, ..., 0.6030, 0.8433, 0.6282]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.1072, 0.2899, 0.3055, ..., 0.7146, 0.5978, 0.9959]) +tensor([0.9378, 0.4555, 0.5641, ..., 0.1661, 0.3942, 0.0187]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 1.6614789962768555 seconds +Time: 0.013159751892089844 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '520646', '-ss', '5000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.21049165725708} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '79788', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.612882375717163} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), - col_indices=tensor([ 472, 3691, 4268, ..., 1601, 3041, 533]), - values=tensor([0.9317, 0.8516, 0.8376, ..., 0.6191, 0.8435, 0.3776]), +tensor(crow_indices=tensor([ 0, 2, 2, ..., 1250, 1250, 1250]), + col_indices=tensor([2035, 3979, 625, ..., 1946, 1967, 3787]), + values=tensor([0.7408, 0.7466, 0.0824, ..., 0.9968, 0.3510, 0.5639]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.6520, 0.6755, 0.7512, ..., 0.2262, 0.3599, 0.0025]) +tensor([0.7993, 0.7242, 0.8180, ..., 0.0616, 0.2082, 0.9168]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.21049165725708 seconds +Time: 1.612882375717163 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '519426', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.050918102264404} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), - col_indices=tensor([ 472, 3691, 4268, ..., 1601, 3041, 533]), - values=tensor([0.9317, 0.8516, 0.8376, ..., 0.6191, 0.8435, 0.3776]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([3746, 319, 215, ..., 2829, 1777, 1134]), + values=tensor([0.2010, 0.1278, 0.2720, ..., 0.9161, 0.9746, 0.1029]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.6520, 0.6755, 0.7512, ..., 0.2262, 0.3599, 0.0025]) +tensor([0.7359, 0.6956, 0.4535, ..., 0.4670, 0.7765, 0.5592]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +53,29 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.21049165725708 seconds +Time: 10.050918102264404 seconds -[39.48, 39.4, 39.17, 39.09, 40.79, 38.97, 38.82, 38.72, 38.81, 38.71] -[65.17] -13.102538824081421 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 520646, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.21049165725708, 'TIME_S_1KI': 0.01961119773753583, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 853.8924551653862, 'W': 65.17} -[39.48, 39.4, 39.17, 39.09, 40.79, 38.97, 38.82, 38.72, 38.81, 38.71, 39.49, 39.17, 38.98, 38.86, 39.13, 38.88, 38.85, 38.81, 38.77, 38.88] -703.5 -35.175 -{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 520646, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.21049165725708, 'TIME_S_1KI': 0.01961119773753583, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 853.8924551653862, 'W': 65.17, 'J_1KI': 1.6400634119255428, 'W_1KI': 0.1251714216569416, 'W_D': 29.995000000000005, 'J_D': 393.0106520283223, 'W_D_1KI': 
0.05761112156820566, 'J_D_1KI': 0.0001106531531370752} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([3746, 319, 215, ..., 2829, 1777, 1134]), + values=tensor([0.2010, 0.1278, 0.2720, ..., 0.9161, 0.9746, 0.1029]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.7359, 0.6956, 0.4535, ..., 0.4670, 0.7765, 0.5592]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.050918102264404 seconds + +[40.13, 39.06, 39.29, 39.18, 38.97, 39.06, 38.88, 38.75, 38.74, 45.05] +[65.07] +13.07976770401001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 519426, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.050918102264404, 'TIME_S_1KI': 0.019350048134410686, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 851.1004844999312, 'W': 65.07} +[40.13, 39.06, 39.29, 39.18, 38.97, 39.06, 38.88, 38.75, 38.74, 45.05, 41.25, 38.94, 38.79, 38.73, 39.27, 44.19, 39.53, 38.82, 39.21, 38.77] +712.01 +35.6005 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 519426, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.050918102264404, 'TIME_S_1KI': 0.019350048134410686, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 851.1004844999312, 'W': 65.07, 'J_1KI': 1.638540397477083, 'W_1KI': 0.12527289739058112, 'W_D': 29.469499999999996, 'J_D': 385.4542143533229, 'W_D_1KI': 0.056734741811153074, 'J_D_1KI': 0.00010922584123850764} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..1415bfa --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 286, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.389951467514038, "TIME_S_1KI": 36.32850163466446, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 766.2996877527237, "W": 52.98, "J_1KI": 2679.369537596936, "W_1KI": 185.24475524475523, "W_D": 35.61999999999999, "J_D": 515.2056413316725, "W_D_1KI": 124.54545454545452, "J_D_1KI": 435.47361729179903} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..261069e --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 
'coo', '100', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 3.6701581478118896} + +tensor(indices=tensor([[ 4090, 6368, 64940, ..., 24872, 43951, 99722], + [43072, 23887, 57626, ..., 2599, 92945, 52721]]), + values=tensor([0.5284, 0.2277, 0.6442, ..., 0.1523, 0.8543, 0.1278]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2916, 0.8737, 0.3685, ..., 0.6723, 0.4085, 0.8827]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 3.6701581478118896 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '286', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.389951467514038} + +tensor(indices=tensor([[98736, 3415, 59338, ..., 83719, 15132, 74509], + [ 7425, 50464, 22332, ..., 74155, 79832, 59244]]), + values=tensor([0.4216, 0.2510, 0.1273, ..., 0.4179, 0.2683, 0.4239]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.8702, 0.6240, 0.9511, ..., 0.9442, 0.7623, 0.9941]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.389951467514038 seconds + +tensor(indices=tensor([[98736, 3415, 59338, ..., 83719, 15132, 74509], + [ 7425, 50464, 22332, ..., 74155, 79832, 59244]]), + values=tensor([0.4216, 0.2510, 0.1273, ..., 0.4179, 0.2683, 0.4239]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.8702, 0.6240, 0.9511, ..., 0.9442, 0.7623, 0.9941]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.389951467514038 seconds + +[19.01, 18.61, 18.64, 19.12, 18.6, 18.51, 23.01, 19.03, 18.6, 18.91] +[52.98] +14.463942766189575 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 286, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.389951467514038, 'TIME_S_1KI': 36.32850163466446, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 766.2996877527237, 'W': 52.98} +[19.01, 18.61, 18.64, 19.12, 18.6, 18.51, 23.01, 19.03, 18.6, 18.91, 18.97, 21.7, 20.23, 18.41, 19.41, 19.64, 18.54, 18.57, 18.71, 18.85] +347.20000000000005 +17.360000000000003 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 286, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.389951467514038, 'TIME_S_1KI': 36.32850163466446, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 766.2996877527237, 'W': 52.98, 'J_1KI': 2679.369537596936, 'W_1KI': 185.24475524475523, 'W_D': 35.61999999999999, 'J_D': 515.2056413316725, 'W_D_1KI': 124.54545454545452, 'J_D_1KI': 435.47361729179903} diff --git 
a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..3012046 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 41.292503356933594, "TIME_S_1KI": 412.92503356933594, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2580.4754705643654, "W": 53.19, "J_1KI": 25804.754705643652, "W_1KI": 531.9, "W_D": 35.57125, "J_D": 1725.7141959449648, "W_D_1KI": 355.7125, "J_D_1KI": 3557.1249999999995} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..bac9654 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 41.292503356933594} + +tensor(indices=tensor([[59394, 19276, 27218, ..., 48112, 53619, 82346], + [ 8217, 46972, 15744, ..., 94107, 44064, 30572]]), + values=tensor([0.2722, 0.6221, 0.9850, ..., 0.8195, 0.0493, 0.9859]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8645, 0.5700, 0.0104, ..., 0.9825, 0.6484, 0.5754]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 41.292503356933594 seconds + +tensor(indices=tensor([[59394, 19276, 27218, ..., 48112, 53619, 82346], + [ 8217, 46972, 15744, ..., 94107, 44064, 30572]]), + values=tensor([0.2722, 0.6221, 0.9850, ..., 0.8195, 0.0493, 0.9859]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8645, 0.5700, 0.0104, ..., 0.9825, 0.6484, 0.5754]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 41.292503356933594 seconds + +[19.11, 18.44, 18.56, 18.48, 19.19, 18.45, 19.21, 18.66, 18.92, 18.41] +[53.19] +48.51429724693298 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 41.292503356933594, 'TIME_S_1KI': 412.92503356933594, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2580.4754705643654, 'W': 53.19} +[19.11, 18.44, 18.56, 18.48, 19.19, 18.45, 19.21, 18.66, 18.92, 18.41, 18.92, 18.27, 18.38, 19.08, 18.78, 33.39, 19.45, 18.71, 18.63, 19.11] +352.37499999999994 +17.61875 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 
'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 41.292503356933594, 'TIME_S_1KI': 412.92503356933594, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2580.4754705643654, 'W': 53.19, 'J_1KI': 25804.754705643652, 'W_1KI': 531.9, 'W_D': 35.57125, 'J_D': 1725.7141959449648, 'W_D_1KI': 355.7125, 'J_D_1KI': 3557.1249999999995} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..71646db --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 429.7097659111023, "TIME_S_1KI": 4297.097659111023, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 24245.604853191377, "W": 53.22, "J_1KI": 242456.04853191378, "W_1KI": 532.2, "W_D": 35.86475, "J_D": 16339.018351343393, "W_D_1KI": 358.64750000000004, "J_D_1KI": 3586.4750000000004} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..3b8854f --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 429.7097659111023} + +tensor(indices=tensor([[45550, 27488, 59840, ..., 29384, 1287, 96784], + [15145, 15817, 57726, ..., 96141, 1211, 43102]]), + values=tensor([0.9105, 0.8450, 0.7220, ..., 0.2486, 0.5072, 0.3381]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.9907, 0.5259, 0.8933, ..., 0.9239, 0.0835, 0.7437]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 429.7097659111023 seconds + +tensor(indices=tensor([[45550, 27488, 59840, ..., 29384, 1287, 96784], + [15145, 15817, 57726, ..., 96141, 1211, 43102]]), + values=tensor([0.9105, 0.8450, 0.7220, ..., 0.2486, 0.5072, 0.3381]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.9907, 0.5259, 0.8933, ..., 0.9239, 0.0835, 0.7437]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 429.7097659111023 seconds + +[19.44, 18.62, 21.6, 19.84, 19.23, 18.84, 18.82, 18.53, 18.64, 18.42] +[53.22] +455.5731840133667 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 429.7097659111023, 'TIME_S_1KI': 4297.097659111023, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 24245.604853191377, 'W': 53.22} +[19.44, 18.62, 21.6, 19.84, 
19.23, 18.84, 18.82, 18.53, 18.64, 18.42, 19.7, 18.84, 18.76, 18.67, 19.01, 18.65, 19.0, 23.03, 19.01, 18.47] +347.105 +17.35525 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 429.7097659111023, 'TIME_S_1KI': 4297.097659111023, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 24245.604853191377, 'W': 53.22, 'J_1KI': 242456.04853191378, 'W_1KI': 532.2, 'W_D': 35.86475, 'J_D': 16339.018351343393, 'W_D_1KI': 358.64750000000004, 'J_D_1KI': 3586.4750000000004} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..58b91a0 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2724, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.093451261520386, "TIME_S_1KI": 3.7053785835243707, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 734.1387234449387, "W": 52.86000000000001, "J_1KI": 269.50760772574847, "W_1KI": 19.40528634361234, "W_D": 35.55900000000001, "J_D": 493.85620255351085, "W_D_1KI": 13.053964757709256, "J_D_1KI": 4.792204389761108} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..10f259d --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.3854491710662842} + +tensor(indices=tensor([[55102, 24162, 47477, ..., 1641, 27945, 96455], + [48361, 3693, 59360, ..., 86601, 84390, 91492]]), + values=tensor([0.4770, 0.5969, 0.7353, ..., 0.7880, 0.3261, 0.0262]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.5704, 0.6401, 0.7937, ..., 0.6238, 0.9560, 0.6940]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.3854491710662842 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '2724', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.093451261520386} + +tensor(indices=tensor([[17345, 46519, 99801, ..., 65831, 66294, 46607], + [89634, 5145, 88454, ..., 72126, 29671, 71811]]), + values=tensor([0.1219, 0.9233, 0.1476, ..., 0.0275, 0.2989, 
0.9160]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.8218, 0.2081, 0.2952, ..., 0.6822, 0.0240, 0.6229]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.093451261520386 seconds + +tensor(indices=tensor([[17345, 46519, 99801, ..., 65831, 66294, 46607], + [89634, 5145, 88454, ..., 72126, 29671, 71811]]), + values=tensor([0.1219, 0.9233, 0.1476, ..., 0.0275, 0.2989, 0.9160]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.8218, 0.2081, 0.2952, ..., 0.6822, 0.0240, 0.6229]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.093451261520386 seconds + +[18.95, 18.56, 18.88, 18.83, 18.59, 18.44, 22.58, 19.67, 18.47, 19.03] +[52.86] +13.888360261917114 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2724, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.093451261520386, 'TIME_S_1KI': 3.7053785835243707, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 734.1387234449387, 'W': 52.86000000000001} +[18.95, 18.56, 18.88, 18.83, 18.59, 18.44, 22.58, 19.67, 18.47, 19.03, 19.23, 18.84, 19.68, 22.0, 18.68, 19.1, 18.78, 18.47, 18.67, 18.35] +346.02 +17.301 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2724, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.093451261520386, 'TIME_S_1KI': 3.7053785835243707, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 734.1387234449387, 'W': 52.86000000000001, 'J_1KI': 269.50760772574847, 'W_1KI': 19.40528634361234, 'W_D': 35.55900000000001, 'J_D': 493.85620255351085, 'W_D_1KI': 13.053964757709256, 'J_D_1KI': 4.792204389761108} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..c15d0a1 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 573, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.49574875831604, "TIME_S_1KI": 18.317188059888377, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 756.856569442749, "W": 52.94, "J_1KI": 1320.8666133381307, "W_1KI": 92.39092495636999, "W_D": 36.09925, "J_D": 516.0928317804336, "W_D_1KI": 63.00043630017452, "J_D_1KI": 109.94840541042673} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..a4ac413 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.8319754600524902} + +tensor(indices=tensor([[47838, 47100, 19233, ..., 41641, 53716, 35598], + [74813, 88822, 78296, ..., 33162, 9646, 86304]]), + values=tensor([0.8634, 0.4276, 0.7260, ..., 0.3848, 0.6948, 0.3358]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.5941, 0.0634, 0.7672, ..., 0.8189, 0.2331, 0.0247]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 1.8319754600524902 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '573', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.49574875831604} + +tensor(indices=tensor([[38009, 89038, 54268, ..., 32577, 19838, 48612], + [88858, 41395, 35069, ..., 78719, 38684, 64505]]), + values=tensor([0.2791, 0.2343, 0.4897, ..., 0.4301, 0.7790, 0.9437]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.0009, 0.2565, 0.1766, ..., 0.5332, 0.0593, 0.5297]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.49574875831604 seconds + +tensor(indices=tensor([[38009, 89038, 54268, ..., 32577, 19838, 48612], + [88858, 41395, 35069, ..., 78719, 38684, 64505]]), + values=tensor([0.2791, 0.2343, 0.4897, ..., 0.4301, 0.7790, 0.9437]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.0009, 0.2565, 0.1766, ..., 0.5332, 0.0593, 0.5297]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.49574875831604 seconds + +[18.93, 18.52, 18.63, 18.76, 18.87, 18.81, 18.65, 18.6, 18.9, 18.44] +[52.94] +14.296497344970703 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 573, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.49574875831604, 'TIME_S_1KI': 18.317188059888377, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 756.856569442749, 'W': 52.94} +[18.93, 18.52, 18.63, 18.76, 18.87, 18.81, 18.65, 18.6, 18.9, 18.44, 18.91, 19.29, 18.57, 18.42, 18.72, 18.74, 18.56, 18.39, 18.69, 19.11] +336.815 +16.84075 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 573, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.49574875831604, 'TIME_S_1KI': 18.317188059888377, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 756.856569442749, 'W': 52.94, 'J_1KI': 1320.8666133381307, 'W_1KI': 92.39092495636999, 'W_D': 36.09925, 'J_D': 516.0928317804336, 'W_D_1KI': 63.00043630017452, 'J_D_1KI': 109.94840541042673} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 
0000000..c6530b5 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 29249, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.530925750732422, "TIME_S_1KI": 0.36004395879286205, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 755.9761116027832, "W": 53.2, "J_1KI": 25.84622078029277, "W_1KI": 1.8188656022428118, "W_D": 36.273, "J_D": 515.442133386612, "W_D_1KI": 1.2401449622209306, "J_D_1KI": 0.0423995679244053} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..e3e1a69 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04764437675476074} + +tensor(indices=tensor([[5814, 9094, 1458, ..., 34, 2110, 8341], + [4237, 6782, 6829, ..., 19, 9116, 3192]]), + values=tensor([0.8820, 0.4230, 0.9421, ..., 0.7423, 0.4962, 0.3514]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.8589, 0.4418, 0.7896, ..., 0.0174, 0.2612, 0.3360]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.04764437675476074 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '22038', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.911324739456177} + +tensor(indices=tensor([[ 793, 1089, 3494, ..., 3665, 5335, 5528], + [9627, 4085, 7614, ..., 9405, 6406, 503]]), + values=tensor([0.6035, 0.8180, 0.5000, ..., 0.5932, 0.4942, 0.2657]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.0929, 0.3906, 0.2452, ..., 0.4138, 0.1079, 0.6748]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 7.911324739456177 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '29249', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.530925750732422} + +tensor(indices=tensor([[4141, 1049, 815, ..., 9680, 4230, 8781], + [6981, 2174, 1361, ..., 6746, 198, 3859]]), + values=tensor([0.6143, 0.6570, 0.4229, ..., 0.8375, 0.1628, 0.5245]), + 
size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.3833, 0.1873, 0.6775, ..., 0.4820, 0.3305, 0.6173]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.530925750732422 seconds + +tensor(indices=tensor([[4141, 1049, 815, ..., 9680, 4230, 8781], + [6981, 2174, 1361, ..., 6746, 198, 3859]]), + values=tensor([0.6143, 0.6570, 0.4229, ..., 0.8375, 0.1628, 0.5245]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.3833, 0.1873, 0.6775, ..., 0.4820, 0.3305, 0.6173]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.530925750732422 seconds + +[19.12, 18.53, 18.9, 18.6, 18.74, 18.54, 18.67, 18.81, 18.87, 18.5] +[53.2] +14.210077285766602 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 29249, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.530925750732422, 'TIME_S_1KI': 0.36004395879286205, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 755.9761116027832, 'W': 53.2} +[19.12, 18.53, 18.9, 18.6, 18.74, 18.54, 18.67, 18.81, 18.87, 18.5, 19.21, 18.83, 18.69, 18.46, 19.16, 18.49, 19.93, 18.53, 18.93, 18.89] +338.54 +16.927 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 29249, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.530925750732422, 'TIME_S_1KI': 0.36004395879286205, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 755.9761116027832, 'W': 53.2, 'J_1KI': 25.84622078029277, 'W_1KI': 1.8188656022428118, 'W_D': 36.273, 'J_D': 515.442133386612, 'W_D_1KI': 1.2401449622209306, 'J_D_1KI': 0.0423995679244053} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..96c3f6b --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2888, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.225720405578613, "TIME_S_1KI": 3.5407619132889936, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 737.7439665412903, "W": 52.92, "J_1KI": 255.45151196028058, "W_1KI": 18.32409972299169, "W_D": 35.52125, "J_D": 495.19251457870007, "W_D_1KI": 12.299601800554017, "J_D_1KI": 4.25886488938851} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..a6776c2 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 
100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.36356496810913086} + +tensor(indices=tensor([[8070, 8899, 3966, ..., 9525, 8845, 9724], + [2585, 1906, 1004, ..., 3283, 8088, 896]]), + values=tensor([0.5255, 0.9821, 0.8733, ..., 0.0144, 0.2245, 0.8317]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9872, 0.0901, 0.6971, ..., 0.5153, 0.7017, 0.0912]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.36356496810913086 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '2888', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.225720405578613} + +tensor(indices=tensor([[7853, 7408, 8535, ..., 3232, 3169, 9803], + [ 674, 9704, 9334, ..., 372, 9472, 1043]]), + values=tensor([0.6076, 0.3484, 0.8180, ..., 0.3971, 0.8683, 0.7597]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.1961, 0.9816, 0.3285, ..., 0.8819, 0.7602, 0.9682]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.225720405578613 seconds + +tensor(indices=tensor([[7853, 7408, 8535, ..., 3232, 3169, 9803], + [ 674, 9704, 9334, ..., 372, 9472, 1043]]), + values=tensor([0.6076, 0.3484, 0.8180, ..., 0.3971, 0.8683, 0.7597]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.1961, 0.9816, 0.3285, ..., 0.8819, 0.7602, 0.9682]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.225720405578613 seconds + +[18.95, 18.61, 19.18, 18.34, 18.72, 22.89, 18.86, 18.45, 19.14, 18.51] +[52.92] +13.94074010848999 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2888, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.225720405578613, 'TIME_S_1KI': 3.5407619132889936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 737.7439665412903, 'W': 52.92} +[18.95, 18.61, 19.18, 18.34, 18.72, 22.89, 18.86, 18.45, 19.14, 18.51, 19.02, 23.18, 19.2, 19.85, 19.69, 18.41, 18.62, 18.63, 18.76, 18.41] +347.975 +17.39875 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2888, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.225720405578613, 'TIME_S_1KI': 3.5407619132889936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 737.7439665412903, 'W': 52.92, 'J_1KI': 255.45151196028058, 'W_1KI': 18.32409972299169, 'W_D': 35.52125, 'J_D': 495.19251457870007, 'W_D_1KI': 12.299601800554017, 'J_D_1KI': 4.25886488938851} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..8c291a4 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, 
"ITERATIONS": 298, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.47829008102417, "TIME_S_1KI": 35.16204725175896, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 767.5406039357185, "W": 53.01, "J_1KI": 2575.639610522546, "W_1KI": 177.88590604026842, "W_D": 36.254999999999995, "J_D": 524.9421730935572, "W_D_1KI": 121.66107382550334, "J_D_1KI": 408.2586369983334} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..e784027 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.5194344520568848} + +tensor(indices=tensor([[3654, 6452, 587, ..., 850, 5136, 577], + [6053, 7007, 5699, ..., 4834, 9822, 1975]]), + values=tensor([0.0110, 0.0233, 0.3725, ..., 0.9397, 0.6782, 0.1946]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.4410, 0.5527, 0.9521, ..., 0.4811, 0.5857, 0.0363]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 3.5194344520568848 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '298', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.47829008102417} + +tensor(indices=tensor([[5487, 6843, 6460, ..., 6149, 3051, 4869], + [4765, 2140, 6995, ..., 6813, 1753, 4926]]), + values=tensor([0.3736, 0.6438, 0.8428, ..., 0.4015, 0.6317, 0.4357]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.8014, 0.7170, 0.4660, ..., 0.2951, 0.6544, 0.0082]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.47829008102417 seconds + +tensor(indices=tensor([[5487, 6843, 6460, ..., 6149, 3051, 4869], + [4765, 2140, 6995, ..., 6813, 1753, 4926]]), + values=tensor([0.3736, 0.6438, 0.8428, ..., 0.4015, 0.6317, 0.4357]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.8014, 0.7170, 0.4660, ..., 0.2951, 0.6544, 0.0082]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.47829008102417 seconds + +[19.05, 18.74, 18.99, 18.45, 18.52, 18.5, 18.89, 18.49, 18.48, 18.44] +[53.01] +14.479166269302368 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 298, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 
'TIME_S': 10.47829008102417, 'TIME_S_1KI': 35.16204725175896, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 767.5406039357185, 'W': 53.01} +[19.05, 18.74, 18.99, 18.45, 18.52, 18.5, 18.89, 18.49, 18.48, 18.44, 18.98, 18.44, 18.68, 18.54, 18.55, 18.63, 18.48, 18.76, 18.46, 18.53] +335.1 +16.755000000000003 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 298, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.47829008102417, 'TIME_S_1KI': 35.16204725175896, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 767.5406039357185, 'W': 53.01, 'J_1KI': 2575.639610522546, 'W_1KI': 177.88590604026842, 'W_D': 36.254999999999995, 'J_D': 524.9421730935572, 'W_D_1KI': 121.66107382550334, 'J_D_1KI': 408.2586369983334} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..dcff7e0 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 17.49826955795288, "TIME_S_1KI": 174.9826955795288, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1223.0167962312698, "W": 53.25, "J_1KI": 12230.167962312698, "W_1KI": 532.5, "W_D": 36.153, "J_D": 830.3422766976356, "W_D_1KI": 361.53, "J_D_1KI": 3615.2999999999997} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..fbdfcf6 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 17.49826955795288} + +tensor(indices=tensor([[7796, 2939, 8332, ..., 3525, 3120, 8313], + [4658, 5724, 1205, ..., 696, 1199, 2082]]), + values=tensor([0.1558, 0.1516, 0.9095, ..., 0.9314, 0.5341, 0.6592]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9120, 0.7513, 0.5896, ..., 0.2518, 0.7399, 0.6609]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 17.49826955795288 seconds + +tensor(indices=tensor([[7796, 2939, 8332, ..., 3525, 3120, 8313], + [4658, 5724, 1205, ..., 696, 1199, 2082]]), + values=tensor([0.1558, 0.1516, 0.9095, ..., 0.9314, 0.5341, 0.6592]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9120, 0.7513, 0.5896, ..., 0.2518, 0.7399, 0.6609]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 17.49826955795288 seconds + +[19.16, 22.16, 19.28, 18.41, 21.32, 18.71, 18.54, 18.66, 18.95, 18.36] 
+[53.25] +22.967451572418213 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 17.49826955795288, 'TIME_S_1KI': 174.9826955795288, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1223.0167962312698, 'W': 53.25} +[19.16, 22.16, 19.28, 18.41, 21.32, 18.71, 18.54, 18.66, 18.95, 18.36, 19.08, 18.74, 18.4, 18.5, 18.64, 18.51, 18.69, 18.28, 18.44, 18.82] +341.94000000000005 +17.097 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 17.49826955795288, 'TIME_S_1KI': 174.9826955795288, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1223.0167962312698, 'W': 53.25, 'J_1KI': 12230.167962312698, 'W_1KI': 532.5, 'W_D': 36.153, 'J_D': 830.3422766976356, 'W_D_1KI': 361.53, 'J_D_1KI': 3615.2999999999997} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..e5cb697 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 35.12812423706055, "TIME_S_1KI": 351.28124237060547, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2250.224948012829, "W": 53.39, "J_1KI": 22502.24948012829, "W_1KI": 533.9000000000001, "W_D": 35.64075, "J_D": 1502.148432588279, "W_D_1KI": 356.40749999999997, "J_D_1KI": 3564.075} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..7a72e0d --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 35.12812423706055} + +tensor(indices=tensor([[6698, 9307, 6437, ..., 2289, 5848, 3791], + [7340, 1765, 7872, ..., 2927, 307, 9302]]), + values=tensor([0.0196, 0.8715, 0.3597, ..., 0.8985, 0.2718, 0.2751]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8883, 0.3132, 0.4078, ..., 0.3205, 0.4829, 0.4579]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 35.12812423706055 seconds + +tensor(indices=tensor([[6698, 9307, 6437, ..., 2289, 5848, 3791], + [7340, 1765, 7872, ..., 2927, 307, 9302]]), + values=tensor([0.0196, 0.8715, 0.3597, ..., 0.8985, 0.2718, 0.2751]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8883, 0.3132, 0.4078, ..., 0.3205, 0.4829, 0.4579]) +Matrix 
Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 35.12812423706055 seconds + +[18.88, 18.58, 18.83, 18.87, 18.47, 18.3, 18.52, 18.85, 19.26, 18.67] +[53.39] +42.146936655044556 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 35.12812423706055, 'TIME_S_1KI': 351.28124237060547, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2250.224948012829, 'W': 53.39} +[18.88, 18.58, 18.83, 18.87, 18.47, 18.3, 18.52, 18.85, 19.26, 18.67, 19.39, 18.78, 18.95, 18.43, 18.71, 18.8, 18.53, 36.48, 18.79, 18.73] +354.985 +17.74925 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 35.12812423706055, 'TIME_S_1KI': 351.28124237060547, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2250.224948012829, 'W': 53.39, 'J_1KI': 22502.24948012829, 'W_1KI': 533.9000000000001, 'W_D': 35.64075, 'J_D': 1502.148432588279, 'W_D_1KI': 356.40749999999997, 'J_D_1KI': 3564.075} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..2762ec0 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 70.18838477134705, "TIME_S_1KI": 701.8838477134705, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4299.211010894775, "W": 53.36, "J_1KI": 42992.11010894775, "W_1KI": 533.5999999999999, "W_D": 36.20225, "J_D": 2916.812440389156, "W_D_1KI": 362.0225, "J_D_1KI": 3620.2249999999995} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..8885133 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 70.18838477134705} + +tensor(indices=tensor([[5797, 5482, 210, ..., 7841, 5780, 4358], + [4414, 5324, 9857, ..., 6704, 5929, 3437]]), + values=tensor([0.9887, 0.5554, 0.6963, ..., 0.8302, 0.2539, 0.4394]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.2946, 0.8628, 0.5214, ..., 0.5536, 0.8504, 0.0648]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 70.18838477134705 seconds + +tensor(indices=tensor([[5797, 5482, 210, ..., 7841, 5780, 4358], + [4414, 
5324, 9857, ..., 6704, 5929, 3437]]), + values=tensor([0.9887, 0.5554, 0.6963, ..., 0.8302, 0.2539, 0.4394]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.2946, 0.8628, 0.5214, ..., 0.5536, 0.8504, 0.0648]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 70.18838477134705 seconds + +[19.87, 18.93, 18.53, 22.08, 20.02, 18.5, 19.13, 18.47, 18.66, 18.52] +[53.36] +80.56992149353027 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 70.18838477134705, 'TIME_S_1KI': 701.8838477134705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4299.211010894775, 'W': 53.36} +[19.87, 18.93, 18.53, 22.08, 20.02, 18.5, 19.13, 18.47, 18.66, 18.52, 19.48, 19.12, 18.72, 18.59, 19.13, 18.59, 18.66, 18.44, 19.41, 18.48] +343.155 +17.15775 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 70.18838477134705, 'TIME_S_1KI': 701.8838477134705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4299.211010894775, 'W': 53.36, 'J_1KI': 42992.11010894775, 'W_1KI': 533.5999999999999, 'W_D': 36.20225, 'J_D': 2916.812440389156, 'W_D_1KI': 362.0225, 'J_D_1KI': 3620.2249999999995} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..1a46f1f --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 105.28500413894653, "TIME_S_1KI": 1052.8500413894653, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6421.246587808132, "W": 54.01, "J_1KI": 64212.46587808132, "W_1KI": 540.1, "W_D": 36.906499999999994, "J_D": 4387.812205016493, "W_D_1KI": 369.06499999999994, "J_D_1KI": 3690.649999999999} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..79b7be4 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 105.28500413894653} + +tensor(indices=tensor([[1309, 2263, 4256, ..., 6284, 7791, 3791], + [9140, 8066, 1918, ..., 8537, 9814, 4363]]), + values=tensor([0.8932, 0.2978, 0.2898, ..., 0.4756, 0.9985, 0.1813]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.3467, 0.3617, 0.9528, ..., 0.4984, 0.6391, 
0.6014]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 105.28500413894653 seconds + +tensor(indices=tensor([[1309, 2263, 4256, ..., 6284, 7791, 3791], + [9140, 8066, 1918, ..., 8537, 9814, 4363]]), + values=tensor([0.8932, 0.2978, 0.2898, ..., 0.4756, 0.9985, 0.1813]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.3467, 0.3617, 0.9528, ..., 0.4984, 0.6391, 0.6014]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 105.28500413894653 seconds + +[19.03, 19.03, 19.02, 18.46, 22.55, 19.01, 18.57, 18.99, 18.68, 18.68] +[54.01] +118.88995718955994 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 105.28500413894653, 'TIME_S_1KI': 1052.8500413894653, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6421.246587808132, 'W': 54.01} +[19.03, 19.03, 19.02, 18.46, 22.55, 19.01, 18.57, 18.99, 18.68, 18.68, 19.05, 18.42, 18.65, 18.72, 18.87, 18.36, 18.99, 18.42, 19.7, 18.5] +342.07000000000005 +17.103500000000004 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 105.28500413894653, 'TIME_S_1KI': 1052.8500413894653, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6421.246587808132, 'W': 54.01, 'J_1KI': 64212.46587808132, 'W_1KI': 540.1, 'W_D': 36.906499999999994, 'J_D': 4387.812205016493, 'W_D_1KI': 369.06499999999994, 'J_D_1KI': 3690.649999999999} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..baaf72b --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 140.32227563858032, "TIME_S_1KI": 1403.2227563858032, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 8384.47321164131, "W": 53.53999999999999, "J_1KI": 83844.73211641311, "W_1KI": 535.3999999999999, "W_D": 36.693749999999994, "J_D": 5746.316098424792, "W_D_1KI": 366.93749999999994, "J_D_1KI": 3669.3749999999995} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..6cec52c --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 
140.32227563858032} + +tensor(indices=tensor([[ 595, 7978, 3279, ..., 8901, 5345, 7774], + [8014, 1927, 8978, ..., 1113, 9015, 414]]), + values=tensor([0.0613, 0.8201, 0.3391, ..., 0.1226, 0.6645, 0.0568]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.7600, 0.1703, 0.4154, ..., 0.4075, 0.3991, 0.5788]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 140.32227563858032 seconds + +tensor(indices=tensor([[ 595, 7978, 3279, ..., 8901, 5345, 7774], + [8014, 1927, 8978, ..., 1113, 9015, 414]]), + values=tensor([0.0613, 0.8201, 0.3391, ..., 0.1226, 0.6645, 0.0568]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.7600, 0.1703, 0.4154, ..., 0.4075, 0.3991, 0.5788]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 140.32227563858032 seconds + +[19.35, 18.66, 18.96, 18.79, 18.6, 18.61, 18.86, 18.6, 18.53, 18.56] +[53.54] +156.60203981399536 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 140.32227563858032, 'TIME_S_1KI': 1403.2227563858032, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8384.47321164131, 'W': 53.53999999999999} +[19.35, 18.66, 18.96, 18.79, 18.6, 18.61, 18.86, 18.6, 18.53, 18.56, 19.03, 18.72, 18.65, 18.62, 18.75, 18.94, 18.53, 18.38, 18.78, 18.95] +336.925 +16.84625 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 140.32227563858032, 'TIME_S_1KI': 1403.2227563858032, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8384.47321164131, 'W': 53.53999999999999, 'J_1KI': 83844.73211641311, 'W_1KI': 535.3999999999999, 'W_D': 36.693749999999994, 'J_D': 5746.316098424792, 'W_D_1KI': 366.93749999999994, 'J_D_1KI': 3669.3749999999995} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..01a8a26 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 175.3649799823761, "TIME_S_1KI": 1753.649799823761, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 10459.217702465057, "W": 53.56, "J_1KI": 104592.17702465056, "W_1KI": 535.6, "W_D": 36.71175, "J_D": 7169.084867223144, "W_D_1KI": 367.11750000000006, "J_D_1KI": 3671.1750000000006} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..78e8d09 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', 
'--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 175.3649799823761} + +tensor(indices=tensor([[3328, 5614, 1786, ..., 2116, 9994, 4657], + [7159, 9986, 7146, ..., 7832, 6698, 5324]]), + values=tensor([0.6988, 0.5829, 0.8912, ..., 0.6255, 0.2496, 0.7717]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.8806, 0.8140, 0.3380, ..., 0.3737, 0.7757, 0.9081]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 175.3649799823761 seconds + +tensor(indices=tensor([[3328, 5614, 1786, ..., 2116, 9994, 4657], + [7159, 9986, 7146, ..., 7832, 6698, 5324]]), + values=tensor([0.6988, 0.5829, 0.8912, ..., 0.6255, 0.2496, 0.7717]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.8806, 0.8140, 0.3380, ..., 0.3737, 0.7757, 0.9081]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 175.3649799823761 seconds + +[19.06, 18.6, 18.88, 18.62, 18.56, 18.63, 18.78, 18.49, 18.85, 18.59] +[53.56] +195.28039026260376 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 175.3649799823761, 'TIME_S_1KI': 1753.649799823761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10459.217702465057, 'W': 53.56} +[19.06, 18.6, 18.88, 18.62, 18.56, 18.63, 18.78, 18.49, 18.85, 18.59, 18.99, 18.32, 18.57, 18.71, 18.48, 19.21, 19.14, 18.59, 18.98, 18.47] +336.965 +16.84825 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 175.3649799823761, 'TIME_S_1KI': 1753.649799823761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10459.217702465057, 'W': 53.56, 'J_1KI': 104592.17702465056, 'W_1KI': 535.6, 'W_D': 36.71175, 'J_D': 7169.084867223144, 'W_D_1KI': 367.11750000000006, 'J_D_1KI': 3671.1750000000006} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..9d3f07b --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 243279, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.448915243148804, "TIME_S_1KI": 0.04295033785550255, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 749.9146813058853, "W": 52.97, "J_1KI": 3.082529446873283, "W_1KI": 0.21773354872389314, "W_D": 35.7745, "J_D": 506.47201748871805, "W_D_1KI": 0.14705132789924327, "J_D_1KI": 0.0006044554930727407} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 
0000000..25fe7f1 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1069 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.016295909881591797} + +tensor(indices=tensor([[1893, 5744, 7259, ..., 1543, 2844, 6725], + [1973, 1738, 2637, ..., 444, 6562, 2862]]), + values=tensor([7.6366e-01, 7.2006e-01, 8.9890e-01, 4.0741e-01, + 3.0453e-01, 5.9903e-01, 3.9418e-01, 6.4119e-02, + 4.4724e-01, 9.5756e-01, 9.5629e-01, 7.0174e-01, + 9.3965e-01, 6.9448e-01, 4.6276e-01, 7.4121e-01, + 2.5921e-01, 5.5226e-01, 3.7141e-01, 2.3353e-01, + 1.5098e-03, 8.5306e-01, 5.0953e-01, 2.1640e-01, + 8.9284e-02, 8.7097e-01, 6.4556e-01, 6.5166e-01, + 6.1651e-01, 5.5198e-01, 9.5599e-01, 2.9964e-01, + 6.1153e-01, 2.1368e-01, 2.3893e-01, 2.3075e-02, + 6.4202e-01, 9.3380e-01, 4.0609e-01, 5.8107e-01, + 4.5730e-01, 7.3697e-02, 2.5735e-01, 3.7286e-01, + 5.7404e-01, 9.7056e-01, 2.0962e-01, 5.8451e-01, + 7.1054e-01, 1.4715e-01, 1.6931e-01, 6.4809e-01, + 6.5025e-01, 3.9466e-03, 7.9839e-01, 8.9756e-02, + 8.6532e-01, 7.8706e-01, 3.2276e-01, 5.6572e-01, + 3.0348e-02, 7.8497e-01, 6.3816e-01, 1.8693e-01, + 5.8240e-01, 4.3859e-01, 5.7997e-01, 7.9517e-01, + 7.1819e-01, 1.2368e-01, 6.1617e-01, 8.2170e-01, + 4.1260e-01, 7.6475e-01, 4.6024e-01, 6.7793e-01, + 8.1448e-01, 5.7855e-01, 9.0943e-01, 4.1022e-01, + 5.7035e-01, 3.0408e-01, 1.7708e-01, 9.3292e-01, + 7.2378e-01, 2.0210e-01, 8.1006e-01, 1.7445e-01, + 9.8759e-01, 8.3233e-01, 1.4123e-01, 2.8335e-01, + 8.4725e-01, 5.7493e-01, 4.5647e-01, 7.4835e-01, + 2.0901e-01, 3.1534e-01, 5.5372e-01, 9.7570e-01, + 9.8749e-02, 4.0364e-01, 4.6708e-01, 8.9380e-02, + 2.1678e-01, 6.3934e-01, 6.8350e-01, 8.3258e-02, + 2.4446e-01, 5.9284e-01, 5.8256e-01, 6.2868e-01, + 2.6028e-01, 1.0085e-01, 7.0392e-01, 7.3295e-01, + 6.9946e-02, 5.5962e-01, 5.8387e-03, 1.0716e-01, + 2.4736e-01, 6.4700e-02, 4.1315e-01, 3.4965e-01, + 1.5845e-01, 4.0573e-01, 3.8275e-01, 6.6676e-01, + 6.9798e-01, 8.0212e-01, 8.5004e-01, 4.5068e-01, + 6.3631e-01, 5.9295e-01, 1.7129e-01, 8.8242e-01, + 5.1059e-01, 7.0758e-01, 8.2971e-01, 9.5626e-01, + 8.3860e-01, 2.6516e-01, 4.7338e-02, 2.7864e-01, + 9.5604e-01, 7.6373e-01, 8.4050e-01, 7.6968e-01, + 3.9068e-01, 9.0302e-01, 2.9489e-01, 5.4188e-01, + 1.2723e-01, 5.2253e-02, 5.2618e-01, 6.3010e-01, + 8.7796e-02, 2.1447e-01, 3.7767e-01, 4.4474e-02, + 2.1553e-01, 6.9158e-01, 3.9182e-01, 9.7340e-01, + 6.8468e-01, 8.3068e-01, 4.5750e-01, 1.7330e-02, + 1.1742e-01, 9.1369e-01, 4.4347e-01, 7.8846e-01, + 3.7680e-01, 9.3853e-02, 1.7117e-01, 6.1448e-02, + 3.5999e-02, 9.1247e-01, 7.7892e-01, 1.9347e-01, + 2.4058e-01, 3.8535e-01, 2.7223e-01, 7.7910e-01, + 3.6639e-02, 9.5754e-01, 5.8294e-01, 1.5266e-01, + 6.4858e-01, 1.3558e-01, 6.7876e-01, 8.6038e-02, + 3.7387e-01, 9.9351e-01, 6.5162e-01, 9.2518e-01, + 5.2885e-03, 8.5567e-01, 1.5817e-01, 6.8405e-01, + 6.3390e-01, 1.5217e-01, 7.5291e-01, 9.8893e-01, + 1.8018e-01, 7.3826e-01, 2.0358e-01, 6.5376e-01, + 4.8354e-01, 7.4526e-02, 5.4525e-01, 2.8526e-01, + 9.6809e-01, 3.3403e-01, 6.5543e-01, 9.7910e-01, + 7.2733e-01, 4.7372e-01, 3.4346e-01, 7.8242e-01, + 3.6193e-01, 7.0558e-01, 1.2773e-01, 1.0945e-01, + 5.7803e-01, 9.0997e-01, 
1.7286e-01, 6.6642e-01, + 4.8877e-01, 2.3764e-01, 8.5922e-02, 2.5488e-02, + 1.1041e-01, 5.6699e-01, 8.8500e-01, 6.5809e-01, + 7.3786e-01, 7.4005e-01, 2.6103e-01, 8.5701e-01, + 6.9005e-01, 7.1121e-01, 6.4793e-01, 9.1155e-01, + 4.0921e-01, 2.1258e-01, 8.9770e-02, 6.2828e-01, + 5.9456e-01, 1.7843e-01, 1.8065e-01, 7.7342e-01, + 1.3986e-01, 6.5798e-01, 6.8403e-02, 4.7516e-02, + 9.0463e-01, 2.1433e-01, 9.4433e-01, 3.3385e-01, + 5.4546e-01, 5.3822e-01, 1.3718e-01, 9.5318e-01, + 3.5832e-01, 2.3610e-01, 2.8527e-01, 1.5799e-01, + 2.4920e-01, 3.5272e-01, 7.3833e-01, 3.8810e-01, + 8.6568e-01, 3.8985e-01, 3.7966e-01, 5.4301e-02, + 2.3256e-01, 5.7437e-01, 9.7183e-01, 9.1034e-01, + 9.0925e-01, 9.2809e-02, 7.4210e-01, 2.3298e-01, + 2.0171e-01, 2.4545e-02, 1.4218e-01, 1.9548e-01, + 4.4248e-01, 8.5278e-01, 4.8797e-01, 2.9280e-01, + 6.7650e-01, 9.2949e-01, 2.9516e-01, 6.3883e-01, + 4.6627e-02, 7.2870e-01, 3.9185e-01, 5.7767e-01, + 5.8581e-01, 6.6386e-01, 8.6209e-01, 8.8296e-01, + 9.7099e-01, 6.2353e-01, 8.7653e-01, 3.9346e-01, + 8.3622e-01, 1.4166e-01, 1.1092e-01, 6.4694e-01, + 5.6085e-01, 7.9158e-01, 1.0037e-01, 7.9925e-01, + 4.8466e-01, 9.2448e-01, 9.4755e-02, 6.0216e-01, + 2.5057e-01, 7.1518e-01, 7.7508e-01, 6.1752e-01, + 7.7380e-01, 4.5195e-01, 6.4422e-01, 1.4590e-01, + 9.5685e-01, 1.2279e-01, 8.5831e-01, 5.0904e-01, + 4.2707e-01, 5.4917e-01, 1.0405e-01, 2.5070e-01, + 6.3469e-01, 7.7563e-01, 9.2705e-01, 7.6467e-01, + 4.8995e-02, 4.2823e-01, 7.1263e-01, 1.0067e-01, + 8.7600e-01, 8.1979e-01, 7.7398e-01, 9.8253e-01, + 9.1596e-01, 4.7317e-01, 3.2293e-01, 5.2503e-01, + 4.4777e-01, 1.9734e-01, 3.2926e-01, 1.4615e-01, + 4.6559e-01, 1.7641e-01, 5.6372e-01, 3.5662e-01, + 7.3841e-02, 7.1475e-01, 7.6684e-01, 8.8574e-01, + 8.5861e-01, 8.6478e-01, 2.1818e-01, 8.2734e-02, + 8.6217e-01, 9.9331e-01, 4.9574e-01, 9.6409e-02, + 2.7451e-01, 6.6938e-01, 6.2741e-01, 3.8885e-01, + 4.2114e-02, 4.5732e-01, 4.7238e-01, 1.4805e-01, + 7.8927e-01, 2.3341e-01, 6.6210e-02, 2.0739e-01, + 1.0606e-01, 9.2433e-01, 6.6017e-01, 5.8480e-01, + 4.1005e-01, 4.8688e-01, 5.0360e-01, 5.6680e-01, + 2.4206e-01, 8.5313e-01, 6.9481e-01, 7.4892e-01, + 5.9624e-01, 4.5256e-01, 7.7041e-01, 9.5807e-01, + 3.2582e-01, 2.3720e-02, 8.5779e-01, 7.0687e-01, + 5.7982e-02, 2.4589e-01, 3.8218e-02, 9.1826e-01, + 6.9686e-01, 4.1478e-01, 7.3310e-02, 5.9644e-03, + 7.9470e-01, 5.4605e-01, 1.9208e-01, 2.2807e-01, + 9.1400e-02, 3.5782e-01, 2.2914e-02, 9.7713e-01, + 6.9538e-01, 3.9570e-01, 4.4355e-01, 1.9055e-01, + 2.3989e-02, 5.0368e-01, 7.5055e-01, 6.4749e-02, + 6.0824e-01, 2.7956e-01, 4.6555e-01, 3.1494e-01, + 1.0202e-01, 1.9190e-01, 1.5056e-01, 2.7226e-01, + 2.6752e-01, 4.2763e-02, 9.3570e-01, 8.4005e-01, + 5.9089e-01, 4.6283e-01, 2.6943e-01, 8.7203e-01, + 3.3211e-01, 5.0462e-01, 7.4323e-02, 9.9060e-01, + 1.7384e-01, 4.4163e-01, 3.5416e-01, 2.7933e-01, + 3.5624e-01, 5.2136e-01, 7.4303e-01, 1.0321e-01, + 8.3722e-02, 9.7777e-03, 6.3904e-01, 8.4914e-01, + 1.1125e-01, 1.4356e-02, 4.7351e-01, 4.8305e-01, + 9.3748e-01, 9.2963e-01, 6.9261e-01, 4.2930e-01, + 6.7162e-01, 9.2559e-02, 9.8601e-01, 2.8868e-01, + 5.9027e-02, 8.1277e-01, 9.9709e-01, 3.5236e-01, + 8.4706e-01, 9.4544e-01, 4.5877e-01, 3.4742e-02, + 4.3278e-01, 4.2268e-01, 9.6423e-01, 1.7305e-01, + 4.6527e-01, 7.5189e-01, 4.7314e-01, 8.5211e-01, + 4.8757e-01, 7.8064e-02, 2.0846e-01, 9.3233e-01, + 4.0999e-01, 8.0056e-01, 1.8961e-01, 5.3700e-01, + 5.4484e-01, 3.1488e-01, 2.9899e-01, 7.3613e-01, + 1.0159e-01, 8.7665e-01, 9.8671e-01, 6.6826e-01, + 7.8693e-01, 5.8338e-01, 5.1336e-01, 5.3445e-02, + 9.7973e-01, 2.1508e-03, 
6.9664e-01, 2.1389e-01, + 4.5271e-01, 5.5558e-01, 1.2878e-01, 1.9234e-01, + 3.6765e-01, 3.7563e-01, 6.5337e-01, 2.7721e-01, + 8.7914e-01, 7.3771e-01, 5.9330e-01, 1.8680e-01, + 8.5425e-01, 3.1298e-01, 7.9992e-01, 2.9561e-01, + 6.4342e-01, 4.5067e-01, 1.0201e-01, 7.2405e-01, + 7.0561e-01, 5.6563e-01, 6.4630e-01, 5.0692e-01, + 6.9870e-01, 5.4229e-01, 1.5696e-01, 7.1609e-01, + 8.4390e-01, 8.8185e-01, 2.0419e-01, 9.5263e-01, + 4.7516e-01, 7.0588e-01, 3.7163e-02, 7.3811e-01, + 1.4374e-01, 9.6573e-01, 6.2968e-01, 6.0249e-02, + 5.7367e-01, 8.5406e-01, 9.4714e-01, 8.5160e-01, + 2.9443e-01, 6.2594e-01, 8.8318e-02, 3.7863e-01, + 5.4702e-01, 4.2321e-01, 6.8268e-02, 2.5493e-01, + 6.0083e-01, 5.7661e-01, 1.9728e-01, 1.4552e-01, + 2.0000e-01, 3.7679e-01, 2.8452e-01, 2.9176e-01, + 8.0461e-01, 7.9856e-01, 5.5510e-01, 3.0314e-01, + 2.0159e-01, 7.4098e-01, 5.9412e-01, 9.3161e-01, + 8.2200e-01, 6.4749e-01, 1.7452e-03, 5.5322e-01, + 9.9396e-01, 3.3180e-02, 8.8972e-01, 4.7164e-01, + 4.4212e-01, 6.6624e-01, 1.3227e-02, 8.6396e-01, + 5.9999e-01, 3.2996e-01, 9.6019e-01, 9.2524e-01, + 5.3346e-01, 7.0700e-01, 4.8550e-01, 7.2704e-02, + 7.1336e-01, 9.1157e-01, 9.2791e-01, 3.3463e-01, + 5.3329e-01, 8.5214e-01, 1.0265e-01, 1.5772e-01, + 6.4894e-01, 4.0446e-01, 4.0757e-01, 5.6796e-01, + 8.2827e-01, 7.2768e-01, 9.9021e-01, 8.6795e-01, + 3.9817e-01, 2.7334e-03, 6.3092e-01, 4.4055e-01, + 2.2546e-01, 5.3439e-01, 2.0714e-01, 6.5407e-01, + 5.1324e-01, 9.5817e-01, 7.4359e-01, 5.1248e-01, + 5.9603e-01, 9.1313e-01, 6.2888e-01, 8.3222e-01, + 1.0337e-01, 3.0029e-01, 8.3608e-01, 4.7075e-01, + 8.8966e-01, 1.6458e-01, 1.7984e-01, 2.8147e-01, + 2.1862e-01, 4.9554e-01, 2.5998e-01, 1.6570e-01, + 1.0133e-02, 2.0548e-01, 5.9109e-01, 1.2774e-01, + 3.7972e-01, 4.2843e-01, 9.3431e-01, 8.9402e-02, + 5.1128e-01, 5.5896e-01, 8.0714e-01, 9.7239e-01, + 5.8669e-01, 2.2639e-01, 7.7397e-01, 9.3650e-01, + 5.2573e-02, 8.8668e-01, 9.2035e-01, 8.2559e-02, + 5.9971e-01, 1.2359e-01, 6.3442e-01, 6.9054e-01, + 3.2237e-01, 5.8141e-01, 1.0384e-01, 1.1923e-01, + 9.5930e-01, 5.5720e-01, 6.3483e-01, 9.0411e-01, + 3.5665e-01, 3.3461e-01, 2.1764e-01, 1.6487e-01, + 6.9841e-01, 5.5157e-01, 7.3962e-01, 7.9879e-01, + 8.8661e-01, 1.5677e-01, 6.0302e-04, 2.1194e-02, + 4.5582e-01, 4.7398e-01, 8.0012e-02, 8.6329e-01, + 2.9895e-01, 8.0026e-01, 1.2674e-01, 7.6974e-01, + 7.5795e-01, 3.1940e-01, 9.9987e-02, 7.7933e-01, + 4.5220e-01, 8.4388e-01, 8.9526e-01, 7.4225e-01, + 3.9714e-01, 3.6706e-01, 1.4095e-01, 5.6830e-01, + 1.7350e-03, 9.2836e-01, 3.6965e-01, 4.0020e-01, + 3.8428e-01, 2.9910e-01, 7.5803e-01, 6.6971e-01, + 3.3674e-02, 8.1332e-01, 6.4828e-01, 4.6369e-01, + 6.1349e-01, 4.5357e-01, 1.7694e-02, 3.7016e-01, + 8.7431e-01, 8.2091e-01, 4.2112e-01, 3.8416e-01, + 1.0054e-01, 4.5559e-01, 6.5716e-01, 5.0234e-01, + 1.7976e-01, 8.2553e-01, 4.0505e-01, 4.5497e-02, + 6.1251e-01, 3.6688e-01, 8.8254e-01, 5.1178e-01, + 1.2345e-01, 6.8154e-02, 3.5937e-01, 3.3145e-01, + 3.2447e-01, 9.0365e-01, 4.5707e-01, 7.6143e-01, + 8.6628e-01, 9.1051e-01, 2.2626e-01, 5.5284e-01, + 5.2270e-01, 6.0383e-01, 3.4241e-01, 3.1220e-01, + 5.4309e-01, 8.1985e-01, 8.9703e-01, 3.5122e-01, + 9.1925e-01, 2.7477e-01, 1.1099e-01, 5.6769e-01, + 4.6516e-01, 2.3405e-02, 5.2923e-01, 8.4996e-01, + 6.2487e-02, 8.4512e-01, 9.6715e-01, 6.7813e-01, + 4.8563e-01, 4.4915e-01, 6.9562e-01, 8.1796e-01, + 3.2891e-01, 7.9628e-01, 7.6712e-01, 8.6412e-01, + 9.1381e-01, 7.0467e-02, 1.8964e-01, 4.5388e-01, + 9.5684e-01, 1.8470e-01, 3.3336e-01, 5.7368e-01, + 7.1960e-01, 1.6995e-01, 4.0317e-01, 4.4006e-01, + 6.2192e-01, 1.8097e-01, 
1.9189e-01, 6.1451e-01, + 9.1905e-01, 3.0652e-01, 2.6016e-01, 4.7390e-03, + 2.3463e-01, 2.0106e-01, 4.9993e-01, 8.9383e-01, + 4.5591e-01, 1.0929e-01, 9.1752e-01, 8.1905e-01, + 8.1471e-01, 1.8331e-01, 3.6500e-01, 5.5811e-01, + 1.4590e-01, 8.7975e-01, 7.2586e-02, 6.4853e-01, + 7.7034e-01, 5.7493e-01, 8.8134e-01, 9.5249e-01, + 7.5363e-01, 7.0651e-01, 3.1530e-01, 8.2129e-01, + 5.6738e-01, 4.6273e-01, 2.5923e-01, 4.7911e-01, + 2.5495e-01, 1.1736e-01, 4.5276e-01, 4.7090e-01, + 1.2907e-01, 6.2521e-01, 1.5886e-01, 9.4850e-01, + 6.7396e-01, 7.7832e-01, 1.8478e-01, 7.8410e-01, + 2.6194e-01, 2.6139e-01, 9.8958e-01, 8.9977e-01, + 7.0959e-01, 8.5144e-01, 9.2601e-01, 3.9302e-01, + 4.1973e-01, 9.6188e-01, 1.6634e-01, 3.8654e-02, + 8.4472e-01, 5.1119e-02, 9.1720e-01, 1.9919e-01, + 3.0580e-01, 1.9747e-01, 1.5676e-01, 5.3963e-01, + 6.3020e-01, 4.5602e-01, 9.4117e-01, 2.5115e-01, + 5.5474e-01, 1.6561e-01, 6.3267e-01, 9.4311e-01, + 4.9223e-01, 2.5086e-02, 1.2992e-01, 2.2104e-01, + 2.0660e-01, 9.3520e-01, 4.0810e-01, 6.9336e-01, + 4.5024e-01, 1.9487e-01, 7.4435e-01, 4.3437e-01, + 8.4404e-01, 9.8423e-01, 1.5771e-02, 9.7194e-01, + 7.1273e-01, 7.7338e-01, 8.6412e-01, 2.3179e-01, + 2.6862e-01, 9.7174e-01, 9.4124e-01, 4.6733e-01, + 6.6848e-01, 6.7063e-01, 1.5081e-01, 3.0360e-01, + 9.7444e-01, 5.5671e-01, 7.9369e-01, 1.6900e-01, + 6.8850e-02, 5.5846e-01, 6.9223e-02, 1.9778e-02, + 4.3130e-01, 4.2780e-01, 2.7383e-02, 6.0766e-01, + 7.3265e-01, 8.5045e-01, 8.9638e-01, 9.3134e-01, + 6.8985e-01, 4.8971e-01, 7.7024e-02, 5.9098e-01, + 3.9032e-01, 3.8986e-01, 4.3959e-01, 3.9449e-02, + 4.9229e-01, 9.1727e-01, 5.2509e-01, 7.7125e-01, + 6.2749e-01, 5.4291e-02, 4.2953e-01, 8.6633e-01, + 3.2330e-01, 3.2546e-01, 7.5190e-01, 9.3814e-01, + 1.2600e-01, 3.2619e-01, 4.4390e-01, 3.7104e-01, + 4.2718e-01, 9.8485e-01, 1.2454e-01, 1.4912e-01, + 7.4157e-03, 5.2259e-01, 1.0082e-01, 6.3061e-01, + 6.1387e-01, 8.6953e-01, 3.6091e-01, 8.4586e-02, + 7.4359e-01, 4.0504e-01, 9.0559e-01, 2.3450e-01, + 3.4234e-01, 1.5409e-01, 8.7969e-01, 9.2450e-02, + 5.3494e-01, 3.5743e-01, 5.2866e-01, 1.5012e-01, + 4.4070e-01, 7.9494e-01, 9.2640e-01, 9.2804e-01, + 4.5500e-01, 3.6548e-01, 6.1646e-01, 1.2197e-01, + 1.9639e-01, 3.7081e-01, 2.2514e-02, 5.2718e-01, + 5.0390e-01, 4.4615e-01, 2.0261e-01, 6.1684e-02, + 2.7770e-01, 2.1046e-02, 8.2175e-01, 7.3848e-01, + 1.9911e-01, 9.9028e-01, 2.3282e-01, 9.3322e-01, + 7.6054e-01, 1.4970e-01, 3.8753e-01, 3.7645e-01, + 5.7939e-01, 7.7596e-02, 5.6648e-01, 5.1868e-01, + 3.6137e-02, 7.5442e-01, 6.0670e-02, 4.1071e-01, + 9.2648e-01, 9.2680e-01, 7.5774e-01, 2.6325e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.2778, 0.0259, 0.2558, ..., 0.3655, 0.0131, 0.3370]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.016295909881591797 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '64433', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.7809412479400635} + +tensor(indices=tensor([[4956, 4313, 6277, ..., 3933, 3896, 6194], + [8499, 2460, 3678, ..., 4075, 4977, 3310]]), + values=tensor([8.8598e-01, 7.2528e-02, 4.9609e-01, 1.1187e-01, + 3.8909e-01, 2.6275e-01, 5.4428e-01, 
5.5640e-01, + 4.9040e-01, 2.6928e-01, 7.7574e-01, 5.7354e-01, + 6.0061e-01, 6.6023e-01, 7.5093e-01, 7.1823e-01, + 9.0372e-02, 7.7719e-01, 6.7629e-01, 9.3621e-01, + 5.0462e-01, 9.9501e-02, 1.9092e-01, 1.4085e-01, + 3.4178e-01, 9.1093e-01, 2.2172e-01, 1.1020e-01, + 2.0456e-01, 7.7932e-01, 6.5586e-02, 1.5020e-01, + 5.1755e-02, 1.8222e-01, 5.3542e-02, 4.7339e-01, + 9.8606e-01, 4.5352e-01, 4.0351e-01, 1.7322e-01, + 5.5044e-01, 5.6779e-01, 4.6138e-01, 1.6039e-01, + 8.4593e-01, 4.4678e-01, 3.6711e-01, 1.1958e-01, + 4.0284e-01, 2.9045e-01, 4.1998e-02, 9.8879e-01, + 2.5756e-01, 2.2747e-01, 8.6579e-01, 9.2381e-01, + 1.9209e-02, 3.2616e-01, 4.1166e-01, 8.5359e-01, + 6.4292e-03, 8.7760e-01, 3.3960e-01, 2.2942e-01, + 5.9349e-02, 7.2580e-03, 8.4465e-01, 5.6293e-01, + 9.8950e-01, 3.1535e-01, 8.6913e-02, 9.4508e-01, + 3.4266e-01, 1.8015e-01, 3.4861e-01, 1.2027e-01, + 6.7966e-01, 6.1723e-01, 3.7772e-01, 7.2677e-01, + 4.1862e-01, 6.9165e-01, 9.3314e-01, 3.8432e-01, + 6.7604e-01, 6.2070e-01, 2.1652e-01, 2.2003e-01, + 3.9822e-01, 3.7987e-01, 1.1706e-01, 4.2023e-01, + 4.3998e-01, 1.4823e-01, 6.5209e-01, 4.7300e-01, + 5.3002e-01, 7.3948e-02, 7.8073e-01, 4.9674e-01, + 8.9755e-01, 1.6207e-01, 5.1156e-01, 8.2609e-01, + 8.8470e-01, 2.0784e-01, 5.3133e-02, 4.3728e-01, + 9.5681e-01, 4.6318e-01, 2.7776e-01, 8.2469e-01, + 9.5630e-01, 4.5568e-01, 8.4641e-01, 3.2661e-01, + 8.2628e-01, 6.0514e-01, 2.9279e-02, 7.5151e-01, + 9.7074e-01, 9.1890e-01, 1.1237e-01, 1.1527e-01, + 8.3370e-01, 8.3352e-01, 4.4550e-02, 8.4873e-01, + 7.5722e-02, 7.9121e-01, 4.2402e-01, 8.9335e-01, + 9.3115e-01, 3.7925e-01, 1.9639e-01, 6.3100e-01, + 7.1477e-01, 1.6790e-01, 8.4528e-01, 5.8622e-01, + 8.4465e-01, 1.5091e-02, 4.7835e-02, 8.7725e-01, + 8.3249e-01, 8.8233e-01, 1.5546e-01, 1.5203e-01, + 9.6027e-01, 3.1316e-02, 3.3490e-01, 1.1026e-01, + 5.3368e-01, 1.7356e-01, 1.9277e-01, 1.9269e-01, + 6.9275e-01, 2.8661e-02, 3.9031e-01, 2.4672e-02, + 2.8368e-01, 7.9525e-01, 4.0028e-02, 8.3603e-02, + 9.2610e-01, 7.5235e-01, 6.4526e-02, 3.3751e-01, + 5.0586e-01, 8.4405e-02, 1.2137e-02, 7.0161e-01, + 1.7614e-01, 3.7728e-01, 8.3532e-01, 5.6076e-01, + 9.7522e-01, 1.4646e-01, 9.6786e-01, 3.8158e-01, + 1.2248e-01, 8.1970e-01, 4.0161e-01, 9.0733e-02, + 5.5695e-01, 5.0598e-02, 2.2562e-01, 6.9129e-01, + 4.6218e-01, 9.9306e-01, 2.1867e-01, 7.9969e-01, + 3.1980e-01, 3.1776e-01, 4.6388e-01, 8.2624e-01, + 5.9889e-01, 8.6046e-01, 4.9350e-01, 4.3145e-02, + 8.9103e-01, 4.1839e-01, 5.6362e-01, 3.4268e-01, + 1.6144e-01, 7.9260e-01, 3.8651e-01, 3.6622e-01, + 6.2355e-02, 8.4649e-03, 5.9591e-01, 3.0841e-01, + 2.2517e-02, 7.3582e-01, 9.6791e-01, 9.4695e-01, + 5.0667e-01, 4.5562e-01, 1.3969e-01, 6.7203e-02, + 5.1457e-01, 3.2345e-01, 8.4564e-01, 5.1825e-01, + 2.2461e-01, 7.5014e-01, 1.7213e-02, 7.3448e-01, + 8.4071e-01, 2.5253e-01, 3.3665e-01, 5.2107e-01, + 6.1341e-01, 7.7772e-01, 5.9428e-01, 6.6460e-01, + 1.0111e-01, 4.8113e-02, 7.9772e-01, 4.6887e-01, + 9.1541e-01, 2.5383e-01, 7.5473e-01, 7.6993e-01, + 3.6912e-01, 4.7897e-01, 4.8033e-01, 9.2947e-01, + 5.3236e-01, 2.4760e-01, 9.5600e-01, 2.1149e-01, + 4.0682e-01, 2.9947e-01, 3.0656e-01, 9.0495e-01, + 3.5182e-01, 2.4586e-01, 7.1635e-01, 8.4796e-01, + 4.0382e-02, 7.1298e-01, 6.9831e-01, 2.7730e-01, + 3.8053e-01, 4.2106e-01, 3.8511e-01, 2.9818e-01, + 1.8874e-01, 1.4719e-01, 3.4398e-02, 1.4849e-01, + 9.2047e-01, 6.7827e-01, 5.9393e-01, 4.2526e-01, + 2.6598e-01, 5.7767e-01, 6.0432e-02, 3.9571e-01, + 5.1177e-01, 1.6902e-01, 8.8096e-01, 1.2675e-01, + 8.1362e-01, 2.6485e-01, 2.9218e-01, 8.5609e-01, + 5.7869e-01, 3.0526e-01, 5.9441e-01, 
1.2752e-01, + 7.2949e-02, 3.8120e-02, 2.4072e-01, 8.2447e-01, + 5.0517e-02, 9.1082e-01, 7.7109e-01, 5.9386e-01, + 1.0644e-01, 2.8324e-01, 6.1204e-01, 6.4110e-04, + 7.6266e-01, 5.4141e-02, 8.9627e-02, 5.6564e-01, + 8.2543e-01, 4.2250e-01, 5.5696e-01, 7.7609e-01, + 2.9287e-01, 9.2333e-01, 5.0038e-01, 3.2645e-01, + 6.8770e-01, 8.4221e-02, 6.2684e-01, 5.8685e-01, + 7.8488e-01, 4.7117e-01, 5.0820e-01, 4.2975e-01, + 6.4645e-01, 1.0595e-01, 1.2738e-01, 8.8690e-01, + 2.9126e-01, 8.0690e-01, 2.2058e-01, 9.6060e-01, + 3.4055e-01, 1.5371e-01, 8.5316e-01, 9.4210e-01, + 4.5985e-01, 1.4483e-01, 2.0337e-01, 6.7897e-01, + 9.8297e-01, 8.9915e-01, 2.0276e-01, 8.0107e-01, + 3.8820e-02, 4.9179e-01, 8.3814e-01, 6.8269e-02, + 3.3350e-01, 2.1788e-01, 8.6221e-01, 1.5502e-01, + 3.0660e-01, 6.4393e-02, 6.8596e-01, 8.8161e-01, + 8.4932e-01, 9.4297e-01, 4.3218e-01, 4.5826e-01, + 5.6829e-01, 5.0302e-02, 1.7585e-01, 8.9436e-01, + 5.7550e-01, 4.6086e-02, 4.2162e-01, 7.4471e-02, + 7.5468e-01, 7.9099e-01, 4.3482e-01, 6.1178e-01, + 8.5876e-01, 5.0540e-01, 7.1383e-01, 9.1544e-01, + 7.4191e-01, 7.5395e-01, 4.4254e-01, 5.1539e-01, + 1.0786e-01, 8.7475e-01, 6.1691e-01, 6.6175e-01, + 7.6243e-01, 2.6227e-01, 9.5930e-01, 6.2000e-01, + 5.1370e-01, 8.4366e-01, 5.6421e-01, 2.3290e-01, + 5.3571e-01, 7.5666e-01, 9.8378e-01, 7.9547e-01, + 1.1517e-01, 5.0594e-01, 7.2903e-02, 9.2875e-01, + 1.7856e-01, 3.9253e-03, 1.3847e-01, 6.4257e-01, + 6.9846e-01, 3.5532e-01, 2.9091e-01, 9.5780e-01, + 5.5494e-01, 4.7778e-02, 8.7354e-01, 1.0269e-01, + 8.1902e-01, 9.3149e-01, 4.6003e-01, 4.4069e-01, + 7.3572e-01, 1.9700e-01, 1.1873e-01, 1.9503e-01, + 1.6331e-01, 3.3089e-01, 3.6121e-01, 7.5118e-01, + 5.2180e-01, 2.0036e-01, 1.5719e-01, 4.2558e-01, + 9.9531e-01, 9.9613e-01, 3.9908e-01, 3.3477e-01, + 2.7096e-01, 6.7791e-01, 1.2942e-01, 9.3291e-01, + 9.6898e-01, 7.1296e-03, 6.7171e-01, 5.6510e-01, + 8.0775e-01, 6.2157e-01, 8.9751e-01, 3.4214e-01, + 4.3880e-01, 7.1611e-01, 2.4727e-01, 9.4859e-01, + 4.9406e-01, 5.9874e-01, 9.4673e-02, 5.1336e-01, + 4.7367e-01, 5.3637e-01, 7.3777e-01, 1.4120e-01, + 7.7163e-01, 1.7059e-02, 3.6066e-02, 9.7823e-01, + 5.3771e-01, 3.0840e-02, 9.1991e-02, 6.3165e-01, + 3.4669e-01, 3.2954e-01, 1.8754e-01, 4.9846e-01, + 9.0782e-01, 1.5787e-01, 3.4028e-02, 2.6233e-02, + 4.1891e-01, 2.9554e-01, 6.5580e-01, 7.8866e-01, + 9.2102e-01, 2.6180e-01, 9.0379e-01, 5.6532e-01, + 6.7509e-01, 3.7573e-01, 4.1192e-01, 8.5496e-01, + 8.7801e-01, 2.1118e-02, 7.7590e-01, 3.4153e-01, + 8.5484e-02, 3.1649e-02, 5.1466e-02, 6.2083e-01, + 7.2212e-01, 3.1516e-01, 3.8762e-01, 2.2570e-01, + 9.2301e-01, 3.2042e-01, 1.5498e-01, 8.5561e-01, + 1.0290e-01, 7.2947e-01, 3.9851e-01, 6.5018e-01, + 6.2749e-01, 6.0682e-01, 7.8471e-02, 3.8599e-02, + 6.5804e-01, 8.7841e-02, 2.0583e-01, 3.8612e-01, + 6.1751e-01, 5.4279e-01, 7.2615e-01, 3.5659e-01, + 2.9783e-01, 5.8370e-01, 6.8820e-01, 8.2542e-01, + 9.7904e-01, 4.2460e-01, 9.1376e-01, 2.0571e-01, + 2.9312e-01, 2.3971e-01, 1.7336e-01, 2.4531e-01, + 9.6397e-01, 7.6320e-01, 9.7277e-01, 2.3373e-01, + 1.4369e-01, 9.3760e-01, 9.0329e-01, 3.8886e-01, + 6.7850e-02, 4.6879e-01, 4.4397e-01, 4.1645e-01, + 4.2158e-01, 3.4832e-01, 3.4918e-02, 3.7074e-01, + 6.3639e-01, 4.1448e-01, 5.1512e-01, 5.5361e-01, + 4.3164e-01, 3.1583e-01, 3.9165e-01, 1.7429e-01, + 4.1237e-01, 5.0128e-01, 9.7980e-01, 6.0369e-01, + 4.6263e-02, 3.5461e-01, 9.6210e-01, 9.2243e-01, + 9.2681e-01, 9.9244e-01, 9.1407e-01, 1.5403e-01, + 8.4569e-01, 8.4205e-02, 6.4898e-01, 3.4228e-01, + 9.9040e-01, 9.4653e-02, 1.3711e-01, 4.1590e-02, + 2.7929e-01, 9.8112e-01, 4.0446e-01, 
4.8688e-01, + 2.6701e-01, 5.6931e-02, 8.7151e-01, 2.0045e-01, + 7.6536e-01, 4.2710e-01, 1.9922e-01, 7.5901e-02, + 6.1626e-01, 4.2545e-01, 8.0363e-01, 5.6063e-01, + 9.2539e-01, 1.4590e-01, 1.7597e-01, 9.8168e-01, + 5.9641e-01, 5.9081e-02, 9.0072e-01, 9.3040e-01, + 8.5535e-02, 4.1175e-01, 3.7000e-01, 2.3126e-01, + 5.4533e-01, 6.2806e-01, 5.1411e-01, 2.0285e-01, + 3.5772e-01, 9.7978e-01, 8.6206e-01, 5.5145e-01, + 5.4217e-01, 3.6222e-01, 9.4154e-01, 9.3831e-01, + 1.2369e-01, 5.2663e-01, 1.3103e-01, 5.3546e-01, + 3.8175e-01, 3.6898e-01, 8.7337e-01, 7.4665e-01, + 2.6395e-01, 1.9721e-01, 1.2521e-01, 4.1127e-01, + 7.2385e-01, 7.5100e-01, 1.4588e-01, 3.1484e-01, + 8.7883e-01, 5.5098e-01, 2.3037e-01, 7.2878e-01, + 8.6636e-01, 7.5556e-01, 8.2382e-01, 8.8294e-01, + 2.0436e-01, 4.7379e-01, 1.1506e-02, 3.6998e-01, + 4.8691e-02, 3.5388e-01, 7.7140e-01, 4.4892e-01, + 9.2490e-01, 6.0754e-01, 9.3184e-01, 2.9576e-01, + 2.8672e-02, 6.1707e-01, 3.3398e-01, 3.3180e-01, + 5.7096e-01, 6.8657e-01, 6.4932e-01, 3.9777e-02, + 4.2951e-01, 5.9183e-01, 3.6356e-01, 2.0332e-01, + 6.4297e-01, 3.9753e-01, 5.1110e-01, 2.9661e-01, + 2.7477e-01, 4.4458e-01, 4.0995e-03, 1.4332e-01, + 6.6509e-01, 2.3765e-01, 3.8186e-01, 3.5293e-01, + 5.8094e-01, 4.0896e-01, 8.1942e-01, 7.0758e-01, + 4.6266e-01, 1.9519e-01, 9.7249e-01, 5.6987e-01, + 5.4200e-01, 5.7458e-01, 8.4792e-01, 5.9820e-01, + 6.7860e-02, 7.0673e-01, 9.7042e-01, 2.1627e-01, + 8.3728e-01, 9.1294e-01, 6.0402e-02, 6.5436e-01, + 4.4962e-01, 6.5229e-01, 3.5443e-01, 6.3312e-01, + 1.0353e-01, 9.9552e-01, 6.4540e-01, 2.4929e-01, + 9.2099e-02, 5.4689e-01, 9.5107e-01, 9.4512e-02, + 6.9038e-01, 1.2647e-01, 7.2218e-02, 4.6401e-01, + 8.2832e-02, 8.1024e-01, 9.3393e-01, 9.2499e-01, + 8.0581e-01, 8.6132e-01, 7.9752e-01, 6.1856e-01, + 5.1087e-01, 8.3499e-01, 7.4870e-01, 9.5008e-01, + 8.5561e-01, 2.1452e-01, 5.8921e-01, 4.5798e-01, + 5.8578e-02, 2.8872e-01, 9.4011e-01, 3.1109e-01, + 2.5961e-01, 5.7702e-01, 2.8141e-01, 6.8050e-01, + 1.2057e-01, 5.9469e-01, 5.5647e-01, 3.2488e-01, + 6.3327e-02, 5.2348e-01, 5.7656e-02, 2.7546e-01, + 7.9710e-01, 5.0513e-01, 6.6621e-01, 1.6496e-01, + 4.3276e-01, 3.4305e-01, 5.4027e-01, 4.5982e-01, + 6.6688e-01, 7.2807e-01, 3.9068e-01, 3.5664e-01, + 9.8535e-01, 2.8286e-01, 6.8675e-01, 9.8235e-01, + 5.7765e-01, 2.6268e-02, 2.4418e-01, 9.8618e-01, + 8.6170e-01, 6.8356e-01, 5.6308e-01, 8.4236e-01, + 6.2310e-01, 2.0339e-01, 4.2393e-01, 3.7549e-01, + 1.6201e-01, 3.0550e-01, 5.7854e-01, 7.4392e-01, + 4.2665e-01, 2.4410e-01, 7.6743e-01, 3.8324e-01, + 7.5680e-01, 1.6480e-01, 9.1988e-01, 5.8283e-02, + 3.5749e-01, 6.4400e-02, 2.3321e-01, 3.3709e-01, + 4.3109e-01, 6.6515e-01, 9.5703e-01, 3.4121e-01, + 1.2024e-01, 9.2209e-01, 6.6885e-01, 2.6983e-01, + 4.5425e-01, 3.7905e-01, 1.4001e-01, 4.7110e-01, + 9.9889e-01, 9.9982e-01, 1.5841e-01, 9.8741e-01, + 4.5334e-01, 6.5744e-01, 9.9230e-01, 3.5676e-01, + 9.8896e-02, 5.8588e-01, 9.7845e-01, 1.1323e-01, + 5.6851e-01, 5.3220e-01, 2.2894e-01, 8.5355e-01, + 4.4067e-01, 2.9527e-03, 8.9506e-01, 8.6706e-01, + 9.7359e-01, 8.9530e-01, 3.0017e-01, 9.4358e-01, + 5.0160e-01, 2.5433e-01, 1.2202e-01, 4.5732e-04, + 5.0189e-01, 5.3273e-01, 5.6864e-01, 4.5479e-01, + 2.6222e-01, 1.8306e-01, 2.1511e-01, 6.2500e-01, + 2.6499e-01, 2.4113e-01, 9.6670e-01, 7.8401e-01, + 2.7198e-01, 3.7118e-01, 4.4807e-02, 2.1278e-01, + 6.5702e-01, 5.5385e-01, 7.4667e-01, 8.4403e-01, + 4.5430e-01, 1.8530e-01, 8.9785e-01, 8.2562e-01, + 5.9524e-02, 9.8443e-01, 5.7315e-01, 7.9228e-01, + 8.6134e-01, 4.4607e-01, 2.9915e-02, 3.9586e-01, + 3.8967e-01, 3.0495e-01, 8.0422e-02, 
1.4729e-01, + 6.6124e-01, 7.7924e-01, 2.3484e-02, 4.5396e-01, + 5.7560e-01, 5.4899e-01, 7.2940e-01, 4.5459e-01, + 5.0619e-01, 9.2907e-01, 2.6377e-01, 2.0862e-01, + 7.8194e-01, 3.0229e-01, 9.2202e-01, 4.4461e-01, + 7.1257e-01, 6.6973e-01, 9.4569e-01, 8.0850e-01, + 6.7805e-01, 1.6463e-01, 1.0688e-01, 4.9352e-01, + 5.2410e-01, 1.6871e-01, 4.7562e-01, 5.4036e-01, + 1.6193e-01, 1.0648e-01, 5.1943e-01, 8.9463e-01, + 4.4317e-01, 9.5783e-02, 8.8297e-01, 8.1677e-01, + 6.8653e-01, 6.5369e-01, 1.7501e-01, 4.2868e-01, + 5.1637e-01, 9.7198e-01, 3.6741e-01, 9.6422e-01, + 4.5196e-01, 4.6694e-01, 1.9777e-01, 3.2848e-01, + 2.7045e-01, 4.9558e-01, 4.9443e-01, 3.5205e-01, + 4.4193e-01, 2.7119e-01, 2.2582e-01, 4.3652e-01, + 3.9207e-01, 3.1979e-01, 8.6561e-01, 2.7233e-01, + 3.9930e-01, 9.6736e-01, 9.4994e-01, 5.0563e-02, + 7.8371e-01, 4.5770e-01, 4.5927e-02, 2.4072e-01, + 9.5848e-01, 1.5317e-01, 3.0628e-01, 7.3280e-01, + 1.1950e-03, 9.7738e-01, 7.3795e-01, 9.7671e-01, + 8.5877e-01, 6.1419e-01, 9.2512e-01, 7.4388e-01, + 8.4213e-01, 2.9634e-01, 9.8509e-01, 6.6900e-02, + 4.5316e-02, 4.1694e-01, 7.1200e-01, 3.6514e-01, + 8.1444e-01, 7.8145e-01, 7.6825e-01, 2.2762e-01, + 7.5676e-01, 8.3028e-02, 3.5066e-01, 5.9154e-01, + 5.1374e-01, 2.5955e-01, 6.8810e-02, 7.8733e-01, + 6.1132e-01, 3.4790e-01, 3.7414e-01, 5.9036e-01, + 5.7427e-01, 6.3017e-01, 5.8398e-01, 3.5988e-01, + 3.3419e-01, 7.1370e-01, 6.8582e-01, 8.9618e-01, + 6.6720e-01, 2.9133e-01, 4.5138e-01, 3.7804e-01, + 4.5068e-02, 9.6865e-01, 4.0918e-01, 5.2958e-01, + 8.8060e-01, 2.8164e-01, 8.6585e-01, 5.1281e-01, + 7.9258e-01, 6.0138e-01, 4.5390e-01, 7.3913e-01, + 7.2184e-02, 5.8627e-01, 7.8958e-01, 1.1961e-01, + 6.2399e-01, 1.9917e-01, 5.9269e-01, 4.7649e-01, + 2.3361e-01, 4.8163e-01, 1.9401e-01, 9.9441e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.1927, 0.3376, 0.5829, ..., 0.5323, 0.6382, 0.2928]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 2.7809412479400635 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '243279', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.448915243148804} + +tensor(indices=tensor([[6057, 400, 5519, ..., 3841, 5980, 1182], + [ 373, 615, 2154, ..., 7309, 8499, 5577]]), + values=tensor([4.8314e-01, 5.0293e-01, 1.7146e-01, 9.2645e-01, + 6.4498e-01, 9.3224e-01, 1.3928e-01, 6.8929e-01, + 6.1478e-01, 8.0445e-01, 9.3443e-01, 3.1225e-01, + 5.6432e-01, 2.8192e-01, 7.3138e-01, 6.3539e-01, + 7.3477e-01, 4.5271e-01, 7.9196e-01, 2.0374e-01, + 5.2458e-01, 8.2163e-01, 2.0609e-01, 7.6665e-01, + 8.8472e-01, 6.2352e-01, 2.0327e-01, 9.6326e-01, + 1.1306e-01, 5.2290e-01, 5.2410e-01, 2.0724e-01, + 8.8412e-01, 6.7138e-01, 4.8028e-01, 4.6555e-01, + 6.2837e-01, 6.7916e-01, 7.2563e-01, 6.2376e-01, + 9.2579e-01, 3.2811e-01, 4.4165e-03, 7.4105e-01, + 4.0091e-01, 5.9459e-01, 7.1404e-01, 8.5057e-01, + 4.2446e-01, 4.1666e-01, 2.7410e-01, 5.4740e-01, + 7.4317e-01, 5.2217e-01, 8.7417e-02, 3.1278e-01, + 6.2784e-01, 5.7214e-01, 2.6202e-01, 7.6815e-01, + 7.9105e-01, 1.8723e-01, 7.1156e-01, 5.6566e-01, + 7.2218e-01, 4.1881e-01, 8.9000e-01, 2.8443e-01, + 5.7332e-01, 2.8495e-01, 9.5095e-01, 3.6533e-01, + 
6.9246e-02, 3.1827e-02, 4.3140e-01, 2.7050e-01, + 2.7338e-01, 2.9976e-01, 6.5325e-01, 2.3459e-01, + 1.9793e-01, 5.7280e-01, 3.4425e-01, 1.0618e-01, + 2.5570e-01, 2.1155e-01, 5.8627e-01, 3.9998e-01, + 5.4571e-01, 6.1355e-01, 1.8685e-01, 2.2324e-01, + 8.7922e-01, 2.7730e-03, 9.0808e-01, 9.1886e-01, + 5.5513e-01, 3.6603e-02, 1.7779e-01, 5.0864e-01, + 4.3599e-01, 8.2585e-01, 2.8329e-01, 8.2657e-01, + 3.1233e-01, 4.8494e-01, 8.0312e-01, 9.9785e-01, + 6.5101e-01, 5.1010e-01, 2.6498e-04, 1.0170e-01, + 9.0003e-01, 5.9562e-02, 3.1241e-01, 5.1709e-01, + 3.8356e-01, 5.1417e-01, 3.5095e-02, 6.8323e-01, + 7.6406e-01, 5.1469e-01, 6.2120e-01, 6.9929e-01, + 1.8267e-02, 3.8070e-01, 8.0236e-02, 3.0714e-01, + 2.1455e-01, 9.8444e-04, 3.6719e-01, 6.1629e-01, + 3.1781e-01, 2.0462e-01, 1.1342e-01, 3.6548e-01, + 3.6103e-01, 5.3456e-01, 5.4670e-01, 8.7233e-01, + 2.4420e-01, 7.4123e-01, 5.6769e-01, 5.5563e-01, + 5.3224e-01, 7.7709e-01, 9.0444e-01, 4.5820e-01, + 7.2259e-01, 2.4990e-01, 6.0149e-01, 2.1364e-02, + 5.0615e-01, 8.5023e-01, 4.9177e-02, 6.7409e-01, + 6.8513e-01, 4.4353e-01, 9.5889e-01, 8.8245e-01, + 7.9641e-01, 9.1719e-01, 6.3125e-01, 9.8706e-01, + 8.3091e-01, 7.8700e-02, 3.2209e-01, 2.5162e-01, + 9.5662e-01, 8.2342e-01, 5.5747e-01, 1.2566e-01, + 3.7025e-01, 6.9843e-02, 5.7688e-01, 9.4593e-01, + 8.6299e-01, 5.4653e-01, 8.9012e-01, 3.1900e-01, + 7.9041e-01, 4.6428e-01, 3.0140e-01, 4.6784e-01, + 9.7388e-01, 9.0781e-01, 8.7590e-01, 7.7776e-01, + 7.1851e-01, 5.2132e-01, 3.5454e-01, 7.6517e-01, + 4.8007e-01, 7.0706e-01, 8.5516e-02, 6.1109e-01, + 1.5972e-01, 3.4871e-01, 9.0875e-01, 3.4193e-01, + 3.0315e-01, 5.8826e-01, 1.1656e-01, 8.9163e-01, + 3.8336e-01, 7.5169e-01, 6.3788e-01, 4.1949e-01, + 3.4518e-01, 6.1365e-01, 8.1683e-01, 3.0870e-01, + 7.5626e-03, 8.8665e-01, 9.3500e-01, 3.2048e-01, + 6.6239e-02, 1.5877e-01, 9.5064e-02, 6.8221e-02, + 8.9181e-01, 4.7853e-01, 9.1645e-01, 5.0768e-01, + 5.4180e-01, 1.0519e-01, 9.8294e-02, 8.5204e-01, + 2.0866e-01, 8.7943e-01, 9.9479e-01, 2.1673e-01, + 3.9504e-01, 4.9317e-01, 2.4394e-01, 4.4087e-01, + 5.3132e-01, 8.0536e-01, 5.5427e-01, 7.3616e-01, + 7.5198e-01, 4.3281e-01, 1.7441e-01, 4.6008e-01, + 3.2255e-01, 5.0473e-01, 2.0464e-01, 7.0253e-01, + 2.6500e-01, 5.9212e-01, 4.1372e-01, 5.6076e-01, + 6.5079e-01, 6.6345e-01, 8.9370e-01, 4.4151e-01, + 7.5261e-01, 9.3436e-01, 9.0426e-01, 3.5341e-01, + 3.1107e-02, 3.7627e-01, 5.5499e-01, 8.0794e-01, + 1.1957e-01, 7.0718e-01, 1.7724e-02, 5.4599e-01, + 5.4034e-01, 2.8116e-01, 5.2301e-03, 3.7265e-01, + 1.0825e-01, 9.2466e-01, 8.8151e-01, 5.4868e-01, + 9.2362e-01, 9.3549e-02, 9.5836e-01, 4.1685e-01, + 7.8251e-01, 9.2833e-01, 1.0704e-01, 2.4607e-01, + 8.7394e-01, 2.8186e-01, 3.2168e-02, 7.2510e-01, + 2.6879e-01, 9.7464e-02, 1.3409e-01, 3.3062e-01, + 8.8278e-01, 4.0858e-01, 4.7202e-01, 5.8437e-01, + 9.8604e-01, 3.6536e-01, 4.6498e-01, 7.2753e-01, + 5.3385e-01, 3.9092e-01, 7.0911e-01, 3.3352e-01, + 5.5497e-01, 9.7267e-02, 6.9691e-01, 5.5484e-01, + 9.1751e-01, 8.8460e-02, 7.3616e-02, 3.5998e-01, + 1.3471e-01, 6.4500e-01, 2.4703e-01, 1.5591e-01, + 8.4074e-01, 4.3653e-01, 6.1247e-01, 4.1213e-01, + 5.3951e-01, 6.7111e-01, 6.3140e-01, 2.6497e-01, + 4.4695e-01, 4.4986e-01, 5.9120e-01, 6.5187e-01, + 3.0056e-01, 5.9338e-01, 1.8317e-01, 4.5320e-01, + 5.6754e-01, 2.3987e-01, 7.4706e-02, 4.0133e-01, + 8.8012e-01, 1.4342e-01, 2.0960e-01, 1.3241e-01, + 6.4255e-01, 2.9791e-01, 9.2725e-01, 8.9829e-01, + 5.1115e-01, 6.4747e-01, 9.8096e-01, 3.9074e-01, + 3.0210e-01, 3.1061e-01, 6.8823e-01, 7.5328e-01, + 1.5867e-01, 5.9823e-01, 2.9737e-01, 8.4317e-01, + 
2.6299e-01, 3.8415e-01, 7.1391e-01, 1.6739e-01, + 3.1571e-01, 2.3499e-01, 1.5129e-01, 7.0845e-01, + 6.5063e-01, 1.1192e-01, 5.3314e-01, 3.5378e-01, + 5.5449e-02, 9.0028e-01, 5.7673e-01, 5.7277e-01, + 9.0263e-01, 4.3529e-01, 5.7680e-01, 6.2525e-01, + 9.0810e-01, 2.7103e-01, 5.1492e-01, 5.0133e-01, + 3.4588e-01, 7.3315e-01, 8.9434e-01, 4.7066e-01, + 1.7544e-02, 4.8194e-01, 5.4906e-01, 1.5558e-01, + 2.1744e-01, 7.6079e-01, 4.8651e-01, 3.5549e-01, + 7.2595e-01, 5.7134e-01, 4.7736e-01, 6.2765e-01, + 1.5475e-01, 7.8358e-01, 9.1492e-01, 9.0075e-01, + 5.0689e-01, 6.9501e-02, 4.5682e-01, 8.2907e-01, + 1.4651e-01, 7.6667e-01, 2.5912e-01, 3.0656e-01, + 8.7188e-01, 7.3824e-01, 7.2183e-01, 2.8199e-01, + 7.7480e-01, 1.8764e-01, 3.8212e-01, 2.9652e-02, + 5.2722e-01, 6.9146e-01, 3.9911e-01, 2.7766e-01, + 1.4404e-01, 8.7560e-01, 2.8648e-01, 9.4768e-01, + 6.7746e-03, 7.5801e-02, 6.2813e-01, 3.9781e-01, + 8.0702e-01, 1.2572e-01, 5.5327e-01, 9.6521e-01, + 2.6806e-01, 2.4618e-01, 8.1689e-01, 2.3784e-01, + 7.6903e-03, 3.7179e-01, 2.9906e-01, 4.2609e-01, + 4.7857e-01, 5.8806e-01, 2.3974e-01, 9.1742e-01, + 3.9813e-01, 6.2189e-01, 3.0918e-01, 8.7854e-01, + 4.6423e-02, 8.4128e-01, 8.1039e-01, 2.6682e-01, + 9.1925e-01, 9.7354e-01, 2.3395e-02, 3.0720e-01, + 6.7602e-01, 6.7656e-01, 5.4145e-01, 6.1314e-02, + 8.9924e-01, 6.4719e-01, 4.9837e-01, 4.4099e-01, + 1.9512e-01, 8.3038e-02, 8.9228e-01, 6.2742e-01, + 8.2100e-01, 5.9263e-01, 8.1269e-01, 3.5823e-01, + 3.5513e-01, 3.6720e-01, 9.3768e-01, 1.3988e-01, + 9.7540e-02, 7.6460e-01, 4.3708e-01, 5.6815e-01, + 8.3998e-02, 5.9339e-01, 8.7812e-02, 2.3119e-01, + 8.9352e-01, 1.2615e-01, 6.2478e-01, 9.1375e-01, + 2.8843e-01, 4.0416e-01, 1.2753e-01, 2.6921e-01, + 7.7956e-01, 2.0159e-01, 1.4880e-01, 5.7795e-01, + 7.7439e-02, 7.6665e-02, 4.7053e-02, 3.2850e-03, + 1.4162e-01, 2.7010e-01, 2.2562e-01, 6.8973e-01, + 1.2782e-01, 9.6320e-01, 3.3146e-01, 8.5191e-01, + 1.1799e-01, 8.0380e-01, 6.2200e-01, 8.6155e-01, + 2.7536e-03, 1.0555e-01, 7.7395e-02, 4.7649e-01, + 7.8700e-01, 4.4258e-01, 7.1689e-01, 7.0040e-01, + 7.8334e-01, 7.4078e-02, 1.0312e-01, 8.8193e-01, + 3.9114e-01, 1.7927e-01, 8.3399e-01, 9.1773e-01, + 4.5159e-01, 6.0510e-01, 8.2281e-01, 1.6564e-01, + 2.1390e-01, 5.9743e-02, 7.6504e-01, 6.1133e-01, + 6.7155e-01, 8.3260e-01, 4.2074e-01, 1.6188e-01, + 5.4055e-01, 5.4034e-01, 5.1827e-01, 6.4072e-01, + 9.1437e-01, 8.0662e-01, 7.0467e-01, 5.7738e-01, + 1.3520e-01, 5.2585e-01, 8.5175e-01, 8.4468e-01, + 3.0361e-01, 8.5751e-01, 1.7277e-01, 9.6359e-01, + 8.6128e-01, 2.9765e-01, 5.3919e-01, 6.1371e-01, + 3.8046e-01, 4.8226e-01, 1.3437e-01, 5.0432e-01, + 1.8892e-01, 6.5133e-01, 2.6499e-01, 7.1236e-01, + 9.6542e-01, 5.7861e-01, 7.7088e-01, 8.8734e-01, + 6.3359e-01, 1.7758e-01, 2.7085e-01, 4.3296e-01, + 5.4653e-01, 6.0021e-01, 9.5719e-02, 4.5326e-01, + 8.3059e-01, 6.8966e-02, 5.0285e-01, 5.6595e-01, + 1.5023e-01, 9.0196e-01, 8.4544e-01, 1.0865e-01, + 7.5520e-01, 1.3423e-01, 4.0270e-01, 8.6072e-01, + 6.2697e-01, 3.2967e-01, 8.4051e-01, 7.5529e-01, + 7.0811e-01, 8.6908e-01, 2.9653e-01, 1.2958e-01, + 9.7291e-01, 6.4502e-01, 8.0253e-01, 8.8907e-01, + 2.7251e-01, 4.4411e-01, 4.7057e-01, 8.9308e-01, + 1.3930e-01, 3.2340e-01, 1.6439e-01, 1.5432e-01, + 9.7173e-01, 9.0521e-01, 3.4371e-01, 4.0490e-01, + 4.6653e-01, 7.1837e-01, 7.9360e-01, 5.4664e-01, + 9.4729e-01, 5.5325e-01, 5.1251e-01, 5.7572e-01, + 7.5254e-01, 1.2146e-02, 2.5185e-01, 5.1271e-01, + 2.7655e-01, 6.0389e-01, 3.8128e-01, 5.3860e-01, + 1.3426e-01, 6.6692e-01, 9.7070e-02, 5.4658e-01, + 4.3834e-01, 7.1205e-01, 9.0608e-01, 5.0276e-01, + 
7.2740e-01, 4.9999e-01, 9.8219e-01, 2.5527e-01, + 9.9839e-01, 4.4277e-01, 7.4335e-01, 5.2927e-01, + 4.0147e-01, 3.0481e-01, 5.0779e-01, 6.2447e-01, + 4.1423e-01, 1.7832e-01, 2.7916e-01, 9.9786e-02, + 2.4396e-02, 9.0325e-01, 1.8473e-01, 5.1315e-01, + 7.4856e-01, 7.2333e-01, 7.3631e-01, 6.6483e-01, + 5.8875e-01, 1.2949e-01, 9.2499e-01, 4.2606e-01, + 1.2427e-01, 5.9750e-01, 6.8181e-01, 3.2032e-01, + 5.9607e-01, 4.8650e-01, 6.1343e-01, 8.3800e-01, + 4.1249e-01, 5.4277e-01, 5.1267e-01, 5.8284e-01, + 2.6744e-01, 3.3676e-01, 4.4564e-01, 3.3742e-01, + 1.8033e-01, 9.9759e-01, 5.5480e-01, 8.5469e-02, + 1.5223e-01, 4.7243e-01, 6.4551e-02, 1.0404e-01, + 9.1424e-01, 1.8783e-01, 8.3795e-01, 5.6527e-01, + 3.0543e-01, 2.2606e-01, 3.4035e-01, 1.2321e-01, + 6.3580e-01, 5.1722e-01, 7.7951e-01, 6.5095e-01, + 9.5081e-01, 1.8720e-01, 3.7574e-01, 1.0842e-01, + 8.9786e-01, 5.3056e-01, 9.1637e-01, 2.9814e-01, + 1.2798e-01, 2.2609e-01, 6.4805e-01, 9.3799e-01, + 3.2670e-03, 2.0420e-01, 8.4821e-01, 4.8440e-01, + 7.5172e-01, 9.4055e-01, 5.5659e-01, 9.7478e-02, + 2.3372e-01, 8.5619e-01, 3.4988e-01, 4.0783e-01, + 9.8429e-02, 9.8374e-01, 8.1964e-01, 1.4047e-01, + 1.2908e-01, 1.7080e-01, 6.9650e-01, 5.7250e-02, + 5.7554e-01, 9.8411e-01, 4.7299e-01, 8.8420e-01, + 8.2475e-01, 3.6581e-01, 8.3189e-01, 6.0186e-01, + 4.7676e-01, 7.1719e-01, 7.4824e-02, 4.4193e-02, + 5.8170e-01, 6.8150e-01, 3.6726e-02, 2.6419e-01, + 1.6276e-01, 1.2947e-01, 9.0307e-01, 7.9831e-01, + 9.3271e-02, 5.2367e-01, 4.9455e-01, 4.9804e-01, + 1.7249e-01, 6.8633e-01, 3.6808e-01, 4.2856e-01, + 8.7386e-01, 2.0002e-01, 4.5518e-01, 2.2888e-01, + 2.2350e-01, 3.8328e-01, 8.0557e-01, 1.8242e-01, + 5.2051e-01, 1.4979e-01, 3.4747e-01, 4.2048e-01, + 2.3832e-01, 6.2383e-01, 1.3351e-02, 3.7202e-01, + 3.5029e-01, 2.2524e-02, 7.6718e-01, 6.1161e-01, + 3.1224e-01, 2.8659e-02, 9.0404e-02, 4.2520e-01, + 4.9767e-01, 2.2752e-01, 2.1180e-01, 2.9852e-01, + 5.0825e-02, 1.1592e-01, 7.6533e-01, 4.7888e-02, + 5.7053e-01, 9.8417e-01, 6.0783e-01, 1.2227e-01, + 9.7016e-01, 2.0638e-01, 3.0425e-01, 3.9947e-02, + 9.0234e-01, 2.6704e-01, 7.2920e-01, 8.4551e-01, + 5.8286e-01, 8.1310e-01, 1.3082e-01, 5.9846e-01, + 3.3391e-01, 9.3071e-02, 3.0659e-01, 8.9902e-03, + 8.2513e-01, 6.9419e-01, 1.7624e-01, 4.2008e-01, + 5.8744e-01, 4.8844e-01, 5.8423e-02, 2.3564e-01, + 9.3439e-01, 9.7089e-01, 8.6264e-01, 9.7042e-01, + 8.7013e-01, 6.0291e-01, 9.1248e-01, 3.4274e-01, + 3.5745e-01, 2.1946e-01, 1.0444e-01, 8.5652e-01, + 7.6108e-02, 5.6230e-01, 5.6428e-01, 6.2069e-01, + 2.2683e-02, 5.7501e-01, 3.5375e-01, 7.8703e-01, + 6.6308e-01, 7.4038e-01, 9.3660e-01, 3.4831e-01, + 3.9361e-01, 4.0072e-03, 1.6103e-01, 8.5753e-01, + 7.6312e-01, 3.4146e-01, 5.9079e-01, 7.0291e-01, + 1.3256e-01, 2.2869e-01, 6.0451e-01, 5.9436e-01, + 5.3361e-01, 7.6419e-01, 2.7024e-01, 6.8091e-01, + 4.9998e-01, 3.4192e-01, 2.8797e-03, 5.8640e-01, + 1.7082e-01, 6.1514e-01, 9.8979e-01, 4.7219e-01, + 9.4330e-01, 3.1986e-01, 3.6346e-01, 4.9807e-01, + 8.4419e-02, 9.2554e-01, 8.2927e-02, 7.0710e-01, + 6.3602e-01, 3.9341e-01, 4.1112e-01, 2.0481e-01, + 6.2342e-01, 8.1162e-01, 5.2079e-01, 9.6347e-01, + 5.6725e-01, 2.2812e-01, 8.3504e-01, 8.4527e-01, + 2.1915e-01, 3.2541e-01, 4.7805e-01, 7.9254e-01, + 1.1463e-02, 6.4787e-01, 7.7038e-01, 8.7874e-01, + 3.4853e-01, 1.4547e-01, 8.0736e-01, 7.7148e-01, + 6.4522e-01, 4.1223e-01, 6.6583e-01, 2.3489e-01, + 7.5618e-02, 5.7345e-01, 6.0048e-01, 6.8905e-01, + 7.0429e-01, 6.9057e-02, 8.8357e-01, 5.1606e-01, + 1.3926e-01, 6.1546e-01, 4.3300e-01, 6.6197e-02, + 2.9262e-01, 2.9336e-01, 8.9253e-01, 9.5784e-01, + 
1.1128e-01, 7.2099e-02, 7.1162e-01, 2.9179e-01, + 7.3996e-01, 7.0497e-01, 7.7206e-01, 3.4136e-01, + 1.0003e-02, 6.0418e-01, 3.8677e-01, 5.1479e-01, + 8.8069e-01, 4.4083e-01, 3.9369e-01, 9.6981e-01, + 8.6164e-01, 1.9798e-01, 3.3966e-01, 1.5113e-01, + 5.6949e-01, 4.2415e-01, 8.1822e-01, 1.8858e-01, + 3.6826e-01, 2.8324e-01, 4.2211e-01, 8.6172e-01, + 6.6398e-01, 5.4593e-01, 3.6808e-01, 3.3212e-01, + 2.8244e-02, 2.9601e-01, 4.5022e-01, 4.3624e-01, + 7.4292e-01, 1.2296e-01, 5.8630e-01, 9.2863e-01, + 4.8906e-02, 7.1181e-01, 5.9321e-01, 2.6944e-01, + 1.2286e-02, 6.1956e-01, 6.7301e-01, 8.8121e-01, + 8.0907e-01, 4.8580e-01, 5.0446e-01, 7.7445e-01, + 7.7929e-01, 5.1740e-01, 9.1796e-01, 4.0471e-01, + 8.0774e-01, 6.8630e-01, 6.5360e-01, 1.6695e-01, + 6.5084e-01, 3.2945e-01, 5.4305e-01, 5.0112e-01, + 3.8383e-01, 8.9022e-01, 9.2979e-01, 7.6494e-02, + 9.2941e-01, 2.9211e-01, 5.7058e-01, 5.5095e-01, + 6.4051e-01, 4.0649e-01, 8.4281e-01, 2.8756e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8266, 0.4178, 0.7886, ..., 0.6077, 0.8668, 0.9039]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.448915243148804 seconds + +tensor(indices=tensor([[6057, 400, 5519, ..., 3841, 5980, 1182], + [ 373, 615, 2154, ..., 7309, 8499, 5577]]), + values=tensor([4.8314e-01, 5.0293e-01, 1.7146e-01, 9.2645e-01, + 6.4498e-01, 9.3224e-01, 1.3928e-01, 6.8929e-01, + 6.1478e-01, 8.0445e-01, 9.3443e-01, 3.1225e-01, + 5.6432e-01, 2.8192e-01, 7.3138e-01, 6.3539e-01, + 7.3477e-01, 4.5271e-01, 7.9196e-01, 2.0374e-01, + 5.2458e-01, 8.2163e-01, 2.0609e-01, 7.6665e-01, + 8.8472e-01, 6.2352e-01, 2.0327e-01, 9.6326e-01, + 1.1306e-01, 5.2290e-01, 5.2410e-01, 2.0724e-01, + 8.8412e-01, 6.7138e-01, 4.8028e-01, 4.6555e-01, + 6.2837e-01, 6.7916e-01, 7.2563e-01, 6.2376e-01, + 9.2579e-01, 3.2811e-01, 4.4165e-03, 7.4105e-01, + 4.0091e-01, 5.9459e-01, 7.1404e-01, 8.5057e-01, + 4.2446e-01, 4.1666e-01, 2.7410e-01, 5.4740e-01, + 7.4317e-01, 5.2217e-01, 8.7417e-02, 3.1278e-01, + 6.2784e-01, 5.7214e-01, 2.6202e-01, 7.6815e-01, + 7.9105e-01, 1.8723e-01, 7.1156e-01, 5.6566e-01, + 7.2218e-01, 4.1881e-01, 8.9000e-01, 2.8443e-01, + 5.7332e-01, 2.8495e-01, 9.5095e-01, 3.6533e-01, + 6.9246e-02, 3.1827e-02, 4.3140e-01, 2.7050e-01, + 2.7338e-01, 2.9976e-01, 6.5325e-01, 2.3459e-01, + 1.9793e-01, 5.7280e-01, 3.4425e-01, 1.0618e-01, + 2.5570e-01, 2.1155e-01, 5.8627e-01, 3.9998e-01, + 5.4571e-01, 6.1355e-01, 1.8685e-01, 2.2324e-01, + 8.7922e-01, 2.7730e-03, 9.0808e-01, 9.1886e-01, + 5.5513e-01, 3.6603e-02, 1.7779e-01, 5.0864e-01, + 4.3599e-01, 8.2585e-01, 2.8329e-01, 8.2657e-01, + 3.1233e-01, 4.8494e-01, 8.0312e-01, 9.9785e-01, + 6.5101e-01, 5.1010e-01, 2.6498e-04, 1.0170e-01, + 9.0003e-01, 5.9562e-02, 3.1241e-01, 5.1709e-01, + 3.8356e-01, 5.1417e-01, 3.5095e-02, 6.8323e-01, + 7.6406e-01, 5.1469e-01, 6.2120e-01, 6.9929e-01, + 1.8267e-02, 3.8070e-01, 8.0236e-02, 3.0714e-01, + 2.1455e-01, 9.8444e-04, 3.6719e-01, 6.1629e-01, + 3.1781e-01, 2.0462e-01, 1.1342e-01, 3.6548e-01, + 3.6103e-01, 5.3456e-01, 5.4670e-01, 8.7233e-01, + 2.4420e-01, 7.4123e-01, 5.6769e-01, 5.5563e-01, + 5.3224e-01, 7.7709e-01, 9.0444e-01, 4.5820e-01, + 7.2259e-01, 2.4990e-01, 6.0149e-01, 2.1364e-02, + 5.0615e-01, 8.5023e-01, 4.9177e-02, 6.7409e-01, + 6.8513e-01, 4.4353e-01, 9.5889e-01, 8.8245e-01, + 7.9641e-01, 9.1719e-01, 6.3125e-01, 9.8706e-01, + 8.3091e-01, 7.8700e-02, 3.2209e-01, 2.5162e-01, + 9.5662e-01, 8.2342e-01, 5.5747e-01, 1.2566e-01, + 3.7025e-01, 6.9843e-02, 
5.7688e-01, 9.4593e-01, + 8.6299e-01, 5.4653e-01, 8.9012e-01, 3.1900e-01, + 7.9041e-01, 4.6428e-01, 3.0140e-01, 4.6784e-01, + 9.7388e-01, 9.0781e-01, 8.7590e-01, 7.7776e-01, + 7.1851e-01, 5.2132e-01, 3.5454e-01, 7.6517e-01, + 4.8007e-01, 7.0706e-01, 8.5516e-02, 6.1109e-01, + 1.5972e-01, 3.4871e-01, 9.0875e-01, 3.4193e-01, + 3.0315e-01, 5.8826e-01, 1.1656e-01, 8.9163e-01, + 3.8336e-01, 7.5169e-01, 6.3788e-01, 4.1949e-01, + 3.4518e-01, 6.1365e-01, 8.1683e-01, 3.0870e-01, + 7.5626e-03, 8.8665e-01, 9.3500e-01, 3.2048e-01, + 6.6239e-02, 1.5877e-01, 9.5064e-02, 6.8221e-02, + 8.9181e-01, 4.7853e-01, 9.1645e-01, 5.0768e-01, + 5.4180e-01, 1.0519e-01, 9.8294e-02, 8.5204e-01, + 2.0866e-01, 8.7943e-01, 9.9479e-01, 2.1673e-01, + 3.9504e-01, 4.9317e-01, 2.4394e-01, 4.4087e-01, + 5.3132e-01, 8.0536e-01, 5.5427e-01, 7.3616e-01, + 7.5198e-01, 4.3281e-01, 1.7441e-01, 4.6008e-01, + 3.2255e-01, 5.0473e-01, 2.0464e-01, 7.0253e-01, + 2.6500e-01, 5.9212e-01, 4.1372e-01, 5.6076e-01, + 6.5079e-01, 6.6345e-01, 8.9370e-01, 4.4151e-01, + 7.5261e-01, 9.3436e-01, 9.0426e-01, 3.5341e-01, + 3.1107e-02, 3.7627e-01, 5.5499e-01, 8.0794e-01, + 1.1957e-01, 7.0718e-01, 1.7724e-02, 5.4599e-01, + 5.4034e-01, 2.8116e-01, 5.2301e-03, 3.7265e-01, + 1.0825e-01, 9.2466e-01, 8.8151e-01, 5.4868e-01, + 9.2362e-01, 9.3549e-02, 9.5836e-01, 4.1685e-01, + 7.8251e-01, 9.2833e-01, 1.0704e-01, 2.4607e-01, + 8.7394e-01, 2.8186e-01, 3.2168e-02, 7.2510e-01, + 2.6879e-01, 9.7464e-02, 1.3409e-01, 3.3062e-01, + 8.8278e-01, 4.0858e-01, 4.7202e-01, 5.8437e-01, + 9.8604e-01, 3.6536e-01, 4.6498e-01, 7.2753e-01, + 5.3385e-01, 3.9092e-01, 7.0911e-01, 3.3352e-01, + 5.5497e-01, 9.7267e-02, 6.9691e-01, 5.5484e-01, + 9.1751e-01, 8.8460e-02, 7.3616e-02, 3.5998e-01, + 1.3471e-01, 6.4500e-01, 2.4703e-01, 1.5591e-01, + 8.4074e-01, 4.3653e-01, 6.1247e-01, 4.1213e-01, + 5.3951e-01, 6.7111e-01, 6.3140e-01, 2.6497e-01, + 4.4695e-01, 4.4986e-01, 5.9120e-01, 6.5187e-01, + 3.0056e-01, 5.9338e-01, 1.8317e-01, 4.5320e-01, + 5.6754e-01, 2.3987e-01, 7.4706e-02, 4.0133e-01, + 8.8012e-01, 1.4342e-01, 2.0960e-01, 1.3241e-01, + 6.4255e-01, 2.9791e-01, 9.2725e-01, 8.9829e-01, + 5.1115e-01, 6.4747e-01, 9.8096e-01, 3.9074e-01, + 3.0210e-01, 3.1061e-01, 6.8823e-01, 7.5328e-01, + 1.5867e-01, 5.9823e-01, 2.9737e-01, 8.4317e-01, + 2.6299e-01, 3.8415e-01, 7.1391e-01, 1.6739e-01, + 3.1571e-01, 2.3499e-01, 1.5129e-01, 7.0845e-01, + 6.5063e-01, 1.1192e-01, 5.3314e-01, 3.5378e-01, + 5.5449e-02, 9.0028e-01, 5.7673e-01, 5.7277e-01, + 9.0263e-01, 4.3529e-01, 5.7680e-01, 6.2525e-01, + 9.0810e-01, 2.7103e-01, 5.1492e-01, 5.0133e-01, + 3.4588e-01, 7.3315e-01, 8.9434e-01, 4.7066e-01, + 1.7544e-02, 4.8194e-01, 5.4906e-01, 1.5558e-01, + 2.1744e-01, 7.6079e-01, 4.8651e-01, 3.5549e-01, + 7.2595e-01, 5.7134e-01, 4.7736e-01, 6.2765e-01, + 1.5475e-01, 7.8358e-01, 9.1492e-01, 9.0075e-01, + 5.0689e-01, 6.9501e-02, 4.5682e-01, 8.2907e-01, + 1.4651e-01, 7.6667e-01, 2.5912e-01, 3.0656e-01, + 8.7188e-01, 7.3824e-01, 7.2183e-01, 2.8199e-01, + 7.7480e-01, 1.8764e-01, 3.8212e-01, 2.9652e-02, + 5.2722e-01, 6.9146e-01, 3.9911e-01, 2.7766e-01, + 1.4404e-01, 8.7560e-01, 2.8648e-01, 9.4768e-01, + 6.7746e-03, 7.5801e-02, 6.2813e-01, 3.9781e-01, + 8.0702e-01, 1.2572e-01, 5.5327e-01, 9.6521e-01, + 2.6806e-01, 2.4618e-01, 8.1689e-01, 2.3784e-01, + 7.6903e-03, 3.7179e-01, 2.9906e-01, 4.2609e-01, + 4.7857e-01, 5.8806e-01, 2.3974e-01, 9.1742e-01, + 3.9813e-01, 6.2189e-01, 3.0918e-01, 8.7854e-01, + 4.6423e-02, 8.4128e-01, 8.1039e-01, 2.6682e-01, + 9.1925e-01, 9.7354e-01, 2.3395e-02, 3.0720e-01, + 6.7602e-01, 6.7656e-01, 
5.4145e-01, 6.1314e-02, + 8.9924e-01, 6.4719e-01, 4.9837e-01, 4.4099e-01, + 1.9512e-01, 8.3038e-02, 8.9228e-01, 6.2742e-01, + 8.2100e-01, 5.9263e-01, 8.1269e-01, 3.5823e-01, + 3.5513e-01, 3.6720e-01, 9.3768e-01, 1.3988e-01, + 9.7540e-02, 7.6460e-01, 4.3708e-01, 5.6815e-01, + 8.3998e-02, 5.9339e-01, 8.7812e-02, 2.3119e-01, + 8.9352e-01, 1.2615e-01, 6.2478e-01, 9.1375e-01, + 2.8843e-01, 4.0416e-01, 1.2753e-01, 2.6921e-01, + 7.7956e-01, 2.0159e-01, 1.4880e-01, 5.7795e-01, + 7.7439e-02, 7.6665e-02, 4.7053e-02, 3.2850e-03, + 1.4162e-01, 2.7010e-01, 2.2562e-01, 6.8973e-01, + 1.2782e-01, 9.6320e-01, 3.3146e-01, 8.5191e-01, + 1.1799e-01, 8.0380e-01, 6.2200e-01, 8.6155e-01, + 2.7536e-03, 1.0555e-01, 7.7395e-02, 4.7649e-01, + 7.8700e-01, 4.4258e-01, 7.1689e-01, 7.0040e-01, + 7.8334e-01, 7.4078e-02, 1.0312e-01, 8.8193e-01, + 3.9114e-01, 1.7927e-01, 8.3399e-01, 9.1773e-01, + 4.5159e-01, 6.0510e-01, 8.2281e-01, 1.6564e-01, + 2.1390e-01, 5.9743e-02, 7.6504e-01, 6.1133e-01, + 6.7155e-01, 8.3260e-01, 4.2074e-01, 1.6188e-01, + 5.4055e-01, 5.4034e-01, 5.1827e-01, 6.4072e-01, + 9.1437e-01, 8.0662e-01, 7.0467e-01, 5.7738e-01, + 1.3520e-01, 5.2585e-01, 8.5175e-01, 8.4468e-01, + 3.0361e-01, 8.5751e-01, 1.7277e-01, 9.6359e-01, + 8.6128e-01, 2.9765e-01, 5.3919e-01, 6.1371e-01, + 3.8046e-01, 4.8226e-01, 1.3437e-01, 5.0432e-01, + 1.8892e-01, 6.5133e-01, 2.6499e-01, 7.1236e-01, + 9.6542e-01, 5.7861e-01, 7.7088e-01, 8.8734e-01, + 6.3359e-01, 1.7758e-01, 2.7085e-01, 4.3296e-01, + 5.4653e-01, 6.0021e-01, 9.5719e-02, 4.5326e-01, + 8.3059e-01, 6.8966e-02, 5.0285e-01, 5.6595e-01, + 1.5023e-01, 9.0196e-01, 8.4544e-01, 1.0865e-01, + 7.5520e-01, 1.3423e-01, 4.0270e-01, 8.6072e-01, + 6.2697e-01, 3.2967e-01, 8.4051e-01, 7.5529e-01, + 7.0811e-01, 8.6908e-01, 2.9653e-01, 1.2958e-01, + 9.7291e-01, 6.4502e-01, 8.0253e-01, 8.8907e-01, + 2.7251e-01, 4.4411e-01, 4.7057e-01, 8.9308e-01, + 1.3930e-01, 3.2340e-01, 1.6439e-01, 1.5432e-01, + 9.7173e-01, 9.0521e-01, 3.4371e-01, 4.0490e-01, + 4.6653e-01, 7.1837e-01, 7.9360e-01, 5.4664e-01, + 9.4729e-01, 5.5325e-01, 5.1251e-01, 5.7572e-01, + 7.5254e-01, 1.2146e-02, 2.5185e-01, 5.1271e-01, + 2.7655e-01, 6.0389e-01, 3.8128e-01, 5.3860e-01, + 1.3426e-01, 6.6692e-01, 9.7070e-02, 5.4658e-01, + 4.3834e-01, 7.1205e-01, 9.0608e-01, 5.0276e-01, + 7.2740e-01, 4.9999e-01, 9.8219e-01, 2.5527e-01, + 9.9839e-01, 4.4277e-01, 7.4335e-01, 5.2927e-01, + 4.0147e-01, 3.0481e-01, 5.0779e-01, 6.2447e-01, + 4.1423e-01, 1.7832e-01, 2.7916e-01, 9.9786e-02, + 2.4396e-02, 9.0325e-01, 1.8473e-01, 5.1315e-01, + 7.4856e-01, 7.2333e-01, 7.3631e-01, 6.6483e-01, + 5.8875e-01, 1.2949e-01, 9.2499e-01, 4.2606e-01, + 1.2427e-01, 5.9750e-01, 6.8181e-01, 3.2032e-01, + 5.9607e-01, 4.8650e-01, 6.1343e-01, 8.3800e-01, + 4.1249e-01, 5.4277e-01, 5.1267e-01, 5.8284e-01, + 2.6744e-01, 3.3676e-01, 4.4564e-01, 3.3742e-01, + 1.8033e-01, 9.9759e-01, 5.5480e-01, 8.5469e-02, + 1.5223e-01, 4.7243e-01, 6.4551e-02, 1.0404e-01, + 9.1424e-01, 1.8783e-01, 8.3795e-01, 5.6527e-01, + 3.0543e-01, 2.2606e-01, 3.4035e-01, 1.2321e-01, + 6.3580e-01, 5.1722e-01, 7.7951e-01, 6.5095e-01, + 9.5081e-01, 1.8720e-01, 3.7574e-01, 1.0842e-01, + 8.9786e-01, 5.3056e-01, 9.1637e-01, 2.9814e-01, + 1.2798e-01, 2.2609e-01, 6.4805e-01, 9.3799e-01, + 3.2670e-03, 2.0420e-01, 8.4821e-01, 4.8440e-01, + 7.5172e-01, 9.4055e-01, 5.5659e-01, 9.7478e-02, + 2.3372e-01, 8.5619e-01, 3.4988e-01, 4.0783e-01, + 9.8429e-02, 9.8374e-01, 8.1964e-01, 1.4047e-01, + 1.2908e-01, 1.7080e-01, 6.9650e-01, 5.7250e-02, + 5.7554e-01, 9.8411e-01, 4.7299e-01, 8.8420e-01, + 8.2475e-01, 3.6581e-01, 
8.3189e-01, 6.0186e-01, + 4.7676e-01, 7.1719e-01, 7.4824e-02, 4.4193e-02, + 5.8170e-01, 6.8150e-01, 3.6726e-02, 2.6419e-01, + 1.6276e-01, 1.2947e-01, 9.0307e-01, 7.9831e-01, + 9.3271e-02, 5.2367e-01, 4.9455e-01, 4.9804e-01, + 1.7249e-01, 6.8633e-01, 3.6808e-01, 4.2856e-01, + 8.7386e-01, 2.0002e-01, 4.5518e-01, 2.2888e-01, + 2.2350e-01, 3.8328e-01, 8.0557e-01, 1.8242e-01, + 5.2051e-01, 1.4979e-01, 3.4747e-01, 4.2048e-01, + 2.3832e-01, 6.2383e-01, 1.3351e-02, 3.7202e-01, + 3.5029e-01, 2.2524e-02, 7.6718e-01, 6.1161e-01, + 3.1224e-01, 2.8659e-02, 9.0404e-02, 4.2520e-01, + 4.9767e-01, 2.2752e-01, 2.1180e-01, 2.9852e-01, + 5.0825e-02, 1.1592e-01, 7.6533e-01, 4.7888e-02, + 5.7053e-01, 9.8417e-01, 6.0783e-01, 1.2227e-01, + 9.7016e-01, 2.0638e-01, 3.0425e-01, 3.9947e-02, + 9.0234e-01, 2.6704e-01, 7.2920e-01, 8.4551e-01, + 5.8286e-01, 8.1310e-01, 1.3082e-01, 5.9846e-01, + 3.3391e-01, 9.3071e-02, 3.0659e-01, 8.9902e-03, + 8.2513e-01, 6.9419e-01, 1.7624e-01, 4.2008e-01, + 5.8744e-01, 4.8844e-01, 5.8423e-02, 2.3564e-01, + 9.3439e-01, 9.7089e-01, 8.6264e-01, 9.7042e-01, + 8.7013e-01, 6.0291e-01, 9.1248e-01, 3.4274e-01, + 3.5745e-01, 2.1946e-01, 1.0444e-01, 8.5652e-01, + 7.6108e-02, 5.6230e-01, 5.6428e-01, 6.2069e-01, + 2.2683e-02, 5.7501e-01, 3.5375e-01, 7.8703e-01, + 6.6308e-01, 7.4038e-01, 9.3660e-01, 3.4831e-01, + 3.9361e-01, 4.0072e-03, 1.6103e-01, 8.5753e-01, + 7.6312e-01, 3.4146e-01, 5.9079e-01, 7.0291e-01, + 1.3256e-01, 2.2869e-01, 6.0451e-01, 5.9436e-01, + 5.3361e-01, 7.6419e-01, 2.7024e-01, 6.8091e-01, + 4.9998e-01, 3.4192e-01, 2.8797e-03, 5.8640e-01, + 1.7082e-01, 6.1514e-01, 9.8979e-01, 4.7219e-01, + 9.4330e-01, 3.1986e-01, 3.6346e-01, 4.9807e-01, + 8.4419e-02, 9.2554e-01, 8.2927e-02, 7.0710e-01, + 6.3602e-01, 3.9341e-01, 4.1112e-01, 2.0481e-01, + 6.2342e-01, 8.1162e-01, 5.2079e-01, 9.6347e-01, + 5.6725e-01, 2.2812e-01, 8.3504e-01, 8.4527e-01, + 2.1915e-01, 3.2541e-01, 4.7805e-01, 7.9254e-01, + 1.1463e-02, 6.4787e-01, 7.7038e-01, 8.7874e-01, + 3.4853e-01, 1.4547e-01, 8.0736e-01, 7.7148e-01, + 6.4522e-01, 4.1223e-01, 6.6583e-01, 2.3489e-01, + 7.5618e-02, 5.7345e-01, 6.0048e-01, 6.8905e-01, + 7.0429e-01, 6.9057e-02, 8.8357e-01, 5.1606e-01, + 1.3926e-01, 6.1546e-01, 4.3300e-01, 6.6197e-02, + 2.9262e-01, 2.9336e-01, 8.9253e-01, 9.5784e-01, + 1.1128e-01, 7.2099e-02, 7.1162e-01, 2.9179e-01, + 7.3996e-01, 7.0497e-01, 7.7206e-01, 3.4136e-01, + 1.0003e-02, 6.0418e-01, 3.8677e-01, 5.1479e-01, + 8.8069e-01, 4.4083e-01, 3.9369e-01, 9.6981e-01, + 8.6164e-01, 1.9798e-01, 3.3966e-01, 1.5113e-01, + 5.6949e-01, 4.2415e-01, 8.1822e-01, 1.8858e-01, + 3.6826e-01, 2.8324e-01, 4.2211e-01, 8.6172e-01, + 6.6398e-01, 5.4593e-01, 3.6808e-01, 3.3212e-01, + 2.8244e-02, 2.9601e-01, 4.5022e-01, 4.3624e-01, + 7.4292e-01, 1.2296e-01, 5.8630e-01, 9.2863e-01, + 4.8906e-02, 7.1181e-01, 5.9321e-01, 2.6944e-01, + 1.2286e-02, 6.1956e-01, 6.7301e-01, 8.8121e-01, + 8.0907e-01, 4.8580e-01, 5.0446e-01, 7.7445e-01, + 7.7929e-01, 5.1740e-01, 9.1796e-01, 4.0471e-01, + 8.0774e-01, 6.8630e-01, 6.5360e-01, 1.6695e-01, + 6.5084e-01, 3.2945e-01, 5.4305e-01, 5.0112e-01, + 3.8383e-01, 8.9022e-01, 9.2979e-01, 7.6494e-02, + 9.2941e-01, 2.9211e-01, 5.7058e-01, 5.5095e-01, + 6.4051e-01, 4.0649e-01, 8.4281e-01, 2.8756e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8266, 0.4178, 0.7886, ..., 0.6077, 0.8668, 0.9039]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.448915243148804 seconds + +[19.15, 18.86, 18.53, 22.39, 
19.61, 18.63, 19.95, 18.63, 18.74, 18.82] +[52.97] +14.157347202301025 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 243279, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.448915243148804, 'TIME_S_1KI': 0.04295033785550255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.9146813058853, 'W': 52.97} +[19.15, 18.86, 18.53, 22.39, 19.61, 18.63, 19.95, 18.63, 18.74, 18.82, 19.72, 18.55, 20.1, 18.63, 18.78, 18.45, 18.86, 18.54, 18.49, 18.65] +343.90999999999997 +17.1955 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 243279, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.448915243148804, 'TIME_S_1KI': 0.04295033785550255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.9146813058853, 'W': 52.97, 'J_1KI': 3.082529446873283, 'W_1KI': 0.21773354872389314, 'W_D': 35.7745, 'J_D': 506.47201748871805, 'W_D_1KI': 0.14705132789924327, 'J_D_1KI': 0.0006044554930727407} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..2dd7277 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 57254, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.50862431526184, "TIME_S_1KI": 0.183543932568237, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 754.4204360270501, "W": 52.99, "J_1KI": 13.176728892776925, "W_1KI": 0.9255248541586614, "W_D": 35.93475, "J_D": 511.6042604929805, "W_D_1KI": 0.6276373703147379, "J_D_1KI": 0.010962332244292763} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..cb3cde8 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.030549287796020508} + +tensor(indices=tensor([[9685, 9896, 5323, ..., 9862, 9170, 8961], + [7477, 1409, 4642, ..., 9450, 8975, 93]]), + values=tensor([0.2946, 0.7621, 0.2330, ..., 0.6433, 0.6074, 0.5177]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.6039, 0.1516, 0.5833, ..., 0.6158, 0.1058, 0.9742]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.030549287796020508 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '34370', '-ss', 
'10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.303138732910156} + +tensor(indices=tensor([[5399, 8252, 5639, ..., 4717, 5016, 676], + [8835, 8115, 6055, ..., 2242, 7565, 9945]]), + values=tensor([0.4451, 0.2461, 0.1136, ..., 0.4300, 0.9018, 0.3972]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.7515, 0.3604, 0.4649, ..., 0.0602, 0.2766, 0.0836]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 6.303138732910156 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '57254', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.50862431526184} + +tensor(indices=tensor([[7039, 1609, 8530, ..., 3843, 5520, 5337], + [1541, 7706, 1376, ..., 5851, 1191, 2062]]), + values=tensor([0.5571, 0.6043, 0.4105, ..., 0.3579, 0.3166, 0.4906]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.8666, 0.4737, 0.8919, ..., 0.0961, 0.5052, 0.4552]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.50862431526184 seconds + +tensor(indices=tensor([[7039, 1609, 8530, ..., 3843, 5520, 5337], + [1541, 7706, 1376, ..., 5851, 1191, 2062]]), + values=tensor([0.5571, 0.6043, 0.4105, ..., 0.3579, 0.3166, 0.4906]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.8666, 0.4737, 0.8919, ..., 0.0961, 0.5052, 0.4552]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.50862431526184 seconds + +[18.99, 18.46, 18.92, 18.51, 19.57, 18.74, 18.76, 18.38, 18.73, 18.62] +[52.99] +14.23703408241272 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 57254, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.50862431526184, 'TIME_S_1KI': 0.183543932568237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 754.4204360270501, 'W': 52.99} +[18.99, 18.46, 18.92, 18.51, 19.57, 18.74, 18.76, 18.38, 18.73, 18.62, 19.21, 18.5, 18.7, 18.54, 18.67, 18.43, 18.53, 23.14, 18.98, 18.27] +341.105 +17.05525 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 57254, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.50862431526184, 'TIME_S_1KI': 0.183543932568237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 754.4204360270501, 'W': 52.99, 'J_1KI': 13.176728892776925, 'W_1KI': 0.9255248541586614, 'W_D': 35.93475, 'J_D': 511.6042604929805, 'W_D_1KI': 0.6276373703147379, 'J_D_1KI': 0.010962332244292763} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..bdade1d --- 
/dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 121.61708164215088, "TIME_S_1KI": 1216.1708164215088, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 7122.070309627056, "W": 53.15, "J_1KI": 71220.70309627058, "W_1KI": 531.5, "W_D": 36.320499999999996, "J_D": 4866.926710833668, "W_D_1KI": 363.2049999999999, "J_D_1KI": 3632.0499999999993} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..cb8582c --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 121.61708164215088} + +tensor(indices=tensor([[259648, 20445, 71925, ..., 32348, 321477, 171551], + [405052, 385707, 334337, ..., 400146, 318936, 302894]]), + values=tensor([0.9724, 0.7213, 0.1607, ..., 0.6742, 0.0323, 0.6944]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.1295, 0.8590, 0.1296, ..., 0.2625, 0.0779, 0.1541]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 121.61708164215088 seconds + +tensor(indices=tensor([[259648, 20445, 71925, ..., 32348, 321477, 171551], + [405052, 385707, 334337, ..., 400146, 318936, 302894]]), + values=tensor([0.9724, 0.7213, 0.1607, ..., 0.6742, 0.0323, 0.6944]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.1295, 0.8590, 0.1296, ..., 0.2625, 0.0779, 0.1541]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 121.61708164215088 seconds + +[18.98, 18.54, 19.0, 18.61, 18.8, 18.63, 18.85, 18.74, 18.48, 18.45] +[53.15] +133.99944138526917 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 121.61708164215088, 'TIME_S_1KI': 1216.1708164215088, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 7122.070309627056, 'W': 53.15} +[18.98, 18.54, 19.0, 18.61, 18.8, 18.63, 18.85, 18.74, 18.48, 18.45, 19.28, 18.45, 18.54, 18.53, 19.28, 18.43, 18.61, 18.41, 19.08, 18.51] +336.59000000000003 +16.829500000000003 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 121.61708164215088, 'TIME_S_1KI': 1216.1708164215088, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
7122.070309627056, 'W': 53.15, 'J_1KI': 71220.70309627058, 'W_1KI': 531.5, 'W_D': 36.320499999999996, 'J_D': 4866.926710833668, 'W_D_1KI': 363.2049999999999, 'J_D_1KI': 3632.0499999999993} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..4d75520 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.497106790542603, "TIME_S_1KI": 114.97106790542603, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 836.9120391440392, "W": 52.77, "J_1KI": 8369.120391440392, "W_1KI": 527.7, "W_D": 35.596500000000006, "J_D": 564.5468903049232, "W_D_1KI": 355.9650000000001, "J_D_1KI": 3559.650000000001} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..4be700b --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.497106790542603} + +tensor(indices=tensor([[337951, 376697, 329092, ..., 403608, 239222, 55857], + [485131, 348707, 141040, ..., 406961, 298131, 293954]]), + values=tensor([0.1547, 0.9987, 0.2964, ..., 0.2917, 0.0223, 0.0016]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5278, 0.6993, 0.0090, ..., 0.1942, 0.6461, 0.6893]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.497106790542603 seconds + +tensor(indices=tensor([[337951, 376697, 329092, ..., 403608, 239222, 55857], + [485131, 348707, 141040, ..., 406961, 298131, 293954]]), + values=tensor([0.1547, 0.9987, 0.2964, ..., 0.2917, 0.0223, 0.0016]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5278, 0.6993, 0.0090, ..., 0.1942, 0.6461, 0.6893]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.497106790542603 seconds + +[19.3, 22.7, 19.02, 18.84, 19.68, 19.7, 19.08, 18.54, 18.7, 18.57] +[52.77] +15.859617948532104 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.497106790542603, 'TIME_S_1KI': 114.97106790542603, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 836.9120391440392, 'W': 52.77} +[19.3, 22.7, 19.02, 18.84, 19.68, 19.7, 19.08, 18.54, 18.7, 18.57, 19.03, 18.69, 18.6, 18.6, 18.76, 18.84, 18.59, 18.47, 18.93, 18.56] 
+343.46999999999997 +17.173499999999997 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.497106790542603, 'TIME_S_1KI': 114.97106790542603, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 836.9120391440392, 'W': 52.77, 'J_1KI': 8369.120391440392, 'W_1KI': 527.7, 'W_D': 35.596500000000006, 'J_D': 564.5468903049232, 'W_D_1KI': 355.9650000000001, 'J_D_1KI': 3559.650000000001} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..b7140cf --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 60.857818365097046, "TIME_S_1KI": 608.5781836509705, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3662.191264090538, "W": 52.97, "J_1KI": 36621.91264090538, "W_1KI": 529.6999999999999, "W_D": 36.1015, "J_D": 2495.952386644602, "W_D_1KI": 361.01500000000004, "J_D_1KI": 3610.1500000000005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..036a203 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 60.857818365097046} + +tensor(indices=tensor([[ 951, 120279, 465661, ..., 287503, 384707, 133194], + [173597, 161334, 406419, ..., 156254, 252009, 443489]]), + values=tensor([0.2221, 0.7099, 0.9372, ..., 0.6116, 0.9272, 0.7202]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.1646, 0.8033, 0.5091, ..., 0.5821, 0.3961, 0.4196]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 60.857818365097046 seconds + +tensor(indices=tensor([[ 951, 120279, 465661, ..., 287503, 384707, 133194], + [173597, 161334, 406419, ..., 156254, 252009, 443489]]), + values=tensor([0.2221, 0.7099, 0.9372, ..., 0.6116, 0.9272, 0.7202]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.1646, 0.8033, 0.5091, ..., 0.5821, 0.3961, 0.4196]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 60.857818365097046 seconds + +[19.15, 18.53, 18.69, 18.58, 18.47, 18.48, 19.28, 18.5, 18.8, 18.45] +[52.97] +69.13708257675171 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': 
[500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 60.857818365097046, 'TIME_S_1KI': 608.5781836509705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3662.191264090538, 'W': 52.97} +[19.15, 18.53, 18.69, 18.58, 18.47, 18.48, 19.28, 18.5, 18.8, 18.45, 19.12, 18.98, 18.62, 18.6, 18.62, 18.74, 18.74, 19.14, 18.64, 19.2] +337.37 +16.8685 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 60.857818365097046, 'TIME_S_1KI': 608.5781836509705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3662.191264090538, 'W': 52.97, 'J_1KI': 36621.91264090538, 'W_1KI': 529.6999999999999, 'W_D': 36.1015, 'J_D': 2495.952386644602, 'W_D_1KI': 361.01500000000004, 'J_D_1KI': 3610.1500000000005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..3c6c56c --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1175, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.40561294555664, "TIME_S_1KI": 8.855840804729056, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 749.8353310585022, "W": 52.84, "J_1KI": 638.1577285604274, "W_1KI": 44.970212765957456, "W_D": 36.08175000000001, "J_D": 512.024431423545, "W_D_1KI": 30.70787234042554, "J_D_1KI": 26.13435943866003} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..313a610 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.89335036277771} + +tensor(indices=tensor([[ 9490, 40672, 25901, ..., 26297, 18451, 46341], + [24604, 34459, 37775, ..., 37437, 654, 20720]]), + values=tensor([0.5850, 0.5754, 0.0013, ..., 0.4759, 0.1987, 0.1477]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.0909, 0.0552, 0.3962, ..., 0.1581, 0.3835, 0.7646]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.89335036277771 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '1175', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, 
"MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.40561294555664} + +tensor(indices=tensor([[ 9796, 21726, 48108, ..., 49564, 41083, 42989], + [ 1577, 2862, 48806, ..., 24774, 8218, 42316]]), + values=tensor([0.4290, 0.2222, 0.0536, ..., 0.8181, 0.3029, 0.7655]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.4594, 0.1020, 0.5009, ..., 0.2905, 0.9785, 0.9097]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.40561294555664 seconds + +tensor(indices=tensor([[ 9796, 21726, 48108, ..., 49564, 41083, 42989], + [ 1577, 2862, 48806, ..., 24774, 8218, 42316]]), + values=tensor([0.4290, 0.2222, 0.0536, ..., 0.8181, 0.3029, 0.7655]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.4594, 0.1020, 0.5009, ..., 0.2905, 0.9785, 0.9097]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.40561294555664 seconds + +[18.91, 18.51, 18.85, 18.48, 18.55, 18.34, 18.76, 18.51, 18.98, 18.37] +[52.84] +14.190676212310791 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.40561294555664, 'TIME_S_1KI': 8.855840804729056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.8353310585022, 'W': 52.84} +[18.91, 18.51, 18.85, 18.48, 18.55, 18.34, 18.76, 18.51, 18.98, 18.37, 19.36, 18.62, 18.59, 18.45, 18.79, 18.61, 18.43, 18.47, 18.69, 18.43] +335.16499999999996 +16.758249999999997 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.40561294555664, 'TIME_S_1KI': 8.855840804729056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.8353310585022, 'W': 52.84, 'J_1KI': 638.1577285604274, 'W_1KI': 44.970212765957456, 'W_D': 36.08175000000001, 'J_D': 512.024431423545, 'W_D_1KI': 30.70787234042554, 'J_D_1KI': 26.13435943866003} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..a420966 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 117, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.380040407180786, "TIME_S_1KI": 88.71829407846826, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 793.54994743824, "W": 52.98, "J_1KI": 6782.47818323282, "W_1KI": 452.82051282051276, "W_D": 35.899, "J_D": 537.705729767561, "W_D_1KI": 306.8290598290598, "J_D_1KI": 2622.470596829571} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..652c07a --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 
'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.901776790618896} + +tensor(indices=tensor([[36395, 7294, 45349, ..., 7523, 3829, 45990], + [10881, 42612, 48486, ..., 31508, 28624, 40369]]), + values=tensor([0.1114, 0.2625, 0.3491, ..., 0.3026, 0.3855, 0.2105]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.3534, 0.8690, 0.2833, ..., 0.3550, 0.9005, 0.0733]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 8.901776790618896 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '117', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.380040407180786} + +tensor(indices=tensor([[47205, 41176, 23177, ..., 42413, 49737, 35916], + [16083, 24941, 28479, ..., 3646, 31510, 18791]]), + values=tensor([0.3290, 0.3869, 0.6656, ..., 0.3339, 0.0385, 0.5772]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5858, 0.1772, 0.2941, ..., 0.1825, 0.4859, 0.2245]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.380040407180786 seconds + +tensor(indices=tensor([[47205, 41176, 23177, ..., 42413, 49737, 35916], + [16083, 24941, 28479, ..., 3646, 31510, 18791]]), + values=tensor([0.3290, 0.3869, 0.6656, ..., 0.3339, 0.0385, 0.5772]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5858, 0.1772, 0.2941, ..., 0.1825, 0.4859, 0.2245]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.380040407180786 seconds + +[19.03, 18.37, 18.49, 22.89, 18.86, 18.56, 19.04, 18.36, 18.85, 18.47] +[52.98] +14.97829270362854 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 117, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.380040407180786, 'TIME_S_1KI': 88.71829407846826, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 793.54994743824, 'W': 52.98} +[19.03, 18.37, 18.49, 22.89, 18.86, 18.56, 19.04, 18.36, 18.85, 18.47, 19.27, 20.41, 18.54, 18.36, 18.53, 18.71, 18.61, 18.48, 18.81, 18.73] +341.62 +17.081 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 117, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.380040407180786, 'TIME_S_1KI': 88.71829407846826, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 793.54994743824, 'W': 52.98, 'J_1KI': 6782.47818323282, 'W_1KI': 452.82051282051276, 'W_D': 35.899, 'J_D': 537.705729767561, 'W_D_1KI': 306.8290598290598, 'J_D_1KI': 2622.470596829571} diff --git 
a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..7c5e4ff --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 88.84721422195435, "TIME_S_1KI": 888.4721422195435, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5454.322017431259, "W": 53.4, "J_1KI": 54543.22017431259, "W_1KI": 534.0, "W_D": 36.530499999999996, "J_D": 3731.256750145554, "W_D_1KI": 365.305, "J_D_1KI": 3653.0499999999997} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..d24d27f --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 88.84721422195435} + +tensor(indices=tensor([[18847, 42104, 45010, ..., 26204, 27036, 21397], + [ 8719, 33420, 16526, ..., 1190, 16260, 39046]]), + values=tensor([0.6135, 0.2239, 0.8021, ..., 0.4533, 0.6086, 0.4137]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6174, 0.4050, 0.2302, ..., 0.4491, 0.9961, 0.7817]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 88.84721422195435 seconds + +tensor(indices=tensor([[18847, 42104, 45010, ..., 26204, 27036, 21397], + [ 8719, 33420, 16526, ..., 1190, 16260, 39046]]), + values=tensor([0.6135, 0.2239, 0.8021, ..., 0.4533, 0.6086, 0.4137]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6174, 0.4050, 0.2302, ..., 0.4491, 0.9961, 0.7817]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 88.84721422195435 seconds + +[18.97, 18.37, 19.07, 19.17, 18.58, 18.58, 18.95, 18.5, 18.88, 18.36] +[53.4] +102.14086174964905 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 88.84721422195435, 'TIME_S_1KI': 888.4721422195435, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5454.322017431259, 'W': 53.4} +[18.97, 18.37, 19.07, 19.17, 18.58, 18.58, 18.95, 18.5, 18.88, 18.36, 19.35, 18.46, 18.63, 18.55, 19.07, 18.67, 18.86, 18.56, 18.81, 18.68] +337.39000000000004 +16.869500000000002 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 
88.84721422195435, 'TIME_S_1KI': 888.4721422195435, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5454.322017431259, 'W': 53.4, 'J_1KI': 54543.22017431259, 'W_1KI': 534.0, 'W_D': 36.530499999999996, 'J_D': 3731.256750145554, 'W_D_1KI': 365.305, 'J_D_1KI': 3653.0499999999997} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..16e5953 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 444.36517548561096, "TIME_S_1KI": 4443.65175485611, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 26456.16959920168, "W": 53.51, "J_1KI": 264561.69599201676, "W_1KI": 535.1, "W_D": 36.66175, "J_D": 18126.134849626837, "W_D_1KI": 366.6175, "J_D_1KI": 3666.175} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..626eb3c --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 444.36517548561096} + +tensor(indices=tensor([[27162, 20193, 48468, ..., 25037, 29932, 49923], + [47658, 37329, 30141, ..., 45159, 2995, 43115]]), + values=tensor([0.4028, 0.7949, 0.7188, ..., 0.5901, 0.8228, 0.7252]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.3746, 0.3628, 0.7536, ..., 0.5345, 0.4021, 0.2964]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 444.36517548561096 seconds + +tensor(indices=tensor([[27162, 20193, 48468, ..., 25037, 29932, 49923], + [47658, 37329, 30141, ..., 45159, 2995, 43115]]), + values=tensor([0.4028, 0.7949, 0.7188, ..., 0.5901, 0.8228, 0.7252]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.3746, 0.3628, 0.7536, ..., 0.5345, 0.4021, 0.2964]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 444.36517548561096 seconds + +[19.38, 18.39, 19.12, 18.47, 18.83, 18.52, 18.81, 18.45, 18.88, 18.33] +[53.51] +494.41542887687683 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 444.36517548561096, 'TIME_S_1KI': 4443.65175485611, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 26456.16959920168, 'W': 53.51} +[19.38, 18.39, 19.12, 18.47, 18.83, 18.52, 18.81, 18.45, 18.88, 18.33, 19.01, 18.38, 18.44, 18.35, 19.34, 18.7, 19.02, 18.85, 18.76, 
18.59] +336.96500000000003 +16.84825 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 444.36517548561096, 'TIME_S_1KI': 4443.65175485611, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 26456.16959920168, 'W': 53.51, 'J_1KI': 264561.69599201676, 'W_1KI': 535.1, 'W_D': 36.66175, 'J_D': 18126.134849626837, 'W_D_1KI': 366.6175, 'J_D_1KI': 3666.175} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..2a471a6 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11506, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.493435859680176, "TIME_S_1KI": 0.911996859002275, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 755.218931055069, "W": 52.940000000000005, "J_1KI": 65.63696602251599, "W_1KI": 4.601077698592039, "W_D": 36.18225, "J_D": 516.1601845139265, "W_D_1KI": 3.144641925951678, "J_D_1KI": 0.273304530327801} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..b197b5c --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.10554790496826172} + +tensor(indices=tensor([[37799, 11125, 17985, ..., 16695, 24615, 16681], + [45534, 14650, 191, ..., 780, 8527, 18310]]), + values=tensor([0.8131, 0.2553, 0.4857, ..., 0.5002, 0.8750, 0.1569]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0476, 0.6266, 0.4611, ..., 0.7310, 0.2205, 0.1676]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.10554790496826172 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '9948', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.077801704406738} + +tensor(indices=tensor([[13818, 31279, 34298, ..., 24295, 24506, 20583], + [31294, 10009, 49198, ..., 47674, 4058, 20962]]), + values=tensor([0.3071, 0.8645, 0.8990, ..., 0.6636, 0.4107, 0.4982]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5006, 0.4302, 0.5651, ..., 0.9042, 0.8647, 0.3106]) +Matrix Type: synthetic +Matrix 
Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.077801704406738 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '11506', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.493435859680176} + +tensor(indices=tensor([[17833, 7925, 5541, ..., 28765, 26792, 47732], + [ 1162, 38863, 44356, ..., 37240, 23573, 42658]]), + values=tensor([0.2998, 0.0011, 0.3588, ..., 0.8901, 0.2789, 0.3093]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.2796, 0.5836, 0.3811, ..., 0.7105, 0.3441, 0.3692]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.493435859680176 seconds + +tensor(indices=tensor([[17833, 7925, 5541, ..., 28765, 26792, 47732], + [ 1162, 38863, 44356, ..., 37240, 23573, 42658]]), + values=tensor([0.2998, 0.0011, 0.3588, ..., 0.8901, 0.2789, 0.3093]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.2796, 0.5836, 0.3811, ..., 0.7105, 0.3441, 0.3692]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.493435859680176 seconds + +[19.07, 18.59, 18.82, 18.55, 18.8, 18.47, 18.7, 18.48, 18.71, 18.72] +[52.94] +14.265563488006592 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11506, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.493435859680176, 'TIME_S_1KI': 0.911996859002275, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 755.218931055069, 'W': 52.940000000000005} +[19.07, 18.59, 18.82, 18.55, 18.8, 18.47, 18.7, 18.48, 18.71, 18.72, 19.01, 18.58, 18.78, 18.45, 18.37, 18.43, 18.4, 18.67, 18.62, 18.67] +335.15500000000003 +16.75775 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11506, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.493435859680176, 'TIME_S_1KI': 0.911996859002275, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 755.218931055069, 'W': 52.940000000000005, 'J_1KI': 65.63696602251599, 'W_1KI': 4.601077698592039, 'W_D': 36.18225, 'J_D': 516.1601845139265, 'W_D_1KI': 3.144641925951678, 'J_D_1KI': 0.273304530327801} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..cecd5e3 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2299, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.162626504898071, "TIME_S_1KI": 4.420455200042658, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 737.8573550343513, "W": 52.86999999999999, "J_1KI": 
320.94708787923065, "W_1KI": 22.99695519791213, "W_D": 35.57199999999999, "J_D": 496.4452777242659, "W_D_1KI": 15.472814267072634, "J_D_1KI": 6.7302367407884445} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..9e2cae8 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.4565856456756592} + +tensor(indices=tensor([[24610, 8886, 22938, ..., 45216, 11461, 33117], + [37722, 48273, 354, ..., 19289, 17683, 20268]]), + values=tensor([0.9632, 0.1457, 0.9746, ..., 0.1150, 0.9681, 0.4241]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.5249, 0.5378, 0.3521, ..., 0.4788, 0.2395, 0.9511]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.4565856456756592 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '2299', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.162626504898071} + +tensor(indices=tensor([[31571, 47952, 14460, ..., 7592, 4193, 18326], + [45202, 26799, 24362, ..., 31107, 49265, 39997]]), + values=tensor([0.1607, 0.0167, 0.0988, ..., 0.8605, 0.6097, 0.5006]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.1258, 0.7004, 0.9707, ..., 0.1555, 0.5591, 0.6925]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.162626504898071 seconds + +tensor(indices=tensor([[31571, 47952, 14460, ..., 7592, 4193, 18326], + [45202, 26799, 24362, ..., 31107, 49265, 39997]]), + values=tensor([0.1607, 0.0167, 0.0988, ..., 0.8605, 0.6097, 0.5006]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.1258, 0.7004, 0.9707, ..., 0.1555, 0.5591, 0.6925]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.162626504898071 seconds + +[18.9, 19.2, 18.71, 18.57, 18.56, 18.73, 18.51, 18.65, 22.58, 19.2] +[52.87] +13.956068754196167 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2299, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.162626504898071, 'TIME_S_1KI': 4.420455200042658, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 737.8573550343513, 'W': 52.86999999999999} +[18.9, 19.2, 18.71, 18.57, 18.56, 18.73, 18.51, 18.65, 22.58, 19.2, 19.11, 19.01, 18.65, 18.56, 22.48, 19.62, 18.45, 19.24, 18.55, 18.57] 
+345.96000000000004 +17.298000000000002 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2299, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.162626504898071, 'TIME_S_1KI': 4.420455200042658, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 737.8573550343513, 'W': 52.86999999999999, 'J_1KI': 320.94708787923065, 'W_1KI': 22.99695519791213, 'W_D': 35.57199999999999, 'J_D': 496.4452777242659, 'W_D_1KI': 15.472814267072634, 'J_D_1KI': 6.7302367407884445} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..498b7bf --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 110682, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.485157012939453, "TIME_S_1KI": 0.09473226913987327, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 754.4865761089325, "W": 53.16, "J_1KI": 6.816705300852283, "W_1KI": 0.48029489889955, "W_D": 36.256, "J_D": 514.572334526062, "W_D_1KI": 0.3275690717551183, "J_D_1KI": 0.002959551433431979} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..cc96991 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02198195457458496} + +tensor(indices=tensor([[3357, 462, 4353, ..., 3080, 2253, 4288], + [2749, 3125, 2482, ..., 1310, 1526, 4597]]), + values=tensor([0.2770, 0.6466, 0.3291, ..., 0.7595, 0.9488, 0.7129]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.7058, 0.7026, 0.5296, ..., 0.8512, 0.7474, 0.0473]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.02198195457458496 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '47766', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.531348466873169} + +tensor(indices=tensor([[1732, 4057, 1051, ..., 1906, 931, 4539], + [2826, 1198, 3776, ..., 1414, 120, 1889]]), + values=tensor([0.5487, 0.1522, 0.4759, ..., 0.8365, 0.9620, 0.0085]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.1511, 0.8118, 0.8481, ..., 0.1084, 0.8361, 0.3367]) +Matrix Type: synthetic +Matrix 
Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 4.531348466873169 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '110682', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.485157012939453} + +tensor(indices=tensor([[2047, 743, 2200, ..., 1429, 3571, 2544], + [3703, 1196, 2969, ..., 3818, 4950, 1039]]), + values=tensor([0.4212, 0.5939, 0.5837, ..., 0.8268, 0.0991, 0.7311]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8692, 0.7388, 0.6361, ..., 0.9749, 0.5162, 0.5228]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.485157012939453 seconds + +tensor(indices=tensor([[2047, 743, 2200, ..., 1429, 3571, 2544], + [3703, 1196, 2969, ..., 3818, 4950, 1039]]), + values=tensor([0.4212, 0.5939, 0.5837, ..., 0.8268, 0.0991, 0.7311]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8692, 0.7388, 0.6361, ..., 0.9749, 0.5162, 0.5228]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.485157012939453 seconds + +[19.06, 18.84, 18.81, 18.64, 18.5, 18.83, 18.55, 18.41, 18.67, 18.67] +[53.16] +14.192749738693237 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 110682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.485157012939453, 'TIME_S_1KI': 0.09473226913987327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 754.4865761089325, 'W': 53.16} +[19.06, 18.84, 18.81, 18.64, 18.5, 18.83, 18.55, 18.41, 18.67, 18.67, 19.13, 18.52, 18.78, 18.81, 18.74, 18.33, 19.19, 18.77, 19.88, 18.76] +338.0799999999999 +16.903999999999996 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 110682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.485157012939453, 'TIME_S_1KI': 0.09473226913987327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 754.4865761089325, 'W': 53.16, 'J_1KI': 6.816705300852283, 'W_1KI': 0.48029489889955, 'W_D': 36.256, 'J_D': 514.572334526062, 'W_D_1KI': 0.3275690717551183, 'J_D_1KI': 0.002959551433431979} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..4e3bd6f --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11912, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.549734830856323, "TIME_S_1KI": 0.8856392571236, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 757.0913680386543, "W": 52.83, "J_1KI": 63.55703223964526, "W_1KI": 4.435023505708529, "W_D": 35.7725, "J_D": 512.6452955359221, "W_D_1KI": 
3.003064137004701, "J_D_1KI": 0.25210410821060286} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..65bf887 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.10046219825744629} + +tensor(indices=tensor([[3814, 3588, 2383, ..., 3799, 3335, 4939], + [3239, 2739, 2241, ..., 3871, 4755, 3030]]), + values=tensor([0.8014, 0.6694, 0.7110, ..., 0.4497, 0.1435, 0.4755]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0128, 0.1384, 0.3080, ..., 0.7168, 0.8793, 0.4155]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.10046219825744629 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '10451', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.211597919464111} + +tensor(indices=tensor([[ 286, 1227, 2595, ..., 4203, 4138, 2254], + [ 438, 1909, 4839, ..., 3314, 3803, 2152]]), + values=tensor([0.1220, 0.9605, 0.1506, ..., 0.6905, 0.1091, 0.4553]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.7336, 0.8033, 0.2239, ..., 0.0093, 0.7832, 0.8479]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.211597919464111 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '11912', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.549734830856323} + +tensor(indices=tensor([[2065, 1809, 3718, ..., 4733, 53, 59], + [1955, 3766, 653, ..., 4680, 637, 1203]]), + values=tensor([0.4905, 0.9168, 0.3618, ..., 0.6318, 0.9448, 0.4353]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.8671, 0.5450, 0.5794, ..., 0.1138, 0.5572, 0.2685]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.549734830856323 seconds + +tensor(indices=tensor([[2065, 1809, 3718, ..., 4733, 53, 59], + [1955, 3766, 653, ..., 4680, 637, 1203]]), + values=tensor([0.4905, 0.9168, 0.3618, ..., 0.6318, 0.9448, 0.4353]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.8671, 0.5450, 0.5794, ..., 0.1138, 0.5572, 0.2685]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) 
+Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.549734830856323 seconds + +[19.16, 18.77, 18.98, 18.54, 23.46, 18.52, 18.93, 18.74, 19.06, 18.35] +[52.83] +14.330709218978882 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11912, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.549734830856323, 'TIME_S_1KI': 0.8856392571236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 757.0913680386543, 'W': 52.83} +[19.16, 18.77, 18.98, 18.54, 23.46, 18.52, 18.93, 18.74, 19.06, 18.35, 18.88, 18.84, 18.57, 18.51, 18.55, 18.55, 18.51, 18.56, 18.54, 18.65] +341.15 +17.057499999999997 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11912, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.549734830856323, 'TIME_S_1KI': 0.8856392571236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 757.0913680386543, 'W': 52.83, 'J_1KI': 63.55703223964526, 'W_1KI': 4.435023505708529, 'W_D': 35.7725, 'J_D': 512.6452955359221, 'W_D_1KI': 3.003064137004701, 'J_D_1KI': 0.25210410821060286} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..eba8d65 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1175, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.336398839950562, "TIME_S_1KI": 8.796935182936647, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 746.7368591165542, "W": 52.89, "J_1KI": 635.5207311630248, "W_1KI": 45.0127659574468, "W_D": 35.812250000000006, "J_D": 505.6216124578715, "W_D_1KI": 30.478510638297877, "J_D_1KI": 25.939157990040744} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..01b79f5 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.8931059837341309} + +tensor(indices=tensor([[ 731, 4685, 395, ..., 1020, 4637, 1718], + [3618, 2633, 319, ..., 909, 2673, 143]]), + values=tensor([0.8002, 0.0499, 0.6616, ..., 0.1544, 0.8139, 0.7503]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.4582, 0.9286, 0.3506, ..., 0.2655, 0.8216, 0.0287]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.8931059837341309 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', 
'--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '1175', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.336398839950562} + +tensor(indices=tensor([[ 441, 4185, 4611, ..., 3907, 2673, 4614], + [ 507, 731, 3299, ..., 1339, 3770, 3928]]), + values=tensor([0.4750, 0.0579, 0.3195, ..., 0.2464, 0.6264, 0.3023]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.9906, 0.7877, 0.8030, ..., 0.8219, 0.8851, 0.6819]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.336398839950562 seconds + +tensor(indices=tensor([[ 441, 4185, 4611, ..., 3907, 2673, 4614], + [ 507, 731, 3299, ..., 1339, 3770, 3928]]), + values=tensor([0.4750, 0.0579, 0.3195, ..., 0.2464, 0.6264, 0.3023]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.9906, 0.7877, 0.8030, ..., 0.8219, 0.8851, 0.6819]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.336398839950562 seconds + +[18.75, 18.53, 18.93, 18.49, 19.42, 18.73, 18.95, 18.96, 18.8, 18.54] +[52.89] +14.118677616119385 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.336398839950562, 'TIME_S_1KI': 8.796935182936647, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 746.7368591165542, 'W': 52.89} +[18.75, 18.53, 18.93, 18.49, 19.42, 18.73, 18.95, 18.96, 18.8, 18.54, 19.6, 18.44, 18.51, 18.65, 19.1, 18.61, 18.43, 22.29, 19.04, 18.46] +341.55499999999995 +17.077749999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1175, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.336398839950562, 'TIME_S_1KI': 8.796935182936647, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 746.7368591165542, 'W': 52.89, 'J_1KI': 635.5207311630248, 'W_1KI': 45.0127659574468, 'W_D': 35.812250000000006, 'J_D': 505.6216124578715, 'W_D_1KI': 30.478510638297877, 'J_D_1KI': 25.939157990040744} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..8b173ff --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 239, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.492762088775635, "TIME_S_1KI": 43.90277024592316, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 778.1596210622787, "W": 53.23, "J_1KI": 3255.89799607648, "W_1KI": 222.7196652719665, "W_D": 36.39574999999999, "J_D": 532.0628034619092, "W_D_1KI": 152.28347280334725, "J_D_1KI": 637.1693422734195} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 
0000000..4fef78d --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 4.379525661468506} + +tensor(indices=tensor([[ 140, 1733, 1225, ..., 1673, 4314, 2178], + [3612, 3576, 1934, ..., 2457, 915, 2709]]), + values=tensor([0.0678, 0.6590, 0.3899, ..., 0.3844, 0.3305, 0.5064]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.0804, 0.3830, 0.8558, ..., 0.8435, 0.2496, 0.0277]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 4.379525661468506 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '239', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.492762088775635} + +tensor(indices=tensor([[1655, 2195, 4472, ..., 1043, 2213, 3514], + [4786, 3450, 2849, ..., 4155, 2026, 3425]]), + values=tensor([0.1913, 0.9120, 0.5753, ..., 0.3253, 0.2157, 0.1056]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.9789, 0.2512, 0.2440, ..., 0.9027, 0.6694, 0.5645]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.492762088775635 seconds + +tensor(indices=tensor([[1655, 2195, 4472, ..., 1043, 2213, 3514], + [4786, 3450, 2849, ..., 4155, 2026, 3425]]), + values=tensor([0.1913, 0.9120, 0.5753, ..., 0.3253, 0.2157, 0.1056]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.9789, 0.2512, 0.2440, ..., 0.9027, 0.6694, 0.5645]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.492762088775635 seconds + +[18.73, 19.34, 18.42, 18.52, 18.72, 18.89, 18.59, 18.55, 18.62, 18.61] +[53.23] +14.61881685256958 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 239, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.492762088775635, 'TIME_S_1KI': 43.90277024592316, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 778.1596210622787, 'W': 53.23} +[18.73, 19.34, 18.42, 18.52, 18.72, 18.89, 18.59, 18.55, 18.62, 18.61, 19.2, 18.61, 19.05, 18.51, 18.61, 18.52, 18.82, 18.56, 18.78, 18.61] +336.68500000000006 +16.834250000000004 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 239, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.492762088775635, 'TIME_S_1KI': 43.90277024592316, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 778.1596210622787, 'W': 53.23, 'J_1KI': 3255.89799607648, 'W_1KI': 222.7196652719665, 
'W_D': 36.39574999999999, 'J_D': 532.0628034619092, 'W_D_1KI': 152.28347280334725, 'J_D_1KI': 637.1693422734195} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..0ee178d --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 119, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.445932626724243, "TIME_S_1KI": 87.78094644306087, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 794.5498427605629, "W": 53.09, "J_1KI": 6676.8894349627135, "W_1KI": 446.1344537815126, "W_D": 36.087500000000006, "J_D": 540.088857612014, "W_D_1KI": 303.2563025210084, "J_D_1KI": 2548.3722900925077} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..ab7fe98 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.757755756378174} + +tensor(indices=tensor([[2953, 234, 4868, ..., 3698, 4212, 1909], + [2703, 4458, 2899, ..., 3784, 4376, 3718]]), + values=tensor([0.2834, 0.0535, 0.7451, ..., 0.5059, 0.8333, 0.9231]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4996, 0.7088, 0.6416, ..., 0.4672, 0.1887, 0.4295]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 8.757755756378174 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '119', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.445932626724243} + +tensor(indices=tensor([[3687, 3896, 4455, ..., 2180, 1318, 2402], + [3927, 2651, 4560, ..., 3164, 1982, 4334]]), + values=tensor([0.5254, 0.2601, 0.8538, ..., 0.4961, 0.2504, 0.0197]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7745, 0.0532, 0.6577, ..., 0.7397, 0.6506, 0.7515]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.445932626724243 seconds + +tensor(indices=tensor([[3687, 3896, 4455, ..., 2180, 1318, 2402], + [3927, 2651, 4560, ..., 3164, 1982, 4334]]), + values=tensor([0.5254, 0.2601, 0.8538, ..., 0.4961, 0.2504, 0.0197]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7745, 0.0532, 0.6577, ..., 0.7397, 0.6506, 0.7515]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.445932626724243 seconds + +[18.82, 22.1, 19.4, 18.34, 19.2, 18.52, 18.65, 18.44, 18.84, 18.47] +[53.09] +14.966092348098755 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 119, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.445932626724243, 'TIME_S_1KI': 87.78094644306087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 794.5498427605629, 'W': 53.09} +[18.82, 22.1, 19.4, 18.34, 19.2, 18.52, 18.65, 18.44, 18.84, 18.47, 18.95, 18.53, 18.43, 18.82, 18.76, 18.31, 18.98, 18.72, 18.48, 18.82] +340.04999999999995 +17.002499999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 119, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.445932626724243, 'TIME_S_1KI': 87.78094644306087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 794.5498427605629, 'W': 53.09, 'J_1KI': 6676.8894349627135, 'W_1KI': 446.1344537815126, 'W_D': 36.087500000000006, 'J_D': 540.088857612014, 'W_D_1KI': 303.2563025210084, 'J_D_1KI': 2548.3722900925077} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..a86bb5b --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 17.579725742340088, "TIME_S_1KI": 175.79725742340088, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1215.9591540527344, "W": 53.330000000000005, "J_1KI": 12159.591540527344, "W_1KI": 533.3000000000001, "W_D": 36.331500000000005, "J_D": 828.3821489868166, "W_D_1KI": 363.31500000000005, "J_D_1KI": 3633.1500000000005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..cb11879 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 17.579725742340088} + +tensor(indices=tensor([[1041, 1504, 3699, ..., 1279, 3292, 2293], + [2124, 387, 2742, ..., 2850, 4147, 1644]]), + values=tensor([0.1642, 0.9459, 0.9100, ..., 0.2504, 0.1529, 0.8530]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.1158, 0.0778, 0.1338, ..., 0.2986, 0.0086, 0.8295]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 17.579725742340088 seconds + +tensor(indices=tensor([[1041, 1504, 3699, ..., 1279, 3292, 2293], + [2124, 387, 
2742, ..., 2850, 4147, 1644]]), + values=tensor([0.1642, 0.9459, 0.9100, ..., 0.2504, 0.1529, 0.8530]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.1158, 0.0778, 0.1338, ..., 0.2986, 0.0086, 0.8295]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 17.579725742340088 seconds + +[19.38, 18.38, 18.31, 18.62, 18.98, 18.61, 19.94, 18.46, 18.73, 18.56] +[53.33] +22.8006591796875 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 17.579725742340088, 'TIME_S_1KI': 175.79725742340088, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1215.9591540527344, 'W': 53.330000000000005} +[19.38, 18.38, 18.31, 18.62, 18.98, 18.61, 19.94, 18.46, 18.73, 18.56, 22.86, 19.08, 18.47, 18.95, 18.54, 18.85, 19.01, 18.72, 18.54, 18.76] +339.97 +16.9985 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 17.579725742340088, 'TIME_S_1KI': 175.79725742340088, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1215.9591540527344, 'W': 53.330000000000005, 'J_1KI': 12159.591540527344, 'W_1KI': 533.3000000000001, 'W_D': 36.331500000000005, 'J_D': 828.3821489868166, 'W_D_1KI': 363.31500000000005, 'J_D_1KI': 3633.1500000000005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..9b914b0 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 26.379098653793335, "TIME_S_1KI": 263.79098653793335, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1731.2462457871436, "W": 53.41, "J_1KI": 17312.462457871436, "W_1KI": 534.1, "W_D": 36.3955, "J_D": 1179.7336217664479, "W_D_1KI": 363.955, "J_D_1KI": 3639.5499999999997} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..f902365 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 26.379098653793335} + +tensor(indices=tensor([[ 386, 1957, 1559, ..., 2478, 1085, 4412], + [ 29, 3220, 1335, ..., 3608, 2941, 1585]]), + values=tensor([0.6096, 0.3396, 0.2859, ..., 0.4952, 0.0380, 0.1024]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.1918, 0.0876, 0.5397, ..., 0.7140, 0.9679, 0.7231]) +Matrix 
Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 26.379098653793335 seconds + +tensor(indices=tensor([[ 386, 1957, 1559, ..., 2478, 1085, 4412], + [ 29, 3220, 1335, ..., 3608, 2941, 1585]]), + values=tensor([0.6096, 0.3396, 0.2859, ..., 0.4952, 0.0380, 0.1024]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.1918, 0.0876, 0.5397, ..., 0.7140, 0.9679, 0.7231]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 26.379098653793335 seconds + +[19.08, 18.69, 18.55, 18.41, 18.89, 18.58, 18.54, 20.25, 21.01, 18.82] +[53.41] +32.41427159309387 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 26.379098653793335, 'TIME_S_1KI': 263.79098653793335, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1731.2462457871436, 'W': 53.41} +[19.08, 18.69, 18.55, 18.41, 18.89, 18.58, 18.54, 20.25, 21.01, 18.82, 18.97, 18.64, 18.83, 18.5, 18.85, 18.66, 18.75, 18.56, 18.85, 18.59] +340.28999999999996 +17.014499999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 26.379098653793335, 'TIME_S_1KI': 263.79098653793335, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1731.2462457871436, 'W': 53.41, 'J_1KI': 17312.462457871436, 'W_1KI': 534.1, 'W_D': 36.3955, 'J_D': 1179.7336217664479, 'W_D_1KI': 363.955, 'J_D_1KI': 3639.5499999999997} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..6432ccd --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 34.94771075248718, "TIME_S_1KI": 349.4771075248718, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2247.251918399334, "W": 53.550000000000004, "J_1KI": 22472.519183993343, "W_1KI": 535.5000000000001, "W_D": 36.6275, "J_D": 1537.0909363430737, "W_D_1KI": 366.275, "J_D_1KI": 3662.75} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..15fbf2c --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 34.94771075248718} + +tensor(indices=tensor([[ 341, 2125, 2941, ..., 2481, 3481, 943], + [2128, 980, 2280, ..., 2746, 
1908, 4985]]), + values=tensor([0.2713, 0.6415, 0.0835, ..., 0.2173, 0.4665, 0.7810]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8918, 0.0864, 0.8458, ..., 0.9888, 0.3713, 0.4277]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 34.94771075248718 seconds + +tensor(indices=tensor([[ 341, 2125, 2941, ..., 2481, 3481, 943], + [2128, 980, 2280, ..., 2746, 1908, 4985]]), + values=tensor([0.2713, 0.6415, 0.0835, ..., 0.2173, 0.4665, 0.7810]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8918, 0.0864, 0.8458, ..., 0.9888, 0.3713, 0.4277]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 34.94771075248718 seconds + +[18.89, 18.42, 18.83, 18.46, 19.2, 18.49, 18.74, 18.72, 18.75, 18.46] +[53.55] +41.96548867225647 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 34.94771075248718, 'TIME_S_1KI': 349.4771075248718, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2247.251918399334, 'W': 53.550000000000004} +[18.89, 18.42, 18.83, 18.46, 19.2, 18.49, 18.74, 18.72, 18.75, 18.46, 19.54, 18.73, 18.79, 18.77, 19.13, 18.6, 18.7, 19.24, 19.01, 18.85] +338.45000000000005 +16.922500000000003 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 34.94771075248718, 'TIME_S_1KI': 349.4771075248718, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2247.251918399334, 'W': 53.550000000000004, 'J_1KI': 22472.519183993343, 'W_1KI': 535.5000000000001, 'W_D': 36.6275, 'J_D': 1537.0909363430737, 'W_D_1KI': 366.275, 'J_D_1KI': 3662.75} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..18c5dd3 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 43.97061109542847, "TIME_S_1KI": 439.70611095428467, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2751.18099268198, "W": 53.43, "J_1KI": 27511.809926819802, "W_1KI": 534.3, "W_D": 36.369249999999994, "J_D": 1872.7005300037263, "W_D_1KI": 363.69249999999994, "J_D_1KI": 3636.9249999999993} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..c312c82 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", 
"MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 43.97061109542847} + +tensor(indices=tensor([[3145, 1307, 4347, ..., 4045, 3429, 3199], + [4962, 4520, 3297, ..., 791, 3059, 3340]]), + values=tensor([0.7023, 0.5600, 0.8014, ..., 0.2262, 0.5513, 0.0605]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3141, 0.0727, 0.1677, ..., 0.2736, 0.9208, 0.9057]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 43.97061109542847 seconds + +tensor(indices=tensor([[3145, 1307, 4347, ..., 4045, 3429, 3199], + [4962, 4520, 3297, ..., 791, 3059, 3340]]), + values=tensor([0.7023, 0.5600, 0.8014, ..., 0.2262, 0.5513, 0.0605]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3141, 0.0727, 0.1677, ..., 0.2736, 0.9208, 0.9057]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 43.97061109542847 seconds + +[19.98, 18.88, 19.24, 18.82, 18.59, 18.58, 18.66, 18.72, 22.36, 18.84] +[53.43] +51.491315603256226 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 43.97061109542847, 'TIME_S_1KI': 439.70611095428467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2751.18099268198, 'W': 53.43} +[19.98, 18.88, 19.24, 18.82, 18.59, 18.58, 18.66, 18.72, 22.36, 18.84, 19.16, 18.81, 18.59, 18.57, 18.58, 18.76, 18.53, 18.46, 18.55, 19.05] +341.21500000000003 +17.060750000000002 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 43.97061109542847, 'TIME_S_1KI': 439.70611095428467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2751.18099268198, 'W': 53.43, 'J_1KI': 27511.809926819802, 'W_1KI': 534.3, 'W_D': 36.369249999999994, 'J_D': 1872.7005300037263, 'W_D_1KI': 363.69249999999994, 'J_D_1KI': 3636.9249999999993} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..52214c0 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 657339, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.507478475570679, "TIME_S_1KI": 0.015984870022272644, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 738.8582760810853, "W": 52.6, "J_1KI": 1.1240140567973074, "W_1KI": 0.08001959415157171, "W_D": 35.739000000000004, "J_D": 502.0162724118233, "W_D_1KI": 0.054369206756331216, "J_D_1KI": 8.27110619578805e-05} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..1addc0a --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,429 @@ +['apptainer', 
'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.014267444610595703} + +tensor(indices=tensor([[3350, 3579, 2628, 2842, 29, 4840, 3408, 243, 4610, + 279, 987, 3404, 952, 4985, 3776, 3356, 3029, 3975, + 4315, 1213, 2350, 2510, 4626, 536, 1275, 4725, 2428, + 1855, 2453, 3553, 2315, 4816, 1659, 1073, 4472, 3092, + 1398, 4291, 4966, 7, 1675, 2353, 1651, 4795, 1616, + 4946, 1730, 4498, 664, 1625, 3818, 328, 2154, 4266, + 643, 1855, 798, 80, 3798, 3037, 2181, 4270, 1406, + 4421, 3490, 3440, 777, 1702, 3308, 3899, 578, 4884, + 4022, 1031, 2144, 2052, 4690, 1272, 4172, 4999, 4973, + 4193, 362, 4516, 4302, 2191, 921, 2860, 593, 3609, + 1463, 2983, 4128, 743, 1156, 3329, 1848, 1095, 4737, + 4212, 901, 3318, 3830, 1688, 421, 3440, 3492, 3834, + 2638, 2771, 1012, 2383, 3591, 4630, 1737, 1937, 1854, + 3570, 4345, 4000, 1215, 3054, 3363, 4201, 4904, 2593, + 1155, 4746, 4292, 4450, 1513, 2505, 3767, 3024, 1709, + 2445, 3474, 4087, 2493, 2095, 4613, 3597, 1138, 623, + 1740, 3255, 1069, 4907, 4, 652, 1644, 615, 4284, + 4661, 1555, 1376, 285, 4923, 3669, 3265, 1083, 2383, + 3380, 1027, 3621, 1898, 4736, 861, 698, 3751, 3699, + 1387, 4909, 1976, 4101, 1313, 470, 4105, 770, 1724, + 4365, 3925, 4437, 2045, 2738, 2318, 323, 2764, 2519, + 3009, 2051, 3721, 2398, 1048, 3608, 1290, 3686, 2843, + 884, 2720, 2151, 742, 3472, 4194, 2461, 1260, 2701, + 4512, 2866, 3087, 1684, 901, 1231, 3819, 650, 2871, + 1962, 1068, 3068, 987, 1289, 653, 4071, 751, 2964, + 4744, 971, 874, 422, 4999, 2097, 4264, 650, 910, + 1003, 1004, 4306, 3103, 4338, 920, 461, 657, 1906, + 1453, 10, 739, 85, 2037, 3465, 1075], + [3219, 678, 1425, 3399, 1253, 1316, 4113, 3313, 4348, + 562, 1883, 1791, 4291, 4685, 848, 51, 2764, 3490, + 3526, 1176, 3899, 1823, 3086, 1779, 3100, 2211, 2040, + 1162, 3221, 1762, 2812, 2033, 209, 974, 4387, 778, + 4145, 2951, 3037, 4013, 2224, 229, 1109, 2370, 1979, + 2368, 2886, 4514, 3830, 1775, 695, 3876, 871, 2311, + 2010, 594, 3154, 596, 3963, 2982, 4874, 4410, 4283, + 2931, 4718, 4059, 1793, 3888, 3555, 4196, 2734, 723, + 2578, 4224, 2173, 4531, 2954, 4549, 4219, 2720, 4232, + 302, 166, 1224, 789, 1738, 4284, 615, 3810, 765, + 2248, 3749, 2959, 3933, 1316, 1480, 1432, 1211, 3902, + 4301, 3256, 1399, 2189, 1660, 3459, 3098, 3470, 2540, + 685, 4591, 2566, 4069, 48, 3734, 1984, 705, 1033, + 1461, 3332, 4332, 783, 247, 3574, 3261, 2095, 1086, + 1739, 4238, 3052, 1035, 23, 901, 1013, 1564, 678, + 2581, 732, 4579, 1752, 1202, 1604, 2819, 2576, 4020, + 3140, 4737, 685, 1436, 3085, 455, 3171, 4988, 568, + 550, 4101, 3751, 2089, 4453, 2250, 1238, 4353, 2609, + 3661, 4960, 4194, 880, 2377, 731, 2504, 3010, 3042, + 988, 1241, 1301, 3988, 3847, 2392, 1982, 1427, 4666, + 1481, 790, 1171, 416, 2043, 369, 1377, 3659, 1825, + 886, 2281, 2578, 606, 2389, 3972, 868, 1070, 1340, + 3283, 4585, 590, 4030, 1130, 3971, 135, 3337, 773, + 438, 3282, 4417, 2765, 1344, 836, 3629, 4100, 2339, + 4609, 2856, 2459, 3039, 616, 110, 2511, 4350, 3129, + 1679, 4751, 3879, 1347, 3906, 3395, 1704, 3982, 2921, + 3025, 2844, 892, 2964, 4466, 4158, 2040, 3207, 3312, + 3743, 1123, 298, 1144, 1268, 4706, 1760]]), + values=tensor([0.9300, 0.0135, 0.2559, 0.0258, 0.4795, 0.5644, 0.9646, + 0.7265, 
0.9004, 0.7805, 0.5849, 0.6255, 0.8873, 0.3444, + 0.6069, 0.8641, 0.6938, 0.4578, 0.9990, 0.4318, 0.7874, + 0.6852, 0.2268, 0.6379, 0.8278, 0.8031, 0.0740, 0.0405, + 0.2744, 0.9011, 0.3091, 0.1673, 0.4777, 0.8224, 0.4322, + 0.7326, 0.9530, 0.3044, 0.3435, 0.3336, 0.6991, 0.4658, + 0.8791, 0.1086, 0.9612, 0.5462, 0.0585, 0.0607, 0.0217, + 0.4276, 0.2959, 0.3142, 0.2983, 0.9545, 0.5835, 0.0730, + 0.1467, 0.6611, 0.0972, 0.1770, 0.1073, 0.3589, 0.2458, + 0.4000, 0.5589, 0.7323, 0.9776, 0.1060, 0.7977, 0.7184, + 0.2749, 0.4653, 0.0102, 0.8633, 0.3900, 0.5562, 0.9308, + 0.6866, 0.7450, 0.1593, 0.1101, 0.3311, 0.4698, 0.9638, + 0.2753, 0.3311, 0.3951, 0.1960, 0.3345, 0.3664, 0.6921, + 0.3366, 0.1916, 0.2045, 0.4107, 0.9946, 0.4530, 0.8944, + 0.2997, 0.0147, 0.8193, 0.7514, 0.8245, 0.1256, 0.0419, + 0.1027, 0.0765, 0.5046, 0.7443, 0.3003, 0.2242, 0.0012, + 0.3642, 0.8406, 0.5971, 0.3942, 0.5826, 0.9030, 0.0635, + 0.8064, 0.7616, 0.5519, 0.1794, 0.3785, 0.7013, 0.1781, + 0.6209, 0.3620, 0.5337, 0.7833, 0.9370, 0.1257, 0.3666, + 0.6031, 0.5356, 0.8052, 0.9117, 0.1739, 0.1515, 0.6672, + 0.6920, 0.3617, 0.2507, 0.8751, 0.5575, 0.1821, 0.7933, + 0.3800, 0.0300, 0.7871, 0.2909, 0.2445, 0.8029, 0.1923, + 0.8894, 0.4805, 0.5106, 0.2169, 0.1770, 0.0477, 0.7765, + 0.7159, 0.5537, 0.1263, 0.9648, 0.0473, 0.3391, 0.1676, + 0.0787, 0.6666, 0.4579, 0.4928, 0.8260, 0.1654, 0.7945, + 0.6619, 0.1805, 0.2340, 0.8177, 0.6470, 0.1744, 0.5306, + 0.2495, 0.5744, 0.2084, 0.0056, 0.7498, 0.1298, 0.9924, + 0.9309, 0.4667, 0.3159, 0.5843, 0.9324, 0.1261, 0.3939, + 0.4438, 0.0522, 0.1793, 0.9543, 0.5761, 0.5083, 0.5984, + 0.8186, 0.8940, 0.8317, 0.3031, 0.0898, 0.7286, 0.9130, + 0.2146, 0.5673, 0.4997, 0.7131, 0.8619, 0.1445, 0.5894, + 0.4874, 0.8996, 0.6969, 0.8637, 0.8309, 0.9668, 0.5423, + 0.9644, 0.0347, 0.8716, 0.3258, 0.8389, 0.5446, 0.4509, + 0.2284, 0.3080, 0.0934, 0.3222, 0.8512, 0.6840, 0.4302, + 0.2008, 0.0541, 0.8894, 0.4039, 0.2245, 0.3663, 0.1981, + 0.4139, 0.0981, 0.2004, 0.3822, 0.0151]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.9768, 0.2800, 0.3107, ..., 0.6018, 0.6548, 0.0688]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.014267444610595703 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '73594', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.1755521297454834} + +tensor(indices=tensor([[ 5, 2762, 3594, 2816, 1732, 536, 386, 2670, 3478, + 1584, 124, 3702, 903, 2825, 217, 4374, 4445, 3306, + 3440, 3237, 1614, 3908, 3833, 4185, 1782, 1762, 1282, + 1237, 2839, 2700, 1989, 527, 4229, 1924, 2291, 632, + 630, 2854, 4571, 1300, 2674, 2647, 4163, 1650, 968, + 4568, 3711, 2710, 518, 1743, 2403, 3804, 3466, 4006, + 819, 2269, 3399, 1939, 2713, 4870, 167, 3787, 4204, + 3365, 3397, 366, 213, 3780, 3665, 2397, 2166, 3681, + 3124, 4070, 617, 2614, 4992, 2242, 100, 3302, 4607, + 3992, 1474, 3984, 4692, 1700, 1158, 2990, 355, 942, + 4812, 3380, 4506, 3705, 3845, 1738, 815, 1079, 2352, + 2530, 4966, 4212, 4888, 3600, 4863, 2120, 491, 939, + 1287, 4684, 4250, 4355, 1029, 3746, 1422, 4739, 411, + 1214, 1388, 233, 4974, 891, 2404, 3121, 4292, 4861, + 2735, 2438, 4956, 4235, 
598, 1534, 3340, 3402, 310, + 4996, 1419, 503, 1770, 3979, 3520, 4449, 716, 3020, + 2552, 111, 635, 3437, 811, 3660, 1949, 2184, 4591, + 3714, 1433, 1551, 1530, 3773, 1820, 2025, 4547, 2071, + 562, 2152, 1177, 272, 977, 4008, 3623, 4786, 1453, + 1527, 3245, 2232, 4629, 2633, 388, 2617, 1411, 2477, + 861, 4340, 4762, 436, 3461, 3954, 1990, 3133, 382, + 1775, 1836, 4239, 3239, 423, 3367, 26, 1798, 2158, + 2804, 4757, 4785, 4557, 4421, 3872, 2469, 583, 701, + 2326, 3053, 3281, 28, 3507, 4862, 2279, 2640, 4389, + 2428, 3032, 2650, 4589, 3081, 3601, 2683, 4501, 2051, + 856, 4883, 3437, 2200, 2832, 3418, 1644, 2536, 4734, + 2939, 1867, 359, 1425, 983, 2585, 3214, 256, 4014, + 3910, 3292, 2002, 1069, 61, 3677, 851], + [4263, 164, 4938, 2393, 3797, 3028, 3097, 2799, 2603, + 711, 2574, 817, 4130, 1642, 1084, 635, 1484, 633, + 4299, 2943, 1472, 4211, 2962, 1213, 954, 2350, 2665, + 3706, 457, 746, 588, 549, 3431, 1549, 3895, 4988, + 1992, 3558, 538, 2595, 731, 401, 1472, 4821, 4991, + 611, 4255, 2726, 3316, 3597, 2997, 1503, 2530, 3569, + 4544, 2633, 4831, 789, 2792, 3344, 1336, 4303, 22, + 2507, 4981, 1515, 3885, 4637, 3730, 2881, 4049, 1613, + 2951, 2260, 3166, 2413, 2667, 3940, 4304, 4492, 4293, + 427, 2141, 138, 3870, 231, 1129, 4212, 3389, 783, + 4909, 465, 3306, 2962, 920, 4261, 426, 565, 278, + 4958, 3510, 3438, 2857, 2461, 2649, 4244, 3999, 4887, + 1224, 3994, 3000, 4558, 2625, 2923, 4799, 527, 3842, + 3476, 1460, 1884, 3675, 939, 2982, 3229, 1015, 2719, + 1355, 1199, 3485, 153, 347, 2978, 4462, 10, 2099, + 2807, 1789, 4561, 1528, 2362, 948, 1123, 2734, 1679, + 1129, 4215, 4, 2485, 773, 1225, 1572, 2298, 202, + 1536, 1220, 158, 3331, 3447, 322, 3020, 1360, 441, + 2977, 1483, 2050, 2489, 3902, 4150, 1165, 3539, 2956, + 3877, 3023, 3008, 20, 3403, 2370, 310, 767, 3310, + 2758, 1002, 1974, 1972, 3660, 3336, 24, 2974, 4478, + 2652, 4902, 3895, 676, 3312, 982, 3479, 931, 455, + 833, 177, 4501, 2334, 3309, 3314, 4861, 4829, 1431, + 55, 2234, 3951, 2594, 4047, 3473, 1192, 892, 71, + 4601, 3945, 2625, 4129, 122, 2456, 2313, 140, 1367, + 2586, 1495, 2151, 2136, 3748, 1873, 1201, 1538, 4919, + 2188, 3433, 756, 2777, 818, 250, 754, 3178, 1309, + 4290, 4005, 524, 606, 3210, 3083, 4871]]), + values=tensor([0.8538, 0.5876, 0.1866, 0.6805, 0.7969, 0.7607, 0.0028, + 0.4849, 0.8852, 0.6112, 0.1502, 0.8246, 0.0967, 0.1369, + 0.0948, 0.8248, 0.4269, 0.5676, 0.3145, 0.7683, 0.4208, + 0.0623, 0.6923, 0.2777, 0.7877, 0.4200, 0.9519, 0.6274, + 0.7109, 0.2101, 0.8716, 0.8507, 0.5024, 0.5347, 0.4931, + 0.2954, 0.7968, 0.1568, 0.1012, 0.0319, 0.2691, 0.6759, + 0.8706, 0.5644, 0.7235, 0.0448, 0.3929, 0.5229, 0.6696, + 0.8227, 0.5841, 0.3483, 0.2279, 0.0660, 0.4764, 0.4278, + 0.9170, 0.7192, 0.9762, 0.7562, 0.3588, 0.9805, 0.2056, + 0.2707, 0.3860, 0.5329, 0.8173, 0.9399, 0.8198, 0.1700, + 0.0395, 0.8313, 0.8856, 0.2683, 0.1689, 0.8310, 0.6829, + 0.6639, 0.1937, 0.5626, 0.3124, 0.4695, 0.1349, 0.7759, + 0.3686, 0.9490, 0.5612, 0.1999, 0.7908, 0.9959, 0.8651, + 0.7907, 0.4234, 0.0525, 0.3047, 0.2972, 0.6800, 0.6701, + 0.7655, 0.8482, 0.4714, 0.1608, 0.7025, 0.3186, 0.2670, + 0.7872, 0.5735, 0.1205, 0.4893, 0.6751, 0.2809, 0.0961, + 0.7543, 0.5760, 0.6631, 0.4396, 0.8524, 0.7838, 0.3503, + 0.3185, 0.3366, 0.8623, 0.5372, 0.1804, 0.7729, 0.7835, + 0.5603, 0.0307, 0.4858, 0.3349, 0.8951, 0.4831, 0.4343, + 0.5311, 0.8466, 0.0109, 0.1035, 0.2469, 0.2974, 0.2532, + 0.5015, 0.8049, 0.0487, 0.7247, 0.5543, 0.5513, 0.2918, + 0.6942, 0.6983, 0.0111, 0.5949, 0.6103, 0.5808, 0.1465, + 0.4692, 0.3725, 0.7373, 0.6350, 0.3187, 0.2271, 
0.9478, + 0.3679, 0.1784, 0.9946, 0.8607, 0.1839, 0.0076, 0.5386, + 0.0405, 0.5925, 0.0487, 0.7285, 0.3176, 0.1639, 0.4865, + 0.9939, 0.4440, 0.8078, 0.8593, 0.1360, 0.3176, 0.2179, + 0.4373, 0.0791, 0.9527, 0.4256, 0.2839, 0.2689, 0.5904, + 0.6813, 0.2502, 0.1822, 0.9042, 0.0392, 0.7692, 0.0460, + 0.4911, 0.2203, 0.4069, 0.2641, 0.6981, 0.9989, 0.0859, + 0.8436, 0.3011, 0.9379, 0.9770, 0.2563, 0.9970, 0.7596, + 0.5299, 0.8584, 0.1828, 0.8970, 0.7972, 0.1844, 0.5278, + 0.6350, 0.9031, 0.9336, 0.6004, 0.6127, 0.0245, 0.3810, + 0.2531, 0.4103, 0.6362, 0.0474, 0.9419, 0.5951, 0.3075, + 0.5787, 0.9237, 0.4442, 0.5586, 0.5401, 0.4390, 0.7374, + 0.6005, 0.5398, 0.2113, 0.9187, 0.6595, 0.1120, 0.6347, + 0.8756, 0.2691, 0.3806, 0.2395, 0.7466]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.4897, 0.8868, 0.3803, ..., 0.3576, 0.0104, 0.1215]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.1755521297454834 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '657339', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.507478475570679} + +tensor(indices=tensor([[1996, 3828, 3829, 4214, 979, 3679, 257, 397, 2049, + 1637, 29, 46, 3611, 1284, 308, 539, 4032, 1822, + 3771, 1432, 744, 2944, 3369, 3114, 4074, 3218, 1576, + 4439, 2378, 3570, 2720, 450, 734, 3908, 2547, 2927, + 2351, 4440, 1142, 501, 2217, 1530, 762, 3112, 3483, + 806, 4184, 3862, 1559, 2443, 4828, 4378, 480, 865, + 3963, 2946, 683, 3716, 4144, 4501, 3245, 4636, 2229, + 1999, 3891, 3550, 1939, 453, 2063, 2765, 2310, 3992, + 563, 4966, 4457, 1117, 1245, 298, 3753, 788, 1984, + 4474, 4183, 1614, 662, 645, 3378, 4230, 2258, 2148, + 4463, 277, 4875, 3091, 3065, 95, 2552, 345, 2472, + 953, 2714, 1251, 3615, 3594, 1241, 462, 1223, 4764, + 3786, 2664, 3221, 4831, 2468, 1620, 864, 4963, 1624, + 503, 4488, 4735, 3341, 3538, 3612, 1673, 3059, 1402, + 969, 3307, 2925, 4145, 271, 4433, 3975, 2380, 2427, + 1940, 2349, 959, 4067, 1553, 3025, 1216, 3394, 4995, + 175, 1094, 2167, 1738, 1196, 619, 2965, 524, 892, + 3569, 2931, 393, 4930, 963, 4545, 737, 3017, 3804, + 945, 1305, 4882, 3375, 770, 1216, 2672, 612, 1107, + 1352, 2589, 2579, 3637, 3082, 4932, 876, 4671, 1903, + 3633, 220, 1415, 2886, 2488, 2653, 625, 326, 629, + 4064, 995, 4884, 149, 4098, 4560, 3880, 1632, 4470, + 4241, 4832, 2407, 3914, 4216, 1870, 4793, 1458, 3442, + 877, 247, 4194, 4659, 2216, 218, 727, 441, 4753, + 845, 1217, 1661, 51, 1753, 2163, 1404, 2449, 4065, + 2121, 1362, 520, 2960, 3584, 2903, 2750, 1344, 668, + 3266, 4635, 4933, 1949, 1555, 4541, 1438, 1899, 2825, + 1423, 4942, 3418, 2644, 4197, 546, 3472], + [4151, 2317, 4117, 2080, 1386, 1492, 4197, 94, 3284, + 3971, 3439, 1032, 4971, 949, 4720, 655, 4914, 1723, + 4210, 3426, 858, 3803, 4173, 4147, 1138, 4984, 891, + 390, 4188, 1367, 4379, 3429, 546, 3970, 450, 1877, + 2589, 2456, 3996, 4291, 3989, 1708, 1819, 4702, 3724, + 1461, 2054, 1913, 241, 4157, 856, 4764, 4459, 1873, + 1355, 4884, 3947, 496, 1510, 4653, 2659, 3144, 2531, + 228, 4188, 4832, 4118, 3466, 823, 1960, 707, 4798, + 2920, 2979, 2518, 3736, 3319, 3278, 2444, 1647, 1344, + 2329, 4491, 141, 2114, 2287, 436, 3179, 2586, 776, + 4666, 
4450, 2369, 3933, 4907, 524, 3858, 2302, 4094, + 312, 2952, 3786, 4603, 4163, 4990, 399, 594, 1498, + 4162, 3495, 1828, 1365, 2084, 4436, 3469, 4416, 2876, + 3885, 288, 1921, 3125, 1374, 2453, 3751, 3629, 902, + 1515, 2284, 3053, 327, 4133, 3011, 391, 315, 2896, + 1474, 2772, 2895, 2037, 4850, 4216, 2323, 380, 174, + 2782, 885, 2007, 4459, 4383, 3903, 119, 3352, 766, + 1937, 2869, 2657, 19, 3411, 1108, 3551, 4948, 4353, + 2256, 182, 1631, 3494, 929, 4853, 4623, 2672, 2455, + 1941, 2609, 4134, 4108, 2709, 59, 3401, 98, 3576, + 1588, 4101, 959, 602, 3872, 2270, 418, 1619, 3623, + 3965, 3758, 4849, 4530, 269, 2011, 3219, 1893, 4757, + 2309, 960, 875, 3210, 849, 3719, 2567, 3340, 2896, + 2614, 194, 2416, 3880, 1720, 1426, 1133, 727, 2945, + 3859, 3674, 659, 2068, 1451, 73, 1291, 3143, 2834, + 3862, 727, 1356, 1196, 691, 2711, 4168, 4456, 3379, + 56, 4735, 3391, 96, 4369, 269, 4988, 2527, 1836, + 4860, 4102, 346, 1097, 1699, 581, 2200]]), + values=tensor([0.1662, 0.5682, 0.1337, 0.6167, 0.8720, 0.9047, 0.5481, + 0.3002, 0.3894, 0.4450, 0.0341, 0.0040, 0.3505, 0.0350, + 0.3132, 0.0329, 0.7268, 0.3488, 0.7469, 0.3186, 0.3852, + 0.0759, 0.7766, 0.7853, 0.9571, 0.4870, 0.7127, 0.2049, + 0.4029, 0.1772, 0.3662, 0.1642, 0.5213, 0.0750, 0.0759, + 0.9249, 0.4732, 0.6460, 0.9897, 0.6783, 0.8760, 0.8015, + 0.7313, 0.3460, 0.2353, 0.3950, 0.4032, 0.0798, 0.7432, + 0.9548, 0.1731, 0.1292, 0.7915, 0.7719, 0.2307, 0.4730, + 0.9903, 0.7265, 0.9022, 0.6433, 0.0604, 0.9616, 0.2620, + 0.0843, 0.6322, 0.3044, 0.9898, 0.6905, 0.2556, 0.9085, + 0.9880, 0.6707, 0.6758, 0.7103, 0.6734, 0.1296, 0.0165, + 0.8943, 0.6920, 0.4419, 0.5488, 0.1345, 0.5493, 0.4914, + 0.2205, 0.3314, 0.6389, 0.7842, 0.9699, 0.4692, 0.6474, + 0.7745, 0.7992, 0.2645, 0.5332, 0.2474, 0.3018, 0.3130, + 0.6708, 0.8526, 0.9046, 0.3866, 0.1610, 0.7065, 0.0781, + 0.5350, 0.8830, 0.9915, 0.4865, 0.8257, 0.4306, 0.5092, + 0.4702, 0.8054, 0.7559, 0.2012, 0.4322, 0.9357, 0.5091, + 0.2951, 0.4157, 0.6291, 0.6587, 0.2032, 0.9074, 0.0637, + 0.8535, 0.4907, 0.8527, 0.6557, 0.3455, 0.5973, 0.5037, + 0.2804, 0.4067, 0.7667, 0.0988, 0.8470, 0.9610, 0.4946, + 0.9603, 0.8693, 0.3685, 0.8560, 0.5465, 0.7563, 0.9083, + 0.3293, 0.3475, 0.4387, 0.0934, 0.7920, 0.9912, 0.3726, + 0.6590, 0.0404, 0.6502, 0.9535, 0.6878, 0.5702, 0.3342, + 0.7784, 0.1708, 0.9632, 0.7128, 0.3755, 0.0415, 0.9656, + 0.7912, 0.1324, 0.8293, 0.9357, 0.8093, 0.8668, 0.0769, + 0.5226, 0.8821, 0.5627, 0.9417, 0.7723, 0.5581, 0.6417, + 0.9283, 0.2203, 0.9435, 0.7833, 0.4922, 0.7544, 0.4930, + 0.2055, 0.8428, 0.6480, 0.3319, 0.2742, 0.6529, 0.8059, + 0.7574, 0.3947, 0.9694, 0.4828, 0.0404, 0.7292, 0.8415, + 0.6988, 0.7335, 0.8863, 0.9856, 0.8926, 0.8057, 0.7773, + 0.7884, 0.4345, 0.3753, 0.0305, 0.6592, 0.1194, 0.2772, + 0.2512, 0.7531, 0.4528, 0.4777, 0.5961, 0.1356, 0.7459, + 0.1761, 0.4463, 0.9461, 0.6897, 0.3513, 0.4304, 0.5004, + 0.0631, 0.1231, 0.9708, 0.6645, 0.5456, 0.4188, 0.2899, + 0.5126, 0.9546, 0.4252, 0.9228, 0.4180, 0.3045, 0.1706, + 0.2310, 0.6236, 0.9528, 0.9448, 0.9079]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.1555, 0.4727, 0.6780, ..., 0.7452, 0.8720, 0.8261]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.507478475570679 seconds + +tensor(indices=tensor([[1996, 3828, 3829, 4214, 979, 3679, 257, 397, 2049, + 1637, 29, 46, 3611, 1284, 308, 539, 4032, 1822, + 3771, 1432, 744, 2944, 3369, 3114, 4074, 3218, 1576, + 4439, 2378, 3570, 2720, 450, 734, 3908, 2547, 
2927, + 2351, 4440, 1142, 501, 2217, 1530, 762, 3112, 3483, + 806, 4184, 3862, 1559, 2443, 4828, 4378, 480, 865, + 3963, 2946, 683, 3716, 4144, 4501, 3245, 4636, 2229, + 1999, 3891, 3550, 1939, 453, 2063, 2765, 2310, 3992, + 563, 4966, 4457, 1117, 1245, 298, 3753, 788, 1984, + 4474, 4183, 1614, 662, 645, 3378, 4230, 2258, 2148, + 4463, 277, 4875, 3091, 3065, 95, 2552, 345, 2472, + 953, 2714, 1251, 3615, 3594, 1241, 462, 1223, 4764, + 3786, 2664, 3221, 4831, 2468, 1620, 864, 4963, 1624, + 503, 4488, 4735, 3341, 3538, 3612, 1673, 3059, 1402, + 969, 3307, 2925, 4145, 271, 4433, 3975, 2380, 2427, + 1940, 2349, 959, 4067, 1553, 3025, 1216, 3394, 4995, + 175, 1094, 2167, 1738, 1196, 619, 2965, 524, 892, + 3569, 2931, 393, 4930, 963, 4545, 737, 3017, 3804, + 945, 1305, 4882, 3375, 770, 1216, 2672, 612, 1107, + 1352, 2589, 2579, 3637, 3082, 4932, 876, 4671, 1903, + 3633, 220, 1415, 2886, 2488, 2653, 625, 326, 629, + 4064, 995, 4884, 149, 4098, 4560, 3880, 1632, 4470, + 4241, 4832, 2407, 3914, 4216, 1870, 4793, 1458, 3442, + 877, 247, 4194, 4659, 2216, 218, 727, 441, 4753, + 845, 1217, 1661, 51, 1753, 2163, 1404, 2449, 4065, + 2121, 1362, 520, 2960, 3584, 2903, 2750, 1344, 668, + 3266, 4635, 4933, 1949, 1555, 4541, 1438, 1899, 2825, + 1423, 4942, 3418, 2644, 4197, 546, 3472], + [4151, 2317, 4117, 2080, 1386, 1492, 4197, 94, 3284, + 3971, 3439, 1032, 4971, 949, 4720, 655, 4914, 1723, + 4210, 3426, 858, 3803, 4173, 4147, 1138, 4984, 891, + 390, 4188, 1367, 4379, 3429, 546, 3970, 450, 1877, + 2589, 2456, 3996, 4291, 3989, 1708, 1819, 4702, 3724, + 1461, 2054, 1913, 241, 4157, 856, 4764, 4459, 1873, + 1355, 4884, 3947, 496, 1510, 4653, 2659, 3144, 2531, + 228, 4188, 4832, 4118, 3466, 823, 1960, 707, 4798, + 2920, 2979, 2518, 3736, 3319, 3278, 2444, 1647, 1344, + 2329, 4491, 141, 2114, 2287, 436, 3179, 2586, 776, + 4666, 4450, 2369, 3933, 4907, 524, 3858, 2302, 4094, + 312, 2952, 3786, 4603, 4163, 4990, 399, 594, 1498, + 4162, 3495, 1828, 1365, 2084, 4436, 3469, 4416, 2876, + 3885, 288, 1921, 3125, 1374, 2453, 3751, 3629, 902, + 1515, 2284, 3053, 327, 4133, 3011, 391, 315, 2896, + 1474, 2772, 2895, 2037, 4850, 4216, 2323, 380, 174, + 2782, 885, 2007, 4459, 4383, 3903, 119, 3352, 766, + 1937, 2869, 2657, 19, 3411, 1108, 3551, 4948, 4353, + 2256, 182, 1631, 3494, 929, 4853, 4623, 2672, 2455, + 1941, 2609, 4134, 4108, 2709, 59, 3401, 98, 3576, + 1588, 4101, 959, 602, 3872, 2270, 418, 1619, 3623, + 3965, 3758, 4849, 4530, 269, 2011, 3219, 1893, 4757, + 2309, 960, 875, 3210, 849, 3719, 2567, 3340, 2896, + 2614, 194, 2416, 3880, 1720, 1426, 1133, 727, 2945, + 3859, 3674, 659, 2068, 1451, 73, 1291, 3143, 2834, + 3862, 727, 1356, 1196, 691, 2711, 4168, 4456, 3379, + 56, 4735, 3391, 96, 4369, 269, 4988, 2527, 1836, + 4860, 4102, 346, 1097, 1699, 581, 2200]]), + values=tensor([0.1662, 0.5682, 0.1337, 0.6167, 0.8720, 0.9047, 0.5481, + 0.3002, 0.3894, 0.4450, 0.0341, 0.0040, 0.3505, 0.0350, + 0.3132, 0.0329, 0.7268, 0.3488, 0.7469, 0.3186, 0.3852, + 0.0759, 0.7766, 0.7853, 0.9571, 0.4870, 0.7127, 0.2049, + 0.4029, 0.1772, 0.3662, 0.1642, 0.5213, 0.0750, 0.0759, + 0.9249, 0.4732, 0.6460, 0.9897, 0.6783, 0.8760, 0.8015, + 0.7313, 0.3460, 0.2353, 0.3950, 0.4032, 0.0798, 0.7432, + 0.9548, 0.1731, 0.1292, 0.7915, 0.7719, 0.2307, 0.4730, + 0.9903, 0.7265, 0.9022, 0.6433, 0.0604, 0.9616, 0.2620, + 0.0843, 0.6322, 0.3044, 0.9898, 0.6905, 0.2556, 0.9085, + 0.9880, 0.6707, 0.6758, 0.7103, 0.6734, 0.1296, 0.0165, + 0.8943, 0.6920, 0.4419, 0.5488, 0.1345, 0.5493, 0.4914, + 0.2205, 0.3314, 0.6389, 0.7842, 0.9699, 0.4692, 
0.6474, + 0.7745, 0.7992, 0.2645, 0.5332, 0.2474, 0.3018, 0.3130, + 0.6708, 0.8526, 0.9046, 0.3866, 0.1610, 0.7065, 0.0781, + 0.5350, 0.8830, 0.9915, 0.4865, 0.8257, 0.4306, 0.5092, + 0.4702, 0.8054, 0.7559, 0.2012, 0.4322, 0.9357, 0.5091, + 0.2951, 0.4157, 0.6291, 0.6587, 0.2032, 0.9074, 0.0637, + 0.8535, 0.4907, 0.8527, 0.6557, 0.3455, 0.5973, 0.5037, + 0.2804, 0.4067, 0.7667, 0.0988, 0.8470, 0.9610, 0.4946, + 0.9603, 0.8693, 0.3685, 0.8560, 0.5465, 0.7563, 0.9083, + 0.3293, 0.3475, 0.4387, 0.0934, 0.7920, 0.9912, 0.3726, + 0.6590, 0.0404, 0.6502, 0.9535, 0.6878, 0.5702, 0.3342, + 0.7784, 0.1708, 0.9632, 0.7128, 0.3755, 0.0415, 0.9656, + 0.7912, 0.1324, 0.8293, 0.9357, 0.8093, 0.8668, 0.0769, + 0.5226, 0.8821, 0.5627, 0.9417, 0.7723, 0.5581, 0.6417, + 0.9283, 0.2203, 0.9435, 0.7833, 0.4922, 0.7544, 0.4930, + 0.2055, 0.8428, 0.6480, 0.3319, 0.2742, 0.6529, 0.8059, + 0.7574, 0.3947, 0.9694, 0.4828, 0.0404, 0.7292, 0.8415, + 0.6988, 0.7335, 0.8863, 0.9856, 0.8926, 0.8057, 0.7773, + 0.7884, 0.4345, 0.3753, 0.0305, 0.6592, 0.1194, 0.2772, + 0.2512, 0.7531, 0.4528, 0.4777, 0.5961, 0.1356, 0.7459, + 0.1761, 0.4463, 0.9461, 0.6897, 0.3513, 0.4304, 0.5004, + 0.0631, 0.1231, 0.9708, 0.6645, 0.5456, 0.4188, 0.2899, + 0.5126, 0.9546, 0.4252, 0.9228, 0.4180, 0.3045, 0.1706, + 0.2310, 0.6236, 0.9528, 0.9448, 0.9079]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.1555, 0.4727, 0.6780, ..., 0.7452, 0.8720, 0.8261]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.507478475570679 seconds + +[20.01, 18.58, 19.16, 18.41, 18.57, 18.58, 18.75, 18.28, 18.47, 18.45] +[52.6] +14.046735286712646 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 657339, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.507478475570679, 'TIME_S_1KI': 0.015984870022272644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 738.8582760810853, 'W': 52.6} +[20.01, 18.58, 19.16, 18.41, 18.57, 18.58, 18.75, 18.28, 18.47, 18.45, 18.8, 18.82, 19.46, 18.68, 18.45, 18.48, 19.05, 18.77, 18.86, 18.44] +337.22 +16.861 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 657339, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.507478475570679, 'TIME_S_1KI': 0.015984870022272644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 738.8582760810853, 'W': 52.6, 'J_1KI': 1.1240140567973074, 'W_1KI': 0.08001959415157171, 'W_D': 35.739000000000004, 'J_D': 502.0162724118233, 'W_D_1KI': 0.054369206756331216, 'J_D_1KI': 8.27110619578805e-05} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..ab5f5e4 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 205269, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.446171522140503, "TIME_S_1KI": 0.05089015643930892, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 746.4549215507507, "W": 52.72, "J_1KI": 3.63647175925615, "W_1KI": 
0.25683371575834635, "W_D": 35.52775, "J_D": 503.03231864804025, "W_D_1KI": 0.17307898416224562, "J_D_1KI": 0.0008431813092198316} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..cb7eecb --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01698613166809082} + +tensor(indices=tensor([[2254, 318, 1383, ..., 1891, 4522, 2908], + [2593, 4670, 4638, ..., 2668, 2035, 426]]), + values=tensor([0.0456, 0.8923, 0.6404, ..., 0.6341, 0.6032, 0.2804]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.8987, 0.0761, 0.5515, ..., 0.7891, 0.3447, 0.7080]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.01698613166809082 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '61815', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.1619837284088135} + +tensor(indices=tensor([[ 457, 1002, 2225, ..., 796, 1709, 4999], + [ 974, 1684, 2121, ..., 1078, 81, 2950]]), + values=tensor([0.9488, 0.6975, 0.2006, ..., 0.9426, 0.0186, 0.4857]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.9601, 0.3703, 0.7281, ..., 0.7581, 0.0514, 0.7748]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 3.1619837284088135 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '205269', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.446171522140503} + +tensor(indices=tensor([[4898, 4877, 1527, ..., 3334, 4519, 4465], + [1066, 3154, 3911, ..., 4483, 4042, 3763]]), + values=tensor([0.8722, 0.9425, 0.0691, ..., 0.8751, 0.4805, 0.0876]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.2146, 0.6281, 0.8949, ..., 0.5356, 0.7290, 0.7869]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.446171522140503 seconds + +tensor(indices=tensor([[4898, 4877, 1527, ..., 3334, 4519, 4465], + [1066, 3154, 3911, ..., 4483, 4042, 3763]]), + values=tensor([0.8722, 0.9425, 0.0691, ..., 0.8751, 0.4805, 0.0876]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.2146, 0.6281, 0.8949, ..., 0.5356, 0.7290, 0.7869]) 
+Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.446171522140503 seconds + +[18.97, 19.01, 18.98, 19.55, 18.56, 18.58, 19.09, 18.77, 19.57, 18.42] +[52.72] +14.158856630325317 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 205269, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.446171522140503, 'TIME_S_1KI': 0.05089015643930892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 746.4549215507507, 'W': 52.72} +[18.97, 19.01, 18.98, 19.55, 18.56, 18.58, 19.09, 18.77, 19.57, 18.42, 18.95, 18.48, 18.74, 19.18, 18.82, 18.8, 22.29, 19.11, 18.6, 19.09] +343.845 +17.19225 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 205269, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.446171522140503, 'TIME_S_1KI': 0.05089015643930892, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 746.4549215507507, 'W': 52.72, 'J_1KI': 3.63647175925615, 'W_1KI': 0.25683371575834635, 'W_D': 35.52775, 'J_D': 503.03231864804025, 'W_D_1KI': 0.17307898416224562, 'J_D_1KI': 0.0008431813092198316} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json index 3a94ff3..1ca2fad 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3659, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.513276815414429, "TIME_S_1KI": 2.8732650493070317, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 699.5738222312928, "W": 48.46, "J_1KI": 191.19262701046534, "W_1KI": 13.244055752937962, "W_D": 31.944000000000003, "J_D": 461.147052772522, "W_D_1KI": 8.73025416780541, "J_D_1KI": 2.3859672500151436} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3625, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.394656419754028, "TIME_S_1KI": 2.867491426139042, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 779.2213072299957, "W": 54.15, "J_1KI": 214.95760199448156, "W_1KI": 14.937931034482759, "W_D": 37.07275, "J_D": 533.4787944157124, "W_D_1KI": 10.226965517241378, "J_D_1KI": 2.821231866825208} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output index 3030a8d..83ec13e 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,34 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3051443099975586} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3142099380493164} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 20, ..., 999978, +tensor(crow_indices=tensor([ 0, 10, 21, ..., 999982, + 999993, 1000000]), + col_indices=tensor([ 9595, 12371, 15341, ..., 68774, 88209, 89359]), + values=tensor([0.5689, 0.1836, 0.5609, ..., 0.8694, 0.5209, 0.6444]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0233, 0.5023, 0.1367, ..., 0.1519, 0.1947, 0.3720]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.3142099380493164 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3341', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.675478458404541} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 19, ..., 999976, 999988, 1000000]), - col_indices=tensor([10874, 16180, 25759, ..., 85120, 90595, 97571]), - values=tensor([0.6980, 0.1450, 0.2222, ..., 0.0442, 0.2876, 0.2305]), + col_indices=tensor([ 4263, 6426, 34678, ..., 65159, 69813, 89844]), + values=tensor([0.9067, 0.8913, 0.2582, ..., 0.8629, 0.5634, 0.7349]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9393, 0.9771, 0.4381, ..., 0.2097, 0.8256, 0.0395]) +tensor([0.6979, 0.0510, 0.2361, ..., 0.7900, 0.5351, 0.0148]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 0.3051443099975586 seconds +Time: 9.675478458404541 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3440', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.870911598205566} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3625', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.394656419754028} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 16, 29, ..., 999980, - 999991, 1000000]), - col_indices=tensor([ 5523, 13716, 16446, ..., 89337, 96388, 97674]), - values=tensor([0.9883, 0.0360, 0.0063, ..., 0.9506, 0.8956, 0.5971]), +tensor(crow_indices=tensor([ 0, 12, 23, ..., 999985, + 999995, 1000000]), + col_indices=tensor([ 2685, 12181, 30698, ..., 53529, 78569, 87140]), + values=tensor([0.4991, 0.6096, 0.8526, ..., 0.6405, 0.0983, 0.2024]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2735, 0.3671, 0.2374, ..., 0.9952, 0.6404, 0.3809]) +tensor([0.2731, 0.2403, 0.0212, ..., 0.3306, 0.6694, 0.0606]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 9.870911598205566 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3659', '-ss', '100000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.513276815414429} +Time: 10.394656419754028 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 15, 23, ..., 999980, - 999992, 1000000]), - col_indices=tensor([ 3420, 12508, 17596, ..., 74140, 75972, 84324]), - values=tensor([0.2457, 0.5369, 0.4041, ..., 0.2835, 0.7746, 0.0854]), +tensor(crow_indices=tensor([ 0, 12, 23, ..., 999985, + 999995, 1000000]), + col_indices=tensor([ 2685, 12181, 30698, ..., 53529, 78569, 87140]), + values=tensor([0.4991, 0.6096, 0.8526, ..., 0.6405, 0.0983, 0.2024]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1485, 0.5045, 0.7097, ..., 0.5250, 0.4505, 0.4467]) +tensor([0.2731, 0.2403, 0.0212, ..., 0.3306, 0.6694, 0.0606]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,30 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.513276815414429 seconds +Time: 10.394656419754028 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 15, 23, ..., 999980, - 999992, 1000000]), - col_indices=tensor([ 3420, 12508, 17596, ..., 74140, 75972, 84324]), - values=tensor([0.2457, 0.5369, 0.4041, ..., 0.2835, 0.7746, 0.0854]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.1485, 0.5045, 0.7097, ..., 0.5250, 0.4505, 0.4467]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 10.513276815414429 seconds - -[19.03, 17.89, 18.11, 18.19, 17.92, 19.96, 20.76, 18.13, 18.16, 18.25] -[48.46] -14.436108589172363 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3659, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.513276815414429, 'TIME_S_1KI': 2.8732650493070317, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 699.5738222312928, 'W': 48.46} -[19.03, 17.89, 18.11, 18.19, 17.92, 19.96, 20.76, 18.13, 18.16, 18.25, 18.14, 18.17, 18.05, 17.87, 18.21, 18.32, 17.88, 17.89, 18.02, 18.16] -330.31999999999994 -16.516 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3659, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.513276815414429, 'TIME_S_1KI': 2.8732650493070317, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 699.5738222312928, 'W': 48.46, 'J_1KI': 191.19262701046534, 'W_1KI': 13.244055752937962, 'W_D': 31.944000000000003, 'J_D': 461.147052772522, 'W_D_1KI': 8.73025416780541, 'J_D_1KI': 2.3859672500151436} +[19.72, 18.58, 18.34, 22.32, 19.76, 18.72, 19.22, 18.5, 18.89, 18.49] +[54.15] +14.39005184173584 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3625, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 
0.0001, 'TIME_S': 10.394656419754028, 'TIME_S_1KI': 2.867491426139042, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 779.2213072299957, 'W': 54.15} +[19.72, 18.58, 18.34, 22.32, 19.76, 18.72, 19.22, 18.5, 18.89, 18.49, 18.86, 18.52, 19.54, 18.62, 18.52, 18.39, 18.6, 18.45, 18.66, 18.76] +341.545 +17.07725 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3625, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.394656419754028, 'TIME_S_1KI': 2.867491426139042, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 779.2213072299957, 'W': 54.15, 'J_1KI': 214.95760199448156, 'W_1KI': 14.937931034482759, 'W_D': 37.07275, 'J_D': 533.4787944157124, 'W_D_1KI': 10.226965517241378, 'J_D_1KI': 2.821231866825208} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json index 253d722..933d87d 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 376, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.408750772476196, "TIME_S_1KI": 27.68284779913882, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 864.391910111904, "W": 48.41, "J_1KI": 2298.914654552936, "W_1KI": 128.75, "W_D": 32.1215, "J_D": 573.5501908832788, "W_D_1KI": 85.42952127659574, "J_D_1KI": 227.20617360796737} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 372, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.448328256607056, "TIME_S_1KI": 28.086903915610364, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 951.2793003344535, "W": 54.37, "J_1KI": 2557.2024202539073, "W_1KI": 146.15591397849462, "W_D": 37.12375, "J_D": 649.5320015779138, "W_D_1KI": 99.79502688172043, "J_D_1KI": 268.2662012949474} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output index 8c93db0..0739078 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,34 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.7904272079467773} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.818298578262329} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 83, 173, ..., 9999788, +tensor(crow_indices=tensor([ 0, 94, 190, ..., 9999809, + 9999901, 10000000]), + col_indices=tensor([ 679, 794, 1284, ..., 96944, 98164, 99000]), + values=tensor([0.4981, 0.4934, 0.8562, ..., 0.8870, 0.8354, 0.5851]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2786, 0.4976, 0.3615, ..., 0.4502, 0.8268, 0.0967]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 2.818298578262329 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '372', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.448328256607056} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 105, 209, ..., 9999801, 9999895, 10000000]), - col_indices=tensor([ 687, 1990, 2832, ..., 93491, 98909, 99713]), - values=tensor([0.2182, 0.4312, 0.1873, ..., 0.5994, 0.5663, 0.3895]), + col_indices=tensor([ 873, 1265, 1381, ..., 96409, 96627, 98514]), + values=tensor([0.9757, 0.4166, 0.2628, ..., 0.5352, 0.4288, 0.9605]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6218, 0.2935, 0.0099, ..., 0.4944, 0.2399, 0.4191]) +tensor([0.3038, 0.5505, 0.5965, ..., 0.9015, 0.0486, 0.8079]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 2.7904272079467773 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '376', '-ss', '100000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.408750772476196} +Time: 10.448328256607056 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 101, 179, ..., 9999785, - 9999885, 10000000]), - col_indices=tensor([ 974, 1017, 1175, ..., 97865, 98322, 99037]), - values=tensor([0.8598, 0.2680, 0.8943, ..., 0.1763, 0.5676, 0.4916]), +tensor(crow_indices=tensor([ 0, 105, 209, ..., 9999801, + 9999895, 10000000]), + col_indices=tensor([ 873, 1265, 1381, ..., 96409, 96627, 98514]), + values=tensor([0.9757, 0.4166, 0.2628, ..., 0.5352, 0.4288, 0.9605]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4049, 0.5573, 0.2557, ..., 0.0615, 0.7671, 0.4849]) +tensor([0.3038, 0.5505, 0.5965, ..., 0.9015, 0.0486, 0.8079]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,30 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.408750772476196 seconds +Time: 10.448328256607056 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 101, 179, ..., 9999785, - 9999885, 10000000]), - col_indices=tensor([ 974, 1017, 1175, ..., 97865, 98322, 99037]), - values=tensor([0.8598, 0.2680, 0.8943, ..., 0.1763, 0.5676, 0.4916]), - size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4049, 0.5573, 0.2557, ..., 0.0615, 0.7671, 0.4849]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 10000000 -Density: 0.001 -Time: 10.408750772476196 seconds - -[18.19, 17.86, 18.15, 17.76, 18.57, 17.97, 17.88, 17.91, 18.5, 18.33] -[48.41] -17.855647802352905 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 376, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.408750772476196, 'TIME_S_1KI': 27.68284779913882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 864.391910111904, 'W': 48.41} -[18.19, 17.86, 18.15, 17.76, 18.57, 17.97, 17.88, 17.91, 18.5, 18.33, 18.68, 17.76, 17.89, 18.01, 18.9, 17.73, 18.37, 17.97, 18.0, 17.88] -325.77 -16.2885 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 376, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.408750772476196, 'TIME_S_1KI': 27.68284779913882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 864.391910111904, 'W': 48.41, 'J_1KI': 2298.914654552936, 'W_1KI': 128.75, 'W_D': 32.1215, 'J_D': 573.5501908832788, 'W_D_1KI': 85.42952127659574, 'J_D_1KI': 227.20617360796737} +[18.85, 18.56, 19.07, 19.17, 20.19, 18.59, 18.9, 18.53, 18.6, 18.44] +[54.37] +17.496400594711304 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.448328256607056, 'TIME_S_1KI': 28.086903915610364, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 951.2793003344535, 'W': 54.37} +[18.85, 18.56, 19.07, 19.17, 20.19, 18.59, 18.9, 18.53, 18.6, 18.44, 19.0, 18.67, 18.6, 
23.11, 19.22, 19.08, 19.57, 18.58, 18.99, 18.7] +344.925 +17.24625 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 372, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.448328256607056, 'TIME_S_1KI': 28.086903915610364, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 951.2793003344535, 'W': 54.37, 'J_1KI': 2557.2024202539073, 'W_1KI': 146.15591397849462, 'W_D': 37.12375, 'J_D': 649.5320015779138, 'W_D_1KI': 99.79502688172043, 'J_D_1KI': 268.2662012949474} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..267ac01 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 28.13305163383484, "TIME_S_1KI": 281.3305163383484, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3732.906339125633, "W": 53.41, "J_1KI": 37329.063391256335, "W_1KI": 534.1, "W_D": 36.108999999999995, "J_D": 2523.713068704128, "W_D_1KI": 361.09, "J_D_1KI": 3610.8999999999996} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..900f98d --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 28.13305163383484} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 978, 1920, ..., + 99998042, 99999027, 100000000]), + col_indices=tensor([ 99, 121, 146, ..., 99677, 99702, 99983]), + values=tensor([0.9922, 0.1937, 0.7553, ..., 0.0650, 0.9551, 0.6219]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.3364, 0.0616, 0.7992, ..., 0.4393, 0.1655, 0.2840]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 28.13305163383484 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 978, 1920, ..., + 99998042, 99999027, 100000000]), + col_indices=tensor([ 99, 121, 146, ..., 99677, 99702, 99983]), + values=tensor([0.9922, 0.1937, 0.7553, ..., 0.0650, 0.9551, 0.6219]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.3364, 0.0616, 0.7992, ..., 0.4393, 0.1655, 0.2840]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 28.13305163383484 seconds + +[18.88, 18.44, 18.83, 23.29, 18.84, 18.42, 19.21, 18.51, 18.57, 18.64] +[53.41] +69.89152479171753 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 28.13305163383484, 'TIME_S_1KI': 281.3305163383484, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3732.906339125633, 'W': 53.41} +[18.88, 18.44, 18.83, 23.29, 18.84, 18.42, 19.21, 18.51, 18.57, 18.64, 19.21, 18.5, 18.61, 22.68, 19.39, 18.55, 19.14, 18.61, 18.77, 18.59] +346.02 +17.301 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 28.13305163383484, 'TIME_S_1KI': 281.3305163383484, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3732.906339125633, 'W': 53.41, 'J_1KI': 37329.063391256335, 'W_1KI': 534.1, 'W_D': 36.108999999999995, 'J_D': 2523.713068704128, 'W_D_1KI': 361.09, 'J_D_1KI': 3610.8999999999996} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json index 9c4b03d..2289536 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 8087, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.519959688186646, "TIME_S_1KI": 1.300848236451916, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 670.7503997087479, "W": 46.91, "J_1KI": 82.94180780372794, "W_1KI": 5.800667738345492, "W_D": 30.164499999999997, "J_D": 431.3120961844921, "W_D_1KI": 3.729998763447508, "J_D_1KI": 0.46123392648046346} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 8021, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.439693927764893, "TIME_S_1KI": 1.3015451848603532, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 753.044820907116, "W": 52.77, "J_1KI": 93.88415670204662, "W_1KI": 6.578980177035283, "W_D": 35.9735, "J_D": 513.3533800436259, "W_D_1KI": 4.484914599177159, "J_D_1KI": 0.5591465651635905} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output index cc342ed..c0a1649 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.14809489250183105} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.14688372611999512} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 99993, 99994, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 100000, 100000, 100000]), - col_indices=tensor([11566, 2001, 14819, ..., 49184, 52555, 95716]), - values=tensor([0.6903, 0.1382, 0.4591, ..., 0.3067, 0.8088, 0.6364]), + col_indices=tensor([91487, 18138, 33632, ..., 69542, 2807, 61553]), + values=tensor([0.3172, 0.5107, 0.8105, ..., 0.7415, 0.8638, 0.9887]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.9563, 0.6034, 0.0890, ..., 0.8548, 0.6115, 0.7911]) +tensor([0.9817, 0.8005, 0.9250, ..., 0.8478, 0.4886, 0.4801]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.14809489250183105 seconds +Time: 0.14688372611999512 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7090', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.205029487609863} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7148', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.356493711471558} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 99999, 99999, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99999, 100000, 100000]), - col_indices=tensor([ 7711, 16815, 22150, ..., 77554, 50594, 27282]), - values=tensor([0.3735, 0.5582, 0.2278, ..., 0.2317, 0.9623, 0.5188]), + col_indices=tensor([72680, 33748, 44813, ..., 44201, 1700, 10017]), + values=tensor([0.7425, 0.3395, 0.2846, ..., 0.1862, 0.0564, 0.9994]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8847, 0.2063, 0.0570, ..., 0.4149, 0.1346, 0.4208]) +tensor([0.9156, 0.9400, 0.0601, ..., 0.4846, 0.4224, 0.5521]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 9.205029487609863 seconds +Time: 9.356493711471558 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8087', '-ss', '100000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.519959688186646} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8021', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.439693927764893} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99999, 100000, 100000]), - col_indices=tensor([95443, 50058, 77222, ..., 43317, 3451, 10339]), - values=tensor([0.1078, 0.8522, 0.7935, ..., 0.8133, 0.3945, 0.6126]), + col_indices=tensor([44228, 41904, 44214, ..., 74875, 97256, 58231]), + values=tensor([0.4903, 0.3681, 0.9972, ..., 0.7317, 0.3326, 0.0360]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8231, 0.3876, 0.1205, ..., 0.1479, 0.8608, 0.1605]) +tensor([0.0537, 0.8160, 0.1280, ..., 0.5848, 0.5575, 0.6796]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.519959688186646 seconds +Time: 10.439693927764893 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99999, 100000, 100000]), - col_indices=tensor([95443, 50058, 77222, ..., 43317, 3451, 10339]), - values=tensor([0.1078, 0.8522, 0.7935, ..., 0.8133, 0.3945, 0.6126]), + col_indices=tensor([44228, 41904, 44214, ..., 74875, 97256, 58231]), + values=tensor([0.4903, 0.3681, 0.9972, ..., 0.7317, 0.3326, 0.0360]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8231, 0.3876, 0.1205, ..., 0.1479, 0.8608, 0.1605]) +tensor([0.0537, 0.8160, 0.1280, ..., 0.5848, 0.5575, 0.6796]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.519959688186646 seconds +Time: 10.439693927764893 seconds -[18.45, 18.35, 18.06, 17.99, 17.93, 18.06, 18.08, 17.93, 22.44, 18.22] -[46.91] -14.298665523529053 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8087, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.519959688186646, 'TIME_S_1KI': 1.300848236451916, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 670.7503997087479, 'W': 46.91} -[18.45, 18.35, 18.06, 17.99, 17.93, 18.06, 18.08, 17.93, 22.44, 18.22, 19.6, 17.81, 18.45, 17.9, 22.14, 18.02, 18.39, 17.96, 18.34, 17.85] -334.90999999999997 -16.7455 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8087, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.519959688186646, 'TIME_S_1KI': 1.300848236451916, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 670.7503997087479, 'W': 46.91, 'J_1KI': 82.94180780372794, 'W_1KI': 5.800667738345492, 'W_D': 30.164499999999997, 'J_D': 431.3120961844921, 'W_D_1KI': 3.729998763447508, 'J_D_1KI': 0.46123392648046346} +[18.87, 19.11, 18.57, 18.45, 18.65, 18.59, 18.65, 18.89, 18.53, 18.76] +[52.77] +14.270320653915405 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8021, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.439693927764893, 'TIME_S_1KI': 1.3015451848603532, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.044820907116, 'W': 52.77} +[18.87, 19.11, 18.57, 18.45, 18.65, 18.59, 18.65, 18.89, 18.53, 18.76, 18.94, 18.41, 18.64, 18.76, 18.81, 18.52, 18.51, 18.74, 18.6, 18.43] +335.93000000000006 +16.7965 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8021, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.439693927764893, 'TIME_S_1KI': 1.3015451848603532, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.044820907116, 'W': 52.77, 'J_1KI': 93.88415670204662, 'W_1KI': 6.578980177035283, 'W_D': 35.9735, 'J_D': 513.3533800436259, 'W_D_1KI': 4.484914599177159, 'J_D_1KI': 0.5591465651635905} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.json index 0e52b1a..d4df35b 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4760, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.509625434875488, "TIME_S_1KI": 2.2079045031251026, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 685.1951848602295, "W": 48.04, "J_1KI": 143.94856824794735, "W_1KI": 10.092436974789916, "W_D": 31.590249999999997, "J_D": 450.57217295026777, "W_D_1KI": 6.636607142857142, "J_D_1KI": 1.3942451980792314} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4750, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.478902816772461, "TIME_S_1KI": 2.206084803531044, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 770.5235755848885, "W": 53.69, "J_1KI": 162.2154895968186, "W_1KI": 11.303157894736842, "W_D": 36.774, "J_D": 527.7562668757439, "W_D_1KI": 7.741894736842106, "J_D_1KI": 1.6298725761772854} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.output index e5c487f..d80ad0e 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.2381742000579834} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.23902273178100586} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 10, ..., 499988, 499996, +tensor(crow_indices=tensor([ 0, 4, 10, ..., 499990, 499996, 500000]), - col_indices=tensor([ 7829, 21471, 22951, ..., 29509, 41224, 66852]), - values=tensor([0.9739, 0.6225, 0.8607, ..., 0.0619, 0.3093, 0.0510]), + col_indices=tensor([46215, 48676, 63655, ..., 18613, 29803, 54196]), + values=tensor([0.6348, 0.4657, 0.3860, ..., 0.9465, 0.8143, 0.3811]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.6694, 0.1094, 0.7903, ..., 0.4860, 0.7386, 0.2172]) +tensor([0.1130, 0.6589, 0.4685, ..., 0.3406, 0.2852, 0.5797]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 0.2381742000579834 seconds +Time: 0.23902273178100586 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4408', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.722268104553223} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4392', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.70858883857727} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 8, ..., 499991, 499997, +tensor(crow_indices=tensor([ 0, 4, 10, ..., 499991, 499995, 500000]), - col_indices=tensor([ 8468, 11831, 46487, ..., 65418, 70471, 71020]), - values=tensor([0.5611, 0.2625, 0.0139, ..., 0.7643, 0.0263, 0.5630]), + col_indices=tensor([13857, 24838, 53079, ..., 51175, 58095, 86985]), + values=tensor([0.7254, 0.5307, 0.5131, ..., 0.5524, 0.8711, 0.6845]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.6021, 0.3259, 0.4454, ..., 0.1291, 0.3066, 0.3093]) +tensor([0.3462, 0.9722, 0.1255, ..., 0.6830, 0.5801, 0.1671]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 9.722268104553223 seconds +Time: 9.70858883857727 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4760', '-ss', '100000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.509625434875488} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4750', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.478902816772461} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 7, ..., 499986, 499991, +tensor(crow_indices=tensor([ 0, 4, 7, ..., 499991, 499993, 500000]), - col_indices=tensor([37454, 51813, 86506, ..., 62954, 73906, 92773]), - values=tensor([0.8256, 0.7091, 0.8154, ..., 0.4160, 0.7952, 0.1689]), + col_indices=tensor([19509, 54538, 74720, ..., 64544, 67166, 82018]), + values=tensor([0.3550, 0.7838, 0.6148, ..., 0.8985, 0.6480, 0.7596]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.1880, 0.2475, 0.0895, ..., 0.2917, 0.5906, 0.9519]) +tensor([0.5561, 0.3606, 0.7254, ..., 0.6601, 0.9264, 0.9766]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.509625434875488 seconds +Time: 10.478902816772461 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 7, ..., 499986, 499991, +tensor(crow_indices=tensor([ 0, 4, 7, ..., 499991, 499993, 500000]), - col_indices=tensor([37454, 51813, 86506, ..., 62954, 73906, 92773]), - values=tensor([0.8256, 0.7091, 0.8154, ..., 0.4160, 0.7952, 0.1689]), + col_indices=tensor([19509, 54538, 74720, ..., 64544, 67166, 82018]), + values=tensor([0.3550, 0.7838, 0.6148, ..., 0.8985, 0.6480, 0.7596]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.1880, 0.2475, 0.0895, ..., 0.2917, 0.5906, 0.9519]) +tensor([0.5561, 0.3606, 0.7254, ..., 0.6601, 0.9264, 0.9766]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.509625434875488 seconds +Time: 10.478902816772461 seconds -[18.07, 18.03, 18.14, 17.98, 17.8, 18.31, 18.47, 18.23, 18.16, 17.92] -[48.04] -14.26301383972168 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4760, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.509625434875488, 'TIME_S_1KI': 2.2079045031251026, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 685.1951848602295, 'W': 48.04} -[18.07, 18.03, 18.14, 17.98, 17.8, 18.31, 18.47, 18.23, 18.16, 17.92, 18.33, 17.81, 18.24, 18.05, 18.09, 17.75, 17.78, 21.35, 18.67, 17.95] -328.995 -16.44975 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4760, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.509625434875488, 'TIME_S_1KI': 2.2079045031251026, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 685.1951848602295, 'W': 48.04, 'J_1KI': 143.94856824794735, 'W_1KI': 10.092436974789916, 'W_D': 31.590249999999997, 'J_D': 450.57217295026777, 'W_D_1KI': 6.636607142857142, 'J_D_1KI': 1.3942451980792314} +[23.96, 18.57, 18.91, 18.97, 18.6, 18.54, 18.61, 18.77, 18.53, 18.41] +[53.69] +14.35134243965149 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4750, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.478902816772461, 'TIME_S_1KI': 2.206084803531044, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 770.5235755848885, 'W': 53.69} +[23.96, 18.57, 18.91, 18.97, 18.6, 18.54, 18.61, 18.77, 18.53, 18.41, 18.78, 18.69, 18.58, 18.42, 18.72, 18.51, 18.59, 18.62, 18.9, 18.43] +338.32 +16.916 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4750, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.478902816772461, 'TIME_S_1KI': 2.206084803531044, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 770.5235755848885, 'W': 53.69, 'J_1KI': 162.2154895968186, 'W_1KI': 11.303157894736842, 'W_D': 36.774, 'J_D': 527.7562668757439, 'W_D_1KI': 7.741894736842106, 'J_D_1KI': 1.6298725761772854} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json index 20b7d93..232248f 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 84718, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.541555881500244, "TIME_S_1KI": 0.12443112303761, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 661.3758532905579, "W": 46.44, "J_1KI": 7.80679257407585, "W_1KI": 0.5481715810099388, "W_D": 30.123999999999995, "J_D": 429.01133084678645, "W_D_1KI": 0.3555796879057579, "J_D_1KI": 0.004197215325028422} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 84535, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.447430849075317, "TIME_S_1KI": 0.12358704500000375, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 746.7200598120689, "W": 52.35, "J_1KI": 8.833265035926763, "W_1KI": 0.6192701248003786, "W_D": 35.134750000000004, "J_D": 501.1618456825615, "W_D_1KI": 0.41562370615721306, "J_D_1KI": 0.00491658728523349} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output index 79fd885..1cb8566 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.028100967407226562} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.028455734252929688} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 10000, 10000]), - col_indices=tensor([1696, 2591, 3015, ..., 1730, 8585, 3790]), - values=tensor([0.6837, 0.7697, 0.7550, ..., 0.1323, 0.4514, 0.4553]), +tensor(crow_indices=tensor([ 0, 0, 3, ..., 9999, 10000, 10000]), + col_indices=tensor([2863, 3203, 5585, ..., 4025, 1664, 4563]), + values=tensor([0.8169, 0.1123, 0.9845, ..., 0.3088, 0.1015, 0.1122]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1557, 0.9230, 0.6401, ..., 0.3725, 0.8926, 0.6402]) +tensor([0.5166, 0.1713, 0.7752, ..., 0.5891, 0.3496, 0.5710]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.028100967407226562 seconds +Time: 0.028455734252929688 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '37365', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.631014823913574} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '36899', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.583176374435425} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 9998, 9998, 10000]), - col_indices=tensor([2386, 8388, 9261, ..., 1344, 2569, 4425]), - values=tensor([0.6000, 0.2415, 0.7139, ..., 0.1197, 0.4001, 0.0791]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9998, 9999, 10000]), + col_indices=tensor([ 38, 7868, 9573, ..., 9594, 8912, 2909]), + values=tensor([0.9633, 0.5599, 0.7433, ..., 0.5575, 0.2197, 0.0804]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9194, 0.6903, 0.0708, ..., 0.1917, 0.6424, 0.6800]) +tensor([0.9040, 0.9660, 0.9215, ..., 0.1976, 0.2947, 0.7033]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 4.631014823913574 seconds +Time: 4.583176374435425 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '84718', '-ss', '10000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.541555881500244} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '84535', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.447430849075317} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9999, 10000]), - col_indices=tensor([6630, 3375, 966, ..., 5451, 76, 624]), - values=tensor([0.0314, 0.6841, 0.2123, ..., 0.3011, 0.8872, 0.9156]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 10000, 10000, 10000]), + col_indices=tensor([2318, 5199, 4784, ..., 521, 6780, 7890]), + values=tensor([0.6235, 0.7346, 0.8772, ..., 0.1411, 0.9530, 0.2973]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.4846, 0.1841, 0.4323, ..., 0.0718, 0.1957, 0.5902]) +tensor([0.9172, 0.1812, 0.4417, ..., 0.6631, 0.2007, 0.7007]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.541555881500244 seconds +Time: 10.447430849075317 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9999, 10000]), - col_indices=tensor([6630, 3375, 966, ..., 5451, 76, 624]), - values=tensor([0.0314, 0.6841, 0.2123, ..., 0.3011, 0.8872, 0.9156]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 10000, 10000, 10000]), + col_indices=tensor([2318, 5199, 4784, ..., 521, 6780, 7890]), + values=tensor([0.6235, 0.7346, 0.8772, ..., 0.1411, 0.9530, 0.2973]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.4846, 0.1841, 0.4323, ..., 0.0718, 0.1957, 0.5902]) +tensor([0.9172, 0.1812, 0.4417, ..., 0.6631, 0.2007, 0.7007]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.541555881500244 seconds +Time: 10.447430849075317 seconds -[18.42, 18.02, 18.15, 18.67, 18.24, 18.06, 18.05, 17.99, 18.11, 18.07] -[46.44] -14.241512775421143 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 84718, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.541555881500244, 'TIME_S_1KI': 0.12443112303761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.3758532905579, 'W': 46.44} -[18.42, 18.02, 18.15, 18.67, 18.24, 18.06, 18.05, 17.99, 18.11, 18.07, 18.36, 18.1, 17.99, 18.16, 18.02, 18.13, 18.12, 18.03, 18.02, 18.07] -326.32000000000005 -16.316000000000003 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 84718, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.541555881500244, 'TIME_S_1KI': 0.12443112303761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.3758532905579, 'W': 46.44, 'J_1KI': 7.80679257407585, 'W_1KI': 0.5481715810099388, 'W_D': 30.123999999999995, 'J_D': 429.01133084678645, 'W_D_1KI': 0.3555796879057579, 'J_D_1KI': 0.004197215325028422} +[19.01, 18.45, 19.05, 18.83, 20.38, 18.78, 18.93, 18.59, 19.06, 18.59] +[52.35] +14.263993501663208 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 84535, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.447430849075317, 'TIME_S_1KI': 0.12358704500000375, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 746.7200598120689, 'W': 52.35} +[19.01, 18.45, 19.05, 18.83, 20.38, 18.78, 18.93, 18.59, 19.06, 18.59, 19.13, 18.92, 18.71, 18.62, 18.73, 18.82, 18.69, 23.04, 19.11, 18.46] +344.30499999999995 +17.215249999999997 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 84535, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.447430849075317, 'TIME_S_1KI': 0.12358704500000375, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 746.7200598120689, 'W': 52.35, 'J_1KI': 8.833265035926763, 'W_1KI': 0.6192701248003786, 'W_D': 35.134750000000004, 'J_D': 501.1618456825615, 'W_D_1KI': 0.41562370615721306, 'J_D_1KI': 0.00491658728523349} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json index e07fffc..616dd52 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 34651, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.471668004989624, "TIME_S_1KI": 0.30220391922281103, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 677.0323536157608, "W": 47.45, "J_1KI": 19.53860937969354, "W_1KI": 1.369368849383856, "W_D": 30.828750000000007, "J_D": 439.8748402851821, "W_D_1KI": 0.8896929381547433, "J_D_1KI": 0.025675822866720825} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 34381, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.354862451553345, "TIME_S_1KI": 0.3011797926631961, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 758.8139896988869, "W": 53.45, "J_1KI": 22.070736444515482, "W_1KI": 1.5546377359588146, "W_D": 36.52275, "J_D": 518.5027809593082, "W_D_1KI": 1.0622945813094442, "J_D_1KI": 0.030897722035701233} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output index ff06711..59d193d 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.04611039161682129} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.04586505889892578} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 22, ..., 99983, 99992, +tensor(crow_indices=tensor([ 0, 7, 16, ..., 99984, 99991, 100000]), - col_indices=tensor([ 845, 1153, 1508, ..., 8313, 9367, 9854]), - values=tensor([0.8746, 0.4039, 0.9243, ..., 0.5657, 0.2713, 0.6449]), + col_indices=tensor([1286, 1569, 6335, ..., 5067, 8495, 9347]), + values=tensor([0.2454, 0.5356, 0.5719, ..., 0.1387, 0.3718, 0.5544]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.3540, 0.2628, 0.4314, ..., 0.0912, 0.3507, 0.8651]) +tensor([0.4425, 0.1808, 0.7035, ..., 0.1236, 0.9167, 0.8594]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.04611039161682129 seconds +Time: 0.04586505889892578 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22771', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 6.900022268295288} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22893', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 6.991376161575317} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 19, ..., 99978, 99991, +tensor(crow_indices=tensor([ 0, 7, 22, ..., 99980, 99993, 100000]), - col_indices=tensor([ 917, 1959, 2965, ..., 8075, 8263, 9058]), - values=tensor([0.2271, 0.8712, 0.9636, ..., 0.2167, 0.1262, 0.7253]), + col_indices=tensor([1030, 1120, 3717, ..., 6893, 8161, 8605]), + values=tensor([0.5471, 0.9720, 0.9415, ..., 0.5145, 0.9989, 0.2057]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8029, 0.1325, 0.2655, ..., 0.5832, 0.4718, 0.3144]) +tensor([0.3134, 0.0807, 0.4892, ..., 0.6457, 0.4386, 0.1718]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 6.900022268295288 seconds +Time: 6.991376161575317 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '34651', '-ss', '10000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.471668004989624} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '34381', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.354862451553345} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 14, ..., 99994, 99997, +tensor(crow_indices=tensor([ 0, 13, 24, ..., 99984, 99990, 100000]), - col_indices=tensor([ 24, 1396, 2236, ..., 5590, 6310, 9874]), - values=tensor([0.4982, 0.6812, 0.1465, ..., 0.3747, 0.0311, 0.9162]), + col_indices=tensor([ 47, 400, 2720, ..., 8715, 9139, 9752]), + values=tensor([0.3311, 0.8378, 0.2116, ..., 0.1501, 0.6097, 0.8050]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7533, 0.0703, 0.6276, ..., 0.6008, 0.2603, 0.3256]) +tensor([0.8329, 0.0693, 0.6537, ..., 0.9223, 0.1997, 0.5278]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.471668004989624 seconds +Time: 10.354862451553345 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 14, ..., 99994, 99997, +tensor(crow_indices=tensor([ 0, 13, 24, ..., 99984, 99990, 100000]), - col_indices=tensor([ 24, 1396, 2236, ..., 5590, 6310, 9874]), - values=tensor([0.4982, 0.6812, 0.1465, ..., 0.3747, 0.0311, 0.9162]), + col_indices=tensor([ 47, 400, 2720, ..., 8715, 9139, 9752]), + values=tensor([0.3311, 0.8378, 0.2116, ..., 0.1501, 0.6097, 0.8050]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7533, 0.0703, 0.6276, ..., 0.6008, 0.2603, 0.3256]) +tensor([0.8329, 0.0693, 0.6537, ..., 0.9223, 0.1997, 0.5278]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.471668004989624 seconds +Time: 10.354862451553345 seconds -[18.34, 18.19, 18.22, 18.02, 22.16, 18.64, 18.12, 17.87, 17.95, 18.81] -[47.45] -14.26833200454712 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34651, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.471668004989624, 'TIME_S_1KI': 0.30220391922281103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 677.0323536157608, 'W': 47.45} -[18.34, 18.19, 18.22, 18.02, 22.16, 18.64, 18.12, 17.87, 17.95, 18.81, 22.38, 17.93, 18.41, 17.77, 18.07, 17.88, 18.52, 17.88, 18.1, 17.86] -332.42499999999995 -16.621249999999996 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34651, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.471668004989624, 'TIME_S_1KI': 0.30220391922281103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 677.0323536157608, 'W': 47.45, 'J_1KI': 19.53860937969354, 'W_1KI': 1.369368849383856, 'W_D': 30.828750000000007, 'J_D': 439.8748402851821, 'W_D_1KI': 0.8896929381547433, 'J_D_1KI': 0.025675822866720825} +[18.93, 18.77, 18.52, 18.65, 18.56, 18.49, 18.5, 18.59, 18.48, 22.8] +[53.45] +14.196707010269165 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34381, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.354862451553345, 'TIME_S_1KI': 0.3011797926631961, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 758.8139896988869, 'W': 53.45} +[18.93, 18.77, 18.52, 18.65, 18.56, 18.49, 18.5, 18.59, 18.48, 22.8, 18.87, 18.53, 18.61, 18.49, 18.52, 18.82, 18.88, 19.97, 18.54, 18.65] +338.54499999999996 +16.927249999999997 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34381, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.354862451553345, 'TIME_S_1KI': 0.3011797926631961, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 758.8139896988869, 'W': 53.45, 'J_1KI': 22.070736444515482, 'W_1KI': 1.5546377359588146, 'W_D': 36.52275, 'J_D': 518.5027809593082, 'W_D_1KI': 1.0622945813094442, 'J_D_1KI': 0.030897722035701233} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json index 89c422b..6b71db3 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 5583, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.513006925582886, "TIME_S_1KI": 1.8830390337780558, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 692.6418607521057, "W": 47.919999999999995, "J_1KI": 124.06266536845884, "W_1KI": 8.58319899695504, "W_D": 31.175249999999995, "J_D": 450.611084503591, "W_D_1KI": 5.583960236432024, "J_D_1KI": 1.0001719929127753} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 5568, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.491016387939453, "TIME_S_1KI": 1.884162425994873, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 784.1237088561058, "W": 54.21, "J_1KI": 140.82681552731785, "W_1KI": 9.735991379310345, "W_D": 37.368, "J_D": 540.5116169071198, "W_D_1KI": 6.711206896551724, "J_D_1KI": 1.2053173305588585} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output index d152642..3d068e7 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.2040116786956787} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.20917105674743652} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 106, 193, ..., 999811, - 999905, 1000000]), - col_indices=tensor([ 107, 139, 344, ..., 9485, 9560, 9767]), - values=tensor([0.2657, 0.7219, 0.2773, ..., 0.6022, 0.5377, 0.2291]), +tensor(crow_indices=tensor([ 0, 108, 217, ..., 999801, + 999904, 1000000]), + col_indices=tensor([ 201, 293, 363, ..., 9740, 9784, 9831]), + values=tensor([0.0755, 0.8455, 0.1421, ..., 0.4938, 0.3767, 0.3652]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8604, 0.1859, 0.3719, ..., 0.6286, 0.9460, 0.3185]) +tensor([0.5155, 0.3962, 0.4386, ..., 0.0611, 0.7322, 0.1065]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 0.2040116786956787 seconds +Time: 0.20917105674743652 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5146', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.677676439285278} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5019', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.464475870132446} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 90, 186, ..., 999811, - 999907, 1000000]), - col_indices=tensor([ 74, 208, 311, ..., 9654, 9863, 9976]), - values=tensor([0.0395, 0.4059, 0.0831, ..., 0.6188, 0.9591, 0.8953]), +tensor(crow_indices=tensor([ 0, 94, 204, ..., 999823, + 999910, 1000000]), + col_indices=tensor([ 9, 178, 235, ..., 9823, 9912, 9951]), + values=tensor([0.9644, 0.3014, 0.0048, ..., 0.8699, 0.7725, 0.2107]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6360, 0.9265, 0.4313, ..., 0.6926, 0.7242, 0.0651]) +tensor([0.4099, 0.5837, 0.3039, ..., 0.5378, 0.1585, 0.3682]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 9.677676439285278 seconds +Time: 9.464475870132446 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5583', '-ss', '10000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.513006925582886} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5568', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.491016387939453} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 104, 207, ..., 999792, - 999898, 1000000]), - col_indices=tensor([ 168, 206, 240, ..., 9827, 9842, 9996]), - values=tensor([0.8276, 0.5768, 0.6424, ..., 0.0752, 0.7475, 0.3129]), +tensor(crow_indices=tensor([ 0, 90, 187, ..., 999816, + 999910, 1000000]), + col_indices=tensor([ 130, 331, 358, ..., 9312, 9659, 9864]), + values=tensor([0.2328, 0.9346, 0.2422, ..., 0.1433, 0.4918, 0.2397]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.7571, 0.4178, 0.1860, ..., 0.0563, 0.6255, 0.7203]) +tensor([0.3909, 0.7739, 0.3092, ..., 0.2381, 0.8305, 0.8369]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.513006925582886 seconds +Time: 10.491016387939453 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 104, 207, ..., 999792, - 999898, 1000000]), - col_indices=tensor([ 168, 206, 240, ..., 9827, 9842, 9996]), - values=tensor([0.8276, 0.5768, 0.6424, ..., 0.0752, 0.7475, 0.3129]), +tensor(crow_indices=tensor([ 0, 90, 187, ..., 999816, + 999910, 1000000]), + col_indices=tensor([ 130, 331, 358, ..., 9312, 9659, 9864]), + values=tensor([0.2328, 0.9346, 0.2422, ..., 0.1433, 0.4918, 0.2397]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.7571, 0.4178, 0.1860, ..., 0.0563, 0.6255, 0.7203]) +tensor([0.3909, 0.7739, 0.3092, ..., 0.2381, 0.8305, 0.8369]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.513006925582886 seconds +Time: 10.491016387939453 seconds -[18.38, 17.86, 18.24, 17.87, 19.06, 17.88, 18.29, 21.74, 18.54, 18.08] -[47.92] -14.454128980636597 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5583, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.513006925582886, 'TIME_S_1KI': 1.8830390337780558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.6418607521057, 'W': 47.919999999999995} -[18.38, 17.86, 18.24, 17.87, 19.06, 17.88, 18.29, 21.74, 18.54, 18.08, 18.58, 17.99, 17.96, 22.09, 18.22, 17.97, 17.93, 18.49, 18.13, 18.23] -334.895 -16.74475 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5583, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.513006925582886, 'TIME_S_1KI': 1.8830390337780558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.6418607521057, 'W': 47.919999999999995, 'J_1KI': 124.06266536845884, 'W_1KI': 8.58319899695504, 'W_D': 31.175249999999995, 'J_D': 450.611084503591, 'W_D_1KI': 5.583960236432024, 'J_D_1KI': 1.0001719929127753} +[18.94, 18.64, 18.65, 18.57, 18.36, 18.8, 18.73, 18.52, 18.5, 18.61] +[54.21] +14.464558362960815 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5568, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.491016387939453, 'TIME_S_1KI': 1.884162425994873, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 784.1237088561058, 'W': 54.21} +[18.94, 18.64, 18.65, 18.57, 18.36, 18.8, 18.73, 18.52, 18.5, 18.61, 18.94, 18.45, 18.95, 18.62, 18.72, 18.63, 19.37, 18.88, 18.82, 18.77] +336.84 +16.842 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5568, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.491016387939453, 'TIME_S_1KI': 1.884162425994873, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 784.1237088561058, 'W': 54.21, 'J_1KI': 140.82681552731785, 'W_1KI': 9.735991379310345, 'W_D': 37.368, 'J_D': 540.5116169071198, 'W_D_1KI': 6.711206896551724, 'J_D_1KI': 1.2053173305588585} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json index 20cc2bc..79d1b9e 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 959, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.294347047805786, "TIME_S_1KI": 10.73445990386422, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 792.9360737204552, "W": 48.35, "J_1KI": 826.8363646720074, "W_1KI": 50.417101147028156, "W_D": 31.9585, "J_D": 524.1168047982454, "W_D_1KI": 33.324817518248175, "J_D_1KI": 34.74954902841311} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 984, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.35936164855957, "TIME_S_1KI": 10.527806553414196, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 874.1194585132598, "W": 54.41, "J_1KI": 888.3327830419307, "W_1KI": 55.294715447154466, "W_D": 37.35825, "J_D": 600.175946719408, "W_D_1KI": 37.96570121951219, "J_D_1KI": 38.583029694626205} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output index 643494f..8890410 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.0941581726074219} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.0661232471466064} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 514, 1009, ..., 4998988, - 4999478, 5000000]), - col_indices=tensor([ 8, 23, 83, ..., 9969, 9982, 9990]), - values=tensor([0.1393, 0.4453, 0.1108, ..., 0.3215, 0.7885, 0.8444]), +tensor(crow_indices=tensor([ 0, 512, 1006, ..., 4999050, + 4999502, 5000000]), + col_indices=tensor([ 27, 66, 69, ..., 9942, 9959, 9960]), + values=tensor([0.7928, 0.2549, 0.9608, ..., 0.7743, 0.6342, 0.3657]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.3317, 0.0622, 0.5595, ..., 0.2290, 0.2268, 0.9236]) +tensor([0.4712, 0.8826, 0.6721, ..., 0.6632, 0.2423, 0.7939]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 1.0941581726074219 seconds +Time: 1.0661232471466064 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '959', '-ss', '10000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.294347047805786} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '984', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.35936164855957} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 514, 994, ..., 4999000, - 4999500, 5000000]), - col_indices=tensor([ 13, 16, 23, ..., 9955, 9988, 9993]), - values=tensor([0.2414, 0.9977, 0.9772, ..., 0.9200, 0.6029, 0.8714]), +tensor(crow_indices=tensor([ 0, 464, 981, ..., 4998963, + 4999483, 5000000]), + col_indices=tensor([ 30, 35, 45, ..., 9959, 9962, 9993]), + values=tensor([0.5322, 0.6168, 0.7005, ..., 0.6069, 0.6645, 0.7150]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0530, 0.1017, 0.7510, ..., 0.2543, 0.0728, 0.9686]) +tensor([0.4721, 0.8060, 0.5631, ..., 0.4372, 0.7905, 0.4509]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.294347047805786 seconds +Time: 10.35936164855957 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 514, 994, ..., 4999000, - 4999500, 5000000]), - col_indices=tensor([ 13, 16, 23, ..., 9955, 9988, 9993]), - values=tensor([0.2414, 0.9977, 0.9772, ..., 0.9200, 0.6029, 0.8714]), +tensor(crow_indices=tensor([ 0, 464, 981, ..., 4998963, + 4999483, 5000000]), + col_indices=tensor([ 30, 35, 45, ..., 9959, 9962, 9993]), + values=tensor([0.5322, 0.6168, 0.7005, ..., 0.6069, 0.6645, 0.7150]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0530, 0.1017, 0.7510, ..., 0.2543, 0.0728, 0.9686]) +tensor([0.4721, 0.8060, 0.5631, ..., 0.4372, 0.7905, 0.4509]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.294347047805786 seconds +Time: 10.35936164855957 seconds -[18.47, 18.05, 17.89, 17.81, 18.54, 18.75, 17.99, 17.87, 18.49, 18.81] -[48.35] -16.399918794631958 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 959, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.294347047805786, 'TIME_S_1KI': 10.73445990386422, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.9360737204552, 'W': 48.35} -[18.47, 18.05, 17.89, 17.81, 18.54, 18.75, 17.99, 17.87, 18.49, 18.81, 18.47, 17.8, 17.9, 18.12, 18.16, 18.55, 18.0, 18.63, 18.45, 17.91] -327.83 -16.3915 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 959, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.294347047805786, 'TIME_S_1KI': 10.73445990386422, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.9360737204552, 'W': 48.35, 'J_1KI': 826.8363646720074, 'W_1KI': 50.417101147028156, 'W_D': 31.9585, 'J_D': 524.1168047982454, 'W_D_1KI': 33.324817518248175, 'J_D_1KI': 34.74954902841311} +[19.12, 18.46, 18.53, 18.47, 19.72, 22.18, 18.6, 18.51, 19.45, 18.49] +[54.41] +16.06541919708252 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 984, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.35936164855957, 'TIME_S_1KI': 10.527806553414196, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 874.1194585132598, 'W': 54.41} +[19.12, 18.46, 18.53, 18.47, 19.72, 22.18, 18.6, 18.51, 19.45, 18.49, 19.15, 18.48, 19.07, 18.4, 18.75, 18.52, 18.64, 18.47, 19.05, 18.71] +341.03499999999997 +17.05175 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 984, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.35936164855957, 'TIME_S_1KI': 10.527806553414196, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 874.1194585132598, 'W': 54.41, 'J_1KI': 888.3327830419307, 'W_1KI': 55.294715447154466, 'W_D': 37.35825, 'J_D': 600.175946719408, 'W_D_1KI': 37.96570121951219, 'J_D_1KI': 38.583029694626205} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json index a3bba53..92a8569 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 389, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.411779165267944, "TIME_S_1KI": 26.765499139506282, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 876.1174001097679, "W": 47.85, "J_1KI": 2252.2298203335936, "W_1KI": 123.00771208226222, "W_D": 31.457250000000002, "J_D": 575.971663210094, "W_D_1KI": 80.86696658097686, "J_D_1KI": 207.88423285598165} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 395, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.455632448196411, "TIME_S_1KI": 26.469955565054203, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 950.4409645557403, "W": 54.12, "J_1KI": 2406.1796571031396, "W_1KI": 137.01265822784808, "W_D": 36.991749999999996, "J_D": 649.6392193385958, "W_D_1KI": 93.64999999999999, "J_D_1KI": 237.0886075949367} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output index 086f941..8761a48 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.693603992462158} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.653668165206909} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1004, 2049, ..., 9997955, - 9998949, 10000000]), - col_indices=tensor([ 1, 3, 35, ..., 9984, 9987, 9993]), - values=tensor([0.3631, 0.8073, 0.7190, ..., 0.1286, 0.7057, 0.1104]), +tensor(crow_indices=tensor([ 0, 976, 1914, ..., 9997996, + 9998980, 10000000]), + col_indices=tensor([ 22, 23, 24, ..., 9979, 9981, 9994]), + values=tensor([0.6028, 0.2174, 0.1558, ..., 0.2391, 0.7709, 0.5491]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7885, 0.1169, 0.1101, ..., 0.4416, 0.5822, 0.3212]) +tensor([0.9646, 0.0721, 0.9565, ..., 0.6113, 0.6933, 0.7146]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 2.693603992462158 seconds +Time: 2.653668165206909 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '389', '-ss', '10000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.411779165267944} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '395', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.455632448196411} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 966, 1941, ..., 9997989, - 9998973, 10000000]), - col_indices=tensor([ 2, 11, 26, ..., 9956, 9965, 9978]), - values=tensor([0.0342, 0.9218, 0.6993, ..., 0.4506, 0.1146, 0.2093]), +tensor(crow_indices=tensor([ 0, 1007, 2030, ..., 9998046, + 9999015, 10000000]), + col_indices=tensor([ 7, 39, 40, ..., 9952, 9976, 9988]), + values=tensor([0.2137, 0.2372, 0.5461, ..., 0.5874, 0.2267, 0.3233]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6037, 0.3505, 0.1319, ..., 0.1315, 0.2126, 0.8791]) +tensor([0.3576, 0.7682, 0.3831, ..., 0.3551, 0.0880, 0.6819]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.411779165267944 seconds +Time: 10.455632448196411 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 966, 1941, ..., 9997989, - 9998973, 10000000]), - col_indices=tensor([ 2, 11, 26, ..., 9956, 9965, 9978]), - values=tensor([0.0342, 0.9218, 0.6993, ..., 0.4506, 0.1146, 0.2093]), +tensor(crow_indices=tensor([ 0, 1007, 2030, ..., 9998046, + 9999015, 10000000]), + col_indices=tensor([ 7, 39, 40, ..., 9952, 9976, 9988]), + values=tensor([0.2137, 0.2372, 0.5461, ..., 0.5874, 0.2267, 0.3233]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6037, 0.3505, 0.1319, ..., 0.1315, 0.2126, 0.8791]) +tensor([0.3576, 0.7682, 0.3831, ..., 0.3551, 0.0880, 0.6819]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.411779165267944 seconds +Time: 10.455632448196411 seconds -[18.87, 18.17, 18.37, 17.93, 17.97, 18.29, 18.55, 17.92, 18.13, 17.84] -[47.85] -18.30966353416443 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 389, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.411779165267944, 'TIME_S_1KI': 26.765499139506282, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 876.1174001097679, 'W': 47.85} -[18.87, 18.17, 18.37, 17.93, 17.97, 18.29, 18.55, 17.92, 18.13, 17.84, 19.36, 18.07, 17.99, 17.91, 18.82, 18.49, 18.1, 18.04, 18.09, 17.96] -327.855 -16.39275 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 389, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.411779165267944, 'TIME_S_1KI': 26.765499139506282, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 876.1174001097679, 'W': 47.85, 'J_1KI': 2252.2298203335936, 'W_1KI': 123.00771208226222, 'W_D': 31.457250000000002, 'J_D': 575.971663210094, 'W_D_1KI': 80.86696658097686, 'J_D_1KI': 207.88423285598165} +[19.36, 18.76, 18.66, 18.48, 19.47, 18.73, 18.73, 18.42, 18.63, 18.7] +[54.12] +17.561732530593872 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.455632448196411, 'TIME_S_1KI': 26.469955565054203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 950.4409645557403, 'W': 54.12} +[19.36, 18.76, 18.66, 18.48, 19.47, 18.73, 18.73, 18.42, 18.63, 18.7, 18.69, 18.51, 18.48, 19.67, 23.07, 18.55, 18.65, 19.34, 18.71, 18.66] +342.56500000000005 +17.12825 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 395, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.455632448196411, 'TIME_S_1KI': 26.469955565054203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 950.4409645557403, 'W': 54.12, 'J_1KI': 2406.1796571031396, 'W_1KI': 137.01265822784808, 'W_D': 36.991749999999996, 'J_D': 649.6392193385958, 'W_D_1KI': 93.64999999999999, 'J_D_1KI': 237.0886075949367} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.json index 9e5a000..d8afc97 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 193, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.419133424758911, "TIME_S_1KI": 53.98514727854358, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1480.8600453448294, "W": 45.41, "J_1KI": 7672.849975879945, "W_1KI": 235.28497409326422, "W_D": 29.142749999999996, "J_D": 950.3707132013438, "W_D_1KI": 150.9987046632124, "J_D_1KI": 782.3767080995461} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 196, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.46868371963501, "TIME_S_1KI": 53.41165163079087, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1113.3714706707, "W": 53.72, "J_1KI": 5680.466687095409, "W_1KI": 274.08163265306126, "W_D": 36.585499999999996, "J_D": 758.2511530197858, "W_D_1KI": 186.66071428571425, "J_D_1KI": 952.3505830903788} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.output index f051b42..d57c721 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.42013144493103} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.35328221321106} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2043, 4027, ..., 19995921, - 19998006, 20000000]), - col_indices=tensor([ 9, 15, 16, ..., 9989, 9991, 9993]), - values=tensor([0.8685, 0.2737, 0.0800, ..., 0.3440, 0.3550, 0.7008]), +tensor(crow_indices=tensor([ 0, 1985, 4014, ..., 19996011, + 19998003, 20000000]), + col_indices=tensor([ 1, 2, 4, ..., 9982, 9984, 9986]), + values=tensor([0.5600, 0.3798, 0.2809, ..., 0.4925, 0.5579, 0.2402]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.2841, 0.6439, 0.8852, ..., 0.0124, 0.9656, 0.3759]) +tensor([0.1902, 0.0411, 0.2807, ..., 0.8123, 0.1809, 0.3115]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 5.42013144493103 seconds +Time: 5.35328221321106 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '193', '-ss', '10000', '-sd', '0.2', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.419133424758911} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '196', '-ss', '10000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.46868371963501} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1925, 3946, ..., 19996050, - 19998038, 20000000]), - col_indices=tensor([ 4, 9, 22, ..., 9994, 9995, 9997]), - values=tensor([0.4143, 0.0158, 0.3991, ..., 0.4975, 0.2189, 0.5132]), +tensor(crow_indices=tensor([ 0, 2016, 4136, ..., 19996065, + 19998058, 20000000]), + col_indices=tensor([ 0, 1, 6, ..., 9981, 9991, 9993]), + values=tensor([0.1996, 0.0892, 0.4643, ..., 0.6653, 0.0272, 0.6524]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.4618, 0.1550, 0.6479, ..., 0.8342, 0.8619, 0.7737]) +tensor([0.9437, 0.4502, 0.6936, ..., 0.4828, 0.5406, 0.8208]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.419133424758911 seconds +Time: 10.46868371963501 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1925, 3946, ..., 19996050, - 19998038, 20000000]), - col_indices=tensor([ 4, 9, 22, ..., 9994, 9995, 9997]), - values=tensor([0.4143, 0.0158, 0.3991, ..., 0.4975, 0.2189, 0.5132]), +tensor(crow_indices=tensor([ 0, 2016, 4136, ..., 19996065, + 19998058, 20000000]), + col_indices=tensor([ 0, 1, 6, ..., 9981, 9991, 9993]), + values=tensor([0.1996, 0.0892, 0.4643, ..., 0.6653, 0.0272, 0.6524]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.4618, 0.1550, 0.6479, ..., 0.8342, 0.8619, 0.7737]) +tensor([0.9437, 0.4502, 0.6936, ..., 0.4828, 0.5406, 0.8208]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.419133424758911 seconds +Time: 10.46868371963501 seconds -[18.6, 18.13, 18.25, 17.88, 17.94, 17.94, 18.23, 18.23, 18.37, 17.79] -[45.41] -32.61087965965271 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.419133424758911, 'TIME_S_1KI': 53.98514727854358, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1480.8600453448294, 'W': 45.41} -[18.6, 18.13, 18.25, 17.88, 17.94, 17.94, 18.23, 18.23, 18.37, 17.79, 18.29, 17.94, 17.98, 17.82, 18.04, 18.23, 17.96, 17.98, 18.11, 17.95] -325.345 -16.26725 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.419133424758911, 'TIME_S_1KI': 53.98514727854358, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1480.8600453448294, 'W': 45.41, 'J_1KI': 7672.849975879945, 'W_1KI': 235.28497409326422, 'W_D': 29.142749999999996, 'J_D': 950.3707132013438, 'W_D_1KI': 150.9987046632124, 'J_D_1KI': 782.3767080995461} +[18.93, 19.98, 18.49, 18.61, 18.55, 19.11, 18.84, 19.4, 22.73, 18.68] +[53.72] +20.72545552253723 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 196, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.46868371963501, 'TIME_S_1KI': 53.41165163079087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1113.3714706707, 'W': 53.72} +[18.93, 19.98, 18.49, 18.61, 18.55, 19.11, 18.84, 19.4, 22.73, 18.68, 19.44, 18.47, 18.8, 18.41, 18.55, 18.58, 18.94, 18.89, 18.64, 18.35] +342.69 +17.1345 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 196, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.46868371963501, 'TIME_S_1KI': 53.41165163079087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1113.3714706707, 'W': 53.72, 'J_1KI': 5680.466687095409, 'W_1KI': 274.08163265306126, 'W_D': 36.585499999999996, 'J_D': 758.2511530197858, 'W_D_1KI': 186.66071428571425, 'J_D_1KI': 952.3505830903788} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.json index 28c2d57..5f0eef4 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.80802845954895, "TIME_S_1KI": 108.0802845954895, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3840.7128683352466, "W": 37.19, "J_1KI": 38407.128683352465, "W_1KI": 371.9, "W_D": 20.811749999999996, "J_D": 2149.286260757625, "W_D_1KI": 208.11749999999995, "J_D_1KI": 2081.1749999999997} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 130, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.446887969970703, "TIME_S_1KI": 80.36067669208232, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1269.5882425427437, "W": 53.45, "J_1KI": 9766.063404174953, "W_1KI": 411.1538461538462, "W_D": 36.43225, "J_D": 865.3686856759191, "W_D_1KI": 280.24807692307695, "J_D_1KI": 2155.7544378698226} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.output index 5b24363..31c14f3 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.80802845954895} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 8.05606722831726} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2964, 5988, ..., 29994017, - 29996992, 30000000]), - col_indices=tensor([ 4, 9, 14, ..., 9990, 9993, 9995]), - values=tensor([0.5510, 0.0884, 0.7125, ..., 0.7844, 0.3492, 0.1801]), +tensor(crow_indices=tensor([ 0, 3029, 6010, ..., 29993971, + 29997008, 30000000]), + col_indices=tensor([ 6, 8, 11, ..., 9995, 9996, 9999]), + values=tensor([0.6372, 0.1660, 0.5359, ..., 0.8195, 0.0231, 0.1758]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.7648, 0.2589, 0.8570, ..., 0.0438, 0.7014, 0.5513]) +tensor([0.7326, 0.9528, 0.8980, ..., 0.4303, 0.6858, 0.9257]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 10.80802845954895 seconds +Time: 8.05606722831726 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '130', '-ss', '10000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.446887969970703} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2964, 5988, ..., 29994017, - 29996992, 30000000]), - col_indices=tensor([ 4, 9, 14, ..., 9990, 9993, 9995]), - values=tensor([0.5510, 0.0884, 0.7125, ..., 0.7844, 0.3492, 0.1801]), +tensor(crow_indices=tensor([ 0, 2978, 6072, ..., 29993838, + 29996899, 30000000]), + col_indices=tensor([ 1, 2, 3, ..., 9981, 9983, 9987]), + values=tensor([0.6017, 0.3434, 0.3105, ..., 0.2774, 0.3106, 0.3686]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.7648, 0.2589, 0.8570, ..., 0.0438, 0.7014, 0.5513]) +tensor([0.7100, 0.2738, 0.5674, ..., 0.9189, 0.4155, 0.7852]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +36,30 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 10.80802845954895 seconds +Time: 10.446887969970703 seconds -[18.36, 18.76, 18.08, 17.8, 18.22, 18.13, 18.92, 17.85, 18.59, 18.05] -[37.19] -103.27273106575012 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.80802845954895, 'TIME_S_1KI': 108.0802845954895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3840.7128683352466, 'W': 37.19} -[18.36, 18.76, 18.08, 17.8, 18.22, 18.13, 18.92, 17.85, 18.59, 18.05, 18.39, 18.01, 18.68, 17.8, 18.23, 17.94, 17.9, 17.92, 18.33, 18.01] -327.56500000000005 -16.37825 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.80802845954895, 'TIME_S_1KI': 108.0802845954895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3840.7128683352466, 'W': 
37.19, 'J_1KI': 38407.128683352465, 'W_1KI': 371.9, 'W_D': 20.811749999999996, 'J_D': 2149.286260757625, 'W_D_1KI': 208.11749999999995, 'J_D_1KI': 2081.1749999999997} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2978, 6072, ..., 29993838, + 29996899, 30000000]), + col_indices=tensor([ 1, 2, 3, ..., 9981, 9983, 9987]), + values=tensor([0.6017, 0.3434, 0.3105, ..., 0.2774, 0.3106, 0.3686]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.7100, 0.2738, 0.5674, ..., 0.9189, 0.4155, 0.7852]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.446887969970703 seconds + +[19.09, 18.44, 19.0, 18.75, 18.74, 18.86, 18.82, 18.47, 18.69, 18.9] +[53.45] +23.752820253372192 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 130, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.446887969970703, 'TIME_S_1KI': 80.36067669208232, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1269.5882425427437, 'W': 53.45} +[19.09, 18.44, 19.0, 18.75, 18.74, 18.86, 18.82, 18.47, 18.69, 18.9, 19.05, 19.15, 18.83, 18.69, 18.63, 18.47, 18.97, 18.71, 20.03, 21.17] +340.355 +17.01775 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 130, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.446887969970703, 'TIME_S_1KI': 80.36067669208232, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1269.5882425427437, 'W': 53.45, 'J_1KI': 9766.063404174953, 'W_1KI': 411.1538461538462, 'W_D': 36.43225, 'J_D': 865.3686856759191, 'W_D_1KI': 280.24807692307695, 'J_D_1KI': 2155.7544378698226} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..f90ee1b --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.716163635253906, "TIME_S_1KI": 107.16163635253906, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1446.293502857685, "W": 53.410000000000004, "J_1KI": 14462.93502857685, "W_1KI": 534.1, "W_D": 36.370250000000006, "J_D": 984.8728004551532, "W_D_1KI": 363.70250000000004, "J_D_1KI": 3637.0250000000005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..1c49c52 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 
'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.716163635253906} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4022, 8006, ..., 39992008, + 39996052, 40000000]), + col_indices=tensor([ 2, 5, 11, ..., 9987, 9990, 9994]), + values=tensor([0.9710, 0.1504, 0.2354, ..., 0.2928, 0.0267, 0.1134]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.1346, 0.0124, 0.5368, ..., 0.7889, 0.7272, 0.1080]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.716163635253906 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4022, 8006, ..., 39992008, + 39996052, 40000000]), + col_indices=tensor([ 2, 5, 11, ..., 9987, 9990, 9994]), + values=tensor([0.9710, 0.1504, 0.2354, ..., 0.2928, 0.0267, 0.1134]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.1346, 0.0124, 0.5368, ..., 0.7889, 0.7272, 0.1080]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.716163635253906 seconds + +[19.2, 18.48, 18.79, 18.84, 18.86, 18.53, 18.57, 18.59, 18.57, 18.36] +[53.41] +27.079077005386353 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.716163635253906, 'TIME_S_1KI': 107.16163635253906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1446.293502857685, 'W': 53.410000000000004} +[19.2, 18.48, 18.79, 18.84, 18.86, 18.53, 18.57, 18.59, 18.57, 18.36, 19.07, 18.45, 18.49, 18.67, 22.82, 19.3, 18.62, 19.1, 18.61, 18.38] +340.79499999999996 +17.039749999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.716163635253906, 'TIME_S_1KI': 107.16163635253906, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1446.293502857685, 'W': 53.410000000000004, 'J_1KI': 14462.93502857685, 'W_1KI': 534.1, 'W_D': 36.370250000000006, 'J_D': 984.8728004551532, 'W_D_1KI': 363.70250000000004, 'J_D_1KI': 3637.0250000000005} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.5.json new file mode 
100644 index 0000000..0d979b3 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 13.625776529312134, "TIME_S_1KI": 136.25776529312134, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1769.4445055365563, "W": 53.31, "J_1KI": 17694.445055365562, "W_1KI": 533.1, "W_D": 36.382000000000005, "J_D": 1207.5770024466517, "W_D_1KI": 363.82000000000005, "J_D_1KI": 3638.2000000000003} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..b29bf4d --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 13.625776529312134} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5008, 10008, ..., 49989993, + 49994999, 50000000]), + col_indices=tensor([ 0, 5, 6, ..., 9997, 9998, 9999]), + values=tensor([0.9462, 0.7067, 0.0407, ..., 0.1529, 0.0867, 0.3215]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.7597, 0.0756, 0.8548, ..., 0.0318, 0.4702, 0.4476]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 13.625776529312134 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5008, 10008, ..., 49989993, + 49994999, 50000000]), + col_indices=tensor([ 0, 5, 6, ..., 9997, 9998, 9999]), + values=tensor([0.9462, 0.7067, 0.0407, ..., 0.1529, 0.0867, 0.3215]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.7597, 0.0756, 0.8548, ..., 0.0318, 0.4702, 0.4476]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 13.625776529312134 seconds + +[19.52, 18.56, 18.8, 18.93, 20.12, 18.45, 18.74, 18.63, 19.08, 19.12] +[53.31] +33.19160580635071 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 13.625776529312134, 'TIME_S_1KI': 136.25776529312134, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1769.4445055365563, 'W': 53.31} +[19.52, 18.56, 18.8, 18.93, 20.12, 18.45, 18.74, 18.63, 19.08, 19.12, 18.94, 18.79, 18.56, 18.5, 18.64, 18.65, 18.62, 18.43, 18.61, 19.32] +338.55999999999995 +16.927999999999997 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 13.625776529312134, 'TIME_S_1KI': 136.25776529312134, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1769.4445055365563, 'W': 53.31, 'J_1KI': 17694.445055365562, 'W_1KI': 533.1, 'W_D': 36.382000000000005, 'J_D': 1207.5770024466517, 'W_D_1KI': 363.82000000000005, 'J_D_1KI': 3638.2000000000003} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json index d3d85cb..4995eb6 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 223318, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.272708654403687, "TIME_S_1KI": 0.046000361163917314, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 661.7633163213729, "W": 46.54, "J_1KI": 2.963322778823798, "W_1KI": 0.2084023679237679, "W_D": 29.777749999999997, "J_D": 423.41690143078563, "W_D_1KI": 0.13334236380408207, "J_D_1KI": 0.0005970963549919042} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 226437, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.510953187942505, "TIME_S_1KI": 0.046418885552902155, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 749.9334471988677, "W": 52.58, "J_1KI": 3.311885633526622, "W_1KI": 0.23220586741566088, "W_D": 35.7245, "J_D": 509.5282889778614, "W_D_1KI": 0.15776794428472377, "J_D_1KI": 0.0006967410108980589} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output index 404ad45..9fac80e 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,373 +1,266 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.02031111717224121} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.021244287490844727} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([4137, 7377, 7043, 6108, 9323, 897, 9654, 227, 2578, - 4698, 9915, 4015, 9949, 2349, 8008, 1966, 4875, 4040, - 6979, 2450, 7633, 6619, 9795, 2725, 5746, 5264, 4390, - 2139, 8315, 3676, 8258, 2550, 7703, 6861, 8246, 1325, - 6548, 4010, 3106, 8161, 3532, 2108, 7993, 386, 7921, - 2704, 9957, 7073, 3331, 9744, 4980, 6346, 1498, 1404, - 8824, 1275, 9681, 2862, 4082, 1465, 5274, 7840, 5924, - 9483, 98, 2679, 7843, 1423, 2659, 4200, 7882, 4396, - 8968, 5460, 7766, 9614, 1590, 8686, 7557, 1225, 5913, - 5155, 9823, 7515, 4242, 147, 5927, 2787, 5542, 7655, - 5944, 2866, 5695, 9408, 3998, 7348, 7456, 5035, 7898, - 6129, 2807, 3798, 4059, 6217, 4055, 6985, 8048, 2902, - 2337, 461, 3316, 7537, 42, 5645, 9082, 1247, 3998, - 9775, 8363, 2735, 93, 476, 9970, 9170, 4926, 3860, - 2000, 8258, 8990, 1304, 1955, 2898, 4931, 9660, 749, - 7970, 1430, 136, 1728, 3493, 5173, 838, 9924, 9475, - 9208, 3918, 2507, 3992, 2000, 7754, 5632, 87, 7861, - 1028, 9056, 3445, 6453, 2164, 8030, 8837, 3400, 1991, - 5976, 7138, 4278, 6917, 8431, 3928, 8200, 9576, 544, - 3683, 3997, 1043, 2590, 5349, 7018, 58, 9350, 6692, - 1721, 3408, 5604, 655, 7107, 7982, 4650, 1645, 8377, - 5270, 5374, 9226, 6822, 6438, 8163, 2878, 5855, 3643, - 4380, 8884, 7438, 20, 8087, 7305, 2720, 1905, 9868, - 6635, 715, 5790, 888, 2709, 5700, 3461, 8946, 8488, - 9562, 8957, 6895, 1530, 8225, 2482, 9848, 6363, 60, - 9048, 4834, 584, 7029, 2291, 6622, 5454, 1482, 6564, - 7253, 4134, 4215, 7307, 255, 2256, 1308, 8832, 24, - 420, 4060, 3545, 3071, 2732, 6945, 8909, 1551, 7964, - 4210, 6238, 8532, 2620, 6983, 8405, 3644, 369, 8495, - 2851, 3884, 8614, 2495, 5435, 1497, 6514, 5937, 4865, - 7874, 7518, 9410, 726, 6738, 5623, 3151, 6922, 7926, - 2790, 1189, 762, 7808, 8573, 5170, 8000, 1707, 6332, - 7054, 4243, 6417, 3948, 1132, 1131, 6589, 5174, 60, - 223, 7683, 4637, 4586, 8258, 6780, 8920, 2046, 1612, - 5178, 8919, 7992, 5147, 6177, 3273, 9105, 8454, 3669, - 6494, 4848, 1427, 8432, 5755, 6577, 3626, 331, 3914, - 3636, 8285, 5491, 2909, 3115, 3372, 4727, 2186, 9534, - 5635, 7951, 3172, 9667, 3745, 5270, 428, 6615, 7258, - 1963, 3714, 6510, 2116, 2898, 5537, 481, 5467, 1168, - 6070, 9569, 7666, 9538, 6782, 3677, 4998, 780, 3405, - 8522, 5807, 9775, 4531, 4145, 9593, 484, 
6152, 5639, - 6746, 3067, 8640, 5539, 6804, 5763, 6878, 8527, 9567, - 2461, 88, 3812, 7527, 1732, 2995, 1302, 5999, 4401, - 1636, 9368, 9766, 8129, 7690, 5557, 6058, 8415, 2316, - 3836, 7654, 4129, 8130, 662, 8048, 4078, 5310, 3394, - 697, 8499, 7954, 2695, 9511, 7314, 7366, 7641, 6823, - 6140, 7617, 8756, 7529, 926, 8480, 7592, 9330, 5786, - 5489, 124, 1928, 9569, 8032, 4423, 7047, 765, 9603, - 6041, 2668, 1487, 2665, 9551, 1633, 6181, 84, 7547, - 8082, 7482, 3259, 9213, 5803, 3882, 5394, 1979, 2400, - 6749, 5983, 1058, 6137, 7246, 773, 5714, 4565, 7822, - 6878, 4417, 3086, 1569, 1719, 5174, 5169, 6031, 5251, - 9640, 5409, 14, 2545, 7747, 5114, 5492, 9898, 1409, - 3523, 4063, 6729, 80, 9337, 6181, 8917, 261, 8381, - 6760, 9320, 5295, 3220, 4010, 4744, 8609, 1010, 5305, - 9260, 3490, 8283, 6887, 1747, 9772, 4685, 6659, 290, - 7030, 1170, 5710, 5634, 940, 2186, 3989, 889, 5333, - 6260, 8885, 2061, 5442, 4830, 8951, 3636, 8790, 578, - 5354, 9275, 1915, 5033, 4205, 7287, 2495, 1602, 3870, - 5486, 8600, 8275, 4198, 1084, 888, 495, 9803, 1292, - 8077, 5480, 1678, 5774, 3124, 3887, 332, 2678, 6175, - 409, 8310, 3436, 8252, 4352, 7590, 1442, 1255, 228, - 1152, 5560, 3824, 6610, 6637, 709, 2283, 8512, 6994, - 8985, 8735, 2350, 5740, 1930, 8458, 1361, 5384, 1444, - 7861, 6037, 7951, 3248, 8488, 1791, 1333, 177, 1355, - 6950, 3609, 6501, 8409, 1484, 7136, 5644, 3183, 9492, - 6917, 4022, 2331, 6796, 6370, 8934, 8277, 944, 5653, - 3069, 1456, 5189, 4084, 6941, 6557, 5802, 9989, 2527, - 6957, 9803, 14, 7896, 3797, 8217, 7379, 6116, 3815, - 9642, 9413, 2717, 6241, 1490, 1253, 3463, 2175, 3779, - 4863, 9031, 9205, 1690, 6995, 924, 43, 1394, 9054, - 1581, 3438, 2069, 4045, 5727, 2738, 4333, 5292, 7000, - 5376, 3015, 7111, 9030, 4819, 3485, 7384, 3097, 9362, - 8900, 2958, 8391, 6412, 1332, 5703, 9571, 6546, 2733, - 1890, 2159, 872, 1550, 305, 8578, 3297, 2050, 7548, - 3782, 1940, 640, 686, 8521, 6710, 9047, 3672, 8803, - 8238, 8882, 3472, 9261, 8047, 8763, 3448, 6714, 2416, - 9982, 2400, 3564, 3852, 8717, 2790, 8247, 2277, 887, - 5222, 7805, 9772, 5998, 1226, 9045, 8024, 5, 1965, - 946, 1956, 9244, 9275, 4790, 1278, 551, 2586, 35, - 6129, 7167, 9018, 9329, 5117, 4282, 2002, 7861, 3835, - 5057, 4040, 5353, 9894, 1447, 6457, 4072, 9973, 7775, - 2060, 8382, 3220, 1353, 1529, 3443, 132, 4532, 2405, - 9547, 5941, 254, 7773, 7543, 3043, 7390, 5242, 1337, - 8815, 2460, 9378, 1204, 7745, 4525, 8861, 8564, 8327, - 688, 8999, 6101, 1066, 660, 3165, 3845, 8997, 7684, - 9043, 8631, 6390, 6701, 9914, 2834, 7659, 8364, 6501, - 353, 1688, 3567, 3604, 5656, 5920, 1486, 9285, 8314, - 230, 4186, 6805, 6534, 5779, 6201, 1337, 619, 1368, - 4028, 9239, 5361, 9670, 3165, 8764, 8451, 1749, 5354, - 4757, 9433, 8330, 4044, 8195, 6400, 7150, 969, 9788, - 7121, 3276, 6802, 4876, 1470, 6334, 7325, 2295, 3504, - 4757, 93, 7166, 9475, 6294, 7298, 7786, 8426, 8073, - 3016, 7988, 8621, 7220, 3188, 7756, 7413, 4578, 7672, - 2046, 3507, 519, 6069, 2781, 1914, 7615, 4950, 7118, - 7131, 3707, 5379, 1378, 4426, 7265, 6448, 6606, 9476, - 5509, 2898, 947, 2096, 8223, 9521, 3781, 8358, 8383, - 2204, 5656, 5777, 3613, 5194, 5301, 7557, 8773, 1256, - 6364, 6538, 4220, 9100, 2360, 3056, 9805, 5261, 8211, - 2932, 9519, 1171, 205, 9123, 4449, 7847, 546, 6585, - 7811, 3911, 2070, 1743, 7688, 4267, 306, 7417, 1851, - 1865, 9232, 5014, 5024, 5064, 6511, 2993, 9070, 2950, - 557, 3789, 8751, 5349, 7134, 7831, 8803, 1422, 8903, - 7709, 2862, 91, 1213, 8347, 7430, 5358, 7802, 8452, - 8492, 6556, 5314, 3562, 3630, 6367, 543, 1027, 4551, - 8, 5834, 388, 6891, 
1469, 6519, 5988, 3495, 5974, - 8821, 728, 3788, 2391, 8571, 3972, 9896, 1211, 4134, - 5009, 417, 3154, 4927, 8513, 4238, 3565, 1465, 8832, - 3710, 8052, 4595, 350, 943, 174, 7051, 2274, 4617, - 7712, 6029, 6405, 9644, 16, 4952, 1792, 7406, 3238, - 9821, 8222, 3824, 1039, 5851, 9963, 5374, 3591, 6018, - 1911]), - values=tensor([5.8156e-01, 2.8002e-01, 5.9905e-01, 3.3753e-01, - 2.5754e-01, 1.6141e-01, 7.2303e-01, 4.8284e-01, - 3.7468e-02, 3.4012e-01, 4.4953e-01, 4.5698e-01, - 2.9457e-01, 5.1227e-01, 5.2810e-01, 7.8482e-01, - 3.4909e-01, 8.6832e-01, 2.8544e-01, 4.3171e-01, - 4.6706e-01, 6.8312e-01, 4.0593e-01, 5.5164e-01, - 4.3321e-01, 5.3334e-01, 4.0228e-01, 6.6474e-01, - 5.9923e-02, 4.9814e-01, 3.1823e-01, 2.7113e-01, - 9.4443e-01, 6.0224e-01, 7.4800e-01, 8.6611e-01, - 9.1513e-01, 3.6846e-01, 2.0848e-01, 6.1374e-01, - 9.2960e-01, 6.5074e-01, 9.9559e-01, 2.8175e-01, - 4.7907e-01, 2.3395e-01, 4.2341e-01, 3.1732e-01, - 7.4312e-02, 7.5262e-01, 3.5443e-01, 1.0318e-02, - 2.2228e-01, 8.9822e-01, 4.1342e-01, 5.8355e-01, - 1.7611e-01, 1.6604e-02, 4.8518e-01, 8.7768e-01, - 7.1511e-01, 3.0722e-02, 4.5888e-01, 5.7548e-01, - 9.0778e-01, 4.3445e-02, 3.5396e-01, 4.8016e-01, - 4.4230e-02, 9.9890e-01, 3.9145e-01, 2.9800e-01, - 6.1930e-01, 3.9548e-01, 3.4855e-01, 8.7740e-01, - 3.8543e-01, 6.6978e-01, 9.4785e-01, 4.1594e-01, - 8.4757e-01, 8.1133e-01, 5.6867e-02, 5.5226e-01, - 5.8939e-02, 5.7339e-01, 1.1663e-01, 6.6088e-02, - 1.8693e-01, 9.0155e-01, 5.9654e-01, 3.1837e-01, - 5.0504e-01, 6.4980e-01, 9.5165e-01, 7.1873e-01, - 4.5242e-01, 8.6374e-01, 4.6175e-01, 2.2611e-01, - 1.8382e-01, 3.6584e-01, 6.7886e-02, 7.5200e-02, - 8.3070e-01, 3.0320e-01, 9.8572e-01, 9.5629e-01, - 8.1226e-01, 9.5778e-01, 1.0918e-01, 4.0952e-01, - 7.0520e-01, 8.3725e-02, 7.8669e-01, 6.6612e-01, - 4.0295e-01, 4.8172e-01, 6.7617e-01, 8.8344e-01, - 9.7812e-01, 4.7845e-01, 1.0589e-01, 9.0709e-01, - 9.5599e-01, 6.2792e-01, 8.6374e-01, 5.8811e-01, - 6.0978e-01, 5.2608e-01, 9.0717e-01, 8.6568e-01, - 7.4107e-01, 1.1724e-01, 5.3682e-01, 2.8650e-01, - 8.3774e-01, 2.3658e-01, 7.3055e-01, 4.1747e-01, - 3.8409e-01, 8.0411e-01, 7.2906e-01, 8.5363e-01, - 2.0820e-01, 8.0698e-01, 5.4203e-01, 4.8744e-01, - 5.9718e-01, 9.5159e-01, 5.9867e-01, 6.4359e-01, - 2.5597e-01, 3.0317e-01, 6.6206e-01, 5.1466e-01, - 9.4856e-01, 2.4915e-01, 7.6198e-01, 1.5861e-02, - 2.1708e-01, 1.1645e-02, 7.9476e-01, 9.2575e-01, - 6.7664e-01, 4.2133e-01, 3.3009e-01, 7.6266e-01, - 1.6946e-01, 1.6240e-01, 9.3099e-01, 7.5739e-01, - 4.9141e-01, 6.4062e-02, 3.2170e-02, 3.2530e-01, - 1.2979e-01, 2.9704e-01, 3.2789e-01, 2.5535e-01, - 9.4932e-01, 5.1161e-01, 2.7674e-01, 5.8229e-01, - 6.3609e-01, 9.9252e-01, 9.0421e-02, 2.4892e-01, - 5.9265e-01, 9.3719e-01, 3.9770e-01, 3.8402e-01, - 1.3360e-01, 8.5620e-01, 3.1692e-01, 5.1660e-01, - 9.5131e-01, 3.8815e-01, 7.3377e-01, 2.1604e-01, - 8.0984e-01, 5.4329e-01, 8.9375e-01, 9.7503e-01, - 7.6707e-01, 3.0482e-01, 1.3642e-01, 1.5375e-01, - 1.1664e-01, 6.0092e-01, 3.7722e-01, 6.7558e-01, - 9.1306e-01, 4.7166e-01, 4.9053e-01, 4.7589e-01, - 7.3640e-01, 1.8433e-01, 8.1768e-01, 7.1966e-01, - 4.5083e-01, 3.3424e-02, 1.6627e-01, 2.2704e-02, - 3.6462e-01, 8.1434e-01, 3.7133e-01, 8.8699e-01, - 2.4828e-01, 6.5800e-01, 8.5523e-01, 3.6767e-01, - 1.2518e-01, 5.6608e-03, 9.2298e-01, 5.8367e-01, - 2.1481e-01, 5.0742e-01, 1.9039e-01, 2.3495e-01, - 5.4404e-01, 3.6909e-01, 4.9871e-01, 9.8443e-01, - 8.3432e-01, 3.3951e-01, 9.1294e-01, 6.4420e-01, - 3.3950e-01, 5.7430e-01, 9.6796e-01, 8.2992e-01, - 3.6682e-01, 5.1895e-01, 4.5942e-01, 1.0992e-01, - 1.6148e-01, 7.7366e-01, 
1.0734e-01, 1.5092e-01, - 5.9359e-01, 5.9202e-01, 6.6171e-01, 9.7101e-01, - 2.3146e-01, 6.7431e-01, 2.2316e-01, 7.9158e-01, - 6.0714e-01, 8.9981e-01, 5.6367e-01, 5.8087e-02, - 3.3026e-01, 3.8922e-01, 4.8605e-01, 4.9868e-01, - 2.3291e-01, 7.2357e-01, 9.8898e-01, 6.9764e-01, - 7.7170e-01, 3.4277e-02, 2.2283e-01, 9.1702e-01, - 6.5624e-02, 6.0656e-01, 3.2293e-01, 3.1288e-01, - 8.9296e-01, 8.0019e-01, 1.5753e-01, 1.6827e-01, - 9.2928e-04, 7.8682e-01, 7.2607e-01, 1.3459e-01, - 1.7782e-01, 4.9229e-01, 7.3527e-01, 6.0846e-01, - 5.9970e-01, 5.9203e-01, 5.0594e-02, 8.4256e-01, - 2.0525e-01, 2.4114e-01, 6.6307e-01, 5.7161e-01, - 3.0589e-01, 6.9218e-01, 2.2955e-01, 5.3125e-01, - 5.4542e-01, 7.7805e-01, 9.8328e-01, 1.5098e-01, - 8.6051e-01, 3.0543e-01, 8.4604e-01, 2.0176e-01, - 4.4216e-01, 6.7534e-01, 4.6552e-01, 1.4717e-01, - 1.3506e-01, 5.8764e-01, 9.3422e-01, 6.4074e-02, - 3.3773e-01, 5.7824e-01, 7.9107e-01, 1.3758e-01, - 4.4704e-02, 5.2611e-01, 3.7652e-01, 5.3796e-01, - 9.8105e-01, 3.4618e-01, 7.2700e-01, 9.4524e-01, - 7.1978e-02, 1.4573e-01, 5.6411e-01, 7.1441e-01, - 1.9767e-01, 1.6079e-01, 2.3638e-01, 3.6422e-01, - 3.9279e-01, 9.1192e-01, 8.2524e-01, 4.6723e-01, - 9.0043e-01, 1.3223e-03, 4.2797e-01, 1.8204e-01, - 3.7267e-01, 6.6259e-01, 7.9631e-01, 3.0493e-01, - 5.0885e-01, 8.5562e-01, 1.1331e-01, 9.8719e-01, - 1.7280e-01, 8.7981e-01, 5.5505e-01, 7.7485e-01, - 4.0930e-01, 7.3886e-01, 5.6965e-02, 4.9206e-01, - 2.7155e-01, 1.5581e-01, 2.3386e-01, 4.3266e-01, - 7.7529e-01, 7.5067e-02, 2.5397e-01, 9.4814e-01, - 9.6142e-01, 8.3097e-01, 5.0218e-01, 9.1265e-01, - 2.8372e-01, 3.9980e-01, 5.4897e-01, 2.1594e-01, - 9.3294e-01, 5.4688e-01, 8.6351e-01, 5.3989e-01, - 8.0554e-01, 9.0137e-01, 9.3937e-01, 7.9889e-01, - 5.0169e-01, 9.8315e-01, 4.6786e-01, 8.1309e-01, - 5.9596e-01, 4.7187e-01, 6.3017e-01, 9.6252e-01, - 8.2351e-01, 1.7638e-01, 3.9039e-02, 5.8827e-01, - 4.1792e-01, 7.3715e-01, 4.2459e-01, 1.6410e-01, - 7.9534e-01, 8.3015e-01, 4.4691e-01, 8.4333e-02, - 1.7140e-01, 9.2049e-01, 8.4713e-01, 9.8495e-01, - 8.7326e-01, 8.9186e-01, 8.9461e-01, 7.6165e-01, - 5.1863e-01, 9.0112e-01, 6.7544e-01, 2.1724e-01, - 4.6683e-01, 2.3202e-01, 9.8523e-01, 8.8973e-02, - 6.0117e-01, 4.6704e-01, 9.9637e-01, 1.6636e-01, - 3.5168e-01, 7.7266e-01, 9.0598e-01, 9.0368e-01, - 4.4043e-01, 4.7120e-01, 7.6647e-01, 1.1084e-01, - 2.3819e-01, 9.6001e-01, 9.8689e-01, 2.3260e-01, - 5.2772e-01, 2.2360e-01, 9.2191e-01, 8.0228e-01, - 8.4431e-01, 5.3234e-01, 2.9951e-02, 6.1181e-02, - 3.6272e-01, 9.8281e-01, 9.9138e-01, 5.9192e-01, - 9.2206e-01, 3.7819e-01, 3.1175e-01, 8.8393e-01, - 7.9849e-01, 5.1811e-01, 2.9878e-01, 7.0659e-03, - 5.3577e-01, 5.7861e-01, 9.5654e-01, 8.4606e-01, - 7.8324e-01, 6.0075e-01, 8.1177e-01, 8.4371e-01, - 3.4033e-01, 9.2437e-01, 4.5828e-01, 5.0331e-01, - 4.4795e-01, 7.0522e-01, 6.9947e-01, 2.2760e-01, - 6.6935e-01, 1.7435e-01, 4.4039e-01, 1.6653e-01, - 4.7153e-02, 9.0149e-02, 7.3259e-01, 6.9735e-01, - 7.5150e-01, 8.6004e-01, 6.1248e-01, 8.5985e-01, - 8.8200e-01, 4.8501e-01, 7.3720e-01, 8.0633e-01, - 4.8185e-01, 6.4752e-01, 5.6419e-01, 3.9386e-01, - 8.0603e-01, 5.0349e-01, 1.2778e-01, 2.0548e-01, - 1.5821e-01, 1.6556e-01, 7.1631e-01, 4.0624e-01, - 1.3606e-01, 6.1183e-01, 3.9555e-01, 1.0561e-01, - 2.8675e-01, 5.2567e-02, 7.5976e-01, 7.9129e-02, - 7.3730e-01, 8.3241e-01, 7.1875e-01, 1.1856e-01, - 8.6258e-02, 5.6471e-01, 2.5785e-02, 8.1635e-01, - 6.3949e-02, 1.8129e-01, 6.7175e-01, 3.8091e-02, - 2.2187e-01, 7.4485e-01, 7.9801e-01, 5.3716e-01, - 4.1435e-02, 9.8612e-01, 5.7251e-01, 7.8581e-01, - 2.8949e-01, 7.0273e-01, 
8.5312e-01, 6.0211e-01, - 4.8109e-01, 7.5446e-01, 1.5074e-01, 8.6119e-01, - 6.0038e-01, 9.8757e-01, 4.5262e-03, 4.2661e-01, - 4.7783e-02, 4.0508e-01, 3.0956e-01, 9.5069e-01, - 5.6196e-01, 7.6732e-01, 9.4765e-01, 2.0531e-01, - 6.1458e-01, 5.0798e-02, 5.9745e-01, 5.1827e-01, - 8.9350e-01, 6.1221e-01, 8.1354e-01, 6.6283e-01, - 2.1505e-01, 8.9438e-01, 9.4276e-01, 7.0349e-01, - 3.8506e-01, 4.0799e-01, 3.0782e-02, 5.2799e-01, - 5.3057e-01, 2.7077e-02, 7.8361e-01, 7.3240e-01, - 3.5031e-01, 6.4726e-01, 2.2415e-01, 6.0831e-01, - 2.4833e-02, 3.5753e-01, 6.0782e-01, 5.4172e-02, - 4.9722e-01, 5.4081e-01, 9.9252e-01, 1.6883e-01, - 7.4341e-01, 5.9899e-01, 5.2520e-01, 6.9027e-01, - 3.9066e-01, 2.0380e-01, 6.7595e-01, 1.5434e-01, - 5.9520e-01, 1.2884e-01, 6.0210e-01, 3.2402e-01, - 8.5293e-01, 7.4237e-01, 1.0054e-02, 2.4596e-01, - 7.4130e-01, 5.2280e-01, 8.0440e-01, 6.1296e-01, - 2.0068e-01, 2.5059e-01, 9.8719e-01, 9.6879e-02, - 7.7269e-01, 3.4500e-01, 6.1866e-02, 4.6024e-01, - 2.0564e-01, 4.1027e-01, 6.0301e-01, 4.6657e-01, - 6.1033e-01, 4.8262e-01, 8.8721e-01, 5.7207e-01, - 3.8954e-01, 9.6602e-01, 9.8296e-01, 3.0700e-01, - 4.7009e-01, 6.1393e-01, 3.6798e-01, 4.6415e-01, - 5.5804e-01, 7.3157e-01, 3.9789e-02, 5.7521e-01, - 2.3897e-01, 2.7612e-01, 3.6370e-01, 2.3572e-01, - 7.5048e-01, 7.4924e-01, 5.2436e-01, 4.1265e-01, - 1.0055e-01, 6.8976e-01, 9.0139e-01, 5.4326e-01, - 5.6119e-01, 8.8100e-01, 4.3479e-01, 1.6981e-01, - 5.4168e-01, 4.0741e-02, 3.7419e-01, 6.0348e-01, - 4.7511e-02, 9.1892e-02, 2.9757e-01, 7.9735e-01, - 4.0023e-02, 8.0662e-01, 3.5945e-01, 4.4613e-01, - 4.0985e-01, 6.4909e-01, 8.2913e-01, 5.6092e-01, - 6.1720e-01, 2.2445e-01, 9.2177e-01, 5.4056e-01, - 9.9170e-02, 4.0349e-01, 1.0757e-01, 4.6385e-01, - 9.1243e-01, 9.6524e-01, 1.5630e-02, 1.2372e-02, - 6.8277e-01, 6.6230e-01, 4.2663e-01, 1.1331e-01, - 1.1843e-01, 8.7492e-01, 3.2309e-01, 1.8141e-01, - 4.6109e-01, 9.2364e-01, 3.4677e-01, 9.5054e-01, - 9.0661e-01, 2.2803e-01, 2.8323e-01, 1.1518e-01, - 3.3279e-01, 7.6139e-01, 3.8271e-01, 2.4706e-02, - 3.9825e-01, 9.4575e-02, 1.2221e-01, 3.4147e-01, - 8.3339e-01, 5.0504e-02, 2.8865e-01, 2.4651e-01, - 3.6862e-01, 2.4803e-01, 2.1618e-01, 6.2499e-01, - 3.4456e-01, 6.0018e-02, 2.1179e-02, 1.1625e-01, - 4.2536e-01, 6.7639e-01, 3.6221e-01, 1.9357e-01, - 6.0271e-01, 7.0261e-01, 1.1726e-01, 6.0181e-01, - 8.9599e-01, 8.1322e-01, 3.7877e-01, 4.3523e-01, - 4.3914e-01, 8.8124e-01, 3.0515e-01, 6.7038e-01, - 6.0127e-01, 7.6653e-01, 2.9249e-02, 2.2096e-01, - 9.4668e-01, 8.5878e-01, 4.5109e-01, 4.7936e-01, - 6.5717e-01, 6.8862e-01, 7.6992e-01, 9.3225e-01, - 8.5394e-03, 4.3708e-01, 7.2233e-01, 1.6987e-01, - 5.4738e-02, 2.4823e-02, 6.7652e-01, 1.7166e-01, - 1.1735e-01, 2.5575e-01, 7.2176e-01, 3.0234e-01, - 7.1567e-01, 4.3413e-01, 8.1045e-01, 4.8844e-01, - 1.4022e-01, 7.3062e-01, 4.2223e-01, 4.7326e-01, - 1.9276e-02, 4.8944e-01, 4.0502e-01, 8.2884e-01, - 9.5804e-01, 1.5109e-01, 6.8776e-01, 3.4887e-02, - 6.7033e-01, 7.8144e-01, 1.8732e-01, 3.9434e-01, - 7.8051e-01, 5.3829e-01, 4.1271e-01, 2.6616e-01, - 1.6403e-01, 5.4211e-01, 4.1375e-01, 3.8938e-01, - 3.9980e-01, 8.9495e-01, 9.2956e-01, 5.7320e-01, - 4.4129e-01, 7.3360e-01, 4.8990e-01, 7.0508e-01, - 9.3550e-01, 2.6797e-01, 1.4940e-01, 2.8127e-01, - 5.0946e-01, 8.7169e-01, 5.9653e-01, 5.9401e-01, - 2.2895e-01, 7.8885e-01, 9.8315e-01, 8.8748e-01, - 7.5977e-01, 6.6367e-01, 2.3954e-01, 3.7040e-01, - 1.1940e-01, 4.6258e-01, 9.2222e-01, 4.4173e-01, - 7.7064e-01, 2.1576e-01, 2.7241e-01, 1.4843e-01, - 6.5290e-01, 3.1076e-01, 4.9139e-01, 2.5075e-02, - 9.6030e-01, 9.1738e-01, 
6.8115e-01, 8.9410e-01, - 9.2336e-01, 3.2098e-01, 9.9084e-01, 2.9840e-01, - 9.3566e-01, 8.7476e-01, 7.3520e-01, 7.4897e-01, - 5.0484e-01, 2.8760e-01, 5.5015e-01, 3.5047e-01, - 9.0398e-01, 4.8261e-01, 4.4223e-02, 2.8056e-01, - 2.3169e-01, 3.8516e-01, 5.1483e-01, 3.1543e-01, - 6.3024e-01, 9.4733e-02, 4.7810e-01, 5.5539e-01, - 5.7289e-02, 3.2199e-01, 5.0710e-01, 7.9558e-01, - 1.6270e-01, 7.9383e-01, 1.4537e-02, 8.9211e-01, - 3.7452e-01, 2.0368e-01, 5.9692e-01, 2.5907e-01, - 9.5016e-01, 6.1173e-01, 5.1952e-01, 5.1345e-01, - 8.7632e-01, 1.7394e-01, 2.4675e-01, 7.4916e-01, - 9.8520e-02, 4.5166e-01, 9.0804e-01, 8.2228e-01, - 1.9968e-01, 8.0784e-01, 9.4766e-01, 7.3145e-01, - 4.4231e-01, 5.8073e-01, 9.6792e-01, 1.5536e-01, - 1.6897e-01, 4.6301e-02, 6.9308e-01, 6.5970e-01, - 7.7933e-01, 7.7180e-01, 7.6291e-01, 6.9430e-01, - 2.8253e-01, 9.3597e-01, 5.5534e-01, 7.0535e-01, - 5.7773e-01, 1.7953e-01, 8.2295e-02, 6.2018e-01, - 5.0550e-01, 7.1159e-01, 1.0988e-01, 4.4791e-01, - 9.1384e-01, 9.0103e-01, 3.6527e-01, 6.0135e-02, - 2.6150e-02, 4.6299e-01, 2.4526e-01, 3.1743e-01, - 4.2068e-01, 3.1271e-01, 7.5573e-01, 2.9495e-01, - 8.0565e-01, 2.3590e-01, 9.1131e-01, 2.8576e-01, - 2.0058e-01, 5.4518e-01, 6.9041e-01, 5.0889e-01, - 7.8903e-01, 6.8462e-01, 1.1413e-01, 2.1967e-01, - 1.0436e-01, 5.2052e-03, 5.2858e-01, 7.2315e-01, - 5.8394e-01, 7.4019e-01, 1.9528e-01, 9.0704e-01, - 5.3717e-01, 5.1037e-01, 9.7610e-01, 6.4359e-01, - 6.7128e-01, 4.9653e-01, 8.6929e-01, 7.7971e-01, - 8.0348e-01, 9.0766e-01, 7.7183e-01, 3.4928e-01, - 3.9630e-01, 3.8015e-01, 3.9147e-01, 8.9063e-01, - 7.3734e-01, 6.4280e-01, 2.0891e-01, 7.2230e-01, - 6.9356e-01, 2.6885e-01, 7.5346e-01, 1.8015e-02, - 9.7342e-01, 7.5843e-01, 3.9930e-02, 5.3224e-01, - 2.4238e-01, 7.0185e-01, 4.2115e-01, 7.0334e-01, - 9.0349e-02, 6.6656e-01, 3.1757e-01, 6.7208e-01, - 5.6411e-01, 4.9627e-01, 5.3507e-01, 9.0409e-01, - 6.4014e-01, 8.4199e-01, 5.6660e-01, 1.3654e-01, - 7.8412e-01, 4.1112e-01, 2.8374e-01, 6.5040e-01, - 9.6597e-01, 1.1932e-01, 1.4265e-01, 5.1651e-01, - 6.2252e-01, 5.9412e-01, 5.8400e-01, 4.7015e-01, - 8.7865e-01, 2.1175e-01, 4.5409e-01, 9.5729e-01, - 6.2691e-01, 4.0234e-01, 8.7349e-01, 9.0944e-01]), + col_indices=tensor([9288, 3298, 3634, 5170, 8018, 1715, 1139, 1869, 3521, + 5242, 9127, 5369, 5699, 8246, 5341, 1901, 1883, 821, + 9886, 705, 9233, 9625, 9573, 6688, 8904, 6917, 1052, + 3308, 7039, 5104, 5420, 876, 3195, 9360, 7712, 4061, + 3126, 9220, 1808, 7051, 8252, 6261, 3941, 4593, 6372, + 9140, 8421, 8264, 8830, 7289, 9984, 2637, 7992, 3687, + 5215, 6014, 8773, 9621, 4597, 2388, 2720, 7070, 5858, + 4798, 7405, 9234, 5234, 7822, 5582, 30, 7436, 3652, + 5955, 9434, 7649, 5576, 1383, 5420, 38, 2469, 8763, + 4591, 858, 7212, 7499, 7856, 2162, 3280, 8016, 7303, + 9125, 8426, 4647, 1830, 2678, 8197, 931, 8871, 8762, + 8999, 5402, 3033, 8578, 3957, 1908, 1413, 2061, 6814, + 8457, 7962, 6922, 4279, 9727, 1897, 9495, 108, 755, + 500, 4647, 5448, 802, 1674, 9481, 6388, 6703, 6856, + 5001, 4997, 4471, 8560, 9527, 6884, 8752, 8124, 3382, + 1724, 4784, 8154, 9486, 6490, 6216, 3287, 585, 4062, + 347, 5562, 316, 5740, 408, 8274, 7066, 2896, 8839, + 5852, 1061, 5931, 1975, 7218, 4550, 7176, 7091, 8460, + 1885, 2663, 2798, 8598, 7578, 5680, 5951, 8982, 6439, + 8407, 8191, 2344, 7367, 7864, 8949, 9156, 8178, 9531, + 9081, 4881, 8401, 4127, 5615, 1196, 6268, 9757, 7535, + 3945, 7295, 7151, 6125, 7061, 9246, 6653, 4177, 5436, + 7497, 1693, 7845, 8708, 7897, 5733, 2016, 1386, 3452, + 4262, 6328, 7118, 9073, 8356, 1111, 5943, 3238, 4188, + 5123, 951, 6302, 2013, 82, 
9974, 4047, 426, 5119, + 5238, 2828, 7836, 7929, 9948, 2323, 1872, 2821, 3553, + 3601, 8490, 4343, 5335, 4461, 9033, 5904, 5410, 2213, + 2933, 3342, 1371, 1708, 3346, 4206, 3910, 584, 6781, + 9882, 8249, 6423, 3253, 7619, 3312, 5257, 6275, 3658, + 7860, 4054, 7735, 1048, 1180, 9466, 4416, 5746, 9774, + 4549, 226, 3434, 2684, 7029, 2164, 2670, 690, 7000, + 6045, 1768, 8015, 8297, 9656, 7968, 9523, 4588, 6313, + 1671, 5608, 1776, 1220, 5458, 1288, 4889, 1379, 8204, + 9833, 4057, 1052, 1693, 6521, 4039, 7923, 2335, 2234, + 1390, 9914, 4344, 5740, 356, 8692, 375, 7254, 1187, + 7893, 5980, 6786, 4412, 1011, 4573, 6323, 5398, 5738, + 3213, 8035, 5607, 5047, 2163, 6611, 4864, 8525, 5485, + 2103, 2599, 720, 8448, 8308, 7076, 2262, 4200, 5509, + 1049, 5050, 1181, 9073, 5080, 9942, 9637, 9255, 6527, + 3832, 9642, 8779, 7533, 8899, 8457, 4664, 9658, 7407, + 5431, 4653, 306, 7097, 258, 7814, 6879, 8856, 2122, + 8653, 3626, 5933, 6411, 7778, 2120, 9787, 9190, 3340, + 9327, 4587, 9328, 7313, 3004, 7439, 5516, 5294, 8697, + 7102, 1077, 5732, 8211, 5820, 6434, 2574, 4477, 7847, + 9772, 2420, 3751, 4622, 8024, 2667, 8618, 157, 4265, + 7997, 1029, 652, 5826, 3964, 9452, 1324, 3931, 3948, + 1625, 5322, 9277, 6818, 7933, 8939, 9821, 9311, 3659, + 8293, 5307, 5188, 9212, 4916, 4698, 8229, 4245, 1169, + 1624, 1095, 896, 1767, 6870, 143, 1828, 5388, 8526, + 6860, 6740, 103, 9066, 4953, 8336, 1565, 2135, 8792, + 6390, 2212, 2501, 2564, 55, 1858, 1062, 1691, 8036, + 354, 7175, 1657, 9664, 502, 8597, 9700, 1733, 2998, + 978, 6490, 131, 3822, 4826, 6139, 5655, 2769, 251, + 6310, 4866, 9702, 7199, 4698, 1183, 3732, 6933, 3086, + 6669, 2479, 7520, 5156, 6603, 1221, 961, 7374, 1796, + 5417, 5601, 4346, 8249, 1621, 6430, 5483, 2593, 820, + 3647, 686, 3931, 252, 5402, 4550, 2276, 5591, 3933, + 3091, 2374, 5092, 785, 9608, 9065, 1733, 3312, 3675, + 6983, 1318, 2241, 8568, 9681, 240, 1441, 3609, 5513, + 3187, 6930, 5447, 4937, 4209, 4634, 7604, 8346, 9976, + 4525, 6723, 8156, 8607, 4384, 9708, 8417, 9085, 676, + 4751, 1472, 6947, 5944, 8843, 5547, 5060, 3471, 1945, + 6407, 2874, 3782, 1003, 2112, 4447, 6823, 9448, 7485, + 7828, 2296, 9525, 473, 2406, 4997, 4290, 2393, 5684, + 4174, 9270, 7086, 9889, 6266, 3630, 7380, 9378, 4988, + 6164, 8421, 4655, 849, 7005, 4932, 9325, 6786, 4232, + 4018, 8025, 1567, 6212, 1710, 5523, 9639, 829, 8805, + 1785, 6566, 6722, 1041, 5610, 8473, 4711, 32, 7683, + 8780, 4935, 5569, 9187, 2227, 6057, 1724, 8515, 7843, + 4608, 4107, 6327, 7152, 5092, 5127, 5443, 5565, 2776, + 8924, 9006, 7422, 7951, 4301, 6421, 5744, 5394, 2801, + 7895, 5716, 6584, 395, 3584, 7226, 8910, 7360, 1693, + 9386, 8625, 5388, 8694, 9957, 5764, 8964, 2328, 5063, + 4507, 2238, 576, 199, 2265, 5936, 6179, 8913, 2889, + 3260, 8899, 7988, 4299, 4168, 8521, 9707, 2823, 5600, + 21, 3625, 8028, 7140, 9999, 5326, 5209, 1012, 2846, + 9010, 836, 1652, 1507, 5450, 9891, 6942, 4901, 4605, + 5960, 2079, 7919, 5213, 7908, 7455, 8444, 1232, 9052, + 2140, 6410, 5325, 7902, 7223, 7776, 915, 1759, 9183, + 163, 3642, 5089, 8586, 5776, 8110, 2813, 9268, 6110, + 9471, 6244, 6799, 7287, 7540, 9438, 7589, 9172, 3618, + 921, 5301, 3806, 3773, 1848, 8101, 4386, 9818, 4431, + 8316, 4410, 8809, 5437, 2926, 2907, 560, 685, 4364, + 778, 465, 9049, 3235, 3359, 1980, 1955, 3751, 2294, + 3595, 4144, 848, 8235, 7286, 6333, 4032, 907, 7967, + 190, 1228, 1937, 8391, 9125, 8738, 2844, 3507, 6591, + 3327, 9597, 8142, 8352, 3736, 2327, 2605, 5898, 9684, + 6148, 161, 3592, 2676, 597, 3986, 4113, 7315, 6834, + 5232, 2387, 6465, 2121, 9841, 8605, 8394, 1925, 
6753, + 898, 9245, 4375, 6173, 9872, 1057, 7933, 669, 3638, + 8799, 3624, 6871, 4510, 4274, 275, 5436, 2985, 1644, + 5855, 7916, 8329, 7492, 5438, 6015, 3734, 3404, 4903, + 4297, 3210, 530, 4902, 6635, 1771, 1174, 3556, 7568, + 250, 933, 3951, 1396, 1112, 6305, 8448, 4943, 6780, + 8204, 8788, 6627, 5271, 2677, 4356, 6602, 6332, 6313, + 5286, 2781, 1639, 5982, 4780, 5978, 6839, 8512, 8164, + 2045, 5219, 7967, 209, 2308, 8366, 631, 3470, 9620, + 5482, 7808, 1383, 8585, 8906, 8683, 5929, 4783, 6389, + 72, 5980, 9849, 4256, 5339, 3304, 2736, 5052, 6256, + 4363, 3229, 7494, 5523, 5828, 2160, 5713, 6141, 333, + 9677, 2396, 4824, 6162, 6965, 7136, 5661, 9006, 2338, + 8213, 7837, 3252, 5738, 7393, 8406, 8353, 1306, 2638, + 4235, 6017, 8087, 752, 5135, 9719, 8281, 5674, 8158, + 7782, 5027, 1145, 4637, 5405, 6566, 5946, 9136, 1948, + 2274, 2084, 2912, 4769, 2187, 9161, 5664, 2429, 4234, + 9621, 7687, 730, 4745, 3582, 2693, 9527, 8348, 5023, + 1387, 6375, 828, 1412, 1641, 3172, 7874, 5006, 3903, + 2050, 9818, 1152, 2702, 9295, 5178, 4774, 7253, 9601, + 1981, 6604, 1236, 3236, 4781, 5742, 984, 9449, 1495, + 8792, 1746, 2704, 3520, 7356, 3831, 3463, 1393, 908, + 8946, 5137, 8036, 9712, 3866, 2182, 9588, 232, 5526, + 3280]), + values=tensor([0.7146, 0.6525, 0.1052, 0.6890, 0.6584, 0.3784, 0.5436, + 0.6410, 0.5107, 0.4144, 0.7905, 0.2279, 0.8736, 0.0557, + 0.5923, 0.3624, 0.9415, 0.4681, 0.5885, 0.5572, 0.0489, + 0.9430, 0.9033, 0.9170, 0.3539, 0.3662, 0.0563, 0.2936, + 0.7396, 0.6167, 0.8384, 0.8020, 0.2347, 0.2472, 0.5466, + 0.7867, 0.2123, 0.5649, 0.3018, 0.8239, 0.8481, 0.3142, + 0.1247, 0.5711, 0.9978, 0.2706, 0.2226, 0.6008, 0.5136, + 0.2829, 0.7810, 0.2385, 0.0310, 0.2838, 0.5470, 0.6038, + 0.7403, 0.1705, 0.3468, 0.8728, 0.7452, 0.6008, 0.6206, + 0.0521, 0.3402, 0.6819, 0.7374, 0.6532, 0.8491, 0.4550, + 0.5985, 0.2882, 0.6254, 0.1795, 0.9407, 0.5925, 0.3933, + 0.6481, 0.8631, 0.6084, 0.3538, 0.3000, 0.5842, 0.0353, + 0.0346, 0.5053, 0.5157, 0.9751, 0.8209, 0.6467, 0.2856, + 0.8295, 0.1369, 0.1135, 0.6719, 0.4554, 0.9952, 0.7801, + 0.4687, 0.2258, 0.1250, 0.2524, 0.1939, 0.5935, 0.1456, + 0.9833, 0.1454, 0.9609, 0.3322, 0.2893, 0.3485, 0.0582, + 0.7510, 0.4586, 0.2627, 0.1614, 0.4177, 0.7801, 0.2520, + 0.1126, 0.1830, 0.7881, 0.4972, 0.6477, 0.8190, 0.3643, + 0.8449, 0.6637, 0.0049, 0.0925, 0.4303, 0.6539, 0.8794, + 0.3103, 0.3954, 0.1691, 0.6100, 0.6288, 0.1161, 0.0561, + 0.3591, 0.1723, 0.0135, 0.5303, 0.0993, 0.8883, 0.6854, + 0.6585, 0.3240, 0.8883, 0.4776, 0.6078, 0.1187, 0.1750, + 0.6445, 0.6126, 0.0177, 0.4247, 0.1673, 0.7494, 0.3137, + 0.7069, 0.9365, 0.3607, 0.2503, 0.3778, 0.4968, 0.4639, + 0.1086, 0.6283, 0.4990, 0.0516, 0.1899, 0.8137, 0.5603, + 0.4083, 0.3862, 0.0851, 0.3729, 0.0551, 0.0253, 0.2326, + 0.4458, 0.1761, 0.2212, 0.2958, 0.6610, 0.9485, 0.6159, + 0.7635, 0.9002, 0.6163, 0.5078, 0.2453, 0.1441, 0.5866, + 0.4669, 0.1860, 0.3379, 0.4058, 0.4224, 0.1777, 0.3366, + 0.1787, 0.8191, 0.6271, 0.3075, 0.9856, 0.2059, 0.0377, + 0.1185, 0.1468, 0.8902, 0.8700, 0.3982, 0.5171, 0.8556, + 0.3922, 0.0856, 0.2570, 0.3428, 0.4248, 0.0400, 0.7098, + 0.4396, 0.9539, 0.7413, 0.5648, 0.1076, 0.6132, 0.4467, + 0.8536, 0.0286, 0.7758, 0.8456, 0.8008, 0.7358, 0.8477, + 0.9337, 0.5047, 0.4916, 0.7509, 0.4453, 0.2325, 0.2627, + 0.5044, 0.5449, 0.6597, 0.3100, 0.6083, 0.0632, 0.8185, + 0.0788, 0.1720, 0.4937, 0.4330, 0.2132, 0.8951, 0.6612, + 0.0864, 0.9756, 0.3150, 0.5508, 0.3777, 0.1498, 0.4459, + 0.9262, 0.9946, 0.3538, 0.1965, 0.5015, 0.5343, 0.7101, + 0.9664, 0.9511, 0.2278, 0.3076, 0.2252, 
0.8249, 0.4850, + 0.9216, 0.2261, 0.5741, 0.7502, 0.6319, 0.4515, 0.5183, + 0.4512, 0.2016, 0.8156, 0.4701, 0.0796, 0.4170, 0.6989, + 0.4843, 0.6484, 0.7160, 0.1808, 0.9072, 0.8760, 0.5680, + 0.0161, 0.1363, 0.5167, 0.7823, 0.6798, 0.1868, 0.2205, + 0.4492, 0.4467, 0.3582, 0.3026, 0.1021, 0.8544, 0.5327, + 0.7102, 0.6085, 0.2263, 0.5381, 0.5085, 0.3055, 0.7644, + 0.6274, 0.4394, 0.8918, 0.1718, 0.4861, 0.1741, 0.8791, + 0.0746, 0.4705, 0.6733, 0.9841, 0.9001, 0.4643, 0.7648, + 0.5706, 0.2436, 0.1455, 0.6661, 0.8455, 0.1107, 0.2933, + 0.6226, 0.2243, 0.7520, 0.8050, 0.5806, 0.0593, 0.6110, + 0.9298, 0.6022, 0.6341, 0.4901, 0.1933, 0.9631, 0.6116, + 0.4474, 0.1828, 0.1094, 0.4736, 0.1455, 0.1351, 0.7242, + 0.4874, 0.1099, 0.0258, 0.9850, 0.7339, 0.5101, 0.8635, + 0.9298, 0.2516, 0.4980, 0.3365, 0.4980, 0.4200, 0.8485, + 0.1193, 0.1464, 0.6455, 0.4916, 0.5360, 0.0683, 0.9776, + 0.3418, 0.2872, 0.2214, 0.9623, 0.5194, 0.9299, 0.5108, + 0.1902, 0.2221, 0.8158, 0.0292, 0.7626, 0.9732, 0.4482, + 0.5709, 0.5365, 0.1947, 0.5075, 0.3303, 0.4792, 0.5892, + 0.7740, 0.1180, 0.9221, 0.6886, 0.1619, 0.5363, 0.3208, + 0.5788, 0.3671, 0.1327, 0.0316, 0.6587, 0.1588, 0.8157, + 0.6977, 0.8618, 0.8963, 0.1249, 0.0689, 0.7138, 0.7960, + 0.3825, 0.8229, 0.4965, 0.5367, 0.9079, 0.0058, 0.1546, + 0.8701, 0.0443, 0.5009, 0.8267, 0.4285, 0.2212, 0.1615, + 0.3589, 0.7877, 0.9913, 0.7793, 0.9271, 0.5720, 0.8537, + 0.6803, 0.3593, 0.4933, 0.3597, 0.8578, 0.7052, 0.8304, + 0.0474, 0.6227, 0.6475, 0.7974, 0.1420, 0.8012, 0.1837, + 0.8143, 0.2916, 0.9482, 0.0848, 0.4279, 0.2855, 0.9128, + 0.9175, 0.3423, 0.7702, 0.4857, 0.1045, 0.4602, 0.2257, + 0.7340, 0.8322, 0.8662, 0.5234, 0.8539, 0.6181, 0.7141, + 0.7308, 0.4134, 0.1871, 0.8928, 0.8492, 0.9852, 0.8246, + 0.1075, 0.9758, 0.9964, 0.2371, 0.7682, 0.2998, 0.7042, + 0.4289, 0.8715, 0.6155, 0.0226, 0.5334, 0.3928, 0.6015, + 0.0180, 0.5616, 0.0732, 0.9315, 0.1938, 0.1483, 0.9618, + 0.4847, 0.5867, 0.2351, 0.4266, 0.0611, 0.9694, 0.4602, + 0.3006, 0.9283, 0.7954, 0.5240, 0.3156, 0.1409, 0.6876, + 0.6065, 0.4165, 0.0323, 0.6261, 0.0996, 0.4714, 0.1307, + 0.7728, 0.8576, 0.7022, 0.6499, 0.8820, 0.4651, 0.1632, + 0.6291, 0.7876, 0.6474, 0.8615, 0.9928, 0.9775, 0.0583, + 0.5599, 0.5797, 0.9543, 0.2935, 0.5633, 0.0033, 0.6425, + 0.1241, 0.3323, 0.4266, 0.5237, 0.5571, 0.7860, 0.1726, + 0.7193, 0.2448, 0.4613, 0.3313, 0.7598, 0.7997, 0.1645, + 0.6487, 0.3309, 0.4309, 0.4901, 0.8275, 0.6882, 0.8249, + 0.8910, 0.2303, 0.9941, 0.2029, 0.3770, 0.8274, 0.1803, + 0.0994, 0.1550, 0.4560, 0.9566, 0.8616, 0.5037, 0.5335, + 0.3257, 0.9915, 0.6589, 0.4614, 0.9744, 0.4922, 0.0746, + 0.7241, 0.7449, 0.7854, 0.0863, 0.0545, 0.1671, 0.1981, + 0.1788, 0.1223, 0.0276, 0.0740, 0.6502, 0.3448, 0.7941, + 0.7222, 0.5196, 0.8292, 0.1873, 0.9945, 0.6916, 0.1213, + 0.4181, 0.3205, 0.6818, 0.4960, 0.4579, 0.8514, 0.8807, + 0.6099, 0.2274, 0.7019, 0.4841, 0.8355, 0.9877, 0.6709, + 0.0479, 0.7602, 0.2590, 0.2824, 0.0759, 0.6326, 0.2668, + 0.9746, 0.3199, 0.7417, 0.4741, 0.1568, 0.8634, 0.7549, + 0.9518, 0.9993, 0.3844, 0.4609, 0.3430, 0.1254, 0.8464, + 0.5544, 0.5984, 0.6937, 0.2180, 0.4335, 0.8841, 0.9878, + 0.1584, 0.0111, 0.9883, 0.5348, 0.3152, 0.7253, 0.8282, + 0.8053, 0.3027, 0.8374, 0.5607, 0.5994, 0.1455, 0.9568, + 0.4966, 0.5092, 0.1319, 0.7915, 0.8986, 0.9106, 0.6912, + 0.6812, 0.2598, 0.4143, 0.1099, 0.4834, 0.7282, 0.8346, + 0.0435, 0.7268, 0.0863, 0.6116, 0.8716, 0.6626, 0.5510, + 0.9536, 0.3662, 0.9334, 0.6195, 0.3021, 0.6574, 0.3874, + 0.5916, 0.0290, 0.1369, 0.6431, 0.0704, 0.2622, 0.0515, 
+ 0.2519, 0.6198, 0.7448, 0.0956, 0.7411, 0.8983, 0.1515, + 0.6164, 0.6921, 0.8648, 0.7240, 0.4876, 0.7760, 0.5871, + 0.7411, 0.6958, 0.2717, 0.1148, 0.2652, 0.0798, 0.0356, + 0.0973, 0.3904, 0.6654, 0.1442, 0.7171, 0.9852, 0.8594, + 0.5000, 0.2335, 0.1471, 0.7300, 0.0252, 0.3813, 0.4025, + 0.6179, 0.9533, 0.4752, 0.8527, 0.7388, 0.1743, 0.8642, + 0.8011, 0.6421, 0.8161, 0.6860, 0.1933, 0.4807, 0.8334, + 0.4124, 0.0140, 0.3218, 0.0847, 0.6266, 0.8214, 0.2658, + 0.4654, 0.3402, 0.9601, 0.0156, 0.5859, 0.8363, 0.5144, + 0.0546, 0.9466, 0.3080, 0.6467, 0.4221, 0.1709, 0.6142, + 0.6729, 0.9083, 0.4387, 0.4202, 0.9838, 0.9426, 0.3597, + 0.4630, 0.1608, 0.1156, 0.4372, 0.1354, 0.0886, 0.4914, + 0.5636, 0.6365, 0.6393, 0.5700, 0.0345, 0.4416, 0.0210, + 0.2094, 0.1698, 0.7673, 0.2708, 0.0616, 0.9284, 0.0257, + 0.1633, 0.0720, 0.2980, 0.5191, 0.3886, 0.4427, 0.5312, + 0.8293, 0.8055, 0.5224, 0.6134, 0.4744, 0.2121, 0.1825, + 0.6258, 0.2930, 0.7885, 0.9932, 0.5734, 0.5631, 0.4842, + 0.7207, 0.9370, 0.7399, 0.7297, 0.8183, 0.6456, 0.8591, + 0.5354, 0.4124, 0.8520, 0.6092, 0.2848, 0.4444, 0.3303, + 0.7717, 0.9173, 0.0319, 0.2683, 0.0432, 0.6692, 0.3523, + 0.6996, 0.0537, 0.9570, 0.0509, 0.5143, 0.2669, 0.4119, + 0.5407, 0.9785, 0.4823, 0.9552, 0.7405, 0.7546, 0.2189, + 0.3744, 0.9490, 0.5313, 0.4213, 0.4124, 0.1213, 0.9790, + 0.6300, 0.3742, 0.5771, 0.2657, 0.8726, 0.1365, 0.4854, + 0.5089, 0.0167, 0.3394, 0.0681, 0.9723, 0.1350, 0.2206, + 0.8750, 0.8752, 0.8184, 0.8514, 0.0117, 0.6282, 0.3200, + 0.3112, 0.7198, 0.0167, 0.1883, 0.0110, 0.1089, 0.9323, + 0.6887, 0.1484, 0.2933, 0.5159, 0.3548, 0.4945, 0.5380, + 0.7491, 0.7059, 0.1619, 0.5138, 0.3987, 0.5223, 0.5911, + 0.0361, 0.5428, 0.1706, 0.1836, 0.7978, 0.1776, 0.5103, + 0.4852, 0.1991, 0.1333, 0.7645, 0.8586, 0.5581, 0.3272, + 0.0993, 0.1904, 0.6009, 0.1444, 0.4742, 0.4376, 0.8669, + 0.6029, 0.7076, 0.6444, 0.9461, 0.3736, 0.9837, 0.8338, + 0.4227, 0.4500, 0.2587, 0.6040, 0.8561, 0.2250, 0.2190, + 0.4660, 0.2598, 0.4868, 0.3985, 0.9089, 0.0855, 0.5292, + 0.7004, 0.4736, 0.9876, 0.4093, 0.7376, 0.1682, 0.9065, + 0.3558, 0.2015, 0.0204, 0.0232, 0.3700, 0.4520, 0.0517, + 0.3701, 0.0428, 0.3821, 0.0376, 0.4264, 0.4894, 0.0264, + 0.2934, 0.7715, 0.0475, 0.4972, 0.4763, 0.7381, 0.0581, + 0.4297, 0.2473, 0.9622, 0.8031, 0.4876, 0.3660, 0.3001, + 0.9864, 0.7705, 0.2207, 0.7099, 0.7682, 0.1747, 0.2131, + 0.1442, 0.6566, 0.0198, 0.5217, 0.5235, 0.0168]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.3846, 0.0429, 0.8390, ..., 0.4444, 0.4671, 0.5693]) +tensor([0.4657, 0.6211, 0.7331, ..., 0.5211, 0.3283, 0.1551]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -375,378 +268,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 0.02031111717224121 seconds +Time: 0.021244287490844727 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '51695', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.430596351623535} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '49425', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.291856288909912} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([2944, 4631, 7838, 64, 7093, 5206, 1028, 310, 9606, - 1018, 6902, 7057, 1823, 4013, 4610, 9989, 4269, 4529, - 6204, 2190, 9040, 4125, 4186, 8485, 9100, 1208, 6942, - 5613, 5410, 982, 7663, 9703, 3365, 643, 8709, 5149, - 1560, 3811, 7285, 2169, 4837, 435, 8158, 77, 5709, - 4392, 8477, 214, 5447, 1230, 2352, 7618, 2372, 1855, - 9698, 8138, 9945, 2370, 5687, 685, 4433, 106, 4886, - 6383, 6506, 8296, 2570, 6015, 7735, 8565, 6870, 6825, - 6162, 5406, 357, 4114, 3650, 9189, 4601, 615, 2109, - 9546, 583, 5240, 2983, 5584, 3664, 6211, 9643, 427, - 3253, 1697, 3784, 7720, 8331, 144, 5167, 9371, 3050, - 8120, 311, 7247, 526, 2016, 559, 5116, 960, 9939, - 4437, 7192, 3190, 5204, 1362, 3918, 6955, 3497, 7938, - 3584, 768, 9858, 357, 7248, 4890, 3019, 5465, 2250, - 6450, 6315, 1323, 7946, 4804, 9385, 2390, 5192, 6218, - 6486, 9725, 8061, 6921, 2589, 581, 1321, 8748, 5258, - 8932, 3600, 1815, 3765, 4806, 1864, 8606, 4321, 8281, - 1210, 8564, 1395, 944, 9269, 4578, 851, 2516, 3733, - 4122, 5377, 8566, 3856, 7526, 2900, 7004, 3137, 4286, - 5951, 6434, 855, 2440, 3924, 4064, 8268, 6266, 7614, - 8695, 6065, 890, 1123, 3892, 8571, 852, 7199, 2977, - 6191, 6117, 9295, 9139, 1914, 8396, 8082, 1888, 5920, - 3434, 2684, 5852, 8569, 776, 8843, 6510, 1633, 8834, - 8526, 1580, 8173, 282, 6642, 7312, 3586, 1518, 1005, - 4706, 4163, 5932, 3672, 2153, 7059, 4002, 6969, 6411, - 6065, 7536, 1968, 2781, 6687, 7194, 6920, 2851, 1789, - 9756, 3005, 9630, 99, 2131, 1722, 5637, 9061, 5906, - 8732, 6783, 9448, 5100, 3577, 9652, 6175, 6234, 6704, - 7274, 4940, 771, 1465, 3148, 2594, 9088, 4863, 4452, - 4262, 3763, 6315, 2297, 9967, 167, 5404, 7440, 1234, - 4491, 9830, 2721, 8188, 3813, 5248, 4965, 9264, 2428, - 8783, 9884, 7556, 5232, 1866, 4881, 4240, 444, 3067, - 767, 9531, 8720, 9797, 8425, 5431, 8011, 9312, 4880, - 8821, 3698, 2971, 5, 3673, 267, 2436, 5203, 2055, - 6531, 9865, 8191, 8816, 5113, 1324, 2406, 853, 5434, - 2987, 1271, 1838, 833, 6506, 1146, 2917, 8166, 7909, - 3648, 1782, 1802, 2838, 4143, 631, 2015, 75, 9460, - 3849, 2112, 456, 3786, 8968, 9520, 6835, 7144, 7621, - 2315, 8703, 1731, 2031, 7103, 9099, 6237, 2016, 8915, - 5251, 258, 3215, 1913, 1648, 6070, 6349, 537, 3231, - 9264, 7547, 5721, 2677, 3735, 2594, 2432, 4830, 5593, - 5054, 5952, 3001, 8697, 8593, 1261, 9838, 9624, 9968, - 7941, 1741, 9156, 5234, 2249, 5886, 5882, 6287, 151, - 4253, 9286, 5777, 3328, 1300, 7552, 1150, 698, 5598, - 6779, 6880, 7828, 8409, 1847, 1177, 9423, 5339, 8040, - 2933, 226, 8668, 9532, 829, 7683, 1021, 5184, 1989, - 6222, 4187, 646, 8577, 348, 4530, 3057, 139, 8413, - 9470, 3528, 8527, 8781, 9727, 2925, 7182, 2238, 8597, - 1505, 4920, 6681, 6866, 4885, 4952, 1302, 8353, 6525, - 4239, 278, 7723, 9708, 2951, 4329, 9499, 847, 1705, - 7179, 4713, 4933, 6431, 7397, 1171, 1797, 9375, 2124, - 9648, 2158, 151, 8529, 5910, 7872, 8196, 6347, 4852, - 6362, 8322, 3066, 4255, 1267, 8971, 9505, 2218, 8058, - 
7670, 2180, 5402, 5997, 6533, 932, 8160, 4883, 4362, - 4475, 5441, 4918, 5223, 9438, 3376, 2793, 964, 8748, - 1984, 1432, 1708, 8439, 2947, 4968, 6368, 6713, 6795, - 3148, 9913, 217, 5345, 6001, 8332, 1552, 1199, 7206, - 1308, 7825, 5277, 8289, 85, 2333, 5765, 226, 8927, - 8002, 7029, 1600, 6557, 5595, 3577, 8420, 953, 281, - 5437, 1674, 7978, 3961, 278, 6180, 7438, 6902, 7475, - 9302, 154, 2101, 2559, 9637, 8412, 7907, 1203, 2401, - 9205, 9268, 671, 2239, 1644, 2845, 8071, 5971, 7992, - 5268, 5658, 3402, 9075, 117, 9708, 7839, 6237, 1366, - 4248, 5405, 3450, 2823, 2678, 8128, 3077, 7890, 5525, - 352, 7953, 8994, 2792, 9843, 6836, 2444, 23, 8606, - 5482, 538, 7248, 2671, 2636, 4875, 6392, 1634, 9039, - 206, 8557, 4262, 6345, 2787, 5186, 9690, 3313, 7196, - 8187, 4688, 3636, 5699, 5851, 2918, 4441, 2561, 4610, - 2129, 7033, 6, 654, 3222, 6073, 7625, 8473, 6191, - 2880, 70, 1031, 6689, 6971, 2004, 7000, 9676, 3644, - 2140, 5806, 8059, 6633, 4320, 1663, 4526, 3203, 4616, - 869, 2766, 1163, 1210, 2331, 4282, 6403, 6555, 6365, - 7000, 9544, 3550, 4307, 1368, 6969, 8833, 3383, 9978, - 1612, 6654, 7073, 7110, 9566, 3445, 7597, 360, 3236, - 7275, 5924, 226, 8020, 320, 4244, 2760, 6608, 2555, - 7919, 984, 1478, 3569, 1644, 9945, 9097, 9180, 2886, - 7713, 3094, 1356, 6989, 8789, 3839, 5615, 1182, 7409, - 5000, 430, 5040, 7926, 3097, 6697, 4245, 2151, 950, - 4330, 6071, 639, 3674, 5561, 6529, 7043, 7697, 1166, - 9926, 5506, 4062, 141, 6776, 1000, 5889, 1811, 3805, - 4499, 742, 9756, 6422, 3115, 1421, 2148, 4577, 849, - 2703, 9046, 5675, 6060, 9740, 1043, 1133, 2568, 6264, - 8365, 2819, 7233, 5911, 5860, 7890, 540, 224, 40, - 3677, 6635, 2186, 2545, 1144, 5999, 7948, 1275, 7035, - 6767, 440, 2108, 7801, 2969, 8350, 7659, 9002, 317, - 3973, 8437, 443, 2678, 8935, 6175, 9836, 8183, 4682, - 9089, 9110, 5267, 8976, 8306, 1349, 7138, 7607, 1143, - 8943, 8080, 3638, 5861, 5984, 1101, 7747, 9364, 6794, - 9386, 9795, 7275, 4821, 451, 5323, 9773, 4378, 5864, - 5634, 5713, 173, 9097, 1287, 4073, 5068, 8982, 1727, - 8774, 1428, 3104, 3715, 575, 7243, 3651, 6707, 5075, - 8914, 3157, 1446, 1023, 3445, 7439, 3411, 6822, 8426, - 4690, 8269, 6866, 4650, 7633, 3301, 1775, 9688, 3595, - 2053, 2622, 3250, 7912, 6773, 6316, 177, 2930, 6156, - 1293, 577, 1247, 6081, 5439, 1662, 4881, 8932, 4199, - 138, 9186, 2308, 2400, 770, 4059, 6081, 3117, 8087, - 5005, 7215, 4647, 6804, 6284, 198, 3580, 7361, 1611, - 7627, 5464, 1293, 4984, 2254, 7286, 5884, 9703, 5740, - 9960, 7501, 6278, 972, 9723, 1341, 1549, 9994, 8760, - 3100, 1070, 3050, 126, 7718, 9489, 9768, 6233, 8322, - 198, 1114, 1745, 6052, 5501, 54, 1988, 844, 5891, - 7468, 5693, 6195, 1125, 8757, 6530, 960, 1088, 5817, - 2609, 2322, 5466, 178, 2328, 4211, 8288, 7077, 2606, - 6127, 1116, 3683, 2034, 9765, 3690, 5390, 5302, 9888, - 373, 5882, 1973, 5371, 3576, 9266, 7825, 1187, 1677, - 5997, 3264, 2770, 7555, 2180, 1025, 5640, 6520, 351, - 7466, 7331, 398, 4140, 6895, 6471, 5153, 2278, 5771, - 9295, 4997, 1365, 9802, 7556, 5796, 7043, 4565, 8844, - 8073, 4187, 3352, 7057, 3992, 5284, 8234, 8721, 6018, - 7438, 1774, 1947, 6669, 1070, 6008, 9524, 9578, 5651, - 9208, 8515, 3069, 4213, 1533, 3772, 203, 9477, 7854, - 4029]), - values=tensor([2.3738e-01, 2.6402e-02, 5.3203e-01, 6.2427e-01, - 6.7786e-01, 6.5310e-01, 3.6557e-01, 9.7123e-01, - 8.9972e-01, 9.5830e-01, 3.4232e-01, 1.0001e-01, - 8.4545e-01, 1.7529e-01, 5.2093e-01, 2.8574e-01, - 7.7667e-01, 4.8853e-01, 8.8174e-02, 3.8671e-01, - 6.4054e-01, 1.4478e-01, 1.4161e-01, 1.3033e-01, - 5.8032e-01, 3.2700e-01, 5.7588e-01, 
6.4111e-01, - 9.3574e-01, 9.8711e-01, 2.9326e-01, 5.1547e-01, - 5.5550e-01, 9.9624e-01, 6.9869e-01, 8.7238e-01, - 1.4140e-01, 7.8895e-01, 8.0717e-01, 8.4288e-01, - 7.5389e-01, 6.4603e-01, 4.7889e-01, 2.0452e-01, - 6.3552e-01, 7.5543e-01, 4.2326e-01, 1.2044e-01, - 6.4432e-01, 4.9972e-02, 1.5813e-01, 5.4805e-01, - 7.7746e-01, 9.6780e-01, 8.1895e-01, 7.0802e-01, - 2.0264e-01, 4.3775e-01, 8.5595e-01, 2.7169e-01, - 4.0346e-01, 4.0533e-01, 1.4484e-01, 3.1479e-01, - 2.3174e-01, 2.5230e-01, 5.5513e-01, 3.5842e-01, - 8.2852e-01, 8.2638e-01, 9.5400e-01, 1.2552e-01, - 9.9323e-01, 1.6458e-01, 3.0800e-01, 4.9640e-01, - 8.8731e-01, 9.2485e-02, 3.0189e-02, 7.0178e-01, - 3.9376e-01, 6.4217e-01, 3.1370e-01, 5.0653e-01, - 4.3611e-01, 5.4295e-01, 4.0541e-01, 6.4571e-01, - 2.9565e-01, 2.7763e-01, 8.2797e-01, 7.0112e-01, - 1.8712e-01, 6.7060e-01, 2.6354e-01, 5.6298e-01, - 2.5183e-01, 9.8374e-01, 1.1535e-01, 5.6613e-01, - 6.5427e-01, 5.3770e-01, 7.1824e-01, 2.6174e-01, - 7.6309e-01, 7.0956e-01, 3.4060e-01, 8.8854e-01, - 6.3975e-01, 1.3395e-01, 1.6898e-01, 3.7819e-01, - 8.5999e-01, 8.8412e-01, 7.3320e-01, 7.1138e-01, - 2.4100e-01, 9.2249e-01, 8.4111e-01, 2.0516e-02, - 3.2336e-01, 3.0900e-01, 4.7411e-01, 4.1779e-01, - 2.4793e-01, 4.4443e-01, 7.6111e-01, 2.5346e-01, - 9.3721e-01, 9.5365e-01, 1.6967e-01, 9.2786e-01, - 2.4943e-01, 1.3252e-01, 6.9868e-01, 7.1942e-01, - 6.5647e-01, 4.3431e-01, 9.3856e-01, 5.0608e-01, - 3.8007e-01, 6.6370e-01, 3.5720e-01, 8.2733e-01, - 9.5105e-01, 5.6232e-01, 1.4629e-02, 4.8497e-01, - 1.7643e-01, 6.8013e-01, 8.6450e-01, 6.5699e-01, - 7.1222e-02, 2.5273e-01, 4.9082e-01, 3.7325e-01, - 3.1209e-03, 2.1298e-02, 4.6607e-01, 3.9429e-01, - 3.7773e-02, 4.3848e-02, 4.0839e-01, 3.0784e-01, - 2.5508e-01, 3.4852e-01, 7.1756e-01, 7.2894e-01, - 3.6554e-01, 9.2825e-01, 3.6757e-01, 5.1756e-01, - 2.1632e-01, 3.3479e-01, 9.3681e-01, 4.2513e-01, - 9.5757e-01, 9.7212e-01, 3.7930e-01, 7.9385e-01, - 7.5330e-01, 4.3880e-01, 3.0507e-02, 8.3967e-01, - 2.0001e-01, 2.5357e-01, 2.5983e-01, 4.3218e-01, - 4.0356e-02, 9.8588e-01, 1.8766e-01, 9.5399e-02, - 1.9176e-01, 4.3947e-01, 1.3454e-01, 5.5070e-01, - 2.9283e-01, 7.3724e-01, 7.5999e-01, 6.5391e-01, - 3.3144e-01, 2.0963e-01, 7.8419e-02, 3.2595e-01, - 4.8398e-01, 7.0621e-01, 9.4064e-01, 5.0536e-01, - 8.0281e-01, 2.0884e-01, 1.8452e-01, 4.2045e-01, - 7.5716e-01, 4.8000e-01, 1.3992e-01, 9.2003e-01, - 2.9919e-01, 9.8505e-02, 3.8274e-01, 4.3847e-01, - 6.4077e-01, 8.0841e-01, 3.7080e-01, 1.8314e-01, - 5.0345e-01, 6.5621e-01, 9.9426e-01, 2.6686e-01, - 5.2414e-03, 1.2980e-01, 6.4376e-01, 7.5228e-02, - 8.7038e-01, 6.2767e-01, 8.2595e-02, 7.1176e-01, - 2.3989e-01, 3.3127e-01, 2.3020e-02, 9.7489e-01, - 8.6149e-01, 1.2530e-01, 4.6233e-01, 2.9601e-01, - 5.5479e-01, 8.5166e-01, 4.3046e-01, 4.1313e-01, - 2.6780e-01, 5.4984e-01, 4.1855e-01, 4.9210e-01, - 6.2682e-01, 7.4517e-01, 6.6285e-01, 9.3085e-01, - 5.3350e-01, 2.8974e-01, 4.4394e-02, 2.8930e-01, - 7.8976e-01, 4.3678e-01, 7.6157e-01, 3.7944e-01, - 3.3591e-01, 1.2626e-01, 7.2858e-01, 9.3352e-02, - 6.7802e-01, 8.5292e-01, 7.5938e-01, 1.8850e-01, - 9.9956e-01, 5.6141e-01, 7.0078e-01, 9.1202e-01, - 8.7269e-01, 4.9572e-01, 9.6328e-01, 5.5369e-01, - 4.1234e-01, 1.2004e-01, 2.3986e-01, 9.6465e-01, - 4.2990e-01, 1.6000e-01, 4.0186e-01, 2.1829e-01, - 9.7558e-01, 2.7825e-01, 2.9610e-01, 9.7411e-01, - 8.6444e-01, 5.5775e-01, 7.9402e-02, 5.2234e-01, - 7.9853e-01, 1.9558e-01, 9.4287e-01, 1.0306e-01, - 2.8658e-01, 4.0821e-01, 7.9293e-01, 5.1165e-01, - 8.5048e-01, 3.7074e-01, 8.3318e-01, 5.7708e-01, - 5.9855e-01, 1.9828e-01, 3.4368e-01, 
4.7604e-01, - 9.6257e-01, 7.8500e-01, 1.6163e-02, 1.5176e-01, - 3.1112e-01, 3.7056e-02, 7.2264e-01, 3.0160e-02, - 4.1949e-01, 9.7850e-01, 1.0112e-01, 8.4497e-01, - 7.3483e-01, 6.4117e-01, 5.4885e-01, 4.8799e-01, - 2.4303e-01, 3.3166e-02, 8.0967e-02, 5.5332e-01, - 9.0394e-01, 4.8740e-01, 5.7113e-01, 1.8688e-01, - 2.5461e-01, 8.0556e-01, 4.2261e-01, 8.4350e-01, - 8.9809e-01, 6.0672e-01, 5.2776e-01, 2.0777e-02, - 2.9352e-01, 3.8160e-01, 5.0311e-01, 7.4170e-01, - 4.7393e-01, 5.2493e-01, 1.4382e-01, 8.3941e-01, - 1.4398e-01, 7.4231e-01, 6.4705e-01, 3.0748e-01, - 9.8149e-01, 8.3684e-01, 4.1167e-01, 9.1083e-01, - 6.8831e-01, 4.9856e-01, 8.1105e-01, 8.6579e-01, - 6.1778e-01, 3.3383e-01, 5.5148e-01, 6.1391e-01, - 4.8355e-01, 5.2801e-01, 3.1579e-01, 4.0657e-01, - 8.3102e-01, 8.8214e-01, 6.2846e-01, 7.7912e-02, - 7.1251e-01, 1.5134e-01, 3.7764e-01, 9.3226e-01, - 9.0073e-01, 4.0255e-01, 8.3469e-01, 5.5163e-02, - 5.5679e-01, 7.7247e-01, 9.9926e-01, 7.1631e-01, - 1.4688e-01, 8.5901e-01, 9.5820e-01, 8.7402e-01, - 3.3750e-01, 8.3379e-01, 1.7997e-01, 3.4024e-02, - 6.0026e-01, 7.2408e-01, 1.6327e-01, 8.5342e-01, - 6.8883e-01, 7.5934e-01, 4.2426e-01, 4.2108e-01, - 7.9901e-01, 3.7324e-01, 1.4921e-01, 3.4700e-02, - 6.9781e-01, 5.9434e-01, 6.0246e-01, 5.8023e-01, - 2.8967e-01, 1.9480e-01, 6.2027e-01, 8.0618e-01, - 3.4684e-03, 2.8542e-01, 7.7883e-01, 9.8518e-01, - 2.6924e-01, 3.0555e-01, 3.6124e-01, 7.4453e-01, - 1.8155e-01, 6.4206e-01, 2.7114e-01, 4.7901e-01, - 8.7451e-01, 9.2856e-02, 3.4326e-01, 2.1555e-01, - 3.0832e-01, 4.0096e-01, 1.5544e-01, 6.8396e-01, - 6.8827e-01, 2.2804e-01, 6.6620e-01, 8.9734e-01, - 3.3590e-01, 9.3187e-01, 9.3009e-01, 3.5177e-01, - 6.8568e-01, 5.5609e-01, 2.0487e-01, 8.8749e-01, - 6.9496e-01, 4.7259e-01, 9.6056e-01, 2.6776e-01, - 8.7342e-01, 9.2249e-01, 1.3092e-01, 5.5780e-01, - 4.7137e-01, 5.0339e-01, 1.1579e-01, 4.4049e-01, - 6.2910e-01, 4.8928e-01, 6.7993e-01, 2.2566e-01, - 8.9579e-01, 3.8693e-01, 1.3706e-01, 7.8526e-01, - 7.7892e-01, 4.7453e-01, 5.5368e-01, 7.7885e-01, - 5.6439e-01, 5.8212e-01, 1.2176e-01, 5.9040e-01, - 8.3832e-01, 4.8913e-01, 3.9403e-01, 7.9724e-01, - 1.9041e-01, 3.0558e-01, 7.3543e-02, 3.8543e-01, - 3.8432e-01, 1.5301e-01, 1.9464e-01, 7.6242e-01, - 8.5603e-01, 2.4348e-01, 3.3823e-01, 1.3497e-01, - 2.9285e-01, 3.1222e-01, 4.8630e-01, 7.9517e-01, - 2.0854e-01, 5.5934e-01, 3.1609e-01, 9.9551e-01, - 3.6527e-01, 3.4714e-01, 6.7307e-01, 2.8007e-01, - 3.8271e-01, 3.9288e-01, 6.5768e-01, 5.8630e-01, - 5.4392e-01, 5.8597e-02, 6.5879e-01, 7.0779e-01, - 8.2889e-01, 7.8725e-01, 2.3580e-01, 1.2659e-01, - 4.6340e-01, 7.1524e-01, 1.4355e-01, 9.3213e-01, - 6.7358e-01, 9.9362e-01, 3.3661e-01, 8.1383e-01, - 6.3340e-01, 5.5372e-01, 9.3278e-01, 4.5087e-01, - 9.2927e-01, 5.3649e-01, 8.7350e-01, 2.7214e-01, - 9.4851e-01, 5.9748e-01, 3.1403e-02, 6.1772e-01, - 2.6300e-01, 6.6770e-01, 1.9508e-01, 6.6179e-01, - 2.5318e-02, 3.1166e-01, 5.9892e-01, 9.4355e-01, - 2.1253e-01, 2.3988e-01, 8.9296e-01, 7.7257e-01, - 6.2487e-01, 9.1523e-01, 9.7362e-01, 5.4365e-01, - 7.0272e-02, 8.3501e-01, 3.0732e-01, 4.5696e-01, - 6.9966e-01, 5.5054e-01, 8.9687e-01, 4.4966e-01, - 4.7031e-01, 3.6367e-01, 7.4112e-01, 1.0620e-01, - 7.1418e-01, 9.5953e-03, 9.8052e-01, 4.1101e-01, - 1.8228e-01, 8.9274e-01, 7.5847e-01, 7.1856e-01, - 5.6637e-01, 5.7611e-01, 3.8877e-01, 4.2294e-01, - 6.2557e-01, 5.3124e-01, 1.6826e-02, 2.3507e-01, - 8.1644e-01, 1.5435e-01, 3.3081e-01, 5.6503e-01, - 1.9146e-01, 6.8300e-01, 2.5627e-02, 1.7776e-01, - 2.7126e-01, 9.1810e-03, 2.4871e-01, 3.6057e-02, - 5.8490e-01, 3.2513e-01, 6.0523e-01, 
3.8111e-01, - 9.1639e-01, 2.9765e-01, 7.2000e-01, 2.5948e-01, - 2.5582e-01, 2.4864e-01, 3.7189e-01, 1.0205e-01, - 3.8670e-01, 4.0786e-02, 8.8019e-01, 6.7285e-01, - 7.3613e-01, 3.4637e-01, 5.4532e-01, 1.9835e-02, - 9.5883e-01, 2.3990e-01, 1.1367e-01, 2.3155e-01, - 9.8695e-02, 9.9085e-01, 8.2365e-01, 1.9166e-02, - 1.0994e-01, 7.0415e-01, 7.0864e-01, 3.8621e-01, - 2.6239e-01, 7.7842e-01, 1.3817e-01, 7.9101e-02, - 2.1852e-01, 6.2569e-01, 2.2697e-01, 6.0997e-01, - 5.8456e-01, 2.7736e-02, 9.9867e-01, 1.3290e-01, - 1.4762e-01, 1.4758e-02, 9.2467e-03, 9.5847e-02, - 5.6896e-01, 7.7772e-01, 4.3486e-01, 9.6895e-01, - 5.7190e-01, 9.8043e-01, 5.9680e-02, 1.3507e-01, - 1.1531e-01, 7.4566e-01, 8.1640e-01, 6.3706e-01, - 1.5904e-01, 6.7155e-01, 4.7947e-01, 6.7198e-01, - 8.0175e-01, 2.8197e-02, 4.2399e-04, 8.8622e-01, - 2.9810e-01, 6.2710e-01, 5.3580e-01, 7.0656e-01, - 1.9115e-01, 2.1864e-01, 8.2521e-01, 9.7825e-01, - 3.2694e-01, 4.5699e-01, 9.2332e-01, 8.1085e-01, - 8.6842e-01, 2.6298e-01, 9.5236e-01, 9.6902e-01, - 3.7078e-02, 3.2802e-01, 6.1465e-01, 1.9061e-01, - 7.4225e-02, 9.0832e-01, 6.7882e-01, 9.1376e-01, - 1.8754e-01, 5.8402e-01, 4.0463e-01, 1.6950e-02, - 6.0488e-01, 7.0416e-01, 8.9438e-01, 6.3537e-01, - 9.3311e-01, 7.1219e-01, 1.8304e-01, 2.0294e-01, - 2.4156e-01, 2.5246e-02, 4.7333e-01, 3.6382e-01, - 7.7323e-02, 6.9307e-01, 6.1588e-02, 8.9470e-01, - 3.1037e-01, 8.2865e-01, 6.5366e-01, 1.6570e-01, - 5.7658e-01, 4.8139e-01, 7.1975e-01, 5.1300e-01, - 8.9715e-01, 4.2038e-01, 5.8014e-01, 6.9148e-01, - 3.0599e-01, 9.3319e-02, 4.7731e-01, 2.1704e-01, - 6.4481e-01, 2.4996e-01, 6.0429e-01, 5.5205e-01, - 2.2135e-01, 8.8346e-01, 7.0709e-01, 8.2152e-01, - 9.6879e-01, 9.9632e-01, 2.4221e-01, 4.5972e-01, - 9.6493e-01, 1.4195e-01, 3.1842e-01, 1.4820e-01, - 1.9437e-01, 9.3361e-01, 7.2369e-01, 8.1980e-02, - 3.0563e-01, 8.3087e-01, 9.6210e-01, 3.8570e-01, - 3.8999e-01, 2.6426e-01, 3.8497e-01, 6.2371e-01, - 3.4436e-02, 5.7200e-01, 9.1103e-03, 3.4663e-01, - 7.5139e-01, 8.0402e-02, 1.4851e-01, 5.6103e-01, - 1.4194e-01, 3.7747e-01, 1.2583e-01, 3.4934e-01, - 7.8245e-01, 4.4202e-02, 2.3814e-02, 5.0043e-01, - 4.0549e-02, 9.2123e-01, 6.4456e-01, 5.5668e-01, - 2.2637e-01, 4.5866e-01, 6.5612e-01, 4.8747e-01, - 3.5998e-01, 9.6531e-01, 2.2855e-01, 8.7165e-01, - 2.1451e-01, 6.0932e-01, 1.3079e-01, 8.7426e-01, - 8.1725e-01, 4.2196e-01, 7.1141e-01, 4.3740e-01, - 5.3629e-01, 9.9116e-01, 2.5936e-01, 6.2397e-02, - 4.8627e-01, 7.9972e-01, 5.9392e-01, 1.2708e-01, - 7.6474e-01, 7.6568e-01, 7.2778e-01, 7.1216e-01, - 7.5908e-01, 6.9230e-01, 8.6691e-01, 1.5589e-01, - 9.6845e-01, 3.9806e-01, 5.2987e-01, 4.8572e-02, - 6.1088e-01, 8.4502e-01, 8.7595e-01, 7.9081e-01, - 2.2328e-01, 7.3453e-02, 2.9361e-01, 7.2018e-01, - 8.6362e-01, 5.8239e-01, 9.8886e-01, 9.8207e-01, - 4.9789e-02, 9.0883e-01, 4.6588e-01, 7.8227e-01, - 5.6496e-01, 4.4322e-01, 6.0243e-01, 7.9633e-01, - 1.5407e-01, 4.7116e-01, 4.6135e-02, 7.9439e-01, - 9.4742e-01, 3.0537e-01, 7.4954e-01, 6.1486e-01, - 4.0816e-01, 5.7772e-02, 4.8874e-02, 6.5603e-01, - 6.5378e-01, 1.4657e-01, 7.5423e-01, 2.1719e-01, - 6.1274e-01, 7.2810e-01, 5.9787e-01, 8.5114e-01, - 8.4574e-01, 4.4836e-01, 1.1812e-01, 1.6302e-02, - 2.8183e-01, 2.8454e-01, 7.8868e-01, 5.6188e-01, - 3.7106e-01, 9.5184e-01, 4.6233e-01, 3.0721e-01, - 4.6652e-01, 4.5518e-01, 3.9625e-01, 3.1047e-01, - 6.1431e-01, 7.7994e-03, 5.0223e-01, 8.8821e-01, - 6.2450e-01, 1.5123e-02, 1.6467e-01, 3.7877e-02, - 3.3093e-01, 4.8842e-01, 1.4406e-01, 3.7447e-01, - 4.1940e-01, 4.0252e-01, 1.5874e-01, 9.2837e-01, - 6.5660e-02, 3.7370e-01, 1.6842e-01, 
3.0803e-01, - 6.5644e-01, 7.7941e-01, 7.2378e-01, 5.4227e-01, - 1.4685e-01, 2.4221e-01, 2.9616e-01, 1.1769e-01, - 7.8889e-01, 4.7046e-01, 5.6810e-01, 9.0070e-02, - 9.6025e-01, 8.4650e-01, 5.7994e-01, 9.2239e-01, - 5.9694e-01, 7.0729e-01, 9.6887e-01, 6.6587e-01, - 4.3626e-01, 8.5358e-01, 3.8961e-01, 7.2852e-01, - 4.6255e-01, 8.4622e-01, 7.8245e-01, 6.5110e-01, - 6.2516e-01, 5.3600e-01, 4.1244e-01, 2.4712e-01, - 9.1064e-01, 7.1727e-01, 8.3599e-03, 4.9574e-01, - 6.2521e-02, 6.8479e-01, 5.4408e-02, 7.0926e-01, - 9.2773e-01, 4.3862e-01, 1.9592e-02, 8.3484e-01, - 6.2885e-01, 4.3876e-01, 7.8552e-01, 1.0172e-01, - 6.3309e-01, 3.7608e-01, 2.7818e-01, 4.2463e-01, - 5.4210e-01, 8.8253e-01, 7.3945e-02, 9.1054e-03, - 5.1831e-01, 5.1131e-01, 5.9108e-02, 5.0222e-01, - 4.4171e-02, 7.9161e-01, 4.9498e-01, 8.3120e-01, - 8.8602e-01, 1.6136e-01, 6.8591e-01, 5.5565e-01, - 3.7967e-01, 1.6486e-02, 8.2781e-01, 5.2694e-01, - 8.7141e-01, 9.6245e-01, 4.4308e-01, 4.7436e-01, - 9.5535e-01, 4.0917e-01, 6.3676e-01, 2.5075e-01, - 6.4306e-01, 1.4736e-01, 9.1328e-01, 9.5108e-01, - 5.5093e-01, 1.0660e-01, 1.0531e-01, 8.3265e-01, - 5.7798e-01, 9.3816e-01, 7.4004e-01, 2.6624e-01, - 4.6151e-01, 7.3546e-01, 2.5614e-01, 8.3512e-01, - 8.1670e-01, 5.0614e-01, 9.7279e-01, 3.1225e-01, - 7.8442e-01, 7.3051e-01, 5.8183e-01, 3.3468e-01, - 8.6277e-01, 1.6129e-01, 3.9534e-01, 5.0412e-01, - 9.3144e-02, 6.8435e-01, 1.5667e-01, 4.1079e-01, - 2.4207e-01, 4.8373e-01, 4.2507e-01, 6.1906e-01, - 2.8594e-01, 9.8090e-01, 8.5909e-01, 7.8064e-01]), + col_indices=tensor([2466, 1664, 7481, 210, 6334, 470, 4692, 2693, 4711, + 4342, 3301, 5779, 7067, 6248, 125, 2806, 7661, 8844, + 6269, 6344, 6019, 9203, 9871, 5957, 4909, 6849, 6988, + 9430, 5869, 1780, 9539, 6884, 7103, 8427, 9019, 1245, + 7980, 1046, 4137, 925, 4929, 8140, 1986, 6586, 2266, + 516, 7060, 5234, 7513, 7395, 8304, 4237, 2830, 1379, + 5492, 5672, 5636, 6730, 5843, 979, 697, 8819, 4099, + 6731, 3551, 9719, 7610, 5588, 6321, 4086, 8139, 4851, + 6005, 5948, 4211, 3348, 7300, 822, 9321, 7112, 7369, + 4107, 8243, 9309, 5224, 3347, 603, 4792, 3712, 8066, + 5066, 7745, 5956, 655, 7895, 9019, 1244, 5931, 6951, + 5413, 5766, 4536, 3465, 7845, 292, 4482, 3580, 3364, + 3561, 2261, 7839, 9823, 6190, 4939, 8681, 32, 4697, + 2206, 3378, 6685, 7819, 6377, 1890, 7716, 6198, 2252, + 665, 2928, 4236, 6919, 6064, 4061, 4714, 7037, 1246, + 4486, 9735, 2231, 9201, 9788, 7899, 8241, 3652, 9369, + 4636, 7302, 1100, 6795, 7279, 330, 4414, 9292, 8300, + 1197, 8851, 9118, 9279, 5706, 357, 8305, 6050, 4953, + 9830, 5996, 9829, 1003, 8197, 7863, 4811, 9966, 2627, + 5812, 2267, 5644, 4576, 5108, 2161, 4950, 7957, 1792, + 9085, 1175, 8982, 1919, 4887, 5078, 3383, 7142, 4275, + 7428, 436, 4345, 8443, 8756, 2682, 5176, 1033, 4529, + 2982, 6758, 550, 5692, 9654, 8129, 9192, 3521, 4759, + 7699, 3883, 1036, 399, 7762, 1394, 500, 900, 342, + 7623, 3356, 3692, 9600, 4565, 4311, 3724, 871, 5723, + 8925, 3286, 2855, 3504, 1848, 9587, 8228, 2423, 2723, + 5524, 6337, 2926, 5809, 6085, 4754, 471, 6268, 3941, + 696, 4328, 6670, 3894, 7515, 6707, 4809, 8961, 1334, + 9753, 8075, 3911, 6319, 1834, 4, 6701, 5430, 2763, + 5104, 2745, 3043, 2843, 2861, 8426, 1448, 7805, 8791, + 2099, 8287, 9104, 9338, 5259, 8458, 2724, 4658, 2001, + 7282, 5654, 5185, 3693, 477, 2690, 3867, 7222, 9270, + 3728, 1196, 1027, 8957, 9742, 7112, 7009, 5999, 4026, + 6878, 6738, 8653, 3714, 3969, 2756, 9159, 8755, 2042, + 6621, 6529, 843, 4060, 2410, 9806, 5259, 6128, 4326, + 9733, 2572, 9503, 2403, 1877, 5883, 1598, 5225, 7906, + 8009, 5731, 9765, 490, 5648, 
8736, 4926, 1141, 8770, + 2741, 399, 1492, 1210, 1707, 9787, 5313, 5856, 3681, + 2134, 2230, 6446, 119, 2825, 5576, 980, 1773, 2635, + 3819, 9428, 1717, 3861, 7046, 54, 5906, 8081, 4236, + 2606, 2671, 4009, 777, 5576, 7238, 7674, 9818, 2508, + 474, 8579, 6829, 5213, 2860, 5482, 1846, 5592, 932, + 5997, 9833, 7977, 8081, 6895, 5009, 8097, 1316, 2434, + 1872, 1998, 9947, 7861, 5298, 6043, 2901, 9806, 9066, + 8050, 9091, 7024, 7490, 3096, 7102, 4359, 9796, 1977, + 2524, 4518, 3257, 6935, 6802, 2079, 5754, 3332, 8631, + 9172, 3329, 5086, 7560, 896, 3128, 2249, 3062, 7159, + 4046, 6270, 6475, 5063, 7672, 9086, 8869, 25, 128, + 387, 1354, 9326, 8992, 8942, 7518, 8430, 8177, 7362, + 6891, 6031, 300, 1744, 8095, 3535, 7278, 4812, 3958, + 2756, 5061, 1607, 9812, 474, 2912, 6988, 6034, 2377, + 9373, 2353, 1285, 2524, 6819, 4346, 3914, 7117, 175, + 9023, 6401, 5285, 2361, 7560, 2894, 1305, 8291, 5480, + 6253, 9704, 7871, 1902, 4092, 4584, 2758, 3352, 7972, + 3307, 4935, 2265, 5807, 7905, 977, 5184, 4472, 7319, + 4491, 7689, 8992, 201, 6043, 6337, 8006, 66, 8819, + 439, 7338, 3436, 6917, 2526, 3011, 5181, 8820, 113, + 7700, 9065, 984, 9615, 6116, 4384, 2621, 6687, 5636, + 4076, 8556, 7933, 197, 5570, 5605, 9009, 1089, 371, + 8294, 2348, 2292, 9335, 3321, 5235, 455, 981, 558, + 1296, 5381, 3962, 5418, 7710, 1631, 6312, 8747, 4333, + 128, 1379, 4670, 5633, 6246, 9948, 4595, 5285, 3114, + 825, 7955, 3868, 4034, 660, 3954, 5836, 4459, 3366, + 7234, 2957, 4659, 1089, 3425, 1657, 7462, 529, 3232, + 1765, 5828, 2451, 1803, 111, 8092, 7852, 6899, 8271, + 7907, 7511, 9472, 7498, 3546, 7473, 304, 2571, 7564, + 2125, 1907, 6399, 6317, 8029, 3045, 7427, 1924, 9455, + 7409, 8946, 9536, 3745, 3421, 2752, 2352, 9108, 8700, + 3487, 5059, 9478, 9562, 6579, 4684, 5651, 8860, 6786, + 1051, 6579, 5237, 2979, 3159, 141, 1356, 8956, 7015, + 7484, 2861, 6772, 8240, 6836, 1872, 4558, 6158, 7558, + 6050, 1193, 5007, 6211, 8442, 6096, 6767, 531, 1675, + 6650, 2565, 4925, 6087, 2581, 4522, 2322, 2226, 1265, + 1793, 1080, 6885, 9924, 8543, 3401, 1176, 7778, 1117, + 9101, 7268, 7801, 6906, 3951, 871, 9828, 8353, 5407, + 6107, 6130, 9174, 819, 2756, 5901, 4959, 3627, 5783, + 171, 9411, 8428, 8983, 4520, 8409, 1215, 1826, 5912, + 2754, 1404, 8407, 6917, 9924, 4266, 2011, 6319, 5969, + 2568, 861, 2544, 6033, 7928, 4401, 734, 5982, 5462, + 8701, 8148, 1987, 715, 5093, 4393, 5524, 5631, 4256, + 2221, 7436, 8053, 2115, 7961, 2362, 9062, 6505, 1059, + 3977, 735, 5135, 2550, 4498, 114, 9822, 5499, 2536, + 8881, 9878, 9234, 6669, 7535, 3602, 7599, 6448, 1145, + 8807, 7382, 8772, 1211, 6602, 4486, 9820, 5792, 8809, + 4615, 4385, 5623, 9175, 3146, 5029, 3310, 9315, 3756, + 3240, 7147, 9593, 7784, 8214, 740, 7718, 3659, 3588, + 1931, 8016, 2627, 3746, 2169, 9079, 5114, 3506, 953, + 9004, 195, 7282, 6303, 4472, 4641, 3298, 3525, 3508, + 5763, 5817, 3662, 2473, 8142, 5461, 6592, 2603, 5697, + 9960, 7110, 4284, 5611, 8456, 2135, 5077, 8932, 1775, + 7366, 5932, 2411, 5832, 4070, 1622, 9641, 3306, 9347, + 1882, 7506, 7399, 570, 2722, 5450, 26, 3892, 4065, + 8107, 6360, 2015, 8537, 1979, 5211, 8199, 9178, 8828, + 8211, 4968, 6954, 6063, 387, 2063, 5921, 9894, 2998, + 4525, 9739, 5492, 3340, 6978, 6846, 3293, 7079, 1467, + 5362, 7188, 1693, 321, 2012, 8634, 6145, 107, 3099, + 8814, 8483, 5247, 9503, 6904, 8661, 6653, 6410, 6174, + 2233, 5720, 5227, 3628, 6596, 6351, 8795, 4564, 3022, + 6798, 5366, 3749, 6430, 267, 1452, 8480, 13, 7548, + 2891, 3542, 9592, 7527, 9439, 9055, 2379, 3069, 2426, + 7792, 8709, 7210, 9086, 7771, 2207, 86, 7304, 1565, + 
474, 1647, 3339, 8111, 360, 7565, 8929, 5529, 6229, + 6302, 2893, 6578, 384, 4992, 367, 7505, 6664, 3466, + 8907, 6029, 4936, 6429, 7341, 9658, 3196, 8351, 9855, + 3458, 2025, 6627, 34, 4251, 5762, 8831, 9357, 4273, + 6989, 6210, 38, 2544, 8173, 3731, 2681, 9891, 1995, + 7272, 3951, 9183, 8841, 6075, 4350, 8630, 4882, 6047, + 2216, 1305, 1783, 8753, 9777, 1746, 2275, 4574, 3685, + 939, 1344, 1100, 3276, 6678, 2199, 4317, 8384, 597, + 2080, 2466, 3071, 5454, 9610, 7809, 3658, 6652, 2578, + 3305, 841, 206, 6312, 4961, 6612, 2998, 2946, 7333, + 9031]), + values=tensor([4.5124e-01, 3.7270e-01, 9.0666e-01, 3.9041e-01, + 8.6546e-02, 1.7553e-01, 7.0978e-01, 2.1385e-01, + 7.1677e-01, 9.6209e-01, 8.3819e-01, 3.6879e-01, + 3.5680e-01, 7.4745e-01, 2.8105e-01, 6.5779e-02, + 5.8972e-01, 7.9189e-01, 5.7977e-01, 2.0965e-01, + 9.5019e-01, 9.8956e-01, 2.0994e-01, 1.3201e-01, + 4.3087e-01, 4.2821e-01, 8.8378e-01, 4.6523e-01, + 5.3725e-01, 3.3218e-01, 2.2638e-01, 2.9838e-02, + 7.0884e-01, 7.2423e-01, 2.1189e-01, 9.7455e-01, + 2.5735e-01, 3.3086e-01, 9.7983e-01, 8.5652e-01, + 1.3418e-01, 6.7647e-01, 1.6396e-01, 6.9175e-01, + 1.8028e-01, 7.8387e-01, 8.7422e-01, 8.0332e-01, + 4.2629e-01, 3.2136e-01, 2.0289e-01, 5.0568e-01, + 1.7254e-01, 4.3434e-01, 6.9617e-01, 1.2974e-01, + 3.0740e-01, 7.3065e-01, 6.2228e-01, 1.4376e-01, + 3.3250e-01, 7.7791e-01, 9.6238e-01, 1.2364e-01, + 4.7576e-01, 7.5809e-01, 9.2084e-01, 5.4415e-01, + 9.0101e-02, 2.1611e-01, 3.4588e-01, 3.3039e-01, + 6.8018e-01, 8.7762e-02, 5.5706e-01, 3.7312e-02, + 7.6981e-01, 8.5276e-01, 7.4317e-01, 9.7805e-01, + 4.1412e-01, 4.4320e-01, 2.2156e-01, 9.1442e-01, + 4.9620e-01, 8.2494e-01, 6.2997e-01, 6.0864e-01, + 8.0176e-01, 9.0423e-01, 4.0914e-01, 1.5954e-01, + 5.8120e-01, 8.0475e-01, 8.6415e-02, 6.9729e-01, + 8.2499e-01, 7.3144e-01, 1.7447e-01, 9.9133e-01, + 1.1777e-02, 8.9840e-01, 1.5542e-01, 9.4149e-01, + 8.4609e-01, 2.0308e-01, 2.4320e-01, 2.2823e-01, + 9.8139e-01, 4.2804e-01, 1.1814e-02, 4.0316e-01, + 3.0833e-02, 2.2453e-02, 1.7883e-01, 6.8511e-01, + 9.9891e-01, 3.4515e-01, 5.6629e-01, 4.5682e-01, + 6.6730e-01, 3.2825e-01, 8.5113e-01, 6.0365e-01, + 5.6541e-01, 7.1721e-01, 9.6999e-01, 1.9379e-01, + 4.5518e-01, 5.6391e-01, 8.9960e-01, 2.1312e-02, + 7.8753e-01, 9.8162e-01, 4.9069e-01, 7.0240e-01, + 5.4331e-01, 6.7074e-02, 2.8917e-01, 8.5546e-01, + 1.1067e-01, 9.2154e-01, 8.0886e-01, 5.9693e-01, + 4.0761e-02, 8.4844e-01, 1.2539e-01, 6.8154e-01, + 5.4305e-01, 2.3663e-01, 4.2721e-01, 3.7817e-01, + 3.1358e-02, 8.7505e-02, 9.5243e-01, 8.2708e-01, + 5.6722e-01, 4.8284e-02, 6.9737e-01, 7.5669e-01, + 3.5621e-01, 5.1196e-01, 6.2065e-01, 8.4417e-04, + 2.8759e-01, 7.7985e-01, 3.8792e-01, 2.1430e-01, + 4.2102e-01, 2.7368e-01, 2.8127e-01, 6.7844e-02, + 2.4358e-01, 5.1833e-01, 3.1775e-01, 3.3769e-01, + 5.3372e-01, 5.5695e-01, 3.3825e-01, 3.8919e-02, + 1.2983e-01, 5.0689e-01, 5.6060e-01, 8.4193e-01, + 4.8429e-01, 5.7936e-01, 5.5692e-01, 1.7767e-02, + 7.8962e-01, 7.5567e-01, 7.9579e-02, 1.3934e-01, + 3.5269e-01, 4.3675e-01, 3.6224e-01, 3.6235e-02, + 4.6250e-01, 2.4264e-01, 6.0371e-01, 3.7374e-02, + 7.0528e-01, 2.5657e-01, 2.8375e-01, 4.6469e-02, + 1.4718e-02, 5.6129e-01, 4.8654e-01, 1.1721e-01, + 9.7111e-01, 9.1142e-01, 9.4937e-01, 3.3982e-01, + 3.6921e-01, 9.0859e-01, 9.8274e-01, 7.5353e-02, + 2.5324e-01, 3.9205e-02, 8.9935e-01, 6.7994e-01, + 9.4443e-01, 4.6354e-01, 6.9606e-01, 1.9291e-01, + 4.0225e-01, 3.9462e-01, 9.8207e-01, 1.9306e-01, + 1.4081e-01, 1.1165e-01, 9.1441e-01, 9.3742e-01, + 2.3110e-01, 7.8999e-01, 7.9705e-01, 3.6601e-01, + 1.1825e-01, 8.2487e-01, 
5.0946e-01, 1.8787e-01, + 5.0000e-01, 9.1119e-01, 5.7123e-02, 4.8002e-01, + 9.7532e-01, 1.9618e-01, 9.9588e-01, 6.7557e-01, + 9.5040e-01, 7.6860e-01, 7.0444e-01, 6.6683e-01, + 1.3270e-01, 9.8168e-01, 3.5768e-01, 4.9716e-01, + 4.7304e-01, 1.1150e-01, 8.3897e-01, 1.3381e-01, + 9.5309e-03, 3.8265e-01, 2.8742e-01, 8.5723e-01, + 6.6063e-01, 8.1280e-01, 9.4942e-01, 1.9032e-02, + 2.1574e-01, 3.5039e-01, 9.6501e-01, 1.1046e-01, + 4.5988e-01, 5.8821e-01, 9.6673e-01, 1.3262e-01, + 5.0418e-01, 8.3823e-01, 9.7649e-01, 2.5979e-03, + 1.7136e-01, 8.0508e-01, 3.7054e-01, 3.3684e-01, + 6.0146e-01, 7.9024e-01, 5.0221e-01, 4.6160e-01, + 3.3912e-01, 9.4431e-01, 4.3792e-01, 8.1381e-01, + 1.8152e-01, 8.1170e-01, 8.1138e-01, 3.3319e-01, + 4.4465e-01, 8.5400e-01, 1.5920e-01, 7.7993e-01, + 2.2593e-02, 7.3240e-01, 6.8517e-01, 8.5157e-01, + 7.6014e-01, 5.9264e-02, 4.2048e-01, 7.5518e-01, + 4.1505e-02, 6.0424e-01, 6.1915e-01, 4.1399e-01, + 5.3252e-01, 1.2044e-01, 3.2642e-01, 9.5811e-01, + 7.5341e-01, 8.3845e-01, 9.1528e-01, 4.7477e-01, + 5.2762e-01, 3.4909e-01, 4.5360e-01, 8.8820e-01, + 6.0180e-01, 6.2855e-01, 7.0870e-01, 3.0522e-01, + 2.7613e-01, 7.9289e-01, 1.6982e-01, 8.0981e-01, + 8.2618e-01, 7.1507e-01, 6.5308e-01, 4.3440e-01, + 5.9868e-01, 5.3273e-01, 7.3265e-01, 4.6718e-01, + 7.5122e-01, 8.1727e-01, 5.4429e-01, 2.8462e-01, + 5.7949e-01, 8.6908e-01, 4.0240e-01, 1.7709e-01, + 5.5774e-01, 9.5593e-02, 4.7699e-01, 9.7739e-01, + 3.5793e-01, 7.2048e-01, 8.2681e-01, 4.4063e-01, + 9.2540e-01, 2.0677e-01, 9.0295e-01, 9.4334e-01, + 2.7048e-01, 7.6048e-01, 4.8719e-01, 8.9535e-01, + 5.3139e-01, 9.7025e-01, 8.8724e-01, 1.1921e-01, + 5.1989e-01, 8.2672e-02, 3.5864e-01, 4.3866e-01, + 7.3788e-01, 5.6306e-01, 5.3660e-01, 8.6445e-01, + 7.0269e-01, 2.6027e-01, 6.5891e-01, 8.4661e-01, + 8.1119e-01, 8.3235e-01, 4.6169e-02, 9.2446e-01, + 6.9860e-01, 2.2959e-01, 6.4070e-01, 8.2750e-01, + 2.9132e-01, 1.3508e-01, 6.6097e-01, 3.6315e-01, + 9.6929e-01, 6.4361e-01, 4.5008e-01, 5.5293e-01, + 4.0535e-01, 6.1112e-01, 7.6972e-02, 1.9908e-01, + 3.3567e-01, 8.3326e-01, 5.7133e-01, 7.7374e-01, + 9.7250e-01, 6.4317e-01, 9.9608e-03, 6.8481e-01, + 5.2490e-03, 6.2328e-01, 5.9186e-01, 8.9594e-01, + 4.7292e-01, 5.6331e-01, 9.1823e-01, 2.3001e-01, + 9.4121e-01, 9.1357e-01, 5.8743e-01, 9.0431e-01, + 8.0247e-02, 7.1566e-01, 8.2408e-01, 3.2385e-01, + 6.8277e-01, 7.2420e-01, 5.4441e-01, 9.7916e-01, + 4.3397e-01, 2.6325e-01, 9.9641e-01, 1.1171e-01, + 3.5313e-01, 3.9168e-02, 7.9285e-01, 3.0783e-01, + 7.8822e-01, 4.4528e-01, 5.7471e-01, 2.0863e-01, + 1.2314e-01, 7.4280e-01, 3.9871e-01, 5.1455e-01, + 7.5317e-01, 1.1001e-01, 5.2375e-01, 7.2175e-01, + 6.5690e-01, 4.9320e-01, 2.4753e-01, 6.6797e-01, + 7.9576e-01, 8.7515e-02, 1.8857e-01, 4.6155e-01, + 7.0005e-01, 4.2794e-01, 2.0828e-01, 6.3095e-01, + 1.2508e-01, 1.7678e-01, 4.1212e-01, 7.9363e-01, + 7.5646e-01, 2.3935e-01, 6.3493e-01, 5.9037e-01, + 7.9361e-01, 2.5308e-01, 4.1115e-01, 3.6386e-01, + 9.7627e-01, 6.3255e-01, 4.2508e-01, 8.9638e-01, + 2.1737e-01, 1.8711e-01, 5.7424e-01, 3.5150e-02, + 5.3614e-01, 6.2985e-01, 2.6806e-01, 8.2368e-01, + 8.3065e-01, 2.2803e-01, 3.8490e-01, 9.2464e-01, + 3.8623e-01, 2.7250e-01, 4.6082e-01, 6.7170e-01, + 5.0558e-01, 3.2339e-01, 8.0310e-01, 6.1806e-02, + 5.1736e-01, 6.7684e-01, 9.1977e-01, 9.1269e-01, + 4.0075e-01, 5.8524e-01, 1.9980e-01, 7.8282e-02, + 2.3128e-01, 6.0254e-01, 6.1770e-01, 6.8555e-01, + 3.0954e-01, 8.7953e-01, 9.0319e-01, 1.6881e-01, + 8.4697e-01, 1.2115e-01, 4.6155e-01, 4.3480e-01, + 7.2407e-01, 8.1640e-01, 1.5401e-01, 2.3233e-01, + 1.4251e-01, 1.8176e-01, 
6.0286e-01, 4.3808e-01, + 4.0826e-01, 4.9659e-01, 4.6570e-02, 4.0401e-01, + 2.2925e-01, 4.6592e-01, 8.0015e-01, 9.8342e-01, + 7.0933e-01, 9.7432e-01, 7.4397e-01, 8.7394e-01, + 6.9926e-01, 9.5276e-01, 3.5505e-02, 8.6020e-01, + 5.1909e-01, 3.5518e-01, 9.3592e-01, 2.6255e-01, + 1.4996e-01, 3.5145e-02, 3.3789e-01, 4.3219e-01, + 2.3878e-01, 1.6027e-01, 4.7158e-01, 4.8415e-02, + 6.8371e-01, 8.2805e-01, 6.1895e-01, 8.8397e-01, + 2.8915e-01, 4.6915e-01, 2.3579e-01, 6.7204e-01, + 4.4894e-01, 2.8521e-01, 8.5738e-02, 3.8108e-01, + 6.1400e-01, 9.3480e-01, 3.4752e-01, 1.2632e-02, + 7.1407e-01, 4.1156e-01, 7.2856e-01, 4.6212e-01, + 2.5240e-01, 7.9263e-01, 1.2015e-01, 9.1667e-01, + 1.2198e-01, 7.4882e-01, 2.0876e-01, 7.1882e-01, + 1.2617e-01, 1.3242e-01, 4.8651e-01, 9.9545e-01, + 5.0042e-02, 6.4545e-01, 7.9176e-01, 6.1292e-01, + 4.9193e-01, 3.7118e-01, 8.5274e-01, 5.7393e-01, + 2.9566e-01, 7.9897e-01, 5.6642e-01, 8.1769e-02, + 5.3718e-01, 9.4140e-01, 6.2021e-01, 6.5036e-01, + 5.8202e-01, 2.9498e-01, 5.9824e-01, 2.5583e-02, + 3.9188e-02, 1.8642e-01, 9.6334e-01, 8.5075e-01, + 3.6870e-01, 6.8613e-01, 5.1750e-01, 3.1633e-01, + 5.7964e-02, 8.6435e-01, 6.4982e-01, 1.7298e-01, + 8.6470e-01, 6.0897e-01, 4.7351e-01, 5.7447e-01, + 7.5549e-01, 3.0524e-01, 5.8832e-01, 5.0614e-01, + 8.0928e-01, 4.8379e-01, 3.9590e-02, 1.1444e-01, + 5.6379e-01, 8.6007e-01, 3.6510e-01, 2.5526e-01, + 8.1983e-01, 8.6730e-01, 7.0682e-01, 6.7074e-02, + 3.6645e-01, 4.4831e-01, 3.7342e-01, 1.2949e-01, + 3.4296e-01, 7.2779e-01, 2.9237e-01, 6.3987e-01, + 5.5991e-01, 8.1918e-01, 1.0124e-01, 6.3395e-01, + 2.5891e-01, 9.5711e-01, 3.0723e-01, 5.4638e-01, + 6.0488e-01, 3.9950e-01, 5.7505e-02, 8.5127e-01, + 9.7511e-01, 2.4688e-01, 5.9618e-01, 5.2723e-01, + 1.1869e-01, 1.9273e-02, 7.1298e-01, 5.0193e-01, + 2.8246e-01, 3.3310e-02, 6.1518e-01, 9.3515e-01, + 6.9422e-01, 9.9243e-01, 4.9601e-01, 8.9142e-01, + 2.8304e-01, 6.5308e-01, 5.0714e-01, 7.9181e-01, + 6.4777e-01, 7.3135e-01, 5.4720e-01, 6.9713e-01, + 1.8341e-01, 5.7872e-02, 9.9925e-01, 1.9791e-01, + 9.2529e-01, 6.4832e-01, 9.6342e-01, 3.2137e-01, + 8.4381e-01, 8.0031e-01, 3.1755e-01, 6.3758e-01, + 5.0450e-01, 5.0456e-01, 9.4519e-01, 8.0440e-01, + 3.4165e-01, 2.8759e-01, 8.3687e-01, 4.7547e-01, + 9.3946e-01, 1.2061e-01, 8.2480e-01, 3.8819e-01, + 3.7722e-01, 4.1495e-01, 8.3633e-01, 1.8071e-01, + 3.4592e-01, 5.4464e-01, 7.4900e-01, 2.8746e-02, + 3.6952e-01, 3.0041e-01, 1.5772e-01, 3.9736e-01, + 6.3412e-01, 4.7343e-01, 2.7451e-03, 7.0379e-01, + 9.6600e-01, 5.5501e-01, 6.6551e-01, 9.2460e-01, + 7.5608e-01, 1.4714e-01, 9.7939e-02, 1.8870e-01, + 1.0242e-01, 1.0276e-01, 9.3063e-01, 7.7604e-01, + 2.0767e-01, 8.7824e-01, 9.5919e-01, 1.3066e-01, + 7.5875e-01, 5.0489e-02, 1.8985e-01, 3.4845e-01, + 5.9511e-01, 2.5431e-01, 7.2500e-01, 4.1691e-01, + 7.3076e-01, 6.2294e-02, 5.3099e-01, 4.1952e-01, + 1.3718e-02, 5.9663e-01, 8.4180e-01, 4.7331e-02, + 4.8208e-01, 9.4686e-01, 4.1967e-01, 5.2315e-01, + 7.7162e-01, 9.3777e-01, 9.4342e-01, 3.0143e-01, + 3.5043e-01, 4.3900e-01, 9.2576e-01, 1.8325e-01, + 3.0903e-01, 8.2791e-01, 4.1519e-01, 8.9530e-01, + 8.3558e-01, 3.9552e-01, 4.1592e-01, 2.9482e-01, + 9.2981e-01, 6.9661e-02, 5.0570e-01, 2.9387e-01, + 1.9979e-01, 4.5956e-01, 3.3855e-01, 8.8126e-01, + 8.1605e-01, 6.1549e-01, 5.2115e-01, 7.8572e-01, + 7.2935e-02, 4.6384e-01, 1.3884e-01, 5.4624e-01, + 4.2663e-01, 7.3772e-01, 7.7171e-01, 1.5222e-01, + 2.1228e-01, 4.0843e-01, 8.6019e-01, 4.6209e-01, + 4.9486e-01, 4.8213e-01, 6.1007e-01, 3.3649e-01, + 6.0019e-01, 8.1909e-01, 2.3865e-01, 5.9427e-01, + 7.2830e-01, 9.4168e-01, 
3.4005e-01, 3.7275e-01, + 6.3904e-02, 4.5032e-01, 8.7446e-01, 8.9067e-01, + 9.6544e-01, 2.6422e-01, 8.1113e-01, 2.5143e-01, + 5.1239e-01, 6.8159e-01, 5.6707e-02, 7.9447e-01, + 4.1772e-01, 8.7228e-01, 5.3410e-01, 7.3334e-01, + 6.6377e-01, 5.5713e-01, 5.9279e-01, 6.2155e-01, + 5.5474e-01, 9.4641e-02, 2.6776e-01, 8.5304e-01, + 5.2074e-01, 1.1888e-01, 9.0224e-01, 5.5388e-01, + 3.2249e-02, 6.6348e-01, 8.5896e-01, 3.8235e-01, + 8.1061e-01, 5.5059e-01, 5.9915e-01, 9.3634e-01, + 6.8166e-01, 9.6680e-01, 4.3548e-02, 1.0144e-01, + 6.9069e-01, 1.6355e-01, 4.4217e-01, 1.4285e-01, + 3.4215e-02, 3.2026e-01, 6.7746e-01, 4.8682e-01, + 7.7657e-02, 3.4148e-01, 5.5291e-01, 6.3695e-01, + 9.5522e-01, 4.9084e-02, 9.1925e-01, 6.3163e-01, + 7.7440e-01, 6.4814e-01, 5.7972e-01, 3.6947e-01, + 2.3264e-01, 5.1413e-01, 9.0690e-01, 7.1480e-01, + 1.2216e-01, 1.6994e-02, 7.1926e-01, 4.3942e-01, + 8.0096e-01, 2.8297e-01, 6.4908e-01, 3.0679e-01, + 8.8796e-02, 6.1238e-01, 4.1263e-01, 7.7035e-01, + 6.7555e-01, 6.3600e-01, 8.3776e-01, 6.8202e-01, + 7.8363e-01, 3.5779e-01, 1.6458e-02, 2.6831e-01, + 3.4456e-01, 5.3618e-02, 9.8811e-01, 2.7427e-01, + 9.2105e-01, 9.3749e-01, 1.7191e-01, 1.2581e-02, + 9.9916e-01, 2.2768e-01, 7.8364e-02, 3.1750e-01, + 9.6948e-01, 4.7396e-01, 7.4306e-01, 6.5449e-01, + 1.1668e-01, 4.1905e-01, 8.3475e-01, 5.2043e-01, + 1.1460e-01, 1.6989e-01, 2.4530e-02, 9.6940e-01, + 9.5385e-02, 6.5576e-01, 3.4501e-01, 8.2784e-01, + 3.7405e-01, 8.1038e-01, 4.0522e-01, 6.1686e-01, + 8.7730e-01, 3.7059e-01, 8.1609e-01, 6.3547e-01, + 8.0442e-01, 5.2620e-01, 4.7068e-01, 8.4719e-01, + 6.7244e-01, 9.9111e-01, 4.8585e-01, 7.2098e-01, + 5.3425e-01, 5.4973e-01, 8.5724e-01, 1.7037e-01, + 1.8240e-01, 2.7130e-01, 8.4948e-01, 2.3994e-01, + 8.0979e-01, 2.9375e-01, 5.6822e-01, 6.1765e-01, + 6.3448e-01, 5.8919e-02, 3.3906e-01, 6.0162e-01, + 9.0471e-02, 2.7374e-01, 7.5028e-01, 1.5957e-01, + 4.5270e-01, 1.5058e-02, 3.4894e-01, 1.5488e-01, + 4.9308e-01, 3.8483e-01, 3.7807e-01, 8.5288e-01, + 5.3371e-01, 1.9063e-01, 4.9374e-01, 2.2530e-01, + 6.9940e-01, 6.2419e-01, 3.7397e-01, 1.5121e-01, + 7.8147e-01, 9.0666e-03, 4.3562e-01, 5.0901e-01, + 7.5738e-02, 7.5757e-01, 5.7814e-01, 5.7432e-01, + 7.9725e-01, 4.6128e-02, 9.2067e-01, 4.6649e-01, + 7.3434e-01, 3.2590e-01, 4.8045e-02, 8.9064e-01, + 7.3191e-01, 9.9391e-01, 2.8362e-01, 3.8588e-01, + 4.4711e-01, 3.5410e-02, 6.0491e-01, 6.2397e-02, + 8.4844e-01, 6.4685e-01, 8.1450e-03, 1.7539e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8755, 0.4171, 0.4007, ..., 0.3638, 0.0663, 0.0983]) +tensor([0.6861, 0.5695, 0.5465, ..., 0.1082, 0.7995, 0.1510]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -754,271 +647,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 2.430596351623535 seconds +Time: 2.291856288909912 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '223318', '-ss', '10000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.272708654403687} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '226437', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": 
"synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.510953187942505} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([1701, 5200, 151, 7106, 6780, 2055, 1375, 8242, 376, - 4556, 1864, 1124, 4828, 55, 5866, 9752, 5516, 7381, - 1233, 540, 636, 7075, 5707, 6021, 4061, 4372, 9110, - 8043, 6636, 2721, 2135, 174, 4881, 658, 3469, 6307, - 6633, 7241, 1540, 9161, 3585, 3028, 1940, 352, 9272, - 3623, 3390, 9183, 6840, 1293, 3398, 5823, 704, 5011, - 1738, 9159, 5732, 4747, 2802, 1249, 5171, 5364, 7066, - 4818, 4723, 9883, 1229, 9311, 6671, 7348, 8536, 5413, - 6, 4030, 8060, 4147, 5081, 6166, 1683, 8447, 8806, - 8357, 9243, 6546, 2055, 6450, 9246, 3143, 856, 7551, - 1646, 9323, 5361, 3834, 1319, 9746, 7840, 8141, 2671, - 5949, 5626, 8150, 3730, 796, 1535, 4730, 6915, 6972, - 2766, 9860, 7270, 8106, 2537, 797, 5364, 6621, 5648, - 8548, 5257, 7801, 1229, 8693, 4025, 9202, 2593, 7960, - 5369, 7148, 7940, 9714, 2580, 5784, 7297, 5127, 6802, - 622, 3354, 4226, 9817, 7738, 681, 3311, 7124, 4060, - 4152, 2763, 5861, 4574, 2518, 5568, 7103, 2604, 1355, - 4689, 7527, 1325, 8159, 152, 4720, 4594, 5586, 2562, - 6272, 3785, 7905, 2125, 7915, 9420, 6886, 4693, 2166, - 91, 6490, 4806, 3118, 2672, 66, 1234, 1981, 8691, - 253, 7553, 230, 3625, 559, 8846, 3213, 4483, 4148, - 6841, 4073, 5125, 2223, 9103, 1924, 720, 5412, 4803, - 3085, 8563, 9540, 9315, 3139, 4726, 2626, 5785, 2038, - 6901, 621, 2004, 8854, 1914, 4754, 192, 7935, 5989, - 7322, 8869, 9629, 1813, 2620, 6175, 6750, 6796, 7909, - 5597, 7944, 723, 3767, 4917, 8478, 1868, 384, 772, - 4519, 5478, 4767, 9866, 5553, 2050, 2777, 5275, 9876, - 2088, 6106, 2118, 5471, 7481, 4915, 1002, 5183, 2482, - 7207, 9343, 7965, 4201, 3702, 8145, 2757, 9560, 9718, - 7886, 52, 9777, 985, 9786, 8980, 8450, 853, 4486, - 1238, 3703, 3895, 824, 2368, 4761, 7044, 6976, 7259, - 3408, 4627, 1088, 2233, 2531, 9316, 3007, 7830, 1611, - 2937, 2309, 6160, 3027, 5106, 8960, 6283, 8248, 1806, - 338, 2652, 6414, 8464, 1253, 2007, 5826, 1700, 5189, - 3184, 9274, 7910, 2408, 2889, 1106, 4992, 8140, 123, - 5004, 6911, 4693, 4844, 2907, 9739, 1307, 4690, 6578, - 2490, 1409, 2982, 4236, 7452, 258, 9946, 1594, 9084, - 7506, 349, 1142, 8943, 4241, 4711, 6656, 2419, 7301, - 2041, 4288, 2852, 4987, 6067, 2342, 5031, 1240, 3326, - 809, 1883, 7214, 5948, 4514, 7898, 6737, 2244, 7695, - 5774, 7074, 4380, 338, 3037, 5989, 4689, 2151, 4935, - 8239, 8199, 8742, 8379, 8464, 7648, 7511, 9789, 542, - 2321, 4702, 7444, 4179, 5713, 3500, 440, 3372, 1553, - 7066, 237, 6333, 4983, 9001, 6220, 9958, 7810, 6862, - 5158, 12, 3298, 176, 3276, 228, 4205, 1817, 6762, - 3628, 9393, 8101, 3855, 4143, 1454, 9776, 905, 9823, - 5862, 2192, 4474, 7500, 7608, 7951, 8118, 1256, 8656, - 8920, 6853, 1192, 8163, 2750, 1899, 3066, 8521, 9238, - 1236, 5960, 5040, 5370, 4579, 9179, 1422, 3868, 5989, - 5883, 5467, 8069, 2067, 4039, 8465, 7018, 1691, 4312, - 4117, 4240, 6107, 9816, 943, 9542, 8621, 1040, 2403, - 8739, 7827, 6446, 9590, 3659, 3297, 3557, 6979, 1879, - 3579, 2959, 2874, 7456, 8140, 5191, 5810, 753, 
5881, - 2926, 3160, 7447, 9128, 3369, 9643, 866, 1094, 7856, - 2939, 2771, 903, 4019, 1481, 8141, 3059, 5105, 54, - 5797, 8704, 6592, 9683, 238, 941, 9073, 1570, 8149, - 620, 9506, 8170, 935, 287, 4429, 4319, 2568, 1551, - 2824, 4536, 8659, 8268, 4581, 7494, 1661, 2098, 5451, - 7560, 9224, 2281, 7897, 5655, 6711, 6313, 5649, 3624, - 1113, 3337, 9567, 6293, 4554, 4615, 390, 6190, 9206, - 2980, 9994, 298, 8200, 7196, 3147, 82, 7032, 1909, - 6954, 4345, 2438, 2065, 2556, 5426, 8978, 7465, 5220, - 5604, 788, 8414, 2663, 5742, 4974, 462, 1147, 9967, - 2553, 6062, 5166, 3507, 4599, 8067, 9145, 6263, 358, - 6192, 9886, 1258, 1355, 8678, 8303, 9017, 8920, 3248, - 9675, 9017, 2019, 2107, 8167, 8733, 3179, 4669, 528, - 6951, 1546, 9515, 8164, 7191, 1458, 7943, 2239, 8218, - 160, 1727, 1954, 2526, 1135, 2201, 9078, 3991, 6935, - 519, 208, 8883, 3452, 5310, 4150, 3705, 3822, 7401, - 8546, 6404, 3490, 1861, 743, 6663, 7115, 426, 6265, - 5338, 4421, 8479, 4579, 7854, 952, 4160, 5970, 2820, - 6175, 9201, 3918, 8342, 1561, 8841, 9347, 6312, 8811, - 802, 3751, 1504, 9841, 3459, 4709, 7585, 9176, 402, - 8777, 5093, 5366, 1341, 94, 1213, 3151, 5186, 6190, - 9472, 8769, 8920, 5876, 9844, 8088, 3443, 3158, 1522, - 2239, 5911, 1843, 878, 3231, 9219, 9744, 5314, 5187, - 7781, 4935, 1116, 2698, 2909, 7609, 34, 6226, 271, - 7563, 6665, 2928, 7844, 7394, 1971, 1334, 8649, 7028, - 9162, 7777, 8221, 7806, 7876, 9654, 9678, 288, 6495, - 8153, 8652, 8277, 3375, 9963, 69, 2407, 1311, 3891, - 6745, 2980, 7523, 832, 7750, 7534, 9660, 5289, 3385, - 484, 2425, 9377, 7231, 951, 3144, 5429, 1186, 4354, - 946, 3543, 2792, 7235, 7094, 6477, 4712, 3170, 5325, - 6937, 7529, 2618, 9824, 1671, 9430, 3905, 2384, 1668, - 2134, 5903, 8606, 6210, 736, 9653, 2364, 46, 8839, - 3680, 8561, 7610, 2537, 5694, 4491, 3070, 102, 7674, - 94, 8566, 9307, 6073, 5078, 5944, 6543, 2837, 5013, - 5856, 26, 4035, 1232, 2905, 644, 1632, 730, 2714, - 5211, 7999, 7356, 9942, 3050, 3488, 2030, 3261, 990, - 2503, 9947, 5773, 2097, 8736, 2427, 814, 8208, 7952, - 1425, 763, 3478, 2559, 2030, 7355, 2403, 8133, 1521, - 2534, 4093, 6128, 650, 4011, 373, 5526, 1945, 3671, - 9189, 5214, 508, 4155, 9541, 9972, 2781, 9076, 2880, - 3611, 4734, 9885, 7739, 6719, 4377, 7240, 3082, 5164, - 2874, 6457, 2759, 7393, 5423, 443, 9684, 5344, 4129, - 7342, 9307, 5758, 5216, 2857, 6451, 1753, 5670, 3430, - 7468, 6794, 5048, 2729, 9118, 9764, 1919, 5665, 767, - 1418, 1135, 8997, 7779, 2429, 9649, 5229, 7038, 811, - 1832, 5395, 8070, 6294, 826, 5781, 6439, 3995, 1648, - 5102, 6547, 1824, 6293, 2074, 1908, 2741, 7323, 5768, - 7042, 3737, 1819, 2188, 1660, 4954, 7523, 3959, 9961, - 6052, 7983, 9534, 9061, 2300, 6304, 2454, 8016, 2821, - 1085, 1757, 9862, 5100, 1573, 6827, 5866, 5759, 3861, - 1208, 5621, 2294, 4759, 9995, 7707, 978, 9925, 5367, - 9804, 9907, 9336, 1138, 7783, 5709, 9686, 6759, 5257, - 3612, 9813, 741, 965, 354, 5561, 3584, 1206, 483, - 5051, 7676, 6562, 4835, 7136, 2052, 4373, 7923, 9493, - 3447, 2194, 3934, 1186, 6674, 4835, 7761, 3711, 2508, - 8473, 8257, 9236, 9392, 9771, 837, 8611, 4349, 8338, - 9309, 8829, 4296, 7751, 6181, 8890, 2833, 7799, 5871, - 3122, 6337, 1417, 1292, 5004, 6019, 1196, 5382, 6389, - 3275]), - values=tensor([0.2237, 0.0937, 0.9630, 0.1477, 0.5729, 0.1998, 0.9498, - 0.4379, 0.8034, 0.0235, 0.9902, 0.3432, 0.0700, 0.1825, - 0.5442, 0.2482, 0.6281, 0.6943, 0.3994, 0.1696, 0.8276, - 0.4876, 0.7936, 0.7452, 0.0998, 0.2679, 0.8349, 0.3267, - 0.0044, 0.0261, 0.7294, 0.7269, 0.9705, 0.1244, 0.6999, - 0.0989, 0.9010, 0.3293, 0.8054, 0.8993, 0.1777, 
0.0858, - 0.0842, 0.5815, 0.0073, 0.8773, 0.5984, 0.9584, 0.9585, - 0.9084, 0.8010, 0.3890, 0.7561, 0.2065, 0.0869, 0.1566, - 0.5409, 0.0893, 0.4037, 0.9105, 0.3894, 0.1083, 0.0753, - 0.9158, 0.4006, 0.0324, 0.5094, 0.9774, 0.7974, 0.7971, - 0.4057, 0.2885, 0.0520, 0.2695, 0.5617, 0.7978, 0.7923, - 0.0655, 0.7159, 0.9957, 0.9205, 0.7614, 0.5249, 0.9947, - 0.7384, 0.2115, 0.5030, 0.2376, 0.3351, 0.0950, 0.7426, - 0.9408, 0.1119, 0.8775, 0.3131, 0.2215, 0.5071, 0.5270, - 0.2205, 0.8988, 0.4070, 0.6511, 0.6751, 0.8449, 0.8296, - 0.8009, 0.0238, 0.1850, 0.1927, 0.3420, 0.8088, 0.0148, - 0.6612, 0.4105, 0.1426, 0.8882, 0.2834, 0.4158, 0.8305, - 0.3284, 0.6229, 0.4603, 0.3207, 0.5603, 0.9335, 0.2077, - 0.3713, 0.2711, 0.3356, 0.1915, 0.1831, 0.6739, 0.8426, - 0.8846, 0.4385, 0.3712, 0.1660, 0.5165, 0.4995, 0.8937, - 0.3826, 0.4064, 0.9294, 0.5277, 0.4615, 0.3664, 0.3226, - 0.2012, 0.4698, 0.4989, 0.3815, 0.0360, 0.2844, 0.1905, - 0.6394, 0.1553, 0.2179, 0.9991, 0.4322, 0.8990, 0.8663, - 0.5714, 0.2616, 0.9118, 0.1532, 0.0794, 0.4487, 0.6562, - 0.8275, 0.3748, 0.9786, 0.2334, 0.4221, 0.0891, 0.4672, - 0.7959, 0.9325, 0.2602, 0.6239, 0.2789, 0.5279, 0.9698, - 0.2620, 0.9710, 0.0659, 0.7009, 0.4749, 0.0914, 0.8769, - 0.1261, 0.4952, 0.9824, 0.3145, 0.3912, 0.7378, 0.2369, - 0.7117, 0.3375, 0.4187, 0.9390, 0.2536, 0.2477, 0.8490, - 0.6862, 0.8212, 0.4774, 0.7214, 0.2067, 0.2441, 0.9886, - 0.6487, 0.5109, 0.5780, 0.0528, 0.8446, 0.2500, 0.0099, - 0.3498, 0.0603, 0.6328, 0.1396, 0.3979, 0.0437, 0.4013, - 0.4045, 0.6436, 0.0264, 0.5833, 0.5292, 0.4239, 0.4451, - 0.5547, 0.9082, 0.0133, 0.9732, 0.3574, 0.5837, 0.2932, - 0.7709, 0.0813, 0.8692, 0.3324, 0.5254, 0.2501, 0.7664, - 0.8569, 0.9977, 0.7986, 0.2734, 0.2131, 0.2578, 0.0864, - 0.4157, 0.7639, 0.4495, 0.8317, 0.0808, 0.3465, 0.3449, - 0.1795, 0.3111, 0.3583, 0.5361, 0.7493, 0.6307, 0.5749, - 0.7117, 0.5264, 0.3319, 0.4855, 0.7740, 0.0389, 0.7199, - 0.1757, 0.7320, 0.3881, 0.8473, 0.7211, 0.0089, 0.8715, - 0.8121, 0.6785, 0.3229, 0.7947, 0.1252, 0.4363, 0.7100, - 0.8129, 0.7275, 0.7882, 0.1837, 0.4836, 0.9665, 0.3166, - 0.8373, 0.4110, 0.9919, 0.7071, 0.1476, 0.4264, 0.6374, - 0.1843, 0.0355, 0.4476, 0.5128, 0.6553, 0.9636, 0.4657, - 0.3003, 0.5021, 0.2604, 0.1725, 0.4826, 0.8980, 0.9472, - 0.5359, 0.5329, 0.7276, 0.9162, 0.7128, 0.3214, 0.4430, - 0.0268, 0.4336, 0.6726, 0.3183, 0.6498, 0.8979, 0.4377, - 0.4508, 0.7071, 0.7374, 0.6664, 0.3207, 0.9118, 0.0607, - 0.6305, 0.1649, 0.7631, 0.3880, 0.1389, 0.3349, 0.7098, - 0.2548, 0.1348, 0.7807, 0.3721, 0.5610, 0.2096, 0.6681, - 0.4693, 0.1901, 0.1930, 0.1622, 0.2963, 0.0603, 0.2890, - 0.1230, 0.6505, 0.5223, 0.4052, 0.7992, 0.2060, 0.6551, - 0.9600, 0.8356, 0.2101, 0.1915, 0.5095, 0.9710, 0.1888, - 0.1645, 0.3590, 0.3490, 0.1788, 0.3894, 0.3501, 0.3981, - 0.8373, 0.9445, 0.9392, 0.0706, 0.5984, 0.1523, 0.8809, - 0.3914, 0.7705, 0.3109, 0.6448, 0.4013, 0.0518, 0.5764, - 0.9274, 0.9661, 0.6301, 0.2194, 0.3082, 0.6486, 0.9235, - 0.3097, 0.0386, 0.1441, 0.2865, 0.6143, 0.4563, 0.3660, - 0.2231, 0.3622, 0.4918, 0.2906, 0.1025, 0.3484, 0.1783, - 0.0793, 0.4740, 0.8350, 0.2433, 0.0513, 0.4263, 0.6836, - 0.8954, 0.2130, 0.9199, 0.7013, 0.7526, 0.7990, 0.3316, - 0.3824, 0.6560, 0.0665, 0.3473, 0.7352, 0.3958, 0.6717, - 0.1787, 0.9112, 0.3996, 0.1342, 0.8973, 0.7673, 0.3783, - 0.7610, 0.5775, 0.8738, 0.8225, 0.6836, 0.9352, 0.4659, - 0.0218, 0.6016, 0.7349, 0.5865, 0.4088, 0.3716, 0.3419, - 0.4366, 0.2265, 0.1625, 0.3329, 0.2090, 0.9832, 0.8807, - 0.9078, 0.0817, 0.5180, 0.2440, 0.2482, 0.7292, 0.3941, - 
0.8401, 0.0141, 0.7489, 0.3723, 0.0350, 0.2835, 0.3314, - 0.1685, 0.1121, 0.6204, 0.8138, 0.4395, 0.6725, 0.3304, - 0.8152, 0.9086, 0.9942, 0.4917, 0.4382, 0.2414, 0.3708, - 0.4786, 0.6864, 0.9521, 0.6060, 0.5707, 0.3741, 0.7770, - 0.9669, 0.3737, 0.9687, 0.3704, 0.3850, 0.7958, 0.7808, - 0.1612, 0.9230, 0.8525, 0.6799, 0.0805, 0.9623, 0.0798, - 0.1563, 0.7621, 0.9897, 0.4134, 0.9079, 0.9743, 0.2876, - 0.0876, 0.7910, 0.9308, 0.0610, 0.2187, 0.5615, 0.3229, - 0.9375, 0.5980, 0.9792, 0.8630, 0.9130, 0.2219, 0.2254, - 0.9540, 0.2937, 0.2448, 0.2621, 0.5306, 0.1461, 0.7337, - 0.4278, 0.4789, 0.9525, 0.8487, 0.1678, 0.8083, 0.8602, - 0.5427, 0.9150, 0.6338, 0.0596, 0.3932, 0.3519, 0.0682, - 0.2049, 0.6992, 0.1087, 0.4717, 0.7743, 0.1069, 0.9390, - 0.7215, 0.0549, 0.2173, 0.5676, 0.3363, 0.3647, 0.6589, - 0.8482, 0.0780, 0.4024, 0.0875, 0.0825, 0.2565, 0.7274, - 0.2689, 0.2341, 0.8282, 0.6484, 0.5740, 0.2458, 0.3418, - 0.6679, 0.0928, 0.8043, 0.0375, 0.0455, 0.1021, 0.0727, - 0.6034, 0.7073, 0.9501, 0.1827, 0.6546, 0.0622, 0.8697, - 0.6037, 0.6145, 0.7007, 0.9749, 0.2250, 0.9919, 0.6048, - 0.0930, 0.2305, 0.6915, 0.5382, 0.1474, 0.1571, 0.4897, - 0.4500, 0.3177, 0.8426, 0.5450, 0.6520, 0.4501, 0.7247, - 0.0507, 0.2850, 0.0212, 0.9359, 0.6125, 0.4131, 0.8209, - 0.5987, 0.8486, 0.6476, 0.7153, 0.8008, 0.6473, 0.9929, - 0.7390, 0.7887, 0.4013, 0.3755, 0.5104, 0.8562, 0.2127, - 0.2174, 0.7440, 0.3993, 0.8984, 0.4550, 0.9304, 0.9089, - 0.9769, 0.3961, 0.4588, 0.7478, 0.4077, 0.7220, 0.1553, - 0.0217, 0.3009, 0.6585, 0.6058, 0.6903, 0.5106, 0.3638, - 0.8147, 0.1664, 0.2008, 0.2597, 0.0618, 0.8442, 0.8484, - 0.5244, 0.1691, 0.8820, 0.2008, 0.5312, 0.1231, 0.2789, - 0.2030, 0.9130, 0.3220, 0.9544, 0.0942, 0.3181, 0.3072, - 0.8830, 0.5897, 0.4002, 0.4521, 0.5237, 0.1548, 0.2054, - 0.1146, 0.0603, 0.1840, 0.5799, 0.0520, 0.7941, 0.5931, - 0.8282, 0.2801, 0.8827, 0.2904, 0.6095, 0.1708, 0.2367, - 0.4340, 0.5936, 0.4575, 0.2879, 0.8225, 0.0562, 0.7618, - 0.0424, 0.3732, 0.7233, 0.7772, 0.4983, 0.8623, 0.4285, - 0.4221, 0.1088, 0.3304, 0.1568, 0.5143, 0.6215, 0.5503, - 0.7690, 0.3756, 0.5348, 0.7357, 0.8109, 0.3130, 0.0158, - 0.0023, 0.6162, 0.8535, 0.2750, 0.7217, 0.5469, 0.3265, - 0.1222, 0.4043, 0.3551, 0.1335, 0.2431, 0.0804, 0.8806, - 0.9161, 0.7825, 0.7689, 0.7354, 0.8643, 0.4190, 0.5343, - 0.3840, 0.7807, 0.6542, 0.2807, 0.0621, 0.5952, 0.8649, - 0.8056, 0.2886, 0.8492, 0.0497, 0.6903, 0.0986, 0.3481, - 0.9830, 0.1513, 0.7150, 0.5003, 0.6462, 0.3091, 0.5104, - 0.1090, 0.4875, 0.2487, 0.1973, 0.1005, 0.5965, 0.6503, - 0.6776, 0.1194, 0.6968, 0.4165, 0.3997, 0.2356, 0.9948, - 0.2469, 0.5798, 0.3444, 0.2015, 0.9011, 0.6742, 0.3975, - 0.8612, 0.7831, 0.0862, 0.4379, 0.0809, 0.7931, 0.0607, - 0.3830, 0.8447, 0.6462, 0.9840, 0.5433, 0.4101, 0.8260, - 0.5601, 0.6978, 0.1804, 0.4108, 0.0158, 0.7436, 0.8390, - 0.9475, 0.4933, 0.1487, 0.4644, 0.5877, 0.7798, 0.6315, - 0.5300, 0.7588, 0.5234, 0.0393, 0.3814, 0.5279, 0.6528, - 0.9869, 0.9540, 0.6270, 0.7013, 0.4305, 0.2085, 0.9008, - 0.4866, 0.4271, 0.1271, 0.3216, 0.3653, 0.9917, 0.2290, - 0.3330, 0.6020, 0.4543, 0.6559, 0.8582, 0.2937, 0.1913, - 0.3080, 0.3895, 0.3059, 0.0633, 0.6122, 0.1175, 0.6572, - 0.2524, 0.7704, 0.8210, 0.4794, 0.7538, 0.4031, 0.0275, - 0.5311, 0.0815, 0.4034, 0.6148, 0.1185, 0.7439, 0.4016, - 0.5608, 0.0706, 0.4835, 0.2505, 0.9363, 0.3741, 0.3124, - 0.4634, 0.9844, 0.4189, 0.3458, 0.6504, 0.0883, 0.9154, - 0.1987, 0.8102, 0.2403, 0.6352, 0.9134, 0.0740, 0.6217, - 0.7461, 0.9307, 0.2340, 0.9526, 0.2394, 0.1661, 0.5903, - 0.3531, 0.3579, 
0.3335, 0.4941, 0.0937, 0.0190, 0.7400, - 0.6048, 0.0736, 0.8480, 0.7056, 0.9624, 0.8952, 0.2590, - 0.2918, 0.9369, 0.3316, 0.8127, 0.4530, 0.8583, 0.8824, - 0.6823, 0.0554, 0.6007, 0.8647, 0.0307, 0.2993, 0.2476, - 0.7318, 0.8917, 0.9643, 0.6157, 0.2184, 0.8408, 0.3345, - 0.0712, 0.8159, 0.2459, 0.0991, 0.7444, 0.2222, 0.0014, - 0.1305, 0.8914, 0.0089, 0.5321, 0.7917, 0.7163, 0.9580, - 0.3624, 0.0142, 0.8937, 0.5115, 0.5049, 0.8434, 0.7234, - 0.7161, 0.2634, 0.8592, 0.3961, 0.5586, 0.2620, 0.0375, - 0.1665, 0.2915, 0.9139, 0.7009, 0.5095, 0.4519, 0.1213, - 0.3561, 0.0066, 0.4379, 0.3522, 0.6225, 0.6900, 0.8216, - 0.8841, 0.6553, 0.8193, 0.7688, 0.5104, 0.3926, 0.7388, - 0.4735, 0.1897, 0.7788, 0.8825, 0.9103, 0.2988, 0.1239, - 0.1792, 0.1266, 0.4818, 0.8893, 0.6604, 0.1883, 0.9700, - 0.5469, 0.0958, 0.2762, 0.2054, 0.3215, 0.7664]), + col_indices=tensor([2678, 5848, 8932, 6514, 1131, 7281, 3907, 9739, 4368, + 5455, 1563, 6741, 5038, 4433, 3496, 6764, 6783, 2096, + 5379, 5324, 5569, 7929, 3046, 3409, 3168, 2133, 5226, + 8680, 6560, 3626, 4039, 6513, 6765, 3907, 427, 4694, + 7551, 8505, 9656, 3083, 3989, 3546, 1570, 9918, 9896, + 335, 7782, 4759, 8628, 2630, 389, 9397, 2240, 4203, + 1948, 4116, 7432, 8011, 5499, 9353, 6102, 8714, 1872, + 9684, 1851, 1377, 2948, 6445, 8064, 2550, 6003, 934, + 8318, 8998, 1274, 7404, 8158, 7293, 3604, 2408, 6447, + 5245, 1953, 9908, 9236, 3159, 6613, 5123, 2772, 58, + 8158, 2914, 8872, 5012, 4440, 5933, 6728, 9276, 1884, + 6065, 1589, 565, 4394, 1612, 8762, 1583, 9811, 2191, + 8485, 332, 3797, 5984, 6485, 8030, 892, 6266, 9674, + 2139, 2659, 4709, 8571, 3555, 164, 4025, 9949, 195, + 4771, 135, 7191, 4183, 5031, 6805, 1340, 7672, 5235, + 530, 8176, 7205, 1183, 8638, 1346, 2590, 2005, 2202, + 7230, 632, 6331, 52, 6909, 3928, 339, 4933, 4157, + 4033, 1475, 8179, 2625, 6290, 3648, 1249, 9771, 6835, + 9081, 73, 8180, 62, 721, 3068, 7558, 7062, 9806, + 9992, 4714, 8737, 3254, 8028, 1512, 6801, 3742, 6874, + 7568, 4570, 4088, 5489, 1336, 8725, 8588, 811, 1680, + 7459, 3531, 473, 6870, 7194, 8917, 6286, 3103, 9711, + 8304, 9313, 5469, 1651, 1323, 5529, 3429, 9317, 9037, + 2258, 2925, 1605, 4087, 6314, 3409, 2468, 4645, 1651, + 2979, 8063, 4533, 9160, 3997, 264, 2239, 1980, 4712, + 3986, 2454, 9241, 7492, 358, 1561, 1764, 3237, 2128, + 5088, 9092, 919, 6871, 3872, 4410, 5133, 9706, 2227, + 499, 3451, 5390, 599, 943, 8741, 6801, 7625, 2680, + 6629, 563, 8907, 3280, 8235, 9177, 1523, 6255, 8817, + 4372, 267, 9572, 9950, 5968, 9992, 1628, 7005, 8550, + 5512, 8601, 785, 247, 9943, 6496, 312, 47, 6183, + 8205, 2403, 4595, 8512, 4517, 7618, 8133, 1008, 4724, + 7129, 8751, 3237, 8688, 7727, 8509, 9914, 7707, 8187, + 1458, 2539, 7790, 120, 9487, 4175, 2145, 2728, 1252, + 9331, 6861, 1428, 51, 9368, 4128, 9713, 2125, 2393, + 7502, 9806, 9848, 7181, 3492, 8043, 989, 9966, 1640, + 2160, 996, 6341, 5424, 7190, 2715, 3212, 2545, 6704, + 1163, 2990, 3564, 5587, 3653, 2866, 9029, 9388, 6499, + 6760, 4322, 3904, 3816, 7074, 8076, 654, 1157, 5562, + 1009, 6604, 616, 2995, 5906, 4431, 2743, 6209, 6739, + 7760, 9681, 3629, 4368, 6640, 2125, 2154, 7351, 616, + 2800, 8637, 236, 3715, 5387, 4785, 1531, 6341, 8371, + 3434, 7938, 3807, 8679, 1666, 5152, 9359, 4485, 9368, + 7754, 3822, 5441, 9075, 6328, 5113, 6317, 3497, 9597, + 4343, 6828, 4247, 3257, 6984, 9564, 5003, 6447, 918, + 3611, 9704, 1656, 4256, 2966, 6628, 7336, 3286, 3396, + 4655, 8761, 5872, 9641, 851, 9739, 9868, 7273, 6824, + 4512, 1119, 1271, 6159, 3090, 791, 3434, 4968, 8692, + 1078, 4350, 7105, 1059, 7272, 5390, 4013, 94, 
4195, + 8383, 8497, 5352, 6780, 5275, 2447, 2937, 853, 7521, + 1602, 6585, 249, 3957, 4342, 5291, 8788, 4179, 3252, + 2754, 5478, 7259, 49, 821, 5243, 3254, 2436, 7817, + 1870, 776, 2046, 2208, 3030, 9918, 3190, 1835, 9413, + 8504, 4648, 5601, 3390, 1744, 6805, 5786, 2013, 8829, + 8282, 4011, 5450, 7546, 8290, 6803, 2328, 4769, 2666, + 8476, 951, 2420, 1209, 2583, 4526, 7030, 2145, 470, + 7236, 8296, 8055, 9904, 236, 2598, 7209, 8631, 5520, + 2875, 1226, 1552, 2489, 5454, 9437, 8339, 9878, 9044, + 7754, 5712, 115, 7980, 3149, 3358, 3584, 9860, 2716, + 35, 3516, 8143, 8204, 4249, 3977, 1279, 1635, 5695, + 5553, 156, 5163, 5303, 8024, 5724, 6309, 7442, 6981, + 1482, 5061, 6377, 7653, 5741, 4512, 5439, 6406, 1758, + 6867, 478, 4490, 2339, 7376, 3188, 9830, 2584, 3630, + 6563, 3221, 1840, 9763, 1437, 3363, 1880, 9285, 9078, + 6954, 693, 6882, 939, 1083, 6111, 5822, 5804, 4912, + 4676, 6584, 679, 1703, 3121, 2876, 4463, 9332, 9872, + 7452, 8384, 2189, 9930, 3656, 812, 4368, 7703, 9371, + 811, 3942, 9089, 502, 3023, 6508, 5694, 3725, 5447, + 6931, 1617, 3715, 2876, 1609, 6738, 2941, 4572, 7351, + 7906, 8022, 1068, 3032, 6204, 1112, 5133, 5464, 2355, + 4984, 4046, 388, 5586, 4612, 7280, 8734, 8754, 1014, + 2205, 5645, 1298, 2729, 8222, 9219, 344, 3715, 6565, + 8800, 9706, 4480, 3826, 9697, 9454, 4394, 2413, 7994, + 7534, 2937, 815, 1688, 9468, 1579, 5140, 2088, 7174, + 9180, 6000, 262, 2796, 9505, 3993, 1286, 8540, 128, + 4586, 3938, 5329, 5519, 6604, 8591, 4699, 6798, 7194, + 7232, 8564, 7353, 1419, 7385, 4902, 7749, 3344, 993, + 3469, 919, 1238, 7882, 9695, 9845, 4627, 5478, 4672, + 881, 5947, 3076, 9844, 609, 5553, 5999, 7665, 3226, + 5457, 4855, 7443, 5121, 7647, 8584, 655, 5215, 1569, + 1648, 6795, 3717, 5720, 4762, 7146, 78, 1928, 1798, + 584, 2514, 7803, 3577, 5724, 6704, 5616, 5830, 1480, + 8193, 5152, 7183, 3956, 182, 6147, 9904, 453, 7425, + 9473, 2466, 6777, 1853, 7681, 8056, 1172, 208, 501, + 5162, 5454, 7591, 9528, 8414, 2073, 8777, 7187, 4165, + 2461, 9993, 5575, 7135, 3440, 2602, 8998, 8072, 3358, + 4906, 1703, 4958, 2883, 6753, 5420, 3979, 1672, 7690, + 3517, 8563, 3726, 1974, 9545, 1379, 4794, 7415, 5252, + 1567, 3734, 2155, 8986, 688, 6018, 9923, 3231, 3497, + 1952, 4404, 2898, 8594, 4361, 607, 8874, 2125, 8201, + 2101, 6590, 1018, 8311, 8335, 8605, 1095, 5163, 2342, + 489, 242, 805, 5500, 2030, 3748, 7566, 8448, 5849, + 1979, 692, 7234, 9645, 6098, 411, 4997, 4004, 2455, + 7396, 3160, 9449, 2896, 2241, 618, 8836, 5257, 7656, + 5412, 1935, 528, 6772, 4893, 7812, 6305, 7490, 3151, + 1203, 3122, 804, 7776, 5737, 382, 5995, 6332, 5793, + 2993, 5892, 3995, 9197, 9133, 8356, 3633, 482, 4515, + 9664, 5501, 5459, 5010, 3336, 1716, 5727, 8286, 1115, + 4288, 4852, 173, 7050, 3796, 8923, 3854, 9464, 2742, + 9054, 1036, 4739, 7463, 2898, 7540, 8656, 1371, 360, + 8627, 8931, 2125, 5688, 3221, 5867, 2323, 3331, 1738, + 5678, 2988, 446, 7760, 5978, 7580, 4093, 6545, 6826, + 9875, 6901, 7056, 5553, 9641, 9758, 2419, 3422, 1260, + 7749, 8311, 791, 7074, 2681, 3921, 7674, 1734, 3362, + 6902, 1648, 7880, 5365, 8236, 98, 965, 8965, 5437, + 2582, 7380, 7564, 9847, 4660, 2519, 4164, 4545, 265, + 4912, 9795, 4707, 7062, 2138, 4947, 5798, 6123, 5205, + 449, 2142, 9239, 9558, 5064, 2148, 9712, 687, 153, + 3028, 58, 2689, 1964, 8074, 7404, 4988, 918, 591, + 1127, 108, 7896, 950, 8567, 5527, 4509, 8789, 2188, + 2277, 4164, 2985, 499, 8205, 8449, 6305, 6305, 3977, + 3018]), + values=tensor([5.1838e-01, 4.8027e-01, 8.3918e-01, 7.7889e-02, + 5.8618e-01, 6.7111e-01, 4.8156e-01, 6.3769e-01, + 5.9556e-01, 
1.8322e-02, 6.8184e-01, 2.5539e-01, + 6.9785e-01, 8.5587e-01, 7.5961e-01, 5.8047e-01, + 7.5996e-01, 4.9800e-01, 6.6584e-01, 6.2707e-01, + 3.2854e-01, 5.1026e-01, 3.3865e-01, 1.8919e-01, + 4.9296e-01, 5.0958e-01, 5.6205e-01, 3.8230e-01, + 8.6673e-01, 6.7546e-01, 4.7278e-01, 7.7770e-01, + 9.8633e-02, 5.1322e-01, 9.3110e-01, 3.5684e-02, + 4.0685e-01, 4.6288e-01, 2.3401e-01, 3.7533e-01, + 7.8245e-01, 7.5875e-01, 1.8288e-03, 3.7060e-01, + 9.3250e-01, 4.6835e-01, 4.5945e-01, 2.2816e-01, + 1.6555e-01, 3.5822e-01, 6.7891e-01, 5.7081e-01, + 1.3394e-01, 4.8769e-01, 5.1662e-01, 4.9045e-01, + 8.5425e-01, 3.6198e-01, 4.1382e-01, 6.1676e-01, + 3.5515e-01, 3.6280e-01, 5.4649e-01, 2.2820e-01, + 8.9921e-01, 8.6646e-01, 3.1651e-01, 5.2641e-01, + 9.5379e-01, 4.8047e-01, 7.8712e-01, 1.9703e-02, + 8.3324e-01, 3.6549e-01, 3.9669e-01, 5.9268e-01, + 3.1271e-01, 1.6332e-01, 4.6643e-01, 4.4610e-02, + 8.7929e-02, 4.2486e-01, 2.0859e-01, 4.1765e-01, + 8.9762e-01, 6.0503e-02, 5.3495e-01, 5.8876e-01, + 9.6365e-01, 5.0177e-01, 5.1125e-01, 1.8336e-01, + 4.0672e-01, 5.2722e-01, 4.1700e-01, 7.2902e-01, + 9.0848e-01, 1.7116e-01, 1.3770e-01, 2.8766e-01, + 9.3382e-01, 8.1995e-01, 6.0914e-01, 4.1410e-01, + 4.3364e-01, 3.0832e-01, 3.9481e-02, 3.4106e-01, + 1.3828e-01, 8.8339e-01, 3.8656e-01, 7.5382e-01, + 6.9183e-01, 9.6590e-01, 5.3846e-01, 2.6012e-01, + 3.1593e-01, 5.7672e-01, 2.4808e-01, 8.2200e-01, + 9.6413e-01, 3.7000e-01, 1.3368e-01, 9.3703e-01, + 7.8369e-01, 9.2556e-01, 5.1803e-01, 8.1221e-01, + 4.3677e-01, 2.3575e-01, 3.8139e-01, 2.6701e-01, + 5.8012e-01, 1.7843e-01, 7.5107e-01, 4.9757e-02, + 2.1184e-01, 9.3472e-01, 4.6764e-01, 6.0262e-02, + 9.7295e-01, 1.9960e-01, 1.6947e-01, 5.9944e-01, + 7.5096e-01, 7.2836e-02, 8.6802e-01, 7.9771e-01, + 5.3295e-01, 1.1293e-01, 1.2028e-01, 5.0690e-01, + 7.8024e-01, 8.7793e-01, 4.2517e-01, 8.7115e-01, + 6.0427e-03, 7.4467e-01, 7.3429e-01, 6.6924e-02, + 7.1076e-01, 6.0690e-01, 6.3134e-01, 1.0168e-01, + 8.8732e-01, 5.1557e-01, 9.5840e-01, 2.7378e-01, + 8.7157e-01, 9.0730e-01, 7.6350e-01, 4.0141e-01, + 4.0315e-01, 8.9720e-01, 7.1849e-01, 7.4754e-01, + 7.7728e-01, 1.9576e-01, 6.6072e-01, 8.9195e-01, + 2.3741e-01, 6.8517e-03, 7.9553e-01, 9.5371e-02, + 5.2120e-01, 6.0801e-01, 3.2108e-01, 7.1724e-01, + 3.4707e-02, 9.8635e-01, 6.9417e-01, 8.9998e-01, + 6.6078e-01, 1.3657e-01, 4.6891e-01, 7.2558e-01, + 7.2927e-01, 2.3680e-01, 7.9246e-01, 7.9430e-01, + 8.8668e-01, 9.8271e-01, 3.7716e-01, 9.5253e-01, + 5.7418e-01, 9.1029e-01, 6.6881e-01, 8.7060e-01, + 5.8992e-01, 4.9313e-01, 4.7570e-01, 9.9628e-01, + 8.3694e-01, 8.5239e-01, 9.5707e-01, 3.0963e-01, + 7.1608e-01, 1.5938e-01, 1.0843e-01, 5.7840e-01, + 8.5853e-01, 2.2082e-01, 4.4360e-02, 3.6738e-02, + 9.2371e-01, 9.6308e-01, 4.8745e-01, 5.9983e-01, + 8.3662e-01, 1.9217e-01, 7.1628e-01, 6.8783e-01, + 7.2064e-01, 5.4602e-01, 7.8485e-01, 2.9078e-01, + 9.8070e-01, 7.2043e-01, 4.4170e-01, 1.8063e-01, + 9.3028e-01, 8.5442e-01, 8.4366e-01, 7.1744e-01, + 3.5699e-01, 2.9793e-01, 5.8327e-01, 3.7761e-01, + 8.2260e-01, 3.8203e-01, 4.2986e-01, 3.3714e-01, + 7.2078e-01, 5.7870e-01, 3.2117e-01, 4.8517e-01, + 9.2007e-01, 6.5561e-01, 8.2434e-01, 2.7642e-01, + 3.8746e-01, 6.8592e-01, 3.7973e-01, 9.5875e-01, + 8.6646e-01, 4.6623e-01, 7.4571e-02, 2.2778e-01, + 6.9281e-01, 9.8290e-01, 5.3202e-02, 3.5929e-02, + 4.2291e-01, 4.4277e-01, 1.5349e-01, 4.3231e-01, + 8.2011e-01, 7.2244e-01, 2.6651e-01, 9.8915e-02, + 3.6405e-01, 2.9954e-01, 2.7205e-02, 1.8403e-01, + 6.4235e-01, 8.1990e-01, 7.2246e-01, 6.3070e-01, + 7.1801e-01, 5.6306e-01, 9.1787e-01, 1.1705e-02, + 4.3537e-01, 
9.7378e-01, 6.2170e-01, 2.0596e-01, + 7.0586e-02, 2.0969e-02, 5.0558e-01, 1.3130e-01, + 5.7470e-01, 1.6101e-01, 8.3734e-01, 5.7816e-01, + 4.2951e-01, 6.3467e-02, 2.6600e-01, 8.1422e-01, + 8.1080e-01, 4.6367e-01, 8.2893e-01, 4.6013e-01, + 1.0682e-01, 6.1112e-01, 4.0725e-01, 4.2257e-02, + 4.0788e-01, 9.0715e-01, 7.9904e-01, 8.7199e-01, + 5.0507e-01, 2.9835e-01, 3.2475e-01, 3.6805e-01, + 5.7408e-01, 6.7076e-01, 5.4595e-01, 4.4910e-01, + 8.7908e-02, 1.9390e-01, 7.3581e-01, 6.7330e-01, + 8.4758e-01, 1.1538e-01, 5.5833e-01, 6.8745e-01, + 4.6518e-01, 9.8563e-01, 3.1315e-02, 7.1278e-01, + 4.2570e-01, 8.4469e-01, 2.9684e-01, 9.5037e-01, + 9.6684e-01, 7.7939e-01, 6.6961e-01, 1.9207e-01, + 9.0615e-02, 8.9770e-01, 9.3637e-01, 2.3711e-01, + 5.1238e-01, 8.7654e-01, 7.5039e-01, 6.0416e-01, + 6.1096e-01, 2.3286e-01, 1.4012e-01, 7.9028e-01, + 8.3652e-01, 3.0895e-01, 3.9595e-01, 1.1764e-01, + 4.1872e-01, 4.8034e-01, 6.8482e-01, 6.6337e-01, + 7.3923e-01, 8.6261e-01, 2.2457e-01, 3.6397e-01, + 6.2814e-01, 3.6800e-01, 7.0580e-01, 9.1407e-02, + 5.7080e-01, 7.9248e-01, 1.0129e-01, 7.3599e-01, + 3.5061e-01, 1.8993e-01, 6.2121e-01, 7.6740e-01, + 5.7787e-01, 6.0896e-01, 2.5264e-01, 2.0795e-01, + 1.3239e-02, 9.6359e-02, 4.8132e-02, 6.7274e-01, + 2.9477e-01, 1.4102e-01, 2.7292e-01, 1.0473e-01, + 7.7761e-01, 3.7502e-02, 8.0001e-01, 5.1192e-01, + 8.5536e-01, 1.3117e-01, 9.5645e-01, 6.8219e-01, + 6.2019e-01, 3.6495e-02, 8.4527e-01, 9.1630e-02, + 2.8423e-01, 9.2737e-01, 5.9931e-01, 5.4321e-01, + 6.9357e-01, 9.3312e-01, 1.7996e-01, 9.8590e-01, + 6.4823e-01, 2.9656e-01, 4.9547e-01, 9.4245e-01, + 3.6815e-01, 1.3356e-01, 7.8271e-01, 9.4834e-01, + 3.5856e-01, 6.4615e-01, 6.1428e-01, 6.3807e-01, + 1.7057e-01, 1.7740e-01, 8.9389e-01, 6.7825e-01, + 3.9499e-01, 1.9776e-01, 6.6894e-01, 1.7032e-01, + 5.7388e-01, 8.2183e-01, 3.6330e-01, 8.0880e-01, + 2.0305e-01, 1.5434e-01, 7.4128e-01, 3.8989e-01, + 4.3827e-01, 4.0977e-01, 2.9125e-01, 5.2847e-02, + 8.1745e-02, 1.0234e-01, 7.2035e-01, 7.1349e-01, + 3.0386e-01, 7.4587e-02, 9.9025e-01, 3.0347e-01, + 3.8606e-01, 6.8653e-01, 2.7580e-01, 9.6578e-01, + 7.2235e-01, 2.9692e-01, 4.0660e-01, 7.4790e-02, + 7.9686e-01, 8.8094e-01, 8.2564e-01, 4.6655e-01, + 9.6551e-01, 6.3026e-01, 6.0660e-01, 4.7454e-01, + 9.4613e-01, 3.7266e-01, 9.9796e-01, 4.7107e-01, + 1.5403e-02, 9.2743e-01, 4.7063e-02, 3.2584e-01, + 5.8369e-01, 5.6961e-01, 9.0423e-01, 3.6126e-01, + 6.0919e-01, 3.6585e-01, 7.0895e-01, 4.5042e-01, + 3.3875e-01, 5.6184e-01, 9.9711e-01, 5.5435e-01, + 2.8074e-01, 4.0190e-02, 1.3606e-01, 1.6852e-01, + 8.2050e-01, 7.5050e-01, 4.8556e-01, 5.5772e-01, + 3.9981e-01, 6.9659e-01, 4.6937e-01, 8.7705e-02, + 8.6330e-01, 9.6237e-01, 2.6430e-01, 5.9624e-01, + 1.4870e-01, 4.5843e-01, 9.1351e-01, 8.2778e-01, + 8.2247e-01, 2.9720e-01, 8.7732e-01, 2.6596e-01, + 3.3265e-01, 4.7720e-01, 7.2502e-01, 9.4295e-01, + 7.3038e-01, 6.0235e-02, 3.2278e-02, 1.9268e-01, + 4.3019e-01, 1.6406e-01, 2.2580e-02, 4.7677e-01, + 9.3483e-01, 3.5249e-01, 7.2833e-01, 9.5668e-01, + 6.1709e-01, 8.7948e-01, 2.0670e-01, 9.5341e-01, + 3.4142e-01, 5.5463e-01, 4.1957e-01, 3.3458e-01, + 5.0988e-01, 4.9233e-01, 7.9123e-01, 9.4013e-01, + 4.8199e-01, 3.0123e-02, 7.1299e-01, 6.7158e-01, + 7.2073e-01, 3.4907e-01, 7.6090e-02, 4.6263e-01, + 7.1080e-01, 3.5478e-01, 4.2699e-01, 5.3285e-01, + 3.7627e-01, 6.5108e-01, 7.6983e-01, 5.0238e-01, + 3.7186e-01, 7.4101e-01, 1.0180e-01, 9.1081e-01, + 8.2214e-01, 5.3804e-01, 3.9786e-01, 3.1840e-01, + 9.4001e-01, 4.6192e-01, 7.6120e-01, 7.2666e-01, + 3.3125e-01, 4.6892e-01, 6.8600e-01, 3.5894e-01, + 9.2476e-01, 
9.6411e-01, 1.9643e-01, 9.1677e-01, + 4.4028e-01, 8.2110e-01, 5.6583e-01, 2.9195e-01, + 4.4459e-01, 8.7108e-01, 4.8275e-02, 4.4066e-01, + 8.0401e-01, 5.6533e-01, 4.4520e-01, 1.5128e-01, + 1.3540e-01, 3.4559e-01, 1.9536e-01, 4.8147e-01, + 5.9237e-02, 7.0397e-01, 3.0700e-01, 6.3088e-02, + 2.0806e-01, 7.4766e-01, 8.4211e-02, 4.3882e-01, + 5.8286e-01, 3.6017e-01, 6.6680e-01, 6.2062e-01, + 8.0258e-01, 4.7332e-01, 9.9449e-01, 8.1089e-01, + 7.2742e-01, 9.5543e-01, 4.2128e-01, 8.2507e-01, + 9.4724e-01, 1.2972e-01, 3.9554e-01, 3.9059e-01, + 7.6634e-01, 6.3078e-01, 1.3847e-01, 2.0194e-01, + 8.3443e-01, 8.0120e-01, 8.3765e-01, 3.7910e-01, + 1.9301e-01, 3.5658e-01, 2.4781e-01, 6.1760e-01, + 8.3152e-01, 6.7280e-01, 4.1041e-01, 5.7212e-01, + 6.5934e-01, 9.8193e-01, 2.6474e-01, 7.2690e-01, + 9.9811e-01, 2.6560e-01, 2.4037e-01, 4.5065e-01, + 1.2872e-01, 3.7925e-01, 2.5061e-01, 4.0492e-01, + 8.5432e-01, 7.4584e-01, 1.8191e-01, 6.1161e-01, + 4.9870e-01, 3.3714e-01, 3.2297e-01, 8.2104e-01, + 4.9746e-01, 9.5588e-01, 2.4725e-01, 7.3493e-01, + 3.4350e-01, 7.9371e-01, 5.0533e-01, 8.5569e-01, + 5.0645e-01, 4.8222e-01, 7.0081e-01, 5.3757e-01, + 3.8262e-01, 8.2935e-01, 7.1445e-01, 2.8785e-01, + 8.3693e-01, 9.8657e-01, 8.9492e-01, 9.0828e-01, + 4.1198e-01, 3.7749e-01, 2.1366e-01, 9.1028e-01, + 8.2317e-01, 6.8193e-01, 6.7945e-01, 3.5136e-01, + 8.4090e-01, 3.5647e-02, 7.7684e-01, 8.3024e-01, + 9.2244e-01, 9.2298e-01, 8.9766e-01, 8.5199e-01, + 4.9978e-01, 8.0846e-01, 2.1637e-01, 4.0517e-01, + 2.1007e-01, 2.1377e-01, 3.5364e-01, 5.9078e-01, + 5.1643e-01, 3.5306e-01, 5.4722e-02, 5.5017e-01, + 4.3671e-01, 3.9569e-01, 7.7582e-02, 5.6536e-01, + 1.2087e-02, 8.6814e-01, 2.7761e-02, 2.7180e-01, + 3.3139e-01, 7.8569e-01, 1.9535e-01, 8.5077e-01, + 9.8790e-01, 1.7846e-01, 1.9440e-01, 5.7366e-01, + 6.2797e-01, 3.7094e-01, 9.8894e-01, 7.5262e-01, + 4.1880e-01, 8.1698e-01, 8.5831e-01, 2.2690e-01, + 3.5308e-01, 9.0459e-01, 6.8343e-01, 8.1904e-02, + 8.9611e-01, 5.1413e-01, 1.9147e-01, 1.1419e-01, + 5.4172e-01, 7.3272e-01, 1.5703e-01, 7.0208e-01, + 1.5550e-01, 4.2626e-01, 2.4197e-01, 4.2718e-01, + 6.2171e-01, 1.2803e-01, 7.1940e-01, 5.6744e-01, + 3.2125e-01, 7.4377e-01, 3.1274e-01, 3.4274e-01, + 4.7478e-01, 6.7334e-01, 1.3565e-01, 3.4904e-01, + 5.7371e-01, 2.4459e-01, 8.9790e-01, 5.5714e-03, + 9.0159e-01, 9.9937e-01, 6.1757e-01, 8.5558e-02, + 1.4341e-01, 7.5241e-01, 1.8058e-01, 1.9063e-01, + 6.7688e-01, 9.4618e-01, 9.1275e-01, 3.2585e-01, + 6.6877e-01, 2.2418e-01, 6.8450e-01, 5.1895e-01, + 2.1437e-02, 8.8828e-01, 4.8335e-01, 7.4366e-01, + 9.8953e-01, 5.8257e-01, 2.9039e-04, 1.1123e-01, + 5.1846e-02, 1.3108e-01, 4.2323e-01, 6.5304e-01, + 1.8625e-01, 3.7199e-02, 7.0216e-01, 6.0287e-01, + 2.3789e-01, 9.2863e-01, 1.2348e-01, 8.6839e-01, + 4.9520e-01, 2.7288e-01, 4.4592e-01, 4.2396e-01, + 6.0645e-01, 6.2528e-01, 6.5293e-01, 9.2566e-01, + 9.4540e-01, 5.8610e-02, 8.7155e-01, 2.2119e-01, + 8.7605e-01, 8.8102e-01, 8.2781e-01, 3.8576e-01, + 6.3776e-01, 7.8203e-01, 1.0462e-02, 9.1986e-01, + 6.5027e-01, 2.7230e-01, 2.1864e-01, 2.5327e-01, + 9.8263e-01, 4.8835e-01, 8.8863e-01, 5.6728e-01, + 8.0559e-01, 1.2815e-01, 4.6205e-01, 8.5618e-01, + 1.8311e-01, 4.8907e-01, 6.4311e-01, 3.8609e-01, + 9.3749e-01, 8.9364e-01, 7.1836e-01, 3.0713e-01, + 5.5364e-01, 7.7259e-01, 1.0362e-01, 2.7281e-01, + 2.1067e-01, 8.0247e-01, 1.0462e-01, 6.2383e-03, + 4.1906e-01, 4.6448e-02, 8.4964e-01, 1.8122e-01, + 6.4569e-01, 6.6590e-01, 9.6978e-01, 5.3446e-01, + 9.2632e-02, 8.2326e-01, 2.3167e-02, 7.1511e-01, + 7.4454e-01, 8.3709e-01, 2.7395e-01, 3.9752e-01, + 2.3413e-01, 
4.1262e-02, 8.1277e-01, 8.4946e-01, + 5.3420e-01, 2.8694e-01, 6.3996e-01, 8.6404e-01, + 7.7503e-01, 8.7692e-01, 4.2612e-01, 4.9620e-02, + 3.3084e-01, 8.5854e-01, 7.7131e-01, 1.9680e-01, + 1.6464e-01, 9.8493e-01, 6.7014e-01, 9.6494e-01, + 7.2374e-01, 5.8901e-01, 7.9710e-01, 6.1744e-01, + 9.8465e-01, 9.2253e-01, 3.8584e-01, 4.0856e-01, + 1.8180e-01, 8.8451e-01, 5.0687e-01, 7.6331e-01, + 2.9799e-01, 3.8883e-01, 9.8601e-01, 6.4816e-01, + 4.7480e-03, 2.3454e-01, 3.3701e-02, 6.5933e-01, + 5.6913e-01, 1.7944e-01, 7.1449e-01, 8.1288e-01, + 4.0565e-01, 1.1448e-01, 7.6631e-02, 5.9701e-01, + 9.9955e-01, 8.5047e-01, 6.2163e-01, 3.4651e-02, + 4.7043e-01, 6.7147e-01, 4.1853e-01, 5.6234e-01, + 4.9969e-01, 8.8040e-01, 9.8673e-01, 8.3060e-02, + 8.6189e-02, 9.1021e-01, 8.1437e-01, 9.1474e-01, + 1.0110e-01, 4.2937e-01, 6.7076e-02, 5.5100e-01, + 1.4978e-01, 2.3091e-01, 9.2119e-01, 8.8302e-01, + 7.1686e-01, 8.5039e-01, 6.3669e-01, 6.9416e-01, + 9.9660e-01, 4.4720e-01, 4.8955e-01, 2.9264e-01, + 1.2478e-01, 6.6834e-01, 1.5549e-01, 8.6643e-01, + 4.8584e-01, 6.2762e-01, 8.9681e-01, 9.9150e-01, + 8.9800e-01, 4.9195e-02, 3.4261e-02, 9.9473e-02, + 6.5421e-01, 3.1869e-02, 7.3417e-01, 7.2239e-01, + 5.2055e-01, 7.2939e-01, 4.0424e-01, 5.3910e-01, + 1.4767e-01, 2.2639e-01, 9.4374e-01, 7.8686e-01, + 1.2226e-02, 5.6413e-01, 5.1201e-01, 4.0050e-01, + 7.2622e-01, 7.4764e-01, 3.7669e-01, 4.7057e-01, + 3.6367e-02, 9.6347e-01, 1.0702e-01, 9.4801e-01, + 5.9118e-01, 1.3243e-01, 2.9751e-01, 3.7908e-01, + 6.3144e-01, 4.1336e-01, 4.4719e-01, 9.2700e-01, + 5.3549e-01, 6.8263e-01, 1.0998e-01, 7.2406e-01, + 8.0101e-01, 2.8276e-01, 5.0451e-01, 9.4162e-01, + 3.8205e-01, 3.9509e-01, 2.6665e-02, 9.7060e-01, + 1.1248e-01, 7.1314e-01, 9.2116e-01, 4.1416e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.5980, 0.9421, 0.9493, ..., 0.6518, 0.5202, 0.4457]) +tensor([0.0845, 0.2230, 0.9209, ..., 0.5627, 0.3340, 0.7945]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1026,268 +1026,375 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.272708654403687 seconds +Time: 10.510953187942505 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([1701, 5200, 151, 7106, 6780, 2055, 1375, 8242, 376, - 4556, 1864, 1124, 4828, 55, 5866, 9752, 5516, 7381, - 1233, 540, 636, 7075, 5707, 6021, 4061, 4372, 9110, - 8043, 6636, 2721, 2135, 174, 4881, 658, 3469, 6307, - 6633, 7241, 1540, 9161, 3585, 3028, 1940, 352, 9272, - 3623, 3390, 9183, 6840, 1293, 3398, 5823, 704, 5011, - 1738, 9159, 5732, 4747, 2802, 1249, 5171, 5364, 7066, - 4818, 4723, 9883, 1229, 9311, 6671, 7348, 8536, 5413, - 6, 4030, 8060, 4147, 5081, 6166, 1683, 8447, 8806, - 8357, 9243, 6546, 2055, 6450, 9246, 3143, 856, 7551, - 1646, 9323, 5361, 3834, 1319, 9746, 7840, 8141, 2671, - 5949, 5626, 8150, 3730, 796, 1535, 4730, 6915, 6972, - 2766, 9860, 7270, 8106, 2537, 797, 5364, 6621, 5648, - 8548, 5257, 7801, 1229, 8693, 4025, 9202, 2593, 7960, - 5369, 7148, 7940, 9714, 2580, 5784, 7297, 5127, 6802, - 622, 3354, 4226, 9817, 7738, 681, 3311, 7124, 4060, - 4152, 2763, 5861, 4574, 2518, 5568, 7103, 2604, 1355, - 4689, 7527, 1325, 8159, 152, 4720, 4594, 5586, 2562, - 6272, 3785, 7905, 2125, 7915, 9420, 6886, 4693, 2166, - 91, 6490, 4806, 3118, 2672, 66, 1234, 1981, 8691, - 253, 7553, 230, 3625, 559, 8846, 3213, 4483, 4148, - 6841, 4073, 5125, 2223, 9103, 1924, 720, 5412, 4803, - 3085, 8563, 9540, 9315, 3139, 4726, 2626, 5785, 2038, - 6901, 621, 2004, 8854, 1914, 4754, 192, 7935, 5989, - 7322, 8869, 9629, 1813, 2620, 6175, 6750, 6796, 7909, - 5597, 7944, 723, 3767, 4917, 8478, 1868, 384, 772, - 4519, 5478, 4767, 9866, 5553, 2050, 2777, 5275, 9876, - 2088, 6106, 2118, 5471, 7481, 4915, 1002, 5183, 2482, - 7207, 9343, 7965, 4201, 3702, 8145, 2757, 9560, 9718, - 7886, 52, 9777, 985, 9786, 8980, 8450, 853, 4486, - 1238, 3703, 3895, 824, 2368, 4761, 7044, 6976, 7259, - 3408, 4627, 1088, 2233, 2531, 9316, 3007, 7830, 1611, - 2937, 2309, 6160, 3027, 5106, 8960, 6283, 8248, 1806, - 338, 2652, 6414, 8464, 1253, 2007, 5826, 1700, 5189, - 3184, 9274, 7910, 2408, 2889, 1106, 4992, 8140, 123, - 5004, 6911, 4693, 4844, 2907, 9739, 1307, 4690, 6578, - 2490, 1409, 2982, 4236, 7452, 258, 9946, 1594, 9084, - 7506, 349, 1142, 8943, 4241, 4711, 6656, 2419, 7301, - 2041, 4288, 2852, 4987, 6067, 2342, 5031, 1240, 3326, - 809, 1883, 7214, 5948, 4514, 7898, 6737, 2244, 7695, - 5774, 7074, 4380, 338, 3037, 5989, 4689, 2151, 4935, - 8239, 8199, 8742, 8379, 8464, 7648, 7511, 9789, 542, - 2321, 4702, 7444, 4179, 5713, 3500, 440, 3372, 1553, - 7066, 237, 6333, 4983, 9001, 6220, 9958, 7810, 6862, - 5158, 12, 3298, 176, 3276, 228, 4205, 1817, 6762, - 3628, 9393, 8101, 3855, 4143, 1454, 9776, 905, 9823, - 5862, 2192, 4474, 7500, 7608, 7951, 8118, 1256, 8656, - 8920, 6853, 1192, 8163, 2750, 1899, 3066, 8521, 9238, - 1236, 5960, 5040, 5370, 4579, 9179, 1422, 3868, 5989, - 5883, 5467, 8069, 2067, 4039, 8465, 7018, 1691, 4312, - 4117, 4240, 6107, 9816, 943, 9542, 8621, 1040, 2403, - 8739, 7827, 6446, 9590, 3659, 3297, 3557, 6979, 1879, - 3579, 2959, 2874, 7456, 8140, 5191, 5810, 753, 5881, - 2926, 3160, 7447, 9128, 3369, 9643, 866, 1094, 7856, - 2939, 2771, 903, 4019, 1481, 8141, 3059, 5105, 54, - 5797, 8704, 6592, 9683, 238, 941, 9073, 1570, 8149, - 620, 9506, 8170, 935, 287, 4429, 4319, 2568, 1551, - 2824, 4536, 8659, 8268, 4581, 7494, 1661, 2098, 5451, - 7560, 9224, 2281, 7897, 5655, 6711, 6313, 5649, 3624, - 1113, 3337, 9567, 6293, 4554, 4615, 390, 6190, 9206, - 2980, 9994, 298, 8200, 7196, 3147, 82, 7032, 1909, - 6954, 4345, 2438, 2065, 2556, 5426, 8978, 7465, 5220, - 5604, 788, 
8414, 2663, 5742, 4974, 462, 1147, 9967, - 2553, 6062, 5166, 3507, 4599, 8067, 9145, 6263, 358, - 6192, 9886, 1258, 1355, 8678, 8303, 9017, 8920, 3248, - 9675, 9017, 2019, 2107, 8167, 8733, 3179, 4669, 528, - 6951, 1546, 9515, 8164, 7191, 1458, 7943, 2239, 8218, - 160, 1727, 1954, 2526, 1135, 2201, 9078, 3991, 6935, - 519, 208, 8883, 3452, 5310, 4150, 3705, 3822, 7401, - 8546, 6404, 3490, 1861, 743, 6663, 7115, 426, 6265, - 5338, 4421, 8479, 4579, 7854, 952, 4160, 5970, 2820, - 6175, 9201, 3918, 8342, 1561, 8841, 9347, 6312, 8811, - 802, 3751, 1504, 9841, 3459, 4709, 7585, 9176, 402, - 8777, 5093, 5366, 1341, 94, 1213, 3151, 5186, 6190, - 9472, 8769, 8920, 5876, 9844, 8088, 3443, 3158, 1522, - 2239, 5911, 1843, 878, 3231, 9219, 9744, 5314, 5187, - 7781, 4935, 1116, 2698, 2909, 7609, 34, 6226, 271, - 7563, 6665, 2928, 7844, 7394, 1971, 1334, 8649, 7028, - 9162, 7777, 8221, 7806, 7876, 9654, 9678, 288, 6495, - 8153, 8652, 8277, 3375, 9963, 69, 2407, 1311, 3891, - 6745, 2980, 7523, 832, 7750, 7534, 9660, 5289, 3385, - 484, 2425, 9377, 7231, 951, 3144, 5429, 1186, 4354, - 946, 3543, 2792, 7235, 7094, 6477, 4712, 3170, 5325, - 6937, 7529, 2618, 9824, 1671, 9430, 3905, 2384, 1668, - 2134, 5903, 8606, 6210, 736, 9653, 2364, 46, 8839, - 3680, 8561, 7610, 2537, 5694, 4491, 3070, 102, 7674, - 94, 8566, 9307, 6073, 5078, 5944, 6543, 2837, 5013, - 5856, 26, 4035, 1232, 2905, 644, 1632, 730, 2714, - 5211, 7999, 7356, 9942, 3050, 3488, 2030, 3261, 990, - 2503, 9947, 5773, 2097, 8736, 2427, 814, 8208, 7952, - 1425, 763, 3478, 2559, 2030, 7355, 2403, 8133, 1521, - 2534, 4093, 6128, 650, 4011, 373, 5526, 1945, 3671, - 9189, 5214, 508, 4155, 9541, 9972, 2781, 9076, 2880, - 3611, 4734, 9885, 7739, 6719, 4377, 7240, 3082, 5164, - 2874, 6457, 2759, 7393, 5423, 443, 9684, 5344, 4129, - 7342, 9307, 5758, 5216, 2857, 6451, 1753, 5670, 3430, - 7468, 6794, 5048, 2729, 9118, 9764, 1919, 5665, 767, - 1418, 1135, 8997, 7779, 2429, 9649, 5229, 7038, 811, - 1832, 5395, 8070, 6294, 826, 5781, 6439, 3995, 1648, - 5102, 6547, 1824, 6293, 2074, 1908, 2741, 7323, 5768, - 7042, 3737, 1819, 2188, 1660, 4954, 7523, 3959, 9961, - 6052, 7983, 9534, 9061, 2300, 6304, 2454, 8016, 2821, - 1085, 1757, 9862, 5100, 1573, 6827, 5866, 5759, 3861, - 1208, 5621, 2294, 4759, 9995, 7707, 978, 9925, 5367, - 9804, 9907, 9336, 1138, 7783, 5709, 9686, 6759, 5257, - 3612, 9813, 741, 965, 354, 5561, 3584, 1206, 483, - 5051, 7676, 6562, 4835, 7136, 2052, 4373, 7923, 9493, - 3447, 2194, 3934, 1186, 6674, 4835, 7761, 3711, 2508, - 8473, 8257, 9236, 9392, 9771, 837, 8611, 4349, 8338, - 9309, 8829, 4296, 7751, 6181, 8890, 2833, 7799, 5871, - 3122, 6337, 1417, 1292, 5004, 6019, 1196, 5382, 6389, - 3275]), - values=tensor([0.2237, 0.0937, 0.9630, 0.1477, 0.5729, 0.1998, 0.9498, - 0.4379, 0.8034, 0.0235, 0.9902, 0.3432, 0.0700, 0.1825, - 0.5442, 0.2482, 0.6281, 0.6943, 0.3994, 0.1696, 0.8276, - 0.4876, 0.7936, 0.7452, 0.0998, 0.2679, 0.8349, 0.3267, - 0.0044, 0.0261, 0.7294, 0.7269, 0.9705, 0.1244, 0.6999, - 0.0989, 0.9010, 0.3293, 0.8054, 0.8993, 0.1777, 0.0858, - 0.0842, 0.5815, 0.0073, 0.8773, 0.5984, 0.9584, 0.9585, - 0.9084, 0.8010, 0.3890, 0.7561, 0.2065, 0.0869, 0.1566, - 0.5409, 0.0893, 0.4037, 0.9105, 0.3894, 0.1083, 0.0753, - 0.9158, 0.4006, 0.0324, 0.5094, 0.9774, 0.7974, 0.7971, - 0.4057, 0.2885, 0.0520, 0.2695, 0.5617, 0.7978, 0.7923, - 0.0655, 0.7159, 0.9957, 0.9205, 0.7614, 0.5249, 0.9947, - 0.7384, 0.2115, 0.5030, 0.2376, 0.3351, 0.0950, 0.7426, - 0.9408, 0.1119, 0.8775, 0.3131, 0.2215, 0.5071, 0.5270, - 0.2205, 0.8988, 0.4070, 0.6511, 
0.6751, 0.8449, 0.8296, - 0.8009, 0.0238, 0.1850, 0.1927, 0.3420, 0.8088, 0.0148, - 0.6612, 0.4105, 0.1426, 0.8882, 0.2834, 0.4158, 0.8305, - 0.3284, 0.6229, 0.4603, 0.3207, 0.5603, 0.9335, 0.2077, - 0.3713, 0.2711, 0.3356, 0.1915, 0.1831, 0.6739, 0.8426, - 0.8846, 0.4385, 0.3712, 0.1660, 0.5165, 0.4995, 0.8937, - 0.3826, 0.4064, 0.9294, 0.5277, 0.4615, 0.3664, 0.3226, - 0.2012, 0.4698, 0.4989, 0.3815, 0.0360, 0.2844, 0.1905, - 0.6394, 0.1553, 0.2179, 0.9991, 0.4322, 0.8990, 0.8663, - 0.5714, 0.2616, 0.9118, 0.1532, 0.0794, 0.4487, 0.6562, - 0.8275, 0.3748, 0.9786, 0.2334, 0.4221, 0.0891, 0.4672, - 0.7959, 0.9325, 0.2602, 0.6239, 0.2789, 0.5279, 0.9698, - 0.2620, 0.9710, 0.0659, 0.7009, 0.4749, 0.0914, 0.8769, - 0.1261, 0.4952, 0.9824, 0.3145, 0.3912, 0.7378, 0.2369, - 0.7117, 0.3375, 0.4187, 0.9390, 0.2536, 0.2477, 0.8490, - 0.6862, 0.8212, 0.4774, 0.7214, 0.2067, 0.2441, 0.9886, - 0.6487, 0.5109, 0.5780, 0.0528, 0.8446, 0.2500, 0.0099, - 0.3498, 0.0603, 0.6328, 0.1396, 0.3979, 0.0437, 0.4013, - 0.4045, 0.6436, 0.0264, 0.5833, 0.5292, 0.4239, 0.4451, - 0.5547, 0.9082, 0.0133, 0.9732, 0.3574, 0.5837, 0.2932, - 0.7709, 0.0813, 0.8692, 0.3324, 0.5254, 0.2501, 0.7664, - 0.8569, 0.9977, 0.7986, 0.2734, 0.2131, 0.2578, 0.0864, - 0.4157, 0.7639, 0.4495, 0.8317, 0.0808, 0.3465, 0.3449, - 0.1795, 0.3111, 0.3583, 0.5361, 0.7493, 0.6307, 0.5749, - 0.7117, 0.5264, 0.3319, 0.4855, 0.7740, 0.0389, 0.7199, - 0.1757, 0.7320, 0.3881, 0.8473, 0.7211, 0.0089, 0.8715, - 0.8121, 0.6785, 0.3229, 0.7947, 0.1252, 0.4363, 0.7100, - 0.8129, 0.7275, 0.7882, 0.1837, 0.4836, 0.9665, 0.3166, - 0.8373, 0.4110, 0.9919, 0.7071, 0.1476, 0.4264, 0.6374, - 0.1843, 0.0355, 0.4476, 0.5128, 0.6553, 0.9636, 0.4657, - 0.3003, 0.5021, 0.2604, 0.1725, 0.4826, 0.8980, 0.9472, - 0.5359, 0.5329, 0.7276, 0.9162, 0.7128, 0.3214, 0.4430, - 0.0268, 0.4336, 0.6726, 0.3183, 0.6498, 0.8979, 0.4377, - 0.4508, 0.7071, 0.7374, 0.6664, 0.3207, 0.9118, 0.0607, - 0.6305, 0.1649, 0.7631, 0.3880, 0.1389, 0.3349, 0.7098, - 0.2548, 0.1348, 0.7807, 0.3721, 0.5610, 0.2096, 0.6681, - 0.4693, 0.1901, 0.1930, 0.1622, 0.2963, 0.0603, 0.2890, - 0.1230, 0.6505, 0.5223, 0.4052, 0.7992, 0.2060, 0.6551, - 0.9600, 0.8356, 0.2101, 0.1915, 0.5095, 0.9710, 0.1888, - 0.1645, 0.3590, 0.3490, 0.1788, 0.3894, 0.3501, 0.3981, - 0.8373, 0.9445, 0.9392, 0.0706, 0.5984, 0.1523, 0.8809, - 0.3914, 0.7705, 0.3109, 0.6448, 0.4013, 0.0518, 0.5764, - 0.9274, 0.9661, 0.6301, 0.2194, 0.3082, 0.6486, 0.9235, - 0.3097, 0.0386, 0.1441, 0.2865, 0.6143, 0.4563, 0.3660, - 0.2231, 0.3622, 0.4918, 0.2906, 0.1025, 0.3484, 0.1783, - 0.0793, 0.4740, 0.8350, 0.2433, 0.0513, 0.4263, 0.6836, - 0.8954, 0.2130, 0.9199, 0.7013, 0.7526, 0.7990, 0.3316, - 0.3824, 0.6560, 0.0665, 0.3473, 0.7352, 0.3958, 0.6717, - 0.1787, 0.9112, 0.3996, 0.1342, 0.8973, 0.7673, 0.3783, - 0.7610, 0.5775, 0.8738, 0.8225, 0.6836, 0.9352, 0.4659, - 0.0218, 0.6016, 0.7349, 0.5865, 0.4088, 0.3716, 0.3419, - 0.4366, 0.2265, 0.1625, 0.3329, 0.2090, 0.9832, 0.8807, - 0.9078, 0.0817, 0.5180, 0.2440, 0.2482, 0.7292, 0.3941, - 0.8401, 0.0141, 0.7489, 0.3723, 0.0350, 0.2835, 0.3314, - 0.1685, 0.1121, 0.6204, 0.8138, 0.4395, 0.6725, 0.3304, - 0.8152, 0.9086, 0.9942, 0.4917, 0.4382, 0.2414, 0.3708, - 0.4786, 0.6864, 0.9521, 0.6060, 0.5707, 0.3741, 0.7770, - 0.9669, 0.3737, 0.9687, 0.3704, 0.3850, 0.7958, 0.7808, - 0.1612, 0.9230, 0.8525, 0.6799, 0.0805, 0.9623, 0.0798, - 0.1563, 0.7621, 0.9897, 0.4134, 0.9079, 0.9743, 0.2876, - 0.0876, 0.7910, 0.9308, 0.0610, 0.2187, 0.5615, 0.3229, - 0.9375, 0.5980, 0.9792, 0.8630, 0.9130, 0.2219, 
0.2254, - 0.9540, 0.2937, 0.2448, 0.2621, 0.5306, 0.1461, 0.7337, - 0.4278, 0.4789, 0.9525, 0.8487, 0.1678, 0.8083, 0.8602, - 0.5427, 0.9150, 0.6338, 0.0596, 0.3932, 0.3519, 0.0682, - 0.2049, 0.6992, 0.1087, 0.4717, 0.7743, 0.1069, 0.9390, - 0.7215, 0.0549, 0.2173, 0.5676, 0.3363, 0.3647, 0.6589, - 0.8482, 0.0780, 0.4024, 0.0875, 0.0825, 0.2565, 0.7274, - 0.2689, 0.2341, 0.8282, 0.6484, 0.5740, 0.2458, 0.3418, - 0.6679, 0.0928, 0.8043, 0.0375, 0.0455, 0.1021, 0.0727, - 0.6034, 0.7073, 0.9501, 0.1827, 0.6546, 0.0622, 0.8697, - 0.6037, 0.6145, 0.7007, 0.9749, 0.2250, 0.9919, 0.6048, - 0.0930, 0.2305, 0.6915, 0.5382, 0.1474, 0.1571, 0.4897, - 0.4500, 0.3177, 0.8426, 0.5450, 0.6520, 0.4501, 0.7247, - 0.0507, 0.2850, 0.0212, 0.9359, 0.6125, 0.4131, 0.8209, - 0.5987, 0.8486, 0.6476, 0.7153, 0.8008, 0.6473, 0.9929, - 0.7390, 0.7887, 0.4013, 0.3755, 0.5104, 0.8562, 0.2127, - 0.2174, 0.7440, 0.3993, 0.8984, 0.4550, 0.9304, 0.9089, - 0.9769, 0.3961, 0.4588, 0.7478, 0.4077, 0.7220, 0.1553, - 0.0217, 0.3009, 0.6585, 0.6058, 0.6903, 0.5106, 0.3638, - 0.8147, 0.1664, 0.2008, 0.2597, 0.0618, 0.8442, 0.8484, - 0.5244, 0.1691, 0.8820, 0.2008, 0.5312, 0.1231, 0.2789, - 0.2030, 0.9130, 0.3220, 0.9544, 0.0942, 0.3181, 0.3072, - 0.8830, 0.5897, 0.4002, 0.4521, 0.5237, 0.1548, 0.2054, - 0.1146, 0.0603, 0.1840, 0.5799, 0.0520, 0.7941, 0.5931, - 0.8282, 0.2801, 0.8827, 0.2904, 0.6095, 0.1708, 0.2367, - 0.4340, 0.5936, 0.4575, 0.2879, 0.8225, 0.0562, 0.7618, - 0.0424, 0.3732, 0.7233, 0.7772, 0.4983, 0.8623, 0.4285, - 0.4221, 0.1088, 0.3304, 0.1568, 0.5143, 0.6215, 0.5503, - 0.7690, 0.3756, 0.5348, 0.7357, 0.8109, 0.3130, 0.0158, - 0.0023, 0.6162, 0.8535, 0.2750, 0.7217, 0.5469, 0.3265, - 0.1222, 0.4043, 0.3551, 0.1335, 0.2431, 0.0804, 0.8806, - 0.9161, 0.7825, 0.7689, 0.7354, 0.8643, 0.4190, 0.5343, - 0.3840, 0.7807, 0.6542, 0.2807, 0.0621, 0.5952, 0.8649, - 0.8056, 0.2886, 0.8492, 0.0497, 0.6903, 0.0986, 0.3481, - 0.9830, 0.1513, 0.7150, 0.5003, 0.6462, 0.3091, 0.5104, - 0.1090, 0.4875, 0.2487, 0.1973, 0.1005, 0.5965, 0.6503, - 0.6776, 0.1194, 0.6968, 0.4165, 0.3997, 0.2356, 0.9948, - 0.2469, 0.5798, 0.3444, 0.2015, 0.9011, 0.6742, 0.3975, - 0.8612, 0.7831, 0.0862, 0.4379, 0.0809, 0.7931, 0.0607, - 0.3830, 0.8447, 0.6462, 0.9840, 0.5433, 0.4101, 0.8260, - 0.5601, 0.6978, 0.1804, 0.4108, 0.0158, 0.7436, 0.8390, - 0.9475, 0.4933, 0.1487, 0.4644, 0.5877, 0.7798, 0.6315, - 0.5300, 0.7588, 0.5234, 0.0393, 0.3814, 0.5279, 0.6528, - 0.9869, 0.9540, 0.6270, 0.7013, 0.4305, 0.2085, 0.9008, - 0.4866, 0.4271, 0.1271, 0.3216, 0.3653, 0.9917, 0.2290, - 0.3330, 0.6020, 0.4543, 0.6559, 0.8582, 0.2937, 0.1913, - 0.3080, 0.3895, 0.3059, 0.0633, 0.6122, 0.1175, 0.6572, - 0.2524, 0.7704, 0.8210, 0.4794, 0.7538, 0.4031, 0.0275, - 0.5311, 0.0815, 0.4034, 0.6148, 0.1185, 0.7439, 0.4016, - 0.5608, 0.0706, 0.4835, 0.2505, 0.9363, 0.3741, 0.3124, - 0.4634, 0.9844, 0.4189, 0.3458, 0.6504, 0.0883, 0.9154, - 0.1987, 0.8102, 0.2403, 0.6352, 0.9134, 0.0740, 0.6217, - 0.7461, 0.9307, 0.2340, 0.9526, 0.2394, 0.1661, 0.5903, - 0.3531, 0.3579, 0.3335, 0.4941, 0.0937, 0.0190, 0.7400, - 0.6048, 0.0736, 0.8480, 0.7056, 0.9624, 0.8952, 0.2590, - 0.2918, 0.9369, 0.3316, 0.8127, 0.4530, 0.8583, 0.8824, - 0.6823, 0.0554, 0.6007, 0.8647, 0.0307, 0.2993, 0.2476, - 0.7318, 0.8917, 0.9643, 0.6157, 0.2184, 0.8408, 0.3345, - 0.0712, 0.8159, 0.2459, 0.0991, 0.7444, 0.2222, 0.0014, - 0.1305, 0.8914, 0.0089, 0.5321, 0.7917, 0.7163, 0.9580, - 0.3624, 0.0142, 0.8937, 0.5115, 0.5049, 0.8434, 0.7234, - 0.7161, 0.2634, 0.8592, 0.3961, 0.5586, 0.2620, 0.0375, - 
0.1665, 0.2915, 0.9139, 0.7009, 0.5095, 0.4519, 0.1213, - 0.3561, 0.0066, 0.4379, 0.3522, 0.6225, 0.6900, 0.8216, - 0.8841, 0.6553, 0.8193, 0.7688, 0.5104, 0.3926, 0.7388, - 0.4735, 0.1897, 0.7788, 0.8825, 0.9103, 0.2988, 0.1239, - 0.1792, 0.1266, 0.4818, 0.8893, 0.6604, 0.1883, 0.9700, - 0.5469, 0.0958, 0.2762, 0.2054, 0.3215, 0.7664]), + col_indices=tensor([2678, 5848, 8932, 6514, 1131, 7281, 3907, 9739, 4368, + 5455, 1563, 6741, 5038, 4433, 3496, 6764, 6783, 2096, + 5379, 5324, 5569, 7929, 3046, 3409, 3168, 2133, 5226, + 8680, 6560, 3626, 4039, 6513, 6765, 3907, 427, 4694, + 7551, 8505, 9656, 3083, 3989, 3546, 1570, 9918, 9896, + 335, 7782, 4759, 8628, 2630, 389, 9397, 2240, 4203, + 1948, 4116, 7432, 8011, 5499, 9353, 6102, 8714, 1872, + 9684, 1851, 1377, 2948, 6445, 8064, 2550, 6003, 934, + 8318, 8998, 1274, 7404, 8158, 7293, 3604, 2408, 6447, + 5245, 1953, 9908, 9236, 3159, 6613, 5123, 2772, 58, + 8158, 2914, 8872, 5012, 4440, 5933, 6728, 9276, 1884, + 6065, 1589, 565, 4394, 1612, 8762, 1583, 9811, 2191, + 8485, 332, 3797, 5984, 6485, 8030, 892, 6266, 9674, + 2139, 2659, 4709, 8571, 3555, 164, 4025, 9949, 195, + 4771, 135, 7191, 4183, 5031, 6805, 1340, 7672, 5235, + 530, 8176, 7205, 1183, 8638, 1346, 2590, 2005, 2202, + 7230, 632, 6331, 52, 6909, 3928, 339, 4933, 4157, + 4033, 1475, 8179, 2625, 6290, 3648, 1249, 9771, 6835, + 9081, 73, 8180, 62, 721, 3068, 7558, 7062, 9806, + 9992, 4714, 8737, 3254, 8028, 1512, 6801, 3742, 6874, + 7568, 4570, 4088, 5489, 1336, 8725, 8588, 811, 1680, + 7459, 3531, 473, 6870, 7194, 8917, 6286, 3103, 9711, + 8304, 9313, 5469, 1651, 1323, 5529, 3429, 9317, 9037, + 2258, 2925, 1605, 4087, 6314, 3409, 2468, 4645, 1651, + 2979, 8063, 4533, 9160, 3997, 264, 2239, 1980, 4712, + 3986, 2454, 9241, 7492, 358, 1561, 1764, 3237, 2128, + 5088, 9092, 919, 6871, 3872, 4410, 5133, 9706, 2227, + 499, 3451, 5390, 599, 943, 8741, 6801, 7625, 2680, + 6629, 563, 8907, 3280, 8235, 9177, 1523, 6255, 8817, + 4372, 267, 9572, 9950, 5968, 9992, 1628, 7005, 8550, + 5512, 8601, 785, 247, 9943, 6496, 312, 47, 6183, + 8205, 2403, 4595, 8512, 4517, 7618, 8133, 1008, 4724, + 7129, 8751, 3237, 8688, 7727, 8509, 9914, 7707, 8187, + 1458, 2539, 7790, 120, 9487, 4175, 2145, 2728, 1252, + 9331, 6861, 1428, 51, 9368, 4128, 9713, 2125, 2393, + 7502, 9806, 9848, 7181, 3492, 8043, 989, 9966, 1640, + 2160, 996, 6341, 5424, 7190, 2715, 3212, 2545, 6704, + 1163, 2990, 3564, 5587, 3653, 2866, 9029, 9388, 6499, + 6760, 4322, 3904, 3816, 7074, 8076, 654, 1157, 5562, + 1009, 6604, 616, 2995, 5906, 4431, 2743, 6209, 6739, + 7760, 9681, 3629, 4368, 6640, 2125, 2154, 7351, 616, + 2800, 8637, 236, 3715, 5387, 4785, 1531, 6341, 8371, + 3434, 7938, 3807, 8679, 1666, 5152, 9359, 4485, 9368, + 7754, 3822, 5441, 9075, 6328, 5113, 6317, 3497, 9597, + 4343, 6828, 4247, 3257, 6984, 9564, 5003, 6447, 918, + 3611, 9704, 1656, 4256, 2966, 6628, 7336, 3286, 3396, + 4655, 8761, 5872, 9641, 851, 9739, 9868, 7273, 6824, + 4512, 1119, 1271, 6159, 3090, 791, 3434, 4968, 8692, + 1078, 4350, 7105, 1059, 7272, 5390, 4013, 94, 4195, + 8383, 8497, 5352, 6780, 5275, 2447, 2937, 853, 7521, + 1602, 6585, 249, 3957, 4342, 5291, 8788, 4179, 3252, + 2754, 5478, 7259, 49, 821, 5243, 3254, 2436, 7817, + 1870, 776, 2046, 2208, 3030, 9918, 3190, 1835, 9413, + 8504, 4648, 5601, 3390, 1744, 6805, 5786, 2013, 8829, + 8282, 4011, 5450, 7546, 8290, 6803, 2328, 4769, 2666, + 8476, 951, 2420, 1209, 2583, 4526, 7030, 2145, 470, + 7236, 8296, 8055, 9904, 236, 2598, 7209, 8631, 5520, + 2875, 1226, 1552, 2489, 5454, 9437, 8339, 9878, 9044, + 
7754, 5712, 115, 7980, 3149, 3358, 3584, 9860, 2716, + 35, 3516, 8143, 8204, 4249, 3977, 1279, 1635, 5695, + 5553, 156, 5163, 5303, 8024, 5724, 6309, 7442, 6981, + 1482, 5061, 6377, 7653, 5741, 4512, 5439, 6406, 1758, + 6867, 478, 4490, 2339, 7376, 3188, 9830, 2584, 3630, + 6563, 3221, 1840, 9763, 1437, 3363, 1880, 9285, 9078, + 6954, 693, 6882, 939, 1083, 6111, 5822, 5804, 4912, + 4676, 6584, 679, 1703, 3121, 2876, 4463, 9332, 9872, + 7452, 8384, 2189, 9930, 3656, 812, 4368, 7703, 9371, + 811, 3942, 9089, 502, 3023, 6508, 5694, 3725, 5447, + 6931, 1617, 3715, 2876, 1609, 6738, 2941, 4572, 7351, + 7906, 8022, 1068, 3032, 6204, 1112, 5133, 5464, 2355, + 4984, 4046, 388, 5586, 4612, 7280, 8734, 8754, 1014, + 2205, 5645, 1298, 2729, 8222, 9219, 344, 3715, 6565, + 8800, 9706, 4480, 3826, 9697, 9454, 4394, 2413, 7994, + 7534, 2937, 815, 1688, 9468, 1579, 5140, 2088, 7174, + 9180, 6000, 262, 2796, 9505, 3993, 1286, 8540, 128, + 4586, 3938, 5329, 5519, 6604, 8591, 4699, 6798, 7194, + 7232, 8564, 7353, 1419, 7385, 4902, 7749, 3344, 993, + 3469, 919, 1238, 7882, 9695, 9845, 4627, 5478, 4672, + 881, 5947, 3076, 9844, 609, 5553, 5999, 7665, 3226, + 5457, 4855, 7443, 5121, 7647, 8584, 655, 5215, 1569, + 1648, 6795, 3717, 5720, 4762, 7146, 78, 1928, 1798, + 584, 2514, 7803, 3577, 5724, 6704, 5616, 5830, 1480, + 8193, 5152, 7183, 3956, 182, 6147, 9904, 453, 7425, + 9473, 2466, 6777, 1853, 7681, 8056, 1172, 208, 501, + 5162, 5454, 7591, 9528, 8414, 2073, 8777, 7187, 4165, + 2461, 9993, 5575, 7135, 3440, 2602, 8998, 8072, 3358, + 4906, 1703, 4958, 2883, 6753, 5420, 3979, 1672, 7690, + 3517, 8563, 3726, 1974, 9545, 1379, 4794, 7415, 5252, + 1567, 3734, 2155, 8986, 688, 6018, 9923, 3231, 3497, + 1952, 4404, 2898, 8594, 4361, 607, 8874, 2125, 8201, + 2101, 6590, 1018, 8311, 8335, 8605, 1095, 5163, 2342, + 489, 242, 805, 5500, 2030, 3748, 7566, 8448, 5849, + 1979, 692, 7234, 9645, 6098, 411, 4997, 4004, 2455, + 7396, 3160, 9449, 2896, 2241, 618, 8836, 5257, 7656, + 5412, 1935, 528, 6772, 4893, 7812, 6305, 7490, 3151, + 1203, 3122, 804, 7776, 5737, 382, 5995, 6332, 5793, + 2993, 5892, 3995, 9197, 9133, 8356, 3633, 482, 4515, + 9664, 5501, 5459, 5010, 3336, 1716, 5727, 8286, 1115, + 4288, 4852, 173, 7050, 3796, 8923, 3854, 9464, 2742, + 9054, 1036, 4739, 7463, 2898, 7540, 8656, 1371, 360, + 8627, 8931, 2125, 5688, 3221, 5867, 2323, 3331, 1738, + 5678, 2988, 446, 7760, 5978, 7580, 4093, 6545, 6826, + 9875, 6901, 7056, 5553, 9641, 9758, 2419, 3422, 1260, + 7749, 8311, 791, 7074, 2681, 3921, 7674, 1734, 3362, + 6902, 1648, 7880, 5365, 8236, 98, 965, 8965, 5437, + 2582, 7380, 7564, 9847, 4660, 2519, 4164, 4545, 265, + 4912, 9795, 4707, 7062, 2138, 4947, 5798, 6123, 5205, + 449, 2142, 9239, 9558, 5064, 2148, 9712, 687, 153, + 3028, 58, 2689, 1964, 8074, 7404, 4988, 918, 591, + 1127, 108, 7896, 950, 8567, 5527, 4509, 8789, 2188, + 2277, 4164, 2985, 499, 8205, 8449, 6305, 6305, 3977, + 3018]), + values=tensor([5.1838e-01, 4.8027e-01, 8.3918e-01, 7.7889e-02, + 5.8618e-01, 6.7111e-01, 4.8156e-01, 6.3769e-01, + 5.9556e-01, 1.8322e-02, 6.8184e-01, 2.5539e-01, + 6.9785e-01, 8.5587e-01, 7.5961e-01, 5.8047e-01, + 7.5996e-01, 4.9800e-01, 6.6584e-01, 6.2707e-01, + 3.2854e-01, 5.1026e-01, 3.3865e-01, 1.8919e-01, + 4.9296e-01, 5.0958e-01, 5.6205e-01, 3.8230e-01, + 8.6673e-01, 6.7546e-01, 4.7278e-01, 7.7770e-01, + 9.8633e-02, 5.1322e-01, 9.3110e-01, 3.5684e-02, + 4.0685e-01, 4.6288e-01, 2.3401e-01, 3.7533e-01, + 7.8245e-01, 7.5875e-01, 1.8288e-03, 3.7060e-01, + 9.3250e-01, 4.6835e-01, 4.5945e-01, 2.2816e-01, + 1.6555e-01, 
3.5822e-01, 6.7891e-01, 5.7081e-01, + 1.3394e-01, 4.8769e-01, 5.1662e-01, 4.9045e-01, + 8.5425e-01, 3.6198e-01, 4.1382e-01, 6.1676e-01, + 3.5515e-01, 3.6280e-01, 5.4649e-01, 2.2820e-01, + 8.9921e-01, 8.6646e-01, 3.1651e-01, 5.2641e-01, + 9.5379e-01, 4.8047e-01, 7.8712e-01, 1.9703e-02, + 8.3324e-01, 3.6549e-01, 3.9669e-01, 5.9268e-01, + 3.1271e-01, 1.6332e-01, 4.6643e-01, 4.4610e-02, + 8.7929e-02, 4.2486e-01, 2.0859e-01, 4.1765e-01, + 8.9762e-01, 6.0503e-02, 5.3495e-01, 5.8876e-01, + 9.6365e-01, 5.0177e-01, 5.1125e-01, 1.8336e-01, + 4.0672e-01, 5.2722e-01, 4.1700e-01, 7.2902e-01, + 9.0848e-01, 1.7116e-01, 1.3770e-01, 2.8766e-01, + 9.3382e-01, 8.1995e-01, 6.0914e-01, 4.1410e-01, + 4.3364e-01, 3.0832e-01, 3.9481e-02, 3.4106e-01, + 1.3828e-01, 8.8339e-01, 3.8656e-01, 7.5382e-01, + 6.9183e-01, 9.6590e-01, 5.3846e-01, 2.6012e-01, + 3.1593e-01, 5.7672e-01, 2.4808e-01, 8.2200e-01, + 9.6413e-01, 3.7000e-01, 1.3368e-01, 9.3703e-01, + 7.8369e-01, 9.2556e-01, 5.1803e-01, 8.1221e-01, + 4.3677e-01, 2.3575e-01, 3.8139e-01, 2.6701e-01, + 5.8012e-01, 1.7843e-01, 7.5107e-01, 4.9757e-02, + 2.1184e-01, 9.3472e-01, 4.6764e-01, 6.0262e-02, + 9.7295e-01, 1.9960e-01, 1.6947e-01, 5.9944e-01, + 7.5096e-01, 7.2836e-02, 8.6802e-01, 7.9771e-01, + 5.3295e-01, 1.1293e-01, 1.2028e-01, 5.0690e-01, + 7.8024e-01, 8.7793e-01, 4.2517e-01, 8.7115e-01, + 6.0427e-03, 7.4467e-01, 7.3429e-01, 6.6924e-02, + 7.1076e-01, 6.0690e-01, 6.3134e-01, 1.0168e-01, + 8.8732e-01, 5.1557e-01, 9.5840e-01, 2.7378e-01, + 8.7157e-01, 9.0730e-01, 7.6350e-01, 4.0141e-01, + 4.0315e-01, 8.9720e-01, 7.1849e-01, 7.4754e-01, + 7.7728e-01, 1.9576e-01, 6.6072e-01, 8.9195e-01, + 2.3741e-01, 6.8517e-03, 7.9553e-01, 9.5371e-02, + 5.2120e-01, 6.0801e-01, 3.2108e-01, 7.1724e-01, + 3.4707e-02, 9.8635e-01, 6.9417e-01, 8.9998e-01, + 6.6078e-01, 1.3657e-01, 4.6891e-01, 7.2558e-01, + 7.2927e-01, 2.3680e-01, 7.9246e-01, 7.9430e-01, + 8.8668e-01, 9.8271e-01, 3.7716e-01, 9.5253e-01, + 5.7418e-01, 9.1029e-01, 6.6881e-01, 8.7060e-01, + 5.8992e-01, 4.9313e-01, 4.7570e-01, 9.9628e-01, + 8.3694e-01, 8.5239e-01, 9.5707e-01, 3.0963e-01, + 7.1608e-01, 1.5938e-01, 1.0843e-01, 5.7840e-01, + 8.5853e-01, 2.2082e-01, 4.4360e-02, 3.6738e-02, + 9.2371e-01, 9.6308e-01, 4.8745e-01, 5.9983e-01, + 8.3662e-01, 1.9217e-01, 7.1628e-01, 6.8783e-01, + 7.2064e-01, 5.4602e-01, 7.8485e-01, 2.9078e-01, + 9.8070e-01, 7.2043e-01, 4.4170e-01, 1.8063e-01, + 9.3028e-01, 8.5442e-01, 8.4366e-01, 7.1744e-01, + 3.5699e-01, 2.9793e-01, 5.8327e-01, 3.7761e-01, + 8.2260e-01, 3.8203e-01, 4.2986e-01, 3.3714e-01, + 7.2078e-01, 5.7870e-01, 3.2117e-01, 4.8517e-01, + 9.2007e-01, 6.5561e-01, 8.2434e-01, 2.7642e-01, + 3.8746e-01, 6.8592e-01, 3.7973e-01, 9.5875e-01, + 8.6646e-01, 4.6623e-01, 7.4571e-02, 2.2778e-01, + 6.9281e-01, 9.8290e-01, 5.3202e-02, 3.5929e-02, + 4.2291e-01, 4.4277e-01, 1.5349e-01, 4.3231e-01, + 8.2011e-01, 7.2244e-01, 2.6651e-01, 9.8915e-02, + 3.6405e-01, 2.9954e-01, 2.7205e-02, 1.8403e-01, + 6.4235e-01, 8.1990e-01, 7.2246e-01, 6.3070e-01, + 7.1801e-01, 5.6306e-01, 9.1787e-01, 1.1705e-02, + 4.3537e-01, 9.7378e-01, 6.2170e-01, 2.0596e-01, + 7.0586e-02, 2.0969e-02, 5.0558e-01, 1.3130e-01, + 5.7470e-01, 1.6101e-01, 8.3734e-01, 5.7816e-01, + 4.2951e-01, 6.3467e-02, 2.6600e-01, 8.1422e-01, + 8.1080e-01, 4.6367e-01, 8.2893e-01, 4.6013e-01, + 1.0682e-01, 6.1112e-01, 4.0725e-01, 4.2257e-02, + 4.0788e-01, 9.0715e-01, 7.9904e-01, 8.7199e-01, + 5.0507e-01, 2.9835e-01, 3.2475e-01, 3.6805e-01, + 5.7408e-01, 6.7076e-01, 5.4595e-01, 4.4910e-01, + 8.7908e-02, 1.9390e-01, 7.3581e-01, 6.7330e-01, + 8.4758e-01, 
1.1538e-01, 5.5833e-01, 6.8745e-01, + 4.6518e-01, 9.8563e-01, 3.1315e-02, 7.1278e-01, + 4.2570e-01, 8.4469e-01, 2.9684e-01, 9.5037e-01, + 9.6684e-01, 7.7939e-01, 6.6961e-01, 1.9207e-01, + 9.0615e-02, 8.9770e-01, 9.3637e-01, 2.3711e-01, + 5.1238e-01, 8.7654e-01, 7.5039e-01, 6.0416e-01, + 6.1096e-01, 2.3286e-01, 1.4012e-01, 7.9028e-01, + 8.3652e-01, 3.0895e-01, 3.9595e-01, 1.1764e-01, + 4.1872e-01, 4.8034e-01, 6.8482e-01, 6.6337e-01, + 7.3923e-01, 8.6261e-01, 2.2457e-01, 3.6397e-01, + 6.2814e-01, 3.6800e-01, 7.0580e-01, 9.1407e-02, + 5.7080e-01, 7.9248e-01, 1.0129e-01, 7.3599e-01, + 3.5061e-01, 1.8993e-01, 6.2121e-01, 7.6740e-01, + 5.7787e-01, 6.0896e-01, 2.5264e-01, 2.0795e-01, + 1.3239e-02, 9.6359e-02, 4.8132e-02, 6.7274e-01, + 2.9477e-01, 1.4102e-01, 2.7292e-01, 1.0473e-01, + 7.7761e-01, 3.7502e-02, 8.0001e-01, 5.1192e-01, + 8.5536e-01, 1.3117e-01, 9.5645e-01, 6.8219e-01, + 6.2019e-01, 3.6495e-02, 8.4527e-01, 9.1630e-02, + 2.8423e-01, 9.2737e-01, 5.9931e-01, 5.4321e-01, + 6.9357e-01, 9.3312e-01, 1.7996e-01, 9.8590e-01, + 6.4823e-01, 2.9656e-01, 4.9547e-01, 9.4245e-01, + 3.6815e-01, 1.3356e-01, 7.8271e-01, 9.4834e-01, + 3.5856e-01, 6.4615e-01, 6.1428e-01, 6.3807e-01, + 1.7057e-01, 1.7740e-01, 8.9389e-01, 6.7825e-01, + 3.9499e-01, 1.9776e-01, 6.6894e-01, 1.7032e-01, + 5.7388e-01, 8.2183e-01, 3.6330e-01, 8.0880e-01, + 2.0305e-01, 1.5434e-01, 7.4128e-01, 3.8989e-01, + 4.3827e-01, 4.0977e-01, 2.9125e-01, 5.2847e-02, + 8.1745e-02, 1.0234e-01, 7.2035e-01, 7.1349e-01, + 3.0386e-01, 7.4587e-02, 9.9025e-01, 3.0347e-01, + 3.8606e-01, 6.8653e-01, 2.7580e-01, 9.6578e-01, + 7.2235e-01, 2.9692e-01, 4.0660e-01, 7.4790e-02, + 7.9686e-01, 8.8094e-01, 8.2564e-01, 4.6655e-01, + 9.6551e-01, 6.3026e-01, 6.0660e-01, 4.7454e-01, + 9.4613e-01, 3.7266e-01, 9.9796e-01, 4.7107e-01, + 1.5403e-02, 9.2743e-01, 4.7063e-02, 3.2584e-01, + 5.8369e-01, 5.6961e-01, 9.0423e-01, 3.6126e-01, + 6.0919e-01, 3.6585e-01, 7.0895e-01, 4.5042e-01, + 3.3875e-01, 5.6184e-01, 9.9711e-01, 5.5435e-01, + 2.8074e-01, 4.0190e-02, 1.3606e-01, 1.6852e-01, + 8.2050e-01, 7.5050e-01, 4.8556e-01, 5.5772e-01, + 3.9981e-01, 6.9659e-01, 4.6937e-01, 8.7705e-02, + 8.6330e-01, 9.6237e-01, 2.6430e-01, 5.9624e-01, + 1.4870e-01, 4.5843e-01, 9.1351e-01, 8.2778e-01, + 8.2247e-01, 2.9720e-01, 8.7732e-01, 2.6596e-01, + 3.3265e-01, 4.7720e-01, 7.2502e-01, 9.4295e-01, + 7.3038e-01, 6.0235e-02, 3.2278e-02, 1.9268e-01, + 4.3019e-01, 1.6406e-01, 2.2580e-02, 4.7677e-01, + 9.3483e-01, 3.5249e-01, 7.2833e-01, 9.5668e-01, + 6.1709e-01, 8.7948e-01, 2.0670e-01, 9.5341e-01, + 3.4142e-01, 5.5463e-01, 4.1957e-01, 3.3458e-01, + 5.0988e-01, 4.9233e-01, 7.9123e-01, 9.4013e-01, + 4.8199e-01, 3.0123e-02, 7.1299e-01, 6.7158e-01, + 7.2073e-01, 3.4907e-01, 7.6090e-02, 4.6263e-01, + 7.1080e-01, 3.5478e-01, 4.2699e-01, 5.3285e-01, + 3.7627e-01, 6.5108e-01, 7.6983e-01, 5.0238e-01, + 3.7186e-01, 7.4101e-01, 1.0180e-01, 9.1081e-01, + 8.2214e-01, 5.3804e-01, 3.9786e-01, 3.1840e-01, + 9.4001e-01, 4.6192e-01, 7.6120e-01, 7.2666e-01, + 3.3125e-01, 4.6892e-01, 6.8600e-01, 3.5894e-01, + 9.2476e-01, 9.6411e-01, 1.9643e-01, 9.1677e-01, + 4.4028e-01, 8.2110e-01, 5.6583e-01, 2.9195e-01, + 4.4459e-01, 8.7108e-01, 4.8275e-02, 4.4066e-01, + 8.0401e-01, 5.6533e-01, 4.4520e-01, 1.5128e-01, + 1.3540e-01, 3.4559e-01, 1.9536e-01, 4.8147e-01, + 5.9237e-02, 7.0397e-01, 3.0700e-01, 6.3088e-02, + 2.0806e-01, 7.4766e-01, 8.4211e-02, 4.3882e-01, + 5.8286e-01, 3.6017e-01, 6.6680e-01, 6.2062e-01, + 8.0258e-01, 4.7332e-01, 9.9449e-01, 8.1089e-01, + 7.2742e-01, 9.5543e-01, 4.2128e-01, 8.2507e-01, + 9.4724e-01, 
1.2972e-01, 3.9554e-01, 3.9059e-01, + 7.6634e-01, 6.3078e-01, 1.3847e-01, 2.0194e-01, + 8.3443e-01, 8.0120e-01, 8.3765e-01, 3.7910e-01, + 1.9301e-01, 3.5658e-01, 2.4781e-01, 6.1760e-01, + 8.3152e-01, 6.7280e-01, 4.1041e-01, 5.7212e-01, + 6.5934e-01, 9.8193e-01, 2.6474e-01, 7.2690e-01, + 9.9811e-01, 2.6560e-01, 2.4037e-01, 4.5065e-01, + 1.2872e-01, 3.7925e-01, 2.5061e-01, 4.0492e-01, + 8.5432e-01, 7.4584e-01, 1.8191e-01, 6.1161e-01, + 4.9870e-01, 3.3714e-01, 3.2297e-01, 8.2104e-01, + 4.9746e-01, 9.5588e-01, 2.4725e-01, 7.3493e-01, + 3.4350e-01, 7.9371e-01, 5.0533e-01, 8.5569e-01, + 5.0645e-01, 4.8222e-01, 7.0081e-01, 5.3757e-01, + 3.8262e-01, 8.2935e-01, 7.1445e-01, 2.8785e-01, + 8.3693e-01, 9.8657e-01, 8.9492e-01, 9.0828e-01, + 4.1198e-01, 3.7749e-01, 2.1366e-01, 9.1028e-01, + 8.2317e-01, 6.8193e-01, 6.7945e-01, 3.5136e-01, + 8.4090e-01, 3.5647e-02, 7.7684e-01, 8.3024e-01, + 9.2244e-01, 9.2298e-01, 8.9766e-01, 8.5199e-01, + 4.9978e-01, 8.0846e-01, 2.1637e-01, 4.0517e-01, + 2.1007e-01, 2.1377e-01, 3.5364e-01, 5.9078e-01, + 5.1643e-01, 3.5306e-01, 5.4722e-02, 5.5017e-01, + 4.3671e-01, 3.9569e-01, 7.7582e-02, 5.6536e-01, + 1.2087e-02, 8.6814e-01, 2.7761e-02, 2.7180e-01, + 3.3139e-01, 7.8569e-01, 1.9535e-01, 8.5077e-01, + 9.8790e-01, 1.7846e-01, 1.9440e-01, 5.7366e-01, + 6.2797e-01, 3.7094e-01, 9.8894e-01, 7.5262e-01, + 4.1880e-01, 8.1698e-01, 8.5831e-01, 2.2690e-01, + 3.5308e-01, 9.0459e-01, 6.8343e-01, 8.1904e-02, + 8.9611e-01, 5.1413e-01, 1.9147e-01, 1.1419e-01, + 5.4172e-01, 7.3272e-01, 1.5703e-01, 7.0208e-01, + 1.5550e-01, 4.2626e-01, 2.4197e-01, 4.2718e-01, + 6.2171e-01, 1.2803e-01, 7.1940e-01, 5.6744e-01, + 3.2125e-01, 7.4377e-01, 3.1274e-01, 3.4274e-01, + 4.7478e-01, 6.7334e-01, 1.3565e-01, 3.4904e-01, + 5.7371e-01, 2.4459e-01, 8.9790e-01, 5.5714e-03, + 9.0159e-01, 9.9937e-01, 6.1757e-01, 8.5558e-02, + 1.4341e-01, 7.5241e-01, 1.8058e-01, 1.9063e-01, + 6.7688e-01, 9.4618e-01, 9.1275e-01, 3.2585e-01, + 6.6877e-01, 2.2418e-01, 6.8450e-01, 5.1895e-01, + 2.1437e-02, 8.8828e-01, 4.8335e-01, 7.4366e-01, + 9.8953e-01, 5.8257e-01, 2.9039e-04, 1.1123e-01, + 5.1846e-02, 1.3108e-01, 4.2323e-01, 6.5304e-01, + 1.8625e-01, 3.7199e-02, 7.0216e-01, 6.0287e-01, + 2.3789e-01, 9.2863e-01, 1.2348e-01, 8.6839e-01, + 4.9520e-01, 2.7288e-01, 4.4592e-01, 4.2396e-01, + 6.0645e-01, 6.2528e-01, 6.5293e-01, 9.2566e-01, + 9.4540e-01, 5.8610e-02, 8.7155e-01, 2.2119e-01, + 8.7605e-01, 8.8102e-01, 8.2781e-01, 3.8576e-01, + 6.3776e-01, 7.8203e-01, 1.0462e-02, 9.1986e-01, + 6.5027e-01, 2.7230e-01, 2.1864e-01, 2.5327e-01, + 9.8263e-01, 4.8835e-01, 8.8863e-01, 5.6728e-01, + 8.0559e-01, 1.2815e-01, 4.6205e-01, 8.5618e-01, + 1.8311e-01, 4.8907e-01, 6.4311e-01, 3.8609e-01, + 9.3749e-01, 8.9364e-01, 7.1836e-01, 3.0713e-01, + 5.5364e-01, 7.7259e-01, 1.0362e-01, 2.7281e-01, + 2.1067e-01, 8.0247e-01, 1.0462e-01, 6.2383e-03, + 4.1906e-01, 4.6448e-02, 8.4964e-01, 1.8122e-01, + 6.4569e-01, 6.6590e-01, 9.6978e-01, 5.3446e-01, + 9.2632e-02, 8.2326e-01, 2.3167e-02, 7.1511e-01, + 7.4454e-01, 8.3709e-01, 2.7395e-01, 3.9752e-01, + 2.3413e-01, 4.1262e-02, 8.1277e-01, 8.4946e-01, + 5.3420e-01, 2.8694e-01, 6.3996e-01, 8.6404e-01, + 7.7503e-01, 8.7692e-01, 4.2612e-01, 4.9620e-02, + 3.3084e-01, 8.5854e-01, 7.7131e-01, 1.9680e-01, + 1.6464e-01, 9.8493e-01, 6.7014e-01, 9.6494e-01, + 7.2374e-01, 5.8901e-01, 7.9710e-01, 6.1744e-01, + 9.8465e-01, 9.2253e-01, 3.8584e-01, 4.0856e-01, + 1.8180e-01, 8.8451e-01, 5.0687e-01, 7.6331e-01, + 2.9799e-01, 3.8883e-01, 9.8601e-01, 6.4816e-01, + 4.7480e-03, 2.3454e-01, 3.3701e-02, 6.5933e-01, + 5.6913e-01, 
1.7944e-01, 7.1449e-01, 8.1288e-01, + 4.0565e-01, 1.1448e-01, 7.6631e-02, 5.9701e-01, + 9.9955e-01, 8.5047e-01, 6.2163e-01, 3.4651e-02, + 4.7043e-01, 6.7147e-01, 4.1853e-01, 5.6234e-01, + 4.9969e-01, 8.8040e-01, 9.8673e-01, 8.3060e-02, + 8.6189e-02, 9.1021e-01, 8.1437e-01, 9.1474e-01, + 1.0110e-01, 4.2937e-01, 6.7076e-02, 5.5100e-01, + 1.4978e-01, 2.3091e-01, 9.2119e-01, 8.8302e-01, + 7.1686e-01, 8.5039e-01, 6.3669e-01, 6.9416e-01, + 9.9660e-01, 4.4720e-01, 4.8955e-01, 2.9264e-01, + 1.2478e-01, 6.6834e-01, 1.5549e-01, 8.6643e-01, + 4.8584e-01, 6.2762e-01, 8.9681e-01, 9.9150e-01, + 8.9800e-01, 4.9195e-02, 3.4261e-02, 9.9473e-02, + 6.5421e-01, 3.1869e-02, 7.3417e-01, 7.2239e-01, + 5.2055e-01, 7.2939e-01, 4.0424e-01, 5.3910e-01, + 1.4767e-01, 2.2639e-01, 9.4374e-01, 7.8686e-01, + 1.2226e-02, 5.6413e-01, 5.1201e-01, 4.0050e-01, + 7.2622e-01, 7.4764e-01, 3.7669e-01, 4.7057e-01, + 3.6367e-02, 9.6347e-01, 1.0702e-01, 9.4801e-01, + 5.9118e-01, 1.3243e-01, 2.9751e-01, 3.7908e-01, + 6.3144e-01, 4.1336e-01, 4.4719e-01, 9.2700e-01, + 5.3549e-01, 6.8263e-01, 1.0998e-01, 7.2406e-01, + 8.0101e-01, 2.8276e-01, 5.0451e-01, 9.4162e-01, + 3.8205e-01, 3.9509e-01, 2.6665e-02, 9.7060e-01, + 1.1248e-01, 7.1314e-01, 9.2116e-01, 4.1416e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.5980, 0.9421, 0.9493, ..., 0.6518, 0.5202, 0.4457]) +tensor([0.0845, 0.2230, 0.9209, ..., 0.5627, 0.3340, 0.7945]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1295,13 +1402,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.272708654403687 seconds +Time: 10.510953187942505 seconds -[18.39, 17.85, 18.29, 17.9, 18.11, 17.8, 18.16, 22.08, 18.79, 18.14] -[46.54] -14.219237565994263 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 223318, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.272708654403687, 'TIME_S_1KI': 0.046000361163917314, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.7633163213729, 'W': 46.54} -[18.39, 17.85, 18.29, 17.9, 18.11, 17.8, 18.16, 22.08, 18.79, 18.14, 20.06, 18.33, 18.25, 21.91, 18.14, 18.0, 18.29, 18.13, 17.88, 18.08] -335.245 -16.76225 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 223318, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.272708654403687, 'TIME_S_1KI': 0.046000361163917314, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.7633163213729, 'W': 46.54, 'J_1KI': 2.963322778823798, 'W_1KI': 0.2084023679237679, 'W_D': 29.777749999999997, 'J_D': 423.41690143078563, 'W_D_1KI': 0.13334236380408207, 'J_D_1KI': 0.0005970963549919042} +[19.17, 18.72, 18.85, 18.68, 18.89, 18.73, 18.67, 18.78, 18.6, 18.53] +[52.58] +14.262712955474854 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 226437, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.510953187942505, 'TIME_S_1KI': 0.046418885552902155, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.9334471988677, 'W': 52.58} +[19.17, 18.72, 18.85, 18.68, 18.89, 18.73, 18.67, 18.78, 18.6, 18.53, 19.4, 18.65, 18.9, 18.42, 18.63, 18.51, 18.55, 18.66, 18.95, 18.74] +337.11 +16.8555 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 226437, 'MATRIX_TYPE': 
'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.510953187942505, 'TIME_S_1KI': 0.046418885552902155, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.9334471988677, 'W': 52.58, 'J_1KI': 3.311885633526622, 'W_1KI': 0.23220586741566088, 'W_D': 35.7245, 'J_D': 509.5282889778614, 'W_D_1KI': 0.15776794428472377, 'J_D_1KI': 0.0006967410108980589} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.json index d34e261..b1102d4 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 115566, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.362370014190674, "TIME_S_1KI": 0.08966625144238508, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 657.2445391845703, "W": 46.58, "J_1KI": 5.687179094063741, "W_1KI": 0.40305972344807295, "W_D": 30.2825, "J_D": 427.28655555725095, "W_D_1KI": 0.2620364120935223, "J_D_1KI": 0.002267417857272228} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 113233, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.205250024795532, "TIME_S_1KI": 0.09012611186487625, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 752.6565923953057, "W": 53.59, "J_1KI": 6.646972105263533, "W_1KI": 0.4732719260286313, "W_D": 22.6315, "J_D": 317.853100779891, "W_D_1KI": 0.1998666466489451, "J_D_1KI": 0.0017650918605790283} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.output index 30badbd..407df54 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.024602413177490234} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.023785829544067383} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 4997, 4999, 5000]), - col_indices=tensor([5115, 9337, 5262, ..., 1244, 4227, 2124]), - values=tensor([0.7036, 0.8839, 0.8989, ..., 0.3409, 0.8377, 0.4572]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4997, 4997, 5000]), + col_indices=tensor([1263, 3281, 2385, ..., 1647, 8857, 9729]), + values=tensor([0.1325, 0.5780, 0.0735, ..., 0.4975, 0.1005, 0.9200]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.4155, 0.9580, 0.1653, ..., 0.8843, 0.0512, 0.6581]) +tensor([0.9661, 0.4765, 0.8121, ..., 0.6083, 0.6316, 0.2082]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.024602413177490234 seconds +Time: 0.023785829544067383 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '42678', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.8775720596313477} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '44143', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.093339204788208} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 4999, 5000]), - col_indices=tensor([ 717, 6679, 2344, ..., 3928, 4219, 6236]), - values=tensor([0.9595, 0.5891, 0.3421, ..., 0.3760, 0.2961, 0.2336]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 5000, 5000]), + col_indices=tensor([4240, 5372, 9073, ..., 536, 3650, 487]), + values=tensor([0.8815, 0.3828, 0.1359, ..., 0.3328, 0.9511, 0.5438]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.6496, 0.5288, 0.3835, ..., 0.2038, 0.3313, 0.3083]) +tensor([0.7920, 0.2578, 0.7925, ..., 0.7391, 0.3319, 0.5267]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 3.8775720596313477 seconds +Time: 4.093339204788208 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '115566', '-ss', '10000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.362370014190674} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '113233', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.205250024795532} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), - col_indices=tensor([6593, 2332, 3653, ..., 6447, 6839, 4175]), - values=tensor([0.4277, 0.1691, 0.2657, ..., 0.2731, 0.4419, 0.1553]), +tensor(crow_indices=tensor([ 0, 1, 3, ..., 5000, 5000, 5000]), + col_indices=tensor([1678, 6917, 7081, ..., 5230, 7690, 5828]), + values=tensor([0.5732, 0.2259, 0.2631, ..., 0.8737, 0.8035, 0.2694]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.3155, 0.6085, 0.7514, ..., 0.0185, 0.1956, 0.3828]) +tensor([0.9981, 0.1612, 0.0478, ..., 0.0903, 0.5916, 0.6765]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.362370014190674 seconds +Time: 10.205250024795532 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), - col_indices=tensor([6593, 2332, 3653, ..., 6447, 6839, 4175]), - values=tensor([0.4277, 0.1691, 0.2657, ..., 0.2731, 0.4419, 0.1553]), +tensor(crow_indices=tensor([ 0, 1, 3, ..., 5000, 5000, 5000]), + col_indices=tensor([1678, 6917, 7081, ..., 5230, 7690, 5828]), + values=tensor([0.5732, 0.2259, 0.2631, ..., 0.8737, 0.8035, 0.2694]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.3155, 0.6085, 0.7514, ..., 0.0185, 0.1956, 0.3828]) +tensor([0.9981, 0.1612, 0.0478, ..., 0.0903, 0.5916, 0.6765]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.362370014190674 seconds +Time: 10.205250024795532 seconds -[18.44, 17.79, 18.13, 18.16, 18.19, 17.86, 18.01, 17.82, 18.05, 18.06] -[46.58] -14.110015869140625 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 115566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.362370014190674, 'TIME_S_1KI': 0.08966625144238508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 657.2445391845703, 'W': 46.58} -[18.44, 17.79, 18.13, 18.16, 18.19, 17.86, 18.01, 17.82, 18.05, 18.06, 18.35, 17.92, 18.08, 18.77, 17.97, 18.23, 18.59, 17.91, 18.14, 17.81] -325.95 -16.2975 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 115566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.362370014190674, 'TIME_S_1KI': 0.08966625144238508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 657.2445391845703, 'W': 46.58, 'J_1KI': 5.687179094063741, 'W_1KI': 0.40305972344807295, 'W_D': 30.2825, 'J_D': 427.28655555725095, 'W_D_1KI': 0.2620364120935223, 'J_D_1KI': 0.002267417857272228} +[43.74, 52.65, 53.85, 53.25, 53.48, 53.65, 48.94, 44.74, 45.01, 44.67] +[53.59] +14.044720888137817 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 113233, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.205250024795532, 'TIME_S_1KI': 0.09012611186487625, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 752.6565923953057, 'W': 53.59} +[43.74, 52.65, 53.85, 53.25, 53.48, 53.65, 48.94, 44.74, 45.01, 44.67, 18.93, 19.56, 18.83, 18.45, 18.72, 18.73, 19.01, 18.63, 18.56, 18.88] +619.1700000000001 +30.958500000000004 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 113233, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.205250024795532, 'TIME_S_1KI': 0.09012611186487625, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 752.6565923953057, 'W': 53.59, 'J_1KI': 6.646972105263533, 'W_1KI': 0.4732719260286313, 'W_D': 22.6315, 'J_D': 317.853100779891, 'W_D_1KI': 0.1998666466489451, 'J_D_1KI': 0.0017650918605790283} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.json index 5b0e976..bc2885e 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.json 
+++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 15.447505474090576, "TIME_S_1KI": 154.47505474090576, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2621.8117760252953, "W": 44.61, "J_1KI": 26218.117760252953, "W_1KI": 446.1, "W_D": 28.050250000000002, "J_D": 1648.56480095166, "W_D_1KI": 280.50250000000005, "J_D_1KI": 2805.0250000000005} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 14.074536085128784, "TIME_S_1KI": 140.74536085128784, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1450.0492564201354, "W": 53.4, "J_1KI": 14500.492564201353, "W_1KI": 534.0, "W_D": 36.354, "J_D": 987.173982544899, "W_D_1KI": 363.53999999999996, "J_D_1KI": 3635.3999999999996} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.output index 55f3dfa..09615f1 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 15.447505474090576} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 14.074536085128784} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 57, 102, ..., 24999899, - 24999940, 25000000]), - col_indices=tensor([ 5577, 28835, 47310, ..., 481805, 486701, - 494412]), - values=tensor([0.4828, 0.8396, 0.7554, ..., 0.0896, 0.7495, 0.8303]), +tensor(crow_indices=tensor([ 0, 60, 101, ..., 24999901, + 24999948, 25000000]), + col_indices=tensor([ 2074, 4285, 14752, ..., 481790, 489574, + 493216]), + values=tensor([0.5532, 0.1710, 0.9566, ..., 0.4584, 0.5329, 0.8738]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.9266, 0.8773, 0.4193, ..., 0.0231, 0.6267, 0.1934]) +tensor([0.0537, 0.3552, 0.4509, ..., 0.9134, 0.7144, 0.9089]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,17 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 15.447505474090576 seconds +Time: 14.074536085128784 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 57, 102, ..., 24999899, - 24999940, 25000000]), - col_indices=tensor([ 5577, 28835, 47310, ..., 481805, 486701, - 494412]), - values=tensor([0.4828, 0.8396, 0.7554, ..., 0.0896, 0.7495, 0.8303]), +tensor(crow_indices=tensor([ 0, 60, 101, ..., 24999901, + 24999948, 25000000]), + col_indices=tensor([ 2074, 4285, 14752, ..., 481790, 489574, + 493216]), + values=tensor([0.5532, 0.1710, 0.9566, ..., 0.4584, 0.5329, 0.8738]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.9266, 0.8773, 0.4193, ..., 0.0231, 0.6267, 0.1934]) +tensor([0.0537, 0.3552, 0.4509, ..., 0.9134, 0.7144, 0.9089]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +35,13 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 15.447505474090576 seconds +Time: 14.074536085128784 seconds -[18.81, 17.9, 18.4, 17.9, 18.2, 17.84, 18.01, 22.17, 18.54, 18.17] -[44.61] -58.77183985710144 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 15.447505474090576, 'TIME_S_1KI': 154.47505474090576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2621.8117760252953, 'W': 44.61} -[18.81, 17.9, 18.4, 17.9, 18.2, 17.84, 18.01, 22.17, 18.54, 18.17, 18.44, 18.17, 17.99, 18.08, 18.12, 18.6, 18.11, 18.22, 18.18, 18.11] -331.19499999999994 -16.559749999999998 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 15.447505474090576, 'TIME_S_1KI': 154.47505474090576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2621.8117760252953, 'W': 44.61, 'J_1KI': 26218.117760252953, 'W_1KI': 446.1, 'W_D': 28.050250000000002, 'J_D': 1648.56480095166, 'W_D_1KI': 280.50250000000005, 'J_D_1KI': 2805.0250000000005} +[19.02, 18.78, 18.66, 18.75, 18.74, 18.65, 18.64, 18.75, 18.66, 18.51] +[53.4] +27.154480457305908 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 
'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 14.074536085128784, 'TIME_S_1KI': 140.74536085128784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1450.0492564201354, 'W': 53.4} +[19.02, 18.78, 18.66, 18.75, 18.74, 18.65, 18.64, 18.75, 18.66, 18.51, 19.39, 19.96, 18.64, 18.39, 18.75, 18.41, 19.4, 18.74, 18.66, 23.76] +340.92 +17.046 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 14.074536085128784, 'TIME_S_1KI': 140.74536085128784, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1450.0492564201354, 'W': 53.4, 'J_1KI': 14500.492564201353, 'W_1KI': 534.0, 'W_D': 36.354, 'J_D': 987.173982544899, 'W_D_1KI': 363.53999999999996, 'J_D_1KI': 3635.3999999999996} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json index 8a43ad7..49fa8c0 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 751, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.154199123382568, "TIME_S_1KI": 13.520904292120598, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 705.406801700592, "W": 48.84, "J_1KI": 939.290015580016, "W_1KI": 65.03328894806924, "W_D": 32.38000000000001, "J_D": 467.67142176628124, "W_D_1KI": 43.115845539280976, "J_D_1KI": 57.41124572474165} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 774, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.392576932907104, "TIME_S_1KI": 13.427101980500135, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 815.1673431968688, "W": 54.76, "J_1KI": 1053.1877819080992, "W_1KI": 70.74935400516794, "W_D": 37.97175, "J_D": 565.2543930612802, "W_D_1KI": 49.05910852713178, "J_D_1KI": 63.38386114616508} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output index 9480055..dda1d3d 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,36 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.3980276584625244} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, 
"MATRIX_DENSITY": 1e-05, "TIME_S": 1.3549816608428955} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 14, ..., 2499987, - 2499996, 2500000]), - col_indices=tensor([ 56026, 195485, 327540, ..., 74351, 467081, - 495492]), - values=tensor([0.8691, 0.8234, 0.1160, ..., 0.0380, 0.6115, 0.8262]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8652, 0.4148, 0.1413, ..., 0.7873, 0.1950, 0.8001]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 1.3980276584625244 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '751', '-ss', '500000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.154199123382568} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 11, ..., 2499987, +tensor(crow_indices=tensor([ 0, 5, 9, ..., 2499988, 2499993, 2500000]), - col_indices=tensor([159259, 352180, 455738, ..., 361655, 368506, - 421546]), - values=tensor([0.7015, 0.3878, 0.3559, ..., 0.2417, 0.3895, 0.7278]), + col_indices=tensor([ 26442, 175167, 250311, ..., 373376, 376337, + 459483]), + values=tensor([0.5969, 0.1012, 0.1691, ..., 0.9172, 0.2591, 0.6319]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4879, 0.7909, 0.7587, ..., 0.1983, 0.9582, 0.5253]) +tensor([0.8308, 0.0173, 0.4266, ..., 0.3319, 0.0201, 0.1301]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.154199123382568 seconds +Time: 1.3549816608428955 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '774', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.392576932907104} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 11, ..., 2499987, - 2499993, 2500000]), - col_indices=tensor([159259, 352180, 455738, ..., 361655, 368506, - 421546]), - values=tensor([0.7015, 0.3878, 0.3559, ..., 0.2417, 0.3895, 0.7278]), +tensor(crow_indices=tensor([ 0, 6, 12, ..., 2499991, + 2499997, 2500000]), + col_indices=tensor([ 71123, 409588, 458159, ..., 75663, 128358, + 158973]), + values=tensor([0.2081, 0.8364, 0.6856, ..., 0.2605, 0.4815, 0.1721]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4879, 0.7909, 0.7587, ..., 0.1983, 0.9582, 0.5253]) +tensor([0.3363, 0.8875, 0.8268, ..., 0.0761, 0.0201, 0.4445]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +38,31 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.154199123382568 seconds +Time: 10.392576932907104 seconds -[18.79, 17.69, 18.39, 21.58, 18.07, 18.02, 18.17, 17.87, 17.87, 18.03] -[48.84] -14.44321870803833 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 751, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.154199123382568, 'TIME_S_1KI': 13.520904292120598, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 705.406801700592, 'W': 48.84} -[18.79, 17.69, 18.39, 21.58, 18.07, 18.02, 18.17, 17.87, 17.87, 18.03, 18.42, 17.82, 17.95, 18.61, 17.96, 18.22, 17.97, 18.42, 18.04, 17.86] -329.19999999999993 -16.459999999999997 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 751, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.154199123382568, 'TIME_S_1KI': 13.520904292120598, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 705.406801700592, 'W': 48.84, 'J_1KI': 939.290015580016, 'W_1KI': 65.03328894806924, 'W_D': 32.38000000000001, 'J_D': 467.67142176628124, 'W_D_1KI': 43.115845539280976, 'J_D_1KI': 57.41124572474165} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 12, ..., 2499991, + 2499997, 2500000]), + col_indices=tensor([ 71123, 409588, 458159, ..., 75663, 128358, + 158973]), + values=tensor([0.2081, 0.8364, 0.6856, ..., 0.2605, 0.4815, 0.1721]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3363, 0.8875, 0.8268, ..., 0.0761, 0.0201, 0.4445]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.392576932907104 seconds + +[18.95, 18.4, 18.66, 18.8, 18.65, 18.6, 18.52, 18.81, 18.48, 18.43] +[54.76] +14.886182308197021 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 774, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.392576932907104, 'TIME_S_1KI': 13.427101980500135, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 815.1673431968688, 'W': 54.76} +[18.95, 18.4, 18.66, 18.8, 18.65, 18.6, 18.52, 18.81, 18.48, 18.43, 19.08, 18.64, 18.64, 18.61, 18.83, 18.38, 18.69, 18.54, 18.85, 18.87] +335.765 +16.788249999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 774, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.392576932907104, 'TIME_S_1KI': 13.427101980500135, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 815.1673431968688, 'W': 54.76, 'J_1KI': 1053.1877819080992, 'W_1KI': 70.74935400516794, 'W_D': 37.97175, 'J_D': 565.2543930612802, 'W_D_1KI': 49.05910852713178, 'J_D_1KI': 63.38386114616508} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.json index e9ce521..d253e00 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 147, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.563251495361328, "TIME_S_1KI": 71.85885370994102, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 973.9529043245316, "W": 47.49, "J_1KI": 6625.529961391371, "W_1KI": 323.0612244897959, "W_D": 30.996250000000003, "J_D": 635.6893600898982, "W_D_1KI": 210.858843537415, "J_D_1KI": 1434.4139016150682} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 149, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.615598917007446, "TIME_S_1KI": 71.24563031548621, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1009.7270592689514, "W": 53.65, "J_1KI": 6776.691672945982, "W_1KI": 360.0671140939597, "W_D": 36.829, "J_D": 693.1451605930329, "W_D_1KI": 247.17449664429532, "J_D_1KI": 1658.8892392234584} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.output index b78ba4c..c7eb0e4 100644 --- 
a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.09878396987915} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.029554843902588} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 30, 58, ..., 12499941, - 12499962, 12500000]), - col_indices=tensor([ 1470, 2567, 5271, ..., 471166, 490246, - 499700]), - values=tensor([0.1668, 0.7788, 0.4321, ..., 0.7966, 0.9450, 0.5105]), +tensor(crow_indices=tensor([ 0, 24, 43, ..., 12499950, + 12499974, 12500000]), + col_indices=tensor([ 44813, 67635, 69778, ..., 441122, 471109, + 483449]), + values=tensor([0.5651, 0.1070, 0.2363, ..., 0.2654, 0.2204, 0.8535]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9128, 0.0898, 0.7303, ..., 0.7724, 0.8343, 0.8680]) +tensor([0.0975, 0.7325, 0.1851, ..., 0.7815, 0.9825, 0.0299]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 7.09878396987915 seconds +Time: 7.029554843902588 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '147', '-ss', '500000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.563251495361328} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '149', '-ss', '500000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.615598917007446} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 16, 39, ..., 12499961, - 12499982, 12500000]), - col_indices=tensor([ 42415, 50722, 59820, ..., 419133, 436999, - 480407]), - values=tensor([0.4848, 0.7890, 0.2846, ..., 0.4428, 0.7066, 0.1150]), +tensor(crow_indices=tensor([ 0, 22, 50, ..., 12499947, + 12499977, 12500000]), + col_indices=tensor([121511, 129611, 141383, ..., 433293, 442071, + 465951]), + values=tensor([0.3869, 0.3126, 0.6203, ..., 0.4955, 0.6399, 0.1679]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.7955, 0.8202, 0.0668, ..., 0.2866, 0.4586, 0.4680]) +tensor([0.8747, 0.6262, 0.4850, ..., 0.2052, 0.6103, 0.1532]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.563251495361328 seconds +Time: 10.615598917007446 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 16, 39, ..., 12499961, - 12499982, 12500000]), - col_indices=tensor([ 42415, 50722, 59820, ..., 419133, 436999, - 480407]), - values=tensor([0.4848, 0.7890, 0.2846, ..., 0.4428, 0.7066, 0.1150]), +tensor(crow_indices=tensor([ 0, 22, 50, ..., 12499947, + 12499977, 12500000]), + col_indices=tensor([121511, 129611, 141383, ..., 433293, 442071, + 465951]), + values=tensor([0.3869, 0.3126, 0.6203, ..., 0.4955, 0.6399, 0.1679]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.7955, 0.8202, 0.0668, ..., 0.2866, 0.4586, 0.4680]) +tensor([0.8747, 0.6262, 0.4850, ..., 0.2052, 0.6103, 0.1532]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.563251495361328 seconds +Time: 10.615598917007446 seconds -[18.64, 21.68, 17.93, 17.89, 17.96, 18.08, 18.05, 17.94, 17.92, 17.72] -[47.49] -20.508589267730713 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.563251495361328, 'TIME_S_1KI': 71.85885370994102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 973.9529043245316, 'W': 47.49} -[18.64, 21.68, 17.93, 17.89, 17.96, 18.08, 18.05, 17.94, 17.92, 17.72, 18.36, 17.99, 18.51, 17.9, 18.05, 18.98, 18.56, 18.01, 18.0, 18.13] -329.875 -16.49375 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.563251495361328, 'TIME_S_1KI': 71.85885370994102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 973.9529043245316, 'W': 47.49, 'J_1KI': 6625.529961391371, 'W_1KI': 323.0612244897959, 'W_D': 30.996250000000003, 'J_D': 635.6893600898982, 'W_D_1KI': 210.858843537415, 'J_D_1KI': 1434.4139016150682} +[18.79, 18.52, 19.14, 18.58, 18.65, 18.79, 18.51, 18.49, 18.45, 18.51] +[53.65] +18.820634841918945 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 149, 'MATRIX_TYPE': 'synthetic', 
'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.615598917007446, 'TIME_S_1KI': 71.24563031548621, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1009.7270592689514, 'W': 53.65} +[18.79, 18.52, 19.14, 18.58, 18.65, 18.79, 18.51, 18.49, 18.45, 18.51, 18.83, 18.43, 18.61, 18.61, 18.97, 18.85, 19.22, 18.59, 18.64, 18.61] +336.41999999999996 +16.820999999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 149, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.615598917007446, 'TIME_S_1KI': 71.24563031548621, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1009.7270592689514, 'W': 53.65, 'J_1KI': 6776.691672945982, 'W_1KI': 360.0671140939597, 'W_D': 36.829, 'J_D': 693.1451605930329, 'W_D_1KI': 247.17449664429532, 'J_D_1KI': 1658.8892392234584} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json index 4d243f2..0f29de9 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 9147, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.50059986114502, "TIME_S_1KI": 1.1479829300475588, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 681.5257016324997, "W": 47.63, "J_1KI": 74.50811212774677, "W_1KI": 5.207171750300645, "W_D": 31.135, "J_D": 445.50289146184923, "W_D_1KI": 3.4038482562588825, "J_D_1KI": 0.37212728285327235} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 9113, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.462905645370483, "TIME_S_1KI": 1.14812966590261, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 762.6086788606643, "W": 53.41, "J_1KI": 83.68360351812404, "W_1KI": 5.860858114781081, "W_D": 36.17825, "J_D": 516.5670742555857, "W_D_1KI": 3.9699604959947328, "J_D_1KI": 0.4356370565120962} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output index 2f48a00..d2635eb 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.13143563270568848} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 
250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.13171839714050293} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 12, ..., 249990, 249995, +tensor(crow_indices=tensor([ 0, 8, 11, ..., 249994, 249996, 250000]), - col_indices=tensor([33764, 781, 3609, ..., 16676, 21435, 31146]), - values=tensor([0.7781, 0.6572, 0.9120, ..., 0.8330, 0.7571, 0.7121]), + col_indices=tensor([ 469, 11700, 29368, ..., 13379, 15725, 40011]), + values=tensor([0.4755, 0.4561, 0.0643, ..., 0.2814, 0.0236, 0.1702]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7934, 0.6307, 0.2590, ..., 0.3547, 0.1547, 0.8460]) +tensor([0.1153, 0.8349, 0.0283, ..., 0.5756, 0.1210, 0.6674]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.13143563270568848 seconds +Time: 0.13171839714050293 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7988', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.168656826019287} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7971', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.183703422546387} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 18, ..., 249992, 249995, +tensor(crow_indices=tensor([ 0, 8, 16, ..., 249992, 249996, 250000]), - col_indices=tensor([ 6728, 8437, 8523, ..., 40465, 44043, 46138]), - values=tensor([0.4640, 0.7108, 0.7346, ..., 0.1761, 0.2770, 0.7056]), + col_indices=tensor([18157, 19287, 25360, ..., 8756, 10679, 25164]), + values=tensor([0.5500, 0.0351, 0.7744, ..., 0.4439, 0.1084, 0.1584]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0571, 0.2747, 0.4590, ..., 0.7273, 0.2570, 0.3128]) +tensor([0.8126, 0.7393, 0.2533, ..., 0.1690, 0.4952, 0.2081]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 9.168656826019287 seconds +Time: 9.183703422546387 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9147', '-ss', '50000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.50059986114502} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9113', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.462905645370483} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 249987, 249996, +tensor(crow_indices=tensor([ 0, 4, 12, ..., 249992, 249995, 250000]), - col_indices=tensor([ 4811, 5188, 33226, ..., 17568, 20020, 26384]), - values=tensor([0.5580, 0.6578, 0.5141, ..., 0.8482, 0.1339, 0.2046]), + col_indices=tensor([ 658, 8939, 19459, ..., 31752, 38380, 43446]), + values=tensor([0.6807, 0.2742, 0.9335, ..., 0.8451, 0.5422, 0.0945]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.2505, 0.9140, 0.1873, ..., 0.2385, 0.4644, 0.1302]) +tensor([0.4836, 0.3449, 0.7596, ..., 0.7679, 0.9836, 0.8519]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.50059986114502 seconds +Time: 10.462905645370483 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 10, ..., 249987, 249996, +tensor(crow_indices=tensor([ 0, 4, 12, ..., 249992, 249995, 250000]), - col_indices=tensor([ 4811, 5188, 33226, ..., 17568, 20020, 26384]), - values=tensor([0.5580, 0.6578, 0.5141, ..., 0.8482, 0.1339, 0.2046]), + col_indices=tensor([ 658, 8939, 19459, ..., 31752, 38380, 43446]), + values=tensor([0.6807, 0.2742, 0.9335, ..., 0.8451, 0.5422, 0.0945]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.2505, 0.9140, 0.1873, ..., 0.2385, 0.4644, 0.1302]) +tensor([0.4836, 0.3449, 0.7596, ..., 0.7679, 0.9836, 0.8519]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.50059986114502 seconds +Time: 10.462905645370483 seconds -[18.33, 18.14, 21.59, 19.0, 18.11, 18.16, 17.98, 18.16, 18.03, 18.31] -[47.63] -14.308748722076416 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 9147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.50059986114502, 'TIME_S_1KI': 1.1479829300475588, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 681.5257016324997, 'W': 47.63} -[18.33, 18.14, 21.59, 19.0, 18.11, 18.16, 17.98, 18.16, 18.03, 18.31, 18.43, 17.74, 17.82, 18.07, 18.59, 18.18, 17.93, 18.01, 17.93, 17.85] -329.90000000000003 -16.495 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 9147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.50059986114502, 'TIME_S_1KI': 1.1479829300475588, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 681.5257016324997, 'W': 47.63, 'J_1KI': 74.50811212774677, 'W_1KI': 5.207171750300645, 'W_D': 31.135, 'J_D': 445.50289146184923, 'W_D_1KI': 3.4038482562588825, 'J_D_1KI': 0.37212728285327235} +[19.09, 18.91, 19.2, 18.56, 18.6, 18.52, 18.84, 18.77, 18.58, 22.57] +[53.41] +14.278387546539307 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 9113, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.462905645370483, 'TIME_S_1KI': 1.14812966590261, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 762.6086788606643, 'W': 53.41} +[19.09, 18.91, 19.2, 18.56, 18.6, 18.52, 18.84, 18.77, 18.58, 22.57, 19.29, 18.45, 19.43, 18.57, 19.96, 22.08, 18.58, 18.73, 19.2, 18.36] +344.635 +17.231749999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 9113, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.462905645370483, 'TIME_S_1KI': 1.14812966590261, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 762.6086788606643, 'W': 53.41, 'J_1KI': 83.68360351812404, 'W_1KI': 5.860858114781081, 'W_D': 36.17825, 'J_D': 516.5670742555857, 'W_D_1KI': 3.9699604959947328, 'J_D_1KI': 0.4356370565120962} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json index db8a72e..61f8eec 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1997, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.44909930229187, "TIME_S_1KI": 5.232398248518714, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 733.6282246422767, "W": 48.73, "J_1KI": 367.3651600612302, "W_1KI": 24.40160240360541, "W_D": 32.179249999999996, "J_D": 484.4573373244404, "W_D_1KI": 16.113795693540307, "J_D_1KI": 8.069001348793345} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1979, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.479721784591675, "TIME_S_1KI": 5.295463256488972, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 828.9978633999824, "W": 55.01, "J_1KI": 418.8973539161104, "W_1KI": 27.79686710459828, "W_D": 37.86475, "J_D": 570.6198299977184, "W_D_1KI": 19.133274381000504, "J_D_1KI": 9.668152794846137} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output index a863f3a..2510e91 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.5546464920043945} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.5715610980987549} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 55, 117, ..., 2499903, - 2499953, 2500000]), - col_indices=tensor([ 566, 1603, 2858, ..., 47622, 48780, 49985]), - values=tensor([0.9915, 0.7849, 0.7900, ..., 0.9170, 0.4625, 0.4875]), +tensor(crow_indices=tensor([ 0, 43, 95, ..., 2499900, + 2499947, 2500000]), + col_indices=tensor([ 440, 592, 3640, ..., 47603, 48117, 49754]), + values=tensor([0.1661, 0.7674, 0.7986, ..., 0.7717, 0.8953, 0.0926]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6866, 0.0421, 0.1446, ..., 0.6566, 0.6603, 0.7026]) +tensor([0.4593, 0.0804, 0.6079, ..., 0.0764, 0.7492, 0.9367]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,39 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 0.5546464920043945 seconds +Time: 0.5715610980987549 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1893', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.948294878005981} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1837', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.74488639831543} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 33, 68, ..., 2499891, - 2499938, 2500000]), - col_indices=tensor([ 3534, 3824, 4376, ..., 49368, 49484, 49571]), - values=tensor([0.2824, 0.5783, 0.2215, ..., 0.8826, 0.1249, 0.2741]), - size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5004, 0.8496, 0.6985, ..., 0.2602, 0.0299, 0.5346]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 2500000 -Density: 0.001 -Time: 9.948294878005981 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1997', '-ss', '50000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.44909930229187} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 107, ..., 2499904, +tensor(crow_indices=tensor([ 0, 51, 106, ..., 2499900, 2499948, 2500000]), - col_indices=tensor([ 1519, 4331, 6515, ..., 42103, 42230, 49135]), - values=tensor([0.6099, 0.9393, 0.8647, ..., 0.8575, 0.6331, 0.4704]), + col_indices=tensor([ 108, 457, 651, ..., 47686, 48796, 49691]), + values=tensor([0.2349, 0.1510, 0.8409, ..., 0.7616, 0.6600, 0.1731]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1131, 0.9150, 0.5556, ..., 0.6033, 0.7715, 0.6124]) +tensor([0.3877, 0.3672, 0.7824, ..., 0.6258, 0.7734, 0.2600]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.44909930229187 seconds +Time: 9.74488639831543 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1979', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.479721784591675} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 107, ..., 2499904, - 2499948, 2500000]), - col_indices=tensor([ 1519, 4331, 6515, ..., 42103, 42230, 49135]), - values=tensor([0.6099, 0.9393, 0.8647, ..., 0.8575, 0.6331, 0.4704]), +tensor(crow_indices=tensor([ 0, 59, 111, ..., 2499895, + 2499957, 2500000]), + col_indices=tensor([ 1818, 1883, 2319, ..., 46626, 47772, 49582]), + values=tensor([0.6336, 0.5989, 0.4201, ..., 0.3558, 0.1630, 0.4148]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1131, 0.9150, 0.5556, ..., 0.6033, 0.7715, 0.6124]) +tensor([0.7936, 0.0330, 0.0768, ..., 0.8085, 0.2404, 0.8732]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +56,30 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.44909930229187 seconds +Time: 10.479721784591675 seconds -[18.64, 18.18, 18.44, 22.12, 18.9, 18.07, 17.86, 17.86, 18.53, 18.0] -[48.73] -15.054960489273071 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1997, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.44909930229187, 'TIME_S_1KI': 5.232398248518714, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 733.6282246422767, 'W': 48.73} -[18.64, 18.18, 18.44, 22.12, 18.9, 18.07, 17.86, 17.86, 18.53, 18.0, 18.34, 18.09, 18.08, 17.94, 18.14, 18.44, 18.04, 17.91, 17.9, 18.05] -331.015 -16.55075 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1997, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.44909930229187, 'TIME_S_1KI': 5.232398248518714, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 733.6282246422767, 'W': 48.73, 
'J_1KI': 367.3651600612302, 'W_1KI': 24.40160240360541, 'W_D': 32.179249999999996, 'J_D': 484.4573373244404, 'W_D_1KI': 16.113795693540307, 'J_D_1KI': 8.069001348793345} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 59, 111, ..., 2499895, + 2499957, 2500000]), + col_indices=tensor([ 1818, 1883, 2319, ..., 46626, 47772, 49582]), + values=tensor([0.6336, 0.5989, 0.4201, ..., 0.3558, 0.1630, 0.4148]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7936, 0.0330, 0.0768, ..., 0.8085, 0.2404, 0.8732]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.479721784591675 seconds + +[19.91, 18.84, 18.94, 19.94, 18.54, 18.89, 18.6, 18.5, 18.83, 18.88] +[55.01] +15.069948434829712 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1979, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.479721784591675, 'TIME_S_1KI': 5.295463256488972, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 828.9978633999824, 'W': 55.01} +[19.91, 18.84, 18.94, 19.94, 18.54, 18.89, 18.6, 18.5, 18.83, 18.88, 18.92, 18.57, 18.74, 18.6, 18.52, 22.63, 19.15, 18.43, 19.1, 18.46] +342.90500000000003 +17.14525 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1979, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.479721784591675, 'TIME_S_1KI': 5.295463256488972, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 828.9978633999824, 'W': 55.01, 'J_1KI': 418.8973539161104, 'W_1KI': 27.79686710459828, 'W_D': 37.86475, 'J_D': 570.6198299977184, 'W_D_1KI': 19.133274381000504, 'J_D_1KI': 9.668152794846137} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.json index 981b4ab..e6a8a4b 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 134, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.514585256576538, "TIME_S_1KI": 78.46705415355626, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2494.187549471855, "W": 43.5, "J_1KI": 18613.339921431754, "W_1KI": 324.6268656716418, "W_D": 26.962, "J_D": 1545.9375795140265, "W_D_1KI": 201.2089552238806, "J_D_1KI": 1501.5593673423925} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 152, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.39022421836853, "TIME_S_1KI": 68.35673827874034, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1277.9351173400878, 
"W": 53.72, "J_1KI": 8407.467877237419, "W_1KI": 353.4210526315789, "W_D": 36.683499999999995, "J_D": 872.6569783496856, "W_D_1KI": 241.33881578947367, "J_D_1KI": 1587.755367036011} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.output index 7744b5c..9df8811 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.827495813369751} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 6.902969121932983} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 509, 1029, ..., 24999021, - 24999525, 25000000]), - col_indices=tensor([ 77, 168, 174, ..., 49716, 49743, 49917]), - values=tensor([0.0871, 0.3865, 0.3717, ..., 0.4376, 0.0483, 0.0994]), +tensor(crow_indices=tensor([ 0, 509, 1046, ..., 24998991, + 24999499, 25000000]), + col_indices=tensor([ 275, 623, 640, ..., 49748, 49845, 49925]), + values=tensor([0.4383, 0.1606, 0.4979, ..., 0.8937, 0.3916, 0.2299]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.1988, 0.8388, 0.2584, ..., 0.5965, 0.5005, 0.7795]) +tensor([0.4910, 0.4324, 0.6781, ..., 0.4435, 0.1381, 0.0974]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 7.827495813369751 seconds +Time: 6.902969121932983 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '134', '-ss', '50000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.514585256576538} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '152', '-ss', '50000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.39022421836853} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 525, 1042, ..., 24999041, - 24999512, 25000000]), - col_indices=tensor([ 163, 320, 387, ..., 49821, 49828, 49920]), - values=tensor([0.8765, 0.3303, 0.5777, ..., 0.2129, 0.9852, 0.0873]), +tensor(crow_indices=tensor([ 0, 516, 1047, ..., 24999044, + 24999539, 25000000]), + col_indices=tensor([ 752, 778, 926, ..., 49813, 49898, 49988]), + values=tensor([0.8686, 0.1586, 0.7679, ..., 0.0255, 0.0592, 0.0678]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.2121, 0.5034, 0.7106, ..., 0.6677, 0.7232, 0.0645]) +tensor([0.1218, 0.2324, 0.7166, ..., 0.4698, 0.1352, 0.0532]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,16 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 10.514585256576538 seconds +Time: 10.39022421836853 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 525, 1042, ..., 24999041, - 24999512, 25000000]), - col_indices=tensor([ 163, 320, 387, ..., 49821, 49828, 49920]), - values=tensor([0.8765, 0.3303, 0.5777, ..., 0.2129, 0.9852, 0.0873]), +tensor(crow_indices=tensor([ 0, 516, 1047, ..., 24999044, + 24999539, 25000000]), + col_indices=tensor([ 752, 778, 926, ..., 49813, 49898, 49988]), + values=tensor([0.8686, 0.1586, 0.7679, ..., 0.0255, 0.0592, 0.0678]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.2121, 0.5034, 0.7106, ..., 0.6677, 0.7232, 0.0645]) +tensor([0.1218, 0.2324, 0.7166, ..., 0.4698, 0.1352, 0.0532]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,13 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 10.514585256576538 seconds +Time: 10.39022421836853 seconds -[18.5, 18.14, 19.17, 17.93, 18.04, 17.95, 18.11, 18.93, 18.07, 18.37] -[43.5] -57.337644815444946 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 134, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.514585256576538, 'TIME_S_1KI': 78.46705415355626, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2494.187549471855, 'W': 43.5} -[18.5, 18.14, 19.17, 17.93, 18.04, 17.95, 18.11, 18.93, 18.07, 18.37, 22.44, 18.33, 18.23, 18.16, 18.38, 17.89, 18.61, 17.96, 18.23, 17.95] -330.76 -16.538 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 134, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.514585256576538, 'TIME_S_1KI': 78.46705415355626, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2494.187549471855, 'W': 43.5, 'J_1KI': 18613.339921431754, 'W_1KI': 324.6268656716418, 'W_D': 26.962, 'J_D': 1545.9375795140265, 'W_D_1KI': 201.2089552238806, 'J_D_1KI': 1501.5593673423925} +[18.92, 22.47, 19.1, 18.56, 19.07, 18.37, 19.4, 18.4, 
18.48, 18.45] +[53.72] +23.788814544677734 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 152, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.39022421836853, 'TIME_S_1KI': 68.35673827874034, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1277.9351173400878, 'W': 53.72} +[18.92, 22.47, 19.1, 18.56, 19.07, 18.37, 19.4, 18.4, 18.48, 18.45, 18.92, 18.45, 18.96, 18.55, 18.64, 18.41, 18.69, 18.85, 18.72, 18.93] +340.73 +17.0365 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 152, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.39022421836853, 'TIME_S_1KI': 68.35673827874034, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1277.9351173400878, 'W': 53.72, 'J_1KI': 8407.467877237419, 'W_1KI': 353.4210526315789, 'W_D': 36.683499999999995, 'J_D': 872.6569783496856, 'W_D_1KI': 241.33881578947367, 'J_D_1KI': 1587.755367036011} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..271cf75 --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 34.656867027282715, "TIME_S_1KI": 346.56867027282715, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4525.48052918911, "W": 53.5, "J_1KI": 45254.8052918911, "W_1KI": 535.0, "W_D": 36.41225, "J_D": 3080.0547364292743, "W_D_1KI": 364.1225, "J_D_1KI": 3641.225} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..8952b7e --- /dev/null +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 34.656867027282715} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2537, 5021, ..., + 124995008, 124997510, 125000000]), + col_indices=tensor([ 5, 11, 12, ..., 49956, 49966, 49969]), + values=tensor([0.1015, 0.8493, 0.5638, ..., 0.6405, 0.0202, 0.3838]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.9316, 0.6211, 0.3946, ..., 0.1857, 0.3082, 0.5923]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 34.656867027282715 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2537, 5021, ..., + 124995008, 124997510, 125000000]), + col_indices=tensor([ 5, 11, 12, ..., 49956, 49966, 49969]), + values=tensor([0.1015, 0.8493, 0.5638, ..., 0.6405, 0.0202, 0.3838]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.9316, 0.6211, 0.3946, ..., 0.1857, 0.3082, 0.5923]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 34.656867027282715 seconds + +[19.53, 18.65, 23.68, 18.84, 18.77, 19.19, 18.64, 18.44, 18.65, 18.73] +[53.5] +84.5884211063385 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 34.656867027282715, 'TIME_S_1KI': 346.56867027282715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4525.48052918911, 'W': 53.5} +[19.53, 18.65, 23.68, 18.84, 18.77, 19.19, 18.64, 18.44, 18.65, 18.73, 19.15, 18.5, 18.6, 18.49, 18.91, 18.59, 18.51, 18.49, 18.82, 18.56] +341.755 +17.08775 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 34.656867027282715, 'TIME_S_1KI': 346.56867027282715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4525.48052918911, 'W': 53.5, 'J_1KI': 45254.8052918911, 'W_1KI': 535.0, 'W_D': 36.41225, 'J_D': 3080.0547364292743, 'W_D_1KI': 364.1225, 'J_D_1KI': 3641.225} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json index 01c9d2b..d393b6d 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 22242, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.517975091934204, "TIME_S_1KI": 0.472888008809199, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 668.1220734119415, "W": 47.0, "J_1KI": 30.03875880819807, "W_1KI": 2.113119323801816, "W_D": 30.704, "J_D": 436.4685136604309, "W_D_1KI": 1.3804513982555526, "J_D_1KI": 
0.062065075004745646} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 22192, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.510575294494629, "TIME_S_1KI": 0.47362001146785454, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 756.9577831959724, "W": 52.91, "J_1KI": 34.10948914906149, "W_1KI": 2.384192501802451, "W_D": 36.012249999999995, "J_D": 515.2098455471396, "W_D_1KI": 1.6227582011535686, "J_D_1KI": 0.0731235671031709} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output index f23093c..a80afda 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,32 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.06578350067138672} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.07807016372680664} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24997, 24997, 25000]), + col_indices=tensor([33411, 44594, 10023, ..., 13012, 22802, 46143]), + values=tensor([0.8310, 0.0198, 0.3050, ..., 0.9692, 0.0759, 0.4781]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.4739, 0.7206, 0.8157, ..., 0.6111, 0.0255, 0.2691]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.07807016372680664 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '13449', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.363297462463379} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([16918, 1143, 1724, ..., 48553, 41363, 39308]), - values=tensor([0.9238, 0.1195, 0.2813, ..., 0.9276, 0.6113, 0.0798]), + col_indices=tensor([47102, 32886, 19412, ..., 8432, 6167, 30462]), + values=tensor([0.3403, 0.1048, 0.7701, ..., 0.0994, 0.7400, 0.5573]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0373, 0.8428, 0.6841, ..., 0.3333, 0.7324, 0.6824]) +tensor([0.6350, 0.1598, 0.4068, ..., 0.6399, 0.8986, 0.2435]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.06578350067138672 seconds +Time: 6.363297462463379 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15961', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.534844875335693} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22192', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.510575294494629} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 24999, 25000, 25000]), - col_indices=tensor([29190, 43986, 25006, ..., 44362, 15421, 8070]), - values=tensor([0.3395, 0.8970, 0.1159, ..., 0.8275, 0.4942, 0.3559]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([17655, 8136, 6685, ..., 2023, 30575, 7570]), + values=tensor([0.7615, 0.3642, 0.5154, ..., 0.9021, 0.3033, 0.6664]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7532, 0.7737, 0.3401, ..., 0.4031, 0.1788, 0.5939]) +tensor([0.2301, 0.7680, 0.7694, ..., 0.7461, 0.0620, 0.5948]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 7.534844875335693 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22242', '-ss', '50000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.517975091934204} +Time: 10.510575294494629 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 25000, 25000, 25000]), - col_indices=tensor([22981, 23025, 47875, ..., 28752, 43497, 8642]), - values=tensor([0.4359, 0.4232, 0.6349, ..., 0.3036, 0.5759, 0.5327]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([17655, 8136, 6685, ..., 2023, 30575, 7570]), + values=tensor([0.7615, 0.3642, 0.5154, ..., 0.9021, 0.3033, 0.6664]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8882, 0.3621, 0.9798, ..., 0.1733, 0.2748, 0.2728]) +tensor([0.2301, 0.7680, 0.7694, ..., 0.7461, 0.0620, 0.5948]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,29 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.517975091934204 seconds +Time: 10.510575294494629 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 25000, 25000, 25000]), - col_indices=tensor([22981, 23025, 47875, ..., 28752, 43497, 8642]), - values=tensor([0.4359, 0.4232, 0.6349, ..., 0.3036, 0.5759, 0.5327]), - size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8882, 0.3621, 0.9798, ..., 0.1733, 0.2748, 0.2728]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.517975091934204 seconds - -[18.31, 17.89, 18.04, 17.91, 18.19, 18.28, 17.95, 17.98, 18.92, 18.1] -[47.0] -14.215363264083862 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 22242, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.517975091934204, 'TIME_S_1KI': 0.472888008809199, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 668.1220734119415, 'W': 47.0} -[18.31, 17.89, 18.04, 17.91, 18.19, 18.28, 17.95, 17.98, 18.92, 18.1, 18.24, 18.64, 18.01, 17.83, 18.02, 18.06, 18.04, 17.88, 17.99, 17.93] -325.92 -16.296 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 22242, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.517975091934204, 'TIME_S_1KI': 0.472888008809199, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 668.1220734119415, 'W': 47.0, 'J_1KI': 30.03875880819807, 'W_1KI': 2.113119323801816, 'W_D': 30.704, 'J_D': 436.4685136604309, 'W_D_1KI': 1.3804513982555526, 'J_D_1KI': 0.062065075004745646} +[24.17, 19.15, 18.5, 19.16, 18.56, 18.65, 18.51, 18.3, 18.41, 18.37] +[52.91] +14.306516408920288 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 22192, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.510575294494629, 'TIME_S_1KI': 0.47362001146785454, 'BASELINE_TIME_S': 10, 
'BASELINE_DELAY_S': 10, 'J': 756.9577831959724, 'W': 52.91} +[24.17, 19.15, 18.5, 19.16, 18.56, 18.65, 18.51, 18.3, 18.41, 18.37, 18.8, 18.64, 18.89, 18.42, 18.48, 18.3, 18.84, 18.69, 18.57, 18.43] +337.955 +16.89775 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 22192, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.510575294494629, 'TIME_S_1KI': 0.47362001146785454, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 756.9577831959724, 'W': 52.91, 'J_1KI': 34.10948914906149, 'W_1KI': 2.384192501802451, 'W_D': 36.012249999999995, 'J_D': 515.2098455471396, 'W_D_1KI': 1.6227582011535686, 'J_D_1KI': 0.0731235671031709} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.json index 7de68b9..ce7aa5d 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11256, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.481152534484863, "TIME_S_1KI": 0.9311613836607022, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 671.3460792446136, "W": 47.13, "J_1KI": 59.64339723210853, "W_1KI": 4.187100213219616, "W_D": 30.78675, "J_D": 438.54368566060066, "W_D_1KI": 2.735141257995736, "J_D_1KI": 0.24299407053977753} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11204, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.484663248062134, "TIME_S_1KI": 0.9357964341362134, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 758.3656199932099, "W": 52.9, "J_1KI": 67.68704212720544, "W_1KI": 4.721528025705106, "W_D": 35.66575, "J_D": 511.2982724248171, "W_D_1KI": 3.183305069617994, "J_D_1KI": 0.28412219471777883} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.output index d1f10c6..f64ec3b 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.1096796989440918} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.11034727096557617} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 124996, 124997, +tensor(crow_indices=tensor([ 0, 1, 4, ..., 124991, 124997, 125000]), - col_indices=tensor([38708, 28625, 11454, ..., 884, 22723, 30800]), - values=tensor([0.0038, 0.3289, 0.1581, ..., 0.9719, 0.8303, 0.9998]), + col_indices=tensor([11731, 21946, 36023, ..., 38897, 41892, 44011]), + values=tensor([0.5067, 0.6689, 0.8013, ..., 0.5225, 0.7710, 0.9378]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.0207, 0.8237, 0.8176, ..., 0.3561, 0.4550, 0.3366]) +tensor([0.8519, 0.4822, 0.7850, ..., 0.3577, 0.9381, 0.9299]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.1096796989440918 seconds +Time: 0.11034727096557617 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9573', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.929835081100464} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9515', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.917027235031128} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 4, ..., 124997, 124998, +tensor(crow_indices=tensor([ 0, 2, 2, ..., 124999, 125000, 125000]), - col_indices=tensor([ 5176, 42593, 37500, ..., 3219, 12793, 38658]), - values=tensor([0.6803, 0.7664, 0.9859, ..., 0.5422, 0.9603, 0.0980]), + col_indices=tensor([ 795, 9459, 9721, ..., 26673, 30658, 44375]), + values=tensor([0.5955, 0.9891, 0.5779, ..., 0.4320, 0.5377, 0.0329]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.2299, 0.8541, 0.6200, ..., 0.4981, 0.6521, 0.8502]) +tensor([0.7199, 0.8944, 0.9457, ..., 0.0963, 0.7528, 0.5298]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 8.929835081100464 seconds +Time: 8.917027235031128 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '11256', '-ss', '50000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.481152534484863} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '11204', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.484663248062134} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 124996, 124999, +tensor(crow_indices=tensor([ 0, 4, 5, ..., 124990, 124995, 125000]), - col_indices=tensor([ 3517, 32781, 39284, ..., 16837, 28625, 12663]), - values=tensor([0.4051, 0.3118, 0.0683, ..., 0.4752, 0.1421, 0.6822]), + col_indices=tensor([ 7117, 22714, 28697, ..., 16609, 18473, 31107]), + values=tensor([0.4912, 0.1932, 0.7634, ..., 0.8547, 0.4308, 0.8265]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.2287, 0.1097, 0.9835, ..., 0.8729, 0.0701, 0.5217]) +tensor([0.3359, 0.4773, 0.2129, ..., 0.6953, 0.5472, 0.3647]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.481152534484863 seconds +Time: 10.484663248062134 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 124996, 124999, +tensor(crow_indices=tensor([ 0, 4, 5, ..., 124990, 124995, 125000]), - col_indices=tensor([ 3517, 32781, 39284, ..., 16837, 28625, 12663]), - values=tensor([0.4051, 0.3118, 0.0683, ..., 0.4752, 0.1421, 0.6822]), + col_indices=tensor([ 7117, 22714, 28697, ..., 16609, 18473, 31107]), + values=tensor([0.4912, 0.1932, 0.7634, ..., 0.8547, 0.4308, 0.8265]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.2287, 0.1097, 0.9835, ..., 0.8729, 0.0701, 0.5217]) +tensor([0.3359, 0.4773, 0.2129, ..., 0.6953, 0.5472, 0.3647]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.481152534484863 seconds +Time: 10.484663248062134 seconds -[18.46, 17.73, 18.38, 18.1, 18.29, 18.18, 18.04, 18.01, 18.16, 17.9] -[47.13] -14.244559288024902 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11256, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.481152534484863, 'TIME_S_1KI': 0.9311613836607022, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 671.3460792446136, 'W': 47.13} -[18.46, 17.73, 18.38, 18.1, 18.29, 18.18, 18.04, 18.01, 18.16, 17.9, 18.94, 17.91, 17.96, 17.99, 18.41, 18.07, 18.2, 17.95, 18.46, 18.75] -326.865 -16.34325 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11256, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.481152534484863, 'TIME_S_1KI': 0.9311613836607022, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 671.3460792446136, 'W': 47.13, 'J_1KI': 59.64339723210853, 'W_1KI': 4.187100213219616, 'W_D': 30.78675, 'J_D': 438.54368566060066, 'W_D_1KI': 2.735141257995736, 'J_D_1KI': 0.24299407053977753} +[19.03, 18.85, 18.61, 18.49, 23.35, 19.32, 18.78, 19.05, 18.99, 18.44] +[52.9] +14.33583402633667 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11204, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.484663248062134, 'TIME_S_1KI': 0.9357964341362134, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 758.3656199932099, 'W': 52.9} +[19.03, 18.85, 18.61, 18.49, 23.35, 19.32, 18.78, 19.05, 18.99, 18.44, 23.11, 19.31, 18.54, 18.97, 18.59, 18.59, 18.45, 18.56, 18.61, 18.67] +344.685 +17.23425 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11204, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.484663248062134, 'TIME_S_1KI': 0.9357964341362134, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 758.3656199932099, 'W': 52.9, 'J_1KI': 67.68704212720544, 'W_1KI': 4.721528025705106, 'W_D': 35.66575, 'J_D': 511.2982724248171, 'W_D_1KI': 3.183305069617994, 'J_D_1KI': 0.28412219471777883} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json index b6b292e..a276d01 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 220904, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.611992359161377, "TIME_S_1KI": 0.04803893256419701, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 660.6772035217285, "W": 46.78, "J_1KI": 2.990788774860249, "W_1KI": 0.211766197081085, "W_D": 30.2935, "J_D": 427.83721387100223, "W_D_1KI": 0.13713423025386592, "J_D_1KI": 0.0006207865419090009} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 220871, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.55235505104065, "TIME_S_1KI": 0.047776100307603306, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 742.3232311749458, "W": 52.61, "J_1KI": 3.3608904345746873, "W_1KI": 0.2381933345708581, "W_D": 35.704499999999996, "J_D": 503.7878693686723, "W_D_1KI": 0.1616531821742103, "J_D_1KI": 0.000731889574340725} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output index bcbbe28..6023764 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.021222829818725586} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.020276308059692383} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 2499, 2499, 2500]), - col_indices=tensor([2865, 4172, 3505, ..., 1471, 1829, 2284]), - values=tensor([0.9472, 0.7106, 0.6508, ..., 0.6327, 0.4564, 0.0632]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), + col_indices=tensor([2705, 4386, 4587, ..., 1742, 2489, 353]), + values=tensor([0.7012, 0.7471, 0.5716, ..., 0.4505, 0.8006, 0.6036]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.0811, 0.3767, 0.7595, ..., 0.7571, 0.6856, 0.3676]) +tensor([0.9629, 0.3143, 0.3893, ..., 0.8901, 0.2021, 0.9990]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.021222829818725586 seconds +Time: 0.020276308059692383 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '49475', '-ss', '5000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.351640462875366} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '51784', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.46175479888916} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2499, 2500]), - col_indices=tensor([1365, 3411, 751, ..., 1715, 4182, 3544]), - values=tensor([0.2168, 0.4073, 0.8209, ..., 0.1504, 0.5765, 0.8829]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([1585, 227, 3606, ..., 3508, 1030, 4424]), + values=tensor([0.0799, 0.3568, 0.0076, ..., 0.5931, 0.1628, 0.2374]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.3766, 0.6993, 0.2098, ..., 0.7754, 0.7068, 0.6832]) +tensor([0.3993, 0.7902, 0.0512, ..., 0.6094, 0.0083, 0.1392]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 2.351640462875366 seconds +Time: 2.46175479888916 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '220904', '-ss', '5000', '-sd', '0.0001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.611992359161377} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '220871', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.55235505104065} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2499, 2500]), - col_indices=tensor([ 469, 3066, 4238, ..., 2570, 4418, 4413]), - values=tensor([0.6778, 0.7938, 0.7053, ..., 0.2703, 0.2957, 0.5133]), +tensor(crow_indices=tensor([ 0, 2, 2, ..., 2499, 2499, 2500]), + col_indices=tensor([ 3, 4896, 3718, ..., 1484, 995, 1337]), + values=tensor([0.4186, 0.1762, 0.3830, ..., 0.8696, 0.5262, 0.8434]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.1744, 0.7493, 0.4982, ..., 0.1073, 0.6650, 0.9357]) +tensor([0.0085, 0.7795, 0.4163, ..., 0.6854, 0.9665, 0.8065]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.611992359161377 seconds +Time: 10.55235505104065 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2499, 2500]), - col_indices=tensor([ 469, 3066, 4238, ..., 2570, 4418, 4413]), - values=tensor([0.6778, 0.7938, 0.7053, ..., 0.2703, 0.2957, 0.5133]), +tensor(crow_indices=tensor([ 0, 2, 2, ..., 2499, 2499, 2500]), + col_indices=tensor([ 3, 4896, 3718, ..., 1484, 995, 1337]), + values=tensor([0.4186, 0.1762, 0.3830, ..., 0.8696, 0.5262, 0.8434]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.1744, 0.7493, 0.4982, ..., 0.1073, 0.6650, 0.9357]) +tensor([0.0085, 0.7795, 0.4163, ..., 0.6854, 0.9665, 0.8065]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.611992359161377 seconds +Time: 10.55235505104065 seconds -[18.28, 17.78, 18.16, 18.31, 18.21, 17.89, 17.98, 18.38, 18.01, 17.96] -[46.78] -14.123069763183594 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 220904, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.611992359161377, 'TIME_S_1KI': 0.04803893256419701, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 660.6772035217285, 'W': 46.78} -[18.28, 17.78, 18.16, 18.31, 18.21, 17.89, 17.98, 18.38, 18.01, 17.96, 18.63, 18.15, 17.86, 18.23, 18.11, 18.07, 18.06, 17.82, 21.74, 19.07] -329.73 -16.4865 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 220904, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.611992359161377, 'TIME_S_1KI': 0.04803893256419701, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 660.6772035217285, 'W': 46.78, 'J_1KI': 2.990788774860249, 'W_1KI': 0.211766197081085, 'W_D': 30.2935, 'J_D': 427.83721387100223, 'W_D_1KI': 0.13713423025386592, 'J_D_1KI': 0.0006207865419090009} +[19.4, 18.69, 18.81, 18.65, 19.08, 18.57, 18.71, 18.57, 18.99, 18.41] +[52.61] +14.109926462173462 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 220871, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.55235505104065, 'TIME_S_1KI': 0.047776100307603306, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 742.3232311749458, 'W': 52.61} +[19.4, 18.69, 18.81, 18.65, 19.08, 18.57, 18.71, 18.57, 18.99, 18.41, 19.41, 18.5, 18.98, 18.6, 18.94, 18.51, 18.72, 18.65, 19.15, 18.76] +338.11 +16.9055 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 220871, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.55235505104065, 'TIME_S_1KI': 0.047776100307603306, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 742.3232311749458, 'W': 52.61, 'J_1KI': 3.3608904345746873, 'W_1KI': 0.2381933345708581, 'W_D': 35.704499999999996, 'J_D': 503.7878693686723, 'W_D_1KI': 0.1616531821742103, 'J_D_1KI': 0.000731889574340725} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json index 96eeba5..75d1dff 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 111257, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.7375807762146, "TIME_S_1KI": 0.09651150737674573, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 673.9428442955017, "W": 47.23, "J_1KI": 6.057532059065961, "W_1KI": 0.42451261493658826, "W_D": 30.839999999999996, "J_D": 440.0676967620849, "W_D_1KI": 0.2771960415973826, "J_D_1KI": 0.0024914930440096588} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 108957, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.472262620925903, "TIME_S_1KI": 0.09611372028346873, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 747.4210280299187, "W": 52.85, "J_1KI": 6.859779803316159, "W_1KI": 0.48505373679524955, "W_D": 35.6375, "J_D": 503.99653522074226, "W_D_1KI": 0.32707857228080806, "J_D_1KI": 0.0030019050843985064} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output index 1589984..14c7cd6 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.025800704956054688} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.026506423950195312} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 11, ..., 24988, 24993, 25000]), - col_indices=tensor([ 36, 564, 3279, ..., 4511, 4767, 4922]), - values=tensor([0.2797, 0.2996, 0.9239, ..., 0.1899, 0.3417, 0.3512]), +tensor(crow_indices=tensor([ 0, 6, 16, ..., 24990, 24995, 25000]), + col_indices=tensor([ 221, 1679, 2610, ..., 3544, 4870, 4971]), + values=tensor([0.6544, 0.8016, 0.8634, ..., 0.0114, 0.8406, 0.9999]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.3254, 0.3602, 0.2662, ..., 0.3074, 0.8226, 0.4658]) +tensor([0.5772, 0.0116, 0.1248, ..., 0.2008, 0.4824, 0.3225]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.025800704956054688 seconds +Time: 0.026506423950195312 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '40696', '-ss', '5000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.8407018184661865} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '39613', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.817412853240967} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 11, ..., 24987, 24992, 25000]), - col_indices=tensor([ 298, 713, 1200, ..., 1957, 3799, 4153]), - values=tensor([0.8486, 0.9770, 0.8154, ..., 0.4467, 0.7513, 0.9966]), +tensor(crow_indices=tensor([ 0, 4, 10, ..., 24989, 24995, 25000]), + col_indices=tensor([ 777, 1395, 2062, ..., 2560, 3270, 3983]), + values=tensor([0.6234, 0.4843, 0.1198, ..., 0.4624, 0.3011, 0.6442]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8767, 0.4273, 0.1763, ..., 0.9403, 0.3580, 0.5902]) +tensor([0.7878, 0.6442, 0.3549, ..., 0.6051, 0.0116, 0.1909]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 3.8407018184661865 seconds +Time: 3.817412853240967 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '111257', '-ss', '5000', '-sd', '0.001', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.7375807762146} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '108957', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.472262620925903} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 24991, 24996, 25000]), - col_indices=tensor([ 575, 1907, 4405, ..., 1224, 3086, 3740]), - values=tensor([0.1597, 0.6483, 0.2533, ..., 0.7760, 0.1307, 0.6720]), +tensor(crow_indices=tensor([ 0, 5, 13, ..., 24993, 24997, 25000]), + col_indices=tensor([ 217, 231, 2207, ..., 518, 4354, 4594]), + values=tensor([0.3019, 0.6177, 0.0008, ..., 0.6182, 0.7561, 0.0455]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0745, 0.7131, 0.6004, ..., 0.2535, 0.5073, 0.4932]) +tensor([0.2891, 0.1278, 0.3759, ..., 0.1903, 0.5943, 0.5416]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.7375807762146 seconds +Time: 10.472262620925903 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 24991, 24996, 25000]), - col_indices=tensor([ 575, 1907, 4405, ..., 1224, 3086, 3740]), - values=tensor([0.1597, 0.6483, 0.2533, ..., 0.7760, 0.1307, 0.6720]), +tensor(crow_indices=tensor([ 0, 5, 13, ..., 24993, 24997, 25000]), + col_indices=tensor([ 217, 231, 2207, ..., 518, 4354, 4594]), + values=tensor([0.3019, 0.6177, 0.0008, ..., 0.6182, 0.7561, 0.0455]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0745, 0.7131, 0.6004, ..., 0.2535, 0.5073, 0.4932]) +tensor([0.2891, 0.1278, 0.3759, ..., 0.1903, 0.5943, 0.5416]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.7375807762146 seconds +Time: 10.472262620925903 seconds -[18.26, 17.96, 17.92, 18.07, 18.39, 18.16, 18.05, 18.66, 19.34, 18.38] -[47.23] -14.269380569458008 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 111257, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.7375807762146, 'TIME_S_1KI': 0.09651150737674573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.9428442955017, 'W': 47.23} -[18.26, 17.96, 17.92, 18.07, 18.39, 18.16, 18.05, 18.66, 19.34, 18.38, 18.78, 18.12, 17.96, 17.92, 17.93, 18.7, 18.05, 17.87, 17.97, 18.04] -327.8 -16.39 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 111257, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.7375807762146, 'TIME_S_1KI': 0.09651150737674573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.9428442955017, 'W': 47.23, 'J_1KI': 6.057532059065961, 'W_1KI': 0.42451261493658826, 'W_D': 30.839999999999996, 'J_D': 440.0676967620849, 'W_D_1KI': 0.2771960415973826, 'J_D_1KI': 0.0024914930440096588} +[19.22, 22.12, 20.05, 18.42, 19.49, 18.7, 18.82, 18.59, 18.76, 18.45] +[52.85] +14.142308950424194 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 108957, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.472262620925903, 'TIME_S_1KI': 0.09611372028346873, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 747.4210280299187, 'W': 52.85} +[19.22, 22.12, 20.05, 18.42, 19.49, 18.7, 18.82, 18.59, 18.76, 18.45, 19.4, 18.53, 18.99, 19.62, 19.0, 18.65, 19.18, 18.8, 18.6, 18.79] +344.25 +17.2125 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 108957, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.472262620925903, 'TIME_S_1KI': 0.09611372028346873, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 747.4210280299187, 'W': 52.85, 'J_1KI': 6.859779803316159, 'W_1KI': 0.48505373679524955, 'W_D': 35.6375, 'J_D': 503.99653522074226, 'W_D_1KI': 0.32707857228080806, 'J_D_1KI': 0.0030019050843985064} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json index a469b78..38f13e3 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 21150, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.43382978439331, "TIME_S_1KI": 0.4933252853141046, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 678.8009396362305, "W": 47.74, "J_1KI": 32.09460707499908, "W_1KI": 2.257210401891253, "W_D": 31.383250000000004, "J_D": 446.22914932632455, "W_D_1KI": 1.483841607565012, "J_D_1KI": 0.0701579956295514} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 21489, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.522697687149048, "TIME_S_1KI": 0.48967833250263143, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 768.4232043147086, "W": 53.65, "J_1KI": 35.75890941014978, "W_1KI": 2.4966261808367074, "W_D": 35.272999999999996, "J_D": 505.21140141272537, "W_D_1KI": 1.6414444599562565, "J_D_1KI": 0.07638533482043168} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output index c2c0006..93ae60b 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.06486701965332031} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.06584358215332031} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 47, 88, ..., 249915, 249957, +tensor(crow_indices=tensor([ 0, 48, 101, ..., 249892, 249942, 250000]), - col_indices=tensor([ 72, 118, 180, ..., 4779, 4849, 4984]), - values=tensor([0.8923, 0.3860, 0.0290, ..., 0.0532, 0.0516, 0.8464]), + col_indices=tensor([ 6, 107, 321, ..., 4686, 4752, 4909]), + values=tensor([0.6573, 0.6233, 0.5853, ..., 0.0909, 0.8119, 0.2306]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.1487, 0.9450, 0.3254, ..., 0.6866, 0.3989, 0.7268]) +tensor([0.2370, 0.0962, 0.8634, ..., 0.1694, 0.3412, 0.6070]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.06486701965332031 seconds +Time: 0.06584358215332031 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '16186', '-ss', '5000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.035360336303711} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15946', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.791264772415161} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 44, 82, ..., 249908, 249962, +tensor(crow_indices=tensor([ 0, 37, 86, ..., 249909, 249955, 250000]), - col_indices=tensor([ 36, 43, 78, ..., 4796, 4867, 4932]), - values=tensor([0.3758, 0.9832, 0.1983, ..., 0.0743, 0.8633, 0.0592]), + col_indices=tensor([ 143, 471, 495, ..., 4844, 4903, 4966]), + values=tensor([0.7815, 0.7659, 0.8558, ..., 0.4556, 0.1537, 0.8448]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5985, 0.2492, 0.1240, ..., 0.8930, 0.7764, 0.3200]) +tensor([0.0365, 0.6707, 0.0658, ..., 0.6930, 0.0408, 0.2771]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 8.035360336303711 seconds +Time: 7.791264772415161 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '21150', '-ss', '5000', '-sd', '0.01', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.43382978439331} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '21489', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.522697687149048} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 47, 95, ..., 249894, 249948, +tensor(crow_indices=tensor([ 0, 40, 83, ..., 249895, 249959, 250000]), - col_indices=tensor([ 129, 143, 228, ..., 4613, 4768, 4965]), - values=tensor([0.2601, 0.0327, 0.3118, ..., 0.8257, 0.2689, 0.3965]), + col_indices=tensor([ 30, 48, 173, ..., 4252, 4292, 4802]), + values=tensor([0.0239, 0.4781, 0.2863, ..., 0.4474, 0.8773, 0.0233]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.3768, 0.4725, 0.8369, ..., 0.3357, 0.4139, 0.5546]) +tensor([0.9052, 0.5856, 0.4582, ..., 0.5510, 0.4447, 0.7272]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.43382978439331 seconds +Time: 10.522697687149048 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 47, 95, ..., 249894, 249948, +tensor(crow_indices=tensor([ 0, 40, 83, ..., 249895, 249959, 250000]), - col_indices=tensor([ 129, 143, 228, ..., 4613, 4768, 4965]), - values=tensor([0.2601, 0.0327, 0.3118, ..., 0.8257, 0.2689, 0.3965]), + col_indices=tensor([ 30, 48, 173, ..., 4252, 4292, 4802]), + values=tensor([0.0239, 0.4781, 0.2863, ..., 0.4474, 0.8773, 0.0233]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.3768, 0.4725, 0.8369, ..., 0.3357, 0.4139, 0.5546]) +tensor([0.9052, 0.5856, 0.4582, ..., 0.5510, 0.4447, 0.7272]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.43382978439331 seconds +Time: 10.522697687149048 seconds -[18.56, 17.95, 18.07, 18.36, 18.1, 18.2, 18.17, 18.16, 18.01, 17.83] -[47.74] -14.218704223632812 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21150, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.43382978439331, 'TIME_S_1KI': 0.4933252853141046, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 678.8009396362305, 'W': 47.74} -[18.56, 17.95, 18.07, 18.36, 18.1, 18.2, 18.17, 18.16, 18.01, 17.83, 19.27, 18.21, 17.98, 17.93, 18.07, 18.27, 18.29, 17.78, 18.3, 18.91] -327.135 -16.356749999999998 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21150, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.43382978439331, 'TIME_S_1KI': 0.4933252853141046, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 678.8009396362305, 'W': 47.74, 'J_1KI': 32.09460707499908, 'W_1KI': 2.257210401891253, 'W_D': 31.383250000000004, 'J_D': 446.22914932632455, 'W_D_1KI': 1.483841607565012, 'J_D_1KI': 0.0701579956295514} +[19.6, 18.7, 18.63, 18.64, 18.6, 19.05, 18.56, 21.87, 19.97, 18.75] +[53.65] +14.322892904281616 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21489, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.522697687149048, 'TIME_S_1KI': 0.48967833250263143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 768.4232043147086, 'W': 53.65} +[19.6, 18.7, 18.63, 18.64, 18.6, 19.05, 18.56, 21.87, 19.97, 18.75, 18.88, 19.62, 19.99, 35.37, 25.34, 18.41, 19.38, 18.72, 18.61, 18.93] +367.54 +18.377000000000002 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21489, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.522697687149048, 'TIME_S_1KI': 0.48967833250263143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 768.4232043147086, 'W': 53.65, 'J_1KI': 35.75890941014978, 'W_1KI': 2.4966261808367074, 'W_D': 35.272999999999996, 'J_D': 505.21140141272537, 'W_D_1KI': 1.6414444599562565, 'J_D_1KI': 0.07638533482043168} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json index c9a64dd..9a457a7 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4516, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.427689790725708, "TIME_S_1KI": 2.309054426644311, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 694.9637099742889, "W": 47.9, "J_1KI": 153.88921832911623, "W_1KI": 10.606731620903455, "W_D": 31.338749999999997, "J_D": 454.682546262145, "W_D_1KI": 6.939492914083259, "J_D_1KI": 1.5366459065729094} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4507, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.452783107757568, "TIME_S_1KI": 2.319232994843037, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 788.3322271823882, "W": 53.8, "J_1KI": 174.91285271408657, "W_1KI": 11.936986909252274, "W_D": 36.52775, "J_D": 535.2416823691725, "W_D_1KI": 8.104670512536055, "J_D_1KI": 1.7982406284748294} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output index ce3acc5..6f00292 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.2486708164215088} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.24941706657409668} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 247, 481, ..., 1249493, - 1249753, 1250000]), - col_indices=tensor([ 6, 31, 64, ..., 4955, 4959, 4978]), - values=tensor([0.8259, 0.7056, 0.5562, ..., 0.4513, 0.5248, 0.2272]), +tensor(crow_indices=tensor([ 0, 269, 515, ..., 1249442, + 1249742, 1250000]), + col_indices=tensor([ 18, 33, 64, ..., 4944, 4945, 4969]), + values=tensor([0.6983, 0.1365, 0.6760, ..., 0.8759, 0.6193, 0.1743]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.7144, 0.3655, 0.7208, ..., 0.9456, 0.6678, 0.5049]) +tensor([0.4545, 0.8492, 0.8687, ..., 0.9140, 0.3104, 0.7444]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 0.2486708164215088 seconds +Time: 0.24941706657409668 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4222', '-ss', '5000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.815936088562012} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4209', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.805480241775513} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 269, 519, ..., 1249537, - 1249769, 1250000]), - col_indices=tensor([ 42, 49, 74, ..., 4955, 4973, 4990]), - values=tensor([0.0021, 0.9721, 0.1598, ..., 0.6170, 0.8086, 0.1248]), +tensor(crow_indices=tensor([ 0, 258, 536, ..., 1249488, + 1249729, 1250000]), + col_indices=tensor([ 11, 74, 145, ..., 4958, 4988, 4993]), + values=tensor([0.2265, 0.0876, 0.3577, ..., 0.6186, 0.3326, 0.6793]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.9473, 0.7157, 0.7204, ..., 0.2702, 0.4361, 0.8753]) +tensor([0.9261, 0.7700, 0.5305, ..., 0.5419, 0.6760, 0.0747]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 9.815936088562012 seconds +Time: 9.805480241775513 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4516', '-ss', '5000', '-sd', '0.05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.427689790725708} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4507', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.452783107757568} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 262, 521, ..., 1249547, - 1249780, 1250000]), - col_indices=tensor([ 5, 16, 32, ..., 4965, 4966, 4994]), - values=tensor([0.6294, 0.1213, 0.8577, ..., 0.8057, 0.3565, 0.7731]), +tensor(crow_indices=tensor([ 0, 254, 505, ..., 1249513, + 1249748, 1250000]), + col_indices=tensor([ 4, 15, 43, ..., 4942, 4967, 4986]), + values=tensor([0.9980, 0.7569, 0.0426, ..., 0.7541, 0.1223, 0.2328]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.5561, 0.2020, 0.8277, ..., 0.0800, 0.4571, 0.4718]) +tensor([0.1484, 0.2157, 0.1969, ..., 0.4129, 0.2768, 0.1680]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.427689790725708 seconds +Time: 10.452783107757568 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 262, 521, ..., 1249547, - 1249780, 1250000]), - col_indices=tensor([ 5, 16, 32, ..., 4965, 4966, 4994]), - values=tensor([0.6294, 0.1213, 0.8577, ..., 0.8057, 0.3565, 0.7731]), +tensor(crow_indices=tensor([ 0, 254, 505, ..., 1249513, + 1249748, 1250000]), + col_indices=tensor([ 4, 15, 43, ..., 4942, 4967, 4986]), + values=tensor([0.9980, 0.7569, 0.0426, ..., 0.7541, 0.1223, 0.2328]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.5561, 0.2020, 0.8277, ..., 0.0800, 0.4571, 0.4718]) +tensor([0.1484, 0.2157, 0.1969, ..., 0.4129, 0.2768, 0.1680]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.427689790725708 seconds +Time: 10.452783107757568 seconds -[18.38, 21.41, 18.53, 17.98, 18.13, 17.94, 18.18, 18.19, 18.64, 17.87] -[47.9] -14.508636951446533 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.427689790725708, 'TIME_S_1KI': 2.309054426644311, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 694.9637099742889, 'W': 47.9} -[18.38, 21.41, 18.53, 17.98, 18.13, 17.94, 18.18, 18.19, 18.64, 17.87, 18.41, 18.3, 18.02, 18.19, 18.18, 18.28, 18.35, 17.94, 18.16, 18.95] -331.225 -16.56125 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.427689790725708, 'TIME_S_1KI': 2.309054426644311, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 694.9637099742889, 'W': 47.9, 'J_1KI': 153.88921832911623, 'W_1KI': 10.606731620903455, 'W_D': 31.338749999999997, 'J_D': 454.682546262145, 'W_D_1KI': 6.939492914083259, 'J_D_1KI': 1.5366459065729094} +[18.99, 18.82, 18.82, 20.01, 18.56, 18.75, 18.66, 19.57, 18.83, 19.81] +[53.8] +14.653015375137329 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4507, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.452783107757568, 'TIME_S_1KI': 2.319232994843037, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 788.3322271823882, 'W': 53.8} +[18.99, 18.82, 18.82, 20.01, 18.56, 18.75, 18.66, 19.57, 18.83, 19.81, 18.87, 18.49, 18.96, 18.99, 18.64, 23.2, 19.54, 18.4, 19.14, 18.46] +345.445 +17.27225 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4507, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.452783107757568, 'TIME_S_1KI': 2.319232994843037, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 788.3322271823882, 'W': 53.8, 'J_1KI': 174.91285271408657, 'W_1KI': 11.936986909252274, 'W_D': 36.52775, 'J_D': 535.2416823691725, 'W_D_1KI': 8.104670512536055, 'J_D_1KI': 1.7982406284748294} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json index 6d226de..2e2ae33 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2077, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.487555503845215, "TIME_S_1KI": 5.04937674715706, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 724.931280415058, "W": 48.23, "J_1KI": 349.0280599013279, "W_1KI": 23.220991815117955, "W_D": 31.779249999999998, "J_D": 477.6647811140418, "W_D_1KI": 15.300553683196917, "J_D_1KI": 7.366660415597938} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2065, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.45287299156189, "TIME_S_1KI": 5.061923966858059, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 809.5970643353462, "W": 54.129999999999995, "J_1KI": 392.05668975077293, "W_1KI": 26.213075060532685, "W_D": 36.782999999999994, "J_D": 550.1461078412532, "W_D_1KI": 17.812590799031472, "J_D_1KI": 8.625951960790058} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output index ec991c2..99c4ade 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.53643798828125} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.545907735824585} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 485, 953, ..., 2499027, - 2499524, 2500000]), - col_indices=tensor([ 4, 6, 18, ..., 4982, 4984, 4999]), - values=tensor([0.6950, 0.7335, 0.8547, ..., 0.7303, 0.2740, 0.2643]), +tensor(crow_indices=tensor([ 0, 463, 924, ..., 2498984, + 2499500, 2500000]), + col_indices=tensor([ 3, 5, 15, ..., 4982, 4993, 4996]), + values=tensor([0.5905, 0.3756, 0.1756, ..., 0.8297, 0.4615, 0.2237]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4725, 0.9871, 0.6689, ..., 0.5705, 0.1526, 0.5563]) +tensor([0.9622, 0.8462, 0.4599, ..., 0.4872, 0.1929, 0.7015]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 0.53643798828125 seconds +Time: 0.545907735824585 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1957', '-ss', '5000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.889901638031006} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1923', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.776902198791504} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 453, 921, ..., 2498978, - 2499478, 2500000]), - col_indices=tensor([ 4, 20, 34, ..., 4974, 4986, 4991]), - values=tensor([0.7939, 0.2865, 0.3388, ..., 0.3715, 0.9532, 0.1224]), +tensor(crow_indices=tensor([ 0, 532, 1021, ..., 2498948, + 2499479, 2500000]), + col_indices=tensor([ 7, 8, 20, ..., 4964, 4986, 4994]), + values=tensor([0.7984, 0.5010, 0.6603, ..., 0.6563, 0.5541, 0.6171]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8979, 0.8998, 0.6031, ..., 0.0686, 0.1119, 0.6753]) +tensor([0.8395, 0.6596, 0.0148, ..., 0.6891, 0.8591, 0.4265]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 9.889901638031006 seconds +Time: 9.776902198791504 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2077', '-ss', '5000', '-sd', '0.1', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.487555503845215} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2065', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.45287299156189} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 534, 1011, ..., 2499001, - 2499492, 2500000]), - col_indices=tensor([ 12, 16, 26, ..., 4984, 4992, 4995]), - values=tensor([0.3129, 0.5758, 0.2112, ..., 0.6208, 0.5668, 0.8482]), +tensor(crow_indices=tensor([ 0, 505, 1011, ..., 2499006, + 2499526, 2500000]), + col_indices=tensor([ 27, 29, 36, ..., 4978, 4986, 4992]), + values=tensor([0.2782, 0.8454, 0.7345, ..., 0.2216, 0.1965, 0.2730]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7114, 0.1437, 0.5452, ..., 0.6795, 0.1114, 0.8178]) +tensor([0.0758, 0.2374, 0.4573, ..., 0.8597, 0.4706, 0.8817]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.487555503845215 seconds +Time: 10.45287299156189 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 534, 1011, ..., 2499001, - 2499492, 2500000]), - col_indices=tensor([ 12, 16, 26, ..., 4984, 4992, 4995]), - values=tensor([0.3129, 0.5758, 0.2112, ..., 0.6208, 0.5668, 0.8482]), +tensor(crow_indices=tensor([ 0, 505, 1011, ..., 2499006, + 2499526, 2500000]), + col_indices=tensor([ 27, 29, 36, ..., 4978, 4986, 4992]), + values=tensor([0.2782, 0.8454, 0.7345, ..., 0.2216, 0.1965, 0.2730]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7114, 0.1437, 0.5452, ..., 0.6795, 0.1114, 0.8178]) +tensor([0.0758, 0.2374, 0.4573, ..., 0.8597, 0.4706, 0.8817]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.487555503845215 seconds +Time: 10.45287299156189 seconds -[18.45, 17.83, 21.85, 18.4, 18.31, 17.94, 17.97, 18.32, 18.18, 17.83] -[48.23] -15.030712842941284 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.487555503845215, 'TIME_S_1KI': 5.04937674715706, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 724.931280415058, 'W': 48.23} -[18.45, 17.83, 21.85, 18.4, 18.31, 17.94, 17.97, 18.32, 18.18, 17.83, 18.49, 17.82, 18.18, 18.17, 18.15, 17.82, 18.07, 17.68, 17.99, 17.9] -329.015 -16.45075 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.487555503845215, 'TIME_S_1KI': 5.04937674715706, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 724.931280415058, 'W': 48.23, 'J_1KI': 349.0280599013279, 'W_1KI': 23.220991815117955, 'W_D': 31.779249999999998, 'J_D': 477.6647811140418, 'W_D_1KI': 15.300553683196917, 'J_D_1KI': 7.366660415597938} +[19.0, 23.3, 18.72, 18.84, 18.99, 18.54, 18.55, 18.38, 18.99, 18.44] +[54.13] +14.956531763076782 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2065, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.45287299156189, 'TIME_S_1KI': 5.061923966858059, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 809.5970643353462, 'W': 54.129999999999995} +[19.0, 23.3, 18.72, 18.84, 18.99, 18.54, 18.55, 18.38, 18.99, 18.44, 18.9, 18.89, 18.67, 18.69, 18.68, 20.93, 21.41, 18.74, 18.95, 19.0] +346.94 +17.347 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2065, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.45287299156189, 'TIME_S_1KI': 5.061923966858059, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 809.5970643353462, 'W': 54.129999999999995, 'J_1KI': 392.05668975077293, 'W_1KI': 26.213075060532685, 'W_D': 36.782999999999994, 'J_D': 550.1461078412532, 'W_D_1KI': 17.812590799031472, 'J_D_1KI': 8.625951960790058} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.json index e6d6ed2..bc073dc 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 962, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.386102437973022, "TIME_S_1KI": 10.796364280637237, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 760.2223528313637, "W": 48.37, "J_1KI": 790.2519260201285, "W_1KI": 50.280665280665275, "W_D": 31.813249999999996, "J_D": 500.00297221857306, "W_D_1KI": 33.069906444906444, "J_D_1KI": 34.376202125682376} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 971, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.375150203704834, "TIME_S_1KI": 10.685015657780468, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 849.9418584561348, "W": 54.17, "J_1KI": 875.3263217879864, "W_1KI": 55.78784757981463, "W_D": 37.319250000000004, "J_D": 585.5490622335077, "W_D_1KI": 38.43383110195675, "J_D_1KI": 39.581700413961634} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.output index c8b98a6..f3998cb 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 1.090480089187622} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 1.0806596279144287} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 979, 1989, ..., 4997947, - 4998978, 5000000]), - col_indices=tensor([ 0, 1, 2, ..., 4989, 4991, 4992]), - values=tensor([0.4629, 0.8349, 0.1230, ..., 0.3254, 0.2010, 0.4262]), +tensor(crow_indices=tensor([ 0, 995, 1988, ..., 4997969, + 4998993, 5000000]), + col_indices=tensor([ 2, 8, 15, ..., 4982, 4992, 4997]), + values=tensor([0.7197, 0.1704, 0.3676, ..., 0.5241, 0.8938, 0.1612]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.8820, 0.9283, 0.2134, ..., 0.8569, 0.5183, 0.3465]) +tensor([0.6730, 0.8677, 0.3708, ..., 0.7112, 0.4020, 0.2942]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 1.090480089187622 seconds +Time: 1.0806596279144287 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '962', '-ss', '5000', '-sd', '0.2', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.386102437973022} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '971', '-ss', '5000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.375150203704834} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1020, 2003, ..., 4997983, - 4998972, 5000000]), - col_indices=tensor([ 2, 12, 14, ..., 4982, 4988, 4994]), - values=tensor([0.5268, 0.0953, 0.2601, ..., 0.2366, 0.8226, 0.2641]), +tensor(crow_indices=tensor([ 0, 988, 1955, ..., 4997972, + 4998955, 5000000]), + col_indices=tensor([ 3, 12, 18, ..., 4992, 4993, 4996]), + values=tensor([0.6820, 0.5689, 0.9542, ..., 0.8233, 0.5538, 0.5282]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6710, 0.0398, 0.1150, ..., 0.1943, 0.5070, 0.9802]) +tensor([0.8777, 0.1124, 0.1362, ..., 0.2255, 0.2013, 0.9144]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.386102437973022 seconds +Time: 10.375150203704834 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1020, 2003, ..., 4997983, - 4998972, 5000000]), - col_indices=tensor([ 2, 12, 14, ..., 4982, 4988, 4994]), - values=tensor([0.5268, 0.0953, 0.2601, ..., 0.2366, 0.8226, 0.2641]), +tensor(crow_indices=tensor([ 0, 988, 1955, ..., 4997972, + 4998955, 5000000]), + col_indices=tensor([ 3, 12, 18, ..., 4992, 4993, 4996]), + values=tensor([0.6820, 0.5689, 0.9542, ..., 0.8233, 0.5538, 0.5282]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6710, 0.0398, 0.1150, ..., 0.1943, 0.5070, 0.9802]) +tensor([0.8777, 0.1124, 0.1362, ..., 0.2255, 0.2013, 0.9144]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.386102437973022 seconds +Time: 10.375150203704834 seconds -[18.34, 18.12, 18.35, 17.91, 17.99, 18.03, 18.28, 18.28, 17.95, 18.09] -[48.37] -15.71681523323059 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 962, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.386102437973022, 'TIME_S_1KI': 10.796364280637237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 760.2223528313637, 'W': 48.37} -[18.34, 18.12, 18.35, 17.91, 17.99, 18.03, 18.28, 18.28, 17.95, 18.09, 18.27, 18.18, 18.12, 17.89, 18.15, 21.61, 18.11, 17.8, 18.1, 21.83] -331.13500000000005 -16.55675 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 962, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.386102437973022, 'TIME_S_1KI': 10.796364280637237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 760.2223528313637, 'W': 48.37, 'J_1KI': 790.2519260201285, 'W_1KI': 50.280665280665275, 'W_D': 31.813249999999996, 'J_D': 500.00297221857306, 'W_D_1KI': 33.069906444906444, 'J_D_1KI': 34.376202125682376} +[18.9, 18.56, 18.78, 18.83, 18.79, 18.5, 18.74, 18.48, 18.64, 18.83] +[54.17] +15.690268754959106 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 971, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.375150203704834, 'TIME_S_1KI': 10.685015657780468, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 849.9418584561348, 'W': 54.17} +[18.9, 18.56, 18.78, 18.83, 18.79, 18.5, 18.74, 18.48, 18.64, 18.83, 19.31, 18.65, 19.01, 18.67, 18.6, 18.68, 18.61, 18.75, 18.77, 18.87] +337.015 +16.850749999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 971, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.375150203704834, 'TIME_S_1KI': 10.685015657780468, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 849.9418584561348, 'W': 54.17, 'J_1KI': 875.3263217879864, 'W_1KI': 55.78784757981463, 'W_D': 37.319250000000004, 'J_D': 585.5490622335077, 'W_D_1KI': 38.43383110195675, 'J_D_1KI': 39.581700413961634} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.json index 6efa366..57ad521 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 640, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.40328574180603, "TIME_S_1KI": 16.255133971571922, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 792.4070657157898, "W": 48.54, "J_1KI": 1238.1360401809216, "W_1KI": 75.84375, "W_D": 32.294250000000005, "J_D": 527.1980198185445, "W_D_1KI": 50.45976562500001, "J_D_1KI": 78.84338378906251} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 634, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.328722476959229, "TIME_S_1KI": 16.291360373752727, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 884.4766217422484, "W": 54.16, "J_1KI": 1395.0735358710542, "W_1KI": 85.42586750788642, "W_D": 36.95925, "J_D": 603.5744568339586, "W_D_1KI": 58.29534700315457, "J_D_1KI": 91.94849685040153} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.output index e11bc1c..20f8484 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.6401786804199219} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.65598464012146} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1479, 2972, ..., 7496976, - 7498517, 7500000]), - col_indices=tensor([ 4, 10, 12, ..., 4987, 4989, 4997]), - values=tensor([0.8092, 0.2326, 0.1918, ..., 0.7537, 0.2703, 0.9406]), +tensor(crow_indices=tensor([ 0, 1512, 3010, ..., 7497102, + 7498580, 7500000]), + col_indices=tensor([ 0, 6, 7, ..., 4982, 4993, 4994]), + values=tensor([0.1441, 0.1264, 0.1974, ..., 0.8347, 0.6432, 0.4326]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.6079, 0.4244, 0.8803, ..., 0.9929, 0.8834, 0.4182]) +tensor([0.7793, 0.6447, 0.2763, ..., 0.7560, 0.6340, 0.7473]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 1.6401786804199219 seconds +Time: 1.65598464012146 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '640', '-ss', '5000', '-sd', '0.3', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.40328574180603} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '634', '-ss', '5000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.328722476959229} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1469, 2947, ..., 7496962, - 7498492, 7500000]), - col_indices=tensor([ 5, 8, 10, ..., 4979, 4981, 4995]), - values=tensor([0.5097, 0.4133, 0.1946, ..., 0.6762, 0.7827, 0.5941]), +tensor(crow_indices=tensor([ 0, 1455, 2926, ..., 7496907, + 7498447, 7500000]), + col_indices=tensor([ 7, 8, 12, ..., 4996, 4997, 4999]), + values=tensor([0.5602, 0.6149, 0.8310, ..., 0.9658, 0.2109, 0.7676]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.3256, 0.8526, 0.3288, ..., 0.0837, 0.1622, 0.2040]) +tensor([0.2196, 0.3329, 0.8031, ..., 0.2668, 0.7746, 0.1687]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.40328574180603 seconds +Time: 10.328722476959229 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1469, 2947, ..., 7496962, - 7498492, 7500000]), - col_indices=tensor([ 5, 8, 10, ..., 4979, 4981, 4995]), - values=tensor([0.5097, 0.4133, 0.1946, ..., 0.6762, 0.7827, 0.5941]), +tensor(crow_indices=tensor([ 0, 1455, 2926, ..., 7496907, + 7498447, 7500000]), + col_indices=tensor([ 7, 8, 12, ..., 4996, 4997, 4999]), + values=tensor([0.5602, 0.6149, 0.8310, ..., 0.9658, 0.2109, 0.7676]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.3256, 0.8526, 0.3288, ..., 0.0837, 0.1622, 0.2040]) +tensor([0.2196, 0.3329, 0.8031, ..., 0.2668, 0.7746, 0.1687]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.40328574180603 seconds +Time: 10.328722476959229 seconds -[18.61, 18.25, 18.17, 17.83, 17.98, 17.89, 17.97, 17.91, 17.89, 18.07] -[48.54] -16.32482624053955 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 640, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.40328574180603, 'TIME_S_1KI': 16.255133971571922, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.4070657157898, 'W': 48.54} -[18.61, 18.25, 18.17, 17.83, 17.98, 17.89, 17.97, 17.91, 17.89, 18.07, 18.46, 18.32, 17.92, 18.02, 17.98, 18.33, 17.98, 17.9, 18.02, 17.97] -324.91499999999996 -16.245749999999997 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 640, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.40328574180603, 'TIME_S_1KI': 16.255133971571922, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.4070657157898, 'W': 48.54, 'J_1KI': 1238.1360401809216, 'W_1KI': 75.84375, 'W_D': 32.294250000000005, 'J_D': 527.1980198185445, 'W_D_1KI': 50.45976562500001, 'J_D_1KI': 78.84338378906251} +[18.9, 18.45, 18.68, 23.55, 19.81, 18.49, 19.91, 18.65, 18.58, 18.47] +[54.16] +16.330809116363525 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 634, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.328722476959229, 'TIME_S_1KI': 16.291360373752727, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 884.4766217422484, 'W': 54.16} +[18.9, 18.45, 18.68, 23.55, 19.81, 18.49, 19.91, 18.65, 18.58, 18.47, 19.6, 18.53, 19.12, 18.47, 18.97, 18.51, 18.9, 18.9, 18.76, 18.5] +344.015 +17.20075 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 634, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.328722476959229, 'TIME_S_1KI': 16.291360373752727, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 884.4766217422484, 'W': 54.16, 'J_1KI': 1395.0735358710542, 'W_1KI': 85.42586750788642, 'W_D': 36.95925, 'J_D': 603.5744568339586, 'W_D_1KI': 58.29534700315457, 'J_D_1KI': 91.94849685040153} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.json index 679e37e..324d8eb 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 393, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.361774921417236, "TIME_S_1KI": 26.365839494700346, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 830.257935385704, "W": 47.87, "J_1KI": 2112.615611668458, "W_1KI": 121.80661577608141, "W_D": 31.368499999999997, "J_D": 544.0556934645175, "W_D_1KI": 79.81806615776081, "J_D_1KI": 203.09940498157965} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 397, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.436140060424805, "TIME_S_1KI": 26.287506449432758, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 926.5611728620529, "W": 53.86, "J_1KI": 2333.9072364283447, "W_1KI": 135.6675062972292, "W_D": 36.199, "J_D": 622.7365001194477, "W_D_1KI": 91.18136020151132, "J_D_1KI": 229.67597028088497} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.output index 4eb279b..b446ca1 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,15 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 2.6700339317321777} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 2.640493392944336} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1981, 4013, ..., 9996053, - 9998014, 10000000]), - col_indices=tensor([ 1, 2, 4, ..., 4993, 4995, 4997]), - values=tensor([0.1269, 0.6137, 0.4927, ..., 0.6127, 0.1027, 0.1107]), - size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1274, 0.2875, 0.5158, ..., 0.6638, 0.6368, 0.8182]) +tensor(crow_indices=tensor([ 0, 2010, 3987, ..., 9995974, + 9997951, 10000000]), + col_indices=tensor([ 1, 7, 10, ..., 4992, 4995, 4997]), + values=tensor([7.4519e-04, 5.9544e-01, 6.5419e-01, ..., + 9.4636e-01, 8.2090e-01, 7.6338e-01]), size=(5000, 5000), + nnz=10000000, layout=torch.sparse_csr) +tensor([0.3936, 0.5475, 0.9417, ..., 0.3205, 0.6260, 0.3688]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +17,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 2.6700339317321777 seconds +Time: 2.640493392944336 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '393', '-ss', '5000', '-sd', '0.4', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.361774921417236} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '397', '-ss', '5000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.436140060424805} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2035, 3994, ..., 9995956, - 9997978, 10000000]), - col_indices=tensor([ 2, 3, 4, ..., 4989, 4993, 4999]), - values=tensor([0.0874, 0.4595, 0.0218, ..., 0.9380, 0.2756, 0.2464]), +tensor(crow_indices=tensor([ 0, 1974, 3981, ..., 9996025, + 9998009, 10000000]), + col_indices=tensor([ 4, 5, 7, ..., 4992, 4994, 4995]), + values=tensor([0.8520, 0.4990, 0.8451, ..., 0.4145, 0.7436, 0.6838]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1585, 0.7775, 0.6260, ..., 0.0357, 0.4122, 0.0843]) +tensor([0.6229, 0.6402, 0.3556, ..., 0.0272, 0.8417, 0.9178]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +37,16 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.361774921417236 seconds +Time: 10.436140060424805 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2035, 3994, ..., 9995956, - 9997978, 10000000]), - col_indices=tensor([ 2, 3, 4, ..., 4989, 4993, 4999]), - values=tensor([0.0874, 0.4595, 0.0218, ..., 0.9380, 0.2756, 0.2464]), +tensor(crow_indices=tensor([ 0, 1974, 3981, ..., 9996025, + 9998009, 10000000]), + col_indices=tensor([ 4, 5, 7, ..., 4992, 4994, 4995]), + values=tensor([0.8520, 0.4990, 0.8451, ..., 0.4145, 0.7436, 0.6838]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1585, 0.7775, 0.6260, ..., 0.0357, 0.4122, 0.0843]) +tensor([0.6229, 0.6402, 0.3556, ..., 0.0272, 0.8417, 0.9178]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +54,13 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.361774921417236 seconds +Time: 10.436140060424805 seconds -[18.09, 21.49, 19.13, 18.18, 17.87, 18.12, 18.49, 17.91, 18.18, 17.86] -[47.87] -17.344013690948486 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 393, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.361774921417236, 'TIME_S_1KI': 26.365839494700346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 830.257935385704, 'W': 47.87} -[18.09, 21.49, 19.13, 18.18, 17.87, 18.12, 18.49, 17.91, 18.18, 17.86, 18.63, 18.02, 17.95, 17.88, 17.98, 18.08, 17.96, 17.78, 18.35, 18.74] -330.03 -16.5015 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 393, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.361774921417236, 'TIME_S_1KI': 26.365839494700346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 830.257935385704, 'W': 47.87, 'J_1KI': 2112.615611668458, 'W_1KI': 121.80661577608141, 'W_D': 31.368499999999997, 'J_D': 544.0556934645175, 'W_D_1KI': 79.81806615776081, 'J_D_1KI': 203.09940498157965} +[18.83, 18.74, 18.78, 27.34, 18.7, 18.43, 18.59, 18.76, 18.62, 18.54] +[53.86] +17.2031409740448 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 397, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.436140060424805, 'TIME_S_1KI': 26.287506449432758, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 926.5611728620529, 'W': 53.86} +[18.83, 18.74, 18.78, 27.34, 18.7, 18.43, 18.59, 18.76, 18.62, 18.54, 19.12, 18.44, 19.84, 25.25, 18.79, 18.56, 19.54, 18.82, 18.53, 18.49] +353.21999999999997 +17.660999999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 397, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.436140060424805, 'TIME_S_1KI': 26.287506449432758, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 926.5611728620529, 'W': 53.86, 'J_1KI': 2333.9072364283447, 'W_1KI': 135.6675062972292, 'W_D': 36.199, 'J_D': 622.7365001194477, 'W_D_1KI': 91.18136020151132, 'J_D_1KI': 229.67597028088497} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.json index afa179f..d1e779a 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.json +++ 
b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 307, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.394981145858765, "TIME_S_1KI": 33.85987343927936, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 930.0089691495895, "W": 47.58, "J_1KI": 3029.3451763830276, "W_1KI": 154.98371335504885, "W_D": 31.056999999999995, "J_D": 607.0468380596636, "W_D_1KI": 101.16286644951138, "J_D_1KI": 329.5207376205583} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 311, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.450198650360107, "TIME_S_1KI": 33.60192492077205, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 962.3892062711716, "W": 53.74, "J_1KI": 3094.499055534314, "W_1KI": 172.79742765273312, "W_D": 36.356750000000005, "J_D": 651.0856675679089, "W_D_1KI": 116.90273311897107, "J_D_1KI": 375.8930325368845} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.output index 91aa70e..ef415a1 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 3.415400505065918} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 3.3665878772735596} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2477, 4964, ..., 12494964, - 12497482, 12500000]), - col_indices=tensor([ 1, 2, 4, ..., 4993, 4994, 4997]), - values=tensor([0.5791, 0.4301, 0.3570, ..., 0.1858, 0.4639, 0.9573]), +tensor(crow_indices=tensor([ 0, 2529, 5030, ..., 12494987, + 12497532, 12500000]), + col_indices=tensor([ 0, 1, 2, ..., 4997, 4998, 4999]), + values=tensor([0.6195, 0.8024, 0.8856, ..., 0.3756, 0.3749, 0.9034]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.0093, 0.1244, 0.8882, ..., 0.7606, 0.5225, 0.2163]) +tensor([0.1073, 0.7449, 0.4816, ..., 0.4290, 0.8634, 0.7307]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 3.415400505065918 seconds +Time: 3.3665878772735596 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '307', '-ss', '5000', '-sd', '0.5', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.394981145858765} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '311', '-ss', '5000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.450198650360107} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2499, 4994, ..., 12495082, - 12497507, 12500000]), - col_indices=tensor([ 0, 1, 6, ..., 4995, 4997, 4999]), - values=tensor([0.6585, 0.9778, 0.9803, ..., 0.8325, 0.0849, 0.1040]), +tensor(crow_indices=tensor([ 0, 2539, 5026, ..., 12494990, + 12497512, 12500000]), + col_indices=tensor([ 0, 1, 3, ..., 4995, 4997, 4998]), + values=tensor([0.4822, 0.2881, 0.1292, ..., 0.9343, 0.6851, 0.3724]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.3486, 0.4233, 0.3644, ..., 0.4149, 0.2376, 0.4812]) +tensor([0.5234, 0.0534, 0.5804, ..., 0.3109, 0.1531, 0.9809]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.394981145858765 seconds +Time: 10.450198650360107 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2499, 4994, ..., 12495082, - 12497507, 12500000]), - col_indices=tensor([ 0, 1, 6, ..., 4995, 4997, 4999]), - values=tensor([0.6585, 0.9778, 0.9803, ..., 0.8325, 0.0849, 0.1040]), +tensor(crow_indices=tensor([ 0, 2539, 5026, ..., 12494990, + 12497512, 12500000]), + col_indices=tensor([ 0, 1, 3, ..., 4995, 4997, 4998]), + values=tensor([0.4822, 0.2881, 0.1292, ..., 0.9343, 0.6851, 0.3724]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.3486, 0.4233, 0.3644, ..., 0.4149, 0.2376, 0.4812]) +tensor([0.5234, 0.0534, 0.5804, ..., 0.3109, 0.1531, 0.9809]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.394981145858765 seconds +Time: 10.450198650360107 seconds -[18.48, 21.56, 18.9, 18.32, 18.22, 17.83, 18.09, 17.98, 18.55, 18.34] -[47.58] -19.546216249465942 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 307, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.394981145858765, 'TIME_S_1KI': 33.85987343927936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 930.0089691495895, 'W': 47.58} -[18.48, 21.56, 18.9, 18.32, 18.22, 17.83, 18.09, 17.98, 18.55, 18.34, 18.35, 17.93, 17.78, 17.82, 18.22, 18.23, 18.5, 17.76, 18.34, 17.69] -330.46000000000004 -16.523000000000003 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 307, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.394981145858765, 'TIME_S_1KI': 33.85987343927936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 930.0089691495895, 'W': 47.58, 'J_1KI': 3029.3451763830276, 'W_1KI': 154.98371335504885, 'W_D': 31.056999999999995, 'J_D': 607.0468380596636, 'W_D_1KI': 101.16286644951138, 'J_D_1KI': 329.5207376205583} +[18.87, 18.66, 18.79, 18.47, 22.55, 19.06, 18.88, 19.73, 18.81, 19.09] +[53.74] +17.908247232437134 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 311, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.450198650360107, 'TIME_S_1KI': 33.60192492077205, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 962.3892062711716, 'W': 53.74} +[18.87, 18.66, 18.79, 18.47, 22.55, 19.06, 18.88, 19.73, 18.81, 19.09, 19.09, 18.53, 18.69, 18.99, 18.59, 18.7, 23.26, 18.99, 18.96, 18.96] +347.665 +17.38325 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 311, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.450198650360107, 'TIME_S_1KI': 33.60192492077205, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 962.3892062711716, 'W': 53.74, 'J_1KI': 3094.499055534314, 'W_1KI': 172.79742765273312, 'W_D': 36.356750000000005, 'J_D': 651.0856675679089, 'W_D_1KI': 116.90273311897107, 'J_D_1KI': 375.8930325368845} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json index ca05b98..cd4e257 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json 
+++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 355542, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.588425874710083, "TIME_S_1KI": 0.02978108317641821, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 655.0935684347153, "W": 46.57, "J_1KI": 1.8425209073322286, "W_1KI": 0.13098311873140162, "W_D": 30.1475, "J_D": 424.08059597134593, "W_D_1KI": 0.08479307648604104, "J_D_1KI": 0.00023848962003375422} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 342879, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.04533338546753, "TIME_S_1KI": 0.029297021355835527, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 722.5925984573364, "W": 52.32, "J_1KI": 2.1074273970040056, "W_1KI": 0.1525902723701364, "W_D": 35.464, "J_D": 489.7940350093841, "W_D_1KI": 0.10343007299951294, "J_D_1KI": 0.0003016518159453129} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output index a05082b..f9f4749 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,75 +1,156 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.022435426712036133} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.02335357666015625} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 250, 250, 250]), + col_indices=tensor([2710, 2160, 3133, 4709, 949, 3472, 83, 3331, 4918, + 479, 2667, 1823, 2540, 2203, 4266, 1022, 2487, 4425, + 1993, 4325, 4370, 2012, 1654, 823, 4320, 2457, 599, + 3848, 2099, 2501, 1006, 1499, 1399, 3600, 1844, 3020, + 3144, 2751, 3930, 3847, 4981, 2145, 4292, 681, 4719, + 441, 1924, 2702, 2689, 1910, 2838, 2739, 968, 198, + 888, 701, 2535, 4522, 3901, 4648, 2510, 2066, 1638, + 366, 2578, 171, 333, 59, 886, 624, 647, 139, + 759, 274, 4237, 698, 2752, 1036, 2204, 3592, 146, + 2990, 1722, 145, 2503, 4758, 2168, 2097, 239, 3854, + 4884, 4630, 2563, 409, 3291, 2150, 4475, 3414, 1168, + 783, 2971, 1394, 4522, 2590, 4596, 3194, 3343, 4633, + 491, 2960, 4811, 4494, 4982, 798, 1046, 4978, 1137, + 2145, 1451, 3455, 3927, 1225, 367, 2754, 4551, 2650, + 3477, 255, 274, 139, 4369, 774, 1776, 3521, 928, + 2841, 2687, 3750, 3747, 2606, 4636, 3988, 2125, 4406, + 985, 1922, 2009, 2971, 2031, 1943, 3564, 3839, 1043, + 652, 4872, 3357, 2030, 4963, 3641, 4784, 4357, 1402, + 4046, 2631, 3901, 4504, 1012, 803, 2447, 4106, 753, + 1035, 3874, 3099, 3802, 4919, 1674, 1118, 1167, 2050, + 379, 90, 4848, 367, 950, 1212, 513, 4179, 3873, + 3319, 893, 5, 2185, 4634, 2144, 68, 4562, 3300, + 3486, 3932, 2686, 3825, 1973, 3732, 3190, 3931, 2912, + 2888, 378, 4822, 2306, 3763, 2936, 1433, 3409, 1791, + 4082, 939, 673, 2906, 3746, 4410, 26, 4725, 1690, + 4631, 1262, 3492, 785, 4600, 1052, 4694, 2598, 251, + 4789, 3820, 3312, 4746, 3488, 2552, 4525, 1674, 4534, + 4201, 4066, 1516, 2170, 1490, 2228, 4329]), + values=tensor([0.3374, 0.6348, 0.3296, 0.2038, 0.9495, 0.0396, 0.7791, + 0.9208, 0.3912, 0.8390, 0.5293, 0.4985, 0.3098, 0.3870, + 0.5844, 0.5875, 0.3830, 0.9503, 0.5415, 0.2963, 0.2474, + 0.2704, 0.7757, 0.6793, 0.9021, 0.8242, 0.4900, 0.6671, + 0.3269, 0.2676, 0.1675, 0.9819, 0.6766, 0.9529, 0.0179, + 0.5092, 0.0282, 0.7513, 0.7437, 0.1254, 0.2954, 0.2125, + 0.2869, 0.9141, 0.7903, 0.9797, 0.3316, 0.3135, 0.6989, + 0.2695, 0.5101, 0.1337, 0.6527, 0.7918, 0.1365, 0.8075, + 0.5201, 0.5272, 0.1257, 0.6914, 0.6136, 0.5347, 0.3303, + 0.3034, 0.7855, 0.8255, 0.6042, 0.4494, 0.0067, 0.1306, + 0.4580, 0.3239, 0.9028, 0.1142, 0.4467, 0.7459, 0.6398, + 0.5669, 0.6754, 0.2806, 0.9003, 0.9543, 0.7162, 0.5840, + 0.3940, 0.2840, 0.3596, 0.3565, 0.9149, 0.7132, 0.3196, + 0.4293, 0.8278, 0.8495, 0.2605, 0.8701, 0.2098, 0.1860, + 0.4090, 0.2953, 0.9703, 0.8961, 0.4387, 0.7496, 0.9440, + 0.9309, 0.2854, 0.2488, 0.7985, 0.9065, 0.8377, 0.6151, + 0.4022, 0.8065, 0.3700, 0.0205, 0.5272, 0.1609, 0.1101, + 0.8033, 0.1345, 0.3044, 0.2881, 0.3947, 0.5356, 0.4659, + 0.2462, 0.9431, 0.9702, 0.1866, 0.8513, 0.7510, 0.0854, + 0.9109, 0.2434, 0.2495, 0.0282, 0.7419, 0.7324, 0.7131, + 0.1604, 0.3140, 0.4701, 0.6297, 0.9680, 0.5925, 0.3820, + 0.9531, 0.4807, 0.7208, 0.6516, 0.5497, 0.4831, 0.8462, + 0.4641, 0.6462, 0.5718, 0.3214, 0.6301, 0.0423, 0.1848, + 0.7639, 0.7793, 0.8766, 0.7839, 0.2693, 0.6165, 0.5694, + 0.0809, 0.6353, 0.2681, 0.8614, 0.9826, 0.1788, 0.2561, + 0.8474, 0.1753, 0.7133, 0.4078, 0.1411, 0.3508, 0.3913, + 0.2622, 0.4959, 0.0322, 0.4114, 0.2324, 0.9219, 0.2075, + 0.6050, 0.8536, 0.5572, 0.5495, 0.8460, 0.9050, 0.9556, + 0.0380, 0.7357, 0.6217, 0.7493, 0.1616, 0.1052, 0.7299, + 0.9827, 0.5203, 0.5278, 0.1121, 0.0156, 0.3815, 0.1310, + 0.5952, 0.6956, 0.2005, 0.0296, 0.8936, 0.9754, 0.1470, + 0.1604, 0.2962, 0.2133, 0.1761, 0.2122, 0.5494, 0.4118, + 0.2219, 0.5058, 0.7503, 0.5251, 0.7282, 0.4731, 
0.1995, + 0.7925, 0.8650, 0.9236, 0.9274, 0.4519, 0.0902, 0.5516, + 0.3201, 0.9129, 0.8076, 0.7566, 0.0207, 0.1460, 0.6827, + 0.6923, 0.9383, 0.3672, 0.5984, 0.7063]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.9273, 0.9180, 0.5847, ..., 0.9859, 0.6052, 0.3197]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.02335357666015625 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '44960', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.3768103122711182} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1292, 659, 4365, 3710, 1440, 2896, 123, 3649, 2612, - 927, 1659, 4214, 4385, 2636, 1869, 118, 1932, 2570, - 3752, 4154, 1992, 3194, 1579, 4685, 2991, 2783, 3487, - 3211, 2142, 3564, 4650, 3661, 3610, 3388, 1017, 3880, - 4125, 2970, 4559, 2499, 1693, 43, 2397, 1040, 4017, - 1828, 4674, 1496, 1775, 719, 2677, 389, 3154, 2997, - 3651, 773, 1883, 4184, 1034, 1972, 2681, 2277, 1527, - 4621, 238, 4365, 4905, 2103, 4738, 2026, 4073, 1797, - 3650, 3259, 4594, 2607, 1806, 3417, 2906, 213, 1348, - 1781, 4696, 1901, 709, 1550, 1968, 4490, 826, 4326, - 374, 3405, 4945, 642, 3326, 217, 437, 4704, 4681, - 3950, 1228, 3052, 3026, 3269, 1740, 2853, 2996, 1978, - 359, 3246, 3815, 3422, 1128, 733, 749, 4078, 2740, - 127, 2401, 240, 391, 4079, 575, 1175, 4439, 4510, - 2136, 1342, 880, 2183, 3085, 3808, 4189, 2436, 1877, - 140, 1033, 744, 134, 1457, 133, 407, 2079, 2372, - 2867, 1110, 602, 2915, 3299, 4776, 1097, 3465, 774, - 2845, 4963, 619, 3626, 3224, 3032, 4984, 3635, 1115, - 1431, 229, 362, 2520, 4880, 2306, 2092, 2949, 3111, - 141, 801, 4774, 1268, 4702, 3013, 4053, 4202, 2338, - 3307, 2339, 1627, 3991, 2211, 3208, 4859, 2254, 850, - 2555, 416, 3498, 2761, 1743, 3828, 3909, 4942, 4647, - 2857, 399, 2142, 1173, 2936, 2739, 3524, 2473, 2398, - 3617, 4358, 1503, 3513, 1560, 2497, 176, 1685, 851, - 2706, 2662, 1211, 466, 3647, 2835, 1798, 4560, 4189, - 74, 1919, 4892, 1659, 1504, 1873, 179, 4512, 1622, - 131, 802, 3776, 894, 98, 1072, 3715, 1448, 4255, - 4226, 676, 4655, 4974, 2293, 491, 1924]), - values=tensor([0.9106, 0.6103, 0.8018, 0.6726, 0.7831, 0.8787, 0.8641, - 0.0319, 0.4873, 0.6079, 0.6438, 0.5806, 0.1055, 0.2960, - 0.2595, 0.4592, 0.2559, 0.3932, 0.7042, 0.8694, 0.4660, - 0.4246, 0.4675, 0.7217, 0.9048, 0.6757, 0.7971, 0.3444, - 0.9040, 0.2589, 0.8383, 0.9787, 0.5364, 0.5478, 0.4280, - 0.9375, 0.9169, 0.6011, 0.6510, 0.3645, 0.9595, 0.3413, - 0.1561, 0.3706, 0.5420, 0.2194, 0.4928, 0.9365, 0.2372, - 0.4934, 0.8170, 0.4062, 0.4573, 0.8424, 0.2137, 0.2198, - 0.8285, 0.9490, 0.8645, 0.5816, 0.3427, 0.8902, 0.3651, - 0.7666, 0.8408, 0.8585, 0.8931, 0.5551, 0.8982, 0.6356, - 0.4250, 0.1088, 0.6737, 0.3958, 0.4828, 0.5186, 0.8805, - 0.2395, 0.2572, 0.2532, 0.6717, 0.2414, 
0.7893, 0.8437, - 0.3171, 0.1858, 0.6604, 0.8284, 0.5385, 0.2314, 0.5114, - 0.2593, 0.8363, 0.9654, 0.7652, 0.9942, 0.9048, 0.6526, - 0.7743, 0.0670, 0.4879, 0.0500, 0.2026, 0.0553, 0.2990, - 0.2738, 0.8845, 0.6958, 0.2567, 0.3351, 0.1957, 0.2099, - 0.3337, 0.5048, 0.9817, 0.1630, 0.6715, 0.7671, 0.0645, - 0.2446, 0.2884, 0.8150, 0.9791, 0.9499, 0.4039, 0.4962, - 0.6049, 0.6707, 0.4315, 0.8269, 0.1062, 0.0634, 0.9597, - 0.8898, 0.1177, 0.9543, 0.8326, 0.6160, 0.9716, 0.8673, - 0.7943, 0.1918, 0.0735, 0.7498, 0.7051, 0.7537, 0.5409, - 0.9422, 0.7547, 0.3930, 0.7287, 0.3187, 0.8163, 0.1055, - 0.0953, 0.5157, 0.5484, 0.2625, 0.0877, 0.4823, 0.2711, - 0.4063, 0.7443, 0.2411, 0.7149, 0.2424, 0.1102, 0.7648, - 0.9164, 0.7435, 0.3343, 0.4014, 0.3868, 0.7585, 0.7825, - 0.9665, 0.6243, 0.8999, 0.5120, 0.3172, 0.9824, 0.0450, - 0.4103, 0.8334, 0.8361, 0.7898, 0.9067, 0.7235, 0.2233, - 0.3637, 0.9009, 0.8914, 0.3259, 0.8165, 0.9365, 0.9274, - 0.5741, 0.2639, 0.6520, 0.7150, 0.2093, 0.3816, 0.4707, - 0.4201, 0.9190, 0.5078, 0.8874, 0.9120, 0.2753, 0.7359, - 0.5812, 0.5682, 0.7646, 0.6267, 0.4102, 0.8266, 0.8853, - 0.7018, 0.9169, 0.0053, 0.7880, 0.6418, 0.6555, 0.9720, - 0.3526, 0.6341, 0.5088, 0.2195, 0.0203, 0.5525, 0.7633, - 0.6606, 0.4333, 0.8817, 0.0693, 0.9617, 0.5559, 0.9634, - 0.6048, 0.0232, 0.1068, 0.9352, 0.6002, 0.6363, 0.5154, - 0.1116, 0.5347, 0.0671, 0.7793, 0.1196]), + col_indices=tensor([ 69, 4299, 2731, 1236, 4940, 1623, 756, 2957, 1536, + 2653, 3230, 748, 1478, 1150, 2207, 974, 1782, 2823, + 3457, 4355, 2053, 123, 311, 4879, 1537, 218, 149, + 1958, 841, 1292, 4949, 526, 481, 4783, 1625, 1517, + 1984, 1625, 3274, 3093, 3203, 3009, 2427, 4389, 1195, + 2984, 4857, 3794, 74, 670, 4499, 27, 3421, 4120, + 1996, 2985, 566, 1245, 776, 3211, 2528, 1781, 3440, + 2907, 4816, 3169, 2224, 3484, 2744, 1698, 2246, 309, + 3775, 4169, 3076, 4848, 3232, 3044, 2868, 3464, 2661, + 4398, 384, 3123, 4189, 606, 140, 481, 2547, 806, + 1173, 3668, 3379, 200, 2741, 1659, 2582, 3917, 4983, + 955, 3971, 2457, 4408, 1351, 4451, 336, 3224, 3962, + 575, 695, 3617, 3852, 2978, 4487, 831, 2937, 1864, + 3254, 2616, 1161, 2809, 3899, 2838, 3837, 129, 1216, + 4313, 3798, 781, 73, 2360, 2378, 4117, 4875, 2365, + 2222, 2364, 2358, 813, 484, 3715, 1184, 842, 1476, + 253, 3584, 3837, 4812, 4896, 3347, 1614, 3268, 377, + 2235, 4788, 11, 2236, 4655, 3758, 666, 2860, 2992, + 1521, 1702, 535, 4752, 1536, 1814, 1677, 1199, 4005, + 1122, 2423, 2174, 1365, 3928, 2046, 881, 614, 2690, + 877, 3579, 4929, 1636, 154, 4633, 1746, 3820, 3005, + 567, 3507, 4168, 1748, 2984, 3005, 1540, 2710, 3293, + 3606, 3806, 4085, 1313, 3830, 1186, 1600, 585, 4815, + 3786, 531, 3141, 3296, 4198, 1482, 4133, 4299, 2261, + 4921, 1098, 1143, 1900, 2155, 573, 493, 4092, 923, + 3345, 2433, 4600, 1509, 4302, 3751, 4462, 1633, 243, + 3471, 4431, 4211, 1345, 3684, 1521, 1253, 1210, 3393, + 2424, 2937, 4054, 1731, 1912, 2188, 3369]), + values=tensor([0.8616, 0.9936, 0.4802, 0.7353, 0.8916, 0.8986, 0.6038, + 0.0638, 0.7399, 0.9808, 0.9013, 0.9897, 0.8579, 0.8733, + 0.6487, 0.2950, 0.6250, 0.7749, 0.7537, 0.9091, 0.5303, + 0.0981, 0.8374, 0.5103, 0.0147, 0.4312, 0.1599, 0.5711, + 0.3701, 0.1054, 0.4770, 0.2647, 0.7617, 0.0254, 0.0783, + 0.3434, 0.5140, 0.5704, 0.1378, 0.9566, 0.1945, 0.2245, + 0.0753, 0.3806, 0.9586, 0.1089, 0.6277, 0.6981, 0.7376, + 0.2057, 0.5501, 0.2777, 0.6180, 0.7887, 0.3677, 0.7271, + 0.6329, 0.6423, 0.0768, 0.4641, 0.6615, 0.1540, 0.5618, + 0.3613, 0.7808, 0.4288, 0.8153, 0.4724, 0.1443, 0.3988, + 0.4872, 0.8528, 0.6659, 0.3053, 0.4023, 
0.3231, 0.2990, + 0.3203, 0.3736, 0.9898, 0.3897, 0.2523, 0.2508, 0.5545, + 0.0463, 0.0351, 0.0608, 0.2691, 0.5209, 0.0891, 0.7095, + 0.8585, 0.7257, 0.0549, 0.6508, 0.7999, 0.9154, 0.6366, + 0.7366, 0.9443, 0.4145, 0.7889, 0.7631, 0.7724, 0.3768, + 0.6770, 0.6814, 0.5955, 0.5968, 0.7116, 0.2141, 0.1788, + 0.4679, 0.2984, 0.0829, 0.8548, 0.2037, 0.1932, 0.1182, + 0.6697, 0.9311, 0.6830, 0.6822, 0.7984, 0.4577, 0.9410, + 0.2320, 0.5736, 0.0819, 0.5220, 0.9710, 0.5442, 0.6573, + 0.8447, 0.6362, 0.5262, 0.9437, 0.4145, 0.1147, 0.3115, + 0.6198, 0.8729, 0.2770, 0.2102, 0.7507, 0.5146, 0.5118, + 0.6283, 0.7410, 0.5861, 0.3305, 0.5292, 0.6523, 0.2053, + 0.1156, 0.9332, 0.5783, 0.5654, 0.3285, 0.0653, 0.7785, + 0.1961, 0.7988, 0.7918, 0.6370, 0.6195, 0.1524, 0.5540, + 0.4328, 0.2834, 0.4650, 0.9140, 0.8546, 0.8825, 0.0393, + 0.0264, 0.6922, 0.0727, 0.2846, 0.8234, 0.1668, 0.8163, + 0.1332, 0.0089, 0.0570, 0.9107, 0.7490, 0.2233, 0.6581, + 0.9825, 0.0757, 0.7119, 0.1467, 0.5458, 0.3375, 0.5473, + 0.9959, 0.6807, 0.9543, 0.6792, 0.3358, 0.2626, 0.4746, + 0.5161, 0.2104, 0.3933, 0.3448, 0.1075, 0.6300, 0.5290, + 0.6354, 0.7540, 0.0229, 0.7810, 0.6629, 0.0706, 0.3692, + 0.5325, 0.3138, 0.0378, 0.9559, 0.4679, 0.6643, 0.7074, + 0.2707, 0.2257, 0.2642, 0.9823, 0.5243, 0.3260, 0.9173, + 0.2891, 0.2043, 0.0292, 0.2426, 0.3446, 0.5070, 0.8354, + 0.0494, 0.5131, 0.0087, 0.7122, 0.2606, 0.9183, 0.8074, + 0.2718, 0.6542, 0.1124, 0.6017, 0.3121]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.9677, 0.1967, 0.0087, ..., 0.2565, 0.6584, 0.6200]) +tensor([0.2273, 0.2120, 0.4132, ..., 0.3100, 0.5228, 0.5364]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -77,161 +158,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.022435426712036133 seconds +Time: 1.3768103122711182 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46800', '-ss', '5000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.382112741470337} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 249, 250]), - col_indices=tensor([4150, 2888, 4184, 1530, 678, 479, 1107, 471, 4285, - 3837, 1975, 2514, 511, 1660, 2068, 1448, 4627, 3695, - 4646, 2830, 2653, 1667, 2953, 3899, 1002, 4696, 4142, - 2682, 2648, 3794, 1459, 982, 980, 1746, 3339, 149, - 1718, 4640, 314, 53, 2403, 1141, 3357, 1085, 466, - 1619, 2477, 3859, 2168, 947, 4059, 1003, 2781, 2708, - 1717, 3009, 1864, 3007, 3896, 3680, 4372, 3309, 2254, - 2203, 4715, 1069, 4309, 2391, 3090, 3258, 952, 4475, - 4160, 3612, 4789, 3335, 819, 1827, 2260, 3171, 3323, - 4626, 3362, 7, 972, 4803, 364, 2649, 4177, 4599, - 2900, 3224, 1640, 4077, 3701, 2791, 1433, 655, 2314, - 3198, 317, 850, 1087, 611, 645, 558, 726, 1381, - 90, 1884, 2477, 176, 4078, 600, 1776, 1815, 4980, - 3290, 976, 3882, 4218, 3337, 4340, 4550, 1601, 376, - 2443, 2180, 1347, 4274, 3578, 2389, 1349, 3996, 4180, - 3976, 1026, 1825, 1698, 4427, 2513, 1604, 114, 2995, - 2989, 1072, 3384, 2975, 4300, 3198, 3255, 1005, 1851, - 4373, 2417, 1761, 1977, 1033, 304, 4563, 572, 4037, - 3427, 1513, 75, 468, 3187, 2009, 1764, 1805, 1467, - 3749, 4166, 2128, 4824, 3213, 2655, 2007, 1437, 1298, - 483, 971, 2056, 2156, 2263, 607, 4650, 771, 456, - 2047, 3920, 3689, 2454, 4552, 1948, 4918, 2583, 4601, - 1062, 3584, 2635, 2071, 2042, 2779, 1369, 1671, 4485, - 2542, 4111, 1550, 2280, 3307, 1653, 1055, 571, 3882, - 2132, 941, 2447, 3838, 493, 2724, 4427, 2495, 491, - 348, 2552, 3299, 317, 1166, 2830, 4896, 4608, 3014, - 670, 2086, 2508, 2837, 2920, 612, 4090, 2710, 1095, - 1628, 1220, 274, 3831, 1535, 1786, 4549]), - values=tensor([0.5255, 0.0182, 0.6174, 0.1076, 0.3535, 0.7090, 0.2797, - 0.4131, 0.9644, 0.6573, 0.3774, 0.2463, 0.8634, 0.5392, - 0.6180, 0.7460, 0.0840, 0.6919, 0.2395, 0.6380, 0.3064, - 0.4299, 0.2434, 0.7003, 0.1509, 0.6268, 0.3419, 0.0217, - 0.6724, 0.4826, 0.7793, 0.9245, 0.4498, 0.8997, 0.8789, - 0.2006, 0.9117, 0.6104, 0.9445, 0.6803, 0.5546, 0.0430, - 0.8599, 0.6166, 0.9366, 0.0741, 0.0108, 0.4102, 0.8063, - 0.1147, 0.2712, 0.9101, 0.6498, 0.4997, 0.5120, 0.1408, - 0.5873, 0.5440, 0.2130, 0.6524, 0.1914, 0.2027, 0.3598, - 0.1760, 0.9961, 0.4064, 0.1145, 0.4074, 0.8942, 0.9988, - 0.7396, 0.3520, 0.8007, 0.2689, 0.7383, 0.4192, 0.4738, - 0.5964, 0.1917, 0.1869, 0.3576, 0.9988, 0.6764, 0.1906, - 0.7629, 0.4501, 0.8709, 0.2468, 0.5177, 0.2466, 0.2197, - 0.1446, 0.0928, 0.2356, 0.4535, 0.4306, 0.8108, 0.4445, - 0.2001, 0.2909, 0.2893, 0.9446, 0.2722, 0.1526, 0.7522, - 0.5034, 0.0891, 0.6792, 0.6980, 0.8787, 0.8816, 0.0939, - 0.0544, 0.5728, 0.3453, 0.6599, 0.1401, 0.4967, 0.8703, - 0.0012, 0.1313, 0.5851, 0.4868, 0.8996, 0.7538, 0.7366, - 0.3299, 0.6412, 0.9032, 0.8207, 0.4202, 0.9740, 0.5987, - 0.7801, 0.2814, 0.4031, 0.0887, 0.7346, 0.5935, 0.7540, - 0.2319, 0.3570, 0.1145, 0.5888, 0.6276, 0.7231, 0.7135, - 0.9613, 0.8035, 0.6211, 0.0088, 0.0973, 0.8083, 0.1435, - 0.0594, 0.5423, 0.4477, 0.3960, 0.6871, 0.1103, 0.2807, - 0.9626, 0.5226, 0.3908, 0.2801, 0.5699, 0.2801, 0.6331, - 0.2050, 0.6787, 0.0958, 0.2630, 0.1454, 0.4463, 0.8988, - 0.0901, 0.2439, 0.8477, 0.1410, 0.2267, 0.5153, 0.7658, - 0.4023, 0.6070, 0.4869, 0.6448, 0.8450, 0.9154, 0.1431, - 0.8925, 0.4147, 0.0491, 0.3877, 0.8712, 0.5490, 0.2553, - 0.4929, 0.0602, 0.3093, 0.9867, 0.9857, 0.7010, 0.9249, - 0.3952, 0.9763, 0.0416, 0.7299, 0.7590, 0.5814, 0.9861, - 0.2685, 0.2403, 0.0997, 0.7290, 0.0363, 0.1796, 0.0573, - 0.1340, 0.1547, 0.5881, 0.5516, 0.6658, 0.9991, 0.5590, - 0.3010, 0.2004, 0.5300, 0.9600, 0.5439, 
0.0253, 0.1689, - 0.7972, 0.3164, 0.6988, 0.4588, 0.6168, 0.9056, 0.7303, - 0.2798, 0.7978, 0.1100, 0.0574, 0.5151, 0.8940, 0.9058, - 0.1406, 0.2261, 0.0686, 0.3738, 0.0528]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.9505, 0.7309, 0.0755, ..., 0.0952, 0.7737, 0.4268]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 250 -Density: 1e-05 -Time: 1.382112741470337 seconds - -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '355542', '-ss', '5000', '-sd', '1e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.588425874710083} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '342879', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.04533338546753} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([4976, 2999, 3531, 729, 4293, 4703, 527, 519, 1087, - 2588, 2503, 4621, 623, 2508, 4018, 2181, 4566, 129, - 1882, 141, 241, 4234, 3011, 3475, 1507, 1753, 1592, - 16, 1837, 2881, 4935, 1126, 1212, 1843, 3769, 1406, - 4716, 3662, 2501, 4960, 3438, 3187, 281, 2582, 4408, - 1833, 1310, 2092, 1021, 4933, 3776, 3565, 3765, 4067, - 3036, 4650, 3192, 1618, 3142, 599, 3093, 1420, 3804, - 496, 2492, 4465, 4733, 3170, 4826, 4434, 3018, 360, - 3823, 3346, 2952, 3585, 1580, 4171, 3383, 4158, 1729, - 4780, 335, 3992, 4542, 2789, 4029, 416, 4212, 913, - 792, 2949, 3671, 2278, 3431, 4080, 1970, 3530, 1625, - 1846, 123, 3801, 179, 2650, 2793, 3609, 4951, 1904, - 3327, 3068, 797, 2882, 3831, 1946, 4674, 2611, 3167, - 3799, 820, 4595, 2920, 4476, 339, 16, 1057, 3858, - 1996, 2480, 211, 316, 2763, 1963, 1855, 789, 4091, - 3327, 1560, 1579, 4983, 674, 1259, 1259, 1614, 4040, - 4463, 3393, 619, 1723, 4630, 933, 1161, 3845, 4268, - 4840, 4044, 3897, 1211, 4242, 540, 433, 3144, 2336, - 4751, 151, 2282, 2250, 2548, 3515, 851, 1606, 4107, - 209, 996, 4881, 3062, 1226, 876, 3179, 583, 2155, - 304, 1823, 35, 692, 3702, 4106, 1336, 1789, 1359, - 3986, 1095, 312, 3819, 955, 1411, 4676, 1229, 4251, - 263, 3639, 4785, 3043, 1158, 1138, 2259, 2515, 3611, - 4898, 3676, 269, 509, 3101, 1147, 409, 3191, 1984, - 2546, 1127, 3710, 4610, 4906, 997, 3399, 3628, 347, - 847, 2034, 588, 2961, 2738, 4337, 3827, 1890, 1227, - 1635, 1728, 2759, 1429, 4176, 4191, 4862, 4968, 1336, - 119, 3406, 554, 532, 1507, 1917, 2750]), - values=tensor([0.5199, 0.0559, 0.9006, 0.7777, 0.2843, 0.6207, 0.6975, - 0.8689, 0.5742, 0.4737, 0.6755, 0.5958, 0.4384, 0.4390, - 0.8599, 0.7635, 0.7028, 0.4511, 0.3241, 0.0806, 0.8242, - 0.9053, 0.3849, 0.6596, 0.0472, 0.0762, 0.5653, 0.2663, 
- 0.8433, 0.4091, 0.7837, 0.1469, 0.0020, 0.4944, 0.8738, - 0.1320, 0.2431, 0.2650, 0.7193, 0.2483, 0.4028, 0.5091, - 0.3427, 0.6757, 0.9747, 0.5603, 0.1148, 0.1932, 0.5560, - 0.7920, 0.7790, 0.7814, 0.5929, 0.3079, 0.4008, 0.4233, - 0.4893, 0.9629, 0.6575, 0.2210, 0.9465, 0.6464, 0.0795, - 0.6735, 0.0196, 0.9297, 0.4843, 0.7360, 0.6757, 0.8217, - 0.8318, 0.9782, 0.6754, 0.1098, 0.6940, 0.8548, 0.6163, - 0.6934, 0.6171, 0.1041, 0.7511, 0.4878, 0.3635, 0.8620, - 0.3905, 0.3542, 0.8549, 0.4722, 0.9206, 0.2789, 0.9568, - 0.1952, 0.7173, 0.8016, 0.5436, 0.6025, 0.2628, 0.4776, - 0.0648, 0.4119, 0.1873, 0.5768, 0.6229, 0.4612, 0.3781, - 0.9685, 0.2051, 0.1702, 0.4419, 0.4538, 0.4515, 0.1665, - 0.0292, 0.6419, 0.1217, 0.0198, 0.2563, 0.3495, 0.0936, - 0.2664, 0.5259, 0.3038, 0.7533, 0.2522, 0.7253, 0.1463, - 0.2087, 0.7896, 0.0364, 0.6152, 0.7986, 0.8112, 0.7556, - 0.0756, 0.4969, 0.3790, 0.1978, 0.1043, 0.8605, 0.5011, - 0.8762, 0.2381, 0.0660, 0.2738, 0.0392, 0.5623, 0.1390, - 0.4257, 0.6102, 0.9603, 0.2834, 0.0200, 0.2511, 0.8990, - 0.6324, 0.9167, 0.3730, 0.6817, 0.4652, 0.8996, 0.9136, - 0.8631, 0.9770, 0.2565, 0.6496, 0.1799, 0.4018, 0.4388, - 0.9314, 0.6833, 0.7914, 0.9455, 0.7655, 0.3170, 0.6697, - 0.9757, 0.4151, 0.9576, 0.1567, 0.8861, 0.0170, 0.7822, - 0.2221, 0.7184, 0.0581, 0.4824, 0.4815, 0.2522, 0.1559, - 0.1892, 0.1146, 0.0347, 0.4792, 0.0555, 0.6999, 0.5092, - 0.0919, 0.7115, 0.9111, 0.4498, 0.4923, 0.5300, 0.3126, - 0.8103, 0.3276, 0.8722, 0.4823, 0.2311, 0.5957, 0.0760, - 0.2892, 0.3555, 0.6604, 0.7686, 0.1412, 0.8595, 0.6702, - 0.1119, 0.1550, 0.8493, 0.8158, 0.3714, 0.8983, 0.3484, - 0.6611, 0.8110, 0.2241, 0.8267, 0.9605, 0.8151, 0.1779, - 0.2906, 0.1723, 0.3272, 0.4678, 0.1292, 0.9514, 0.6369, - 0.8054, 0.2983, 0.3742, 0.8673, 0.0274, 0.1851, 0.9052, - 0.8742, 0.4529, 0.2266, 0.4410, 0.5323]), + col_indices=tensor([4605, 1547, 1940, 3116, 2937, 2706, 1736, 4590, 1984, + 2418, 4294, 1059, 4879, 701, 3966, 2712, 4154, 1114, + 643, 4965, 781, 809, 3920, 4971, 2063, 1008, 1888, + 3571, 4371, 3913, 4103, 468, 4891, 2723, 3269, 2415, + 1604, 4901, 82, 814, 1999, 4143, 389, 1694, 665, + 4790, 3289, 297, 2848, 1969, 2805, 4443, 2510, 4704, + 1612, 1956, 961, 4927, 1995, 3037, 403, 1323, 3155, + 4566, 3383, 578, 736, 615, 2612, 4668, 1149, 4775, + 1429, 2510, 1093, 1946, 577, 522, 93, 80, 3498, + 3358, 983, 3198, 35, 3731, 4938, 3962, 636, 4035, + 659, 3478, 885, 4027, 2720, 2665, 771, 4886, 4146, + 3590, 2314, 550, 4103, 3341, 4473, 144, 1991, 2528, + 752, 261, 407, 3849, 4430, 77, 722, 2153, 2224, + 3142, 928, 1372, 1752, 1235, 4622, 3743, 1753, 4466, + 1018, 3257, 2825, 4103, 2969, 4873, 999, 938, 2587, + 3463, 4648, 2082, 1541, 836, 2578, 2075, 198, 826, + 1293, 1279, 4568, 3300, 1029, 931, 3504, 4115, 2829, + 1302, 2548, 300, 1991, 1334, 4955, 3119, 3649, 3694, + 4971, 942, 3920, 109, 1962, 1974, 4669, 3781, 3767, + 4554, 657, 2815, 4738, 2265, 1327, 733, 1170, 228, + 1195, 4791, 795, 1568, 2682, 4095, 3229, 2196, 439, + 2358, 1755, 2280, 1622, 4409, 2991, 1066, 1285, 2146, + 2092, 2573, 494, 846, 4228, 673, 4842, 3039, 3446, + 76, 839, 4937, 666, 1575, 1119, 4849, 2027, 412, + 3479, 2588, 2558, 1192, 1350, 2789, 2518, 2643, 2931, + 3603, 1297, 1277, 2346, 1523, 2606, 1583, 2948, 161, + 2409, 2148, 916, 1259, 4838, 3256, 2192, 3896, 29, + 394, 3591, 1309, 3907, 32, 3572, 3318]), + values=tensor([0.7980, 0.7333, 0.2915, 0.1833, 0.2207, 0.6598, 0.5147, + 0.5171, 0.5565, 0.0943, 0.5992, 0.8056, 0.1807, 0.1878, + 0.3625, 0.5930, 0.3092, 0.8760, 0.7030, 0.7627, 0.6150, + 0.4252, 
0.1237, 0.9391, 0.0430, 0.8144, 0.9298, 0.8419, + 0.9282, 0.1145, 0.4201, 0.2592, 0.8076, 0.6482, 0.7684, + 0.0799, 0.3357, 0.2778, 0.6719, 0.4215, 0.6579, 0.8622, + 0.5198, 0.3042, 0.4639, 0.7357, 0.8400, 0.2047, 0.3583, + 0.9300, 0.5889, 0.9962, 0.4764, 0.1332, 0.0521, 0.4130, + 0.8482, 0.1381, 0.2425, 0.3904, 0.8170, 0.0142, 0.6421, + 0.1364, 0.3943, 0.8466, 0.4358, 0.4268, 0.3839, 0.5195, + 0.9398, 0.6091, 0.3110, 0.0490, 0.6042, 0.3749, 0.6524, + 0.6884, 0.7351, 0.4220, 0.6301, 0.3234, 0.8180, 0.0494, + 0.4807, 0.7608, 0.5737, 0.6535, 0.6062, 0.6670, 0.9518, + 0.4707, 0.2159, 0.3091, 0.0059, 0.3200, 0.6281, 0.6848, + 0.8189, 0.5681, 0.1616, 0.4337, 0.0591, 0.4153, 0.3459, + 0.0806, 0.2291, 0.7137, 0.1226, 0.6568, 0.0402, 0.6645, + 0.9000, 0.8252, 0.8737, 0.6408, 0.6140, 0.8365, 0.4012, + 0.6444, 0.2495, 0.0060, 0.6593, 0.0043, 0.6351, 0.0409, + 0.1464, 0.8214, 0.3535, 0.5030, 0.0812, 0.0902, 0.8829, + 0.4683, 0.2207, 0.1262, 0.7779, 0.0511, 0.7906, 0.3973, + 0.7980, 0.2539, 0.7452, 0.3440, 0.2654, 0.5777, 0.0836, + 0.0368, 0.9409, 0.6606, 0.7975, 0.5896, 0.3277, 0.9890, + 0.2059, 0.0489, 0.8196, 0.6400, 0.2541, 0.4423, 0.1987, + 0.6675, 0.9146, 0.3379, 0.5478, 0.3857, 0.3688, 0.5594, + 0.8086, 0.7778, 0.7407, 0.0158, 0.8002, 0.4253, 0.3126, + 0.5825, 0.1509, 0.9435, 0.1455, 0.8791, 0.0371, 0.8052, + 0.2046, 0.3499, 0.0013, 0.3831, 0.4555, 0.8500, 0.6225, + 0.6224, 0.1592, 0.1973, 0.8511, 0.9821, 0.3558, 0.1238, + 0.7911, 0.4500, 0.2380, 0.1023, 0.2318, 0.7731, 0.8252, + 0.1892, 0.6338, 0.8715, 0.5299, 0.5117, 0.3313, 0.7155, + 0.0565, 0.8656, 0.6506, 0.4660, 0.3607, 0.2434, 0.6528, + 0.7251, 0.0304, 0.4245, 0.6984, 0.1343, 0.4112, 0.2836, + 0.5980, 0.1947, 0.6220, 0.4071, 0.3919, 0.8165, 0.3254, + 0.6610, 0.4542, 0.2660, 0.3119, 0.4452, 0.7801, 0.6988, + 0.9613, 0.3394, 0.8118, 0.2309, 0.2364, 0.6556, 0.3089, + 0.9798, 0.3639, 0.5008, 0.0230, 0.8226]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.4269, 0.0147, 0.1392, ..., 0.1737, 0.9746, 0.9710]) +tensor([0.2093, 0.5163, 0.8343, ..., 0.9510, 0.0319, 0.6814]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -239,77 +239,77 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.588425874710083 seconds +Time: 10.04533338546753 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([4976, 2999, 3531, 729, 4293, 4703, 527, 519, 1087, - 2588, 2503, 4621, 623, 2508, 4018, 2181, 4566, 129, - 1882, 141, 241, 4234, 3011, 3475, 1507, 1753, 1592, - 16, 1837, 2881, 4935, 1126, 1212, 1843, 3769, 1406, - 4716, 3662, 2501, 4960, 3438, 3187, 281, 2582, 4408, - 1833, 1310, 2092, 1021, 4933, 3776, 3565, 3765, 4067, - 3036, 4650, 3192, 1618, 3142, 599, 3093, 1420, 3804, - 496, 2492, 4465, 4733, 3170, 4826, 4434, 3018, 360, - 3823, 3346, 2952, 3585, 1580, 4171, 3383, 4158, 1729, - 4780, 335, 3992, 4542, 2789, 4029, 416, 4212, 913, - 792, 2949, 3671, 2278, 3431, 4080, 1970, 3530, 1625, - 1846, 123, 3801, 179, 2650, 2793, 3609, 4951, 1904, - 3327, 3068, 797, 2882, 3831, 1946, 4674, 2611, 3167, - 3799, 820, 4595, 2920, 4476, 339, 16, 1057, 3858, - 1996, 2480, 211, 316, 2763, 1963, 1855, 789, 4091, - 3327, 1560, 1579, 4983, 674, 1259, 1259, 1614, 4040, - 4463, 3393, 619, 1723, 4630, 933, 1161, 3845, 4268, - 4840, 4044, 3897, 1211, 4242, 540, 433, 3144, 2336, - 4751, 151, 2282, 2250, 2548, 3515, 851, 1606, 4107, - 209, 996, 4881, 3062, 1226, 876, 3179, 583, 2155, - 304, 1823, 35, 692, 3702, 4106, 1336, 1789, 1359, - 3986, 1095, 312, 3819, 955, 1411, 4676, 1229, 4251, - 263, 3639, 4785, 3043, 1158, 1138, 2259, 2515, 3611, - 4898, 3676, 269, 509, 3101, 1147, 409, 3191, 1984, - 2546, 1127, 3710, 4610, 4906, 997, 3399, 3628, 347, - 847, 2034, 588, 2961, 2738, 4337, 3827, 1890, 1227, - 1635, 1728, 2759, 1429, 4176, 4191, 4862, 4968, 1336, - 119, 3406, 554, 532, 1507, 1917, 2750]), - values=tensor([0.5199, 0.0559, 0.9006, 0.7777, 0.2843, 0.6207, 0.6975, - 0.8689, 0.5742, 0.4737, 0.6755, 0.5958, 0.4384, 0.4390, - 0.8599, 0.7635, 0.7028, 0.4511, 0.3241, 0.0806, 0.8242, - 0.9053, 0.3849, 0.6596, 0.0472, 0.0762, 0.5653, 0.2663, - 0.8433, 0.4091, 0.7837, 0.1469, 0.0020, 0.4944, 0.8738, - 0.1320, 0.2431, 0.2650, 0.7193, 0.2483, 0.4028, 0.5091, - 0.3427, 0.6757, 0.9747, 0.5603, 0.1148, 0.1932, 0.5560, - 0.7920, 0.7790, 0.7814, 0.5929, 0.3079, 0.4008, 0.4233, - 0.4893, 0.9629, 0.6575, 0.2210, 0.9465, 0.6464, 0.0795, - 0.6735, 0.0196, 0.9297, 0.4843, 0.7360, 0.6757, 0.8217, - 0.8318, 0.9782, 0.6754, 0.1098, 0.6940, 0.8548, 0.6163, - 0.6934, 0.6171, 0.1041, 0.7511, 0.4878, 0.3635, 0.8620, - 0.3905, 0.3542, 0.8549, 0.4722, 0.9206, 0.2789, 0.9568, - 0.1952, 0.7173, 0.8016, 0.5436, 0.6025, 0.2628, 0.4776, - 0.0648, 0.4119, 0.1873, 0.5768, 0.6229, 0.4612, 0.3781, - 0.9685, 0.2051, 0.1702, 0.4419, 0.4538, 0.4515, 0.1665, - 0.0292, 0.6419, 0.1217, 0.0198, 0.2563, 0.3495, 0.0936, - 0.2664, 0.5259, 0.3038, 0.7533, 0.2522, 0.7253, 0.1463, - 0.2087, 0.7896, 0.0364, 0.6152, 0.7986, 0.8112, 0.7556, - 0.0756, 0.4969, 0.3790, 0.1978, 0.1043, 0.8605, 0.5011, - 0.8762, 0.2381, 0.0660, 0.2738, 0.0392, 0.5623, 0.1390, - 0.4257, 0.6102, 0.9603, 0.2834, 0.0200, 0.2511, 0.8990, - 0.6324, 0.9167, 0.3730, 0.6817, 0.4652, 0.8996, 0.9136, - 0.8631, 0.9770, 0.2565, 0.6496, 0.1799, 0.4018, 0.4388, - 0.9314, 0.6833, 0.7914, 0.9455, 0.7655, 0.3170, 0.6697, - 0.9757, 0.4151, 0.9576, 0.1567, 0.8861, 0.0170, 0.7822, - 0.2221, 0.7184, 0.0581, 0.4824, 0.4815, 0.2522, 0.1559, - 0.1892, 0.1146, 0.0347, 0.4792, 0.0555, 0.6999, 0.5092, - 0.0919, 0.7115, 0.9111, 0.4498, 0.4923, 0.5300, 0.3126, - 0.8103, 0.3276, 0.8722, 0.4823, 0.2311, 0.5957, 0.0760, - 0.2892, 0.3555, 0.6604, 0.7686, 0.1412, 0.8595, 0.6702, - 0.1119, 0.1550, 0.8493, 0.8158, 0.3714, 0.8983, 0.3484, - 0.6611, 0.8110, 0.2241, 0.8267, 0.9605, 
0.8151, 0.1779, - 0.2906, 0.1723, 0.3272, 0.4678, 0.1292, 0.9514, 0.6369, - 0.8054, 0.2983, 0.3742, 0.8673, 0.0274, 0.1851, 0.9052, - 0.8742, 0.4529, 0.2266, 0.4410, 0.5323]), + col_indices=tensor([4605, 1547, 1940, 3116, 2937, 2706, 1736, 4590, 1984, + 2418, 4294, 1059, 4879, 701, 3966, 2712, 4154, 1114, + 643, 4965, 781, 809, 3920, 4971, 2063, 1008, 1888, + 3571, 4371, 3913, 4103, 468, 4891, 2723, 3269, 2415, + 1604, 4901, 82, 814, 1999, 4143, 389, 1694, 665, + 4790, 3289, 297, 2848, 1969, 2805, 4443, 2510, 4704, + 1612, 1956, 961, 4927, 1995, 3037, 403, 1323, 3155, + 4566, 3383, 578, 736, 615, 2612, 4668, 1149, 4775, + 1429, 2510, 1093, 1946, 577, 522, 93, 80, 3498, + 3358, 983, 3198, 35, 3731, 4938, 3962, 636, 4035, + 659, 3478, 885, 4027, 2720, 2665, 771, 4886, 4146, + 3590, 2314, 550, 4103, 3341, 4473, 144, 1991, 2528, + 752, 261, 407, 3849, 4430, 77, 722, 2153, 2224, + 3142, 928, 1372, 1752, 1235, 4622, 3743, 1753, 4466, + 1018, 3257, 2825, 4103, 2969, 4873, 999, 938, 2587, + 3463, 4648, 2082, 1541, 836, 2578, 2075, 198, 826, + 1293, 1279, 4568, 3300, 1029, 931, 3504, 4115, 2829, + 1302, 2548, 300, 1991, 1334, 4955, 3119, 3649, 3694, + 4971, 942, 3920, 109, 1962, 1974, 4669, 3781, 3767, + 4554, 657, 2815, 4738, 2265, 1327, 733, 1170, 228, + 1195, 4791, 795, 1568, 2682, 4095, 3229, 2196, 439, + 2358, 1755, 2280, 1622, 4409, 2991, 1066, 1285, 2146, + 2092, 2573, 494, 846, 4228, 673, 4842, 3039, 3446, + 76, 839, 4937, 666, 1575, 1119, 4849, 2027, 412, + 3479, 2588, 2558, 1192, 1350, 2789, 2518, 2643, 2931, + 3603, 1297, 1277, 2346, 1523, 2606, 1583, 2948, 161, + 2409, 2148, 916, 1259, 4838, 3256, 2192, 3896, 29, + 394, 3591, 1309, 3907, 32, 3572, 3318]), + values=tensor([0.7980, 0.7333, 0.2915, 0.1833, 0.2207, 0.6598, 0.5147, + 0.5171, 0.5565, 0.0943, 0.5992, 0.8056, 0.1807, 0.1878, + 0.3625, 0.5930, 0.3092, 0.8760, 0.7030, 0.7627, 0.6150, + 0.4252, 0.1237, 0.9391, 0.0430, 0.8144, 0.9298, 0.8419, + 0.9282, 0.1145, 0.4201, 0.2592, 0.8076, 0.6482, 0.7684, + 0.0799, 0.3357, 0.2778, 0.6719, 0.4215, 0.6579, 0.8622, + 0.5198, 0.3042, 0.4639, 0.7357, 0.8400, 0.2047, 0.3583, + 0.9300, 0.5889, 0.9962, 0.4764, 0.1332, 0.0521, 0.4130, + 0.8482, 0.1381, 0.2425, 0.3904, 0.8170, 0.0142, 0.6421, + 0.1364, 0.3943, 0.8466, 0.4358, 0.4268, 0.3839, 0.5195, + 0.9398, 0.6091, 0.3110, 0.0490, 0.6042, 0.3749, 0.6524, + 0.6884, 0.7351, 0.4220, 0.6301, 0.3234, 0.8180, 0.0494, + 0.4807, 0.7608, 0.5737, 0.6535, 0.6062, 0.6670, 0.9518, + 0.4707, 0.2159, 0.3091, 0.0059, 0.3200, 0.6281, 0.6848, + 0.8189, 0.5681, 0.1616, 0.4337, 0.0591, 0.4153, 0.3459, + 0.0806, 0.2291, 0.7137, 0.1226, 0.6568, 0.0402, 0.6645, + 0.9000, 0.8252, 0.8737, 0.6408, 0.6140, 0.8365, 0.4012, + 0.6444, 0.2495, 0.0060, 0.6593, 0.0043, 0.6351, 0.0409, + 0.1464, 0.8214, 0.3535, 0.5030, 0.0812, 0.0902, 0.8829, + 0.4683, 0.2207, 0.1262, 0.7779, 0.0511, 0.7906, 0.3973, + 0.7980, 0.2539, 0.7452, 0.3440, 0.2654, 0.5777, 0.0836, + 0.0368, 0.9409, 0.6606, 0.7975, 0.5896, 0.3277, 0.9890, + 0.2059, 0.0489, 0.8196, 0.6400, 0.2541, 0.4423, 0.1987, + 0.6675, 0.9146, 0.3379, 0.5478, 0.3857, 0.3688, 0.5594, + 0.8086, 0.7778, 0.7407, 0.0158, 0.8002, 0.4253, 0.3126, + 0.5825, 0.1509, 0.9435, 0.1455, 0.8791, 0.0371, 0.8052, + 0.2046, 0.3499, 0.0013, 0.3831, 0.4555, 0.8500, 0.6225, + 0.6224, 0.1592, 0.1973, 0.8511, 0.9821, 0.3558, 0.1238, + 0.7911, 0.4500, 0.2380, 0.1023, 0.2318, 0.7731, 0.8252, + 0.1892, 0.6338, 0.8715, 0.5299, 0.5117, 0.3313, 0.7155, + 0.0565, 0.8656, 0.6506, 0.4660, 0.3607, 0.2434, 0.6528, + 0.7251, 0.0304, 0.4245, 0.6984, 0.1343, 0.4112, 
0.2836, + 0.5980, 0.1947, 0.6220, 0.4071, 0.3919, 0.8165, 0.3254, + 0.6610, 0.4542, 0.2660, 0.3119, 0.4452, 0.7801, 0.6988, + 0.9613, 0.3394, 0.8118, 0.2309, 0.2364, 0.6556, 0.3089, + 0.9798, 0.3639, 0.5008, 0.0230, 0.8226]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.4269, 0.0147, 0.1392, ..., 0.1737, 0.9746, 0.9710]) +tensor([0.2093, 0.5163, 0.8343, ..., 0.9510, 0.0319, 0.6814]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -317,13 +317,13 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.588425874710083 seconds +Time: 10.04533338546753 seconds -[18.34, 17.96, 17.95, 19.17, 18.44, 18.12, 18.19, 19.28, 18.14, 18.1] -[46.57] -14.066857814788818 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 355542, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.588425874710083, 'TIME_S_1KI': 0.02978108317641821, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 655.0935684347153, 'W': 46.57} -[18.34, 17.96, 17.95, 19.17, 18.44, 18.12, 18.19, 19.28, 18.14, 18.1, 18.29, 18.16, 18.13, 17.89, 18.15, 18.08, 18.32, 17.91, 18.06, 18.27] -328.45 -16.4225 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 355542, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.588425874710083, 'TIME_S_1KI': 0.02978108317641821, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 655.0935684347153, 'W': 46.57, 'J_1KI': 1.8425209073322286, 'W_1KI': 0.13098311873140162, 'W_D': 30.1475, 'J_D': 424.08059597134593, 'W_D_1KI': 0.08479307648604104, 'J_D_1KI': 0.00023848962003375422} +[19.7, 18.48, 18.66, 18.83, 18.82, 18.51, 18.76, 18.43, 19.01, 18.43] +[52.32] +13.811020612716675 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 342879, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.04533338546753, 'TIME_S_1KI': 0.029297021355835527, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 722.5925984573364, 'W': 52.32} +[19.7, 18.48, 18.66, 18.83, 18.82, 18.51, 18.76, 18.43, 19.01, 18.43, 18.98, 18.69, 19.12, 18.51, 18.64, 18.66, 18.98, 18.52, 18.59, 18.71] +337.12 +16.856 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 342879, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.04533338546753, 'TIME_S_1KI': 0.029297021355835527, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 722.5925984573364, 'W': 52.32, 'J_1KI': 2.1074273970040056, 'W_1KI': 0.1525902723701364, 'W_D': 35.464, 'J_D': 489.7940350093841, 'W_D_1KI': 0.10343007299951294, 'J_D_1KI': 0.0003016518159453129} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.json index 0d245c9..24cd4aa 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 303638, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, 
"MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.314902782440186, "TIME_S_1KI": 0.033971053631100805, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 652.3264142632485, "W": 46.59, "J_1KI": 2.148368828220606, "W_1KI": 0.1534392928421344, "W_D": 30.048000000000002, "J_D": 420.71483356475835, "W_D_1KI": 0.0989599457248434, "J_D_1KI": 0.0003259142324901475} +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 308095, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.368000268936157, "TIME_S_1KI": 0.03365195887286764, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 744.0538631057739, "W": 52.44, "J_1KI": 2.4150144049912328, "W_1KI": 0.17020724127298398, "W_D": 35.279, "J_D": 500.56209451770786, "W_D_1KI": 0.11450688910887877, "J_D_1KI": 0.00037166097829850785} diff --git a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.output index 2579376..27c2f2a 100644 --- a/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_1core/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.018550395965576172} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.019425392150878906} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1250, 1250]), - col_indices=tensor([2148, 2186, 2653, ..., 4713, 108, 1050]), - values=tensor([0.2240, 0.7824, 0.4591, ..., 0.0832, 0.2125, 0.9204]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1249, 1250]), + col_indices=tensor([1829, 4982, 735, ..., 4930, 1039, 2162]), + values=tensor([0.6306, 0.5443, 0.9048, ..., 0.9471, 0.6437, 0.9353]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.9989, 0.0104, 0.9920, ..., 0.8791, 0.9452, 0.9082]) +tensor([0.8363, 0.8481, 0.1501, ..., 0.0482, 0.5984, 0.9282]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 0.018550395965576172 seconds +Time: 0.019425392150878906 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '56602', '-ss', '5000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9573328495025635} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '54052', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.8421094417572021} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 1249, 1250, 1250]), - col_indices=tensor([ 143, 2910, 3407, ..., 360, 1598, 1598]), - values=tensor([0.9185, 0.3997, 0.2489, ..., 0.0567, 0.3179, 0.0072]), +tensor(crow_indices=tensor([ 0, 2, 2, ..., 1250, 1250, 1250]), + col_indices=tensor([ 6, 277, 1426, ..., 3500, 4127, 1110]), + values=tensor([0.4840, 0.0118, 0.8124, ..., 0.4395, 0.7635, 0.9699]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.5677, 0.2086, 0.3083, ..., 0.5984, 0.8633, 0.3307]) +tensor([0.7095, 0.2973, 0.7856, ..., 0.4374, 0.4236, 0.1506]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 1.9573328495025635 seconds +Time: 1.8421094417572021 seconds -['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '303638', '-ss', '5000', '-sd', '5e-05', '-c', '1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.314902782440186} +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '308095', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.368000268936157} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), - col_indices=tensor([4078, 67, 3564, ..., 1146, 2529, 4353]), - values=tensor([0.7614, 0.2386, 0.2238, ..., 0.8351, 0.5866, 0.5164]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([2390, 3281, 4702, ..., 2980, 290, 2178]), + values=tensor([0.2443, 0.2640, 0.7541, ..., 0.7133, 0.6253, 0.6379]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.2149, 0.3426, 0.8577, ..., 0.2235, 0.1553, 0.5182]) +tensor([0.2175, 0.8851, 0.1665, ..., 0.6673, 0.5931, 0.2307]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.314902782440186 seconds +Time: 10.368000268936157 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), - col_indices=tensor([4078, 67, 3564, ..., 1146, 2529, 4353]), - values=tensor([0.7614, 0.2386, 0.2238, ..., 0.8351, 0.5866, 0.5164]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([2390, 3281, 4702, ..., 2980, 290, 2178]), + values=tensor([0.2443, 0.2640, 0.7541, ..., 0.7133, 0.6253, 0.6379]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.2149, 0.3426, 0.8577, ..., 0.2235, 0.1553, 0.5182]) +tensor([0.2175, 0.8851, 0.1665, ..., 0.6673, 0.5931, 0.2307]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.314902782440186 seconds +Time: 10.368000268936157 seconds -[18.14, 22.27, 18.32, 18.01, 17.87, 18.05, 18.11, 18.01, 18.55, 17.98] -[46.59] -14.001425504684448 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 303638, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.314902782440186, 'TIME_S_1KI': 0.033971053631100805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 652.3264142632485, 'W': 46.59} -[18.14, 22.27, 18.32, 18.01, 17.87, 18.05, 18.11, 18.01, 18.55, 17.98, 18.58, 17.96, 18.17, 18.69, 18.11, 18.0, 18.22, 18.0, 18.06, 18.18] -330.84000000000003 -16.542 -{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 303638, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.314902782440186, 'TIME_S_1KI': 0.033971053631100805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 652.3264142632485, 'W': 46.59, 'J_1KI': 2.148368828220606, 'W_1KI': 0.1534392928421344, 'W_D': 30.048000000000002, 'J_D': 420.71483356475835, 'W_D_1KI': 0.0989599457248434, 'J_D_1KI': 0.0003259142324901475} +[18.94, 19.01, 18.85, 18.93, 18.48, 18.4, 18.7, 18.61, 18.8, 18.72] +[52.44] +14.18867015838623 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 308095, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.368000268936157, 'TIME_S_1KI': 0.03365195887286764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 744.0538631057739, 'W': 52.44} +[18.94, 19.01, 18.85, 18.93, 18.48, 18.4, 18.7, 18.61, 18.8, 18.72, 19.02, 18.82, 18.84, 18.45, 19.68, 22.53, 19.83, 18.49, 19.11, 18.7] +343.2199999999999 +17.160999999999994 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 308095, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.368000268936157, 'TIME_S_1KI': 0.03365195887286764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 744.0538631057739, 'W': 52.44, 'J_1KI': 2.4150144049912328, 'W_1KI': 0.17020724127298398, 'W_D': 35.279, 'J_D': 500.56209451770786, 'W_D_1KI': 0.11450688910887877, 'J_D_1KI': 0.00037166097829850785} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..1bdafea --- /dev/null +++ 
b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 426, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.062527418136597, "TIME_S_1KI": 23.62095638060234, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 310.93658995628357, "W": 22.910177826692795, "J_1KI": 729.8980984889287, "W_1KI": 53.77976015655585, "W_D": 4.4831778266927955, "J_D": 60.845622244596484, "W_D_1KI": 10.52389161195492, "J_D_1KI": 24.70397092008197} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..6496241 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.4617955684661865} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 14, 23, ..., 999981, + 999993, 1000000]), + col_indices=tensor([ 4955, 8657, 25975, ..., 77712, 83219, 89598]), + values=tensor([0.6839, 0.0631, 0.2295, ..., 0.4308, 0.9509, 0.3745]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8318, 0.0587, 0.7825, ..., 0.4905, 0.7506, 0.0148]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 2.4617955684661865 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 426 -ss 100000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.062527418136597} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 16, ..., 999978, + 999987, 1000000]), + col_indices=tensor([ 4266, 12843, 25231, ..., 84479, 87700, 95752]), + values=tensor([0.9986, 0.4680, 0.6719, ..., 0.1198, 0.1607, 0.3222]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.1538, 0.6601, 0.2448, ..., 0.8405, 0.0282, 0.7640]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.062527418136597 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 16, ..., 999978, + 999987, 1000000]), + col_indices=tensor([ 4266, 12843, 25231, ..., 84479, 87700, 95752]), + values=tensor([0.9986, 0.4680, 0.6719, ..., 0.1198, 0.1607, 0.3222]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.1538, 0.6601, 0.2448, ..., 0.8405, 0.0282, 0.7640]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.062527418136597 seconds + +[20.6, 20.44, 20.32, 20.4, 20.44, 20.84, 21.0, 20.88, 20.52, 20.4] +[20.4, 20.24, 20.28, 24.24, 25.56, 27.32, 28.24, 28.56, 25.84, 24.76, 24.56, 24.68, 24.68] +13.571985006332397 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 426, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.062527418136597, 'TIME_S_1KI': 23.62095638060234, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 310.93658995628357, 'W': 22.910177826692795} +[20.6, 20.44, 20.32, 20.4, 20.44, 20.84, 21.0, 20.88, 20.52, 20.4, 20.64, 20.56, 20.4, 20.2, 20.36, 20.36, 20.12, 20.2, 20.4, 20.56] +368.53999999999996 +18.427 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 426, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.062527418136597, 'TIME_S_1KI': 23.62095638060234, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 310.93658995628357, 'W': 22.910177826692795, 'J_1KI': 729.8980984889287, 'W_1KI': 53.77976015655585, 'W_D': 4.4831778266927955, 'J_D': 60.845622244596484, 'W_D_1KI': 10.52389161195492, 'J_D_1KI': 24.70397092008197} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..9db277a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 22.583206176757812, "TIME_S_1KI": 225.83206176757812, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 712.6889356327057, "W": 
23.468990304831248, "J_1KI": 7126.889356327057, "W_1KI": 234.68990304831246, "W_D": 4.866990304831251, "J_D": 147.7971610636712, "W_D_1KI": 48.66990304831251, "J_D_1KI": 486.6990304831251} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..1c6b2fe --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 22.583206176757812} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 115, 205, ..., 9999778, + 9999875, 10000000]), + col_indices=tensor([ 1402, 2097, 3965, ..., 98532, 99293, 99429]), + values=tensor([0.3375, 0.2900, 0.6603, ..., 0.1611, 0.9536, 0.6072]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8425, 0.9618, 0.5102, ..., 0.7524, 0.4133, 0.9192]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 22.583206176757812 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 115, 205, ..., 9999778, + 9999875, 10000000]), + col_indices=tensor([ 1402, 2097, 3965, ..., 98532, 99293, 99429]), + values=tensor([0.3375, 0.2900, 0.6603, ..., 0.1611, 0.9536, 0.6072]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8425, 0.9618, 0.5102, ..., 0.7524, 0.4133, 0.9192]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 22.583206176757812 seconds + +[20.72, 21.0, 21.0, 21.04, 20.76, 20.44, 20.16, 20.32, 20.24, 20.52] +[20.4, 20.36, 21.96, 22.92, 24.92, 26.56, 28.48, 28.48, 28.88, 28.96, 27.92, 26.48, 25.56, 24.72, 24.64, 24.76, 24.96, 24.76, 24.6, 24.56, 24.6, 24.36, 24.76, 24.88, 24.92, 24.68, 24.6, 24.36, 24.04] +30.367260217666626 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 22.583206176757812, 'TIME_S_1KI': 225.83206176757812, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 712.6889356327057, 'W': 23.468990304831248} +[20.72, 21.0, 21.0, 21.04, 20.76, 20.44, 20.16, 20.32, 20.24, 20.52, 20.24, 20.24, 20.2, 20.4, 20.64, 21.08, 21.24, 21.12, 21.0, 20.84] +372.03999999999996 +18.601999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 22.583206176757812, 'TIME_S_1KI': 225.83206176757812, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 712.6889356327057, 'W': 23.468990304831248, 'J_1KI': 7126.889356327057, 'W_1KI': 234.68990304831246, 'W_D': 4.866990304831251, 'J_D': 147.7971610636712, 'W_D_1KI': 48.66990304831251, 'J_D_1KI': 486.6990304831251} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..21e4694 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3104, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.44502592086792, "TIME_S_1KI": 3.3650212373930155, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 348.514190454483, "W": 23.934119588627873, "J_1KI": 112.27905620311952, "W_1KI": 7.710734403552794, "W_D": 5.569119588627871, "J_D": 81.09415505290025, "W_D_1KI": 1.794175125202278, "J_D_1KI": 0.578020336727538} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..9aacb6b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": 
[100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.3382716178894043} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 4, ..., 99999, 100000, + 100000]), + col_indices=tensor([91034, 37166, 45389, ..., 40200, 40353, 102]), + values=tensor([0.2917, 0.4189, 0.5553, ..., 0.7170, 0.1120, 0.1885]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2165, 0.9661, 0.1946, ..., 0.3640, 0.8184, 0.1773]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.3382716178894043 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3104 -ss 100000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.44502592086792} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000, + 100000]), + col_indices=tensor([ 252, 18132, 64781, ..., 90653, 85542, 48452]), + values=tensor([0.2676, 0.4026, 0.9927, ..., 0.1189, 0.3190, 0.1177]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6783, 0.3478, 0.4100, ..., 0.2741, 0.0736, 0.8098]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.44502592086792 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 100000, 100000, + 100000]), + col_indices=tensor([ 252, 18132, 64781, ..., 90653, 85542, 48452]), + values=tensor([0.2676, 0.4026, 0.9927, ..., 0.1189, 0.3190, 0.1177]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6783, 0.3478, 0.4100, ..., 0.2741, 0.0736, 0.8098]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.44502592086792 seconds + +[20.36, 20.36, 20.36, 20.36, 20.36, 20.52, 20.6, 20.76, 20.96, 20.92] +[20.92, 20.6, 23.92, 25.4, 27.24, 28.24, 28.24, 29.08, 26.32, 25.76, 25.4, 25.2, 25.44, 25.64] +14.56139588356018 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3104, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.44502592086792, 'TIME_S_1KI': 3.3650212373930155, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.514190454483, 'W': 23.934119588627873} +[20.36, 20.36, 20.36, 20.36, 20.36, 20.52, 20.6, 20.76, 20.96, 20.92, 20.24, 20.2, 20.48, 20.2, 20.2, 20.2, 20.12, 20.28, 20.28, 20.6] +367.30000000000007 +18.365000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3104, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.44502592086792, 'TIME_S_1KI': 3.3650212373930155, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 348.514190454483, 'W': 23.934119588627873, 'J_1KI': 112.27905620311952, 'W_1KI': 7.710734403552794, 'W_D': 5.569119588627871, 'J_D': 81.09415505290025, 'W_D_1KI': 1.794175125202278, 'J_D_1KI': 0.578020336727538} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..9e7c05a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 868, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.049697399139404, "TIME_S_1KI": 12.730066128040788, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 347.2841278934479, "W": 23.818433138573972, "J_1KI": 400.0969215362303, "W_1KI": 27.44059117347232, "W_D": 5.198433138573975, "J_D": 75.79563728809362, "W_D_1KI": 5.988978270246515, "J_D_1KI": 6.899744550975249} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..5c6adba --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, 
"TIME_S": 1.209580421447754} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 11, ..., 499991, 499997, + 500000]), + col_indices=tensor([ 8709, 33303, 39829, ..., 65447, 85964, 93697]), + values=tensor([0.2765, 0.3303, 0.4846, ..., 0.1571, 0.7749, 0.0327]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.2847, 0.7446, 0.1507, ..., 0.7274, 0.5755, 0.0187]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 1.209580421447754 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 868 -ss 100000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.049697399139404} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 19, ..., 499985, 499993, + 500000]), + col_indices=tensor([ 930, 5720, 18229, ..., 18263, 29630, 53753]), + values=tensor([0.0983, 0.1468, 0.4729, ..., 0.5988, 0.3077, 0.5585]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.4408, 0.1732, 0.5273, ..., 0.8772, 0.6136, 0.9894]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 11.049697399139404 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 19, ..., 499985, 499993, + 500000]), + col_indices=tensor([ 930, 5720, 18229, ..., 18263, 29630, 53753]), + values=tensor([0.0983, 0.1468, 0.4729, ..., 0.5988, 0.3077, 0.5585]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.4408, 0.1732, 0.5273, ..., 0.8772, 0.6136, 0.9894]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 11.049697399139404 seconds + +[20.76, 20.76, 20.64, 20.6, 20.48, 20.24, 20.32, 20.36, 20.4, 20.64] +[20.8, 20.68, 23.8, 24.88, 26.88, 27.84, 27.84, 28.76, 26.08, 26.28, 25.56, 25.36, 25.48, 25.4] +14.580477476119995 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 868, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.049697399139404, 'TIME_S_1KI': 12.730066128040788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 347.2841278934479, 'W': 23.818433138573972} +[20.76, 20.76, 20.64, 20.6, 20.48, 20.24, 20.32, 20.36, 20.4, 20.64, 20.68, 20.8, 20.6, 20.6, 20.92, 21.0, 21.0, 21.08, 21.04, 21.04] +372.4 +18.619999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 868, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.049697399139404, 'TIME_S_1KI': 12.730066128040788, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 347.2841278934479, 'W': 23.818433138573972, 'J_1KI': 400.0969215362303, 'W_1KI': 27.44059117347232, 'W_D': 5.198433138573975, 'J_D': 75.79563728809362, 'W_D_1KI': 5.988978270246515, 'J_D_1KI': 6.899744550975249} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..669cf9a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 32669, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.073116302490234, "TIME_S_1KI": 0.33894873741131454, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 342.2707403564453, "W": 23.410284009847928, "J_1KI": 10.476927373242074, "W_1KI": 0.7165901622286549, "W_D": 4.943284009847925, "J_D": 72.2734280853271, "W_D_1KI": 0.15131421255159097, "J_D_1KI": 0.004631736892821665} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..4a81d8b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 
0.03962588310241699} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9995, 9999, 10000]), + col_indices=tensor([5736, 4740, 5169, ..., 5050, 7314, 6933]), + values=tensor([0.2904, 0.9920, 0.0901, ..., 0.6475, 0.2992, 0.6153]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.6798, 0.4263, 0.2506, ..., 0.2181, 0.0906, 0.7562]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.03962588310241699 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 26497 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.51607346534729} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9996, 9998, 10000]), + col_indices=tensor([5374, 4189, 5897, ..., 9913, 4567, 8496]), + values=tensor([0.8167, 0.6460, 0.7856, ..., 0.9381, 0.0308, 0.1187]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.9083, 0.0911, 0.6427, ..., 0.4641, 0.3576, 0.6926]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 8.51607346534729 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32669 -ss 10000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.073116302490234} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 5, ..., 9999, 10000, 10000]), + col_indices=tensor([2638, 262, 675, ..., 9893, 8606, 4272]), + values=tensor([0.0918, 0.5777, 0.8540, ..., 0.4523, 0.7955, 0.7135]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.1165, 0.4058, 0.2834, ..., 0.7342, 0.6568, 0.2677]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 11.073116302490234 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 5, ..., 9999, 10000, 10000]), + col_indices=tensor([2638, 262, 675, ..., 9893, 8606, 4272]), + values=tensor([0.0918, 0.5777, 0.8540, ..., 0.4523, 0.7955, 0.7135]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.1165, 0.4058, 0.2834, ..., 0.7342, 0.6568, 0.2677]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 11.073116302490234 seconds + +[20.76, 20.6, 20.6, 20.6, 20.44, 20.48, 20.48, 20.48, 20.48, 20.64] +[20.44, 20.4, 23.56, 25.72, 27.84, 28.36, 29.08, 29.08, 26.36, 24.68, 23.72, 23.68, 23.44, 23.32] +14.620529174804688 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32669, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.073116302490234, 'TIME_S_1KI': 0.33894873741131454, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 342.2707403564453, 'W': 23.410284009847928} +[20.76, 20.6, 20.6, 20.6, 20.44, 20.48, 20.48, 20.48, 20.48, 20.64, 20.6, 20.88, 20.92, 20.76, 20.64, 20.48, 20.16, 20.16, 20.08, 20.2] +369.34000000000003 +18.467000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 32669, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.073116302490234, 'TIME_S_1KI': 0.33894873741131454, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 342.2707403564453, 'W': 23.410284009847928, 'J_1KI': 10.476927373242074, 'W_1KI': 0.7165901622286549, 'W_D': 4.943284009847925, 'J_D': 72.2734280853271, 'W_D_1KI': 0.15131421255159097, 'J_D_1KI': 0.004631736892821665} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..3604623 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 4348, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.135539293289185, "TIME_S_1KI": 2.3310807942247433, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 299.55518106460573, "W": 22.140804095253714, "J_1KI": 
68.89493584742543, "W_1KI": 5.092181254658168, "W_D": 3.514804095253716, "J_D": 47.55372806835178, "W_D_1KI": 0.8083726070040745, "J_D_1KI": 0.1859182628804219} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..4918d44 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2414851188659668} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 19, ..., 99982, 99991, + 100000]), + col_indices=tensor([ 302, 1349, 1385, ..., 9083, 9115, 9373]), + values=tensor([0.3908, 0.9700, 0.7778, ..., 0.9299, 0.7856, 0.5693]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.9198, 0.1049, 0.3911, ..., 0.9152, 0.2471, 0.8814]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.2414851188659668 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4348 -ss 10000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.135539293289185} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 23, ..., 99987, 99990, + 100000]), + col_indices=tensor([ 62, 627, 2703, ..., 9273, 9381, 9947]), + values=tensor([0.4329, 0.2872, 0.8964, ..., 0.9783, 0.1219, 0.9101]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8278, 0.7584, 0.9132, ..., 0.6086, 0.4680, 0.0616]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.135539293289185 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 23, ..., 99987, 99990, + 100000]), + col_indices=tensor([ 62, 627, 2703, ..., 9273, 9381, 9947]), + values=tensor([0.4329, 0.2872, 0.8964, ..., 0.9783, 0.1219, 0.9101]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8278, 0.7584, 0.9132, ..., 0.6086, 0.4680, 0.0616]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.135539293289185 seconds + +[20.76, 20.76, 21.0, 21.0, 20.84, 20.76, 20.68, 20.44, 20.6, 20.72] +[20.88, 20.96, 21.56, 22.88, 24.32, 25.12, 25.84, 25.2, 25.2, 24.92, 24.16, 24.04, 24.2] +13.529552936553955 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4348, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.135539293289185, 'TIME_S_1KI': 2.3310807942247433, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 299.55518106460573, 'W': 22.140804095253714} +[20.76, 20.76, 21.0, 21.0, 20.84, 20.76, 20.68, 20.44, 20.6, 20.72, 20.52, 20.44, 20.36, 20.68, 20.84, 20.96, 20.92, 20.68, 20.36, 20.4] +372.52 +18.625999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 4348, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.135539293289185, 'TIME_S_1KI': 2.3310807942247433, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 299.55518106460573, 'W': 22.140804095253714, 'J_1KI': 68.89493584742543, 'W_1KI': 5.092181254658168, 'W_D': 3.514804095253716, 'J_D': 47.55372806835178, 'W_D_1KI': 0.8083726070040745, 'J_D_1KI': 0.1859182628804219} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..1a09527 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 493, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.442772150039673, "TIME_S_1KI": 21.18209361062814, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 317.52540328025816, "W": 21.825092476629965, "J_1KI": 644.0677551323695, "W_1KI": 44.26996445563888, "W_D": 3.2900924766299617, "J_D": 47.86636948227874, "W_D_1KI": 6.673615571257529, "J_D_1KI": 13.536745580644075} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..449d7aa --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, 
"MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.1293396949768066} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 94, 210, ..., 999806, + 999898, 1000000]), + col_indices=tensor([ 197, 225, 349, ..., 9664, 9718, 9909]), + values=tensor([0.2825, 0.4405, 0.0615, ..., 0.4764, 0.3721, 0.7741]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2003, 0.0291, 0.9415, ..., 0.2751, 0.8368, 0.8186]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 2.1293396949768066 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 493 -ss 10000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.442772150039673} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 103, 211, ..., 999799, + 999895, 1000000]), + col_indices=tensor([ 29, 259, 296, ..., 9649, 9833, 9895]), + values=tensor([0.6562, 0.6337, 0.8410, ..., 0.1779, 0.9179, 0.3279]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2837, 0.1453, 0.4499, ..., 0.4322, 0.7993, 0.4344]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.442772150039673 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 103, 211, ..., 999799, + 999895, 1000000]), + col_indices=tensor([ 29, 259, 296, ..., 9649, 9833, 9895]), + values=tensor([0.6562, 0.6337, 0.8410, ..., 0.1779, 0.9179, 0.3279]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2837, 0.1453, 0.4499, ..., 0.4322, 0.7993, 0.4344]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.442772150039673 seconds + +[20.48, 20.72, 20.88, 20.72, 20.92, 20.68, 20.44, 20.48, 20.4, 20.4] +[20.48, 20.48, 20.76, 21.64, 23.08, 24.24, 25.0, 25.48, 24.92, 24.24, 24.2, 24.12, 24.0, 23.96] +14.54863953590393 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 493, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.442772150039673, 'TIME_S_1KI': 21.18209361062814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 317.52540328025816, 'W': 21.825092476629965} +[20.48, 20.72, 20.88, 20.72, 20.92, 20.68, 20.44, 20.48, 20.4, 20.4, 20.6, 20.6, 20.36, 20.32, 20.48, 20.48, 20.64, 20.64, 20.8, 20.8] +370.70000000000005 +18.535000000000004 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 493, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.442772150039673, 'TIME_S_1KI': 21.18209361062814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 317.52540328025816, 'W': 21.825092476629965, 'J_1KI': 644.0677551323695, 'W_1KI': 44.26996445563888, 'W_D': 3.2900924766299617, 'J_D': 47.86636948227874, 'W_D_1KI': 6.673615571257529, 'J_D_1KI': 13.536745580644075} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..ad31b62 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.699479103088379, "TIME_S_1KI": 106.99479103088379, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 372.40043621063234, "W": 23.7347384855202, "J_1KI": 3724.0043621063232, "W_1KI": 237.34738485520202, "W_D": 5.215738485520202, "J_D": 81.83546190547948, "W_D_1KI": 52.15738485520202, "J_D_1KI": 521.5738485520202} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..f0aea1a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.699479103088379} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 534, 1091, ..., 4998975, + 4999490, 5000000]), + col_indices=tensor([ 4, 42, 44, ..., 9941, 9942, 9945]), + values=tensor([0.3387, 0.3479, 0.7697, ..., 0.0992, 0.1573, 0.7910]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0536, 0.4974, 0.9494, ..., 0.5617, 0.8582, 0.7161]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.699479103088379 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 534, 1091, ..., 4998975, + 4999490, 5000000]), + col_indices=tensor([ 4, 42, 44, ..., 9941, 9942, 9945]), + values=tensor([0.3387, 0.3479, 0.7697, ..., 0.0992, 0.1573, 0.7910]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0536, 0.4974, 0.9494, ..., 0.5617, 0.8582, 0.7161]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.699479103088379 seconds + +[20.52, 20.4, 20.48, 20.48, 20.48, 20.64, 20.76, 20.8, 20.8, 20.56] +[20.56, 20.2, 20.28, 23.96, 25.84, 29.28, 30.68, 31.08, 27.28, 26.24, 25.12, 24.52, 24.52, 24.32, 24.2] +15.69010066986084 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.699479103088379, 'TIME_S_1KI': 106.99479103088379, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 372.40043621063234, 'W': 23.7347384855202} +[20.52, 20.4, 20.48, 20.48, 20.48, 20.64, 20.76, 20.8, 20.8, 20.56, 20.6, 20.6, 20.6, 20.68, 20.68, 20.68, 20.56, 20.4, 20.28, 20.44] +370.38 +18.519 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.699479103088379, 'TIME_S_1KI': 106.99479103088379, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 372.40043621063234, 'W': 23.7347384855202, 'J_1KI': 3724.0043621063232, 'W_1KI': 237.34738485520202, 'W_D': 5.215738485520202, 'J_D': 81.83546190547948, 'W_D_1KI': 52.15738485520202, 'J_D_1KI': 521.5738485520202} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..f0b391a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, 
"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.31538224220276, "TIME_S_1KI": 213.1538224220276, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 656.8145585250853, "W": 23.224732449579832, "J_1KI": 6568.145585250853, "W_1KI": 232.24732449579832, "W_D": 4.7157324495798285, "J_D": 133.36479693436596, "W_D_1KI": 47.157324495798285, "J_D_1KI": 471.57324495798287} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..f5c642e --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.31538224220276} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 941, 1920, ..., 9998069, + 9999051, 10000000]), + col_indices=tensor([ 4, 12, 19, ..., 9982, 9986, 9989]), + values=tensor([0.3288, 0.1903, 0.7853, ..., 0.1848, 0.4723, 0.3439]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8250, 0.2999, 0.1337, ..., 0.5908, 0.0422, 0.7676]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 21.31538224220276 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 941, 1920, ..., 9998069, + 9999051, 10000000]), + col_indices=tensor([ 4, 12, 19, ..., 9982, 9986, 9989]), + values=tensor([0.3288, 0.1903, 0.7853, ..., 0.1848, 0.4723, 0.3439]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8250, 0.2999, 0.1337, ..., 0.5908, 0.0422, 0.7676]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 21.31538224220276 seconds + +[20.48, 20.28, 20.4, 20.48, 20.4, 20.6, 20.96, 20.96, 21.04, 20.88] +[20.92, 20.84, 20.88, 24.28, 26.48, 27.96, 29.4, 28.08, 28.12, 26.52, 25.88, 24.92, 24.44, 24.32, 24.56, 24.84, 24.84, 24.96, 24.88, 24.72, 24.72, 24.48, 24.56, 24.28, 24.16, 24.12, 24.48] +28.280823469161987 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.31538224220276, 'TIME_S_1KI': 213.1538224220276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 656.8145585250853, 'W': 23.224732449579832} +[20.48, 20.28, 20.4, 20.48, 20.4, 20.6, 20.96, 20.96, 21.04, 20.88, 20.44, 20.52, 20.68, 20.36, 20.52, 20.4, 20.4, 20.4, 20.52, 20.72] +370.18000000000006 +18.509000000000004 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.31538224220276, 'TIME_S_1KI': 213.1538224220276, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 656.8145585250853, 'W': 23.224732449579832, 'J_1KI': 6568.145585250853, 'W_1KI': 232.24732449579832, 'W_D': 4.7157324495798285, 'J_D': 133.36479693436596, 'W_D_1KI': 47.157324495798285, 'J_D_1KI': 471.57324495798287} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..6cc4360 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.396695137023926, "TIME_S_1KI": 423.96695137023926, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1244.9685798645019, "W": 23.740989470007165, "J_1KI": 12449.68579864502, "W_1KI": 237.40989470007165, "W_D": 5.275989470007168, "J_D": 276.6709081840517, "W_D_1KI": 52.75989470007168, "J_D_1KI": 527.5989470007169} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..8c655a7 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.2 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, 
"MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.396695137023926} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1981, 3970, ..., 19995969, + 19997989, 20000000]), + col_indices=tensor([ 3, 4, 9, ..., 9978, 9982, 9987]), + values=tensor([0.8747, 0.4611, 0.0013, ..., 0.7048, 0.8145, 0.2728]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.1891, 0.5511, 0.0831, ..., 0.7428, 0.4718, 0.5050]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 42.396695137023926 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1981, 3970, ..., 19995969, + 19997989, 20000000]), + col_indices=tensor([ 3, 4, 9, ..., 9978, 9982, 9987]), + values=tensor([0.8747, 0.4611, 0.0013, ..., 0.7048, 0.8145, 0.2728]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.1891, 0.5511, 0.0831, ..., 0.7428, 0.4718, 0.5050]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 42.396695137023926 seconds + +[20.28, 20.48, 20.28, 20.48, 20.68, 20.72, 20.92, 20.84, 20.6, 20.6] +[20.68, 20.48, 20.48, 24.96, 25.96, 28.64, 31.0, 30.08, 29.88, 29.2, 29.04, 27.32, 26.6, 25.64, 24.48, 24.28, 24.48, 24.6, 24.64, 24.64, 24.8, 24.8, 24.96, 24.76, 25.0, 24.92, 24.88, 24.72, 24.76, 24.72, 24.68, 24.72, 24.56, 24.6, 24.6, 24.56, 24.72, 24.8, 24.76, 24.64, 24.64, 24.64, 24.68, 24.56, 24.52, 24.68, 24.44, 24.28, 24.24, 24.04] +52.43962478637695 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.396695137023926, 'TIME_S_1KI': 423.96695137023926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1244.9685798645019, 'W': 23.740989470007165} +[20.28, 20.48, 20.28, 20.48, 20.68, 20.72, 20.92, 20.84, 20.6, 20.6, 20.92, 20.6, 20.68, 20.68, 20.32, 20.36, 20.28, 20.24, 20.16, 20.16] +369.29999999999995 +18.464999999999996 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.396695137023926, 'TIME_S_1KI': 423.96695137023926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1244.9685798645019, 'W': 23.740989470007165, 'J_1KI': 12449.68579864502, 'W_1KI': 237.40989470007165, 'W_D': 5.275989470007168, 'J_D': 276.6709081840517, 'W_D_1KI': 52.75989470007168, 'J_D_1KI': 527.5989470007169} diff --git 
a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..33ea6fd --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 62.99070644378662, "TIME_S_1KI": 629.9070644378662, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1817.8575790786747, "W": 23.739082320260188, "J_1KI": 18178.57579078675, "W_1KI": 237.3908232026019, "W_D": 5.089082320260189, "J_D": 389.7044856929784, "W_D_1KI": 50.890823202601894, "J_D_1KI": 508.908232026019} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..58e2c22 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.3 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 62.99070644378662} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2976, 6021, ..., 29993904, + 29996986, 30000000]), + col_indices=tensor([ 0, 1, 2, ..., 9993, 9995, 9997]), + values=tensor([0.2230, 0.6279, 0.9702, ..., 0.2815, 0.5420, 0.7025]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3830, 0.2972, 0.7622, ..., 0.1887, 0.7379, 0.3841]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 62.99070644378662 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2976, 6021, ..., 29993904, + 29996986, 30000000]), + col_indices=tensor([ 0, 1, 2, ..., 9993, 9995, 9997]), + values=tensor([0.2230, 0.6279, 0.9702, ..., 0.2815, 0.5420, 0.7025]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3830, 0.2972, 0.7622, ..., 0.1887, 0.7379, 0.3841]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 62.99070644378662 seconds + +[20.4, 20.36, 20.44, 20.44, 20.72, 20.96, 20.88, 21.12, 20.72, 20.36] +[20.44, 20.2, 23.44, 25.08, 27.68, 28.88, 31.04, 29.08, 29.08, 29.48, 28.4, 29.16, 28.52, 28.08, 27.8, 26.96, 25.8, 24.96, 24.8, 24.68, 24.76, 24.68, 24.72, 24.68, 24.52, 24.44, 24.4, 24.76, 24.6, 24.88, 24.88, 24.72, 24.44, 24.48, 24.24, 24.32, 24.24, 24.16, 24.36, 24.56, 24.28, 24.24, 24.28, 24.16, 23.76, 23.92, 24.36, 24.4, 24.52, 24.84, 24.76, 24.44, 24.44, 24.24, 24.32, 24.04, 24.08, 24.28, 24.08, 24.08, 24.0, 23.88, 24.04, 24.28, 24.16, 24.4, 24.48, 24.4, 24.48, 24.6, 24.52, 24.52, 24.52] +76.5765733718872 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 62.99070644378662, 'TIME_S_1KI': 629.9070644378662, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1817.8575790786747, 'W': 23.739082320260188} +[20.4, 20.36, 20.44, 20.44, 20.72, 20.96, 20.88, 21.12, 20.72, 20.36, 20.28, 20.84, 20.84, 20.8, 21.0, 21.12, 21.0, 20.52, 20.56, 20.32] +373.0 +18.65 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 62.99070644378662, 'TIME_S_1KI': 629.9070644378662, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1817.8575790786747, 'W': 23.739082320260188, 'J_1KI': 18178.57579078675, 'W_1KI': 237.3908232026019, 'W_D': 5.089082320260189, 'J_D': 389.7044856929784, 'W_D_1KI': 50.890823202601894, 'J_D_1KI': 508.908232026019} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..256b7e3 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 147223, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.998100280761719, "TIME_S_1KI": 0.07470368271779354, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 332.2257844543456, "W": 22.66095026687019, "J_1KI": 2.256616048133414, "W_1KI": 0.15392262259884795, "W_D": 4.3619502668701955, "J_D": 63.94931951642035, "W_D_1KI": 0.029628184909084827, "J_D_1KI": 0.00020124698524744657} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..a769041 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1200 @@ +['apptainer', 'run', '--env', 
'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.015372514724731445} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 1000, 1000]), + col_indices=tensor([3209, 9868, 3248, 6619, 340, 2292, 7833, 3392, 6373, + 1926, 8761, 7309, 7662, 7112, 9220, 3460, 9210, 9337, + 5270, 8671, 5002, 6954, 8836, 761, 7936, 5205, 4423, + 5866, 2140, 76, 8198, 3105, 6063, 2414, 5795, 8249, + 3229, 3225, 6597, 3776, 3375, 2931, 9809, 7037, 3178, + 6061, 4148, 6345, 6554, 2041, 7831, 9356, 1293, 5890, + 3788, 7939, 1779, 945, 7194, 3467, 8405, 3255, 8893, + 1669, 2661, 614, 6554, 8211, 1542, 4830, 2116, 6825, + 4028, 8188, 3362, 1229, 1014, 2629, 60, 4341, 8573, + 344, 2144, 7288, 8591, 1396, 212, 7483, 7941, 134, + 292, 9035, 5218, 3760, 5255, 8326, 5872, 9422, 5064, + 1086, 5137, 505, 3749, 1743, 2035, 8335, 8836, 193, + 9939, 568, 4682, 8836, 9271, 8548, 6366, 5833, 4592, + 9204, 1646, 9941, 8566, 6013, 5398, 3083, 5993, 1547, + 9892, 4995, 9172, 6435, 2837, 813, 4813, 1229, 5022, + 7573, 4340, 3946, 5005, 3375, 4969, 6442, 1642, 4944, + 719, 4401, 9666, 2565, 7859, 2383, 6897, 7942, 2904, + 1047, 3110, 6786, 88, 4488, 8747, 4203, 3652, 6051, + 2302, 7218, 9399, 7374, 5910, 928, 5333, 4890, 6792, + 6062, 9221, 9163, 4174, 2896, 9079, 9463, 4208, 5064, + 7724, 2493, 6553, 5557, 8081, 9830, 2788, 4152, 7234, + 544, 6509, 5692, 1300, 2830, 9380, 1684, 9089, 5782, + 393, 9511, 2834, 554, 2905, 5654, 8350, 1747, 8061, + 3684, 7688, 2628, 4287, 3328, 8038, 7059, 3207, 9644, + 6057, 9467, 5893, 7274, 1133, 4739, 5738, 3748, 7812, + 2444, 9074, 454, 761, 500, 6433, 3779, 8509, 8830, + 6164, 5825, 850, 1492, 145, 499, 2214, 7508, 3582, + 1886, 2390, 8118, 3401, 5635, 7260, 8422, 6823, 6262, + 9571, 4326, 2162, 8956, 5708, 7110, 4681, 462, 9728, + 2261, 2203, 2746, 5158, 8676, 7833, 4522, 1961, 7126, + 5692, 6002, 4837, 8636, 405, 1956, 8123, 5103, 7581, + 269, 8632, 135, 5524, 5948, 6685, 8043, 3706, 935, + 1445, 8185, 7972, 9341, 2312, 3081, 2043, 4519, 1533, + 9679, 8227, 9554, 3652, 824, 8035, 5846, 6458, 8139, + 5279, 1551, 3623, 2541, 6344, 2318, 9360, 8584, 9478, + 5672, 6142, 2286, 8123, 5498, 770, 7176, 456, 9177, + 9809, 2702, 1264, 7605, 5300, 2103, 3058, 2051, 1882, + 6297, 3562, 1383, 6913, 7884, 2698, 3990, 9715, 8693, + 2273, 6508, 3649, 6905, 2527, 2597, 2004, 3902, 8701, + 3289, 7304, 3591, 7249, 5558, 5443, 8985, 9103, 809, + 2290, 2274, 5849, 6456, 6694, 4646, 1044, 3808, 5972, + 584, 9153, 1314, 8076, 1874, 630, 9422, 7201, 7271, + 2554, 8484, 6762, 4360, 9672, 9206, 7238, 670, 261, + 7282, 4415, 8095, 5313, 556, 825, 8440, 6187, 3980, + 9374, 4645, 1791, 2944, 8696, 6211, 8269, 9169, 6448, + 1597, 8204, 1505, 8627, 9214, 6978, 3170, 6053, 7401, + 1622, 5348, 9321, 8871, 4197, 4724, 5006, 1208, 9868, + 5608, 2933, 4739, 2856, 6964, 5457, 9490, 2939, 8482, + 1706, 9462, 
5508, 6207, 2528, 5845, 6687, 1429, 6904, + 6857, 5980, 1821, 3495, 7978, 717, 8671, 579, 3286, + 887, 9751, 6063, 3554, 478, 2346, 2715, 6230, 8314, + 9263, 8757, 6504, 1482, 9283, 6266, 2632, 8114, 9322, + 2614, 4650, 6056, 8492, 3113, 826, 1487, 9140, 7416, + 7679, 9789, 7142, 4923, 1585, 5093, 5772, 2033, 5245, + 8006, 9978, 5857, 1355, 851, 5200, 8304, 5704, 3358, + 8939, 7230, 1375, 6361, 6987, 2890, 847, 284, 5635, + 2950, 5927, 4878, 706, 5188, 2344, 5086, 7880, 7517, + 1759, 3646, 2795, 5560, 4430, 6312, 6067, 4770, 6312, + 7816, 2489, 4516, 4206, 8931, 9933, 7409, 5795, 3224, + 9549, 513, 4869, 3275, 8533, 1434, 7035, 5766, 8355, + 3615, 7273, 4810, 1822, 8987, 5862, 7804, 6291, 3123, + 8324, 910, 3823, 8563, 5832, 1345, 743, 9538, 1425, + 1584, 1245, 4705, 4281, 7546, 4065, 6171, 9291, 990, + 4041, 5597, 6554, 8240, 8238, 6185, 8030, 2085, 8194, + 9631, 1292, 3009, 3267, 1595, 2327, 1125, 1646, 4746, + 1415, 8714, 2232, 7073, 5930, 7011, 55, 989, 2475, + 5544, 2472, 4127, 1839, 5169, 2530, 1956, 7139, 3386, + 8181, 8523, 171, 1301, 5967, 548, 983, 9893, 5816, + 785, 9972, 7080, 7125, 1561, 326, 3497, 769, 7886, + 6549, 7277, 7031, 6240, 4002, 7102, 8726, 916, 4682, + 2949, 4811, 4044, 7673, 4734, 9767, 6661, 3130, 3538, + 3521, 4909, 9932, 861, 6634, 7437, 9719, 9476, 9830, + 4919, 9257, 8438, 5402, 3055, 7641, 9591, 2146, 858, + 7075, 8845, 1624, 3798, 8047, 3425, 4197, 3144, 8268, + 472, 8494, 4913, 2139, 8400, 8497, 4513, 89, 2209, + 8090, 2080, 3370, 2376, 3620, 2461, 6864, 494, 7285, + 1786, 2146, 1376, 4153, 3064, 5331, 3128, 7774, 4797, + 6823, 5809, 7554, 2423, 3053, 5382, 5543, 5683, 1277, + 4836, 9205, 7550, 6908, 2869, 2091, 2867, 5163, 7698, + 8343, 1967, 4007, 9458, 4283, 6112, 5901, 9866, 4663, + 8311, 5474, 5957, 9701, 7202, 5032, 1871, 8242, 1522, + 8481, 762, 5930, 8891, 7093, 2033, 9164, 152, 9198, + 4185, 4093, 4667, 5351, 6201, 7388, 6549, 8266, 7538, + 2670, 6818, 8834, 5685, 2671, 6016, 4286, 9742, 6121, + 4666, 6526, 6512, 259, 7033, 8244, 4223, 1085, 4469, + 1035, 7267, 9736, 8892, 5043, 9002, 4300, 8899, 8431, + 4947, 7717, 9483, 9531, 640, 9735, 136, 1004, 5963, + 2560, 1270, 1063, 6551, 6380, 9051, 6415, 2273, 8966, + 1448, 7975, 7104, 1638, 6895, 2796, 7076, 1607, 8593, + 8650, 4534, 4443, 8898, 3141, 730, 7859, 569, 4715, + 8107, 6041, 7589, 8647, 5268, 5224, 9357, 3430, 8808, + 12, 1062, 6461, 8940, 7168, 7178, 9954, 1483, 4577, + 6145, 3798, 5241, 3768, 4435, 9815, 4630, 4239, 3642, + 7610, 6283, 3620, 9310, 5539, 7589, 7924, 9409, 40, + 5336, 3123, 7751, 1910, 907, 9203, 6319, 755, 6279, + 6919, 5494, 5179, 9028, 1415, 5153, 7002, 7586, 3433, + 5870, 6467, 9059, 2416, 4341, 4715, 6133, 8530, 2254, + 5462, 9186, 7294, 4272, 1845, 4289, 5086, 9957, 5707, + 2498, 2610, 6779, 4816, 5544, 3242, 9052, 5820, 5426, + 475, 6174, 8032, 6228, 3859, 7942, 6852, 6841, 762, + 9084, 1360, 9518, 1399, 9474, 9073, 2698, 3992, 1688, + 6424, 4716, 9100, 9699, 7494, 4411, 8629, 5459, 3464, + 1277, 3163, 4665, 7812, 7685, 5789, 2468, 8083, 8019, + 2880, 7221, 4770, 2039, 4111, 4375, 8217, 9082, 7877, + 2202, 6073, 6164, 7099, 1964, 662, 7796, 1896, 7509, + 6563, 1763, 3395, 2316, 1045, 7898, 8435, 5923, 1093, + 4006, 5474, 2690, 3754, 7989, 9753, 5385, 2389, 2008, + 5223, 2679, 6615, 8933, 1776, 4320, 6999, 5312, 2814, + 7847, 7325, 670, 7203, 6195, 963, 5468, 6227, 8998, + 6214, 993, 9855, 2891, 1973, 3928, 3222, 2306, 3674, + 8093, 6359, 3072, 5055, 3934, 4856, 6132, 9135, 2922, + 5957]), + values=tensor([0.4275, 0.3881, 0.3139, 0.1808, 0.5310, 0.9604, 0.6046, + 
0.8590, 0.0287, 0.6756, 0.8185, 0.6927, 0.6675, 0.9855, + 0.7208, 0.8799, 0.8629, 0.7190, 0.7258, 0.9361, 0.0339, + 0.8077, 0.0414, 0.6703, 0.6580, 0.3675, 0.6750, 0.5718, + 0.4038, 0.0553, 0.0413, 0.2273, 0.5506, 0.5477, 0.5624, + 0.2283, 0.5380, 0.3589, 0.2593, 0.3555, 0.2992, 0.1850, + 0.8180, 0.5232, 0.0988, 0.3425, 0.4717, 0.9349, 0.9395, + 0.6753, 0.8041, 0.9749, 0.8897, 0.6447, 0.0817, 0.5610, + 0.1896, 0.0161, 0.7003, 0.7673, 0.8307, 0.7283, 0.3245, + 0.7473, 0.8098, 0.7438, 0.5987, 0.9483, 0.2473, 0.6059, + 0.8590, 0.0578, 0.9300, 0.1277, 0.1910, 0.5390, 0.9923, + 0.4717, 0.2172, 0.2304, 0.7135, 0.7750, 0.1479, 0.7140, + 0.7426, 0.6959, 0.2516, 0.7713, 0.8673, 0.3328, 0.6887, + 0.0208, 0.8741, 0.0944, 0.9474, 0.4933, 0.1551, 0.9330, + 0.9766, 0.0714, 0.8864, 0.8490, 0.5548, 0.2259, 0.0151, + 0.2924, 0.3046, 0.3078, 0.5126, 0.0910, 0.6552, 0.9529, + 0.7444, 0.5430, 0.3329, 0.7363, 0.3272, 0.2064, 0.4308, + 0.1399, 0.3205, 0.6451, 0.2111, 0.5851, 0.8748, 0.5024, + 0.1107, 0.3502, 0.5846, 0.1250, 0.2751, 0.3449, 0.5057, + 0.2501, 0.6308, 0.3508, 0.6882, 0.9384, 0.9511, 0.0630, + 0.4802, 0.4548, 0.7968, 0.6334, 0.3897, 0.8817, 0.1901, + 0.5186, 0.8390, 0.1266, 0.4062, 0.8053, 0.1020, 0.6893, + 0.0686, 0.2855, 0.7574, 0.2385, 0.1315, 0.1501, 0.7480, + 0.6280, 0.4408, 0.8455, 0.1285, 0.1782, 0.6626, 0.6066, + 0.6225, 0.0217, 0.6914, 0.5402, 0.9870, 0.3813, 0.7033, + 0.7712, 0.4849, 0.9511, 0.0434, 0.4425, 0.9459, 0.2266, + 0.2372, 0.1136, 0.1849, 0.2397, 0.0967, 0.7983, 0.6073, + 0.4963, 0.1922, 0.1740, 0.8024, 0.3263, 0.6368, 0.5308, + 0.5142, 0.6581, 0.3418, 0.3095, 0.4538, 0.9619, 0.2867, + 0.0950, 0.5813, 0.8368, 0.3176, 0.9734, 0.7668, 0.2728, + 0.2680, 0.8861, 0.2565, 0.4795, 0.6296, 0.9507, 0.8958, + 0.6429, 0.7807, 0.0849, 0.2596, 0.2876, 0.2984, 0.4516, + 0.1776, 0.5252, 0.4818, 0.5382, 0.0378, 0.7754, 0.1815, + 0.5299, 0.4358, 0.7701, 0.9552, 0.7604, 0.5343, 0.8461, + 0.3733, 0.4748, 0.5019, 0.5319, 0.1950, 0.4915, 0.8982, + 0.8792, 0.4578, 0.6727, 0.5864, 0.0044, 0.3636, 0.0865, + 0.2878, 0.8889, 0.8029, 0.0564, 0.2779, 0.2583, 0.5323, + 0.1895, 0.5860, 0.2646, 0.8879, 0.0599, 0.9224, 0.7196, + 0.2286, 0.8550, 0.3374, 0.6693, 0.7673, 0.1370, 0.6482, + 0.0168, 0.4774, 0.5008, 0.7572, 0.1368, 0.1470, 0.6304, + 0.9706, 0.7405, 0.7412, 0.6298, 0.9122, 0.5157, 0.2650, + 0.2818, 0.6111, 0.7527, 0.0490, 0.1004, 0.1241, 0.4454, + 0.2894, 0.8228, 0.9204, 0.4938, 0.0564, 0.5710, 0.0400, + 0.2954, 0.8673, 0.1061, 0.7455, 0.9212, 0.6252, 0.7116, + 0.4632, 0.3277, 0.9426, 0.1467, 0.0608, 0.4242, 0.5067, + 0.2902, 0.2721, 0.6498, 0.6839, 0.5643, 0.4679, 0.4103, + 0.1304, 0.0703, 0.8222, 0.8881, 0.0527, 0.9839, 0.0318, + 0.0604, 0.9264, 0.4420, 0.3194, 0.8482, 0.7627, 0.6903, + 0.2123, 0.7648, 0.9926, 0.4948, 0.8455, 0.3602, 0.4169, + 0.8328, 0.4984, 0.5681, 0.5841, 0.1407, 0.5588, 0.2134, + 0.7507, 0.9645, 0.2482, 0.2182, 0.2073, 0.8998, 0.3807, + 0.6886, 0.2991, 0.4671, 0.6025, 0.2544, 0.5519, 0.8787, + 0.3819, 0.8798, 0.3104, 0.1937, 0.3473, 0.6897, 0.2116, + 0.3715, 0.4188, 0.9594, 0.7854, 0.5738, 0.0788, 0.2956, + 0.4393, 0.0205, 0.4497, 0.5799, 0.6998, 0.9907, 0.6955, + 0.3688, 0.5214, 0.6675, 0.4868, 0.9051, 0.2066, 0.5278, + 0.8434, 0.2920, 0.5001, 0.1687, 0.6222, 0.3592, 0.3402, + 0.8386, 0.9190, 0.2841, 0.6388, 0.8916, 0.7093, 0.0676, + 0.8268, 0.3857, 0.7248, 0.8444, 0.9262, 0.3228, 0.1858, + 0.0541, 0.0147, 0.3726, 0.7119, 0.3771, 0.7018, 0.0498, + 0.9764, 0.7213, 0.6857, 0.9076, 0.6930, 0.9925, 0.6796, + 0.9099, 0.8957, 0.7847, 0.7694, 0.5255, 0.5450, 0.8537, + 0.1109, 0.6083, 
0.2595, 0.0732, 0.5598, 0.3732, 0.8030, + 0.2943, 0.5044, 0.8687, 0.1569, 0.7485, 0.8397, 0.0563, + 0.7580, 0.1302, 0.3220, 0.5738, 0.3028, 0.8634, 0.5299, + 0.5384, 0.7185, 0.0740, 0.0896, 0.9393, 0.1708, 0.1684, + 0.2457, 0.7029, 0.1009, 0.4980, 0.2762, 0.9245, 0.0022, + 0.2297, 0.4398, 0.5470, 0.7673, 0.7132, 0.8762, 0.6237, + 0.6368, 0.1300, 0.5274, 0.0913, 0.3842, 0.9630, 0.8184, + 0.7355, 0.5099, 0.4932, 0.6527, 0.8872, 0.4640, 0.0588, + 0.0306, 0.0236, 0.7435, 0.9265, 0.3932, 0.2464, 0.9514, + 0.8510, 0.0383, 0.3895, 0.0953, 0.4855, 0.1771, 0.7719, + 0.8612, 0.3632, 0.8694, 0.4383, 0.3873, 0.3076, 0.9881, + 0.4672, 0.6409, 0.8009, 0.0756, 0.2586, 0.8768, 0.1936, + 0.7249, 0.5557, 0.8572, 0.7169, 0.0050, 0.3295, 0.4291, + 0.4545, 0.4668, 0.5083, 0.2008, 0.3836, 0.4073, 0.4304, + 0.5879, 0.4421, 0.6393, 0.9889, 0.4650, 0.1876, 0.0798, + 0.1449, 0.8117, 0.9255, 0.7918, 0.1406, 0.4863, 0.3651, + 0.6067, 0.0364, 0.4333, 0.7822, 0.1643, 0.8343, 0.6051, + 0.6308, 0.5806, 0.0642, 0.7919, 0.6050, 0.3368, 0.7779, + 0.6713, 0.9857, 0.2853, 0.6567, 0.0967, 0.1455, 0.3512, + 0.6778, 0.9504, 0.2678, 0.9544, 0.9340, 0.7942, 0.7527, + 0.8358, 0.7805, 0.5659, 0.6318, 0.6349, 0.7939, 0.9928, + 0.5106, 0.8033, 0.2636, 0.1925, 0.5894, 0.1231, 0.2221, + 0.7313, 0.2019, 0.1380, 0.7432, 0.1575, 0.1695, 0.9626, + 0.8004, 0.5557, 0.4175, 0.9737, 0.1028, 0.5973, 0.2245, + 0.1568, 0.0148, 0.5338, 0.5531, 0.7419, 0.6821, 0.6004, + 0.5075, 0.2898, 0.0572, 0.6541, 0.9867, 0.3169, 0.1091, + 0.3007, 0.8230, 0.1327, 0.9676, 0.8384, 0.4791, 0.8642, + 0.9498, 0.1113, 0.9321, 0.2886, 0.8491, 0.4298, 0.3001, + 0.9524, 0.4168, 0.8559, 0.9356, 0.3367, 0.3858, 0.3719, + 0.5105, 0.6573, 0.6925, 0.8585, 0.6425, 0.0422, 0.4540, + 0.3811, 0.7744, 0.8488, 0.8536, 0.4800, 0.9205, 0.8820, + 0.7503, 0.7388, 0.7461, 0.1778, 0.5414, 0.8666, 0.9751, + 0.7892, 0.7263, 0.7718, 0.9065, 0.4577, 0.3046, 0.6991, + 0.4446, 0.7928, 0.1473, 0.3983, 0.1582, 0.8541, 0.4231, + 0.5913, 0.3400, 0.8006, 0.4155, 0.5300, 0.2295, 0.5802, + 0.4966, 0.0158, 0.5097, 0.7507, 0.4467, 0.6603, 0.6281, + 0.1152, 0.6784, 0.7820, 0.0012, 0.3761, 0.8898, 0.3791, + 0.9433, 0.1967, 0.8005, 0.7968, 0.3696, 0.5243, 0.0864, + 0.5603, 0.6699, 0.3078, 0.6225, 0.0752, 0.2391, 0.9255, + 0.2650, 0.4673, 0.0102, 0.8488, 0.0879, 0.5946, 0.5603, + 0.8401, 0.0314, 0.3946, 0.8748, 0.3681, 0.9390, 0.1692, + 0.0070, 0.7205, 0.1654, 0.3264, 0.1626, 0.1313, 0.4264, + 0.8391, 0.2851, 0.5610, 0.8789, 0.5540, 0.4573, 0.8763, + 0.8174, 0.9396, 0.8320, 0.3091, 0.2567, 0.5267, 0.9107, + 0.4557, 0.5158, 0.6588, 0.7481, 0.3435, 0.5032, 0.1494, + 0.2718, 0.9923, 0.0652, 0.5633, 0.2524, 0.1153, 0.8668, + 0.0841, 0.4940, 0.8535, 0.3145, 0.1873, 0.9043, 0.8521, + 0.2257, 0.4809, 0.4740, 0.4980, 0.5276, 0.9017, 0.9235, + 0.9432, 0.5306, 0.4201, 0.2755, 0.9923, 0.8605, 0.1183, + 0.9099, 0.3224, 0.9282, 0.7819, 0.8800, 0.4258, 0.6844, + 0.1727, 0.5079, 0.3511, 0.1414, 0.6247, 0.1502, 0.3955, + 0.8356, 0.2976, 0.3207, 0.8266, 0.7924, 0.8054, 0.2943, + 0.7693, 0.0348, 0.4802, 0.5128, 0.1271, 0.4537, 0.3203, + 0.9071, 0.7626, 0.0279, 0.0653, 0.7417, 0.8793, 0.3925, + 0.3770, 0.0111, 0.1162, 0.8992, 0.1358, 0.0867, 0.3383, + 0.0706, 0.8072, 0.4835, 0.2675, 0.0759, 0.3658, 0.8759, + 0.2284, 0.3000, 0.0818, 0.5543, 0.6464, 0.9176, 0.5691, + 0.4971, 0.2282, 0.3215, 0.4138, 0.8085, 0.5363, 0.5913, + 0.1304, 0.2349, 0.6791, 0.2305, 0.3800, 0.4937, 0.6027, + 0.6534, 0.4279, 0.4943, 0.0085, 0.1579, 0.4565, 0.1248, + 0.0451, 0.7534, 0.2521, 0.6142, 0.7396, 0.5206, 0.5054, + 0.5613, 0.0793, 0.7518, 0.6824, 
0.6628, 0.6832, 0.8860, + 0.4838, 0.1172, 0.2289, 0.9153, 0.1052, 0.5633, 0.2296, + 0.0191, 0.7705, 0.1415, 0.7241, 0.1302, 0.8941, 0.8284, + 0.8306, 0.6027, 0.1602, 0.9703, 0.1293, 0.7369, 0.5368, + 0.4238, 0.6523, 0.3576, 0.1197, 0.1318, 0.1788, 0.5367, + 0.3701, 0.7941, 0.1745, 0.1459, 0.5116, 0.3561, 0.4053, + 0.8269, 0.1280, 0.7759, 0.6886, 0.1888, 0.5706, 0.1161, + 0.8356, 0.4561, 0.7736, 0.4631, 0.3148, 0.8575, 0.0637, + 0.0338, 0.5995, 0.4671, 0.3925, 0.0599, 0.4135, 0.6334, + 0.0196, 0.1795, 0.1793, 0.0102, 0.8458, 0.5380, 0.4059, + 0.0568, 0.6315, 0.2073, 0.7972, 0.9797, 0.0690, 0.3810, + 0.7467, 0.1427, 0.9719, 0.3101, 0.6784, 0.3571, 0.7329, + 0.6992, 0.2961, 0.7605, 0.3985, 0.1295, 0.8951, 0.5622, + 0.2362, 0.0149, 0.3226, 0.8306, 0.2548, 0.2532, 0.8888, + 0.1434, 0.1482, 0.2179, 0.8422, 0.6150, 0.7603, 0.2665, + 0.8673, 0.3035, 0.1706, 0.2689, 0.9151, 0.1045, 0.0141, + 0.4861, 0.1566, 0.2750, 0.3958, 0.0336, 0.7854, 0.0838, + 0.7202, 0.5357, 0.1356, 0.8048, 0.1340, 0.0716, 0.5308, + 0.4959, 0.3349, 0.8036, 0.6913, 0.6107, 0.4431, 0.5386, + 0.3116, 0.0546, 0.1399, 0.8659, 0.1354, 0.9518]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4774, 0.5622, 0.8471, ..., 0.2935, 0.6471, 0.0068]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.015372514724731445 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 68303 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.871366500854492} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([6744, 9183, 5203, 6638, 1533, 7743, 539, 8215, 8490, + 4534, 6346, 2886, 815, 690, 4659, 7859, 4960, 9599, + 7211, 9102, 9352, 9158, 5228, 349, 9466, 1784, 6758, + 6019, 4222, 3313, 4202, 6284, 5941, 3644, 8527, 621, + 8978, 2864, 4741, 937, 3040, 5951, 4377, 2752, 2224, + 833, 9594, 8371, 4644, 3164, 5751, 2168, 7735, 2026, + 7627, 2921, 3825, 1318, 5894, 9816, 8373, 6219, 7761, + 770, 6016, 7731, 2607, 3685, 9115, 9936, 4556, 2302, + 1032, 5304, 9652, 9315, 2299, 8095, 2227, 9852, 7527, + 7548, 5459, 1914, 4627, 9758, 4418, 5645, 5335, 1474, + 5, 4325, 1166, 5758, 8037, 4831, 7864, 4621, 1408, + 7991, 7361, 3430, 5370, 9921, 6713, 3837, 9935, 1916, + 9036, 5612, 1786, 9554, 5873, 9290, 5803, 8105, 7749, + 2495, 2472, 8808, 5958, 6237, 7275, 4348, 4709, 7618, + 2723, 835, 9714, 1090, 6330, 6812, 6188, 232, 626, + 445, 8400, 57, 7784, 9047, 5661, 8636, 2320, 4471, + 9589, 9560, 5053, 2465, 7391, 8282, 7210, 2128, 3549, + 2704, 7346, 5039, 6538, 8120, 953, 8729, 5862, 8226, + 4137, 112, 6745, 8084, 376, 7059, 6560, 8487, 4693, + 10, 5925, 5522, 7046, 5332, 5096, 8815, 7290, 8154, + 1699, 3419, 3972, 7305, 963, 699, 8976, 8405, 1750, + 8383, 6396, 7366, 2223, 7026, 1722, 7778, 173, 7465, + 1026, 2239, 9167, 4227, 3336, 8659, 6898, 264, 1819, + 4526, 9903, 3299, 8471, 2338, 5008, 6034, 5255, 1016, + 2476, 3281, 9291, 6915, 6157, 3429, 6616, 5294, 1960, + 3849, 6756, 6524, 9676, 1278, 2144, 8034, 9702, 8689, + 7428, 2101, 1606, 8926, 2565, 4025, 4286, 9944, 6729, + 1860, 6272, 6011, 1865, 1480, 6982, 593, 5077, 8793, + 8373, 2645, 7072, 2995, 422, 4753, 1315, 8695, 9479, + 4749, 7734, 6063, 7096, 9419, 8427, 6287, 6628, 3423, + 6286, 7303, 6136, 8068, 796, 3097, 1701, 3836, 8310, + 9629, 9555, 6582, 1297, 7896, 2598, 2280, 4630, 1294, + 7175, 3070, 3321, 7083, 6930, 8188, 3776, 9884, 1450, + 5587, 7988, 8659, 5326, 9433, 9194, 1815, 1042, 9718, + 3878, 4772, 7012, 9395, 8269, 9270, 1629, 2660, 209, + 5712, 932, 1485, 5192, 4238, 6353, 6193, 5165, 3026, + 3281, 2398, 4990, 8451, 5995, 2437, 7572, 9348, 2438, + 9680, 6106, 4164, 7478, 3249, 3727, 1957, 9143, 6126, + 1831, 4451, 8075, 4026, 2735, 8626, 4378, 205, 5899, + 1940, 1951, 2036, 8343, 8308, 8732, 8451, 6204, 7090, + 1801, 4175, 4396, 4659, 2284, 3194, 7493, 7149, 2323, + 5044, 3306, 5547, 5456, 1851, 4655, 6488, 9141, 7070, + 4125, 1093, 5396, 7738, 2079, 8139, 738, 1474, 9389, + 8473, 6944, 9388, 9801, 3310, 4840, 8737, 2836, 7124, + 5736, 8458, 7851, 2445, 4348, 2958, 1500, 7079, 3361, + 2917, 5609, 9694, 7338, 2003, 6819, 9301, 8759, 3668, + 4230, 9606, 5045, 1392, 3446, 5031, 2339, 7379, 9413, + 7019, 1575, 9646, 9892, 4175, 7521, 9210, 8996, 9856, + 2105, 6891, 2516, 7644, 6342, 5591, 4607, 8370, 2267, + 2178, 98, 4758, 5716, 8549, 2672, 3367, 9687, 6760, + 9110, 1378, 9958, 2062, 8086, 5488, 6201, 5121, 5155, + 4299, 3592, 4569, 7674, 9743, 9715, 5859, 5982, 9157, + 5766, 746, 379, 1868, 3707, 4085, 3960, 6014, 1918, + 7302, 5034, 8923, 4358, 3196, 5464, 5160, 7466, 5520, + 4012, 4775, 1536, 4418, 7081, 2030, 8186, 3043, 4489, + 6982, 4291, 14, 4887, 277, 3371, 7617, 8246, 1976, + 1779, 4401, 3966, 5238, 2266, 3849, 2641, 4879, 9073, + 3290, 4069, 7474, 232, 4401, 1411, 2648, 6385, 9602, + 1030, 9986, 4068, 265, 1296, 9722, 4721, 3794, 7224, + 9214, 2941, 4786, 6676, 2620, 6240, 7532, 6528, 3667, + 6404, 7204, 9481, 2749, 555, 4908, 3755, 47, 7892, + 8014, 5573, 4646, 8048, 9408, 4239, 7085, 
7339, 2213, + 9590, 4261, 4369, 1661, 5901, 621, 7189, 453, 6633, + 1412, 1040, 3670, 8460, 7301, 8172, 5431, 8655, 8445, + 5264, 1982, 6719, 6488, 9538, 9776, 4381, 2280, 1223, + 2336, 4148, 8508, 585, 9480, 5943, 6989, 3101, 8689, + 8256, 6028, 1081, 8748, 6924, 5582, 9219, 1164, 9719, + 6904, 9219, 3348, 1248, 2380, 4837, 6421, 8759, 8176, + 2413, 9241, 6280, 795, 6836, 6623, 3295, 5487, 113, + 562, 6970, 5028, 2406, 8295, 8368, 3385, 4212, 9418, + 4453, 6440, 4796, 4189, 429, 199, 5828, 1302, 4275, + 3705, 6507, 9466, 8720, 636, 9800, 6974, 9561, 3002, + 6294, 6645, 1538, 238, 807, 3626, 6120, 5716, 8600, + 4318, 5792, 1943, 5603, 2143, 3104, 3061, 862, 2604, + 9101, 6695, 4250, 6121, 3993, 5656, 3094, 4118, 5827, + 4887, 1061, 5947, 6676, 6374, 4231, 1551, 240, 1173, + 8389, 1604, 5288, 4594, 2461, 5134, 2250, 9472, 1501, + 6338, 6393, 5061, 6467, 9017, 111, 4430, 4906, 4617, + 7659, 3038, 9083, 8874, 4785, 6903, 4936, 1238, 8369, + 183, 8286, 1773, 4890, 3477, 1143, 6106, 7346, 5613, + 5765, 1348, 576, 3940, 2554, 5492, 9590, 603, 7762, + 563, 6706, 3669, 5773, 6097, 3721, 8648, 2328, 9556, + 4798, 605, 5091, 7028, 8465, 606, 8771, 9883, 2697, + 1661, 8326, 5685, 5164, 2959, 1796, 6376, 4139, 9534, + 1259, 5224, 6687, 803, 432, 6625, 1457, 844, 9575, + 7195, 2325, 5841, 4426, 9449, 829, 1474, 6946, 23, + 4915, 4070, 2967, 6725, 2677, 2638, 219, 4785, 1030, + 4914, 3439, 8781, 4947, 6034, 2961, 6021, 103, 3443, + 9818, 844, 8744, 4287, 6940, 9206, 8132, 9639, 4328, + 6379, 4109, 7287, 5872, 2968, 8194, 8179, 2332, 2296, + 5350, 3926, 8242, 8552, 4727, 2202, 387, 3297, 1857, + 3876, 1691, 9764, 1422, 229, 1091, 1690, 730, 2018, + 7618, 1303, 3323, 7404, 3433, 488, 4748, 2180, 1314, + 6350, 9436, 3086, 9808, 6299, 1930, 3547, 4010, 3605, + 6165, 956, 5351, 421, 9662, 1173, 8605, 7930, 2736, + 6953, 2718, 9042, 3741, 7757, 1552, 9205, 1650, 599, + 5595, 4263, 1103, 7418, 9241, 8601, 1744, 7642, 515, + 9006, 3083, 8693, 9032, 5906, 7000, 2551, 3028, 3835, + 5449, 4937, 2426, 8599, 3062, 2693, 9800, 9210, 5275, + 3360, 3233, 6476, 4125, 6253, 8942, 1537, 467, 9364, + 2729, 7910, 1994, 1148, 3863, 8051, 6394, 8157, 1640, + 8390, 8149, 5412, 2816, 3795, 8617, 9981, 2763, 1764, + 676, 9771, 7442, 4680, 2279, 1341, 3408, 6649, 5478, + 8223, 5047, 7129, 5067, 1428, 6922, 7376, 493, 3030, + 127, 9415, 1410, 4169, 9819, 3335, 4738, 3017, 486, + 451, 4170, 208, 4490, 1163, 5281, 8361, 7624, 6925, + 1101, 1934, 2095, 7683, 109, 688, 2415, 8688, 4906, + 47, 1639, 3155, 9545, 9361, 5321, 5522, 1318, 4944, + 3639, 4692, 6598, 5731, 4104, 2490, 6697, 6146, 6141, + 8631, 4236, 8812, 5534, 9172, 2357, 694, 5351, 3729, + 8488, 2621, 5018, 1297, 5214, 2847, 2659, 9771, 3304, + 3414]), + values=tensor([8.7997e-01, 2.8521e-01, 4.5005e-01, 5.0855e-01, + 3.8634e-03, 5.7734e-01, 3.8405e-01, 9.4486e-01, + 3.5202e-01, 7.6382e-01, 4.7786e-01, 6.9793e-01, + 5.0912e-01, 6.3767e-03, 6.9119e-01, 4.0181e-01, + 7.6100e-01, 8.5859e-01, 3.6709e-01, 7.7781e-01, + 4.3558e-01, 2.5672e-01, 7.1189e-01, 8.1200e-01, + 3.4793e-01, 3.2184e-01, 1.1386e-01, 6.1289e-01, + 8.2371e-01, 1.9096e-01, 1.8303e-01, 1.8841e-01, + 5.0087e-01, 2.5093e-02, 6.6238e-03, 6.4069e-01, + 1.1422e-01, 6.7509e-01, 2.8931e-01, 9.6964e-01, + 2.4089e-01, 4.4554e-01, 3.2641e-01, 4.3071e-01, + 6.5669e-02, 3.6619e-01, 6.9264e-01, 7.7302e-01, + 6.5551e-01, 4.8090e-02, 3.4286e-01, 6.4302e-02, + 9.6403e-01, 7.7081e-01, 6.0460e-01, 4.8930e-01, + 7.6331e-01, 6.1378e-01, 7.4581e-01, 6.4943e-01, + 6.6800e-01, 9.2555e-01, 9.3781e-01, 7.1424e-01, + 3.9480e-01, 
7.6845e-01, 9.4928e-01, 1.6757e-02, + 7.3400e-01, 5.1327e-01, 7.3558e-01, 1.5695e-01, + 6.7293e-01, 9.5435e-01, 9.1287e-01, 2.5381e-02, + 8.4456e-01, 7.6779e-01, 1.3407e-01, 4.9738e-01, + 3.9647e-01, 5.3527e-01, 7.3173e-01, 3.2717e-01, + 2.7642e-01, 1.7541e-01, 8.7538e-01, 5.3714e-01, + 4.0317e-01, 9.6258e-01, 4.9134e-01, 5.2103e-01, + 6.3584e-01, 7.8266e-01, 6.2083e-01, 7.1376e-01, + 5.7960e-01, 5.3449e-01, 7.5515e-01, 3.9838e-01, + 9.3057e-02, 7.9195e-01, 9.9938e-02, 3.2785e-01, + 4.1981e-01, 4.7350e-01, 2.9521e-01, 7.2738e-01, + 7.1675e-01, 9.1113e-01, 6.8622e-01, 3.1104e-01, + 4.7595e-01, 5.6348e-02, 6.2878e-01, 8.8733e-01, + 8.3583e-01, 3.5424e-01, 7.4340e-01, 6.9196e-01, + 8.6307e-01, 4.9523e-01, 1.9610e-01, 4.7483e-01, + 5.7196e-01, 5.1546e-01, 7.5100e-01, 5.9452e-01, + 2.3036e-01, 2.6958e-01, 5.3660e-01, 1.1357e-01, + 1.4815e-01, 4.3323e-01, 7.8068e-01, 7.5669e-01, + 5.1025e-01, 8.9680e-01, 2.9991e-01, 9.5303e-01, + 6.5093e-01, 1.8516e-01, 6.6969e-02, 1.1082e-01, + 8.5796e-02, 1.8005e-01, 8.5690e-02, 4.0885e-01, + 8.5941e-01, 5.4485e-02, 8.5646e-01, 9.8199e-01, + 5.0636e-01, 4.3617e-01, 5.9529e-01, 5.7098e-01, + 7.3113e-01, 9.0655e-01, 4.7307e-01, 6.9439e-01, + 9.5960e-01, 2.8637e-01, 4.1165e-01, 3.5498e-01, + 2.9302e-01, 7.1891e-01, 7.9570e-01, 5.6832e-01, + 7.0477e-01, 2.7348e-01, 9.5003e-01, 2.2469e-01, + 2.5175e-01, 4.8973e-01, 5.9989e-01, 5.8269e-01, + 7.9838e-01, 5.5642e-01, 6.1809e-01, 6.7618e-01, + 8.2227e-01, 5.4224e-01, 1.0585e-01, 3.0707e-01, + 8.6384e-01, 8.3680e-01, 8.1983e-01, 5.4882e-01, + 6.7569e-01, 4.0288e-01, 9.5827e-01, 6.4296e-01, + 4.5776e-01, 4.7685e-01, 4.7969e-01, 9.1368e-01, + 2.5314e-01, 8.9231e-01, 6.5064e-01, 1.3463e-01, + 7.7634e-02, 7.6612e-01, 7.6132e-01, 4.5487e-01, + 8.1562e-01, 3.9770e-01, 3.7874e-01, 9.8044e-01, + 2.9015e-01, 2.3310e-01, 8.9180e-02, 3.3477e-01, + 7.4886e-01, 7.7876e-01, 5.7465e-01, 6.7747e-01, + 6.3826e-01, 6.8923e-01, 2.2027e-01, 9.2721e-01, + 6.5802e-01, 8.6034e-01, 6.7590e-01, 3.0092e-01, + 5.0367e-01, 5.2007e-01, 5.4932e-01, 9.2163e-01, + 1.5539e-01, 8.3302e-01, 6.3467e-01, 6.7336e-01, + 9.6415e-01, 7.3161e-01, 5.1727e-01, 9.6905e-01, + 8.0804e-01, 8.6858e-02, 5.0096e-01, 9.7323e-01, + 5.0529e-01, 6.9268e-01, 8.5286e-01, 6.1778e-01, + 9.8231e-01, 3.3323e-01, 8.6429e-01, 7.8369e-02, + 4.1466e-01, 9.1520e-01, 5.0481e-01, 6.4411e-01, + 2.9477e-01, 8.0817e-01, 9.8288e-01, 3.3812e-01, + 2.4965e-01, 9.9967e-01, 9.0376e-01, 5.8973e-01, + 7.8162e-01, 6.5235e-01, 1.7058e-01, 4.2443e-01, + 6.6975e-01, 9.8723e-01, 6.7750e-01, 1.4445e-02, + 4.8293e-01, 3.9921e-01, 1.6541e-01, 3.0431e-01, + 7.0151e-01, 4.4106e-01, 3.5106e-02, 1.8337e-01, + 2.3382e-01, 2.3108e-01, 9.0945e-01, 9.0869e-01, + 2.5770e-01, 1.1305e-01, 3.0215e-01, 5.6599e-01, + 4.9016e-01, 1.0358e-01, 6.5084e-01, 2.1980e-01, + 2.7526e-01, 6.8575e-01, 3.8685e-01, 3.3360e-01, + 9.2841e-01, 2.3845e-01, 6.0353e-01, 4.7893e-01, + 2.1612e-01, 4.9916e-02, 6.1017e-01, 4.1103e-01, + 6.8532e-01, 8.6455e-02, 6.1925e-01, 1.9289e-01, + 2.1648e-01, 4.0255e-02, 7.4462e-01, 8.3120e-01, + 5.4511e-01, 9.3700e-02, 3.9437e-01, 7.9637e-01, + 5.2834e-01, 6.5519e-01, 9.8216e-01, 7.8679e-01, + 5.1865e-01, 2.6188e-01, 3.8483e-01, 1.0953e-01, + 6.0364e-01, 6.2100e-01, 2.7577e-01, 3.8468e-01, + 5.9031e-01, 1.4769e-01, 3.2718e-01, 7.6561e-01, + 7.0164e-01, 4.9655e-01, 8.9997e-01, 8.1649e-02, + 8.3568e-01, 3.7404e-01, 5.7489e-01, 4.8879e-01, + 3.3945e-01, 6.4600e-01, 3.3785e-02, 9.3062e-01, + 8.0442e-01, 4.2287e-01, 8.8652e-01, 2.5031e-01, + 5.4172e-02, 8.0831e-01, 9.0044e-01, 2.0783e-01, + 2.6264e-02, 
6.3673e-01, 3.1357e-01, 1.3635e-02, + 9.4946e-01, 8.7749e-01, 6.3987e-01, 3.0051e-01, + 3.8978e-01, 3.9192e-01, 7.7167e-01, 2.5028e-01, + 6.3457e-01, 2.2326e-01, 2.1021e-01, 4.9092e-01, + 5.6111e-01, 6.4954e-01, 8.0426e-01, 4.5267e-02, + 7.2168e-01, 5.1283e-04, 9.1136e-01, 1.2495e-01, + 4.4895e-01, 9.3184e-01, 2.2892e-01, 5.9504e-01, + 5.6472e-02, 2.9795e-01, 4.0327e-01, 5.5233e-02, + 7.8699e-01, 9.1170e-02, 4.2592e-01, 2.8865e-01, + 9.5409e-01, 7.2826e-01, 6.7525e-01, 7.3618e-01, + 3.4218e-01, 6.9883e-01, 7.1871e-01, 2.0906e-01, + 9.9500e-01, 9.5206e-01, 1.0883e-01, 7.8057e-01, + 4.1855e-01, 1.7458e-01, 9.7395e-01, 3.9682e-01, + 7.6878e-01, 7.3827e-01, 2.9447e-01, 4.1030e-01, + 7.1074e-01, 4.2242e-01, 6.9407e-02, 1.0676e-01, + 3.7330e-01, 7.8475e-02, 2.3566e-01, 3.3687e-01, + 8.9509e-01, 5.6818e-01, 8.2692e-02, 2.1473e-01, + 2.7932e-01, 3.2494e-01, 9.5931e-01, 5.0787e-01, + 5.3320e-01, 8.9201e-01, 9.8617e-01, 3.2344e-01, + 7.5618e-01, 5.6291e-01, 1.7829e-01, 7.5177e-01, + 1.7789e-01, 5.7880e-01, 9.0026e-01, 6.1347e-03, + 1.1215e-01, 8.8491e-01, 7.7699e-02, 6.0626e-01, + 9.8867e-01, 4.0378e-01, 8.6001e-01, 4.4491e-01, + 3.1045e-02, 1.0014e-01, 8.3254e-01, 7.2285e-01, + 8.8978e-01, 3.3346e-01, 5.5897e-01, 7.5235e-01, + 3.0218e-01, 9.7073e-01, 8.0571e-01, 6.3659e-01, + 7.4468e-01, 8.8894e-02, 3.8364e-01, 8.3517e-01, + 7.1046e-01, 8.2789e-02, 9.9635e-01, 1.9280e-01, + 3.2091e-01, 6.0294e-01, 9.4375e-01, 7.4749e-01, + 6.7696e-01, 4.1623e-01, 7.4117e-02, 5.0351e-01, + 7.9718e-01, 7.1889e-01, 5.1471e-01, 7.8589e-01, + 9.1567e-01, 5.0537e-01, 1.4661e-01, 2.0153e-01, + 1.9491e-01, 6.4695e-01, 9.8462e-01, 6.0265e-01, + 2.7637e-01, 3.9293e-01, 1.7625e-01, 8.0870e-01, + 2.1159e-01, 5.9264e-01, 5.7012e-01, 5.9849e-01, + 5.3945e-01, 9.0767e-01, 5.0641e-01, 8.2994e-02, + 4.1407e-01, 8.5969e-01, 7.5631e-01, 9.7899e-01, + 3.0245e-01, 9.5165e-01, 5.6284e-01, 5.8693e-01, + 1.3760e-01, 8.9978e-01, 8.0792e-01, 6.6211e-01, + 5.6306e-01, 2.5657e-01, 7.2956e-01, 3.7115e-01, + 5.1520e-01, 8.2467e-01, 7.5545e-01, 9.4052e-01, + 9.7952e-01, 6.6546e-01, 7.8683e-01, 7.5895e-02, + 8.2766e-01, 9.3261e-01, 8.4123e-01, 7.0903e-01, + 8.6546e-01, 9.9407e-01, 5.9707e-01, 6.4446e-01, + 3.2013e-01, 4.5177e-01, 7.2474e-01, 9.5717e-01, + 9.2961e-01, 1.8631e-02, 1.2011e-01, 8.7293e-01, + 5.0934e-01, 1.2118e-01, 4.5119e-01, 3.9655e-01, + 9.6740e-01, 7.3842e-01, 5.3682e-01, 1.5135e-01, + 6.6126e-01, 6.5927e-01, 1.1407e-01, 4.1038e-01, + 1.6834e-01, 8.2535e-01, 7.0347e-02, 2.7263e-01, + 4.2171e-01, 2.8301e-01, 8.8235e-02, 2.2241e-01, + 4.6000e-01, 6.6548e-01, 1.8844e-01, 6.6034e-01, + 2.1227e-02, 3.8783e-02, 3.3827e-01, 4.7088e-01, + 9.7627e-01, 6.2968e-01, 6.3522e-01, 2.4334e-01, + 1.9212e-01, 1.3712e-01, 1.2686e-01, 3.0093e-01, + 9.4777e-01, 2.4785e-01, 9.8636e-01, 9.3583e-01, + 3.6026e-01, 2.2137e-01, 6.1133e-01, 9.1866e-02, + 3.6103e-01, 1.7489e-01, 4.5825e-01, 7.3822e-01, + 1.1665e-01, 3.6889e-01, 7.8219e-01, 1.6535e-01, + 7.7655e-01, 4.3396e-02, 4.5151e-01, 9.3061e-01, + 3.4963e-01, 6.3379e-01, 3.0356e-01, 7.9136e-01, + 2.1718e-01, 4.5984e-01, 8.5264e-01, 2.2071e-01, + 5.8333e-01, 3.7356e-01, 2.4578e-02, 6.6263e-01, + 7.4748e-01, 2.1894e-01, 2.5251e-01, 4.2144e-01, + 8.0678e-01, 1.2904e-01, 8.8791e-01, 2.4954e-01, + 5.5182e-01, 8.3078e-01, 8.0810e-01, 9.4817e-01, + 8.9573e-01, 5.1727e-01, 3.6179e-01, 3.5744e-01, + 8.1063e-01, 6.3401e-01, 8.4475e-01, 4.6719e-01, + 4.7935e-01, 8.8285e-01, 3.5260e-01, 8.1538e-01, + 2.9803e-01, 7.1210e-01, 9.6602e-01, 4.3768e-01, + 6.0388e-01, 7.5485e-01, 1.6097e-01, 7.3941e-01, + 1.2583e-01, 
3.7349e-01, 2.7473e-02, 5.5611e-01, + 9.1308e-01, 4.4818e-01, 4.5070e-01, 3.2069e-01, + 1.4256e-01, 4.3464e-01, 2.5253e-01, 3.2121e-01, + 3.3224e-01, 3.9011e-01, 7.1556e-01, 5.7877e-01, + 8.0467e-01, 6.7205e-01, 1.0771e-01, 2.8920e-01, + 5.6940e-02, 4.6611e-02, 8.5119e-01, 7.0781e-01, + 1.4819e-01, 2.7250e-01, 8.7019e-01, 4.3230e-01, + 2.6507e-01, 9.2497e-01, 2.6065e-01, 6.7056e-01, + 4.7623e-01, 7.7408e-02, 7.8815e-01, 7.9083e-01, + 4.0135e-01, 4.4360e-01, 5.6498e-01, 2.1514e-01, + 4.4589e-01, 7.5491e-01, 9.7064e-01, 5.8939e-02, + 5.4940e-01, 7.9838e-01, 7.1091e-01, 2.8896e-01, + 8.1053e-01, 8.6197e-02, 9.4069e-01, 4.2678e-01, + 7.9980e-01, 3.9927e-01, 7.9838e-01, 7.2181e-01, + 6.1672e-01, 4.3699e-01, 1.9963e-01, 1.9977e-01, + 5.4157e-01, 6.4055e-01, 5.1059e-01, 6.7688e-01, + 4.1296e-01, 9.1355e-01, 9.9221e-01, 1.9937e-01, + 2.0294e-01, 4.8334e-01, 5.5805e-01, 5.7577e-01, + 9.8396e-01, 2.1795e-01, 1.8931e-01, 8.5201e-01, + 3.8953e-01, 6.9513e-02, 6.9908e-01, 4.2103e-01, + 5.1686e-01, 9.8052e-01, 4.8102e-01, 7.7828e-01, + 3.3843e-01, 9.9544e-01, 2.5451e-01, 9.0598e-01, + 7.5647e-02, 2.9191e-01, 6.4351e-01, 3.2196e-01, + 2.0623e-01, 8.6748e-01, 7.8486e-01, 6.7205e-01, + 3.1625e-01, 8.5128e-01, 8.8565e-01, 4.2137e-01, + 1.1091e-01, 4.9402e-01, 5.4089e-01, 6.8405e-01, + 2.9753e-01, 4.2060e-01, 9.1311e-01, 5.5459e-01, + 8.3026e-01, 4.6157e-01, 2.3608e-01, 5.8476e-01, + 2.5801e-01, 7.1950e-01, 9.1236e-01, 8.9678e-01, + 5.6071e-01, 4.7115e-01, 9.0639e-01, 1.3986e-01, + 5.1603e-01, 4.5611e-01, 5.2778e-01, 2.3069e-01, + 1.7250e-01, 6.1973e-01, 4.9448e-01, 7.8930e-01, + 6.9896e-01, 6.4817e-01, 4.7390e-01, 6.7748e-01, + 1.8072e-01, 9.9144e-01, 2.6587e-01, 6.0122e-01, + 2.1225e-01, 4.4200e-01, 4.8065e-01, 9.0465e-01, + 3.2260e-01, 2.5671e-01, 2.8894e-01, 2.4832e-01, + 7.5346e-01, 3.7883e-01, 2.2906e-01, 6.0232e-02, + 5.1161e-01, 7.4210e-02, 6.5179e-01, 5.1750e-01, + 8.8617e-01, 4.7427e-01, 6.8617e-01, 5.4228e-02, + 7.3517e-01, 7.3277e-01, 2.1593e-01, 8.7086e-01, + 2.5536e-01, 6.7634e-01, 4.0865e-01, 3.9570e-01, + 8.1394e-01, 6.7279e-01, 3.1018e-01, 7.9731e-01, + 3.2988e-01, 1.4131e-01, 2.7103e-02, 5.9738e-01, + 5.7453e-01, 9.5820e-01, 2.2775e-01, 5.9104e-01, + 5.1097e-01, 9.5412e-01, 9.8822e-01, 5.2242e-01, + 2.8580e-01, 6.6891e-01, 8.0483e-01, 9.6335e-01, + 8.8453e-01, 8.3167e-01, 7.5733e-01, 9.4445e-01, + 3.0266e-01, 9.4298e-01, 6.4349e-01, 3.7207e-01, + 2.8157e-01, 9.8279e-01, 5.2135e-01, 9.2412e-01, + 3.9414e-01, 1.9883e-01, 1.0428e-01, 4.4835e-01, + 5.4509e-01, 2.9717e-01, 8.8737e-01, 3.2817e-01, + 8.1706e-01, 5.4337e-01, 3.5309e-02, 6.1329e-01, + 2.4008e-01, 8.6714e-01, 8.2432e-01, 8.6613e-01, + 4.7304e-01, 8.0100e-01, 5.9219e-01, 9.1623e-01, + 8.9663e-01, 8.4183e-01, 6.1959e-01, 2.5572e-01, + 5.3324e-01, 8.7137e-01, 2.3142e-01, 1.2987e-01, + 8.7470e-01, 3.2674e-01, 7.0319e-01, 6.8691e-01, + 6.0352e-02, 1.0758e-01, 8.0610e-01, 1.4246e-01, + 8.5753e-01, 3.1758e-01, 7.2485e-02, 4.9372e-01, + 5.9390e-01, 8.6305e-01, 9.5054e-01, 6.1048e-01, + 2.5868e-01, 6.7061e-01, 5.4914e-02, 3.1942e-01, + 3.2211e-01, 5.9735e-02, 6.3294e-01, 5.3201e-01, + 3.2903e-01, 3.8267e-01, 4.1705e-01, 2.8449e-01, + 9.6245e-01, 9.8518e-01, 3.5270e-01, 3.0525e-01, + 5.7444e-01, 3.8535e-01, 7.2539e-01, 9.0836e-01, + 2.5651e-01, 1.1982e-01, 7.7055e-01, 4.9427e-01, + 7.7750e-01, 1.2286e-01, 7.6843e-01, 9.7353e-01, + 3.1458e-03, 4.9794e-01, 5.4164e-01, 2.7698e-01, + 9.4323e-01, 6.8588e-01, 4.4740e-01, 2.5060e-01, + 6.3933e-01, 1.5948e-01, 8.0108e-01, 1.6827e-01, + 7.7705e-01, 3.7266e-01, 1.1629e-01, 1.2457e-01, + 1.7987e-01, 
1.7544e-01, 8.9379e-01, 9.9154e-01, + 1.9943e-01, 4.3856e-01, 8.6042e-01, 4.5407e-01, + 2.6806e-01, 5.9331e-01, 6.6726e-02, 8.4538e-01, + 4.3269e-01, 6.3831e-01, 8.4049e-01, 5.6436e-01, + 7.0962e-01, 2.9599e-01, 3.5067e-01, 4.4666e-01, + 1.2939e-01, 9.4365e-01, 7.8824e-02, 2.4235e-01, + 3.1459e-02, 4.0226e-01, 6.6546e-01, 1.9632e-01, + 1.0086e-01, 6.9880e-01, 4.8300e-01, 8.2713e-01, + 7.4273e-01, 3.5099e-02, 2.7427e-01, 6.4444e-01, + 3.3388e-01, 5.7429e-01, 6.7086e-01, 8.3286e-01, + 8.6153e-02, 6.5216e-02, 7.5920e-01, 4.8135e-01, + 3.9134e-01, 2.2790e-01, 4.1788e-01, 3.1685e-01, + 4.6375e-01, 7.1242e-01, 9.2845e-01, 4.7767e-01, + 8.8388e-01, 5.0231e-01, 8.8959e-01, 1.3500e-01, + 5.5147e-01, 3.4307e-01, 8.1439e-01, 5.6923e-01, + 1.5864e-01, 2.8270e-01, 7.7082e-01, 9.5930e-01, + 6.5183e-01, 2.8440e-01, 8.4987e-01, 4.0660e-01, + 4.5850e-01, 7.9000e-01, 7.4111e-01, 9.7289e-02, + 7.1325e-01, 4.8387e-01, 8.8020e-01, 9.2394e-01, + 2.2972e-01, 1.5188e-01, 7.6577e-01, 9.9898e-01, + 3.1108e-01, 6.0390e-01, 1.0443e-01, 2.6637e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.5445, 0.4387, 0.2146, ..., 0.9330, 0.4366, 0.2965]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 4.871366500854492 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 147223 -ss 10000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.998100280761719} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([ 606, 4248, 4323, 142, 3267, 1209, 7616, 5137, 8211, + 6424, 2299, 2295, 132, 1237, 207, 4471, 8426, 1510, + 3485, 6960, 5069, 6876, 5759, 6010, 6198, 5515, 1057, + 6920, 9155, 4894, 8077, 2579, 7160, 9343, 5510, 6273, + 3046, 6095, 9498, 4568, 7882, 4172, 6457, 7199, 2821, + 8109, 7947, 3059, 5273, 811, 3090, 6904, 7921, 941, + 171, 1700, 8078, 8915, 5888, 6297, 3017, 6368, 5232, + 3121, 6921, 1954, 1701, 989, 2512, 3553, 4586, 8676, + 4195, 7666, 522, 8800, 4183, 4215, 5436, 4713, 8751, + 9665, 2209, 2880, 8327, 8043, 3725, 5157, 5205, 5068, + 5472, 2221, 9235, 5844, 1615, 5209, 8676, 880, 3145, + 5350, 6766, 607, 1821, 2811, 5587, 4952, 36, 972, + 9657, 5891, 932, 6251, 2971, 8136, 8846, 5746, 6772, + 3335, 9210, 5699, 6015, 5058, 4114, 1121, 1345, 4570, + 3080, 1837, 5077, 2774, 60, 9761, 283, 1953, 8754, + 568, 8363, 7504, 4312, 2950, 8818, 3615, 7260, 5054, + 9479, 8823, 817, 4247, 8916, 458, 1663, 1516, 8921, + 8262, 3930, 5511, 4167, 1454, 227, 9082, 8167, 6759, + 4104, 2365, 4891, 7213, 8479, 3335, 3691, 5216, 8378, + 1063, 5818, 8471, 7448, 5050, 8971, 747, 666, 6626, + 6463, 5721, 7264, 5789, 105, 7831, 7486, 5643, 8037, + 3964, 1225, 7868, 6793, 785, 9337, 463, 785, 4486, + 40, 8639, 6486, 5799, 220, 7289, 3630, 4023, 9716, + 636, 3153, 9820, 4041, 7979, 6263, 5265, 4683, 9203, + 90, 7642, 420, 3880, 2822, 376, 3766, 4385, 838, + 7707, 6033, 8983, 6271, 3139, 9042, 3814, 2813, 2724, + 6271, 5387, 8121, 7195, 5117, 5794, 8947, 1085, 4429, + 160, 2993, 9952, 7440, 7699, 3817, 18, 5719, 6435, + 6095, 7069, 731, 9481, 9394, 2426, 4205, 2461, 3873, + 8586, 4374, 7356, 5004, 2798, 7617, 2197, 119, 2470, + 3245, 8733, 6824, 4746, 9120, 2061, 7077, 2855, 8255, + 3205, 7568, 3053, 9530, 1111, 6923, 2083, 2934, 8128, + 5682, 8762, 6169, 7878, 5288, 2262, 4469, 7426, 1628, + 8495, 4564, 2858, 9806, 1173, 5731, 2322, 8964, 1865, + 7016, 9059, 5039, 2002, 1262, 7718, 4777, 1962, 1879, + 7855, 7890, 8641, 1099, 829, 2515, 1746, 2320, 6687, + 4799, 4569, 6237, 125, 2360, 7731, 401, 9623, 7974, + 7957, 893, 162, 8581, 115, 4518, 1206, 9451, 4274, + 8143, 5673, 5773, 9287, 9034, 6759, 2616, 4836, 638, + 2832, 3042, 3961, 9127, 7640, 5801, 3757, 8870, 8049, + 7486, 8964, 1399, 684, 1029, 2622, 6759, 2002, 3761, + 9818, 252, 8051, 7124, 5540, 8162, 333, 6237, 8228, + 7539, 7326, 3539, 9730, 3200, 1285, 3058, 6960, 9438, + 3335, 118, 848, 7603, 2443, 3021, 5193, 1159, 4828, + 5979, 3020, 6180, 8297, 2434, 8432, 873, 6202, 7493, + 1203, 4541, 547, 3753, 2874, 591, 2074, 4910, 3558, + 8, 8298, 2922, 6062, 8645, 3039, 7096, 8203, 433, + 4098, 2978, 5201, 220, 4789, 9244, 7862, 3711, 8615, + 7690, 3102, 7583, 2602, 5733, 1120, 4841, 144, 8214, + 5416, 404, 6971, 805, 6355, 8972, 3062, 8352, 2931, + 889, 7464, 2057, 2241, 2991, 9880, 133, 6643, 7302, + 3644, 4493, 6722, 9152, 4033, 7880, 8852, 4724, 5057, + 855, 3221, 7385, 5224, 5527, 5418, 5692, 9101, 2016, + 4500, 7558, 4991, 2183, 8431, 5687, 826, 8185, 7512, + 2034, 7269, 9017, 5667, 1066, 3954, 6620, 6000, 5735, + 4170, 6764, 5652, 6631, 3447, 1745, 1777, 3216, 7230, + 9533, 1387, 2874, 521, 745, 8458, 5566, 3373, 8999, + 2967, 8776, 9639, 3523, 831, 3365, 1405, 3704, 2518, + 8736, 9840, 8611, 7168, 974, 9007, 4762, 6891, 9021, + 6430, 9209, 301, 1699, 2870, 8888, 7910, 1671, 663, + 5112, 5271, 1358, 1676, 8365, 1780, 7972, 1361, 1045, + 1410, 4989, 2560, 1704, 6356, 9942, 2422, 9763, 6643, + 1212, 5204, 
5812, 5025, 7667, 8288, 576, 8273, 6833, + 9949, 6536, 5359, 6612, 7293, 5641, 1851, 335, 4310, + 5189, 2075, 5757, 8286, 7613, 8539, 4264, 7571, 1867, + 7380, 7067, 1, 1197, 3144, 5717, 7323, 4837, 5709, + 8246, 2292, 5490, 4382, 1545, 1139, 9329, 6740, 703, + 1593, 798, 7486, 9746, 3819, 8833, 6776, 5180, 5052, + 2319, 7164, 4881, 5400, 7841, 2215, 554, 7675, 7385, + 9306, 6355, 6231, 9361, 2385, 796, 4758, 7147, 5797, + 8281, 3852, 8531, 1545, 8108, 6926, 7059, 4504, 4531, + 7506, 124, 7458, 3327, 3628, 7556, 4417, 5016, 2525, + 7489, 8555, 8443, 3229, 5225, 3661, 6918, 4100, 8017, + 696, 3226, 8086, 741, 4257, 7752, 9762, 5276, 7602, + 5466, 6581, 5529, 3577, 8691, 4977, 7816, 9124, 4760, + 3859, 4524, 9305, 6899, 4753, 8459, 4314, 3121, 6139, + 2846, 229, 1517, 5567, 5587, 6908, 6983, 1860, 4933, + 7361, 4014, 765, 1397, 6972, 1767, 184, 2375, 5132, + 8705, 5405, 2339, 1938, 8334, 4952, 7588, 5389, 6605, + 3177, 1985, 9590, 5212, 8072, 3816, 8811, 3096, 8925, + 1481, 7459, 6634, 1656, 5790, 1907, 5450, 5369, 1969, + 416, 8199, 204, 7300, 3972, 4789, 9148, 3034, 8861, + 867, 3687, 8017, 7357, 7678, 1306, 4227, 2554, 13, + 4237, 2094, 155, 3093, 7018, 9364, 9696, 454, 2404, + 9877, 1667, 5368, 2586, 9721, 344, 4734, 6749, 9534, + 6490, 9586, 8397, 4492, 1324, 1754, 8517, 7428, 618, + 5779, 2546, 5800, 591, 8731, 153, 2268, 5592, 747, + 1729, 6137, 9551, 3864, 9573, 4958, 9404, 1107, 40, + 81, 4587, 1225, 3165, 570, 3811, 3337, 716, 3120, + 5004, 6021, 7094, 5385, 1117, 9004, 5007, 7419, 1047, + 3691, 9910, 3062, 8070, 427, 7658, 5876, 5585, 6170, + 8863, 2006, 168, 2709, 1624, 9133, 9550, 8635, 9703, + 2241, 7070, 8845, 7089, 5296, 7227, 4530, 8156, 6517, + 6289, 3818, 5042, 4297, 6306, 292, 2597, 1535, 5147, + 8744, 3740, 2035, 1766, 8354, 3023, 1994, 479, 1457, + 2645, 8758, 6863, 6834, 8195, 5541, 5631, 8260, 1567, + 9934, 417, 8257, 382, 2493, 3232, 8660, 8338, 7113, + 1209, 5614, 471, 6024, 9286, 54, 7652, 6187, 540, + 8542, 6207, 6408, 4218, 7616, 5719, 4534, 6986, 2199, + 2970, 7293, 6650, 9284, 956, 6642, 9326, 5000, 9529, + 5318, 3414, 2028, 6559, 6060, 2447, 543, 4868, 7178, + 336, 1140, 3673, 2489, 8807, 2157, 5056, 6476, 3035, + 9189, 2353, 2512, 4440, 9211, 3097, 2278, 932, 4252, + 768, 331, 5614, 3971, 5355, 7842, 9323, 6119, 7597, + 450, 8478, 583, 2348, 5392, 1174, 1349, 5119, 7445, + 479, 1422, 413, 167, 314, 7818, 8189, 3817, 9967, + 3831, 4635, 9032, 2332, 6873, 8301, 1534, 4964, 9327, + 3874, 9991, 8234, 517, 4545, 7823, 9803, 8221, 542, + 5331, 9104, 7312, 7039, 7742, 2907, 3191, 8019, 2692, + 5064, 3352, 9762, 7319, 9026, 6962, 128, 5753, 4, + 9327, 3158, 8441, 4944, 4588, 8836, 9882, 3216, 3631, + 4729]), + values=tensor([0.7863, 0.8342, 0.7119, 0.8691, 0.1799, 0.8019, 0.5297, + 0.0461, 0.6789, 0.7616, 0.1867, 0.1510, 0.4759, 0.4897, + 0.3112, 0.5107, 0.2673, 0.7677, 0.2078, 0.1024, 0.6974, + 0.7483, 0.1375, 0.4229, 0.3418, 0.4279, 0.1779, 0.3128, + 0.7738, 0.2688, 0.7139, 0.5474, 0.8893, 0.2669, 0.3732, + 0.3978, 0.5696, 0.8366, 0.1938, 0.3846, 0.7418, 0.1575, + 0.9175, 0.3788, 0.9034, 0.5215, 0.5959, 0.4812, 0.8080, + 0.9553, 0.3234, 0.3058, 0.4874, 0.0548, 0.2922, 0.3243, + 0.2802, 0.3277, 0.7008, 0.8317, 0.6850, 0.3190, 0.4857, + 0.5360, 0.3195, 0.2796, 0.9648, 0.3173, 0.1462, 0.6508, + 0.9193, 0.0306, 0.5949, 0.4367, 0.7038, 0.9052, 0.8896, + 0.7649, 0.2853, 0.3726, 0.3482, 0.2792, 0.4239, 0.7674, + 0.7629, 0.2768, 0.2724, 0.5503, 0.3248, 0.8259, 0.0264, + 0.4983, 0.0596, 0.1536, 0.8502, 0.3449, 0.3085, 0.4356, + 0.3873, 0.9396, 0.5172, 0.4872, 0.6271, 
0.3707, 0.6993, + 0.4127, 0.1519, 0.7471, 0.9960, 0.8186, 0.7247, 0.9753, + 0.6228, 0.3862, 0.0395, 0.6842, 0.6671, 0.0794, 0.0052, + 0.9718, 0.2986, 0.0151, 0.4374, 0.9946, 0.0935, 0.5060, + 0.9278, 0.3396, 0.3559, 0.9710, 0.0242, 0.8115, 0.3763, + 0.7869, 0.4303, 0.4782, 0.2549, 0.1494, 0.9501, 0.9807, + 0.5176, 0.8320, 0.7401, 0.7586, 0.0516, 0.2314, 0.8977, + 0.3697, 0.6354, 0.3793, 0.1332, 0.4121, 0.9345, 0.8805, + 0.2787, 0.5335, 0.9802, 0.1369, 0.6510, 0.3232, 0.7449, + 0.7218, 0.7851, 0.7585, 0.3555, 0.2232, 0.3523, 0.7028, + 0.1003, 0.5059, 0.7756, 0.5967, 0.2963, 0.2044, 0.5060, + 0.4409, 0.2094, 0.4839, 0.8768, 0.4050, 0.2371, 0.4748, + 0.4748, 0.2096, 0.9009, 0.7365, 0.7361, 0.9956, 0.8167, + 0.9573, 0.1456, 0.8912, 0.8245, 0.2111, 0.1344, 0.3731, + 0.3546, 0.1724, 0.5871, 0.2882, 0.4315, 0.5993, 0.8036, + 0.5470, 0.0035, 0.4441, 0.2185, 0.7867, 0.1945, 0.1865, + 0.6911, 0.1596, 0.9086, 0.6358, 0.5350, 0.7830, 0.3829, + 0.8050, 0.3156, 0.1687, 0.6780, 0.7685, 0.5011, 0.3136, + 0.7647, 0.2212, 0.6030, 0.2126, 0.7262, 0.0615, 0.5973, + 0.9209, 0.1964, 0.0162, 0.2415, 0.2513, 0.1957, 0.9780, + 0.4213, 0.1357, 0.1199, 0.0038, 0.5586, 0.0956, 0.1284, + 0.2755, 0.0056, 0.5708, 0.5209, 0.1329, 0.1111, 0.6389, + 0.5765, 0.0036, 0.4213, 0.3664, 0.1220, 0.0489, 0.9965, + 0.8755, 0.8525, 0.6302, 0.2268, 0.7377, 0.0782, 0.4169, + 0.9956, 0.6600, 0.0917, 0.5793, 0.2528, 0.5405, 0.4980, + 0.7610, 0.2135, 0.4588, 0.6096, 0.1996, 0.3369, 0.2309, + 0.4068, 0.9687, 0.7562, 0.2269, 0.1687, 0.3793, 0.6399, + 0.4915, 0.4112, 0.6703, 0.6153, 0.4705, 0.1233, 0.9046, + 0.5631, 0.3352, 0.9593, 0.2252, 0.0553, 0.6186, 0.4222, + 0.2235, 0.1408, 0.4026, 0.0716, 0.3602, 0.6066, 0.8411, + 0.9387, 0.9914, 0.7443, 0.1915, 0.6794, 0.2087, 0.8185, + 0.7287, 0.5539, 0.8187, 0.7845, 0.7145, 0.3411, 0.1268, + 0.7307, 0.2110, 0.3286, 0.1623, 0.5391, 0.8854, 0.3559, + 0.3656, 0.2022, 0.2735, 0.4384, 0.2267, 0.5682, 0.0871, + 0.7504, 0.0440, 0.9453, 0.0560, 0.7453, 0.5245, 0.7868, + 0.7607, 0.0740, 0.5851, 0.6988, 0.9941, 0.1340, 0.8946, + 0.5851, 0.0531, 0.1239, 0.5973, 0.8490, 0.8281, 0.0289, + 0.9819, 0.2244, 0.1732, 0.2714, 0.1424, 0.2251, 0.3208, + 0.8178, 0.8825, 0.1082, 0.5536, 0.6102, 0.1422, 0.0979, + 0.8259, 0.1018, 0.6720, 0.0237, 0.5334, 0.2595, 0.3522, + 0.7713, 0.9285, 0.1688, 0.2536, 0.8257, 0.4873, 0.4909, + 0.6034, 0.7331, 0.7261, 0.8379, 0.4814, 0.4604, 0.0061, + 0.6796, 0.9124, 0.6545, 0.1060, 0.4611, 0.8779, 0.0489, + 0.4770, 0.9426, 0.0362, 0.9291, 0.0085, 0.2023, 0.9600, + 0.4833, 0.8674, 0.3213, 0.0549, 0.2797, 0.9705, 0.5031, + 0.0798, 0.1913, 0.0630, 0.4306, 0.1285, 0.5088, 0.2413, + 0.7971, 0.4797, 0.5836, 0.3643, 0.1162, 0.6647, 0.5069, + 0.8942, 0.2930, 0.0041, 0.5855, 0.8851, 0.2293, 0.4329, + 0.7326, 0.1690, 0.2530, 0.2387, 0.0036, 0.0121, 0.1039, + 0.5190, 0.2097, 0.4634, 0.9255, 0.4940, 0.3517, 0.6614, + 0.4552, 0.1007, 0.0099, 0.5784, 0.2947, 0.7080, 0.3681, + 0.7319, 0.5470, 0.3905, 0.8546, 0.8378, 0.2638, 0.2474, + 0.2020, 0.2602, 0.2058, 0.3439, 0.3725, 0.2372, 0.8608, + 0.6782, 0.4161, 0.2714, 0.0130, 0.3098, 0.6316, 0.5573, + 0.6366, 0.0552, 0.8331, 0.1655, 0.6120, 0.6847, 0.5996, + 0.3423, 0.3329, 0.9321, 0.1631, 0.7762, 0.9917, 0.4457, + 0.2145, 0.3700, 0.3585, 0.1830, 0.8788, 0.7830, 0.8978, + 0.9083, 0.9699, 0.0188, 0.8464, 0.2189, 0.0314, 0.1098, + 0.5475, 0.6995, 0.2544, 0.4684, 0.3350, 0.8207, 0.7849, + 0.7699, 0.7118, 0.1858, 0.2650, 0.1482, 0.3208, 0.2300, + 0.0603, 0.6347, 0.1795, 0.1880, 0.1849, 0.3946, 0.2904, + 0.3987, 0.0378, 0.8753, 0.9825, 0.3658, 0.9591, 0.8361, 
+ 0.6193, 0.9979, 0.4363, 0.2356, 0.5991, 0.1784, 0.1734, + 0.6202, 0.8094, 0.4349, 0.0297, 0.2971, 0.5907, 0.7311, + 0.5068, 0.6370, 0.7797, 0.6554, 0.9023, 0.2928, 0.4467, + 0.3417, 0.7580, 0.0048, 0.5521, 0.6463, 0.4551, 0.2157, + 0.9490, 0.7709, 0.5133, 0.6298, 0.9824, 0.0939, 0.6470, + 0.8582, 0.6745, 0.3195, 0.7034, 0.3210, 0.4343, 0.8580, + 0.2725, 0.9024, 0.1300, 0.1018, 0.1214, 0.8294, 0.8231, + 0.4988, 0.6393, 0.5659, 0.3564, 0.4693, 0.7534, 0.8943, + 0.8326, 0.6300, 0.8559, 0.5119, 0.1976, 0.2140, 0.7843, + 0.6970, 0.1656, 0.6279, 0.1965, 0.1246, 0.2067, 0.4844, + 0.6932, 0.0875, 0.9163, 0.9941, 0.6738, 0.5892, 0.8709, + 0.4754, 0.3597, 0.3053, 0.6792, 0.1671, 0.1823, 0.5845, + 0.7883, 0.0033, 0.7413, 0.6371, 0.5830, 0.0867, 0.2426, + 0.0434, 0.2486, 0.2783, 0.8635, 0.4149, 0.8689, 0.2094, + 0.0406, 0.9992, 0.4458, 0.4786, 0.5163, 0.4532, 0.9377, + 0.1115, 0.5946, 0.5658, 0.7630, 0.5075, 0.2843, 0.6994, + 0.4331, 0.1647, 0.7111, 0.9415, 0.9134, 0.3649, 0.1405, + 0.3023, 0.1916, 0.9338, 0.8955, 0.1579, 0.6881, 0.6431, + 0.7938, 0.6435, 0.1598, 0.8058, 0.0143, 0.7389, 0.1256, + 0.3343, 0.5721, 0.4218, 0.4586, 0.7800, 0.2224, 0.0329, + 0.3782, 0.5096, 0.7729, 0.2314, 0.6628, 0.4257, 0.0530, + 0.2394, 0.3782, 0.3378, 0.9264, 0.3846, 0.8312, 0.7165, + 0.9018, 0.2255, 0.8764, 0.4648, 0.8905, 0.6287, 0.3061, + 0.2358, 0.3575, 0.8837, 0.8661, 0.4644, 0.0307, 0.1658, + 0.1603, 0.6516, 0.7616, 0.5701, 0.3014, 0.9310, 0.2065, + 0.7077, 0.7364, 0.4491, 0.0518, 0.7097, 0.4874, 0.4668, + 0.0620, 0.4255, 0.6665, 0.1014, 0.1580, 0.0470, 0.3432, + 0.8393, 0.1570, 0.1401, 0.5172, 0.8417, 0.4672, 0.2201, + 0.1674, 0.9708, 0.6467, 0.6089, 0.9800, 0.0513, 0.3044, + 0.6979, 0.6719, 0.1842, 0.8617, 0.2669, 0.6961, 0.6593, + 0.1234, 0.4151, 0.5864, 0.5859, 0.0830, 0.1869, 0.0506, + 0.4507, 0.0944, 0.5583, 0.8982, 0.5055, 0.6171, 0.7678, + 0.4718, 0.9977, 0.2777, 0.7559, 0.1953, 0.4405, 0.9006, + 0.3125, 0.6338, 0.3459, 0.2249, 0.6948, 0.3347, 0.4623, + 0.5826, 0.5751, 0.0351, 0.6388, 0.1795, 0.7330, 0.5707, + 0.5527, 0.2760, 0.8521, 0.1919, 0.1692, 0.4703, 0.1457, + 0.4745, 0.3853, 0.5193, 0.8361, 0.1959, 0.4596, 0.5784, + 0.0974, 0.3018, 0.0027, 0.0284, 0.6379, 0.0985, 0.6697, + 0.1617, 0.9408, 0.1225, 0.4828, 0.1493, 0.2255, 0.9622, + 0.3456, 0.5549, 0.7083, 0.8183, 0.4017, 0.9015, 0.9500, + 0.9757, 0.7762, 0.4376, 0.1119, 0.0478, 0.0482, 0.0578, + 0.7538, 0.6748, 0.4915, 0.0046, 0.4804, 0.0171, 0.2571, + 0.5740, 0.8135, 0.9212, 0.1282, 0.1633, 0.3991, 0.3795, + 0.2563, 0.7909, 0.3096, 0.9640, 0.3523, 0.8436, 0.3227, + 0.0600, 0.3198, 0.3035, 0.1361, 0.3922, 0.1782, 0.7833, + 0.2504, 0.4757, 0.7349, 0.8713, 0.6836, 0.3507, 0.6395, + 0.8433, 0.8479, 0.5637, 0.2767, 0.4270, 0.1521, 0.7400, + 0.4410, 0.9217, 0.8199, 0.3646, 0.7246, 0.6747, 0.1362, + 0.4476, 0.3311, 0.4522, 0.8256, 0.9839, 0.1661, 0.7065, + 0.0053, 0.7677, 0.6798, 0.6573, 0.7053, 0.0946, 0.4782, + 0.6733, 0.8968, 0.8493, 0.9722, 0.3359, 0.2513, 0.8759, + 0.7557, 0.5642, 0.6956, 0.9785, 0.2314, 0.2092, 0.6617, + 0.2157, 0.9152, 0.2913, 0.0438, 0.9309, 0.2537, 0.0994, + 0.4607, 0.6405, 0.5177, 0.7145, 0.1394, 0.3492, 0.5865, + 0.9348, 0.8342, 0.9034, 0.5205, 0.0516, 0.1632, 0.3433, + 0.4758, 0.2442, 0.7218, 0.3687, 0.3685, 0.7796, 0.4166, + 0.7390, 0.4015, 0.0501, 0.4473, 0.1656, 0.4610, 0.3317, + 0.3754, 0.6628, 0.9353, 0.1661, 0.2491, 0.3244, 0.5026, + 0.2276, 0.1611, 0.0412, 0.1485, 0.2596, 0.3703, 0.5359, + 0.7023, 0.3612, 0.9260, 0.3044, 0.4320, 0.5730, 0.4544, + 0.7409, 0.6046, 0.2126, 0.8407, 0.5541, 0.9635, 0.4726, + 0.7284, 
0.1079, 0.8545, 0.8839, 0.1658, 0.6432, 0.3731, + 0.4876, 0.5276, 0.8205, 0.3497, 0.2810, 0.3329, 0.4371, + 0.6824, 0.9070, 0.8115, 0.6630, 0.8608, 0.8445, 0.6452, + 0.0464, 0.2074, 0.6033, 0.8590, 0.4426, 0.1662, 0.9143, + 0.8420, 0.9435, 0.3667, 0.0587, 0.3344, 0.5940, 0.9391, + 0.3098, 0.3277, 0.3122, 0.0248, 0.5693, 0.1331]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8452, 0.1047, 0.0563, ..., 0.6079, 0.4820, 0.4351]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.998100280761719 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([ 606, 4248, 4323, 142, 3267, 1209, 7616, 5137, 8211, + 6424, 2299, 2295, 132, 1237, 207, 4471, 8426, 1510, + 3485, 6960, 5069, 6876, 5759, 6010, 6198, 5515, 1057, + 6920, 9155, 4894, 8077, 2579, 7160, 9343, 5510, 6273, + 3046, 6095, 9498, 4568, 7882, 4172, 6457, 7199, 2821, + 8109, 7947, 3059, 5273, 811, 3090, 6904, 7921, 941, + 171, 1700, 8078, 8915, 5888, 6297, 3017, 6368, 5232, + 3121, 6921, 1954, 1701, 989, 2512, 3553, 4586, 8676, + 4195, 7666, 522, 8800, 4183, 4215, 5436, 4713, 8751, + 9665, 2209, 2880, 8327, 8043, 3725, 5157, 5205, 5068, + 5472, 2221, 9235, 5844, 1615, 5209, 8676, 880, 3145, + 5350, 6766, 607, 1821, 2811, 5587, 4952, 36, 972, + 9657, 5891, 932, 6251, 2971, 8136, 8846, 5746, 6772, + 3335, 9210, 5699, 6015, 5058, 4114, 1121, 1345, 4570, + 3080, 1837, 5077, 2774, 60, 9761, 283, 1953, 8754, + 568, 8363, 7504, 4312, 2950, 8818, 3615, 7260, 5054, + 9479, 8823, 817, 4247, 8916, 458, 1663, 1516, 8921, + 8262, 3930, 5511, 4167, 1454, 227, 9082, 8167, 6759, + 4104, 2365, 4891, 7213, 8479, 3335, 3691, 5216, 8378, + 1063, 5818, 8471, 7448, 5050, 8971, 747, 666, 6626, + 6463, 5721, 7264, 5789, 105, 7831, 7486, 5643, 8037, + 3964, 1225, 7868, 6793, 785, 9337, 463, 785, 4486, + 40, 8639, 6486, 5799, 220, 7289, 3630, 4023, 9716, + 636, 3153, 9820, 4041, 7979, 6263, 5265, 4683, 9203, + 90, 7642, 420, 3880, 2822, 376, 3766, 4385, 838, + 7707, 6033, 8983, 6271, 3139, 9042, 3814, 2813, 2724, + 6271, 5387, 8121, 7195, 5117, 5794, 8947, 1085, 4429, + 160, 2993, 9952, 7440, 7699, 3817, 18, 5719, 6435, + 6095, 7069, 731, 9481, 9394, 2426, 4205, 2461, 3873, + 8586, 4374, 7356, 5004, 2798, 7617, 2197, 119, 2470, + 3245, 8733, 6824, 4746, 9120, 2061, 7077, 2855, 8255, + 3205, 7568, 3053, 9530, 1111, 6923, 2083, 2934, 8128, + 5682, 8762, 6169, 7878, 5288, 2262, 4469, 7426, 1628, + 8495, 4564, 2858, 9806, 1173, 5731, 2322, 8964, 1865, + 7016, 9059, 5039, 2002, 1262, 7718, 4777, 1962, 1879, + 7855, 7890, 8641, 1099, 829, 2515, 1746, 2320, 6687, + 4799, 4569, 6237, 125, 2360, 7731, 401, 9623, 7974, + 7957, 893, 162, 8581, 115, 4518, 1206, 9451, 4274, + 8143, 5673, 5773, 9287, 9034, 6759, 2616, 4836, 638, + 2832, 3042, 3961, 9127, 7640, 5801, 3757, 8870, 8049, + 7486, 8964, 1399, 684, 1029, 2622, 6759, 2002, 3761, + 9818, 252, 8051, 7124, 5540, 8162, 333, 6237, 8228, + 7539, 7326, 3539, 9730, 3200, 1285, 3058, 6960, 9438, + 3335, 118, 848, 7603, 2443, 3021, 5193, 1159, 4828, + 5979, 
3020, 6180, 8297, 2434, 8432, 873, 6202, 7493, + 1203, 4541, 547, 3753, 2874, 591, 2074, 4910, 3558, + 8, 8298, 2922, 6062, 8645, 3039, 7096, 8203, 433, + 4098, 2978, 5201, 220, 4789, 9244, 7862, 3711, 8615, + 7690, 3102, 7583, 2602, 5733, 1120, 4841, 144, 8214, + 5416, 404, 6971, 805, 6355, 8972, 3062, 8352, 2931, + 889, 7464, 2057, 2241, 2991, 9880, 133, 6643, 7302, + 3644, 4493, 6722, 9152, 4033, 7880, 8852, 4724, 5057, + 855, 3221, 7385, 5224, 5527, 5418, 5692, 9101, 2016, + 4500, 7558, 4991, 2183, 8431, 5687, 826, 8185, 7512, + 2034, 7269, 9017, 5667, 1066, 3954, 6620, 6000, 5735, + 4170, 6764, 5652, 6631, 3447, 1745, 1777, 3216, 7230, + 9533, 1387, 2874, 521, 745, 8458, 5566, 3373, 8999, + 2967, 8776, 9639, 3523, 831, 3365, 1405, 3704, 2518, + 8736, 9840, 8611, 7168, 974, 9007, 4762, 6891, 9021, + 6430, 9209, 301, 1699, 2870, 8888, 7910, 1671, 663, + 5112, 5271, 1358, 1676, 8365, 1780, 7972, 1361, 1045, + 1410, 4989, 2560, 1704, 6356, 9942, 2422, 9763, 6643, + 1212, 5204, 5812, 5025, 7667, 8288, 576, 8273, 6833, + 9949, 6536, 5359, 6612, 7293, 5641, 1851, 335, 4310, + 5189, 2075, 5757, 8286, 7613, 8539, 4264, 7571, 1867, + 7380, 7067, 1, 1197, 3144, 5717, 7323, 4837, 5709, + 8246, 2292, 5490, 4382, 1545, 1139, 9329, 6740, 703, + 1593, 798, 7486, 9746, 3819, 8833, 6776, 5180, 5052, + 2319, 7164, 4881, 5400, 7841, 2215, 554, 7675, 7385, + 9306, 6355, 6231, 9361, 2385, 796, 4758, 7147, 5797, + 8281, 3852, 8531, 1545, 8108, 6926, 7059, 4504, 4531, + 7506, 124, 7458, 3327, 3628, 7556, 4417, 5016, 2525, + 7489, 8555, 8443, 3229, 5225, 3661, 6918, 4100, 8017, + 696, 3226, 8086, 741, 4257, 7752, 9762, 5276, 7602, + 5466, 6581, 5529, 3577, 8691, 4977, 7816, 9124, 4760, + 3859, 4524, 9305, 6899, 4753, 8459, 4314, 3121, 6139, + 2846, 229, 1517, 5567, 5587, 6908, 6983, 1860, 4933, + 7361, 4014, 765, 1397, 6972, 1767, 184, 2375, 5132, + 8705, 5405, 2339, 1938, 8334, 4952, 7588, 5389, 6605, + 3177, 1985, 9590, 5212, 8072, 3816, 8811, 3096, 8925, + 1481, 7459, 6634, 1656, 5790, 1907, 5450, 5369, 1969, + 416, 8199, 204, 7300, 3972, 4789, 9148, 3034, 8861, + 867, 3687, 8017, 7357, 7678, 1306, 4227, 2554, 13, + 4237, 2094, 155, 3093, 7018, 9364, 9696, 454, 2404, + 9877, 1667, 5368, 2586, 9721, 344, 4734, 6749, 9534, + 6490, 9586, 8397, 4492, 1324, 1754, 8517, 7428, 618, + 5779, 2546, 5800, 591, 8731, 153, 2268, 5592, 747, + 1729, 6137, 9551, 3864, 9573, 4958, 9404, 1107, 40, + 81, 4587, 1225, 3165, 570, 3811, 3337, 716, 3120, + 5004, 6021, 7094, 5385, 1117, 9004, 5007, 7419, 1047, + 3691, 9910, 3062, 8070, 427, 7658, 5876, 5585, 6170, + 8863, 2006, 168, 2709, 1624, 9133, 9550, 8635, 9703, + 2241, 7070, 8845, 7089, 5296, 7227, 4530, 8156, 6517, + 6289, 3818, 5042, 4297, 6306, 292, 2597, 1535, 5147, + 8744, 3740, 2035, 1766, 8354, 3023, 1994, 479, 1457, + 2645, 8758, 6863, 6834, 8195, 5541, 5631, 8260, 1567, + 9934, 417, 8257, 382, 2493, 3232, 8660, 8338, 7113, + 1209, 5614, 471, 6024, 9286, 54, 7652, 6187, 540, + 8542, 6207, 6408, 4218, 7616, 5719, 4534, 6986, 2199, + 2970, 7293, 6650, 9284, 956, 6642, 9326, 5000, 9529, + 5318, 3414, 2028, 6559, 6060, 2447, 543, 4868, 7178, + 336, 1140, 3673, 2489, 8807, 2157, 5056, 6476, 3035, + 9189, 2353, 2512, 4440, 9211, 3097, 2278, 932, 4252, + 768, 331, 5614, 3971, 5355, 7842, 9323, 6119, 7597, + 450, 8478, 583, 2348, 5392, 1174, 1349, 5119, 7445, + 479, 1422, 413, 167, 314, 7818, 8189, 3817, 9967, + 3831, 4635, 9032, 2332, 6873, 8301, 1534, 4964, 9327, + 3874, 9991, 8234, 517, 4545, 7823, 9803, 8221, 542, + 5331, 9104, 7312, 7039, 7742, 2907, 3191, 8019, 2692, 
+ 5064, 3352, 9762, 7319, 9026, 6962, 128, 5753, 4, + 9327, 3158, 8441, 4944, 4588, 8836, 9882, 3216, 3631, + 4729]), + values=tensor([0.7863, 0.8342, 0.7119, 0.8691, 0.1799, 0.8019, 0.5297, + 0.0461, 0.6789, 0.7616, 0.1867, 0.1510, 0.4759, 0.4897, + 0.3112, 0.5107, 0.2673, 0.7677, 0.2078, 0.1024, 0.6974, + 0.7483, 0.1375, 0.4229, 0.3418, 0.4279, 0.1779, 0.3128, + 0.7738, 0.2688, 0.7139, 0.5474, 0.8893, 0.2669, 0.3732, + 0.3978, 0.5696, 0.8366, 0.1938, 0.3846, 0.7418, 0.1575, + 0.9175, 0.3788, 0.9034, 0.5215, 0.5959, 0.4812, 0.8080, + 0.9553, 0.3234, 0.3058, 0.4874, 0.0548, 0.2922, 0.3243, + 0.2802, 0.3277, 0.7008, 0.8317, 0.6850, 0.3190, 0.4857, + 0.5360, 0.3195, 0.2796, 0.9648, 0.3173, 0.1462, 0.6508, + 0.9193, 0.0306, 0.5949, 0.4367, 0.7038, 0.9052, 0.8896, + 0.7649, 0.2853, 0.3726, 0.3482, 0.2792, 0.4239, 0.7674, + 0.7629, 0.2768, 0.2724, 0.5503, 0.3248, 0.8259, 0.0264, + 0.4983, 0.0596, 0.1536, 0.8502, 0.3449, 0.3085, 0.4356, + 0.3873, 0.9396, 0.5172, 0.4872, 0.6271, 0.3707, 0.6993, + 0.4127, 0.1519, 0.7471, 0.9960, 0.8186, 0.7247, 0.9753, + 0.6228, 0.3862, 0.0395, 0.6842, 0.6671, 0.0794, 0.0052, + 0.9718, 0.2986, 0.0151, 0.4374, 0.9946, 0.0935, 0.5060, + 0.9278, 0.3396, 0.3559, 0.9710, 0.0242, 0.8115, 0.3763, + 0.7869, 0.4303, 0.4782, 0.2549, 0.1494, 0.9501, 0.9807, + 0.5176, 0.8320, 0.7401, 0.7586, 0.0516, 0.2314, 0.8977, + 0.3697, 0.6354, 0.3793, 0.1332, 0.4121, 0.9345, 0.8805, + 0.2787, 0.5335, 0.9802, 0.1369, 0.6510, 0.3232, 0.7449, + 0.7218, 0.7851, 0.7585, 0.3555, 0.2232, 0.3523, 0.7028, + 0.1003, 0.5059, 0.7756, 0.5967, 0.2963, 0.2044, 0.5060, + 0.4409, 0.2094, 0.4839, 0.8768, 0.4050, 0.2371, 0.4748, + 0.4748, 0.2096, 0.9009, 0.7365, 0.7361, 0.9956, 0.8167, + 0.9573, 0.1456, 0.8912, 0.8245, 0.2111, 0.1344, 0.3731, + 0.3546, 0.1724, 0.5871, 0.2882, 0.4315, 0.5993, 0.8036, + 0.5470, 0.0035, 0.4441, 0.2185, 0.7867, 0.1945, 0.1865, + 0.6911, 0.1596, 0.9086, 0.6358, 0.5350, 0.7830, 0.3829, + 0.8050, 0.3156, 0.1687, 0.6780, 0.7685, 0.5011, 0.3136, + 0.7647, 0.2212, 0.6030, 0.2126, 0.7262, 0.0615, 0.5973, + 0.9209, 0.1964, 0.0162, 0.2415, 0.2513, 0.1957, 0.9780, + 0.4213, 0.1357, 0.1199, 0.0038, 0.5586, 0.0956, 0.1284, + 0.2755, 0.0056, 0.5708, 0.5209, 0.1329, 0.1111, 0.6389, + 0.5765, 0.0036, 0.4213, 0.3664, 0.1220, 0.0489, 0.9965, + 0.8755, 0.8525, 0.6302, 0.2268, 0.7377, 0.0782, 0.4169, + 0.9956, 0.6600, 0.0917, 0.5793, 0.2528, 0.5405, 0.4980, + 0.7610, 0.2135, 0.4588, 0.6096, 0.1996, 0.3369, 0.2309, + 0.4068, 0.9687, 0.7562, 0.2269, 0.1687, 0.3793, 0.6399, + 0.4915, 0.4112, 0.6703, 0.6153, 0.4705, 0.1233, 0.9046, + 0.5631, 0.3352, 0.9593, 0.2252, 0.0553, 0.6186, 0.4222, + 0.2235, 0.1408, 0.4026, 0.0716, 0.3602, 0.6066, 0.8411, + 0.9387, 0.9914, 0.7443, 0.1915, 0.6794, 0.2087, 0.8185, + 0.7287, 0.5539, 0.8187, 0.7845, 0.7145, 0.3411, 0.1268, + 0.7307, 0.2110, 0.3286, 0.1623, 0.5391, 0.8854, 0.3559, + 0.3656, 0.2022, 0.2735, 0.4384, 0.2267, 0.5682, 0.0871, + 0.7504, 0.0440, 0.9453, 0.0560, 0.7453, 0.5245, 0.7868, + 0.7607, 0.0740, 0.5851, 0.6988, 0.9941, 0.1340, 0.8946, + 0.5851, 0.0531, 0.1239, 0.5973, 0.8490, 0.8281, 0.0289, + 0.9819, 0.2244, 0.1732, 0.2714, 0.1424, 0.2251, 0.3208, + 0.8178, 0.8825, 0.1082, 0.5536, 0.6102, 0.1422, 0.0979, + 0.8259, 0.1018, 0.6720, 0.0237, 0.5334, 0.2595, 0.3522, + 0.7713, 0.9285, 0.1688, 0.2536, 0.8257, 0.4873, 0.4909, + 0.6034, 0.7331, 0.7261, 0.8379, 0.4814, 0.4604, 0.0061, + 0.6796, 0.9124, 0.6545, 0.1060, 0.4611, 0.8779, 0.0489, + 0.4770, 0.9426, 0.0362, 0.9291, 0.0085, 0.2023, 0.9600, + 0.4833, 0.8674, 0.3213, 0.0549, 0.2797, 0.9705, 0.5031, 
+ 0.0798, 0.1913, 0.0630, 0.4306, 0.1285, 0.5088, 0.2413, + 0.7971, 0.4797, 0.5836, 0.3643, 0.1162, 0.6647, 0.5069, + 0.8942, 0.2930, 0.0041, 0.5855, 0.8851, 0.2293, 0.4329, + 0.7326, 0.1690, 0.2530, 0.2387, 0.0036, 0.0121, 0.1039, + 0.5190, 0.2097, 0.4634, 0.9255, 0.4940, 0.3517, 0.6614, + 0.4552, 0.1007, 0.0099, 0.5784, 0.2947, 0.7080, 0.3681, + 0.7319, 0.5470, 0.3905, 0.8546, 0.8378, 0.2638, 0.2474, + 0.2020, 0.2602, 0.2058, 0.3439, 0.3725, 0.2372, 0.8608, + 0.6782, 0.4161, 0.2714, 0.0130, 0.3098, 0.6316, 0.5573, + 0.6366, 0.0552, 0.8331, 0.1655, 0.6120, 0.6847, 0.5996, + 0.3423, 0.3329, 0.9321, 0.1631, 0.7762, 0.9917, 0.4457, + 0.2145, 0.3700, 0.3585, 0.1830, 0.8788, 0.7830, 0.8978, + 0.9083, 0.9699, 0.0188, 0.8464, 0.2189, 0.0314, 0.1098, + 0.5475, 0.6995, 0.2544, 0.4684, 0.3350, 0.8207, 0.7849, + 0.7699, 0.7118, 0.1858, 0.2650, 0.1482, 0.3208, 0.2300, + 0.0603, 0.6347, 0.1795, 0.1880, 0.1849, 0.3946, 0.2904, + 0.3987, 0.0378, 0.8753, 0.9825, 0.3658, 0.9591, 0.8361, + 0.6193, 0.9979, 0.4363, 0.2356, 0.5991, 0.1784, 0.1734, + 0.6202, 0.8094, 0.4349, 0.0297, 0.2971, 0.5907, 0.7311, + 0.5068, 0.6370, 0.7797, 0.6554, 0.9023, 0.2928, 0.4467, + 0.3417, 0.7580, 0.0048, 0.5521, 0.6463, 0.4551, 0.2157, + 0.9490, 0.7709, 0.5133, 0.6298, 0.9824, 0.0939, 0.6470, + 0.8582, 0.6745, 0.3195, 0.7034, 0.3210, 0.4343, 0.8580, + 0.2725, 0.9024, 0.1300, 0.1018, 0.1214, 0.8294, 0.8231, + 0.4988, 0.6393, 0.5659, 0.3564, 0.4693, 0.7534, 0.8943, + 0.8326, 0.6300, 0.8559, 0.5119, 0.1976, 0.2140, 0.7843, + 0.6970, 0.1656, 0.6279, 0.1965, 0.1246, 0.2067, 0.4844, + 0.6932, 0.0875, 0.9163, 0.9941, 0.6738, 0.5892, 0.8709, + 0.4754, 0.3597, 0.3053, 0.6792, 0.1671, 0.1823, 0.5845, + 0.7883, 0.0033, 0.7413, 0.6371, 0.5830, 0.0867, 0.2426, + 0.0434, 0.2486, 0.2783, 0.8635, 0.4149, 0.8689, 0.2094, + 0.0406, 0.9992, 0.4458, 0.4786, 0.5163, 0.4532, 0.9377, + 0.1115, 0.5946, 0.5658, 0.7630, 0.5075, 0.2843, 0.6994, + 0.4331, 0.1647, 0.7111, 0.9415, 0.9134, 0.3649, 0.1405, + 0.3023, 0.1916, 0.9338, 0.8955, 0.1579, 0.6881, 0.6431, + 0.7938, 0.6435, 0.1598, 0.8058, 0.0143, 0.7389, 0.1256, + 0.3343, 0.5721, 0.4218, 0.4586, 0.7800, 0.2224, 0.0329, + 0.3782, 0.5096, 0.7729, 0.2314, 0.6628, 0.4257, 0.0530, + 0.2394, 0.3782, 0.3378, 0.9264, 0.3846, 0.8312, 0.7165, + 0.9018, 0.2255, 0.8764, 0.4648, 0.8905, 0.6287, 0.3061, + 0.2358, 0.3575, 0.8837, 0.8661, 0.4644, 0.0307, 0.1658, + 0.1603, 0.6516, 0.7616, 0.5701, 0.3014, 0.9310, 0.2065, + 0.7077, 0.7364, 0.4491, 0.0518, 0.7097, 0.4874, 0.4668, + 0.0620, 0.4255, 0.6665, 0.1014, 0.1580, 0.0470, 0.3432, + 0.8393, 0.1570, 0.1401, 0.5172, 0.8417, 0.4672, 0.2201, + 0.1674, 0.9708, 0.6467, 0.6089, 0.9800, 0.0513, 0.3044, + 0.6979, 0.6719, 0.1842, 0.8617, 0.2669, 0.6961, 0.6593, + 0.1234, 0.4151, 0.5864, 0.5859, 0.0830, 0.1869, 0.0506, + 0.4507, 0.0944, 0.5583, 0.8982, 0.5055, 0.6171, 0.7678, + 0.4718, 0.9977, 0.2777, 0.7559, 0.1953, 0.4405, 0.9006, + 0.3125, 0.6338, 0.3459, 0.2249, 0.6948, 0.3347, 0.4623, + 0.5826, 0.5751, 0.0351, 0.6388, 0.1795, 0.7330, 0.5707, + 0.5527, 0.2760, 0.8521, 0.1919, 0.1692, 0.4703, 0.1457, + 0.4745, 0.3853, 0.5193, 0.8361, 0.1959, 0.4596, 0.5784, + 0.0974, 0.3018, 0.0027, 0.0284, 0.6379, 0.0985, 0.6697, + 0.1617, 0.9408, 0.1225, 0.4828, 0.1493, 0.2255, 0.9622, + 0.3456, 0.5549, 0.7083, 0.8183, 0.4017, 0.9015, 0.9500, + 0.9757, 0.7762, 0.4376, 0.1119, 0.0478, 0.0482, 0.0578, + 0.7538, 0.6748, 0.4915, 0.0046, 0.4804, 0.0171, 0.2571, + 0.5740, 0.8135, 0.9212, 0.1282, 0.1633, 0.3991, 0.3795, + 0.2563, 0.7909, 0.3096, 0.9640, 0.3523, 0.8436, 0.3227, + 0.0600, 
0.3198, 0.3035, 0.1361, 0.3922, 0.1782, 0.7833, + 0.2504, 0.4757, 0.7349, 0.8713, 0.6836, 0.3507, 0.6395, + 0.8433, 0.8479, 0.5637, 0.2767, 0.4270, 0.1521, 0.7400, + 0.4410, 0.9217, 0.8199, 0.3646, 0.7246, 0.6747, 0.1362, + 0.4476, 0.3311, 0.4522, 0.8256, 0.9839, 0.1661, 0.7065, + 0.0053, 0.7677, 0.6798, 0.6573, 0.7053, 0.0946, 0.4782, + 0.6733, 0.8968, 0.8493, 0.9722, 0.3359, 0.2513, 0.8759, + 0.7557, 0.5642, 0.6956, 0.9785, 0.2314, 0.2092, 0.6617, + 0.2157, 0.9152, 0.2913, 0.0438, 0.9309, 0.2537, 0.0994, + 0.4607, 0.6405, 0.5177, 0.7145, 0.1394, 0.3492, 0.5865, + 0.9348, 0.8342, 0.9034, 0.5205, 0.0516, 0.1632, 0.3433, + 0.4758, 0.2442, 0.7218, 0.3687, 0.3685, 0.7796, 0.4166, + 0.7390, 0.4015, 0.0501, 0.4473, 0.1656, 0.4610, 0.3317, + 0.3754, 0.6628, 0.9353, 0.1661, 0.2491, 0.3244, 0.5026, + 0.2276, 0.1611, 0.0412, 0.1485, 0.2596, 0.3703, 0.5359, + 0.7023, 0.3612, 0.9260, 0.3044, 0.4320, 0.5730, 0.4544, + 0.7409, 0.6046, 0.2126, 0.8407, 0.5541, 0.9635, 0.4726, + 0.7284, 0.1079, 0.8545, 0.8839, 0.1658, 0.6432, 0.3731, + 0.4876, 0.5276, 0.8205, 0.3497, 0.2810, 0.3329, 0.4371, + 0.6824, 0.9070, 0.8115, 0.6630, 0.8608, 0.8445, 0.6452, + 0.0464, 0.2074, 0.6033, 0.8590, 0.4426, 0.1662, 0.9143, + 0.8420, 0.9435, 0.3667, 0.0587, 0.3344, 0.5940, 0.9391, + 0.3098, 0.3277, 0.3122, 0.0248, 0.5693, 0.1331]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8452, 0.1047, 0.0563, ..., 0.6079, 0.4820, 0.4351]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.998100280761719 seconds + +[20.36, 20.16, 20.2, 20.32, 20.32, 20.36, 20.56, 20.6, 20.52, 20.56] +[20.68, 20.8, 21.2, 24.88, 26.6, 27.2, 27.72, 25.88, 24.68, 23.88, 23.88, 23.8, 23.88, 23.68] +14.660717248916626 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 147223, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.998100280761719, 'TIME_S_1KI': 0.07470368271779354, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.2257844543456, 'W': 22.66095026687019} +[20.36, 20.16, 20.2, 20.32, 20.32, 20.36, 20.56, 20.6, 20.52, 20.56, 20.32, 20.36, 20.24, 20.2, 20.24, 20.36, 20.24, 20.2, 20.36, 20.24] +365.9799999999999 +18.298999999999996 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 147223, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.998100280761719, 'TIME_S_1KI': 0.07470368271779354, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.2257844543456, 'W': 22.66095026687019, 'J_1KI': 2.256616048133414, 'W_1KI': 0.15392262259884795, 'W_D': 4.3619502668701955, 'J_D': 63.94931951642035, 'W_D_1KI': 0.029628184909084827, 'J_D_1KI': 0.00020124698524744657} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..f94bbe2 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 52408, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.675631761550903, "TIME_S_1KI": 
0.2037023309714338, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 322.76721565246584, "W": 22.07477701860741, "J_1KI": 6.1587394224634755, "W_1KI": 0.42121006370415603, "W_D": 3.4607770186074056, "J_D": 50.6018865489959, "W_D_1KI": 0.06603528122819809, "J_D_1KI": 0.0012600229207029095} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..a9bfe94 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.02814483642578125} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 4997, 4998, 5000]), + col_indices=tensor([7223, 597, 5381, ..., 4437, 2871, 7175]), + values=tensor([0.8424, 0.9605, 0.7186, ..., 0.3316, 0.2968, 0.8125]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.9947, 0.8149, 0.3597, ..., 0.7445, 0.4060, 0.0098]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.02814483642578125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 37307 -ss 10000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.474437952041626} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
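For context on what these runs measure: spmv.py (not part of this hunk) builds the synthetic sparse matrix printed in each log, converts it to CSR, and then repeats the sparse matrix-vector product, accumulating the loop time into TIME_S. A minimal sketch of that loop is given below, assuming the matrix is generated from random indices and values at the requested density; the actual generation and timing code in spmv.py may differ in detail.

import torch

rows = cols = 10000
nnz = int(rows * cols * 5e-05)             # density 5e-05 -> 5000 nonzeros, as in this file
indices = torch.randint(0, rows, (2, nnz))
values = torch.rand(nnz)
matrix = torch.sparse_coo_tensor(indices, values, (rows, cols)).coalesce()
matrix = matrix.to_sparse_csr().type(torch.float32)   # the conversion echoed in the warnings above
vector = torch.rand(cols)

iterations = 100                           # the count passed to spmv.py on the command line (100, 37307, 52408 above)
for _ in range(iterations):
    result = matrix @ vector               # CSR matrix x dense vector; this is the work accumulated into TIME_S

The recorded timings are consistent with such a loop: for the 5e-05 runs, TIME_S grows roughly linearly with the iteration count (about 0.028 s at 100 iterations, 7.47 s at 37307, and 10.68 s at 52408).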
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 5000, 5000, 5000]), + col_indices=tensor([7873, 9438, 5376, ..., 1254, 8934, 6510]), + values=tensor([0.8139, 0.0055, 0.6843, ..., 0.4362, 0.9226, 0.6386]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.9683, 0.4961, 0.7880, ..., 0.7466, 0.9086, 0.6990]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 7.474437952041626 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 52408 -ss 10000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.675631761550903} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), + col_indices=tensor([6316, 4387, 8598, ..., 977, 3012, 3071]), + values=tensor([0.0249, 0.1066, 0.4899, ..., 0.3057, 0.2915, 0.5832]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.6250, 0.8754, 0.6636, ..., 0.3831, 0.1537, 0.5147]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.675631761550903 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), + col_indices=tensor([6316, 4387, 8598, ..., 977, 3012, 3071]), + values=tensor([0.0249, 0.1066, 0.4899, ..., 0.3057, 0.2915, 0.5832]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.6250, 0.8754, 0.6636, ..., 0.3831, 0.1537, 0.5147]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.675631761550903 seconds + +[20.6, 20.52, 20.48, 20.48, 20.44, 20.28, 20.56, 20.6, 20.76, 21.08] +[21.08, 20.76, 21.6, 22.6, 23.96, 24.64, 25.4, 24.84, 24.84, 24.72, 23.92, 24.04, 24.08, 23.92] +14.621539115905762 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 52408, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.675631761550903, 'TIME_S_1KI': 0.2037023309714338, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.76721565246584, 'W': 22.07477701860741} +[20.6, 20.52, 20.48, 20.48, 20.44, 20.28, 20.56, 20.6, 20.76, 21.08, 20.48, 20.64, 20.8, 20.76, 20.92, 21.0, 20.88, 20.76, 20.84, 20.96] +372.2800000000001 +18.614000000000004 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 52408, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.675631761550903, 'TIME_S_1KI': 0.2037023309714338, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 322.76721565246584, 'W': 22.07477701860741, 'J_1KI': 6.1587394224634755, 'W_1KI': 0.42121006370415603, 'W_D': 3.4607770186074056, 'J_D': 50.6018865489959, 'W_D_1KI': 0.06603528122819809, 'J_D_1KI': 0.0012600229207029095} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..118d60f --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 98.35910129547119, "TIME_S_1KI": 983.5910129547119, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2706.022798595428, "W": 24.09983015121324, "J_1KI": 27060.22798595428, "W_1KI": 240.9983015121324, "W_D": 5.635830151213241, "J_D": 632.8129610252375, "W_D_1KI": 56.358301512132414, "J_D_1KI": 563.5830151213241} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..22bc540 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,47 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 
98.35910129547119} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 62, 113, ..., 24999916, + 24999964, 25000000]), + col_indices=tensor([ 13628, 17541, 24252, ..., 467551, 469636, + 477818]), + values=tensor([0.8374, 0.1433, 0.7046, ..., 0.7606, 0.4438, 0.1648]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.5427, 0.9990, 0.7165, ..., 0.2818, 0.2990, 0.5329]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 98.35910129547119 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 62, 113, ..., 24999916, + 24999964, 25000000]), + col_indices=tensor([ 13628, 17541, 24252, ..., 467551, 469636, + 477818]), + values=tensor([0.8374, 0.1433, 0.7046, ..., 0.7606, 0.4438, 0.1648]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.5427, 0.9990, 0.7165, ..., 0.2818, 0.2990, 0.5329]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 98.35910129547119 seconds + +[20.28, 20.32, 20.44, 20.32, 20.6, 20.64, 20.64, 20.52, 20.76, 20.8] +[20.68, 20.52, 20.6, 22.08, 24.8, 25.76, 28.52, 28.48, 30.56, 30.0, 29.28, 29.8, 28.92, 28.32, 28.32, 27.32, 26.84, 25.96, 25.16, 25.08, 24.72, 24.92, 24.96, 25.12, 25.32, 25.44, 25.32, 25.24, 25.2, 25.08, 25.12, 25.28, 25.16, 25.32, 25.2, 25.04, 25.04, 25.04, 25.28, 25.16, 25.28, 25.36, 25.36, 25.36, 25.44, 25.48, 25.28, 25.24, 25.12, 25.04, 25.04, 25.2, 25.16, 25.36, 25.24, 25.36, 25.12, 25.12, 25.12, 25.2, 25.32, 25.36, 25.16, 25.16, 25.2, 25.28, 25.4, 25.44, 25.48, 25.28, 25.24, 25.36, 25.36, 25.24, 25.36, 25.2, 25.24, 25.52, 25.52, 25.52, 25.52, 25.4, 25.08, 25.2, 25.24, 25.44, 25.12, 25.12, 24.92, 24.8, 24.8, 25.08, 25.12, 25.32, 25.28, 25.2, 25.04, 25.08, 25.16, 25.4, 25.44, 25.36, 25.36, 25.36, 25.28, 25.04, 25.16] +112.28389501571655 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 98.35910129547119, 'TIME_S_1KI': 983.5910129547119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2706.022798595428, 'W': 24.09983015121324} +[20.28, 20.32, 20.44, 20.32, 20.6, 20.64, 20.64, 20.52, 20.76, 20.8, 20.48, 20.64, 20.56, 20.44, 20.36, 20.4, 20.36, 20.52, 20.64, 20.68] +369.28 +18.464 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 
0.0001, 'TIME_S': 98.35910129547119, 'TIME_S_1KI': 983.5910129547119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2706.022798595428, 'W': 24.09983015121324, 'J_1KI': 27060.22798595428, 'W_1KI': 240.9983015121324, 'W_D': 5.635830151213241, 'J_D': 632.8129610252375, 'W_D_1KI': 56.358301512132414, 'J_D_1KI': 563.5830151213241} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..36dce3a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.078357696533203, "TIME_S_1KI": 100.78357696533203, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 355.02523105621333, "W": 24.261238712111865, "J_1KI": 3550.2523105621335, "W_1KI": 242.61238712111864, "W_D": 5.657238712111866, "J_D": 82.78482829093929, "W_D_1KI": 56.57238712111866, "J_D_1KI": 565.7238712111866} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..8996689 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,47 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.078357696533203} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 6, ..., 2499989, + 2499997, 2500000]), + col_indices=tensor([ 10944, 177257, 201447, ..., 125511, 168548, + 443200]), + values=tensor([0.0549, 0.4670, 0.3111, ..., 0.0129, 0.0661, 0.9327]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3951, 0.3409, 0.2222, ..., 0.4533, 0.5999, 0.5088]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.078357696533203 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
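The derived fields in these JSON summaries follow from the raw numbers printed with each run: average power W is total energy J divided by the measured wall time, W_D subtracts the idle-power estimate printed just before the final dict (18.464 for the 500000/0.0001 run above), and the per-1000-iteration fields divide by ITERATIONS/1000. The sketch below reproduces the fields whose derivation is directly visible in the printed numbers; it is not the repository's batch.py, and how J and the idle figure are themselves obtained from the power-sample lists is not reconstructed here.

def derive_metrics(iterations, time_s, elapsed_s, joules, idle_w):
    per_1k = iterations / 1000.0
    w = joules / elapsed_s                 # average power over the measured window
    w_d = w - idle_w                       # power above idle
    j_d = w_d * elapsed_s                  # energy above idle
    return {
        'TIME_S_1KI': time_s / per_1k,
        'W': w, 'J_1KI': joules / per_1k, 'W_1KI': w / per_1k,
        'W_D': w_d, 'J_D': j_d, 'W_D_1KI': w_d / per_1k,
    }

# The 500000 x 500000, density 0.0001 run above:
print(derive_metrics(100, 98.35910129547119, 112.28389501571655,
                     2706.022798595428, 18.464))
# -> TIME_S_1KI ~983.59, W ~24.10, J_1KI ~27060.2, W_1KI ~241.0,
#    W_D ~5.636, J_D ~632.81, W_D_1KI ~56.36, matching the JSON above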
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 6, ..., 2499989, + 2499997, 2500000]), + col_indices=tensor([ 10944, 177257, 201447, ..., 125511, 168548, + 443200]), + values=tensor([0.0549, 0.4670, 0.3111, ..., 0.0129, 0.0661, 0.9327]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3951, 0.3409, 0.2222, ..., 0.4533, 0.5999, 0.5088]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.078357696533203 seconds + +[20.6, 20.6, 20.76, 20.88, 20.68, 20.64, 20.36, 20.76, 20.88, 20.96] +[21.0, 20.96, 24.12, 26.84, 26.84, 28.92, 30.04, 30.92, 26.56, 25.12, 24.8, 25.2, 25.28, 25.52] +14.633433818817139 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.078357696533203, 'TIME_S_1KI': 100.78357696533203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 355.02523105621333, 'W': 24.261238712111865} +[20.6, 20.6, 20.76, 20.88, 20.68, 20.64, 20.36, 20.76, 20.88, 20.96, 20.52, 20.48, 20.56, 20.6, 20.6, 20.64, 20.64, 20.76, 20.8, 20.8] +372.08 +18.604 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.078357696533203, 'TIME_S_1KI': 100.78357696533203, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 355.02523105621333, 'W': 24.261238712111865, 'J_1KI': 3550.2523105621335, 'W_1KI': 242.61238712111864, 'W_D': 5.657238712111866, 'J_D': 82.78482829093929, 'W_D_1KI': 56.57238712111866, 'J_D_1KI': 565.7238712111866} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..741a72d --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 50.092703104019165, "TIME_S_1KI": 500.92703104019165, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1421.3682743358613, "W": 24.212233919725165, "J_1KI": 14213.682743358613, "W_1KI": 242.12233919725165, "W_D": 5.561233919725165, "J_D": 326.46972955346126, "W_D_1KI": 55.61233919725165, "J_D_1KI": 556.1233919725165} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..2787c93 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,47 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 
5e-05, "TIME_S": 50.092703104019165} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 19, 45, ..., 12499947, + 12499973, 12500000]), + col_indices=tensor([ 17397, 55872, 132943, ..., 437400, 464141, + 486359]), + values=tensor([0.6537, 0.5151, 0.3039, ..., 0.7629, 0.2656, 0.5446]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4135, 0.5444, 0.9798, ..., 0.8106, 0.6562, 0.9974]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 50.092703104019165 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 19, 45, ..., 12499947, + 12499973, 12500000]), + col_indices=tensor([ 17397, 55872, 132943, ..., 437400, 464141, + 486359]), + values=tensor([0.6537, 0.5151, 0.3039, ..., 0.7629, 0.2656, 0.5446]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4135, 0.5444, 0.9798, ..., 0.8106, 0.6562, 0.9974]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 50.092703104019165 seconds + +[20.76, 20.64, 20.64, 20.64, 20.48, 20.64, 20.6, 20.8, 21.08, 20.96] +[20.92, 20.92, 23.96, 24.8, 26.64, 28.44, 28.44, 30.32, 28.64, 28.92, 27.6, 26.6, 25.88, 25.0, 25.04, 25.08, 25.08, 25.16, 25.16, 25.12, 25.08, 24.88, 25.08, 25.12, 25.2, 25.28, 25.36, 25.32, 25.32, 25.04, 25.04, 25.28, 25.16, 25.12, 25.24, 25.16, 25.16, 25.36, 25.24, 25.2, 25.28, 25.12, 24.96, 25.12, 25.2, 25.32, 25.36, 25.48, 25.56, 25.52, 25.52, 25.36, 25.4, 25.2, 25.28, 25.56] +58.704549074172974 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 50.092703104019165, 'TIME_S_1KI': 500.92703104019165, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1421.3682743358613, 'W': 24.212233919725165} +[20.76, 20.64, 20.64, 20.64, 20.48, 20.64, 20.6, 20.8, 21.08, 20.96, 20.76, 20.4, 20.48, 20.92, 20.8, 20.92, 20.88, 20.72, 20.64, 21.0] +373.02 +18.651 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 50.092703104019165, 'TIME_S_1KI': 500.92703104019165, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1421.3682743358613, 'W': 24.212233919725165, 'J_1KI': 14213.682743358613, 'W_1KI': 242.12233919725165, 'W_D': 5.561233919725165, 'J_D': 326.46972955346126, 'W_D_1KI': 55.61233919725165, 'J_D_1KI': 
556.1233919725165} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..8bb383c --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1701, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.195863723754883, "TIME_S_1KI": 5.994040989861777, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 298.5689013671875, "W": 22.02886990511616, "J_1KI": 175.5255152070473, "W_1KI": 12.950540802537425, "W_D": 3.5278699051161624, "J_D": 47.81508294677735, "W_D_1KI": 2.0739975926608833, "J_D_1KI": 1.2192813595889966} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..7e66808 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.6172366142272949} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 11, ..., 249984, 249992, + 250000]), + col_indices=tensor([ 55, 33912, 7825, ..., 25553, 31300, 45367]), + values=tensor([0.2156, 0.3825, 0.1471, ..., 0.6075, 0.9514, 0.6641]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8972, 0.9948, 0.0628, ..., 0.4950, 0.5589, 0.8119]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.6172366142272949 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1701 -ss 50000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.195863723754883} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 10, ..., 249986, 249995, + 250000]), + col_indices=tensor([ 4095, 7631, 26458, ..., 36946, 37655, 49733]), + values=tensor([0.3588, 0.4994, 0.4557, ..., 0.6547, 0.8163, 0.6645]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0295, 0.0838, 0.1870, ..., 0.2542, 0.3969, 0.7673]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.195863723754883 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 10, ..., 249986, 249995, + 250000]), + col_indices=tensor([ 4095, 7631, 26458, ..., 36946, 37655, 49733]), + values=tensor([0.3588, 0.4994, 0.4557, ..., 0.6547, 0.8163, 0.6645]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0295, 0.0838, 0.1870, ..., 0.2542, 0.3969, 0.7673]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.195863723754883 seconds + +[20.6, 20.6, 20.68, 20.56, 20.68, 20.6, 20.6, 20.48, 20.36, 20.44] +[20.44, 20.16, 20.24, 22.04, 22.92, 24.8, 25.72, 25.84, 25.56, 24.68, 24.84, 25.0, 25.2] +13.55352783203125 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1701, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.195863723754883, 'TIME_S_1KI': 5.994040989861777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.5689013671875, 'W': 22.02886990511616} +[20.6, 20.6, 20.68, 20.56, 20.68, 20.6, 20.6, 20.48, 20.36, 20.44, 20.56, 20.52, 20.52, 20.44, 20.44, 20.44, 20.44, 20.56, 20.88, 20.84] +370.02 +18.500999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1701, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.195863723754883, 'TIME_S_1KI': 5.994040989861777, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.5689013671875, 'W': 22.02886990511616, 'J_1KI': 175.5255152070473, 'W_1KI': 12.950540802537425, 'W_D': 3.5278699051161624, 'J_D': 47.81508294677735, 'W_D_1KI': 2.0739975926608833, 'J_D_1KI': 1.2192813595889966} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..911c49b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 193, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.676417112350464, "TIME_S_1KI": 55.318223380054214, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 327.75970675468443, "W": 22.393652951640597, "J_1KI": 
1698.2368225631317, "W_1KI": 116.02928990487356, "W_D": 4.002652951640599, "J_D": 58.58393717646598, "W_D_1KI": 20.73913446445906, "J_D_1KI": 107.45665525626457} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..7534e40 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.431778192520142} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 95, ..., 2499895, + 2499944, 2500000]), + col_indices=tensor([ 14, 1180, 1352, ..., 49220, 49912, 49936]), + values=tensor([0.8618, 0.4205, 0.6419, ..., 0.4989, 0.5508, 0.1652]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3965, 0.7585, 0.8670, ..., 0.1152, 0.9413, 0.0865]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 5.431778192520142 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 193 -ss 50000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.676417112350464} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 106, ..., 2499905, + 2499942, 2500000]), + col_indices=tensor([ 275, 2452, 2625, ..., 47289, 48937, 49987]), + values=tensor([0.8108, 0.0031, 0.6812, ..., 0.9899, 0.5982, 0.1156]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1893, 0.7932, 0.1409, ..., 0.0408, 0.4757, 0.3205]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.676417112350464 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 106, ..., 2499905, + 2499942, 2500000]), + col_indices=tensor([ 275, 2452, 2625, ..., 47289, 48937, 49987]), + values=tensor([0.8108, 0.0031, 0.6812, ..., 0.9899, 0.5982, 0.1156]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1893, 0.7932, 0.1409, ..., 0.0408, 0.4757, 0.3205]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.676417112350464 seconds + +[20.32, 20.4, 20.88, 20.88, 20.88, 20.88, 20.64, 20.24, 20.48, 20.52] +[20.48, 20.64, 20.88, 22.0, 23.72, 25.32, 26.2, 26.2, 26.28, 25.08, 24.6, 24.56, 24.36, 24.36] +14.636276960372925 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.676417112350464, 'TIME_S_1KI': 55.318223380054214, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 327.75970675468443, 'W': 22.393652951640597} +[20.32, 20.4, 20.88, 20.88, 20.88, 20.88, 20.64, 20.24, 20.48, 20.52, 20.24, 20.24, 20.24, 20.4, 20.4, 20.2, 20.16, 20.08, 20.16, 20.24] +367.81999999999994 +18.391 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.676417112350464, 'TIME_S_1KI': 55.318223380054214, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 327.75970675468443, 'W': 22.393652951640597, 'J_1KI': 1698.2368225631317, 'W_1KI': 116.02928990487356, 'W_D': 4.002652951640599, 'J_D': 58.58393717646598, 'W_D_1KI': 20.73913446445906, 'J_D_1KI': 107.45665525626457} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..0d711cb --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 53.1586058139801, "TIME_S_1KI": 531.586058139801, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1662.8207085037227, "W": 23.65320095688087, "J_1KI": 16628.20708503723, "W_1KI": 236.53200956880872, "W_D": 4.901200956880867, "J_D": 344.55456842803903, "W_D_1KI": 49.01200956880867, "J_D_1KI": 490.12009568808674} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..142816b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], 
"MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 53.1586058139801} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 489, 973, ..., 24998974, + 24999478, 25000000]), + col_indices=tensor([ 275, 454, 699, ..., 49715, 49729, 49796]), + values=tensor([0.3350, 0.9556, 0.9308, ..., 0.7756, 0.4208, 0.8843]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3743, 0.4258, 0.0327, ..., 0.7931, 0.5462, 0.4257]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 53.1586058139801 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 489, 973, ..., 24998974, + 24999478, 25000000]), + col_indices=tensor([ 275, 454, 699, ..., 49715, 49729, 49796]), + values=tensor([0.3350, 0.9556, 0.9308, ..., 0.7756, 0.4208, 0.8843]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3743, 0.4258, 0.0327, ..., 0.7931, 0.5462, 0.4257]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 53.1586058139801 seconds + +[20.44, 20.76, 21.08, 21.12, 21.36, 21.16, 21.04, 21.0, 20.96, 20.84] +[21.0, 20.84, 20.84, 23.72, 24.72, 26.8, 28.0, 29.48, 28.44, 28.04, 28.08, 27.92, 28.6, 27.68, 27.36, 26.52, 25.72, 24.68, 24.64, 24.72, 24.64, 24.64, 24.68, 24.68, 24.56, 24.76, 24.76, 24.64, 24.68, 24.64, 24.8, 24.64, 25.04, 24.96, 24.84, 24.88, 24.68, 24.68, 24.64, 24.8, 24.72, 24.72, 24.68, 24.56, 24.36, 24.36, 24.48, 24.4, 24.44, 24.4, 24.6, 24.56, 24.72, 24.96, 24.92, 24.84, 24.76, 24.72, 24.76, 24.8, 24.88, 24.92, 24.76, 24.68, 24.56, 24.64, 24.6] +70.30002880096436 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 53.1586058139801, 'TIME_S_1KI': 531.586058139801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1662.8207085037227, 'W': 23.65320095688087} +[20.44, 20.76, 21.08, 21.12, 21.36, 21.16, 21.04, 21.0, 20.96, 20.84, 20.6, 20.6, 20.72, 20.72, 20.68, 20.56, 20.76, 20.64, 20.6, 20.68] +375.04 +18.752000000000002 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 53.1586058139801, 'TIME_S_1KI': 531.586058139801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1662.8207085037227, 'W': 23.65320095688087, 'J_1KI': 16628.20708503723, 'W_1KI': 
236.53200956880872, 'W_D': 4.901200956880867, 'J_D': 344.55456842803903, 'W_D_1KI': 49.01200956880867, 'J_D_1KI': 490.12009568808674} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..458d682 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 10429, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.529776096343994, "TIME_S_1KI": 1.0096630641810331, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 331.7018713665009, "W": 22.733124806095596, "J_1KI": 31.80572167671885, "W_1KI": 2.1797990992516634, "W_D": 4.373124806095593, "J_D": 63.8088117790222, "W_D_1KI": 0.41932350235838456, "J_D_1KI": 0.040207450604888735} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..f7f583d --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1077582836151123} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24997, 24998, 25000]), + col_indices=tensor([30956, 25020, 4290, ..., 1571, 5930, 34059]), + values=tensor([0.1925, 0.5429, 0.7430, ..., 0.0669, 0.5504, 0.8934]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9564, 0.4579, 0.9465, ..., 0.7236, 0.9546, 0.7676]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.1077582836151123 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 9744 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.809481859207153} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([47588, 45161, 40455, ..., 30522, 42036, 2005]), + values=tensor([0.2055, 0.2802, 0.2448, ..., 0.0926, 0.8451, 0.9361]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5605, 0.1853, 0.0043, ..., 0.6007, 0.1968, 0.9775]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.809481859207153 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10429 -ss 50000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.529776096343994} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24998, 24999, 25000]), + col_indices=tensor([20158, 23859, 20874, ..., 41939, 15422, 41283]), + values=tensor([0.7225, 0.1851, 0.6655, ..., 0.6086, 0.8791, 0.8414]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1130, 0.7539, 0.9598, ..., 0.4914, 0.7455, 0.6539]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.529776096343994 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24998, 24999, 25000]), + col_indices=tensor([20158, 23859, 20874, ..., 41939, 15422, 41283]), + values=tensor([0.7225, 0.1851, 0.6655, ..., 0.6086, 0.8791, 0.8414]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1130, 0.7539, 0.9598, ..., 0.4914, 0.7455, 0.6539]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.529776096343994 seconds + +[20.44, 20.44, 20.32, 20.64, 20.6, 20.48, 20.6, 20.36, 20.32, 20.16] +[20.36, 20.16, 20.76, 22.32, 23.88, 23.88, 25.12, 26.36, 26.64, 26.44, 26.08, 26.2, 25.88, 25.48] +14.591125249862671 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10429, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.529776096343994, 'TIME_S_1KI': 1.0096630641810331, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 331.7018713665009, 'W': 22.733124806095596} +[20.44, 20.44, 20.32, 20.64, 20.6, 20.48, 20.6, 20.36, 20.32, 20.16, 20.56, 20.32, 20.36, 20.56, 20.36, 20.28, 20.44, 20.16, 20.16, 20.44] +367.20000000000005 +18.360000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 10429, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.529776096343994, 'TIME_S_1KI': 1.0096630641810331, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 331.7018713665009, 'W': 22.733124806095596, 'J_1KI': 31.80572167671885, 'W_1KI': 2.1797990992516634, 'W_D': 4.373124806095593, 'J_D': 63.8088117790222, 'W_D_1KI': 0.41932350235838456, 'J_D_1KI': 0.040207450604888735} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..fea28ac --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 3217, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.183324813842773, "TIME_S_1KI": 3.1654724320307035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 313.5762239074707, "W": 23.142141761096784, "J_1KI": 97.47473543906457, "W_1KI": 7.193702754459679, "W_D": 4.7631417610967866, "J_D": 64.54061265373231, "W_D_1KI": 1.4806160276956128, "J_D_1KI": 0.46024744410805496} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..dbda68a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 
0.3263556957244873} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 124996, 124997, + 125000]), + col_indices=tensor([ 8999, 37078, 2648, ..., 24880, 43913, 47673]), + values=tensor([0.7939, 0.1706, 0.9831, ..., 0.2838, 0.4924, 0.0921]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.2834, 0.1318, 0.3567, ..., 0.3503, 0.0519, 0.6169]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.3263556957244873 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3217 -ss 50000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.183324813842773} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 7, ..., 125000, 125000, + 125000]), + col_indices=tensor([ 9508, 26799, 1812, ..., 32912, 38580, 39384]), + values=tensor([0.1038, 0.2683, 0.7729, ..., 0.6337, 0.2232, 0.8870]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.7924, 0.9644, 0.0933, ..., 0.2945, 0.3904, 0.9557]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.183324813842773 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 7, ..., 125000, 125000, + 125000]), + col_indices=tensor([ 9508, 26799, 1812, ..., 32912, 38580, 39384]), + values=tensor([0.1038, 0.2683, 0.7729, ..., 0.6337, 0.2232, 0.8870]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.7924, 0.9644, 0.0933, ..., 0.2945, 0.3904, 0.9557]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.183324813842773 seconds + +[20.36, 20.32, 20.32, 20.32, 20.36, 20.28, 20.48, 20.52, 20.64, 20.52] +[20.52, 20.48, 21.56, 23.32, 25.2, 26.32, 27.16, 27.16, 26.96, 26.76, 25.76, 25.84, 25.52] +13.550008773803711 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3217, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.183324813842773, 'TIME_S_1KI': 3.1654724320307035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 313.5762239074707, 'W': 23.142141761096784} +[20.36, 20.32, 20.32, 20.32, 20.36, 20.28, 20.48, 20.52, 20.64, 20.52, 20.52, 20.6, 20.44, 20.6, 20.48, 20.32, 20.2, 20.36, 20.4, 20.48] +367.5799999999999 +18.378999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 3217, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.183324813842773, 'TIME_S_1KI': 3.1654724320307035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 313.5762239074707, 'W': 23.142141761096784, 'J_1KI': 97.47473543906457, 'W_1KI': 7.193702754459679, 'W_D': 4.7631417610967866, 'J_D': 64.54061265373231, 'W_D_1KI': 1.4806160276956128, 'J_D_1KI': 0.46024744410805496} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..769abe1 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 97993, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.781827449798584, "TIME_S_1KI": 0.1100265064831017, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 315.41705104827884, "W": 21.541217597275665, "J_1KI": 3.218771249459439, "W_1KI": 0.21982404454681115, "W_D": 2.808217597275668, "J_D": 41.119296494484, "W_D_1KI": 0.02865732855689353, "J_D_1KI": 0.00029244260872606745} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..bb7f54b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0191800594329834} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([3313, 1621, 3812, ..., 4525, 1664, 4698]), + values=tensor([0.0941, 0.2796, 0.9707, ..., 0.4661, 0.7642, 0.2416]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.5336, 0.9402, 0.6361, ..., 0.0126, 0.4753, 0.7232]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.0191800594329834 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 54744 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.86583399772644} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2500, 2500]), + col_indices=tensor([ 440, 3019, 2397, ..., 2648, 4224, 1471]), + values=tensor([0.9686, 0.9548, 0.6770, ..., 0.0683, 0.1247, 0.7029]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3648, 0.8360, 0.9424, ..., 0.5773, 0.5768, 0.8650]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 5.86583399772644 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 97993 -ss 5000 -sd 0.0001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.781827449798584} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 2498, 2500, 2500]), + col_indices=tensor([3713, 3378, 4473, ..., 4286, 2104, 3764]), + values=tensor([0.2566, 0.6316, 0.0221, ..., 0.9864, 0.6559, 0.8912]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3213, 0.3541, 0.7168, ..., 0.5598, 0.7087, 0.6560]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.781827449798584 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 2498, 2500, 2500]), + col_indices=tensor([3713, 3378, 4473, ..., 4286, 2104, 3764]), + values=tensor([0.2566, 0.6316, 0.0221, ..., 0.9864, 0.6559, 0.8912]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3213, 0.3541, 0.7168, ..., 0.5598, 0.7087, 0.6560]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.781827449798584 seconds + +[20.44, 20.48, 20.88, 20.96, 20.96, 20.92, 20.72, 20.72, 20.56, 20.48] +[20.44, 20.32, 20.6, 21.56, 23.32, 24.04, 24.72, 24.96, 24.56, 23.56, 23.68, 23.44, 23.52, 23.56] +14.642489433288574 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 97993, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.781827449798584, 'TIME_S_1KI': 0.1100265064831017, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.41705104827884, 'W': 21.541217597275665} +[20.44, 20.48, 20.88, 20.96, 20.96, 20.92, 20.72, 20.72, 20.56, 20.48, 20.36, 20.44, 20.44, 20.32, 20.4, 20.68, 21.08, 21.76, 21.76, 21.88] +374.65999999999997 +18.732999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 97993, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.781827449798584, 'TIME_S_1KI': 0.1100265064831017, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.41705104827884, 'W': 21.541217597275665, 'J_1KI': 3.218771249459439, 'W_1KI': 0.21982404454681115, 'W_D': 2.808217597275668, 'J_D': 41.119296494484, 'W_D_1KI': 0.02865732855689353, 'J_D_1KI': 0.00029244260872606745} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..1de7362 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 17801, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.451899290084839, "TIME_S_1KI": 0.5871523672875029, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 335.3643196678161, "W": 22.918247866906853, "J_1KI": 18.839633709781257, "W_1KI": 
1.2874696852371694, "W_D": 4.382247866906855, "J_D": 64.12573871421813, "W_D_1KI": 0.24617987005824704, "J_D_1KI": 0.013829552837382564} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..6e9f06a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06674790382385254} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 24994, 24998, 25000]), + col_indices=tensor([ 153, 1166, 1591, ..., 4476, 1654, 3013]), + values=tensor([0.9133, 0.8479, 0.0929, ..., 0.2328, 0.6185, 0.0308]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9627, 0.4329, 0.3045, ..., 0.2813, 0.7730, 0.0924]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.06674790382385254 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 15730 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.27815866470337} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 5, ..., 24988, 24995, 25000]), + col_indices=tensor([1294, 410, 634, ..., 1096, 2182, 3875]), + values=tensor([0.7576, 0.8466, 0.6529, ..., 0.8373, 0.3120, 0.9707]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6367, 0.3440, 0.8123, ..., 0.8035, 0.7344, 0.3858]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.27815866470337 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17801 -ss 5000 -sd 0.001 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.451899290084839} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 5, ..., 24991, 24996, 25000]), + col_indices=tensor([ 139, 2091, 2694, ..., 3635, 3692, 4401]), + values=tensor([0.7198, 0.5125, 0.0166, ..., 0.6335, 0.1279, 0.4059]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0330, 0.5472, 0.9005, ..., 0.3693, 0.0673, 0.4597]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.451899290084839 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 5, ..., 24991, 24996, 25000]), + col_indices=tensor([ 139, 2091, 2694, ..., 3635, 3692, 4401]), + values=tensor([0.7198, 0.5125, 0.0166, ..., 0.6335, 0.1279, 0.4059]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0330, 0.5472, 0.9005, ..., 0.3693, 0.0673, 0.4597]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.451899290084839 seconds + +[20.28, 20.36, 20.4, 20.6, 20.68, 20.64, 20.52, 20.48, 20.32, 20.24] +[20.4, 20.48, 23.8, 23.8, 25.36, 27.2, 28.0, 28.44, 25.2, 23.88, 23.72, 23.92, 24.08, 24.08] +14.63306975364685 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17801, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.451899290084839, 'TIME_S_1KI': 0.5871523672875029, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.3643196678161, 'W': 22.918247866906853} +[20.28, 20.36, 20.4, 20.6, 20.68, 20.64, 20.52, 20.48, 20.32, 20.24, 20.44, 20.64, 20.96, 20.68, 20.56, 20.68, 20.72, 20.76, 20.76, 20.96] +370.71999999999997 +18.535999999999998 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 17801, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.451899290084839, 'TIME_S_1KI': 0.5871523672875029, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 335.3643196678161, 'W': 22.918247866906853, 'J_1KI': 18.839633709781257, 'W_1KI': 1.2874696852371694, 'W_D': 4.382247866906855, 'J_D': 64.12573871421813, 'W_D_1KI': 0.24617987005824704, 'J_D_1KI': 0.013829552837382564} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..d1eb1c9 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 1927, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.278133630752563, "TIME_S_1KI": 5.333748640764174, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 315.9295339012146, "W": 21.68469080536072, "J_1KI": 163.94890186881918, "W_1KI": 11.253082929611168, "W_D": 3.326690805360723, "J_D": 48.46736737012868, "W_D_1KI": 1.7263574495904115, "J_D_1KI": 0.8958782820915472} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..9788e34 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.5448694229125977} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 55, 104, ..., 249896, 249951, + 250000]), + col_indices=tensor([ 128, 142, 245, ..., 4657, 4734, 4838]), + values=tensor([0.1820, 0.1438, 0.1562, ..., 0.6881, 0.0081, 0.4382]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7604, 0.0772, 0.6951, ..., 0.4926, 0.6864, 0.3702]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.5448694229125977 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1927 -ss 5000 -sd 0.01 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.278133630752563} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 109, ..., 249898, 249957, + 250000]), + col_indices=tensor([ 25, 140, 158, ..., 4486, 4823, 4835]), + values=tensor([0.8176, 0.1521, 0.6094, ..., 0.2740, 0.3181, 0.5161]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4533, 0.4508, 0.3256, ..., 0.6556, 0.1742, 0.9221]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.278133630752563 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 109, ..., 249898, 249957, + 250000]), + col_indices=tensor([ 25, 140, 158, ..., 4486, 4823, 4835]), + values=tensor([0.8176, 0.1521, 0.6094, ..., 0.2740, 0.3181, 0.5161]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4533, 0.4508, 0.3256, ..., 0.6556, 0.1742, 0.9221]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.278133630752563 seconds + +[20.28, 20.32, 20.56, 20.6, 20.48, 20.56, 20.76, 20.68, 20.88, 20.88] +[20.72, 20.48, 20.56, 21.28, 22.24, 24.0, 24.68, 25.2, 25.04, 23.88, 23.88, 24.04, 24.2, 24.4] +14.56924319267273 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1927, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.278133630752563, 'TIME_S_1KI': 5.333748640764174, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.9295339012146, 'W': 21.68469080536072} +[20.28, 20.32, 20.56, 20.6, 20.48, 20.56, 20.76, 20.68, 20.88, 20.88, 20.16, 20.08, 20.16, 20.16, 20.24, 20.36, 20.08, 20.2, 20.28, 20.2] +367.15999999999997 +18.357999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 1927, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.278133630752563, 'TIME_S_1KI': 5.333748640764174, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 315.9295339012146, 'W': 21.68469080536072, 'J_1KI': 163.94890186881918, 'W_1KI': 11.253082929611168, 'W_D': 3.326690805360723, 'J_D': 48.46736737012868, 'W_D_1KI': 1.7263574495904115, 'J_D_1KI': 0.8958782820915472} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..942bb7d --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 393, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.605815887451172, "TIME_S_1KI": 26.986808873921557, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 343.57593024253845, "W": 23.547531420592268, "J_1KI": 874.239008250734, "W_1KI": 59.917382749598644, "W_D": 5.065531420592269, "J_D": 73.90985657548903, "W_D_1KI": 12.889392927715695, "J_D_1KI": 32.79743747510355} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..5869b7e --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.6673474311828613} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 248, 507, ..., 1249488, + 1249771, 1250000]), + col_indices=tensor([ 0, 22, 35, ..., 4958, 4983, 4999]), + values=tensor([0.4233, 0.1325, 0.2059, ..., 0.9744, 0.8399, 0.1366]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6304, 0.5951, 0.1863, ..., 0.0552, 0.3796, 0.7701]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 2.6673474311828613 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 393 -ss 5000 -sd 0.05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.605815887451172} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 284, 548, ..., 1249494, + 1249762, 1250000]), + col_indices=tensor([ 9, 27, 28, ..., 4894, 4914, 4954]), + values=tensor([0.8223, 0.3728, 0.3102, ..., 0.8633, 0.4361, 0.2072]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4176, 0.5149, 0.4165, ..., 0.2240, 0.9505, 0.5242]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.605815887451172 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 284, 548, ..., 1249494, + 1249762, 1250000]), + col_indices=tensor([ 9, 27, 28, ..., 4894, 4914, 4954]), + values=tensor([0.8223, 0.3728, 0.3102, ..., 0.8633, 0.4361, 0.2072]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4176, 0.5149, 0.4165, ..., 0.2240, 0.9505, 0.5242]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.605815887451172 seconds + +[20.32, 20.32, 20.44, 20.4, 20.52, 20.52, 20.64, 20.8, 20.92, 20.88] +[20.92, 21.04, 24.24, 26.28, 27.72, 27.72, 28.36, 29.08, 25.2, 24.24, 24.28, 24.28, 24.28, 24.2] +14.59074091911316 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 393, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.605815887451172, 'TIME_S_1KI': 26.986808873921557, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 343.57593024253845, 'W': 23.547531420592268} +[20.32, 20.32, 20.44, 20.4, 20.52, 20.52, 20.64, 20.8, 20.92, 20.88, 20.24, 20.28, 20.52, 20.64, 20.6, 20.56, 20.52, 20.48, 20.48, 20.56] +369.64 +18.482 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 393, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.605815887451172, 'TIME_S_1KI': 26.986808873921557, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 343.57593024253845, 'W': 23.547531420592268, 'J_1KI': 874.239008250734, 'W_1KI': 59.917382749598644, 'W_D': 5.065531420592269, 'J_D': 73.90985657548903, 'W_D_1KI': 12.889392927715695, 'J_D_1KI': 32.79743747510355} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..5939696 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 194, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.912250995635986, "TIME_S_1KI": 56.248716472350445, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 326.6964877033234, "W": 22.305376949262342, "J_1KI": 1684.0025139346567, "W_1KI": 114.97616984155846, "W_D": 3.7133769492623436, "J_D": 54.38810604286199, "W_D_1KI": 19.14111829516672, "J_D_1KI": 98.66555822250888} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..469c726 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.392450332641602} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse 
CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 508, 1024, ..., 2499064, + 2499534, 2500000]), + col_indices=tensor([ 1, 4, 10, ..., 4973, 4986, 4993]), + values=tensor([0.4448, 0.2935, 0.6096, ..., 0.6772, 0.8304, 0.1969]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5801, 0.6662, 0.3258, ..., 0.9572, 0.7518, 0.3845]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 5.392450332641602 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 194 -ss 5000 -sd 0.1 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.912250995635986} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 510, 1021, ..., 2499033, + 2499527, 2500000]), + col_indices=tensor([ 27, 33, 84, ..., 4958, 4963, 4982]), + values=tensor([0.5404, 0.4129, 0.3312, ..., 0.4218, 0.5770, 0.4495]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8518, 0.9058, 0.3829, ..., 0.5160, 0.0011, 0.3108]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.912250995635986 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 510, 1021, ..., 2499033, + 2499527, 2500000]), + col_indices=tensor([ 27, 33, 84, ..., 4958, 4963, 4982]), + values=tensor([0.5404, 0.4129, 0.3312, ..., 0.4218, 0.5770, 0.4495]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8518, 0.9058, 0.3829, ..., 0.5160, 0.0011, 0.3108]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.912250995635986 seconds + +[20.28, 20.36, 20.6, 20.76, 20.96, 20.8, 20.76, 20.64, 20.48, 20.76] +[20.88, 20.88, 20.64, 21.88, 22.92, 24.88, 26.08, 26.2, 25.72, 24.96, 24.48, 24.56, 24.76, 24.68] +14.646535158157349 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 194, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.912250995635986, 'TIME_S_1KI': 56.248716472350445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 326.6964877033234, 'W': 22.305376949262342} +[20.28, 20.36, 20.6, 20.76, 20.96, 20.8, 20.76, 20.64, 20.48, 20.76, 20.68, 20.08, 20.2, 20.52, 20.52, 20.72, 20.72, 21.32, 21.0, 21.08] +371.84 +18.592 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 194, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.912250995635986, 'TIME_S_1KI': 56.248716472350445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 326.6964877033234, 'W': 22.305376949262342, 'J_1KI': 1684.0025139346567, 'W_1KI': 114.97616984155846, 'W_D': 3.7133769492623436, 'J_D': 54.38810604286199, 'W_D_1KI': 19.14111829516672, 'J_D_1KI': 98.66555822250888} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..8d5c96a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.527036905288696, "TIME_S_1KI": 105.27036905288696, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 363.19735393524167, "W": 23.113061201495835, "J_1KI": 3631.973539352417, "W_1KI": 231.13061201495836, "W_D": 4.666061201495836, "J_D": 73.32222533869742, "W_D_1KI": 46.66061201495836, "J_D_1KI": 466.6061201495836} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..9300b4b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.2 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.527036905288696} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse 
CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 996, 2006, ..., 4997968, + 4998974, 5000000]), + col_indices=tensor([ 4, 8, 12, ..., 4976, 4983, 4993]), + values=tensor([0.4991, 0.7024, 0.1537, ..., 0.4726, 0.2476, 0.0939]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.1571, 0.7792, 0.7385, ..., 0.2151, 0.4821, 0.5033]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.527036905288696 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 996, 2006, ..., 4997968, + 4998974, 5000000]), + col_indices=tensor([ 4, 8, 12, ..., 4976, 4983, 4993]), + values=tensor([0.4991, 0.7024, 0.1537, ..., 0.4726, 0.2476, 0.0939]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.1571, 0.7792, 0.7385, ..., 0.2151, 0.4821, 0.5033]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.527036905288696 seconds + +[20.48, 20.44, 20.48, 20.64, 20.72, 20.72, 20.68, 20.72, 20.68, 20.6] +[20.4, 20.48, 21.0, 23.96, 25.48, 27.4, 28.52, 27.56, 26.24, 25.04, 24.48, 24.4, 24.4, 24.32, 24.48] +15.71394419670105 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.527036905288696, 'TIME_S_1KI': 105.27036905288696, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 363.19735393524167, 'W': 23.113061201495835} +[20.48, 20.44, 20.48, 20.64, 20.72, 20.72, 20.68, 20.72, 20.68, 20.6, 20.4, 20.16, 20.32, 20.32, 20.16, 20.56, 20.48, 20.52, 20.48, 20.24] +368.94 +18.447 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.527036905288696, 'TIME_S_1KI': 105.27036905288696, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 363.19735393524167, 'W': 23.113061201495835, 'J_1KI': 3631.973539352417, 'W_1KI': 231.13061201495836, 'W_D': 4.666061201495836, 'J_D': 73.32222533869742, 'W_D_1KI': 46.66061201495836, 'J_D_1KI': 466.6061201495836} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..fef5f93 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, 
"MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.927062749862671, "TIME_S_1KI": 159.2706274986267, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 517.2976983642578, "W": 23.521442010494468, "J_1KI": 5172.9769836425785, "W_1KI": 235.21442010494468, "W_D": 5.048442010494465, "J_D": 111.02837280082699, "W_D_1KI": 50.484420104944654, "J_D_1KI": 504.8442010494465} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..eaea98f --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.3 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 15.927062749862671} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1527, 3024, ..., 7496971, + 7498460, 7500000]), + col_indices=tensor([ 0, 3, 4, ..., 4985, 4992, 4996]), + values=tensor([0.7552, 0.2419, 0.2481, ..., 0.7383, 0.7786, 0.4470]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.6238, 0.3406, 0.3665, ..., 0.0040, 0.2464, 0.8126]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 15.927062749862671 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1527, 3024, ..., 7496971, + 7498460, 7500000]), + col_indices=tensor([ 0, 3, 4, ..., 4985, 4992, 4996]), + values=tensor([0.7552, 0.2419, 0.2481, ..., 0.7383, 0.7786, 0.4470]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.6238, 0.3406, 0.3665, ..., 0.0040, 0.2464, 0.8126]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 15.927062749862671 seconds + +[20.4, 20.32, 20.32, 20.4, 20.48, 20.52, 20.8, 20.76, 20.76, 20.64] +[20.8, 20.8, 21.12, 22.88, 23.72, 26.16, 27.92, 27.96, 27.28, 26.72, 25.24, 24.32, 24.24, 24.16, 24.2, 24.68, 24.8, 24.68, 24.68, 24.56, 24.64] +21.99260139465332 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.927062749862671, 'TIME_S_1KI': 159.2706274986267, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 517.2976983642578, 'W': 23.521442010494468} +[20.4, 20.32, 20.32, 20.4, 20.48, 20.52, 20.8, 20.76, 20.76, 20.64, 20.56, 20.16, 20.24, 20.6, 20.64, 20.76, 20.8, 20.6, 20.28, 20.44] +369.46000000000004 +18.473000000000003 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 15.927062749862671, 'TIME_S_1KI': 159.2706274986267, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 517.2976983642578, 'W': 23.521442010494468, 'J_1KI': 5172.9769836425785, 'W_1KI': 235.21442010494468, 'W_D': 5.048442010494465, 'J_D': 111.02837280082699, 'W_D_1KI': 50.484420104944654, 'J_D_1KI': 504.8442010494465} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..d2af780 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 22.84249472618103, "TIME_S_1KI": 228.4249472618103, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 682.8522075366976, "W": 24.128078827131084, "J_1KI": 6828.522075366976, "W_1KI": 241.28078827131083, "W_D": 3.4220788271310845, "J_D": 96.84874200773261, "W_D_1KI": 34.220788271310845, "J_D_1KI": 342.2078827131084} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..c75f057 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.4 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 22.84249472618103} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2006, 4067, ..., 9995955, + 9997971, 10000000]), + col_indices=tensor([ 1, 3, 4, ..., 4995, 4998, 4999]), + values=tensor([0.5438, 0.4529, 0.4674, ..., 0.4313, 0.1734, 0.8643]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4316, 0.5719, 0.8319, ..., 0.7407, 0.2442, 0.5797]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 22.84249472618103 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2006, 4067, ..., 9995955, + 9997971, 10000000]), + col_indices=tensor([ 1, 3, 4, ..., 4995, 4998, 4999]), + values=tensor([0.5438, 0.4529, 0.4674, ..., 0.4313, 0.1734, 0.8643]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4316, 0.5719, 0.8319, ..., 0.7407, 0.2442, 0.5797]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 22.84249472618103 seconds + +[26.88, 26.16, 25.48, 25.52, 24.68, 24.6, 24.36, 24.16, 24.16, 24.28] +[24.4, 24.52, 24.52, 28.04, 29.2, 30.8, 31.68, 30.2, 29.88, 28.12, 26.84, 25.72, 24.28, 24.16, 24.04, 24.12, 24.24, 24.24, 24.4, 24.44, 24.48, 24.52, 24.68, 24.48, 24.68, 24.6, 24.68] +28.301142930984497 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 22.84249472618103, 'TIME_S_1KI': 228.4249472618103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 682.8522075366976, 'W': 24.128078827131084} +[26.88, 26.16, 25.48, 25.52, 24.68, 24.6, 24.36, 24.16, 24.16, 24.28, 20.44, 20.64, 20.68, 20.8, 20.92, 21.32, 21.36, 21.44, 21.44, 21.2] +414.12 +20.706 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 22.84249472618103, 'TIME_S_1KI': 228.4249472618103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 682.8522075366976, 'W': 24.128078827131084, 'J_1KI': 6828.522075366976, 'W_1KI': 241.28078827131083, 'W_D': 3.4220788271310845, 'J_D': 96.84874200773261, 'W_D_1KI': 34.220788271310845, 'J_D_1KI': 342.2078827131084} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..1c5a213 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 
@@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.49390745162964, "TIME_S_1KI": 264.9390745162964, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 833.4256306743622, "W": 24.07917164557845, "J_1KI": 8334.256306743622, "W_1KI": 240.79171645578452, "W_D": 5.603171645578453, "J_D": 193.9363584108353, "W_D_1KI": 56.031716455784526, "J_D_1KI": 560.3171645578453} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..99ea343 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.5 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.49390745162964} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2475, 4991, ..., 12495070, + 12497536, 12500000]), + col_indices=tensor([ 3, 6, 7, ..., 4992, 4996, 4999]), + values=tensor([0.7861, 0.1444, 0.2009, ..., 0.5207, 0.8919, 0.5019]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4801, 0.0235, 0.0420, ..., 0.5930, 0.2408, 0.0610]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 26.49390745162964 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2475, 4991, ..., 12495070, + 12497536, 12500000]), + col_indices=tensor([ 3, 6, 7, ..., 4992, 4996, 4999]), + values=tensor([0.7861, 0.1444, 0.2009, ..., 0.5207, 0.8919, 0.5019]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4801, 0.0235, 0.0420, ..., 0.5930, 0.2408, 0.0610]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 26.49390745162964 seconds + +[20.72, 20.56, 20.56, 20.36, 20.36, 20.36, 20.28, 20.56, 20.72, 21.08] +[21.48, 21.52, 24.72, 26.76, 28.04, 29.8, 31.48, 31.48, 29.76, 28.44, 27.2, 25.96, 25.12, 24.52, 24.64, 24.64, 24.72, 24.52, 24.56, 24.68, 24.68, 24.6, 24.52, 24.2, 24.32, 24.24, 24.16, 24.4, 24.44, 24.44, 24.44, 24.6, 24.36] +34.611889600753784 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.49390745162964, 'TIME_S_1KI': 264.9390745162964, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.4256306743622, 'W': 24.07917164557845} +[20.72, 20.56, 20.56, 20.36, 20.36, 20.36, 20.28, 20.56, 20.72, 21.08, 20.64, 20.64, 20.36, 20.36, 20.56, 20.64, 20.68, 20.56, 20.56, 20.36] +369.52 +18.476 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.49390745162964, 'TIME_S_1KI': 264.9390745162964, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.4256306743622, 'W': 24.07917164557845, 'J_1KI': 8334.256306743622, 'W_1KI': 240.79171645578452, 'W_D': 5.603171645578453, 'J_D': 193.9363584108353, 'W_D_1KI': 56.031716455784526, 'J_D_1KI': 560.3171645578453} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..732c4d4 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 289937, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.948570489883423, "TIME_S_1KI": 0.037761894790535266, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 337.0917576313019, "W": 22.992700250818316, "J_1KI": 1.1626379442130597, "W_1KI": 0.07930240104166876, "W_D": 4.582700250818316, "J_D": 67.18612713575365, "W_D_1KI": 0.015805848342289243, "J_D_1KI": 5.4514768181671336e-05} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..90b44fb --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 
25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.012730121612548828} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 601, 2647, 820, 2703, 4832, 2905, 4036, 996, 2456, + 2955, 1267, 3283, 2251, 4368, 2032, 1143, 1874, 4481, + 3312, 337, 3271, 2673, 2707, 912, 1355, 576, 4171, + 1338, 4509, 2894, 3669, 436, 629, 3354, 3800, 1938, + 3841, 3356, 3452, 2739, 639, 4726, 1185, 2215, 4780, + 2365, 813, 3529, 3133, 2027, 1430, 1360, 1560, 4504, + 4891, 990, 715, 1174, 4133, 1335, 2115, 2803, 644, + 1222, 359, 4157, 1393, 961, 2251, 4773, 836, 2710, + 702, 1699, 2540, 3245, 4452, 1916, 3004, 2947, 4627, + 2897, 3573, 4136, 3724, 2543, 4225, 4206, 1697, 2467, + 4746, 2725, 1632, 1936, 4003, 4667, 837, 3403, 2009, + 3106, 4241, 1632, 143, 2900, 2184, 2389, 134, 1255, + 455, 3988, 4547, 3113, 3784, 246, 1055, 1579, 2608, + 1441, 4808, 3117, 2320, 2723, 2732, 1022, 4658, 752, + 2092, 3461, 403, 1092, 2475, 1500, 4745, 1977, 2013, + 1043, 1615, 2577, 280, 4124, 4918, 3583, 3155, 4834, + 2410, 1454, 4710, 2436, 3776, 3032, 3163, 332, 2083, + 3800, 348, 742, 2269, 4249, 2249, 388, 3899, 1000, + 3276, 3790, 482, 1626, 3791, 386, 1380, 385, 518, + 3261, 3414, 3411, 1506, 372, 2674, 2798, 1667, 3725, + 190, 3734, 1581, 3514, 2910, 3386, 1246, 4529, 2936, + 3830, 2148, 4608, 2142, 767, 3320, 1467, 118, 736, + 1289, 3485, 482, 1713, 920, 523, 2549, 1394, 1179, + 1453, 3629, 477, 885, 4060, 1379, 4354, 1610, 3955, + 4389, 2465, 337, 97, 1261, 1276, 880, 2430, 2803, + 1291, 2721, 585, 2387, 4856, 993, 177, 4024, 1337, + 4378, 435, 408, 1205, 4, 2496, 4066, 296, 288, + 2154, 1297, 3984, 4892, 1675, 3223, 2466]), + values=tensor([0.2688, 0.3138, 0.1347, 0.8899, 0.0694, 0.1416, 0.2868, + 0.2019, 0.8985, 0.0861, 0.2909, 0.4503, 0.7663, 0.8882, + 0.6672, 0.1346, 0.9398, 0.2159, 0.4799, 0.2790, 0.3866, + 0.2729, 0.6835, 0.4176, 0.9415, 0.7950, 0.5659, 0.4247, + 0.4627, 0.0016, 0.2802, 0.5691, 0.4545, 0.9589, 0.5833, + 0.2407, 0.8459, 0.0609, 0.7229, 0.1587, 0.3799, 0.2604, + 0.0299, 0.6751, 0.8528, 0.9681, 0.0567, 0.8653, 0.8227, + 0.8273, 0.6799, 0.0354, 0.3989, 0.1205, 0.6402, 0.1199, + 0.3054, 0.1464, 0.1989, 0.0387, 0.3720, 0.5942, 0.7253, + 0.7730, 0.9054, 0.0855, 0.5753, 0.3128, 0.7859, 0.4565, + 0.8518, 0.4282, 0.9370, 0.5476, 0.3415, 0.1584, 0.4788, + 0.2685, 0.1433, 0.3934, 0.1639, 0.1743, 0.9037, 0.0304, + 0.0289, 0.0705, 0.5423, 0.6257, 0.8142, 0.7578, 0.3516, + 0.0327, 0.0056, 0.9367, 0.3464, 0.2720, 0.5506, 0.6244, + 0.9778, 0.9403, 0.2695, 0.1008, 0.2814, 0.4022, 0.0750, + 0.8589, 0.5073, 0.2768, 0.4090, 0.0915, 0.6257, 0.7999, + 0.6904, 0.8703, 0.1142, 0.1298, 0.5992, 0.4302, 0.0539, + 0.7905, 0.9381, 0.9895, 0.8549, 0.3053, 0.2672, 0.8126, + 0.5440, 0.4082, 0.4559, 0.5422, 0.5229, 0.1070, 0.6759, + 0.9088, 0.1120, 0.0848, 0.1772, 0.0720, 0.8290, 0.5142, + 0.0465, 0.4337, 0.4749, 0.7655, 0.4175, 0.8665, 0.2208, + 0.6756, 0.9278, 0.6977, 0.7861, 0.8161, 0.4230, 0.8317, + 0.1717, 0.0383, 0.6928, 0.6239, 0.7675, 0.9077, 0.2548, + 0.9243, 0.7360, 0.7612, 0.0838, 0.0670, 0.8799, 0.8976, + 0.6367, 0.4875, 0.4382, 0.0454, 0.5556, 0.5205, 
0.8555, + 0.8390, 0.6880, 0.8890, 0.5970, 0.9613, 0.7713, 0.4355, + 0.0933, 0.7601, 0.9627, 0.3532, 0.8675, 0.4814, 0.2521, + 0.6473, 0.8370, 0.9626, 0.0085, 0.0901, 0.8755, 0.5072, + 0.9504, 0.7596, 0.2658, 0.8293, 0.6634, 0.4401, 0.0682, + 0.6406, 0.9649, 0.2363, 0.8410, 0.6169, 0.9731, 0.1306, + 0.2698, 0.6020, 0.0496, 0.3126, 0.8880, 0.7892, 0.7667, + 0.6466, 0.0659, 0.7587, 0.7496, 0.6160, 0.2212, 0.0833, + 0.9146, 0.0286, 0.3379, 0.5728, 0.8427, 0.7370, 0.7738, + 0.6182, 0.3534, 0.1226, 0.0015, 0.7059, 0.3466, 0.3941, + 0.7962, 0.2804, 0.4929, 0.7827, 0.0766, 0.2294, 0.8494, + 0.9943, 0.0815, 0.8720, 0.8261, 0.8846]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4430, 0.7360, 0.8513, ..., 0.2058, 0.5954, 0.5363]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.012730121612548828 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 82481 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.9870200157165527} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1850, 2920, 982, 2920, 4207, 4970, 436, 2573, 4448, + 4877, 4672, 2082, 4283, 4267, 2430, 1672, 1924, 2492, + 2408, 375, 2305, 3583, 3031, 1418, 2140, 1197, 1752, + 3076, 720, 312, 2699, 3140, 1513, 401, 3552, 13, + 806, 4950, 885, 1841, 3287, 275, 2117, 560, 18, + 1477, 2688, 1794, 1927, 4953, 3645, 3622, 4539, 3985, + 3945, 2233, 4167, 2147, 1082, 1206, 1252, 1066, 1078, + 321, 808, 1999, 635, 1039, 3245, 1630, 1008, 4182, + 1408, 2667, 193, 3182, 772, 1491, 608, 1914, 2811, + 1620, 3712, 1794, 3637, 3266, 214, 1284, 4462, 4923, + 2463, 2700, 3904, 4098, 3900, 3027, 4116, 3580, 332, + 4692, 353, 653, 1766, 2037, 590, 2523, 136, 4354, + 4126, 2451, 226, 2678, 2130, 4889, 2592, 1660, 3477, + 4537, 2214, 1017, 1706, 4859, 3869, 167, 4374, 4799, + 1507, 1665, 4935, 3083, 2008, 4174, 859, 984, 2840, + 863, 82, 79, 4175, 3907, 4158, 4533, 3431, 2281, + 2787, 3034, 4208, 2453, 2306, 4607, 1125, 4475, 945, + 2063, 2439, 3548, 3678, 3034, 1770, 1726, 3619, 4461, + 647, 1318, 4963, 2847, 798, 1118, 1938, 2152, 3584, + 162, 4914, 1173, 3329, 4768, 2504, 2250, 4542, 1812, + 1545, 228, 2, 4710, 208, 2941, 770, 2538, 3754, + 4746, 2065, 1748, 2708, 2153, 2280, 3564, 3494, 4959, + 1719, 167, 854, 1084, 801, 1756, 2522, 2179, 3363, + 4832, 639, 1945, 2876, 4790, 630, 3306, 2308, 3577, + 253, 3942, 331, 3878, 976, 3355, 242, 1100, 869, + 105, 4517, 3895, 3065, 4030, 177, 4481, 2908, 861, + 1478, 4165, 3720, 1447, 2081, 2042, 4335, 1110, 2494, + 4959, 3445, 552, 1961, 1713, 2677, 2157]), + values=tensor([0.2973, 0.0528, 0.0593, 0.4373, 0.3149, 0.1720, 0.8405, + 0.5692, 0.8566, 0.6937, 0.9133, 0.4677, 0.7516, 0.1147, + 0.9773, 0.0717, 0.0053, 0.2351, 0.3045, 0.8381, 0.0276, + 0.1683, 0.6574, 0.1413, 
0.3680, 0.2281, 0.6440, 0.9364, + 0.7063, 0.6312, 0.4354, 0.4765, 0.5047, 0.5079, 0.1300, + 0.6462, 0.5531, 0.3126, 0.0835, 0.2181, 0.0832, 0.6136, + 0.2259, 0.2091, 0.0668, 0.3926, 0.4321, 0.7371, 0.0309, + 0.5039, 0.4193, 0.4354, 0.3007, 0.2701, 0.1422, 0.9665, + 0.4389, 0.2429, 0.7584, 0.6797, 0.5891, 0.3173, 0.8048, + 0.5910, 0.6072, 0.6009, 0.0564, 0.6818, 0.0189, 0.7587, + 0.4355, 0.8685, 0.7348, 0.6709, 0.7401, 0.8320, 0.4287, + 0.5419, 0.6982, 0.3521, 0.5136, 0.1073, 0.2471, 0.8444, + 0.2369, 0.0792, 0.5748, 0.6149, 0.9268, 0.1438, 0.9218, + 0.2189, 0.9680, 0.3579, 0.6778, 0.9284, 0.9712, 0.8475, + 0.6595, 0.8451, 0.9596, 0.0291, 0.2904, 0.4624, 0.9348, + 0.5743, 0.6942, 0.7654, 0.0460, 0.6704, 0.4112, 0.0182, + 0.7891, 0.1191, 0.0775, 0.8674, 0.6379, 0.6054, 0.8989, + 0.6635, 0.7675, 0.4663, 0.1353, 0.5542, 0.9368, 0.0244, + 0.5413, 0.4729, 0.7814, 0.8256, 0.2315, 0.9472, 0.2322, + 0.6177, 0.8709, 0.4587, 0.3448, 0.7377, 0.9270, 0.8111, + 0.2693, 0.6265, 0.4066, 0.9210, 0.2302, 0.6077, 0.3406, + 0.5854, 0.6597, 0.4653, 0.2831, 0.2390, 0.4564, 0.7151, + 0.8705, 0.3781, 0.3836, 0.2946, 0.0129, 0.3443, 0.5513, + 0.5972, 0.1489, 0.6113, 0.5915, 0.8810, 0.4599, 0.1897, + 0.3004, 0.9932, 0.0623, 0.6712, 0.9400, 0.1765, 0.4924, + 0.4162, 0.7114, 0.5036, 0.7747, 0.8718, 0.4237, 0.6772, + 0.3151, 0.4843, 0.4319, 0.0489, 0.9698, 0.2863, 0.1393, + 0.0339, 0.9839, 0.8166, 0.2810, 0.0680, 0.7799, 0.6229, + 0.5426, 0.1095, 0.3560, 0.3903, 0.8409, 0.3643, 0.1432, + 0.9733, 0.8764, 0.1891, 0.5211, 0.8147, 0.5398, 0.1212, + 0.4051, 0.7700, 0.6201, 0.6092, 0.5740, 0.0174, 0.9730, + 0.5750, 0.1625, 0.0572, 0.6170, 0.5243, 0.2437, 0.4114, + 0.6512, 0.1771, 0.4980, 0.0027, 0.6626, 0.7558, 0.5376, + 0.7689, 0.8026, 0.1009, 0.8359, 0.8508, 0.4274, 0.6167, + 0.9714, 0.0496, 0.8017, 0.4516, 0.6537, 0.1179, 0.6975, + 0.5184, 0.3878, 0.1200, 0.4588, 0.2915]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.1901, 0.2382, 0.6937, ..., 0.7632, 0.8746, 0.1540]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 2.9870200157165527 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 289937 -ss 5000 -sd 1e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.948570489883423} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4712, 4178, 2903, 24, 4753, 51, 2819, 4572, 4453, + 3780, 2899, 3226, 3780, 3989, 722, 4731, 1139, 1109, + 4512, 1669, 4522, 2228, 2733, 1441, 2781, 363, 3766, + 2188, 1770, 795, 1224, 1803, 4910, 1370, 4516, 2224, + 2678, 4365, 692, 1811, 1383, 2901, 749, 3344, 1016, + 4896, 4731, 857, 4171, 1998, 4569, 2011, 3832, 2691, + 1005, 4276, 2954, 4491, 2491, 2981, 645, 4461, 2128, + 3675, 4293, 1741, 3314, 1065, 1939, 1615, 3365, 3901, + 589, 3305, 4000, 4212, 790, 4927, 4076, 2238, 4107, + 3701, 3348, 1617, 1179, 3888, 4445, 2667, 3215, 4009, + 4710, 219, 2800, 233, 1521, 2319, 680, 1854, 4750, + 3077, 1721, 3819, 3579, 2334, 2886, 4510, 1278, 1666, + 4749, 4910, 1969, 2508, 532, 1736, 4315, 1491, 537, + 3309, 3121, 4585, 2996, 3358, 502, 4286, 4572, 2864, + 1049, 469, 825, 1143, 635, 2773, 2543, 3425, 3473, + 2174, 4228, 3516, 1137, 2463, 4638, 1994, 2452, 2065, + 96, 3029, 2790, 1834, 4863, 978, 4811, 3677, 2912, + 1938, 2797, 895, 1501, 2558, 1230, 534, 2633, 3017, + 4982, 4618, 4241, 2899, 2098, 2010, 1636, 2502, 2716, + 4980, 363, 466, 23, 1737, 1476, 1286, 4720, 833, + 2653, 201, 3769, 3397, 3009, 4570, 2692, 2095, 4797, + 3941, 2845, 1360, 1763, 3589, 3716, 2365, 196, 1112, + 123, 2267, 4731, 228, 4673, 1590, 3794, 3816, 2846, + 863, 3759, 1182, 304, 2540, 66, 4385, 3694, 3525, + 31, 4315, 4266, 4089, 2728, 1405, 1294, 4022, 2222, + 370, 101, 3253, 4145, 1994, 1358, 981, 2203, 2167, + 3742, 4696, 614, 2733, 396, 4399, 427, 1682, 4896, + 3429, 693, 870, 4939, 3305, 4250, 3680]), + values=tensor([0.3479, 0.5084, 0.6603, 0.8257, 0.8683, 0.9247, 0.0338, + 0.9486, 0.8504, 0.5745, 0.3925, 0.3196, 0.6449, 0.2119, + 0.0164, 0.7309, 0.7682, 0.1461, 0.7397, 0.9951, 0.7123, + 0.4571, 0.7549, 0.0282, 0.5968, 0.6667, 0.3749, 0.3789, + 0.4293, 0.3353, 0.3273, 0.0531, 0.5787, 0.8917, 0.4198, + 0.7695, 0.7895, 0.7926, 0.6654, 0.0192, 0.0703, 0.9096, + 0.9289, 0.6077, 0.6990, 0.6780, 0.1687, 0.0557, 0.0641, + 0.1726, 0.7968, 0.1192, 0.9982, 0.1104, 0.3778, 0.1311, + 0.0584, 0.9615, 0.6551, 0.7173, 0.4827, 0.9281, 0.2508, + 0.5901, 0.8616, 0.6261, 0.7668, 0.8880, 0.5680, 0.6476, + 0.9494, 0.3895, 0.7153, 0.7995, 0.4681, 0.0628, 0.0354, + 0.8123, 0.7147, 0.5397, 0.7785, 0.1737, 0.3550, 0.8870, + 0.9193, 0.0915, 0.0963, 0.4243, 0.0483, 0.3655, 0.7711, + 0.4395, 0.3161, 0.5266, 0.7991, 0.4530, 0.0590, 0.9302, + 0.7021, 0.5336, 0.6784, 0.9823, 0.0943, 0.7391, 0.7084, + 0.0171, 0.4786, 0.7623, 0.5776, 0.2256, 0.8698, 0.1309, + 0.6095, 0.6277, 0.0828, 0.3536, 0.7932, 0.1162, 0.9939, + 0.6893, 0.6054, 0.2963, 0.4057, 0.5571, 0.8162, 0.7161, + 0.6029, 0.7576, 0.8687, 0.3351, 0.8262, 0.5784, 0.6376, + 0.1057, 0.2968, 0.0568, 0.6646, 0.7354, 0.2403, 0.0158, + 0.7552, 0.5770, 0.3899, 0.7014, 0.1196, 0.2500, 0.6112, + 0.3203, 0.8311, 0.8445, 0.8722, 0.6620, 0.5633, 0.3401, + 0.0024, 0.6473, 0.3675, 0.6286, 0.4764, 0.3994, 0.7176, + 0.9295, 0.7610, 0.0448, 0.1910, 0.5959, 0.2410, 0.6714, + 0.3638, 0.8788, 0.4303, 0.8357, 0.1493, 0.7533, 0.2046, + 0.6241, 0.3330, 0.7519, 0.0927, 0.5403, 0.3301, 0.0842, + 0.3044, 0.5311, 0.1859, 0.7234, 0.6523, 0.1074, 0.7205, + 0.0951, 0.9394, 0.8290, 0.0965, 0.9119, 0.9547, 0.5884, + 0.2956, 0.6206, 0.7425, 0.9894, 0.3994, 0.9059, 0.3500, + 0.1825, 0.6628, 0.6687, 0.3257, 0.5028, 0.7592, 0.5362, + 0.2886, 0.3968, 0.4420, 0.4118, 0.6245, 0.3599, 0.5238, + 0.6126, 0.6306, 0.0343, 0.1672, 0.1822, 0.1255, 0.6333, + 0.3425, 0.1597, 0.8225, 0.7857, 
0.7675, 0.1595, 0.4863, + 0.8578, 0.1155, 0.8038, 0.8906, 0.6082, 0.3640, 0.5820, + 0.4951, 0.3638, 0.3016, 0.7272, 0.7832, 0.5085, 0.1101, + 0.2648, 0.6399, 0.4137, 0.5843, 0.7184]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.0011, 0.9357, 0.8539, ..., 0.1995, 0.1479, 0.1616]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.948570489883423 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4712, 4178, 2903, 24, 4753, 51, 2819, 4572, 4453, + 3780, 2899, 3226, 3780, 3989, 722, 4731, 1139, 1109, + 4512, 1669, 4522, 2228, 2733, 1441, 2781, 363, 3766, + 2188, 1770, 795, 1224, 1803, 4910, 1370, 4516, 2224, + 2678, 4365, 692, 1811, 1383, 2901, 749, 3344, 1016, + 4896, 4731, 857, 4171, 1998, 4569, 2011, 3832, 2691, + 1005, 4276, 2954, 4491, 2491, 2981, 645, 4461, 2128, + 3675, 4293, 1741, 3314, 1065, 1939, 1615, 3365, 3901, + 589, 3305, 4000, 4212, 790, 4927, 4076, 2238, 4107, + 3701, 3348, 1617, 1179, 3888, 4445, 2667, 3215, 4009, + 4710, 219, 2800, 233, 1521, 2319, 680, 1854, 4750, + 3077, 1721, 3819, 3579, 2334, 2886, 4510, 1278, 1666, + 4749, 4910, 1969, 2508, 532, 1736, 4315, 1491, 537, + 3309, 3121, 4585, 2996, 3358, 502, 4286, 4572, 2864, + 1049, 469, 825, 1143, 635, 2773, 2543, 3425, 3473, + 2174, 4228, 3516, 1137, 2463, 4638, 1994, 2452, 2065, + 96, 3029, 2790, 1834, 4863, 978, 4811, 3677, 2912, + 1938, 2797, 895, 1501, 2558, 1230, 534, 2633, 3017, + 4982, 4618, 4241, 2899, 2098, 2010, 1636, 2502, 2716, + 4980, 363, 466, 23, 1737, 1476, 1286, 4720, 833, + 2653, 201, 3769, 3397, 3009, 4570, 2692, 2095, 4797, + 3941, 2845, 1360, 1763, 3589, 3716, 2365, 196, 1112, + 123, 2267, 4731, 228, 4673, 1590, 3794, 3816, 2846, + 863, 3759, 1182, 304, 2540, 66, 4385, 3694, 3525, + 31, 4315, 4266, 4089, 2728, 1405, 1294, 4022, 2222, + 370, 101, 3253, 4145, 1994, 1358, 981, 2203, 2167, + 3742, 4696, 614, 2733, 396, 4399, 427, 1682, 4896, + 3429, 693, 870, 4939, 3305, 4250, 3680]), + values=tensor([0.3479, 0.5084, 0.6603, 0.8257, 0.8683, 0.9247, 0.0338, + 0.9486, 0.8504, 0.5745, 0.3925, 0.3196, 0.6449, 0.2119, + 0.0164, 0.7309, 0.7682, 0.1461, 0.7397, 0.9951, 0.7123, + 0.4571, 0.7549, 0.0282, 0.5968, 0.6667, 0.3749, 0.3789, + 0.4293, 0.3353, 0.3273, 0.0531, 0.5787, 0.8917, 0.4198, + 0.7695, 0.7895, 0.7926, 0.6654, 0.0192, 0.0703, 0.9096, + 0.9289, 0.6077, 0.6990, 0.6780, 0.1687, 0.0557, 0.0641, + 0.1726, 0.7968, 0.1192, 0.9982, 0.1104, 0.3778, 0.1311, + 0.0584, 0.9615, 0.6551, 0.7173, 0.4827, 0.9281, 0.2508, + 0.5901, 0.8616, 0.6261, 0.7668, 0.8880, 0.5680, 0.6476, + 0.9494, 0.3895, 0.7153, 0.7995, 0.4681, 0.0628, 0.0354, + 0.8123, 0.7147, 0.5397, 0.7785, 0.1737, 0.3550, 0.8870, + 0.9193, 0.0915, 0.0963, 0.4243, 0.0483, 0.3655, 0.7711, + 0.4395, 0.3161, 0.5266, 0.7991, 0.4530, 0.0590, 0.9302, + 0.7021, 0.5336, 0.6784, 0.9823, 0.0943, 0.7391, 0.7084, + 0.0171, 0.4786, 0.7623, 0.5776, 0.2256, 0.8698, 0.1309, + 0.6095, 0.6277, 0.0828, 0.3536, 0.7932, 0.1162, 0.9939, + 0.6893, 0.6054, 0.2963, 0.4057, 0.5571, 0.8162, 0.7161, + 0.6029, 
0.7576, 0.8687, 0.3351, 0.8262, 0.5784, 0.6376, + 0.1057, 0.2968, 0.0568, 0.6646, 0.7354, 0.2403, 0.0158, + 0.7552, 0.5770, 0.3899, 0.7014, 0.1196, 0.2500, 0.6112, + 0.3203, 0.8311, 0.8445, 0.8722, 0.6620, 0.5633, 0.3401, + 0.0024, 0.6473, 0.3675, 0.6286, 0.4764, 0.3994, 0.7176, + 0.9295, 0.7610, 0.0448, 0.1910, 0.5959, 0.2410, 0.6714, + 0.3638, 0.8788, 0.4303, 0.8357, 0.1493, 0.7533, 0.2046, + 0.6241, 0.3330, 0.7519, 0.0927, 0.5403, 0.3301, 0.0842, + 0.3044, 0.5311, 0.1859, 0.7234, 0.6523, 0.1074, 0.7205, + 0.0951, 0.9394, 0.8290, 0.0965, 0.9119, 0.9547, 0.5884, + 0.2956, 0.6206, 0.7425, 0.9894, 0.3994, 0.9059, 0.3500, + 0.1825, 0.6628, 0.6687, 0.3257, 0.5028, 0.7592, 0.5362, + 0.2886, 0.3968, 0.4420, 0.4118, 0.6245, 0.3599, 0.5238, + 0.6126, 0.6306, 0.0343, 0.1672, 0.1822, 0.1255, 0.6333, + 0.3425, 0.1597, 0.8225, 0.7857, 0.7675, 0.1595, 0.4863, + 0.8578, 0.1155, 0.8038, 0.8906, 0.6082, 0.3640, 0.5820, + 0.4951, 0.3638, 0.3016, 0.7272, 0.7832, 0.5085, 0.1101, + 0.2648, 0.6399, 0.4137, 0.5843, 0.7184]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.0011, 0.9357, 0.8539, ..., 0.1995, 0.1479, 0.1616]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.948570489883423 seconds + +[20.16, 20.24, 20.28, 20.24, 20.08, 20.16, 20.2, 20.28, 20.28, 20.36] +[20.72, 20.56, 20.44, 24.96, 27.52, 28.24, 29.2, 26.64, 25.12, 24.04, 23.96, 23.92, 24.04, 24.2] +14.660816431045532 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 289937, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.948570489883423, 'TIME_S_1KI': 0.037761894790535266, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 337.0917576313019, 'W': 22.992700250818316} +[20.16, 20.24, 20.28, 20.24, 20.08, 20.16, 20.2, 20.28, 20.28, 20.36, 20.4, 20.4, 20.44, 20.84, 20.84, 20.76, 20.64, 20.8, 20.84, 20.84] +368.2 +18.41 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 289937, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.948570489883423, 'TIME_S_1KI': 0.037761894790535266, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 337.0917576313019, 'W': 22.992700250818316, 'J_1KI': 1.1626379442130597, 'W_1KI': 0.07930240104166876, 'W_D': 4.582700250818316, 'J_D': 67.18612713575365, 'W_D_1KI': 0.015805848342289243, 'J_D_1KI': 5.4514768181671336e-05} diff --git a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..8ad2558 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 1, "ITERATIONS": 154350, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.289572715759277, "TIME_S_1KI": 0.0666638983852237, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 297.04585779190063, "W": 21.84801552939114, "J_1KI": 1.9244953533650835, "W_1KI": 0.14154852950690727, "W_D": 3.3270155293911436, "J_D": 45.23413947987558, "W_D_1KI": 0.0215550082888963, "J_D_1KI": 0.0001396501994745468} diff --git 
a/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..f253283 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/altra_1_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014810323715209961} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([4902, 4751, 573, ..., 1409, 1871, 577]), + values=tensor([0.0874, 0.7756, 0.4965, ..., 0.1251, 0.3364, 0.3476]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.4221, 0.7918, 0.4416, ..., 0.8475, 0.7362, 0.1103]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.014810323715209961 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 70896 -ss 5000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.822846412658691} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([1594, 2931, 2652, ..., 4428, 449, 1795]), + values=tensor([0.3058, 0.1710, 0.0965, ..., 0.7799, 0.8373, 0.5140]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.0899, 0.4612, 0.1283, ..., 0.7452, 0.2953, 0.1670]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 4.822846412658691 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 154350 -ss 5000 -sd 5e-05 -c 1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.289572715759277} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([ 621, 1968, 1113, ..., 1968, 726, 3393]), + values=tensor([0.9316, 0.3440, 0.3874, ..., 0.4845, 0.3520, 0.3225]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.4702, 0.8122, 0.0166, ..., 0.1291, 0.0008, 0.5220]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.289572715759277 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([ 621, 1968, 1113, ..., 1968, 726, 3393]), + values=tensor([0.9316, 0.3440, 0.3874, ..., 0.4845, 0.3520, 0.3225]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.4702, 0.8122, 0.0166, ..., 0.1291, 0.0008, 0.5220]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.289572715759277 seconds + +[20.4, 20.44, 20.52, 20.68, 20.64, 20.48, 20.24, 20.36, 20.2, 20.16] +[20.36, 20.44, 21.32, 23.28, 23.28, 24.96, 25.6, 26.08, 24.88, 23.92, 23.64, 23.6, 23.6] +13.59601092338562 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 154350, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.289572715759277, 'TIME_S_1KI': 0.0666638983852237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 297.04585779190063, 'W': 21.84801552939114} +[20.4, 20.44, 20.52, 20.68, 20.64, 20.48, 20.24, 20.36, 20.2, 20.16, 20.52, 20.64, 20.96, 21.2, 21.24, 21.04, 20.76, 20.36, 20.08, 20.08] +370.41999999999996 +18.520999999999997 +{'CPU': 'Altra', 'CORES': 1, 'ITERATIONS': 154350, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.289572715759277, 'TIME_S_1KI': 0.0666638983852237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 297.04585779190063, 'W': 21.84801552939114, 'J_1KI': 1.9244953533650835, 'W_1KI': 0.14154852950690727, 'W_D': 3.3270155293911436, 'J_D': 45.23413947987558, 'W_D_1KI': 0.0215550082888963, 'J_D_1KI': 0.0001396501994745468} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..57ab5e0 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 6238, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, 
"MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.769784212112427, "TIME_S_1KI": 1.7264803161449866, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 915.7679015994072, "W": 67.11, "J_1KI": 146.8047293362307, "W_1KI": 10.75825585123437, "W_D": 31.7095, "J_D": 432.70067465007304, "W_D_1KI": 5.083279897403013, "J_D_1KI": 0.8148893711771422} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..fe560b9 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.18373441696166992} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 18, ..., 999979, + 999991, 1000000]), + col_indices=tensor([ 9419, 17690, 19775, ..., 65904, 78906, 97730]), + values=tensor([0.1002, 0.0063, 0.1334, ..., 0.8477, 0.2339, 0.2955]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.9261, 0.9567, 0.5751, ..., 0.3199, 0.0262, 0.3042]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.18373441696166992 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5714', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.617395401000977} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 15, ..., 999976, + 999989, 1000000]), + col_indices=tensor([12342, 20602, 31374, ..., 83399, 88988, 97850]), + values=tensor([0.3594, 0.1684, 0.5761, ..., 0.4601, 0.3694, 0.1608]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.5848, 0.1566, 0.7046, ..., 0.5634, 0.8550, 0.2097]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 9.617395401000977 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6238', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.769784212112427} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 15, ..., 999972, + 999990, 1000000]), + col_indices=tensor([ 1977, 7363, 16479, ..., 91067, 93957, 95744]), + values=tensor([0.8934, 0.4616, 0.7140, ..., 0.3224, 0.7140, 0.9696]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0584, 0.0097, 0.6336, ..., 0.7366, 0.8575, 0.7006]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.769784212112427 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 15, ..., 999972, + 999990, 1000000]), + col_indices=tensor([ 1977, 7363, 16479, ..., 91067, 93957, 95744]), + values=tensor([0.8934, 0.4616, 0.7140, ..., 0.3224, 0.7140, 0.9696]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0584, 0.0097, 0.6336, ..., 0.7366, 0.8575, 0.7006]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.769784212112427 seconds + +[39.95, 40.61, 39.69, 39.43, 39.55, 38.89, 39.33, 39.12, 39.11, 38.94] +[67.11] +13.645774126052856 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.769784212112427, 'TIME_S_1KI': 1.7264803161449866, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 915.7679015994072, 'W': 67.11} +[39.95, 40.61, 39.69, 39.43, 39.55, 38.89, 39.33, 39.12, 39.11, 38.94, 39.57, 38.89, 39.02, 39.08, 39.5, 39.31, 39.6, 39.05, 39.01, 39.18] +708.01 +35.4005 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 6238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.769784212112427, 'TIME_S_1KI': 1.7264803161449866, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 915.7679015994072, 'W': 67.11, 'J_1KI': 146.8047293362307, 'W_1KI': 10.75825585123437, 'W_D': 31.7095, 'J_D': 432.70067465007304, 'W_D_1KI': 5.083279897403013, 'J_D_1KI': 0.8148893711771422} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..a6d9713 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 631, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.591248035430908, "TIME_S_1KI": 16.78486217976372, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1175.510341129303, "W": 76.44, "J_1KI": 1862.932394816645, "W_1KI": 121.14104595879556, "W_D": 40.61775, "J_D": 624.6282726112604, "W_D_1KI": 64.37044374009508, "J_D_1KI": 102.0133815215453} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..4526ab4 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 1.6621592044830322} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 87, 180, ..., 9999810, + 9999900, 10000000]), + col_indices=tensor([ 1316, 2180, 2488, ..., 99391, 99679, 99852]), + values=tensor([0.4838, 0.8512, 0.8260, ..., 0.7772, 0.9919, 0.5400]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7127, 0.0015, 0.2736, ..., 0.7345, 0.7377, 0.4477]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 1.6621592044830322 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '631', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.591248035430908} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 104, 198, ..., 9999801, + 9999900, 10000000]), + col_indices=tensor([ 1720, 2057, 4608, ..., 98148, 99667, 99757]), + values=tensor([0.5091, 0.6981, 0.1321, ..., 0.4342, 0.6647, 0.6565]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3567, 0.4501, 0.1430, ..., 0.3086, 0.4387, 0.0746]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.591248035430908 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 104, 198, ..., 9999801, + 9999900, 10000000]), + col_indices=tensor([ 1720, 2057, 4608, ..., 98148, 99667, 99757]), + values=tensor([0.5091, 0.6981, 0.1321, ..., 0.4342, 0.6647, 0.6565]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3567, 0.4501, 0.1430, ..., 0.3086, 0.4387, 0.0746]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.591248035430908 seconds + +[39.78, 39.0, 39.24, 38.88, 39.37, 39.28, 39.34, 38.84, 41.87, 41.9] +[76.44] +15.378209590911865 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 631, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.591248035430908, 'TIME_S_1KI': 16.78486217976372, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1175.510341129303, 'W': 76.44} +[39.78, 39.0, 39.24, 38.88, 39.37, 39.28, 39.34, 38.84, 41.87, 41.9, 39.74, 39.44, 39.02, 44.31, 40.1, 38.93, 39.71, 39.12, 39.62, 39.33] +716.4449999999999 +35.82225 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 631, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.591248035430908, 'TIME_S_1KI': 16.78486217976372, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1175.510341129303, 'W': 76.44, 'J_1KI': 1862.932394816645, 'W_1KI': 121.14104595879556, 'W_D': 40.61775, 'J_D': 624.6282726112604, 'W_D_1KI': 64.37044374009508, 'J_D_1KI': 102.0133815215453} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..87f4ecf --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 12301, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.471284627914429, "TIME_S_1KI": 0.8512547457860685, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 857.4546840524674, "W": 65.29, "J_1KI": 69.70609576883729, "W_1KI": 5.307698561092595, "W_D": 29.40700000000001, "J_D": 386.2026327757837, "W_D_1KI": 2.3906186488903347, "J_D_1KI": 0.19434343946755017} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..e82259e --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.09777641296386719} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99999, 99999, + 100000]), + col_indices=tensor([10415, 34481, 41161, ..., 69185, 8793, 68858]), + values=tensor([0.7697, 0.4410, 0.3075, ..., 0.8657, 0.1828, 0.6667]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2533, 0.9138, 0.2717, ..., 0.2019, 0.7103, 0.0862]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.09777641296386719 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '10738', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.165135860443115} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 99996, 99996, + 100000]), + col_indices=tensor([17140, 55127, 70380, ..., 9005, 21920, 77148]), + values=tensor([0.4913, 0.5196, 0.1867, ..., 0.0903, 0.8718, 0.1023]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.0174, 0.3477, 0.7027, ..., 0.9312, 0.2138, 0.3974]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 9.165135860443115 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12301', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.471284627914429} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 5, ..., 99999, 100000, + 100000]), + col_indices=tensor([46597, 403, 54918, ..., 58141, 94085, 20979]), + values=tensor([0.5040, 0.7325, 0.7996, ..., 0.9839, 0.2631, 0.4936]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2071, 0.7418, 0.9347, ..., 0.4731, 0.1489, 0.5724]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.471284627914429 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 5, ..., 99999, 100000, + 100000]), + col_indices=tensor([46597, 403, 54918, ..., 58141, 94085, 20979]), + values=tensor([0.5040, 0.7325, 0.7996, ..., 0.9839, 0.2631, 0.4936]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2071, 0.7418, 0.9347, ..., 0.4731, 0.1489, 0.5724]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.471284627914429 seconds + +[39.92, 39.64, 39.53, 39.59, 39.42, 39.38, 39.39, 39.02, 44.4, 39.0] +[65.29] +13.13301706314087 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12301, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.471284627914429, 'TIME_S_1KI': 0.8512547457860685, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 857.4546840524674, 'W': 65.29} +[39.92, 39.64, 39.53, 39.59, 39.42, 39.38, 39.39, 39.02, 44.4, 39.0, 39.77, 38.96, 39.1, 39.29, 38.97, 44.08, 39.5, 39.1, 39.5, 38.89] +717.66 +35.882999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 12301, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.471284627914429, 'TIME_S_1KI': 0.8512547457860685, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 857.4546840524674, 'W': 65.29, 'J_1KI': 69.70609576883729, 'W_1KI': 5.307698561092595, 'W_D': 29.40700000000001, 'J_D': 386.2026327757837, 'W_D_1KI': 2.3906186488903347, 'J_D_1KI': 0.19434343946755017} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..96abffc --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 7670, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.499921083450317, "TIME_S_1KI": 1.3689597240482814, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 873.9648419952392, "W": 65.94, "J_1KI": 113.94587248960094, "W_1KI": 8.597131681877444, "W_D": 30.4285, "J_D": 403.29753100776674, "W_D_1KI": 3.9672099087353327, "J_D_1KI": 0.5172372762366796} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..7c108ed --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 
100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.15105009078979492} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 499997, 499999, + 500000]), + col_indices=tensor([ 4363, 49954, 63940, ..., 740, 19551, 36085]), + values=tensor([0.7532, 0.6946, 0.3669, ..., 0.0744, 0.6590, 0.6868]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.0672, 0.6383, 0.6761, ..., 0.1188, 0.9489, 0.0863]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.15105009078979492 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '6951', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.51450252532959} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 13, ..., 499996, 499997, + 500000]), + col_indices=tensor([ 4260, 42899, 54575, ..., 5425, 31756, 61151]), + values=tensor([0.4952, 0.8247, 0.2969, ..., 0.2331, 0.9267, 0.2319]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.7470, 0.2926, 0.2731, ..., 0.9830, 0.8295, 0.9958]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 9.51450252532959 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '7670', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.499921083450317} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 18, ..., 499989, 499996, + 500000]), + col_indices=tensor([16754, 23077, 28797, ..., 22620, 46442, 72952]), + values=tensor([0.6737, 0.8129, 0.9335, ..., 0.4581, 0.1021, 0.2391]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.9975, 0.9245, 0.4309, ..., 0.4303, 0.6144, 0.3183]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.499921083450317 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 18, ..., 499989, 499996, + 500000]), + col_indices=tensor([16754, 23077, 28797, ..., 22620, 46442, 72952]), + values=tensor([0.6737, 0.8129, 0.9335, ..., 0.4581, 0.1021, 0.2391]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.9975, 0.9245, 0.4309, ..., 0.4303, 0.6144, 0.3183]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.499921083450317 seconds + +[39.66, 38.93, 40.25, 39.53, 39.7, 39.41, 40.1, 39.55, 39.0, 39.05] +[65.94] +13.25394058227539 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7670, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.499921083450317, 'TIME_S_1KI': 1.3689597240482814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 873.9648419952392, 'W': 65.94} +[39.66, 38.93, 40.25, 39.53, 39.7, 39.41, 40.1, 39.55, 39.0, 39.05, 39.72, 39.51, 39.22, 39.24, 39.32, 39.23, 40.03, 39.22, 39.23, 39.09] +710.23 +35.5115 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 7670, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.499921083450317, 'TIME_S_1KI': 1.3689597240482814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 873.9648419952392, 'W': 65.94, 'J_1KI': 113.94587248960094, 'W_1KI': 8.597131681877444, 'W_D': 30.4285, 'J_D': 403.29753100776674, 'W_D_1KI': 3.9672099087353327, 'J_D_1KI': 0.5172372762366796} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..a0173f2 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 237172, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.292231321334839, "TIME_S_1KI": 0.04339564249293693, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 862.8344623732567, "W": 65.91, "J_1KI": 3.638011495342016, "W_1KI": 0.2778995834246875, "W_D": 30.572500000000005, "J_D": 400.22768321812157, "W_D_1KI": 0.12890433946671614, "J_D_1KI": 
0.0005435057235538602} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..104c518 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.016118526458740234} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 3, ..., 9995, 9998, 10000]), + col_indices=tensor([3770, 7218, 7901, ..., 7147, 2189, 2422]), + values=tensor([0.0682, 0.4925, 0.4932, ..., 0.9859, 0.2682, 0.5675]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.1703, 0.4753, 0.7272, ..., 0.9852, 0.8357, 0.1698]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.016118526458740234 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65142', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.8839359283447266} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9999, 10000, 10000]), + col_indices=tensor([6160, 1315, 448, ..., 9882, 6598, 7658]), + values=tensor([0.4764, 0.2622, 0.7017, ..., 0.9860, 0.1866, 0.7529]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.4338, 0.9515, 0.6308, ..., 0.9365, 0.1556, 0.4912]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 2.8839359283447266 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '237172', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.292231321334839} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 3, ..., 9999, 10000, 10000]), + col_indices=tensor([1278, 7265, 6993, ..., 9863, 6468, 3133]), + values=tensor([0.6288, 0.8682, 0.0748, ..., 0.3062, 0.2031, 0.3525]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.1078, 0.8244, 0.8698, ..., 0.0830, 0.2322, 0.6518]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.292231321334839 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 3, ..., 9999, 10000, 10000]), + col_indices=tensor([1278, 7265, 6993, ..., 9863, 6468, 3133]), + values=tensor([0.6288, 0.8682, 0.0748, ..., 0.3062, 0.2031, 0.3525]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.1078, 0.8244, 0.8698, ..., 0.0830, 0.2322, 0.6518]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.292231321334839 seconds + +[40.27, 39.31, 39.28, 38.86, 41.16, 39.32, 39.02, 39.09, 38.81, 39.11] +[65.91] +13.091100931167603 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 237172, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.292231321334839, 'TIME_S_1KI': 0.04339564249293693, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 862.8344623732567, 'W': 65.91} +[40.27, 39.31, 39.28, 38.86, 41.16, 39.32, 39.02, 39.09, 38.81, 39.11, 39.9, 39.01, 39.35, 39.15, 39.0, 38.86, 39.09, 38.89, 38.89, 40.04] +706.7499999999999 +35.33749999999999 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 237172, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.292231321334839, 'TIME_S_1KI': 0.04339564249293693, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 862.8344623732567, 'W': 65.91, 'J_1KI': 3.638011495342016, 'W_1KI': 0.2778995834246875, 'W_D': 30.572500000000005, 'J_D': 400.22768321812157, 'W_D_1KI': 0.12890433946671614, 'J_D_1KI': 0.0005435057235538602} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..deb9985 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 75716, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.424914360046387, "TIME_S_1KI": 0.1376844307682179, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 
10, "J": 940.2084917426108, "W": 66.49, "J_1KI": 12.41756685169067, "W_1KI": 0.8781499286808601, "W_D": 31.17374999999999, "J_D": 440.81552819162596, "W_D_1KI": 0.4117194516350572, "J_D_1KI": 0.005437680960894093} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..169706c --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,105 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02692556381225586} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 22, ..., 99984, 99990, + 100000]), + col_indices=tensor([ 947, 1869, 5338, ..., 6268, 7050, 7942]), + values=tensor([0.2237, 0.7540, 0.0617, ..., 0.6862, 0.3906, 0.7890]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6838, 0.4222, 0.9597, ..., 0.5474, 0.0680, 0.5394]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.02692556381225586 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '38996', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.952186584472656} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 19, ..., 99978, 99990, + 100000]), + col_indices=tensor([ 7, 556, 703, ..., 8117, 8865, 9056]), + values=tensor([0.2495, 0.4435, 0.2550, ..., 0.5409, 0.7823, 0.3947]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8603, 0.6651, 0.2785, ..., 0.2036, 0.7755, 0.1415]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 5.952186584472656 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '68791', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.539567232131958} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 19, ..., 99987, 99995, + 100000]), + col_indices=tensor([ 696, 997, 2062, ..., 1211, 1590, 9690]), + values=tensor([0.0377, 0.1568, 0.2160, ..., 0.8237, 0.6309, 0.0587]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.0823, 0.1873, 0.3356, ..., 0.2591, 0.5771, 0.7059]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 9.539567232131958 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75716', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.424914360046387} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 17, ..., 99986, 99994, + 100000]), + col_indices=tensor([1284, 3776, 5103, ..., 6955, 7171, 8445]), + values=tensor([0.9684, 0.2053, 0.3935, ..., 0.8592, 0.0314, 0.3677]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.4306, 0.9725, 0.6597, ..., 0.5969, 0.7821, 0.5134]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.424914360046387 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 17, ..., 99986, 99994, + 100000]), + col_indices=tensor([1284, 3776, 5103, ..., 6955, 7171, 8445]), + values=tensor([0.9684, 0.2053, 0.3935, ..., 0.8592, 0.0314, 0.3677]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.4306, 0.9725, 0.6597, ..., 0.5969, 0.7821, 0.5134]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.424914360046387 seconds + +[40.22, 38.81, 38.98, 39.63, 38.95, 39.23, 38.84, 39.29, 39.31, 38.77] +[66.49] +14.140599966049194 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75716, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.424914360046387, 'TIME_S_1KI': 0.1376844307682179, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 940.2084917426108, 'W': 66.49} +[40.22, 38.81, 38.98, 39.63, 38.95, 39.23, 38.84, 39.29, 39.31, 38.77, 41.54, 39.08, 39.06, 38.87, 39.0, 38.96, 39.2, 38.82, 39.06, 41.94] +706.325 +35.316250000000004 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 75716, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.424914360046387, 'TIME_S_1KI': 0.1376844307682179, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 940.2084917426108, 'W': 66.49, 'J_1KI': 12.41756685169067, 'W_1KI': 0.8781499286808601, 'W_D': 31.17374999999999, 'J_D': 440.81552819162596, 'W_D_1KI': 0.4117194516350572, 'J_D_1KI': 0.005437680960894093} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..db6444c --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 10206, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.48923373222351, "TIME_S_1KI": 1.0277516884404774, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 888.1208229660987, "W": 67.21, "J_1KI": 87.01948098825189, "W_1KI": 6.585341955712326, "W_D": 31.27349999999999, "J_D": 413.25169702470293, "W_D_1KI": 3.0642269253380356, "J_D_1KI": 0.30023779397785966} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..191d2ac --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.1162419319152832} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 100, 185, ..., 999774, + 999893, 1000000]), + col_indices=tensor([ 36, 100, 149, ..., 9802, 9836, 9872]), + values=tensor([0.2938, 0.2320, 0.9118, ..., 0.8681, 0.8272, 0.2716]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.9985, 0.1887, 0.5488, ..., 0.6608, 0.9222, 0.7055]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.1162419319152832 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '9032', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.29158329963684} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 95, 205, ..., 999782, + 999891, 1000000]), + col_indices=tensor([ 54, 212, 264, ..., 9693, 9804, 9961]), + values=tensor([0.9421, 0.7916, 0.1774, ..., 0.7420, 0.5713, 0.3525]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0899, 0.7410, 0.9990, ..., 0.5022, 0.0295, 0.8248]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 9.29158329963684 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '10206', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.48923373222351} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 108, 196, ..., 999774, + 999884, 1000000]), + col_indices=tensor([ 16, 259, 309, ..., 9528, 9603, 9788]), + values=tensor([0.1649, 0.9890, 0.6907, ..., 0.8956, 0.0145, 0.7596]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.7287, 0.8351, 0.4943, ..., 0.5583, 0.1274, 0.9823]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.48923373222351 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 108, 196, ..., 999774, + 999884, 1000000]), + col_indices=tensor([ 16, 259, 309, ..., 9528, 9603, 9788]), + values=tensor([0.1649, 0.9890, 0.6907, ..., 0.8956, 0.0145, 0.7596]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.7287, 0.8351, 0.4943, ..., 0.5583, 0.1274, 0.9823]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.48923373222351 seconds + +[39.9, 39.07, 39.52, 38.94, 39.35, 44.79, 39.52, 39.98, 39.47, 39.28] +[67.21] +13.214117288589478 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10206, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.48923373222351, 'TIME_S_1KI': 1.0277516884404774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.1208229660987, 'W': 67.21} +[39.9, 39.07, 39.52, 38.94, 39.35, 44.79, 39.52, 39.98, 39.47, 39.28, 40.29, 39.27, 43.98, 38.92, 39.16, 39.39, 39.32, 39.34, 39.08, 39.79] +718.73 +35.9365 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 10206, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.48923373222351, 'TIME_S_1KI': 1.0277516884404774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.1208229660987, 'W': 67.21, 'J_1KI': 87.01948098825189, 'W_1KI': 6.585341955712326, 'W_D': 31.27349999999999, 'J_D': 413.25169702470293, 'W_D_1KI': 3.0642269253380356, 'J_D_1KI': 0.30023779397785966} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..f94ac11 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1725, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.28298306465149, "TIME_S_1KI": 5.961149602696516, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1049.8662384581567, "W": 75.33, "J_1KI": 608.6181092511052, "W_1KI": 43.66956521739131, "W_D": 39.905, "J_D": 556.151762188673, "W_D_1KI": 23.133333333333336, "J_D_1KI": 13.410628019323674} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..bf02e51 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, 
"MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.6085023880004883} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 508, 930, ..., 4999014, + 4999519, 5000000]), + col_indices=tensor([ 33, 44, 68, ..., 9921, 9984, 9990]), + values=tensor([0.7535, 0.2308, 0.9086, ..., 0.5781, 0.9835, 0.5048]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.1644, 0.2567, 0.4067, ..., 0.0618, 0.3860, 0.0437]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 0.6085023880004883 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1725', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.28298306465149} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 453, 934, ..., 4998993, + 4999474, 5000000]), + col_indices=tensor([ 76, 82, 85, ..., 9960, 9963, 9989]), + values=tensor([0.2757, 0.2788, 0.5904, ..., 0.0782, 0.3342, 0.9799]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0884, 0.4732, 0.8375, ..., 0.9901, 0.5525, 0.7748]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.28298306465149 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 453, 934, ..., 4998993, + 4999474, 5000000]), + col_indices=tensor([ 76, 82, 85, ..., 9960, 9963, 9989]), + values=tensor([0.2757, 0.2788, 0.5904, ..., 0.0782, 0.3342, 0.9799]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0884, 0.4732, 0.8375, ..., 0.9901, 0.5525, 0.7748]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.28298306465149 seconds + +[40.86, 38.91, 38.99, 39.35, 38.92, 38.87, 39.32, 39.52, 39.41, 41.56] +[75.33] +13.936894178390503 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1725, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.28298306465149, 'TIME_S_1KI': 5.961149602696516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.8662384581567, 'W': 75.33} +[40.86, 38.91, 38.99, 39.35, 38.92, 38.87, 39.32, 39.52, 39.41, 41.56, 39.65, 38.96, 39.21, 38.88, 39.0, 40.01, 39.33, 39.71, 39.42, 39.31] +708.5 +35.425 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1725, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.28298306465149, 'TIME_S_1KI': 5.961149602696516, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.8662384581567, 'W': 75.33, 'J_1KI': 608.6181092511052, 'W_1KI': 43.66956521739131, 'W_D': 39.905, 'J_D': 556.151762188673, 'W_D_1KI': 23.133333333333336, 'J_D_1KI': 13.410628019323674} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..02b08b6 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 700, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.428780555725098, "TIME_S_1KI": 14.89825793675014, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1164.5532444572448, "W": 76.66, "J_1KI": 1663.6474920817784, "W_1KI": 109.5142857142857, "W_D": 40.888999999999996, "J_D": 621.1507645788192, "W_D_1KI": 58.412857142857135, "J_D_1KI": 83.44693877551019} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..f0881c0 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 1.498870611190796} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 987, 1979, ..., 9997999, + 9999011, 10000000]), + col_indices=tensor([ 3, 7, 20, ..., 9954, 9962, 9986]), + values=tensor([0.9369, 0.1464, 0.7342, ..., 0.7208, 0.8895, 0.6454]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7880, 0.5272, 0.7128, ..., 0.1762, 0.3407, 0.4321]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 1.498870611190796 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '700', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.428780555725098} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 995, 2027, ..., 9997983, + 9998981, 10000000]), + col_indices=tensor([ 16, 21, 33, ..., 9977, 9983, 9988]), + values=tensor([0.3684, 0.6722, 0.7880, ..., 0.5048, 0.0966, 0.9792]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4422, 0.8800, 0.2165, ..., 0.4558, 0.6103, 0.1393]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.428780555725098 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 995, 2027, ..., 9997983, + 9998981, 10000000]), + col_indices=tensor([ 16, 21, 33, ..., 9977, 9983, 9988]), + values=tensor([0.3684, 0.6722, 0.7880, ..., 0.5048, 0.0966, 0.9792]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4422, 0.8800, 0.2165, ..., 0.4558, 0.6103, 0.1393]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.428780555725098 seconds + +[40.18, 38.97, 39.04, 39.1, 38.99, 38.9, 38.95, 39.03, 44.22, 39.22] +[76.66] +15.191145896911621 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 700, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.428780555725098, 'TIME_S_1KI': 14.89825793675014, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1164.5532444572448, 'W': 76.66} +[40.18, 38.97, 39.04, 39.1, 38.99, 38.9, 38.95, 39.03, 44.22, 39.22, 40.59, 39.18, 39.25, 44.66, 38.99, 39.16, 38.92, 38.8, 39.17, 40.19] +715.42 +35.771 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 700, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.428780555725098, 'TIME_S_1KI': 14.89825793675014, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1164.5532444572448, 'W': 76.66, 'J_1KI': 1663.6474920817784, 'W_1KI': 109.5142857142857, 'W_D': 40.888999999999996, 'J_D': 621.1507645788192, 'W_D_1KI': 58.412857142857135, 'J_D_1KI': 83.44693877551019} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..a0b1b52 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 343, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.229193925857544, "TIME_S_1KI": 29.822722815911206, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1277.0280195808411, "W": 76.08, "J_1KI": 3723.1137597109073, "W_1KI": 221.8075801749271, "W_D": 40.66575, "J_D": 682.5880939441324, "W_D_1KI": 118.55903790087464, "J_D_1KI": 345.65317172266657} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..9529016 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 3.0540103912353516} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2046, 4073, ..., 19996013, + 19997969, 20000000]), + col_indices=tensor([ 3, 8, 17, ..., 9981, 9984, 9987]), + values=tensor([0.5017, 0.4094, 0.1287, ..., 0.4741, 0.2195, 0.3916]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.6824, 0.3340, 0.1820, ..., 0.2779, 0.3641, 0.6445]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 3.0540103912353516 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '343', '-ss', '10000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.229193925857544} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1942, 3919, ..., 19996087, + 19998013, 20000000]), + col_indices=tensor([ 9, 10, 17, ..., 9985, 9988, 9989]), + values=tensor([0.3594, 0.3340, 0.0020, ..., 0.8034, 0.9201, 0.1838]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.0654, 0.1078, 0.4601, ..., 0.8409, 0.3729, 0.1721]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.229193925857544 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1942, 3919, ..., 19996087, + 19998013, 20000000]), + col_indices=tensor([ 9, 10, 17, ..., 9985, 9988, 9989]), + values=tensor([0.3594, 0.3340, 0.0020, ..., 0.8034, 0.9201, 0.1838]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.0654, 0.1078, 0.4601, ..., 0.8409, 0.3729, 0.1721]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.229193925857544 seconds + +[39.53, 39.95, 39.42, 39.3, 39.16, 38.94, 39.07, 38.8, 39.46, 38.91] +[76.08] +16.78533148765564 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 343, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.229193925857544, 'TIME_S_1KI': 29.822722815911206, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1277.0280195808411, 'W': 76.08} +[39.53, 39.95, 39.42, 39.3, 39.16, 38.94, 39.07, 38.8, 39.46, 38.91, 39.85, 38.96, 39.07, 39.05, 39.38, 38.91, 39.06, 39.15, 39.29, 44.34] +708.285 +35.414249999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 343, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.229193925857544, 'TIME_S_1KI': 29.822722815911206, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1277.0280195808411, 'W': 76.08, 'J_1KI': 3723.1137597109073, 'W_1KI': 221.8075801749271, 'W_D': 40.66575, 'J_D': 682.5880939441324, 'W_D_1KI': 118.55903790087464, 'J_D_1KI': 345.65317172266657} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..a1f4a32 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 233, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.295273542404175, "TIME_S_1KI": 44.18572335795783, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1458.0208019256593, "W": 76.48, "J_1KI": 6257.6000082646315, "W_1KI": 328.2403433476395, "W_D": 40.760999999999996, "J_D": 777.0709454405307, "W_D_1KI": 174.9399141630901, "J_D_1KI": 750.8150822450219} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..d790296 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 4.5049638748168945} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3029, 6057, ..., 29993789, + 29996881, 30000000]), + col_indices=tensor([ 0, 1, 2, ..., 9988, 9991, 9998]), + values=tensor([0.8599, 0.6300, 0.6697, ..., 0.0214, 0.0757, 0.9206]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.5404, 0.3446, 0.4295, ..., 0.2969, 0.5137, 0.1316]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 4.5049638748168945 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '233', '-ss', '10000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.295273542404175} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2988, 6007, ..., 29993964, + 29997046, 30000000]), + col_indices=tensor([ 9, 16, 24, ..., 9996, 9997, 9999]), + values=tensor([0.2433, 0.7720, 0.0178, ..., 0.3342, 0.8303, 0.6867]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.4151, 0.6857, 0.4615, ..., 0.0665, 0.4824, 0.1217]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.295273542404175 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2988, 6007, ..., 29993964, + 29997046, 30000000]), + col_indices=tensor([ 9, 16, 24, ..., 9996, 9997, 9999]), + values=tensor([0.2433, 0.7720, 0.0178, ..., 0.3342, 0.8303, 0.6867]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.4151, 0.6857, 0.4615, ..., 0.0665, 0.4824, 0.1217]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.295273542404175 seconds + +[40.6, 39.0, 39.72, 40.47, 39.9, 39.66, 39.13, 38.97, 39.1, 39.62] +[76.48] +19.064079523086548 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 233, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.295273542404175, 'TIME_S_1KI': 44.18572335795783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1458.0208019256593, 'W': 76.48} +[40.6, 39.0, 39.72, 40.47, 39.9, 39.66, 39.13, 38.97, 39.1, 39.62, 40.89, 39.12, 39.47, 39.11, 38.93, 39.6, 38.9, 43.73, 39.55, 38.93] +714.3800000000001 +35.71900000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 233, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.295273542404175, 'TIME_S_1KI': 44.18572335795783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1458.0208019256593, 'W': 76.48, 'J_1KI': 6257.6000082646315, 'W_1KI': 328.2403433476395, 'W_D': 40.760999999999996, 'J_D': 777.0709454405307, 'W_D_1KI': 174.9399141630901, 'J_D_1KI': 750.8150822450219} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..9c3e1bc --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 362205, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.420795440673828, "TIME_S_1KI": 0.02877043508696409, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 858.7538476467132, "W": 65.88, "J_1KI": 2.3709055580312617, "W_1KI": 0.18188594856503915, "W_D": 30.515249999999995, "J_D": 397.7700113752484, "W_D_1KI": 0.08424856089783408, "J_D_1KI": 0.00023259911071860987} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..3b7f750 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1521 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.01423192024230957} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 998, 999, 1000]), + col_indices=tensor([3651, 8143, 8284, 1201, 8802, 9084, 518, 1318, 7113, + 4198, 6659, 361, 3967, 2631, 475, 1422, 3709, 9745, + 1114, 731, 1484, 190, 4372, 6889, 5946, 9134, 7399, + 9315, 9547, 5191, 558, 5996, 7786, 36, 2608, 6971, + 3588, 9206, 3929, 9738, 5532, 8672, 3550, 556, 7458, + 8249, 9648, 4644, 9311, 9352, 38, 6820, 8314, 7776, + 4648, 2648, 7188, 7862, 9766, 7529, 130, 2138, 9531, + 8955, 7529, 5567, 4237, 5643, 2920, 8945, 2985, 4202, + 7221, 9523, 9145, 9414, 1727, 482, 6337, 9385, 8259, + 3509, 9326, 4737, 9125, 4925, 237, 7538, 8759, 1847, + 9447, 2922, 470, 1647, 9673, 9620, 4380, 489, 1206, + 7536, 7237, 8859, 8031, 2617, 1541, 3066, 2051, 3249, + 799, 2618, 3289, 7373, 6080, 4605, 5686, 1742, 849, + 6896, 7211, 3112, 7256, 3784, 1106, 2254, 6134, 5896, + 1197, 668, 4080, 9988, 7197, 5551, 2494, 6286, 1000, + 4996, 8332, 1100, 2758, 798, 3892, 3478, 2185, 7918, + 2934, 778, 141, 2782, 3364, 3405, 1218, 2274, 7259, + 5489, 6646, 2341, 6847, 7023, 9242, 3864, 2758, 6683, + 8891, 1342, 9608, 1869, 4064, 5757, 8557, 505, 8921, + 9349, 4634, 352, 1820, 5776, 3014, 8861, 7895, 3485, + 5721, 197, 176, 117, 3954, 4225, 7039, 5248, 2463, + 2095, 11, 1760, 9171, 5730, 5601, 9323, 4622, 2426, + 3559, 8535, 6749, 1931, 7698, 9928, 8891, 5123, 8390, + 8697, 6602, 5844, 2162, 8439, 3522, 3358, 3931, 2301, + 4057, 5775, 5263, 8135, 9212, 9531, 3252, 7052, 6494, + 758, 7356, 4215, 9693, 8845, 7141, 2982, 531, 2113, + 4038, 615, 6178, 4213, 9543, 1003, 8259, 9546, 7414, + 7919, 4200, 52, 4324, 4762, 7129, 7642, 6073, 4560, + 2253, 6444, 669, 7596, 1750, 485, 1814, 6910, 8509, + 4617, 1203, 1628, 8037, 4709, 5873, 8080, 203, 987, + 8514, 9228, 3095, 6122, 3762, 801, 3892, 4470, 2052, + 5864, 5657, 6101, 8630, 7401, 6959, 6944, 6101, 4154, + 3180, 3057, 9407, 723, 3850, 364, 3889, 6447, 9732, + 3930, 7935, 4825, 3256, 5086, 2856, 8438, 4603, 8740, + 9625, 3540, 9399, 5097, 4034, 5917, 531, 7347, 5038, + 6274, 647, 1800, 1181, 8789, 5526, 6301, 3891, 6331, + 1478, 6263, 4830, 6993, 7924, 6574, 5359, 4883, 4837, + 5042, 947, 5442, 5733, 3374, 4555, 3375, 3285, 5832, + 9049, 3233, 166, 4580, 1532, 151, 2765, 7817, 8639, + 4125, 8484, 1732, 5071, 6465, 5679, 8488, 8603, 3469, + 9261, 9776, 846, 5701, 7438, 7977, 8568, 2276, 939, + 9123, 3558, 5796, 1651, 3499, 7225, 1382, 979, 1521, + 7794, 7875, 9433, 8620, 9143, 8536, 3539, 3123, 9572, + 8887, 6698, 648, 1852, 3737, 6106, 5365, 7996, 7696, + 6420, 4967, 723, 9089, 9304, 2947, 4078, 3384, 510, + 6462, 9968, 9761, 6304, 6398, 8470, 7323, 4206, 7644, + 801, 8103, 2105, 1747, 4824, 1219, 9728, 7888, 9670, + 2176, 6904, 3033, 9611, 7207, 9012, 1271, 657, 371, + 9123, 6710, 4619, 7211, 2155, 4610, 6025, 285, 9331, + 4113, 554, 5803, 1639, 6961, 1648, 415, 906, 7866, + 1924, 3252, 3636, 5550, 6715, 7435, 8064, 1106, 274, + 3628, 1664, 6161, 5478, 8830, 6266, 1459, 7707, 6348, + 9645, 9206, 4977, 7028, 6265, 4870, 2230, 9839, 9967, + 347, 6592, 8862, 5370, 9685, 8059, 7554, 1703, 4535, + 4005, 4350, 4668, 1143, 4889, 1870, 2588, 240, 675, + 71, 5834, 2880, 4006, 8645, 5427, 9818, 4307, 738, + 3961, 7964, 5288, 9417, 1982, 329, 8068, 4072, 5879, + 6097, 8617, 7503, 1410, 1823, 7077, 6135, 2488, 3328, + 
274, 921, 2060, 2645, 3571, 1043, 6316, 8871, 8412, + 5119, 7069, 2665, 85, 34, 8574, 2590, 637, 2202, + 6952, 7619, 3016, 9668, 7862, 6958, 6039, 6333, 7215, + 5464, 8477, 6548, 5285, 7311, 2003, 1410, 2913, 1526, + 9521, 8698, 6223, 3986, 3657, 6207, 7801, 5709, 3647, + 3252, 1035, 9604, 1181, 7037, 159, 4177, 5759, 4872, + 9240, 5252, 3872, 9571, 6044, 7719, 7565, 7149, 3588, + 6562, 3557, 6068, 2546, 6115, 1355, 6668, 4906, 8986, + 6500, 7496, 630, 8871, 9721, 5188, 8498, 7741, 5019, + 628, 8555, 587, 5660, 9280, 7630, 6539, 5875, 2329, + 5734, 8187, 7487, 1969, 563, 9588, 8397, 4526, 9056, + 9279, 1452, 7145, 4509, 8675, 9423, 4903, 6061, 3078, + 9854, 658, 2800, 8777, 3256, 6502, 655, 4202, 2340, + 7264, 2793, 9667, 7007, 1984, 2597, 8329, 973, 2964, + 5928, 5694, 7, 458, 1678, 8136, 7463, 5577, 1552, + 8802, 7791, 3267, 9658, 7328, 8612, 1072, 8025, 9476, + 1953, 9826, 5398, 8217, 3320, 6354, 9186, 4280, 429, + 8543, 1826, 2844, 6067, 725, 1315, 8628, 7368, 7864, + 4348, 880, 8731, 1212, 8814, 5248, 5301, 5948, 6407, + 5261, 1408, 3246, 3306, 9042, 2193, 3541, 9568, 5425, + 6999, 7594, 1750, 1807, 6659, 2171, 9504, 1409, 9150, + 8663, 1696, 1010, 8263, 1112, 7827, 8063, 2910, 334, + 16, 3900, 2459, 926, 9369, 1805, 4344, 9836, 4115, + 1369, 4988, 1627, 5156, 2353, 7777, 5894, 8446, 4883, + 2998, 1145, 6802, 3712, 7048, 8242, 4573, 1952, 9161, + 2948, 1393, 5629, 6077, 6217, 7741, 5490, 4760, 4928, + 1111, 4030, 3358, 6226, 9638, 8878, 3960, 3811, 7376, + 3518, 4285, 2019, 3229, 8382, 4467, 8984, 5949, 7869, + 9506, 3931, 1371, 3956, 1352, 8445, 4708, 282, 7584, + 1810, 9323, 6419, 249, 2071, 7240, 9768, 3331, 8772, + 8183, 1613, 8842, 2926, 7357, 3452, 3763, 4133, 7805, + 1885, 7240, 2702, 9042, 9269, 3584, 5278, 8194, 3343, + 7179, 8945, 3898, 5523, 3442, 7945, 2587, 5458, 9537, + 6622, 2944, 1110, 771, 236, 8935, 9666, 139, 7612, + 4134, 3984, 5023, 4683, 9275, 111, 7428, 7669, 7747, + 755, 2779, 3468, 8498, 3541, 7482, 944, 4464, 8516, + 463, 5427, 8364, 7748, 7401, 6892, 9346, 1909, 9249, + 86, 7583, 3063, 6943, 626, 6091, 2519, 1737, 8751, + 9124, 1009, 3316, 5731, 5829, 4431, 2023, 2450, 6131, + 8424, 4861, 7933, 8078, 1779, 8877, 9139, 4928, 6951, + 3919, 3482, 9094, 3852, 5471, 7856, 3392, 1994, 9752, + 3756, 1817, 2591, 2817, 8894, 629, 9298, 5223, 8822, + 1201, 4189, 9730, 9744, 8034, 9431, 7558, 9291, 4944, + 6796, 6751, 6082, 5976, 5338, 8944, 6438, 5985, 1828, + 2836, 465, 3960, 4405, 5510, 5454, 8633, 6603, 9533, + 8571, 4583, 7144, 6663, 4393, 2927, 2954, 8837, 5885, + 2961, 5939, 1827, 723, 7500, 2196, 8973, 455, 8932, + 7063, 3686, 5260, 2893, 3247, 6085, 381, 1076, 6381, + 5199, 7305, 6086, 9206, 3702, 9000, 1702, 2385, 6122, + 7104, 8300, 3094, 3205, 8369, 2427, 9735, 4152, 6605, + 5060, 8896, 8622, 7660, 6402, 2450, 225, 677, 5702, + 9898, 1760, 5944, 2917, 4615, 3267, 4326, 273, 948, + 9292, 3919, 1215, 2725, 3011, 184, 7139, 7428, 6341, + 6075]), + values=tensor([4.0893e-01, 7.4647e-01, 4.0914e-01, 3.5864e-01, + 4.8555e-01, 1.8092e-04, 3.0123e-01, 4.6002e-01, + 6.8513e-01, 7.7433e-01, 1.1833e-01, 7.7674e-01, + 2.1230e-02, 6.5005e-01, 2.5378e-03, 1.4380e-02, + 4.7469e-01, 7.3133e-01, 3.5139e-01, 9.5308e-01, + 8.5012e-01, 6.7265e-01, 3.0657e-01, 6.1900e-01, + 5.5589e-01, 9.4629e-01, 3.6042e-01, 3.1773e-02, + 5.5135e-01, 5.7262e-01, 8.6726e-01, 2.2990e-01, + 2.7172e-01, 8.1676e-01, 9.8110e-01, 6.1687e-02, + 9.1464e-01, 8.3395e-01, 5.0196e-02, 2.3382e-01, + 3.5840e-01, 6.2133e-01, 1.0935e-01, 1.9079e-01, + 9.0813e-01, 6.0670e-01, 9.4361e-01, 4.0716e-01, + 
2.6226e-01, 2.9494e-01, 4.5161e-01, 1.5794e-02, + 5.5005e-02, 2.7524e-01, 8.4708e-02, 2.7124e-01, + 5.9039e-01, 4.4167e-01, 3.5090e-02, 7.8814e-02, + 3.3788e-01, 3.3405e-01, 9.6722e-01, 2.9590e-01, + 8.2668e-01, 3.8429e-01, 1.1459e-01, 7.2456e-01, + 6.3513e-01, 5.5357e-01, 7.5112e-02, 4.6214e-01, + 8.2042e-01, 8.8648e-01, 5.7232e-01, 5.0082e-01, + 7.2914e-01, 4.7976e-01, 1.9845e-01, 7.8635e-01, + 8.4682e-02, 8.8008e-01, 3.3425e-01, 5.1799e-01, + 4.9607e-01, 3.9452e-03, 5.5016e-02, 3.1020e-01, + 8.9128e-02, 4.3443e-01, 2.9563e-01, 7.3845e-01, + 7.2493e-01, 1.5083e-01, 7.4143e-01, 2.7680e-01, + 8.7421e-01, 5.4248e-02, 4.9964e-01, 8.0841e-01, + 8.4502e-01, 3.6483e-01, 8.3624e-02, 7.4983e-01, + 1.2633e-01, 9.9425e-02, 4.8657e-02, 4.9780e-01, + 4.6150e-01, 6.6866e-01, 7.5273e-01, 6.5112e-01, + 2.2094e-01, 9.3158e-02, 2.0877e-01, 1.4861e-01, + 3.6390e-01, 5.0828e-01, 1.5597e-01, 4.6034e-01, + 9.3777e-02, 6.9770e-01, 6.5803e-02, 9.5397e-01, + 3.0578e-01, 9.4048e-01, 5.9922e-01, 7.7196e-01, + 9.7704e-01, 2.2835e-01, 9.9383e-01, 8.6800e-01, + 8.8874e-01, 7.2989e-01, 4.9534e-01, 1.1077e-01, + 1.9545e-01, 9.8031e-01, 7.0316e-01, 1.0654e-01, + 4.6569e-01, 9.8063e-01, 6.9036e-01, 6.1855e-01, + 1.7249e-01, 1.4035e-01, 6.4122e-01, 4.2890e-01, + 4.4597e-01, 2.0114e-01, 6.2076e-01, 2.6296e-01, + 5.1906e-01, 4.6291e-02, 4.3527e-01, 8.6879e-01, + 2.7018e-02, 6.7597e-01, 9.9164e-01, 2.6552e-01, + 7.9984e-01, 7.7352e-01, 1.7076e-01, 4.2917e-01, + 1.6896e-01, 9.6141e-02, 1.5324e-01, 2.2314e-01, + 1.1977e-01, 9.3202e-01, 8.6227e-01, 7.1184e-01, + 4.1014e-01, 5.2121e-01, 4.3074e-01, 1.5812e-01, + 6.7409e-02, 7.7385e-01, 9.2572e-01, 3.7243e-01, + 7.7639e-01, 1.3124e-01, 9.6748e-01, 9.9585e-01, + 9.4879e-02, 4.2174e-01, 1.0964e-01, 7.7620e-01, + 4.2591e-01, 1.0445e-01, 4.8496e-01, 8.8067e-01, + 4.7605e-01, 4.0209e-02, 2.9210e-01, 9.7444e-01, + 2.6426e-01, 7.9845e-01, 8.6184e-01, 2.8088e-01, + 4.7154e-01, 7.8332e-01, 3.3323e-01, 8.9045e-01, + 5.3742e-01, 5.4121e-01, 3.5738e-01, 2.8552e-01, + 6.6960e-01, 7.9798e-02, 9.5883e-02, 6.4858e-01, + 6.1198e-01, 3.9265e-01, 6.7901e-01, 7.7712e-01, + 7.3831e-01, 3.8690e-01, 2.8814e-01, 1.5900e-01, + 7.4089e-01, 3.3823e-02, 6.0486e-02, 1.9784e-02, + 7.5396e-01, 6.7253e-01, 2.0684e-01, 3.3255e-01, + 5.8123e-01, 9.0516e-01, 8.4270e-02, 5.9737e-01, + 9.1544e-01, 3.8624e-01, 8.6974e-01, 5.9614e-01, + 3.4793e-01, 2.2719e-01, 5.0042e-01, 3.1822e-01, + 1.4557e-02, 3.8869e-02, 2.4391e-01, 2.0417e-01, + 6.8119e-01, 9.6452e-03, 8.2586e-01, 3.7726e-01, + 7.5797e-01, 7.3036e-01, 1.0288e-01, 5.9685e-01, + 4.1494e-01, 6.9899e-01, 3.6873e-01, 8.5351e-01, + 8.1078e-01, 8.1658e-01, 2.2052e-01, 1.4704e-01, + 1.1582e-01, 2.2602e-01, 9.1009e-01, 7.8758e-01, + 2.0662e-01, 9.7393e-01, 1.5186e-01, 2.0325e-01, + 5.2871e-01, 7.1085e-01, 7.7983e-01, 3.6386e-01, + 8.1438e-03, 2.8844e-01, 9.8254e-01, 9.2260e-01, + 9.3691e-01, 4.4583e-01, 6.9386e-01, 7.4592e-01, + 8.5702e-01, 8.7501e-01, 8.6228e-01, 6.8437e-02, + 8.8003e-01, 5.9655e-02, 5.5984e-01, 5.3624e-01, + 5.1137e-01, 6.6329e-02, 4.5292e-01, 9.3699e-01, + 9.3817e-01, 9.4024e-01, 8.9176e-01, 1.0596e-01, + 8.7853e-01, 8.9555e-01, 9.5549e-01, 9.6768e-01, + 6.0145e-01, 5.1318e-01, 9.7239e-01, 8.2053e-01, + 1.5019e-01, 9.3464e-01, 7.3976e-01, 1.5624e-01, + 5.8975e-01, 6.6958e-01, 4.4599e-01, 6.0277e-02, + 8.8260e-01, 9.3177e-01, 8.0332e-01, 9.8789e-01, + 4.1076e-01, 2.3903e-01, 3.9403e-01, 8.9861e-01, + 6.0636e-01, 5.1990e-01, 8.5455e-01, 3.3246e-01, + 7.3782e-01, 1.4832e-01, 5.5070e-01, 4.8315e-01, + 4.3169e-01, 2.1798e-01, 1.4116e-02, 1.8134e-01, + 
8.1250e-02, 3.4893e-01, 1.9401e-01, 9.6046e-01, + 3.8487e-01, 4.5091e-01, 4.4411e-01, 6.2029e-02, + 5.7398e-01, 8.6131e-01, 8.9893e-02, 1.7085e-01, + 8.5289e-01, 3.7000e-01, 2.3685e-01, 2.6378e-01, + 9.1867e-01, 6.1129e-01, 8.6874e-01, 1.9254e-01, + 8.3643e-02, 6.8821e-01, 2.7187e-01, 4.6686e-01, + 9.7703e-01, 5.0208e-01, 2.6366e-01, 6.2374e-01, + 2.3604e-01, 9.9467e-01, 8.0512e-01, 8.0824e-02, + 1.8891e-01, 1.0550e-01, 5.5150e-01, 1.2517e-01, + 2.7982e-01, 2.4216e-01, 4.1069e-01, 6.8340e-01, + 7.3819e-02, 9.1765e-01, 5.7174e-01, 9.9094e-01, + 6.6466e-01, 3.5731e-01, 1.8235e-01, 4.5610e-01, + 8.7234e-01, 5.0580e-01, 2.8747e-01, 8.1041e-01, + 4.7505e-01, 2.0576e-01, 8.9010e-01, 8.4393e-01, + 4.7564e-01, 9.1611e-01, 3.3871e-01, 5.8593e-01, + 9.1005e-01, 4.6075e-01, 2.2976e-01, 1.6047e-01, + 6.2572e-02, 8.8079e-01, 7.9591e-01, 3.7074e-02, + 2.5624e-01, 5.3032e-01, 3.1496e-01, 5.5436e-01, + 5.2931e-01, 9.5389e-01, 6.3221e-01, 1.7242e-01, + 8.3925e-01, 7.5080e-01, 8.9636e-01, 7.4764e-01, + 9.0250e-01, 8.2000e-01, 2.9637e-01, 6.3107e-01, + 5.1566e-01, 9.1943e-01, 4.8464e-01, 2.2422e-01, + 7.0865e-01, 3.7071e-03, 8.8154e-01, 8.4300e-01, + 5.9617e-01, 7.8834e-01, 9.3422e-02, 6.2256e-01, + 8.3740e-01, 8.4745e-01, 2.3007e-02, 5.3812e-01, + 2.3587e-01, 2.9263e-01, 8.1439e-01, 2.4432e-01, + 3.1496e-01, 6.8237e-01, 9.9900e-01, 5.1112e-01, + 6.1402e-02, 2.0358e-01, 6.3097e-01, 1.7750e-01, + 3.5024e-01, 3.0596e-02, 7.3322e-01, 8.2818e-01, + 6.9285e-01, 1.8523e-01, 8.6152e-01, 9.3401e-01, + 4.5793e-01, 4.1794e-01, 4.9425e-01, 6.8516e-01, + 6.5776e-01, 6.9317e-01, 3.2227e-02, 2.8983e-01, + 8.6778e-01, 8.3223e-01, 6.3141e-01, 4.1697e-01, + 1.5997e-01, 2.8398e-02, 3.6903e-01, 7.4846e-01, + 9.4236e-01, 6.7992e-01, 7.9570e-01, 7.0022e-01, + 3.9911e-01, 5.4460e-01, 5.3406e-01, 6.1707e-02, + 3.4846e-01, 7.8485e-03, 5.2423e-01, 1.8395e-01, + 1.2384e-02, 2.1221e-01, 9.8422e-01, 2.7750e-02, + 7.8006e-02, 8.5223e-01, 7.3279e-01, 7.5232e-01, + 9.8682e-02, 5.9891e-01, 7.2723e-01, 7.2170e-01, + 6.7640e-01, 3.1676e-01, 7.2952e-01, 8.8475e-01, + 1.0979e-01, 1.0927e-01, 3.1374e-01, 3.8162e-01, + 6.8310e-01, 8.9795e-01, 6.9303e-01, 2.0847e-01, + 7.0549e-01, 4.6013e-01, 1.0482e-02, 8.2480e-02, + 6.3437e-01, 2.2931e-01, 5.8909e-01, 1.5036e-01, + 9.5013e-02, 3.0604e-02, 3.4294e-01, 3.0982e-01, + 3.3888e-01, 7.3004e-01, 2.7535e-01, 4.6383e-01, + 3.7714e-01, 1.6771e-01, 2.8789e-01, 6.4774e-01, + 8.7569e-01, 5.8565e-01, 3.7890e-01, 1.7734e-01, + 8.5514e-01, 6.2505e-01, 2.0834e-01, 4.7834e-01, + 4.1297e-01, 1.9860e-01, 2.9647e-02, 4.3259e-01, + 2.8413e-01, 5.6185e-01, 8.2575e-02, 6.8264e-02, + 7.9173e-01, 9.4058e-01, 9.9665e-01, 1.5687e-01, + 9.0528e-01, 8.6377e-01, 6.8574e-01, 7.6422e-02, + 8.4351e-01, 4.1954e-01, 5.1337e-02, 9.5963e-02, + 7.5659e-01, 8.6958e-01, 7.4293e-02, 8.5173e-01, + 2.3423e-01, 3.5272e-03, 4.5855e-01, 2.5929e-01, + 1.8317e-01, 3.9174e-01, 1.9440e-01, 7.1044e-01, + 7.0894e-01, 8.5999e-02, 8.3721e-01, 6.8479e-01, + 6.4997e-01, 5.8657e-01, 5.1681e-01, 3.9751e-01, + 4.2349e-01, 2.1141e-01, 3.0925e-01, 3.4787e-02, + 3.7440e-01, 3.1224e-01, 2.9507e-01, 4.5275e-01, + 4.4228e-01, 9.2088e-01, 7.0840e-01, 8.1934e-02, + 8.9574e-01, 2.9883e-01, 9.7423e-01, 8.2101e-01, + 6.5546e-02, 3.5597e-01, 7.5741e-01, 3.4616e-01, + 8.3611e-01, 8.8683e-01, 2.5757e-01, 6.8378e-01, + 7.9689e-01, 9.6721e-01, 4.2741e-01, 3.3442e-02, + 8.7813e-01, 6.5055e-01, 1.7699e-01, 9.4707e-01, + 9.8286e-01, 5.1212e-01, 7.7437e-01, 5.3901e-01, + 8.9561e-01, 1.0947e-02, 3.2047e-01, 7.3343e-01, + 2.7138e-01, 9.5091e-01, 2.0941e-01, 5.3765e-01, + 
6.1969e-01, 9.5062e-01, 9.8183e-01, 9.1239e-01, + 4.0820e-02, 7.5090e-01, 5.9426e-01, 3.6137e-01, + 5.3664e-01, 2.4273e-01, 7.0358e-01, 8.6205e-01, + 9.1349e-01, 3.4262e-01, 4.5848e-01, 3.6446e-01, + 6.4786e-01, 8.8286e-01, 4.6119e-01, 2.3044e-01, + 3.5263e-01, 6.8361e-01, 4.7663e-01, 5.6685e-03, + 8.3359e-01, 3.5310e-01, 8.9949e-02, 5.1295e-01, + 8.6279e-01, 7.2924e-01, 7.0823e-01, 4.7497e-01, + 9.8642e-01, 9.6950e-01, 3.1822e-01, 8.9707e-01, + 8.2130e-02, 8.3490e-01, 2.5776e-01, 5.1683e-01, + 1.1497e-01, 8.3218e-01, 8.6213e-01, 3.2832e-03, + 7.0292e-01, 2.8831e-01, 4.4333e-01, 7.2009e-01, + 4.0793e-01, 4.5162e-01, 3.6693e-01, 1.2147e-01, + 7.6792e-01, 1.5089e-01, 6.2167e-01, 3.5245e-01, + 9.3500e-01, 7.9017e-01, 9.1171e-01, 7.0950e-01, + 7.5701e-01, 8.5804e-01, 4.4625e-01, 6.2964e-01, + 4.0825e-01, 4.8145e-01, 3.6250e-01, 2.1616e-01, + 5.0502e-01, 2.4223e-02, 4.9976e-01, 9.0505e-01, + 9.3497e-01, 8.6822e-01, 8.5824e-01, 1.7667e-01, + 2.2820e-01, 3.3221e-01, 9.1383e-01, 1.5961e-01, + 2.3015e-01, 5.6818e-01, 9.9248e-01, 9.5778e-01, + 4.5782e-01, 3.0766e-01, 4.2287e-01, 6.0590e-01, + 3.1194e-01, 6.2603e-01, 1.2652e-01, 6.6990e-01, + 2.7961e-01, 8.6904e-01, 6.2781e-01, 8.5423e-01, + 2.5994e-01, 3.5572e-01, 8.9677e-01, 6.9735e-02, + 8.7326e-01, 2.3486e-01, 6.7756e-01, 9.1506e-01, + 6.6235e-01, 6.3638e-01, 6.2212e-01, 4.2214e-01, + 9.6628e-01, 4.7979e-01, 5.5599e-01, 6.6975e-01, + 9.1770e-01, 8.4921e-01, 7.8228e-01, 4.9541e-01, + 2.4676e-01, 5.8229e-01, 6.3363e-01, 5.8999e-01, + 5.5398e-01, 1.7297e-01, 2.6442e-01, 6.6954e-01, + 4.0705e-01, 1.5092e-01, 8.6820e-01, 8.0155e-01, + 9.6246e-01, 5.4199e-02, 6.1116e-01, 1.0232e-01, + 8.6891e-01, 2.4082e-01, 5.6454e-01, 2.4664e-01, + 3.0002e-01, 3.8365e-01, 4.3945e-01, 7.7554e-01, + 3.5987e-01, 4.1034e-01, 1.7631e-01, 4.3247e-01, + 4.9655e-01, 9.4554e-01, 5.2332e-01, 9.9976e-01, + 2.3445e-01, 4.2321e-01, 1.2031e-01, 6.9179e-01, + 9.5785e-01, 9.2308e-01, 2.4116e-01, 9.9876e-01, + 2.0342e-01, 8.9867e-01, 3.4996e-01, 2.3225e-01, + 2.8198e-01, 2.7293e-01, 7.0256e-02, 8.3430e-01, + 9.7407e-01, 2.1501e-02, 5.6137e-02, 8.3712e-01, + 2.1155e-01, 2.0680e-01, 5.8653e-01, 7.0146e-01, + 2.4499e-01, 6.1291e-01, 6.7401e-02, 5.2936e-02, + 5.5710e-01, 3.6277e-01, 6.4019e-01, 8.5335e-01, + 4.7761e-01, 2.3988e-01, 5.6900e-01, 2.0262e-01, + 4.5640e-01, 4.8010e-01, 8.7735e-01, 9.6990e-01, + 1.2396e-02, 7.7949e-01, 7.6676e-01, 2.7081e-01, + 5.2024e-01, 2.6366e-01, 7.9351e-01, 7.7967e-01, + 5.1343e-01, 8.6651e-01, 7.1801e-01, 5.4343e-01, + 5.7478e-01, 6.1938e-01, 7.5548e-02, 9.0783e-01, + 5.9722e-01, 7.1908e-01, 2.7312e-01, 4.9747e-01, + 1.7016e-01, 7.4263e-01, 9.3484e-01, 2.5565e-01, + 7.6354e-01, 6.9252e-01, 8.9558e-01, 5.9991e-01, + 7.4127e-01, 2.4916e-01, 3.2608e-01, 1.1447e-01, + 2.9080e-01, 2.2015e-01, 2.7586e-01, 9.3624e-01, + 5.0901e-01, 9.2773e-01, 2.3779e-01, 7.2414e-01, + 6.2115e-01, 8.8543e-03, 9.5559e-01, 5.5237e-01, + 5.9702e-02, 3.0996e-02, 2.7452e-01, 9.3476e-01, + 9.2334e-01, 1.0158e-01, 8.1141e-01, 4.2710e-01, + 9.4909e-02, 3.4917e-01, 1.6636e-01, 7.4557e-01, + 8.8926e-01, 4.1535e-01, 8.2911e-01, 1.4004e-01, + 1.3374e-02, 1.4100e-01, 9.0306e-01, 4.8827e-02, + 7.7186e-01, 6.3321e-01, 1.9462e-01, 3.9827e-01, + 5.9589e-01, 5.0163e-01, 5.5641e-01, 3.2488e-01, + 6.6930e-01, 5.1964e-01, 3.1040e-01, 8.3101e-01, + 9.1606e-01, 8.0023e-01, 3.2635e-01, 6.2952e-01, + 7.4512e-01, 2.9163e-01, 3.0956e-01, 5.7898e-01, + 3.8483e-01, 3.5295e-01, 7.1281e-02, 9.3899e-02, + 7.5473e-01, 5.6119e-01, 7.2374e-01, 3.3493e-01, + 2.1353e-01, 4.1028e-01, 4.3380e-01, 4.9443e-01, + 
8.3914e-01, 4.0980e-01, 2.2353e-01, 4.6856e-01, + 9.8532e-01, 4.1826e-01, 7.2795e-01, 8.8187e-01, + 2.2991e-01, 5.6937e-01, 8.7997e-01, 9.8732e-01, + 8.9366e-01, 3.8819e-01, 8.6363e-01, 1.1832e-01, + 3.8550e-01, 7.5854e-01, 5.3971e-01, 4.0107e-01, + 7.1399e-02, 7.0022e-01, 3.9042e-01, 4.6769e-01, + 4.1568e-01, 1.8784e-03, 5.3264e-01, 3.9975e-01, + 7.9966e-01, 1.8820e-01, 1.7925e-02, 3.9911e-01, + 9.2246e-01, 1.7928e-01, 8.4736e-01, 7.0479e-01, + 5.7742e-01, 3.6414e-01, 6.4537e-01, 6.3918e-01, + 9.7580e-01, 2.1688e-01, 5.4784e-01, 7.4740e-01, + 1.3211e-01, 3.6429e-02, 4.4621e-01, 6.6190e-01, + 5.3119e-01, 2.5916e-01, 1.5296e-03, 5.4547e-01, + 7.1935e-01, 1.0792e-01, 5.1658e-02, 8.1958e-02, + 4.1507e-01, 8.1530e-01, 1.1213e-01, 1.5070e-01, + 6.7624e-01, 8.1166e-01, 8.6140e-01, 2.2385e-01, + 9.3921e-01, 4.0510e-01, 8.1991e-01, 8.7893e-01, + 2.8937e-01, 1.6184e-01, 2.8851e-01, 7.3226e-01, + 9.5694e-01, 6.4905e-01, 2.8654e-01, 3.4884e-01, + 9.3195e-01, 2.5569e-01, 8.9239e-01, 9.7412e-02, + 4.6401e-01, 6.8605e-01, 1.7844e-01, 1.5800e-01, + 6.8510e-01, 6.7166e-01, 3.4981e-02, 3.1457e-02, + 2.0280e-01, 5.6851e-01, 1.3432e-01, 8.7812e-01, + 7.2954e-01, 8.1625e-01, 3.7929e-01, 7.5942e-01, + 5.3153e-01, 5.4974e-01, 9.8914e-01, 7.2178e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.1442, 0.3479, 0.0736, ..., 0.7713, 0.5566, 0.1675]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.01423192024230957 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '73777', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.1387269496917725} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([5508, 9575, 656, 2627, 8566, 5820, 3018, 3373, 4414, + 3055, 6937, 1153, 3250, 1389, 6514, 3345, 2855, 7135, + 4792, 7175, 808, 4068, 3395, 4430, 2729, 8023, 6573, + 5208, 6115, 4870, 2959, 9004, 6154, 6036, 7804, 4772, + 5201, 296, 3325, 6381, 5380, 9121, 1445, 4957, 6867, + 4006, 2284, 5865, 3974, 9462, 3299, 49, 9461, 6670, + 3714, 3027, 4310, 2400, 5954, 7235, 2580, 2868, 7198, + 3736, 5562, 9005, 8912, 2276, 8194, 5812, 8468, 2983, + 6818, 255, 9224, 6925, 9166, 298, 3685, 1181, 2606, + 9590, 7743, 2755, 3440, 9622, 8827, 767, 6647, 6657, + 9003, 6161, 3158, 3383, 8984, 4164, 6155, 1581, 6250, + 3148, 5225, 7492, 1641, 3667, 1192, 2607, 7402, 6138, + 2597, 2358, 274, 9224, 7866, 4138, 5342, 38, 3457, + 5561, 5899, 2231, 5345, 7211, 5666, 1005, 6429, 1717, + 4864, 6032, 2544, 6728, 2581, 9245, 4695, 381, 3082, + 2086, 6195, 4752, 4134, 1810, 9562, 3721, 268, 4151, + 5614, 5819, 1155, 8820, 5775, 7686, 2525, 6174, 5674, + 9202, 279, 7651, 9040, 5, 6561, 3605, 6968, 2146, + 953, 377, 4363, 187, 7768, 751, 9638, 1330, 4286, + 1751, 8364, 6205, 8982, 8713, 8415, 8469, 6147, 491, + 5083, 2289, 8302, 8307, 1182, 2477, 2676, 5713, 9967, + 4103, 6320, 780, 3861, 5582, 5133, 3823, 1563, 4798, + 4512, 4446, 7915, 4568, 903, 1523, 3572, 5001, 1933, + 1949, 8734, 4705, 2155, 9618, 715, 6214, 4699, 997, + 7101, 2851, 9068, 1466, 7697, 4478, 7542, 2752, 2412, + 18, 2463, 5978, 4225, 6611, 4731, 5581, 7116, 7970, + 2865, 8155, 4666, 604, 7169, 7837, 4336, 6912, 697, + 5910, 8813, 5358, 8366, 9544, 501, 4615, 2803, 6919, + 2746, 1371, 4480, 7793, 6743, 6203, 1713, 865, 1494, + 9168, 9742, 4247, 3057, 8065, 4346, 2523, 6892, 1113, + 3827, 763, 132, 1453, 4262, 294, 8688, 9365, 3282, + 8252, 7662, 5148, 6621, 3594, 8378, 4159, 6261, 4499, + 7194, 6189, 9208, 1850, 3329, 2636, 8760, 3724, 3020, + 3329, 3956, 184, 9442, 8259, 7422, 5104, 240, 5838, + 1954, 7486, 8601, 3925, 7339, 2015, 1974, 3758, 3482, + 8722, 5421, 9520, 4546, 5331, 6717, 5812, 7975, 7442, + 7940, 4158, 7967, 6116, 3811, 7786, 1220, 4785, 6185, + 579, 3103, 7158, 8394, 2983, 6440, 5229, 2484, 1111, + 16, 2051, 1935, 293, 2058, 4920, 7225, 9659, 5108, + 639, 1031, 4323, 8725, 2666, 7298, 3414, 5109, 6513, + 2574, 266, 919, 1740, 8088, 5280, 894, 6663, 1130, + 7446, 4099, 20, 7217, 3332, 8885, 3112, 2296, 5865, + 56, 4230, 7201, 4767, 2688, 1411, 9682, 7533, 7713, + 5376, 8528, 19, 1667, 3225, 7209, 9374, 9673, 8929, + 5186, 3393, 5754, 458, 7524, 5689, 1857, 584, 2192, + 9671, 8149, 7375, 3650, 7201, 7617, 524, 5283, 3542, + 8092, 5244, 7073, 8028, 4271, 9656, 7882, 874, 2907, + 6918, 3419, 5521, 9656, 2461, 9839, 5975, 5084, 1139, + 6395, 805, 1410, 4457, 9849, 8263, 514, 713, 4473, + 3973, 1009, 6909, 7592, 3285, 3898, 5360, 2525, 440, + 9369, 1527, 3851, 9222, 3247, 3317, 4058, 7353, 2516, + 4234, 7642, 6022, 9914, 5180, 6543, 8560, 1441, 6117, + 2991, 8437, 3934, 9954, 7373, 1482, 1468, 7800, 5928, + 9144, 2690, 4445, 134, 7037, 9825, 1077, 7704, 5017, + 7739, 1905, 2423, 2516, 175, 6651, 6001, 6897, 6784, + 4971, 6198, 365, 1638, 7617, 6571, 7493, 3203, 704, + 2950, 2519, 5939, 5997, 931, 7748, 1950, 8185, 5176, + 890, 3070, 2791, 6438, 8457, 5669, 4411, 1482, 7050, + 601, 287, 5291, 4620, 2195, 1746, 4986, 8569, 3581, + 9031, 6202, 4210, 8431, 5885, 6652, 8914, 5202, 5034, + 816, 9352, 3322, 4967, 2768, 6719, 1660, 5297, 984, + 7606, 4288, 353, 7241, 1058, 9337, 9042, 97, 7482, + 3656, 4781, 
9768, 5551, 9382, 6204, 3873, 3658, 9543, + 7766, 9333, 6514, 8310, 3838, 9481, 1887, 7793, 7682, + 9769, 4432, 4077, 5553, 8666, 5009, 9686, 1897, 5687, + 3382, 5919, 6551, 985, 8716, 2117, 7814, 5439, 7352, + 9154, 1069, 2145, 3994, 2450, 9754, 2935, 404, 138, + 6862, 4476, 5505, 2562, 2599, 2713, 4755, 6459, 7992, + 5580, 8294, 5009, 2485, 3945, 804, 8407, 3840, 3255, + 3135, 998, 5023, 8897, 4666, 1858, 4752, 135, 5372, + 1971, 2696, 7255, 1370, 8484, 4531, 4478, 5553, 2961, + 5403, 8082, 2803, 4478, 8089, 2563, 8344, 6470, 7823, + 5312, 5620, 5527, 4242, 8621, 2330, 4398, 642, 8991, + 7171, 9790, 5285, 6026, 3822, 3235, 4680, 6765, 3912, + 6543, 1450, 8818, 7558, 3261, 2113, 2373, 6919, 1131, + 6346, 3897, 1491, 1956, 4891, 7597, 996, 4074, 4722, + 2917, 5715, 8477, 9008, 7897, 4755, 7127, 2468, 4303, + 8306, 3974, 8907, 6722, 8711, 5733, 5028, 9135, 1866, + 7880, 3122, 2881, 8959, 8975, 55, 2311, 518, 7346, + 3155, 8803, 9740, 1957, 1812, 9667, 6289, 8136, 3531, + 1701, 495, 6279, 3572, 1162, 849, 217, 4374, 1639, + 1008, 5290, 4005, 9949, 9499, 7223, 3926, 7792, 354, + 8743, 429, 9875, 4421, 8853, 4992, 7345, 2313, 5947, + 2043, 7821, 7045, 3713, 1991, 9737, 2501, 6974, 8470, + 9355, 1042, 5178, 8527, 3387, 490, 4642, 8954, 1137, + 8655, 3394, 6784, 7051, 1774, 6167, 9750, 8404, 3082, + 6552, 4440, 522, 3508, 6337, 1418, 9452, 7117, 6648, + 8517, 766, 124, 6291, 1485, 2849, 7280, 459, 6691, + 7248, 5962, 9416, 2853, 3078, 5487, 3279, 8934, 9426, + 8788, 3551, 5452, 6149, 8603, 4993, 498, 4502, 2852, + 8524, 8859, 7620, 8162, 97, 6066, 6387, 8989, 817, + 1568, 246, 5600, 6289, 8988, 5342, 8448, 5649, 3692, + 688, 5698, 9881, 5150, 1629, 4058, 8276, 7736, 2593, + 1359, 4844, 9564, 7059, 7297, 9003, 798, 1319, 4119, + 2381, 7125, 4085, 273, 5623, 8157, 7485, 7765, 726, + 6395, 7157, 2290, 1726, 6046, 982, 9604, 2732, 8470, + 7237, 619, 3919, 7222, 540, 9760, 4380, 7356, 6016, + 7784, 4321, 395, 6423, 8568, 6214, 7974, 3353, 8129, + 6464, 9392, 9492, 3759, 472, 9308, 9344, 5127, 7423, + 6806, 1474, 2823, 5706, 1764, 6599, 9778, 8657, 7022, + 3614, 4485, 2452, 7525, 3149, 8601, 9645, 4269, 1503, + 6069, 2741, 3306, 863, 1229, 9656, 4386, 1768, 867, + 4819, 8547, 4244, 4948, 4190, 762, 9133, 131, 6780, + 6079, 8208, 5847, 9316, 7287, 8495, 2586, 1656, 4465, + 8814, 4625, 11, 2919, 3804, 7982, 6935, 9826, 7505, + 1259, 1040, 140, 9789, 2693, 5742, 2306, 7537, 8992, + 8296, 9987, 103, 8057, 643, 1848, 2330, 3674, 7402, + 1282, 4176, 8432, 9278, 6798, 342, 6639, 8590, 2414, + 6534, 9031, 318, 5748, 4899, 3462, 8454, 2064, 8941, + 716, 5685, 1233, 1844, 1555, 318, 7269, 8269, 8949, + 7099, 9893, 538, 3010, 6057, 7431, 5445, 2794, 2504, + 5841]), + values=tensor([3.2633e-01, 7.9191e-01, 3.6473e-01, 5.2553e-02, + 5.7875e-01, 8.9363e-01, 9.6431e-01, 4.0938e-02, + 1.5024e-01, 9.3457e-02, 8.5666e-01, 6.6021e-01, + 1.9759e-01, 8.0939e-01, 7.4204e-01, 2.8880e-01, + 7.9046e-01, 7.8851e-01, 7.0371e-01, 1.3685e-01, + 5.0481e-01, 3.6903e-01, 9.2692e-01, 8.9307e-02, + 4.0071e-01, 8.5389e-02, 6.8778e-01, 9.1185e-01, + 3.9447e-01, 4.1056e-01, 6.9100e-03, 4.4205e-01, + 8.9282e-01, 9.3322e-01, 8.3638e-01, 1.9546e-01, + 4.4549e-01, 8.8377e-01, 8.9362e-01, 6.5160e-01, + 3.2815e-02, 6.3599e-01, 2.9856e-01, 3.8738e-01, + 5.7386e-01, 2.8263e-01, 5.0030e-01, 3.7536e-01, + 2.7007e-01, 9.5128e-01, 6.1301e-01, 9.8955e-01, + 4.8124e-01, 8.0247e-01, 9.1091e-01, 7.1646e-01, + 8.6645e-01, 9.2664e-03, 3.1727e-01, 1.0464e-01, + 7.6432e-01, 3.2572e-01, 6.4872e-01, 4.6614e-01, + 6.3000e-01, 9.5522e-01, 3.9473e-01, 
3.8621e-01, + 5.6019e-01, 1.1553e-01, 5.3397e-01, 4.0184e-01, + 6.5600e-01, 7.2218e-01, 5.3125e-01, 6.7148e-01, + 1.5411e-01, 2.4424e-01, 6.2084e-03, 1.7181e-01, + 5.5933e-01, 8.9801e-02, 3.3998e-01, 1.1723e-01, + 3.0450e-01, 3.8459e-01, 9.8748e-01, 9.0307e-01, + 8.4006e-01, 7.4864e-01, 3.1639e-01, 2.1710e-01, + 7.3644e-01, 3.8356e-01, 3.0116e-01, 3.5797e-02, + 6.7960e-01, 3.8349e-01, 8.9489e-01, 6.5540e-01, + 7.5185e-02, 8.8098e-02, 8.4915e-01, 4.5570e-01, + 4.1326e-01, 2.2841e-01, 5.7832e-01, 9.8572e-01, + 2.1038e-01, 3.1384e-01, 6.5102e-01, 7.8064e-02, + 8.1578e-01, 9.6599e-01, 4.3348e-01, 4.0471e-01, + 6.6705e-01, 9.9374e-01, 9.3593e-01, 2.7792e-01, + 7.6408e-01, 7.1910e-01, 3.0730e-01, 9.2789e-02, + 5.0816e-01, 2.0054e-01, 9.0756e-01, 4.2139e-02, + 2.2042e-02, 3.8699e-01, 5.8026e-01, 3.2550e-01, + 1.6808e-01, 8.3002e-01, 4.0081e-01, 9.5268e-01, + 5.0592e-01, 6.5938e-01, 1.9963e-01, 4.0528e-01, + 2.7731e-02, 1.9693e-01, 6.1060e-01, 6.3866e-01, + 5.3688e-01, 5.2347e-01, 2.5349e-01, 2.9615e-01, + 9.7016e-01, 3.4954e-01, 2.6613e-01, 3.3523e-02, + 1.5584e-01, 8.3051e-02, 6.0614e-01, 8.6788e-01, + 6.0641e-01, 3.2123e-01, 3.1342e-01, 8.0290e-01, + 9.8955e-01, 3.0445e-01, 7.7405e-01, 4.4438e-01, + 2.4247e-01, 1.0812e-01, 3.8171e-02, 1.6700e-01, + 9.9278e-01, 1.6124e-01, 1.4018e-01, 9.9672e-01, + 1.7920e-01, 8.7088e-01, 9.3567e-01, 9.0612e-01, + 7.3360e-01, 8.6982e-01, 1.8853e-01, 2.8631e-02, + 3.1908e-01, 5.4943e-01, 5.4554e-01, 5.7148e-01, + 8.7372e-01, 8.1331e-01, 3.5535e-01, 6.6365e-02, + 3.7900e-01, 4.4060e-01, 6.9755e-01, 8.4770e-02, + 2.6765e-01, 7.1295e-01, 5.9500e-01, 9.3206e-01, + 2.8045e-01, 9.7087e-01, 2.9626e-01, 3.4970e-01, + 9.7291e-01, 3.3104e-01, 7.5134e-02, 4.9646e-01, + 7.3787e-01, 9.4535e-01, 9.9298e-01, 7.8352e-02, + 7.1369e-01, 5.2407e-01, 3.4781e-01, 2.9479e-02, + 7.9036e-01, 3.2159e-02, 2.9663e-01, 4.0057e-01, + 7.5187e-01, 7.2985e-02, 9.9586e-01, 3.5315e-01, + 7.0513e-01, 8.3438e-01, 1.1850e-01, 6.1954e-01, + 7.4807e-01, 4.7444e-01, 1.4806e-01, 4.6297e-01, + 8.2386e-01, 3.7491e-01, 5.2912e-01, 4.8493e-01, + 9.8747e-01, 8.2071e-01, 6.0411e-01, 2.6614e-01, + 4.7854e-01, 7.0928e-01, 4.7195e-01, 1.2463e-01, + 6.5685e-01, 3.5210e-01, 3.3489e-01, 3.4371e-01, + 5.6556e-01, 5.8336e-02, 2.5168e-01, 1.1237e-03, + 6.1377e-01, 1.4862e-01, 4.7916e-01, 9.8286e-01, + 7.2047e-01, 6.1947e-01, 2.5084e-02, 8.6610e-01, + 3.6497e-01, 7.1297e-01, 4.6956e-01, 3.6140e-01, + 7.0052e-01, 5.8768e-01, 8.9326e-01, 5.2583e-01, + 9.7503e-01, 5.9070e-01, 9.4841e-01, 7.0689e-01, + 4.8720e-01, 1.2759e-01, 5.2594e-01, 5.1674e-01, + 2.2215e-01, 4.8116e-01, 5.4493e-01, 1.6438e-01, + 1.6212e-01, 1.4592e-01, 9.3453e-01, 5.4120e-01, + 9.9377e-01, 9.5104e-01, 3.6132e-01, 5.3946e-01, + 9.9391e-01, 4.6287e-01, 2.8328e-01, 6.1212e-01, + 4.1816e-01, 7.1117e-01, 7.2036e-01, 4.2976e-02, + 2.1797e-01, 6.2665e-01, 4.2489e-02, 6.7964e-01, + 4.1152e-01, 2.0657e-01, 7.4251e-01, 6.2519e-01, + 4.6706e-01, 6.5784e-01, 1.7252e-01, 8.8152e-01, + 1.6930e-01, 9.4893e-01, 8.8209e-01, 8.7479e-01, + 4.1704e-01, 8.7646e-01, 6.5490e-01, 5.1932e-01, + 8.4152e-01, 6.8650e-01, 9.4596e-01, 6.7239e-01, + 7.6111e-01, 8.3939e-01, 8.3015e-01, 8.0600e-02, + 5.4688e-01, 4.2004e-01, 3.5995e-01, 8.7290e-01, + 2.9848e-01, 4.0104e-01, 7.6500e-01, 7.5102e-01, + 2.0463e-01, 2.7033e-01, 9.7413e-01, 5.9293e-01, + 4.1711e-01, 6.1095e-01, 2.3521e-01, 7.4961e-01, + 7.3623e-01, 8.2256e-01, 6.1390e-01, 5.1919e-01, + 2.8273e-01, 4.5435e-01, 8.5019e-02, 9.4253e-01, + 3.1807e-02, 5.6156e-01, 4.3673e-01, 2.3393e-01, + 4.5771e-01, 1.4899e-01, 8.7682e-01, 
9.2175e-01, + 7.3314e-01, 1.1596e-01, 2.1438e-01, 1.1876e-01, + 2.7871e-01, 6.2895e-01, 1.0399e-01, 6.5021e-01, + 9.7906e-01, 6.0746e-01, 1.0704e-01, 8.5925e-01, + 2.2433e-01, 4.9315e-01, 2.7459e-01, 7.3299e-01, + 2.6631e-01, 4.4259e-01, 1.6375e-01, 8.4103e-01, + 6.8640e-02, 9.0139e-01, 3.6033e-01, 8.9148e-01, + 1.4670e-01, 2.1681e-01, 2.2662e-01, 6.6090e-02, + 2.7846e-01, 7.6443e-01, 6.3337e-01, 1.4853e-01, + 3.1082e-01, 4.9872e-01, 8.4014e-01, 9.8999e-01, + 6.7688e-01, 1.6975e-01, 3.7138e-01, 8.1020e-01, + 7.0790e-02, 1.3523e-01, 3.7468e-02, 7.9415e-01, + 2.7812e-01, 1.2167e-01, 4.3224e-01, 5.5136e-01, + 4.0724e-02, 2.6502e-01, 6.1419e-01, 8.7909e-01, + 9.5819e-01, 9.5194e-01, 2.6001e-01, 2.3975e-01, + 6.7454e-01, 5.7007e-01, 5.3707e-01, 8.1178e-01, + 7.9154e-01, 2.2539e-01, 7.8655e-01, 6.5846e-01, + 2.6535e-01, 9.7806e-01, 7.1020e-01, 8.3252e-01, + 9.6863e-01, 7.5960e-01, 5.7156e-01, 2.7669e-01, + 6.3128e-01, 7.7289e-01, 7.1228e-01, 7.7482e-01, + 1.6904e-01, 8.1997e-01, 9.7624e-01, 5.6560e-01, + 2.7663e-01, 2.3951e-01, 4.4379e-01, 2.8076e-02, + 4.4129e-02, 7.9282e-01, 3.5166e-01, 7.1038e-01, + 4.7806e-01, 8.0129e-01, 1.8759e-02, 2.9831e-01, + 9.6097e-01, 7.5028e-01, 6.7067e-01, 2.5064e-01, + 7.3552e-02, 2.2102e-01, 4.0798e-02, 7.3754e-01, + 5.2514e-01, 1.2280e-01, 7.7926e-01, 5.8105e-01, + 5.6506e-01, 3.0079e-01, 5.7893e-01, 4.5377e-01, + 4.5091e-02, 6.1516e-01, 8.0451e-01, 8.1548e-01, + 4.9070e-01, 9.4442e-01, 6.7333e-01, 4.6352e-01, + 9.4087e-01, 6.6036e-01, 7.7251e-01, 4.8492e-01, + 5.5056e-01, 6.1609e-01, 4.1240e-01, 7.6930e-01, + 1.5580e-01, 5.5480e-01, 6.6083e-01, 7.9676e-01, + 2.1381e-01, 9.8321e-01, 6.0074e-01, 2.8389e-01, + 8.3180e-01, 8.8109e-01, 1.2639e-01, 6.2125e-01, + 7.0439e-01, 9.1146e-01, 5.1574e-01, 9.8774e-01, + 8.2024e-01, 2.4417e-01, 8.8016e-01, 8.1631e-01, + 1.4643e-01, 9.8658e-01, 2.7646e-01, 1.2864e-01, + 2.9087e-02, 1.4354e-01, 6.0524e-01, 2.8677e-01, + 1.4899e-01, 4.6367e-01, 5.1784e-01, 9.8821e-01, + 4.8069e-01, 8.3426e-01, 1.7525e-01, 5.0974e-01, + 3.0878e-01, 2.9609e-01, 6.8677e-01, 2.0869e-01, + 3.2086e-01, 3.7111e-01, 6.1587e-02, 7.4378e-02, + 4.2695e-01, 1.4850e-01, 5.4335e-02, 8.4394e-01, + 8.0973e-01, 4.7836e-01, 8.4671e-01, 9.7012e-01, + 1.0626e-01, 4.9016e-01, 4.0803e-01, 9.7115e-01, + 7.5995e-01, 9.5447e-01, 8.6611e-01, 7.0402e-01, + 7.9939e-01, 2.3092e-01, 4.4970e-01, 3.9381e-01, + 1.5460e-01, 9.1987e-01, 4.3357e-01, 7.2170e-01, + 9.1074e-01, 9.8918e-01, 2.9317e-01, 2.4967e-01, + 6.4585e-01, 8.0204e-01, 4.6814e-01, 3.0193e-01, + 8.1103e-01, 1.4968e-01, 8.8709e-01, 5.7762e-01, + 8.7050e-01, 1.3270e-01, 3.7713e-01, 5.9660e-01, + 6.3653e-01, 2.9283e-01, 2.1496e-01, 8.4861e-01, + 5.2665e-01, 4.4147e-01, 5.6040e-01, 2.4280e-01, + 1.0891e-01, 1.3062e-01, 9.9818e-01, 3.2711e-02, + 7.8364e-01, 8.8951e-01, 1.1528e-01, 9.1362e-01, + 1.8879e-01, 4.1744e-01, 9.0491e-01, 2.1200e-01, + 5.8240e-02, 1.4926e-01, 2.8456e-01, 5.7208e-01, + 3.9875e-01, 3.0948e-01, 8.5836e-02, 2.0035e-01, + 7.1400e-01, 7.8279e-01, 6.8278e-01, 6.6938e-01, + 9.4297e-01, 1.8496e-01, 1.5518e-02, 1.0389e-01, + 4.1335e-01, 9.0194e-02, 5.3872e-01, 7.5585e-01, + 3.4338e-01, 1.1621e-01, 7.9511e-01, 5.9093e-01, + 6.0436e-01, 3.4533e-01, 6.2464e-01, 6.2137e-01, + 2.6066e-01, 2.1593e-01, 5.7001e-01, 2.9622e-01, + 8.6756e-01, 4.5066e-01, 6.3181e-01, 7.8115e-01, + 1.1513e-01, 3.3336e-01, 5.9404e-01, 5.5227e-01, + 4.8272e-01, 4.5789e-01, 5.2434e-01, 8.7910e-01, + 3.6237e-01, 3.6341e-01, 2.2645e-01, 2.1768e-01, + 4.5579e-01, 3.4246e-01, 7.0649e-01, 2.9172e-01, + 1.5275e-01, 9.9131e-01, 6.6686e-01, 
3.1529e-01, + 9.5275e-01, 5.2154e-01, 6.0311e-01, 6.9413e-01, + 5.2394e-01, 2.9542e-01, 6.9079e-01, 3.5982e-01, + 2.0311e-01, 2.2891e-01, 8.3397e-01, 7.9481e-01, + 7.7724e-01, 6.7393e-01, 5.1568e-01, 8.6496e-01, + 8.7170e-01, 9.9110e-02, 8.4462e-01, 8.3862e-01, + 8.1703e-01, 7.4599e-01, 8.0526e-01, 4.4458e-02, + 9.9751e-01, 7.3077e-01, 4.6991e-01, 7.3467e-01, + 9.3161e-02, 9.5937e-01, 6.5874e-01, 6.4726e-01, + 1.5051e-01, 5.7551e-01, 7.2428e-01, 9.3155e-01, + 6.2676e-01, 9.6143e-01, 8.6169e-01, 4.6347e-01, + 8.7308e-01, 1.8969e-02, 8.3502e-01, 8.9361e-01, + 1.9404e-01, 4.3472e-02, 2.0120e-01, 6.9356e-01, + 9.4889e-01, 5.6077e-01, 8.6433e-01, 8.7016e-01, + 7.3158e-01, 4.4803e-01, 2.3998e-01, 9.7238e-01, + 4.3879e-01, 2.9965e-01, 8.3148e-01, 2.4330e-01, + 6.7838e-01, 6.1669e-01, 7.2623e-01, 1.2354e-01, + 2.2972e-01, 2.7296e-01, 9.8873e-01, 7.5405e-01, + 3.4953e-01, 2.5789e-01, 6.3716e-01, 8.4804e-01, + 1.8777e-01, 5.7827e-01, 4.9683e-01, 8.6743e-01, + 2.8557e-01, 7.6619e-01, 2.2211e-01, 6.8093e-01, + 9.5156e-01, 6.3191e-01, 8.7661e-01, 9.1812e-02, + 8.0288e-01, 9.5932e-01, 8.4664e-01, 5.6317e-01, + 4.1155e-01, 5.6613e-01, 6.7922e-01, 5.0367e-01, + 7.7000e-01, 2.3487e-01, 6.5953e-01, 2.6442e-01, + 4.8671e-01, 1.3645e-01, 3.1847e-01, 3.7867e-01, + 6.8019e-01, 6.2500e-01, 8.7404e-01, 7.5376e-01, + 1.0881e-01, 2.1589e-01, 4.1967e-01, 2.7998e-01, + 9.8292e-01, 1.8108e-01, 4.3913e-01, 1.4646e-01, + 6.9060e-01, 4.8902e-01, 7.7270e-01, 6.7527e-01, + 4.5794e-01, 6.8861e-01, 8.4179e-01, 8.6936e-01, + 8.8470e-01, 8.3332e-01, 4.5104e-01, 2.2109e-01, + 6.7705e-01, 3.9307e-02, 1.4542e-01, 7.5689e-01, + 7.3681e-01, 1.4316e-01, 1.3469e-01, 9.2693e-01, + 7.9835e-02, 4.8747e-01, 2.3991e-01, 5.0674e-01, + 8.1572e-01, 8.9171e-01, 9.4262e-01, 4.4242e-02, + 4.8771e-01, 2.2083e-01, 7.7910e-01, 6.0918e-01, + 7.4097e-02, 1.2246e-01, 6.0510e-01, 9.1036e-01, + 5.6118e-01, 1.6158e-01, 6.7291e-01, 1.8197e-01, + 1.7103e-01, 9.5862e-01, 4.6520e-01, 1.2359e-01, + 3.6908e-01, 9.8844e-01, 3.6924e-03, 4.3265e-01, + 8.5218e-01, 1.4758e-01, 1.7384e-01, 6.5237e-02, + 8.2651e-01, 8.8829e-01, 2.6356e-01, 4.3625e-01, + 2.5112e-01, 9.8859e-01, 7.9307e-01, 6.3628e-01, + 8.3428e-01, 1.7687e-02, 2.6174e-01, 9.5803e-01, + 1.4430e-01, 2.1325e-01, 1.4008e-01, 6.7486e-01, + 2.8336e-01, 1.6215e-01, 9.0703e-01, 8.7796e-01, + 3.4117e-01, 5.4612e-01, 1.4746e-01, 4.1956e-01, + 2.7025e-02, 7.3708e-02, 8.1368e-01, 5.0288e-01, + 5.6119e-01, 4.8670e-01, 5.7631e-01, 3.1818e-01, + 6.6127e-01, 7.7339e-01, 7.0503e-02, 9.2309e-01, + 3.9171e-01, 7.5288e-02, 3.6269e-01, 4.9846e-01, + 8.1495e-01, 9.4113e-01, 4.7373e-02, 3.0034e-01, + 2.2161e-01, 4.3905e-01, 3.3523e-01, 8.3970e-01, + 2.1837e-01, 8.3822e-01, 9.9775e-01, 5.5403e-01, + 8.9486e-01, 1.4148e-01, 9.0127e-01, 9.7059e-02, + 3.2741e-01, 1.0701e-01, 2.7984e-01, 7.7278e-01, + 6.8926e-01, 2.8243e-01, 3.0207e-01, 6.5028e-02, + 2.4311e-01, 2.9351e-01, 7.3131e-01, 4.2415e-01, + 7.4454e-01, 6.9421e-01, 2.5342e-01, 1.5780e-01, + 2.9231e-01, 1.4667e-01, 6.9582e-02, 8.6692e-01, + 5.4682e-01, 4.6227e-01, 4.6657e-01, 1.0327e-01, + 1.8106e-01, 7.5880e-01, 3.2925e-01, 1.2240e-01, + 3.1478e-01, 9.1416e-01, 9.6930e-03, 1.3241e-01, + 8.0456e-01, 7.4784e-01, 6.9926e-02, 8.4918e-01, + 8.2454e-01, 8.7979e-01, 1.6012e-01, 3.4099e-01, + 1.3158e-01, 5.1930e-01, 3.2542e-01, 8.0769e-01, + 1.1181e-02, 9.1890e-02, 7.2786e-01, 6.7172e-02, + 8.3525e-02, 7.9476e-01, 6.4096e-01, 2.8445e-01, + 3.8206e-01, 4.6326e-01, 4.8276e-01, 2.3710e-01, + 5.7913e-01, 5.4204e-01, 7.5509e-01, 5.9955e-01, + 1.1200e-01, 9.5213e-04, 9.0864e-01, 
6.2336e-01, + 4.3307e-01, 3.6198e-01, 9.8456e-01, 2.8949e-01, + 4.7497e-01, 5.7509e-01, 3.3429e-01, 7.0479e-01, + 9.6075e-01, 9.2558e-01, 3.6179e-01, 8.2591e-01, + 5.9758e-01, 5.3473e-01, 2.7530e-02, 9.6342e-02, + 9.3529e-02, 1.6467e-01, 8.9391e-01, 8.5922e-01, + 1.4915e-01, 1.8769e-01, 1.0016e-01, 8.4777e-02, + 3.6040e-01, 8.2450e-01, 3.4686e-01, 8.2301e-01, + 8.9691e-02, 4.3378e-01, 1.3710e-01, 4.0908e-01, + 6.9494e-01, 9.9675e-01, 9.8566e-01, 1.3164e-02, + 6.9583e-01, 8.3204e-01, 2.1108e-01, 8.1028e-01, + 6.7781e-01, 2.9978e-01, 3.4866e-01, 2.9128e-01, + 8.2911e-01, 3.9288e-01, 9.7244e-01, 5.5754e-01, + 8.6365e-01, 1.9935e-01, 4.2606e-01, 6.5710e-01, + 1.1321e-01, 8.9438e-01, 5.3287e-02, 6.7747e-01, + 1.3725e-01, 9.6097e-01, 6.1965e-01, 7.3747e-01, + 1.9823e-01, 8.4099e-01, 9.3609e-01, 2.6566e-01, + 2.6173e-01, 6.9673e-01, 9.6055e-01, 7.2272e-03, + 8.9838e-01, 5.5549e-01, 4.9628e-01, 2.5939e-01, + 2.4806e-01, 5.9743e-01, 7.2404e-01, 5.7379e-01, + 9.2371e-01, 9.8965e-02, 1.7327e-01, 7.3460e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.0752, 0.0265, 0.5369, ..., 0.1368, 0.6161, 0.4463]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 2.1387269496917725 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '362205', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.420795440673828} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([ 367, 5928, 4913, 9662, 9837, 1139, 7195, 709, 1186, + 80, 7429, 8354, 3731, 4238, 1204, 8286, 5350, 6518, + 6581, 5481, 8412, 8567, 4483, 270, 6809, 6052, 9047, + 8886, 6265, 2683, 8840, 6468, 5705, 4463, 7370, 8923, + 7324, 752, 3820, 476, 1511, 1152, 7156, 6378, 1601, + 6251, 5183, 964, 1260, 884, 6109, 4781, 1936, 3415, + 7691, 4888, 5674, 6459, 8140, 8932, 2864, 6432, 1356, + 2665, 9855, 2360, 1289, 3774, 7837, 576, 7725, 1293, + 907, 2542, 7673, 5326, 1656, 1077, 464, 7656, 5438, + 524, 4581, 8099, 2687, 4078, 5342, 5240, 9155, 9342, + 4985, 2318, 5185, 4603, 8415, 3279, 2083, 7925, 8071, + 9166, 9303, 9469, 5789, 7778, 8645, 7134, 7594, 2178, + 218, 3344, 3951, 8834, 4013, 7845, 8331, 9692, 3137, + 7730, 5023, 8474, 4718, 38, 726, 535, 2913, 4635, + 2716, 3591, 9066, 2660, 3345, 7905, 2026, 1175, 9827, + 6627, 4714, 9765, 1301, 2421, 949, 3573, 4881, 5681, + 2117, 1844, 8550, 9611, 6234, 1475, 3891, 4140, 4791, + 497, 5133, 1193, 8459, 1053, 2110, 5799, 6433, 6363, + 6452, 1314, 1357, 383, 1853, 7981, 8433, 7415, 6026, + 6286, 7644, 2079, 2595, 9578, 6356, 4227, 8298, 2069, + 2653, 5142, 7404, 4079, 1966, 7533, 6062, 6415, 8225, + 8783, 5739, 3815, 1201, 4896, 5097, 5808, 8945, 5520, + 6206, 511, 7347, 334, 3119, 1570, 7713, 2906, 3536, + 6334, 7998, 298, 3942, 6558, 2963, 4842, 2263, 5657, + 6492, 6276, 9075, 5298, 2267, 9620, 5320, 1568, 8760, + 7490, 9058, 1722, 5460, 3464, 1842, 2377, 9510, 1053, + 3322, 9012, 5664, 8967, 8790, 9286, 1946, 3141, 8192, + 6659, 3495, 4958, 4102, 7004, 6177, 5317, 6593, 7640, + 6307, 8035, 9650, 8087, 4138, 5288, 54, 277, 9498, + 8689, 2955, 8232, 1954, 321, 4627, 4226, 4258, 7653, + 5234, 2531, 412, 5196, 6249, 3047, 8171, 9428, 3213, + 9763, 1374, 9103, 7900, 6626, 2118, 7765, 3728, 6106, + 4954, 4753, 8313, 550, 6888, 8896, 9965, 3061, 2947, + 8278, 5879, 5273, 5453, 181, 2437, 4117, 7283, 5591, + 8602, 8473, 5720, 1866, 7656, 8837, 3905, 7472, 7158, + 3023, 2882, 9989, 8681, 648, 6274, 6847, 3312, 5656, + 7680, 6098, 4463, 9954, 2825, 6574, 7011, 1957, 6137, + 6725, 3576, 5559, 9535, 4552, 2200, 3109, 9611, 9451, + 1309, 2875, 3230, 5628, 9907, 8010, 6405, 222, 8761, + 2120, 9374, 4858, 4514, 5442, 9330, 8281, 8858, 4407, + 6543, 35, 3825, 9633, 1009, 2627, 3906, 1955, 7496, + 3563, 3539, 8202, 7400, 7587, 7952, 2607, 3232, 9574, + 748, 9829, 4992, 1810, 7308, 6501, 6785, 7235, 1137, + 8724, 2405, 7482, 6759, 4652, 5655, 4530, 6784, 9512, + 4088, 6049, 9230, 7810, 9658, 5737, 1295, 738, 747, + 7672, 2503, 5907, 2220, 201, 5120, 7515, 3259, 3879, + 3039, 9321, 8605, 9482, 4181, 7348, 9537, 587, 6394, + 3984, 306, 1193, 3228, 778, 134, 9046, 7121, 4357, + 4190, 4050, 4701, 5483, 6713, 8937, 2483, 711, 8665, + 3008, 3841, 9929, 1156, 7864, 8900, 4657, 7749, 2947, + 9699, 2414, 4266, 6048, 8585, 5785, 2832, 7866, 2519, + 9463, 5405, 1165, 1777, 9786, 9696, 3031, 653, 1886, + 1959, 8043, 4843, 484, 4688, 1972, 2038, 6598, 7240, + 1462, 7254, 3087, 1879, 7554, 1558, 7001, 9016, 8895, + 5649, 4937, 9704, 4961, 7644, 8442, 2757, 2242, 9502, + 2976, 4095, 4676, 9126, 5855, 4163, 6430, 137, 106, + 3030, 655, 8531, 7089, 648, 854, 5910, 4435, 7738, + 4805, 6752, 444, 2066, 5929, 8674, 9769, 2865, 2752, + 5321, 130, 2489, 807, 9550, 1633, 7468, 9068, 9241, + 6808, 7365, 968, 8661, 9018, 3550, 6104, 2629, 7508, + 35, 8351, 2048, 4626, 2188, 1746, 7387, 3093, 1503, + 1452, 7638, 1336, 9281, 6296, 6140, 7102, 9783, 1071, 
+ 4115, 8592, 8292, 2729, 6495, 356, 8036, 1239, 4223, + 6109, 2038, 6313, 7535, 4790, 2693, 2125, 7954, 5865, + 3268, 2522, 6271, 2184, 4150, 1470, 3491, 6995, 9734, + 2067, 3839, 7338, 6815, 4421, 307, 289, 5776, 7623, + 5550, 9770, 6085, 800, 6471, 6365, 7940, 7253, 9023, + 3056, 1623, 7408, 4931, 2227, 3852, 1329, 3500, 1597, + 680, 4795, 1426, 733, 9436, 1001, 976, 8821, 8330, + 6704, 5168, 673, 3362, 4167, 8558, 5789, 6484, 7568, + 5585, 6431, 6874, 5521, 5213, 7970, 3020, 8092, 5025, + 6800, 7632, 7512, 3989, 4195, 1578, 2940, 3628, 3648, + 6431, 2736, 7676, 5158, 5635, 1844, 6595, 8947, 5903, + 51, 9169, 4098, 5691, 6814, 9766, 6514, 9970, 2239, + 2046, 3280, 2714, 5806, 2677, 82, 1113, 2963, 2180, + 2566, 2541, 3825, 9253, 8388, 6485, 870, 1684, 7333, + 793, 9898, 5870, 5155, 6286, 9563, 9068, 5713, 2343, + 1715, 3887, 723, 1944, 2953, 3319, 9854, 144, 718, + 6674, 4026, 8079, 7531, 4342, 6924, 6779, 6546, 7462, + 9962, 1432, 8607, 1875, 2805, 7948, 2976, 1046, 7036, + 4949, 7102, 477, 7850, 2174, 1961, 4920, 8186, 1308, + 179, 2095, 3644, 8483, 9153, 627, 2488, 890, 3123, + 4350, 5492, 1701, 1645, 3932, 3331, 2933, 6624, 7332, + 7665, 5288, 7681, 6550, 7545, 730, 5687, 802, 6864, + 7451, 2117, 4878, 4869, 1248, 1137, 1380, 8950, 5598, + 7429, 4233, 1060, 5569, 6689, 8608, 3567, 4558, 7535, + 9199, 2985, 2282, 235, 9129, 8582, 2072, 7435, 8445, + 9048, 7905, 3709, 7810, 5385, 5720, 2662, 2668, 7912, + 4854, 9556, 9550, 3132, 8229, 4856, 4161, 1914, 745, + 65, 7058, 9271, 7846, 8928, 5152, 5482, 8156, 4590, + 3430, 90, 6589, 2814, 3078, 1382, 8736, 5505, 2054, + 8404, 4829, 8221, 2458, 952, 6527, 2277, 8961, 5925, + 2066, 2154, 3256, 2466, 7237, 2076, 9446, 4670, 7006, + 2231, 8823, 7168, 5299, 1101, 7045, 7082, 8944, 4016, + 4074, 4258, 3864, 3141, 4450, 5115, 5389, 9949, 1172, + 9433, 1642, 1479, 3769, 9973, 4747, 4872, 389, 8601, + 9119, 8563, 2438, 6873, 1818, 9226, 937, 6705, 3733, + 2036, 669, 2029, 332, 7663, 8899, 3889, 9735, 9326, + 8050, 2621, 3330, 992, 9581, 1488, 8125, 6995, 2528, + 7140, 6221, 1417, 9802, 898, 1689, 3308, 303, 2075, + 6622, 9167, 6725, 8722, 8663, 7196, 3835, 4992, 1316, + 6283, 4758, 9632, 641, 8717, 8903, 403, 3496, 5247, + 9491, 1114, 4440, 9898, 4961, 6497, 4291, 3663, 5161, + 7340, 9289, 8727, 7951, 3216, 9826, 2744, 1742, 7964, + 1100, 4645, 9, 4874, 3732, 9378, 1625, 9820, 3610, + 1545, 6999, 6918, 1895, 3580, 9207, 2130, 5891, 4573, + 1060, 9808, 4116, 943, 4244, 5724, 4961, 3837, 8840, + 389, 1348, 833, 3514, 5170, 1683, 7789, 9831, 5974, + 9125, 5396, 7463, 6389, 1948, 8359, 6877, 7522, 8087, + 7219, 1695, 4668, 662, 3386, 4803, 1414, 7277, 1481, + 9053, 9285, 9508, 5533, 205, 8818, 4845, 7696, 8623, + 9000]), + values=tensor([9.0347e-01, 8.9097e-01, 3.9050e-01, 1.1761e-01, + 7.3236e-01, 1.8076e-01, 1.7435e-01, 3.1968e-01, + 6.2094e-02, 6.9707e-01, 9.2156e-01, 2.5833e-01, + 3.9175e-01, 8.0344e-02, 8.5392e-01, 7.0595e-02, + 6.3035e-01, 4.6849e-01, 4.1355e-01, 5.1765e-01, + 6.7505e-01, 7.7298e-01, 3.0196e-01, 2.7160e-01, + 4.9835e-01, 7.7842e-01, 7.9100e-01, 6.1241e-01, + 9.2766e-01, 8.1158e-01, 9.2979e-01, 2.0087e-01, + 6.3444e-01, 1.1919e-01, 2.9666e-01, 8.1344e-01, + 2.3658e-01, 7.2770e-01, 3.4239e-01, 6.0915e-01, + 3.8756e-01, 4.8989e-01, 5.2955e-01, 9.2633e-01, + 7.9699e-01, 9.0118e-01, 6.9847e-02, 3.2137e-02, + 8.8024e-01, 6.0500e-01, 4.6450e-01, 8.5960e-02, + 5.6130e-01, 3.3831e-01, 9.3616e-01, 8.2763e-01, + 2.4098e-01, 8.9643e-01, 9.8468e-01, 8.8817e-01, + 4.6338e-01, 1.1147e-02, 8.1814e-01, 9.2206e-02, + 1.0005e-01, 6.2906e-01, 
6.9146e-01, 1.5603e-01, + 3.1235e-01, 6.2561e-01, 3.7711e-01, 6.5022e-01, + 1.2930e-01, 6.7901e-02, 7.2399e-01, 7.8736e-01, + 8.8706e-02, 7.0933e-01, 9.0665e-01, 1.0525e-01, + 7.0947e-01, 7.6785e-01, 7.4226e-01, 9.8559e-01, + 9.2459e-01, 1.7157e-01, 7.4478e-01, 7.9596e-01, + 7.2144e-01, 1.9958e-01, 6.1043e-01, 2.7464e-01, + 5.8302e-01, 9.7091e-01, 4.8610e-01, 8.4624e-01, + 8.7645e-01, 7.2595e-01, 7.4275e-01, 2.6952e-01, + 8.8491e-01, 6.7085e-02, 7.5574e-01, 6.1435e-01, + 1.3665e-01, 8.8809e-01, 2.2500e-01, 7.6147e-01, + 5.8642e-01, 8.3791e-01, 3.2752e-01, 4.9244e-01, + 6.5160e-01, 6.2237e-01, 5.6666e-01, 1.2589e-01, + 5.4657e-01, 1.6810e-01, 1.1783e-01, 3.3742e-01, + 6.8221e-01, 1.2268e-02, 4.7183e-01, 6.4275e-01, + 3.3058e-01, 6.2514e-01, 2.7607e-01, 5.2090e-01, + 3.9798e-01, 7.4704e-02, 3.1431e-01, 2.3156e-01, + 2.4983e-01, 7.9235e-01, 6.1773e-02, 4.1696e-01, + 2.9905e-01, 6.8152e-01, 3.4211e-01, 6.0393e-01, + 3.6186e-02, 8.0620e-01, 9.8874e-01, 4.6252e-01, + 8.4231e-01, 9.1653e-01, 4.6135e-01, 2.8152e-01, + 9.7319e-01, 4.3848e-01, 5.7170e-01, 4.9901e-01, + 7.4233e-01, 2.6089e-01, 8.3401e-01, 2.3202e-01, + 8.4769e-01, 8.7678e-01, 7.1420e-01, 8.9378e-01, + 5.6124e-01, 4.0637e-01, 3.8693e-01, 8.0881e-02, + 2.6688e-01, 5.6776e-01, 4.3004e-01, 4.7054e-01, + 2.0392e-01, 7.2817e-01, 6.6755e-01, 5.1867e-01, + 5.2670e-01, 1.4618e-01, 7.3183e-01, 8.6973e-01, + 1.9126e-01, 4.9009e-01, 2.5587e-01, 4.0650e-01, + 9.8806e-01, 9.6733e-01, 1.1171e-01, 7.9880e-01, + 6.7184e-01, 7.4651e-02, 1.7680e-01, 9.4485e-01, + 8.4713e-01, 1.4694e-01, 9.3410e-01, 6.8790e-01, + 6.7995e-01, 3.4766e-01, 3.3209e-01, 8.5193e-01, + 5.7959e-01, 2.9187e-01, 3.0229e-01, 9.7462e-01, + 7.8153e-01, 2.9900e-01, 3.0648e-01, 9.4053e-01, + 7.5039e-01, 7.3664e-01, 2.3912e-01, 5.6769e-01, + 9.2225e-01, 1.2617e-01, 4.4870e-01, 5.2352e-01, + 4.9831e-01, 4.2978e-01, 5.8379e-03, 5.1101e-01, + 1.1850e-02, 4.3076e-02, 2.0400e-01, 4.3425e-02, + 2.0565e-01, 2.5026e-01, 5.8630e-01, 9.9979e-01, + 5.5005e-01, 2.8369e-01, 7.0988e-01, 9.9716e-01, + 1.0843e-01, 5.2002e-01, 8.1336e-01, 4.4712e-01, + 1.7801e-01, 4.1042e-01, 8.1718e-01, 7.5846e-01, + 6.4002e-01, 8.1764e-01, 9.2236e-01, 2.7385e-01, + 2.2693e-01, 1.3388e-01, 2.7481e-01, 2.7276e-01, + 6.3817e-01, 8.1593e-01, 7.1721e-01, 1.0760e-01, + 3.2614e-01, 4.3753e-03, 9.9770e-01, 9.1052e-01, + 1.4963e-01, 1.3781e-01, 1.1034e-01, 1.1895e-02, + 8.2173e-01, 2.6975e-01, 3.5686e-01, 5.8749e-01, + 1.5253e-01, 6.6786e-01, 9.1749e-01, 8.5611e-01, + 5.9711e-01, 8.9352e-01, 7.2370e-01, 4.2727e-01, + 4.7201e-01, 6.1878e-01, 1.3255e-01, 4.0640e-01, + 6.1650e-01, 9.0122e-01, 3.3783e-01, 1.2667e-01, + 5.3203e-01, 7.8517e-01, 4.7198e-01, 8.0031e-01, + 9.8415e-02, 6.3035e-01, 8.4415e-01, 4.0094e-01, + 3.8505e-01, 5.1696e-01, 9.5335e-01, 7.6890e-01, + 5.9772e-01, 8.7886e-01, 7.4086e-01, 8.4781e-01, + 1.6493e-01, 3.3683e-01, 7.1960e-03, 2.9042e-01, + 8.4813e-01, 1.8192e-01, 6.0102e-01, 4.2567e-01, + 9.3317e-02, 3.8724e-01, 3.5787e-01, 9.9992e-01, + 1.2443e-01, 8.2890e-01, 2.8195e-01, 6.3051e-01, + 6.0894e-01, 9.5920e-01, 6.4943e-01, 6.5476e-01, + 6.7692e-01, 5.7763e-01, 7.8022e-01, 4.1886e-01, + 5.0205e-01, 2.5778e-01, 9.0479e-01, 8.7006e-01, + 6.1226e-01, 6.4647e-01, 1.5596e-01, 7.0047e-01, + 6.7722e-01, 5.4595e-01, 4.9292e-01, 9.0526e-01, + 4.8174e-02, 8.1995e-02, 7.1448e-01, 2.7007e-02, + 6.2586e-01, 6.7855e-01, 4.6902e-01, 3.6021e-01, + 3.9907e-01, 1.8609e-02, 6.0745e-01, 2.4758e-01, + 6.2019e-02, 9.3085e-01, 3.9521e-01, 8.9117e-01, + 5.9487e-01, 4.2672e-02, 7.4925e-01, 8.7766e-02, + 5.8346e-01, 6.5657e-01, 
8.0155e-01, 4.5151e-01, + 5.5583e-01, 7.1994e-01, 6.1938e-01, 4.9424e-01, + 8.4852e-01, 2.5577e-01, 7.5244e-01, 5.8561e-01, + 1.6622e-01, 5.3017e-01, 7.0377e-01, 1.6795e-01, + 6.1195e-01, 5.0235e-01, 4.0852e-01, 2.4062e-01, + 3.7706e-01, 7.1399e-01, 1.5932e-01, 2.0246e-01, + 9.3118e-01, 3.6824e-01, 9.2308e-01, 4.1388e-01, + 3.9074e-01, 2.1232e-02, 4.8230e-01, 3.1334e-02, + 7.6588e-01, 8.9020e-01, 9.5571e-02, 9.3996e-01, + 3.3502e-01, 1.6456e-02, 4.8873e-01, 9.0152e-01, + 6.0836e-01, 6.2449e-02, 2.3911e-01, 2.5569e-01, + 1.9099e-01, 6.5319e-01, 3.4599e-01, 9.4871e-01, + 1.5293e-01, 9.6291e-01, 5.9772e-01, 9.2359e-01, + 6.8691e-01, 6.3677e-01, 7.7151e-01, 5.4282e-01, + 9.3692e-01, 3.1526e-01, 9.7688e-01, 8.3385e-01, + 2.2972e-01, 6.2720e-01, 2.5945e-01, 2.4371e-01, + 1.9518e-01, 9.3585e-01, 5.0154e-01, 4.9345e-01, + 9.2069e-01, 2.8370e-01, 2.0941e-01, 2.7517e-01, + 3.3759e-01, 3.1978e-01, 9.0873e-01, 7.9081e-01, + 6.9871e-01, 2.8844e-01, 7.5941e-01, 4.7964e-01, + 4.1805e-01, 7.2289e-01, 7.1452e-01, 2.2869e-01, + 3.5827e-01, 5.0632e-01, 5.1573e-01, 6.2442e-01, + 5.5513e-01, 3.8608e-01, 6.5218e-02, 2.8971e-01, + 9.3603e-01, 9.9599e-01, 2.9308e-02, 5.0704e-01, + 3.3920e-01, 1.3835e-01, 1.3900e-01, 8.3661e-01, + 8.7030e-02, 1.9115e-01, 8.1969e-01, 2.0766e-01, + 2.0634e-04, 3.2996e-03, 3.5546e-01, 6.9885e-01, + 8.1732e-01, 4.6117e-01, 9.2421e-01, 7.9629e-01, + 9.9088e-01, 9.9274e-01, 1.1080e-01, 1.7902e-01, + 7.9706e-01, 1.8505e-01, 7.4092e-01, 7.3209e-01, + 1.2144e-01, 5.6902e-01, 9.3325e-01, 2.2999e-01, + 6.3010e-01, 9.2148e-01, 2.9383e-01, 4.2274e-01, + 1.2992e-01, 1.8436e-01, 7.7452e-01, 9.5314e-03, + 8.7521e-01, 3.7291e-01, 3.4776e-01, 9.8447e-01, + 9.2182e-01, 6.2103e-01, 7.8077e-01, 6.7523e-01, + 1.6309e-01, 9.9005e-01, 7.0418e-01, 7.5393e-01, + 4.9572e-02, 5.3018e-01, 5.0127e-02, 9.4383e-01, + 9.3893e-01, 2.9396e-01, 1.7931e-01, 3.2387e-01, + 5.3937e-01, 4.4591e-01, 4.1519e-01, 6.4965e-01, + 9.0437e-01, 8.3040e-01, 1.4310e-01, 3.5145e-02, + 4.6746e-01, 5.6639e-01, 4.2557e-01, 8.3449e-01, + 7.7874e-01, 8.0354e-03, 7.7620e-01, 6.3805e-01, + 9.9677e-01, 2.9228e-01, 7.5318e-01, 7.8356e-01, + 9.8216e-01, 6.5817e-01, 3.5696e-01, 2.7232e-01, + 3.7271e-01, 2.1862e-01, 6.4841e-01, 3.4376e-01, + 6.0935e-01, 9.5134e-02, 6.6915e-01, 8.1621e-01, + 4.0272e-01, 6.1882e-01, 5.0284e-02, 8.7929e-01, + 4.2125e-01, 7.6634e-01, 2.5193e-01, 1.3171e-01, + 5.5583e-01, 1.3959e-01, 1.7579e-01, 7.2551e-01, + 6.6240e-01, 1.9258e-01, 3.6612e-01, 1.8147e-01, + 5.7841e-01, 7.8278e-01, 4.2669e-02, 6.6255e-01, + 2.8767e-01, 8.0007e-02, 3.6674e-04, 3.9335e-01, + 2.1417e-01, 2.2919e-01, 1.2683e-01, 9.0001e-01, + 9.9290e-01, 4.5741e-01, 2.3197e-01, 1.8836e-01, + 9.6565e-01, 8.0740e-01, 1.9854e-01, 5.3042e-02, + 8.1469e-01, 4.9040e-01, 7.2177e-01, 6.4045e-01, + 4.9810e-01, 2.5103e-02, 4.7475e-01, 4.7844e-02, + 6.0969e-01, 9.1481e-01, 4.4597e-01, 1.1494e-01, + 4.9666e-01, 6.1275e-01, 8.7093e-01, 1.3668e-01, + 9.5248e-01, 5.9063e-01, 4.9499e-01, 9.1388e-01, + 4.0836e-01, 9.3011e-01, 8.1737e-01, 5.9702e-01, + 9.9251e-01, 9.3234e-01, 7.8850e-01, 5.3497e-01, + 5.6422e-01, 3.4891e-01, 7.9452e-01, 8.5710e-01, + 9.6578e-01, 5.9804e-01, 4.3277e-01, 7.3290e-01, + 1.0449e-01, 4.8889e-01, 9.8684e-01, 3.1549e-02, + 8.2245e-02, 9.4666e-01, 1.7503e-01, 1.3455e-01, + 8.0620e-01, 9.4127e-01, 6.0203e-01, 2.4925e-01, + 2.1869e-01, 3.0844e-02, 4.0776e-01, 2.7968e-01, + 7.2045e-01, 4.5107e-02, 7.6360e-01, 6.9114e-01, + 3.5384e-01, 2.6205e-01, 4.0814e-01, 4.1925e-01, + 9.5840e-01, 2.7948e-01, 9.9578e-01, 5.4533e-01, + 4.9157e-01, 1.7568e-01, 
4.9298e-01, 1.2249e-01, + 4.9184e-01, 5.8221e-01, 3.4826e-01, 2.3718e-01, + 4.0414e-02, 9.8825e-01, 6.8724e-01, 4.1618e-01, + 3.5580e-01, 9.8542e-01, 6.1434e-01, 1.5506e-01, + 1.8635e-01, 3.4507e-01, 8.7226e-01, 6.4723e-01, + 1.1927e-01, 6.1913e-02, 5.8115e-01, 5.0795e-02, + 6.9421e-01, 8.5543e-01, 4.6361e-01, 1.7225e-01, + 7.1720e-01, 5.2724e-01, 2.7571e-01, 7.8026e-01, + 9.7859e-01, 3.3684e-01, 9.4074e-01, 8.4877e-01, + 1.6576e-02, 3.2110e-01, 8.1119e-01, 4.4604e-02, + 7.5553e-01, 8.9513e-01, 2.5486e-01, 5.6611e-01, + 1.5194e-01, 8.0984e-01, 1.1473e-02, 5.0922e-02, + 1.9459e-01, 2.4450e-01, 5.2269e-01, 8.8459e-01, + 1.5980e-01, 5.1724e-01, 3.6058e-01, 3.8147e-01, + 8.0778e-02, 6.8899e-01, 9.3518e-01, 3.9577e-01, + 6.8067e-01, 2.9545e-01, 2.1034e-01, 8.0864e-01, + 3.0905e-01, 3.9012e-01, 5.0504e-01, 3.5331e-01, + 2.3990e-01, 1.9663e-01, 4.7629e-02, 8.8145e-01, + 1.6688e-01, 3.2309e-01, 6.0635e-01, 6.9179e-01, + 8.4762e-01, 4.8298e-01, 4.2062e-01, 8.8411e-01, + 3.1172e-01, 4.7812e-02, 1.8446e-01, 3.6828e-01, + 2.3400e-01, 8.2280e-02, 7.9717e-01, 4.5737e-02, + 6.3521e-01, 7.6517e-01, 5.7003e-01, 8.6854e-01, + 6.3527e-01, 5.4238e-01, 6.1423e-02, 9.0375e-01, + 9.5888e-01, 3.1839e-01, 2.4367e-01, 6.4739e-01, + 8.1586e-01, 2.4076e-01, 9.7343e-01, 4.9856e-01, + 7.2246e-01, 5.0023e-01, 1.2692e-01, 7.4359e-01, + 3.5270e-01, 2.8465e-01, 3.0118e-01, 3.5307e-01, + 6.2379e-01, 7.1186e-01, 6.6474e-01, 8.4095e-02, + 8.8565e-01, 2.9464e-01, 5.4755e-01, 9.0701e-01, + 8.9197e-01, 1.8235e-01, 9.3370e-01, 8.6788e-01, + 1.0571e-01, 3.0684e-01, 2.0394e-01, 6.1322e-01, + 7.6393e-01, 7.5679e-01, 2.3964e-01, 2.2054e-01, + 4.5451e-01, 3.5051e-01, 7.3550e-02, 9.3935e-01, + 2.5262e-01, 9.1953e-01, 2.4572e-01, 1.1299e-01, + 5.8933e-01, 2.5009e-01, 8.9652e-01, 6.5729e-02, + 1.7446e-01, 3.5451e-01, 5.3832e-01, 6.5479e-01, + 6.3736e-01, 1.1466e-01, 4.7192e-01, 9.4751e-01, + 4.2249e-01, 8.0013e-01, 8.3662e-01, 2.7945e-01, + 4.9136e-01, 2.0754e-01, 1.5234e-01, 2.6470e-01, + 9.8611e-01, 2.0121e-01, 6.4605e-01, 1.4810e-01, + 3.8203e-01, 1.0221e-02, 6.4883e-01, 3.7738e-01, + 6.6558e-01, 1.0409e-01, 3.3837e-01, 4.1054e-01, + 2.3450e-01, 5.9670e-01, 6.3347e-01, 9.3841e-01, + 7.1202e-01, 8.2758e-02, 3.3401e-01, 4.6725e-01, + 5.5433e-01, 4.1060e-01, 4.8108e-01, 2.6006e-02, + 5.0602e-01, 7.5691e-01, 8.3228e-01, 5.5461e-01, + 9.7723e-01, 9.5142e-02, 4.8502e-01, 4.7276e-01, + 4.6702e-01, 6.8685e-02, 4.7602e-01, 9.8285e-01, + 8.7376e-02, 7.2154e-01, 6.4366e-02, 8.0105e-01, + 6.7920e-01, 3.4169e-01, 9.9690e-01, 8.6900e-02, + 6.8423e-01, 2.2307e-01, 6.6827e-01, 6.2007e-01, + 7.2812e-01, 8.1208e-01, 8.1304e-01, 4.9537e-02, + 2.2654e-01, 1.0913e-01, 1.8736e-01, 7.4063e-01, + 7.0233e-01, 5.2413e-01, 9.2392e-01, 4.3601e-01, + 8.7513e-01, 9.5555e-02, 5.5401e-01, 4.9938e-02, + 6.6611e-02, 5.3894e-01, 7.4381e-01, 8.2785e-01, + 4.4283e-01, 2.1861e-01, 8.1992e-02, 5.3650e-01, + 2.7083e-01, 9.6992e-02, 6.5700e-01, 3.6738e-01, + 5.3582e-02, 6.8961e-01, 5.1692e-01, 4.7811e-01, + 9.5507e-01, 1.1629e-01, 4.4608e-01, 9.1741e-01, + 6.7667e-01, 2.8148e-02, 7.4903e-01, 7.8503e-01, + 4.6236e-02, 4.6999e-02, 1.9415e-01, 5.8769e-01, + 3.9665e-01, 2.8180e-01, 6.6891e-01, 5.2471e-01, + 5.6718e-01, 3.6965e-01, 4.8691e-01, 1.2448e-01, + 3.7830e-01, 7.1508e-01, 5.3843e-01, 1.9964e-01, + 4.6846e-01, 3.0749e-01, 5.0821e-01, 4.7264e-01, + 6.5869e-01, 7.5655e-01, 6.6608e-01, 4.4097e-01, + 8.8246e-01, 5.5178e-01, 3.1991e-01, 6.1838e-01, + 5.4250e-01, 3.8407e-01, 6.9734e-01, 5.6089e-01, + 7.7507e-01, 1.5344e-01, 8.1394e-01, 3.8479e-02, + 5.6357e-02, 3.7774e-01, 
7.2734e-01, 7.5830e-02, + 9.5355e-01, 4.6254e-01, 5.1318e-01, 7.7011e-01, + 2.9491e-01, 1.3797e-01, 8.7797e-01, 8.0879e-01, + 5.8383e-01, 1.2735e-01, 2.6792e-01, 7.5423e-01, + 8.5687e-01, 5.6856e-01, 3.0846e-01, 7.2150e-01, + 2.0158e-01, 4.4285e-01, 4.3074e-01, 8.2410e-01, + 5.6984e-01, 2.3044e-01, 8.7317e-01, 5.4302e-01, + 1.6661e-01, 1.5551e-01, 5.3661e-01, 9.8902e-01, + 6.2303e-01, 5.7449e-01, 9.7497e-01, 6.9276e-01, + 2.8973e-01, 2.2008e-01, 7.4155e-02, 6.3535e-01, + 3.2883e-01, 7.0117e-01, 3.0143e-01, 5.7505e-01, + 3.4680e-01, 2.7660e-01, 7.0338e-02, 7.8984e-01, + 6.7461e-01, 2.5361e-01, 8.3933e-01, 7.1929e-01, + 1.4013e-01, 9.9655e-01, 2.4267e-01, 9.3212e-01, + 4.6070e-01, 2.2070e-01, 6.6336e-01, 1.3432e-01, + 5.3597e-01, 5.1768e-01, 7.6964e-01, 9.9864e-01, + 5.3829e-01, 3.1592e-01, 9.3386e-01, 5.8600e-01, + 1.2704e-01, 5.0213e-01, 6.2221e-02, 1.0695e-01, + 2.6995e-01, 2.6387e-01, 9.3927e-01, 2.7555e-01, + 3.1073e-01, 1.1755e-01, 8.1059e-01, 3.6864e-01, + 2.6251e-01, 5.7401e-01, 2.8597e-02, 8.6585e-02]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8046, 0.6398, 0.4516, ..., 0.6060, 0.1172, 0.9615]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.420795440673828 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([ 367, 5928, 4913, 9662, 9837, 1139, 7195, 709, 1186, + 80, 7429, 8354, 3731, 4238, 1204, 8286, 5350, 6518, + 6581, 5481, 8412, 8567, 4483, 270, 6809, 6052, 9047, + 8886, 6265, 2683, 8840, 6468, 5705, 4463, 7370, 8923, + 7324, 752, 3820, 476, 1511, 1152, 7156, 6378, 1601, + 6251, 5183, 964, 1260, 884, 6109, 4781, 1936, 3415, + 7691, 4888, 5674, 6459, 8140, 8932, 2864, 6432, 1356, + 2665, 9855, 2360, 1289, 3774, 7837, 576, 7725, 1293, + 907, 2542, 7673, 5326, 1656, 1077, 464, 7656, 5438, + 524, 4581, 8099, 2687, 4078, 5342, 5240, 9155, 9342, + 4985, 2318, 5185, 4603, 8415, 3279, 2083, 7925, 8071, + 9166, 9303, 9469, 5789, 7778, 8645, 7134, 7594, 2178, + 218, 3344, 3951, 8834, 4013, 7845, 8331, 9692, 3137, + 7730, 5023, 8474, 4718, 38, 726, 535, 2913, 4635, + 2716, 3591, 9066, 2660, 3345, 7905, 2026, 1175, 9827, + 6627, 4714, 9765, 1301, 2421, 949, 3573, 4881, 5681, + 2117, 1844, 8550, 9611, 6234, 1475, 3891, 4140, 4791, + 497, 5133, 1193, 8459, 1053, 2110, 5799, 6433, 6363, + 6452, 1314, 1357, 383, 1853, 7981, 8433, 7415, 6026, + 6286, 7644, 2079, 2595, 9578, 6356, 4227, 8298, 2069, + 2653, 5142, 7404, 4079, 1966, 7533, 6062, 6415, 8225, + 8783, 5739, 3815, 1201, 4896, 5097, 5808, 8945, 5520, + 6206, 511, 7347, 334, 3119, 1570, 7713, 2906, 3536, + 6334, 7998, 298, 3942, 6558, 2963, 4842, 2263, 5657, + 6492, 6276, 9075, 5298, 2267, 9620, 5320, 1568, 8760, + 7490, 9058, 1722, 5460, 3464, 1842, 2377, 9510, 1053, + 3322, 9012, 5664, 8967, 8790, 9286, 1946, 3141, 8192, + 6659, 3495, 4958, 4102, 7004, 6177, 5317, 6593, 7640, + 6307, 8035, 9650, 8087, 4138, 5288, 54, 277, 9498, + 8689, 2955, 8232, 1954, 321, 4627, 4226, 4258, 7653, + 5234, 2531, 412, 5196, 6249, 3047, 8171, 9428, 3213, + 9763, 1374, 9103, 7900, 6626, 2118, 7765, 3728, 6106, + 4954, 4753, 
8313, 550, 6888, 8896, 9965, 3061, 2947, + 8278, 5879, 5273, 5453, 181, 2437, 4117, 7283, 5591, + 8602, 8473, 5720, 1866, 7656, 8837, 3905, 7472, 7158, + 3023, 2882, 9989, 8681, 648, 6274, 6847, 3312, 5656, + 7680, 6098, 4463, 9954, 2825, 6574, 7011, 1957, 6137, + 6725, 3576, 5559, 9535, 4552, 2200, 3109, 9611, 9451, + 1309, 2875, 3230, 5628, 9907, 8010, 6405, 222, 8761, + 2120, 9374, 4858, 4514, 5442, 9330, 8281, 8858, 4407, + 6543, 35, 3825, 9633, 1009, 2627, 3906, 1955, 7496, + 3563, 3539, 8202, 7400, 7587, 7952, 2607, 3232, 9574, + 748, 9829, 4992, 1810, 7308, 6501, 6785, 7235, 1137, + 8724, 2405, 7482, 6759, 4652, 5655, 4530, 6784, 9512, + 4088, 6049, 9230, 7810, 9658, 5737, 1295, 738, 747, + 7672, 2503, 5907, 2220, 201, 5120, 7515, 3259, 3879, + 3039, 9321, 8605, 9482, 4181, 7348, 9537, 587, 6394, + 3984, 306, 1193, 3228, 778, 134, 9046, 7121, 4357, + 4190, 4050, 4701, 5483, 6713, 8937, 2483, 711, 8665, + 3008, 3841, 9929, 1156, 7864, 8900, 4657, 7749, 2947, + 9699, 2414, 4266, 6048, 8585, 5785, 2832, 7866, 2519, + 9463, 5405, 1165, 1777, 9786, 9696, 3031, 653, 1886, + 1959, 8043, 4843, 484, 4688, 1972, 2038, 6598, 7240, + 1462, 7254, 3087, 1879, 7554, 1558, 7001, 9016, 8895, + 5649, 4937, 9704, 4961, 7644, 8442, 2757, 2242, 9502, + 2976, 4095, 4676, 9126, 5855, 4163, 6430, 137, 106, + 3030, 655, 8531, 7089, 648, 854, 5910, 4435, 7738, + 4805, 6752, 444, 2066, 5929, 8674, 9769, 2865, 2752, + 5321, 130, 2489, 807, 9550, 1633, 7468, 9068, 9241, + 6808, 7365, 968, 8661, 9018, 3550, 6104, 2629, 7508, + 35, 8351, 2048, 4626, 2188, 1746, 7387, 3093, 1503, + 1452, 7638, 1336, 9281, 6296, 6140, 7102, 9783, 1071, + 4115, 8592, 8292, 2729, 6495, 356, 8036, 1239, 4223, + 6109, 2038, 6313, 7535, 4790, 2693, 2125, 7954, 5865, + 3268, 2522, 6271, 2184, 4150, 1470, 3491, 6995, 9734, + 2067, 3839, 7338, 6815, 4421, 307, 289, 5776, 7623, + 5550, 9770, 6085, 800, 6471, 6365, 7940, 7253, 9023, + 3056, 1623, 7408, 4931, 2227, 3852, 1329, 3500, 1597, + 680, 4795, 1426, 733, 9436, 1001, 976, 8821, 8330, + 6704, 5168, 673, 3362, 4167, 8558, 5789, 6484, 7568, + 5585, 6431, 6874, 5521, 5213, 7970, 3020, 8092, 5025, + 6800, 7632, 7512, 3989, 4195, 1578, 2940, 3628, 3648, + 6431, 2736, 7676, 5158, 5635, 1844, 6595, 8947, 5903, + 51, 9169, 4098, 5691, 6814, 9766, 6514, 9970, 2239, + 2046, 3280, 2714, 5806, 2677, 82, 1113, 2963, 2180, + 2566, 2541, 3825, 9253, 8388, 6485, 870, 1684, 7333, + 793, 9898, 5870, 5155, 6286, 9563, 9068, 5713, 2343, + 1715, 3887, 723, 1944, 2953, 3319, 9854, 144, 718, + 6674, 4026, 8079, 7531, 4342, 6924, 6779, 6546, 7462, + 9962, 1432, 8607, 1875, 2805, 7948, 2976, 1046, 7036, + 4949, 7102, 477, 7850, 2174, 1961, 4920, 8186, 1308, + 179, 2095, 3644, 8483, 9153, 627, 2488, 890, 3123, + 4350, 5492, 1701, 1645, 3932, 3331, 2933, 6624, 7332, + 7665, 5288, 7681, 6550, 7545, 730, 5687, 802, 6864, + 7451, 2117, 4878, 4869, 1248, 1137, 1380, 8950, 5598, + 7429, 4233, 1060, 5569, 6689, 8608, 3567, 4558, 7535, + 9199, 2985, 2282, 235, 9129, 8582, 2072, 7435, 8445, + 9048, 7905, 3709, 7810, 5385, 5720, 2662, 2668, 7912, + 4854, 9556, 9550, 3132, 8229, 4856, 4161, 1914, 745, + 65, 7058, 9271, 7846, 8928, 5152, 5482, 8156, 4590, + 3430, 90, 6589, 2814, 3078, 1382, 8736, 5505, 2054, + 8404, 4829, 8221, 2458, 952, 6527, 2277, 8961, 5925, + 2066, 2154, 3256, 2466, 7237, 2076, 9446, 4670, 7006, + 2231, 8823, 7168, 5299, 1101, 7045, 7082, 8944, 4016, + 4074, 4258, 3864, 3141, 4450, 5115, 5389, 9949, 1172, + 9433, 1642, 1479, 3769, 9973, 4747, 4872, 389, 8601, + 9119, 8563, 2438, 6873, 1818, 9226, 937, 
6705, 3733, + 2036, 669, 2029, 332, 7663, 8899, 3889, 9735, 9326, + 8050, 2621, 3330, 992, 9581, 1488, 8125, 6995, 2528, + 7140, 6221, 1417, 9802, 898, 1689, 3308, 303, 2075, + 6622, 9167, 6725, 8722, 8663, 7196, 3835, 4992, 1316, + 6283, 4758, 9632, 641, 8717, 8903, 403, 3496, 5247, + 9491, 1114, 4440, 9898, 4961, 6497, 4291, 3663, 5161, + 7340, 9289, 8727, 7951, 3216, 9826, 2744, 1742, 7964, + 1100, 4645, 9, 4874, 3732, 9378, 1625, 9820, 3610, + 1545, 6999, 6918, 1895, 3580, 9207, 2130, 5891, 4573, + 1060, 9808, 4116, 943, 4244, 5724, 4961, 3837, 8840, + 389, 1348, 833, 3514, 5170, 1683, 7789, 9831, 5974, + 9125, 5396, 7463, 6389, 1948, 8359, 6877, 7522, 8087, + 7219, 1695, 4668, 662, 3386, 4803, 1414, 7277, 1481, + 9053, 9285, 9508, 5533, 205, 8818, 4845, 7696, 8623, + 9000]), + values=tensor([9.0347e-01, 8.9097e-01, 3.9050e-01, 1.1761e-01, + 7.3236e-01, 1.8076e-01, 1.7435e-01, 3.1968e-01, + 6.2094e-02, 6.9707e-01, 9.2156e-01, 2.5833e-01, + 3.9175e-01, 8.0344e-02, 8.5392e-01, 7.0595e-02, + 6.3035e-01, 4.6849e-01, 4.1355e-01, 5.1765e-01, + 6.7505e-01, 7.7298e-01, 3.0196e-01, 2.7160e-01, + 4.9835e-01, 7.7842e-01, 7.9100e-01, 6.1241e-01, + 9.2766e-01, 8.1158e-01, 9.2979e-01, 2.0087e-01, + 6.3444e-01, 1.1919e-01, 2.9666e-01, 8.1344e-01, + 2.3658e-01, 7.2770e-01, 3.4239e-01, 6.0915e-01, + 3.8756e-01, 4.8989e-01, 5.2955e-01, 9.2633e-01, + 7.9699e-01, 9.0118e-01, 6.9847e-02, 3.2137e-02, + 8.8024e-01, 6.0500e-01, 4.6450e-01, 8.5960e-02, + 5.6130e-01, 3.3831e-01, 9.3616e-01, 8.2763e-01, + 2.4098e-01, 8.9643e-01, 9.8468e-01, 8.8817e-01, + 4.6338e-01, 1.1147e-02, 8.1814e-01, 9.2206e-02, + 1.0005e-01, 6.2906e-01, 6.9146e-01, 1.5603e-01, + 3.1235e-01, 6.2561e-01, 3.7711e-01, 6.5022e-01, + 1.2930e-01, 6.7901e-02, 7.2399e-01, 7.8736e-01, + 8.8706e-02, 7.0933e-01, 9.0665e-01, 1.0525e-01, + 7.0947e-01, 7.6785e-01, 7.4226e-01, 9.8559e-01, + 9.2459e-01, 1.7157e-01, 7.4478e-01, 7.9596e-01, + 7.2144e-01, 1.9958e-01, 6.1043e-01, 2.7464e-01, + 5.8302e-01, 9.7091e-01, 4.8610e-01, 8.4624e-01, + 8.7645e-01, 7.2595e-01, 7.4275e-01, 2.6952e-01, + 8.8491e-01, 6.7085e-02, 7.5574e-01, 6.1435e-01, + 1.3665e-01, 8.8809e-01, 2.2500e-01, 7.6147e-01, + 5.8642e-01, 8.3791e-01, 3.2752e-01, 4.9244e-01, + 6.5160e-01, 6.2237e-01, 5.6666e-01, 1.2589e-01, + 5.4657e-01, 1.6810e-01, 1.1783e-01, 3.3742e-01, + 6.8221e-01, 1.2268e-02, 4.7183e-01, 6.4275e-01, + 3.3058e-01, 6.2514e-01, 2.7607e-01, 5.2090e-01, + 3.9798e-01, 7.4704e-02, 3.1431e-01, 2.3156e-01, + 2.4983e-01, 7.9235e-01, 6.1773e-02, 4.1696e-01, + 2.9905e-01, 6.8152e-01, 3.4211e-01, 6.0393e-01, + 3.6186e-02, 8.0620e-01, 9.8874e-01, 4.6252e-01, + 8.4231e-01, 9.1653e-01, 4.6135e-01, 2.8152e-01, + 9.7319e-01, 4.3848e-01, 5.7170e-01, 4.9901e-01, + 7.4233e-01, 2.6089e-01, 8.3401e-01, 2.3202e-01, + 8.4769e-01, 8.7678e-01, 7.1420e-01, 8.9378e-01, + 5.6124e-01, 4.0637e-01, 3.8693e-01, 8.0881e-02, + 2.6688e-01, 5.6776e-01, 4.3004e-01, 4.7054e-01, + 2.0392e-01, 7.2817e-01, 6.6755e-01, 5.1867e-01, + 5.2670e-01, 1.4618e-01, 7.3183e-01, 8.6973e-01, + 1.9126e-01, 4.9009e-01, 2.5587e-01, 4.0650e-01, + 9.8806e-01, 9.6733e-01, 1.1171e-01, 7.9880e-01, + 6.7184e-01, 7.4651e-02, 1.7680e-01, 9.4485e-01, + 8.4713e-01, 1.4694e-01, 9.3410e-01, 6.8790e-01, + 6.7995e-01, 3.4766e-01, 3.3209e-01, 8.5193e-01, + 5.7959e-01, 2.9187e-01, 3.0229e-01, 9.7462e-01, + 7.8153e-01, 2.9900e-01, 3.0648e-01, 9.4053e-01, + 7.5039e-01, 7.3664e-01, 2.3912e-01, 5.6769e-01, + 9.2225e-01, 1.2617e-01, 4.4870e-01, 5.2352e-01, + 4.9831e-01, 4.2978e-01, 5.8379e-03, 5.1101e-01, + 1.1850e-02, 4.3076e-02, 2.0400e-01, 
4.3425e-02, + 2.0565e-01, 2.5026e-01, 5.8630e-01, 9.9979e-01, + 5.5005e-01, 2.8369e-01, 7.0988e-01, 9.9716e-01, + 1.0843e-01, 5.2002e-01, 8.1336e-01, 4.4712e-01, + 1.7801e-01, 4.1042e-01, 8.1718e-01, 7.5846e-01, + 6.4002e-01, 8.1764e-01, 9.2236e-01, 2.7385e-01, + 2.2693e-01, 1.3388e-01, 2.7481e-01, 2.7276e-01, + 6.3817e-01, 8.1593e-01, 7.1721e-01, 1.0760e-01, + 3.2614e-01, 4.3753e-03, 9.9770e-01, 9.1052e-01, + 1.4963e-01, 1.3781e-01, 1.1034e-01, 1.1895e-02, + 8.2173e-01, 2.6975e-01, 3.5686e-01, 5.8749e-01, + 1.5253e-01, 6.6786e-01, 9.1749e-01, 8.5611e-01, + 5.9711e-01, 8.9352e-01, 7.2370e-01, 4.2727e-01, + 4.7201e-01, 6.1878e-01, 1.3255e-01, 4.0640e-01, + 6.1650e-01, 9.0122e-01, 3.3783e-01, 1.2667e-01, + 5.3203e-01, 7.8517e-01, 4.7198e-01, 8.0031e-01, + 9.8415e-02, 6.3035e-01, 8.4415e-01, 4.0094e-01, + 3.8505e-01, 5.1696e-01, 9.5335e-01, 7.6890e-01, + 5.9772e-01, 8.7886e-01, 7.4086e-01, 8.4781e-01, + 1.6493e-01, 3.3683e-01, 7.1960e-03, 2.9042e-01, + 8.4813e-01, 1.8192e-01, 6.0102e-01, 4.2567e-01, + 9.3317e-02, 3.8724e-01, 3.5787e-01, 9.9992e-01, + 1.2443e-01, 8.2890e-01, 2.8195e-01, 6.3051e-01, + 6.0894e-01, 9.5920e-01, 6.4943e-01, 6.5476e-01, + 6.7692e-01, 5.7763e-01, 7.8022e-01, 4.1886e-01, + 5.0205e-01, 2.5778e-01, 9.0479e-01, 8.7006e-01, + 6.1226e-01, 6.4647e-01, 1.5596e-01, 7.0047e-01, + 6.7722e-01, 5.4595e-01, 4.9292e-01, 9.0526e-01, + 4.8174e-02, 8.1995e-02, 7.1448e-01, 2.7007e-02, + 6.2586e-01, 6.7855e-01, 4.6902e-01, 3.6021e-01, + 3.9907e-01, 1.8609e-02, 6.0745e-01, 2.4758e-01, + 6.2019e-02, 9.3085e-01, 3.9521e-01, 8.9117e-01, + 5.9487e-01, 4.2672e-02, 7.4925e-01, 8.7766e-02, + 5.8346e-01, 6.5657e-01, 8.0155e-01, 4.5151e-01, + 5.5583e-01, 7.1994e-01, 6.1938e-01, 4.9424e-01, + 8.4852e-01, 2.5577e-01, 7.5244e-01, 5.8561e-01, + 1.6622e-01, 5.3017e-01, 7.0377e-01, 1.6795e-01, + 6.1195e-01, 5.0235e-01, 4.0852e-01, 2.4062e-01, + 3.7706e-01, 7.1399e-01, 1.5932e-01, 2.0246e-01, + 9.3118e-01, 3.6824e-01, 9.2308e-01, 4.1388e-01, + 3.9074e-01, 2.1232e-02, 4.8230e-01, 3.1334e-02, + 7.6588e-01, 8.9020e-01, 9.5571e-02, 9.3996e-01, + 3.3502e-01, 1.6456e-02, 4.8873e-01, 9.0152e-01, + 6.0836e-01, 6.2449e-02, 2.3911e-01, 2.5569e-01, + 1.9099e-01, 6.5319e-01, 3.4599e-01, 9.4871e-01, + 1.5293e-01, 9.6291e-01, 5.9772e-01, 9.2359e-01, + 6.8691e-01, 6.3677e-01, 7.7151e-01, 5.4282e-01, + 9.3692e-01, 3.1526e-01, 9.7688e-01, 8.3385e-01, + 2.2972e-01, 6.2720e-01, 2.5945e-01, 2.4371e-01, + 1.9518e-01, 9.3585e-01, 5.0154e-01, 4.9345e-01, + 9.2069e-01, 2.8370e-01, 2.0941e-01, 2.7517e-01, + 3.3759e-01, 3.1978e-01, 9.0873e-01, 7.9081e-01, + 6.9871e-01, 2.8844e-01, 7.5941e-01, 4.7964e-01, + 4.1805e-01, 7.2289e-01, 7.1452e-01, 2.2869e-01, + 3.5827e-01, 5.0632e-01, 5.1573e-01, 6.2442e-01, + 5.5513e-01, 3.8608e-01, 6.5218e-02, 2.8971e-01, + 9.3603e-01, 9.9599e-01, 2.9308e-02, 5.0704e-01, + 3.3920e-01, 1.3835e-01, 1.3900e-01, 8.3661e-01, + 8.7030e-02, 1.9115e-01, 8.1969e-01, 2.0766e-01, + 2.0634e-04, 3.2996e-03, 3.5546e-01, 6.9885e-01, + 8.1732e-01, 4.6117e-01, 9.2421e-01, 7.9629e-01, + 9.9088e-01, 9.9274e-01, 1.1080e-01, 1.7902e-01, + 7.9706e-01, 1.8505e-01, 7.4092e-01, 7.3209e-01, + 1.2144e-01, 5.6902e-01, 9.3325e-01, 2.2999e-01, + 6.3010e-01, 9.2148e-01, 2.9383e-01, 4.2274e-01, + 1.2992e-01, 1.8436e-01, 7.7452e-01, 9.5314e-03, + 8.7521e-01, 3.7291e-01, 3.4776e-01, 9.8447e-01, + 9.2182e-01, 6.2103e-01, 7.8077e-01, 6.7523e-01, + 1.6309e-01, 9.9005e-01, 7.0418e-01, 7.5393e-01, + 4.9572e-02, 5.3018e-01, 5.0127e-02, 9.4383e-01, + 9.3893e-01, 2.9396e-01, 1.7931e-01, 3.2387e-01, + 5.3937e-01, 4.4591e-01, 4.1519e-01, 
6.4965e-01, + 9.0437e-01, 8.3040e-01, 1.4310e-01, 3.5145e-02, + 4.6746e-01, 5.6639e-01, 4.2557e-01, 8.3449e-01, + 7.7874e-01, 8.0354e-03, 7.7620e-01, 6.3805e-01, + 9.9677e-01, 2.9228e-01, 7.5318e-01, 7.8356e-01, + 9.8216e-01, 6.5817e-01, 3.5696e-01, 2.7232e-01, + 3.7271e-01, 2.1862e-01, 6.4841e-01, 3.4376e-01, + 6.0935e-01, 9.5134e-02, 6.6915e-01, 8.1621e-01, + 4.0272e-01, 6.1882e-01, 5.0284e-02, 8.7929e-01, + 4.2125e-01, 7.6634e-01, 2.5193e-01, 1.3171e-01, + 5.5583e-01, 1.3959e-01, 1.7579e-01, 7.2551e-01, + 6.6240e-01, 1.9258e-01, 3.6612e-01, 1.8147e-01, + 5.7841e-01, 7.8278e-01, 4.2669e-02, 6.6255e-01, + 2.8767e-01, 8.0007e-02, 3.6674e-04, 3.9335e-01, + 2.1417e-01, 2.2919e-01, 1.2683e-01, 9.0001e-01, + 9.9290e-01, 4.5741e-01, 2.3197e-01, 1.8836e-01, + 9.6565e-01, 8.0740e-01, 1.9854e-01, 5.3042e-02, + 8.1469e-01, 4.9040e-01, 7.2177e-01, 6.4045e-01, + 4.9810e-01, 2.5103e-02, 4.7475e-01, 4.7844e-02, + 6.0969e-01, 9.1481e-01, 4.4597e-01, 1.1494e-01, + 4.9666e-01, 6.1275e-01, 8.7093e-01, 1.3668e-01, + 9.5248e-01, 5.9063e-01, 4.9499e-01, 9.1388e-01, + 4.0836e-01, 9.3011e-01, 8.1737e-01, 5.9702e-01, + 9.9251e-01, 9.3234e-01, 7.8850e-01, 5.3497e-01, + 5.6422e-01, 3.4891e-01, 7.9452e-01, 8.5710e-01, + 9.6578e-01, 5.9804e-01, 4.3277e-01, 7.3290e-01, + 1.0449e-01, 4.8889e-01, 9.8684e-01, 3.1549e-02, + 8.2245e-02, 9.4666e-01, 1.7503e-01, 1.3455e-01, + 8.0620e-01, 9.4127e-01, 6.0203e-01, 2.4925e-01, + 2.1869e-01, 3.0844e-02, 4.0776e-01, 2.7968e-01, + 7.2045e-01, 4.5107e-02, 7.6360e-01, 6.9114e-01, + 3.5384e-01, 2.6205e-01, 4.0814e-01, 4.1925e-01, + 9.5840e-01, 2.7948e-01, 9.9578e-01, 5.4533e-01, + 4.9157e-01, 1.7568e-01, 4.9298e-01, 1.2249e-01, + 4.9184e-01, 5.8221e-01, 3.4826e-01, 2.3718e-01, + 4.0414e-02, 9.8825e-01, 6.8724e-01, 4.1618e-01, + 3.5580e-01, 9.8542e-01, 6.1434e-01, 1.5506e-01, + 1.8635e-01, 3.4507e-01, 8.7226e-01, 6.4723e-01, + 1.1927e-01, 6.1913e-02, 5.8115e-01, 5.0795e-02, + 6.9421e-01, 8.5543e-01, 4.6361e-01, 1.7225e-01, + 7.1720e-01, 5.2724e-01, 2.7571e-01, 7.8026e-01, + 9.7859e-01, 3.3684e-01, 9.4074e-01, 8.4877e-01, + 1.6576e-02, 3.2110e-01, 8.1119e-01, 4.4604e-02, + 7.5553e-01, 8.9513e-01, 2.5486e-01, 5.6611e-01, + 1.5194e-01, 8.0984e-01, 1.1473e-02, 5.0922e-02, + 1.9459e-01, 2.4450e-01, 5.2269e-01, 8.8459e-01, + 1.5980e-01, 5.1724e-01, 3.6058e-01, 3.8147e-01, + 8.0778e-02, 6.8899e-01, 9.3518e-01, 3.9577e-01, + 6.8067e-01, 2.9545e-01, 2.1034e-01, 8.0864e-01, + 3.0905e-01, 3.9012e-01, 5.0504e-01, 3.5331e-01, + 2.3990e-01, 1.9663e-01, 4.7629e-02, 8.8145e-01, + 1.6688e-01, 3.2309e-01, 6.0635e-01, 6.9179e-01, + 8.4762e-01, 4.8298e-01, 4.2062e-01, 8.8411e-01, + 3.1172e-01, 4.7812e-02, 1.8446e-01, 3.6828e-01, + 2.3400e-01, 8.2280e-02, 7.9717e-01, 4.5737e-02, + 6.3521e-01, 7.6517e-01, 5.7003e-01, 8.6854e-01, + 6.3527e-01, 5.4238e-01, 6.1423e-02, 9.0375e-01, + 9.5888e-01, 3.1839e-01, 2.4367e-01, 6.4739e-01, + 8.1586e-01, 2.4076e-01, 9.7343e-01, 4.9856e-01, + 7.2246e-01, 5.0023e-01, 1.2692e-01, 7.4359e-01, + 3.5270e-01, 2.8465e-01, 3.0118e-01, 3.5307e-01, + 6.2379e-01, 7.1186e-01, 6.6474e-01, 8.4095e-02, + 8.8565e-01, 2.9464e-01, 5.4755e-01, 9.0701e-01, + 8.9197e-01, 1.8235e-01, 9.3370e-01, 8.6788e-01, + 1.0571e-01, 3.0684e-01, 2.0394e-01, 6.1322e-01, + 7.6393e-01, 7.5679e-01, 2.3964e-01, 2.2054e-01, + 4.5451e-01, 3.5051e-01, 7.3550e-02, 9.3935e-01, + 2.5262e-01, 9.1953e-01, 2.4572e-01, 1.1299e-01, + 5.8933e-01, 2.5009e-01, 8.9652e-01, 6.5729e-02, + 1.7446e-01, 3.5451e-01, 5.3832e-01, 6.5479e-01, + 6.3736e-01, 1.1466e-01, 4.7192e-01, 9.4751e-01, + 4.2249e-01, 8.0013e-01, 8.3662e-01, 
2.7945e-01, + 4.9136e-01, 2.0754e-01, 1.5234e-01, 2.6470e-01, + 9.8611e-01, 2.0121e-01, 6.4605e-01, 1.4810e-01, + 3.8203e-01, 1.0221e-02, 6.4883e-01, 3.7738e-01, + 6.6558e-01, 1.0409e-01, 3.3837e-01, 4.1054e-01, + 2.3450e-01, 5.9670e-01, 6.3347e-01, 9.3841e-01, + 7.1202e-01, 8.2758e-02, 3.3401e-01, 4.6725e-01, + 5.5433e-01, 4.1060e-01, 4.8108e-01, 2.6006e-02, + 5.0602e-01, 7.5691e-01, 8.3228e-01, 5.5461e-01, + 9.7723e-01, 9.5142e-02, 4.8502e-01, 4.7276e-01, + 4.6702e-01, 6.8685e-02, 4.7602e-01, 9.8285e-01, + 8.7376e-02, 7.2154e-01, 6.4366e-02, 8.0105e-01, + 6.7920e-01, 3.4169e-01, 9.9690e-01, 8.6900e-02, + 6.8423e-01, 2.2307e-01, 6.6827e-01, 6.2007e-01, + 7.2812e-01, 8.1208e-01, 8.1304e-01, 4.9537e-02, + 2.2654e-01, 1.0913e-01, 1.8736e-01, 7.4063e-01, + 7.0233e-01, 5.2413e-01, 9.2392e-01, 4.3601e-01, + 8.7513e-01, 9.5555e-02, 5.5401e-01, 4.9938e-02, + 6.6611e-02, 5.3894e-01, 7.4381e-01, 8.2785e-01, + 4.4283e-01, 2.1861e-01, 8.1992e-02, 5.3650e-01, + 2.7083e-01, 9.6992e-02, 6.5700e-01, 3.6738e-01, + 5.3582e-02, 6.8961e-01, 5.1692e-01, 4.7811e-01, + 9.5507e-01, 1.1629e-01, 4.4608e-01, 9.1741e-01, + 6.7667e-01, 2.8148e-02, 7.4903e-01, 7.8503e-01, + 4.6236e-02, 4.6999e-02, 1.9415e-01, 5.8769e-01, + 3.9665e-01, 2.8180e-01, 6.6891e-01, 5.2471e-01, + 5.6718e-01, 3.6965e-01, 4.8691e-01, 1.2448e-01, + 3.7830e-01, 7.1508e-01, 5.3843e-01, 1.9964e-01, + 4.6846e-01, 3.0749e-01, 5.0821e-01, 4.7264e-01, + 6.5869e-01, 7.5655e-01, 6.6608e-01, 4.4097e-01, + 8.8246e-01, 5.5178e-01, 3.1991e-01, 6.1838e-01, + 5.4250e-01, 3.8407e-01, 6.9734e-01, 5.6089e-01, + 7.7507e-01, 1.5344e-01, 8.1394e-01, 3.8479e-02, + 5.6357e-02, 3.7774e-01, 7.2734e-01, 7.5830e-02, + 9.5355e-01, 4.6254e-01, 5.1318e-01, 7.7011e-01, + 2.9491e-01, 1.3797e-01, 8.7797e-01, 8.0879e-01, + 5.8383e-01, 1.2735e-01, 2.6792e-01, 7.5423e-01, + 8.5687e-01, 5.6856e-01, 3.0846e-01, 7.2150e-01, + 2.0158e-01, 4.4285e-01, 4.3074e-01, 8.2410e-01, + 5.6984e-01, 2.3044e-01, 8.7317e-01, 5.4302e-01, + 1.6661e-01, 1.5551e-01, 5.3661e-01, 9.8902e-01, + 6.2303e-01, 5.7449e-01, 9.7497e-01, 6.9276e-01, + 2.8973e-01, 2.2008e-01, 7.4155e-02, 6.3535e-01, + 3.2883e-01, 7.0117e-01, 3.0143e-01, 5.7505e-01, + 3.4680e-01, 2.7660e-01, 7.0338e-02, 7.8984e-01, + 6.7461e-01, 2.5361e-01, 8.3933e-01, 7.1929e-01, + 1.4013e-01, 9.9655e-01, 2.4267e-01, 9.3212e-01, + 4.6070e-01, 2.2070e-01, 6.6336e-01, 1.3432e-01, + 5.3597e-01, 5.1768e-01, 7.6964e-01, 9.9864e-01, + 5.3829e-01, 3.1592e-01, 9.3386e-01, 5.8600e-01, + 1.2704e-01, 5.0213e-01, 6.2221e-02, 1.0695e-01, + 2.6995e-01, 2.6387e-01, 9.3927e-01, 2.7555e-01, + 3.1073e-01, 1.1755e-01, 8.1059e-01, 3.6864e-01, + 2.6251e-01, 5.7401e-01, 2.8597e-02, 8.6585e-02]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8046, 0.6398, 0.4516, ..., 0.6060, 0.1172, 0.9615]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.420795440673828 seconds + +[40.1, 39.18, 39.44, 38.84, 39.26, 38.78, 38.88, 39.27, 39.5, 39.18] +[65.88] +13.035122156143188 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 362205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.420795440673828, 'TIME_S_1KI': 0.02877043508696409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 858.7538476467132, 'W': 65.88} +[40.1, 39.18, 39.44, 38.84, 39.26, 38.78, 38.88, 39.27, 39.5, 39.18, 39.55, 39.36, 39.34, 38.83, 39.02, 39.04, 
38.92, 39.01, 41.73, 38.96] +707.295 +35.36475 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 362205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.420795440673828, 'TIME_S_1KI': 0.02877043508696409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 858.7538476467132, 'W': 65.88, 'J_1KI': 2.3709055580312617, 'W_1KI': 0.18188594856503915, 'W_D': 30.515249999999995, 'J_D': 397.7700113752484, 'W_D_1KI': 0.08424856089783408, 'J_D_1KI': 0.00023259911071860987} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..44b0019 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 288650, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.542811155319214, "TIME_S_1KI": 0.036524549299564224, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 872.9266104698181, "W": 66.0, "J_1KI": 3.0241697920312425, "W_1KI": 0.22865061493157804, "W_D": 30.442750000000004, "J_D": 402.64070561939485, "W_D_1KI": 0.10546596223800452, "J_D_1KI": 0.00036537662303136846} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..ec44bfd --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014984130859375} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 5000, 5000, 5000]), + col_indices=tensor([5455, 6096, 7620, ..., 8334, 1515, 9556]), + values=tensor([0.3295, 0.9699, 0.1085, ..., 0.1358, 0.2338, 0.9968]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.0525, 0.2160, 0.5197, ..., 0.9729, 0.0490, 0.2973]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.014984130859375 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '70074', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.549025535583496} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), + col_indices=tensor([3016, 6372, 3284, ..., 9865, 6936, 5486]), + values=tensor([0.2981, 0.0450, 0.6145, ..., 0.3998, 0.9695, 0.2536]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.7165, 0.5810, 0.9668, ..., 0.2745, 0.2690, 0.0815]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 2.549025535583496 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '288650', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.542811155319214} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([4532, 8082, 1862, ..., 2662, 2473, 4062]), + values=tensor([0.2290, 0.0977, 0.7273, ..., 0.3334, 0.1586, 0.6128]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.0256, 0.2861, 0.7976, ..., 0.1212, 0.6310, 0.3680]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.542811155319214 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([4532, 8082, 1862, ..., 2662, 2473, 4062]), + values=tensor([0.2290, 0.0977, 0.7273, ..., 0.3334, 0.1586, 0.6128]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.0256, 0.2861, 0.7976, ..., 0.1212, 0.6310, 0.3680]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.542811155319214 seconds + +[40.67, 38.88, 39.03, 38.97, 39.55, 38.85, 39.41, 38.82, 39.41, 38.86] +[66.0] +13.226160764694214 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 288650, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.542811155319214, 'TIME_S_1KI': 0.036524549299564224, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 872.9266104698181, 'W': 66.0} +[40.67, 38.88, 39.03, 38.97, 39.55, 38.85, 39.41, 38.82, 39.41, 38.86, 39.72, 38.82, 38.97, 39.27, 39.27, 39.76, 38.89, 38.89, 45.01, 39.44] +711.145 +35.557249999999996 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 288650, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.542811155319214, 'TIME_S_1KI': 0.036524549299564224, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 872.9266104698181, 'W': 66.0, 'J_1KI': 3.0241697920312425, 'W_1KI': 0.22865061493157804, 'W_D': 30.442750000000004, 'J_D': 402.64070561939485, 'W_D_1KI': 0.10546596223800452, 'J_D_1KI': 0.00036537662303136846} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..c479701 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 203, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.8138906955719, "TIME_S_1KI": 53.27039751513251, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1609.7031775331498, "W": 81.98, "J_1KI": 7929.572303118964, "W_1KI": 403.8423645320197, "W_D": 30.52650000000002, "J_D": 599.3974633930925, "W_D_1KI": 150.37684729064048, "J_D_1KI": 740.7726467519235} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..e1438f6 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,89 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 5.424488306045532} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse 
CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 44, 99, ..., 24999917, + 24999946, 25000000]), + col_indices=tensor([ 4827, 10869, 14232, ..., 471243, 483745, + 496563]), + values=tensor([0.8207, 0.6147, 0.2995, ..., 0.3197, 0.5880, 0.5650]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.7008, 0.9045, 0.7559, ..., 0.2377, 0.3193, 0.3380]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 5.424488306045532 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '193', '-ss', '500000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.982287168502808} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 103, ..., 24999914, + 24999954, 25000000]), + col_indices=tensor([ 956, 25275, 30712, ..., 470941, 489379, + 489461]), + values=tensor([0.2897, 0.9352, 0.3996, ..., 0.3187, 0.8556, 0.7054]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.6888, 0.9226, 0.2376, ..., 0.2155, 0.1168, 0.1817]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 9.982287168502808 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '203', '-ss', '500000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.8138906955719} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 60, 106, ..., 24999892, + 24999946, 25000000]), + col_indices=tensor([ 18694, 24514, 28811, ..., 477104, 482132, + 483877]), + values=tensor([0.0999, 0.2209, 0.5662, ..., 0.8643, 0.1918, 0.8434]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.7305, 0.0261, 0.0866, ..., 0.4657, 0.9743, 0.1757]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.8138906955719 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 60, 106, ..., 24999892, + 24999946, 25000000]), + col_indices=tensor([ 18694, 24514, 28811, ..., 477104, 482132, + 483877]), + values=tensor([0.0999, 0.2209, 0.5662, ..., 0.8643, 0.1918, 0.8434]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.7305, 0.0261, 0.0866, ..., 0.4657, 0.9743, 0.1757]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.8138906955719 seconds + +[48.25, 66.08, 64.62, 65.48, 60.76, 53.27, 65.63, 72.69, 68.45, 64.01] +[81.98] +19.635315656661987 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 203, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.8138906955719, 'TIME_S_1KI': 53.27039751513251, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1609.7031775331498, 'W': 81.98} +[48.25, 66.08, 64.62, 65.48, 60.76, 53.27, 65.63, 72.69, 68.45, 64.01, 67.95, 64.4, 68.38, 60.78, 45.98, 39.37, 40.58, 39.5, 41.17, 43.65] +1029.0699999999997 +51.453499999999984 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 203, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.8138906955719, 'TIME_S_1KI': 53.27039751513251, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1609.7031775331498, 'W': 81.98, 'J_1KI': 7929.572303118964, 'W_1KI': 403.8423645320197, 'W_D': 30.52650000000002, 'J_D': 599.3974633930925, 'W_D_1KI': 150.37684729064048, 'J_D_1KI': 740.7726467519235} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..9445831 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1357, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.301345348358154, "TIME_S_1KI": 7.591264073955898, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 974.2936608052254, "W": 72.47, "J_1KI": 717.9761686110725, "W_1KI": 53.404568901989684, "W_D": 37.03175, "J_D": 
497.85841415101294, "W_D_1KI": 27.28942520265291, "J_D_1KI": 20.11011437188866} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..d835f69 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.7734920978546143} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 10, ..., 2499990, + 2499995, 2500000]), + col_indices=tensor([ 99860, 161360, 168008, ..., 375780, 443860, + 468048]), + values=tensor([0.7731, 0.7975, 0.7314, ..., 0.7653, 0.4860, 0.2739]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.9501, 0.6169, 0.8449, ..., 0.9228, 0.9726, 0.5004]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 0.7734920978546143 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1357', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.301345348358154} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 10, ..., 2499991, + 2499994, 2500000]), + col_indices=tensor([238281, 305722, 262347, ..., 326599, 364388, + 410788]), + values=tensor([0.2261, 0.8621, 0.1222, ..., 0.7643, 0.7262, 0.6796]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7756, 0.5142, 0.7476, ..., 0.1970, 0.9731, 0.3396]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.301345348358154 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 10, ..., 2499991, + 2499994, 2500000]), + col_indices=tensor([238281, 305722, 262347, ..., 326599, 364388, + 410788]), + values=tensor([0.2261, 0.8621, 0.1222, ..., 0.7643, 0.7262, 0.6796]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7756, 0.5142, 0.7476, ..., 0.1970, 0.9731, 0.3396]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.301345348358154 seconds + +[40.45, 39.15, 39.07, 39.07, 39.08, 39.31, 39.47, 39.12, 39.58, 39.08] +[72.47] +13.444096326828003 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.301345348358154, 'TIME_S_1KI': 7.591264073955898, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 974.2936608052254, 'W': 72.47} +[40.45, 39.15, 39.07, 39.07, 39.08, 39.31, 39.47, 39.12, 39.58, 39.08, 39.87, 38.95, 39.89, 38.93, 39.13, 39.06, 39.33, 39.5, 40.16, 40.53] +708.7649999999999 +35.43825 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1357, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.301345348358154, 'TIME_S_1KI': 7.591264073955898, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 974.2936608052254, 'W': 72.47, 'J_1KI': 717.9761686110725, 'W_1KI': 53.404568901989684, 'W_D': 37.03175, 'J_D': 497.85841415101294, 'W_D_1KI': 27.28942520265291, 'J_D_1KI': 20.11011437188866} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..016557b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 374, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.481700897216797, "TIME_S_1KI": 28.025938227852397, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1245.1822829723358, "W": 76.84, "J_1KI": 3329.3643929741597, "W_1KI": 205.45454545454547, "W_D": 40.990750000000006, "J_D": 664.2498134532572, "W_D_1KI": 109.60093582887701, "J_D_1KI": 293.0506305584947} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..9fc6dc1 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.8034374713897705} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 31, 56, ..., 12499945, + 12499972, 12500000]), + col_indices=tensor([ 16534, 21956, 27589, ..., 400032, 455487, + 480702]), + values=tensor([0.5221, 0.3710, 0.3411, ..., 0.2701, 0.3669, 0.2928]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.5983, 0.4656, 0.7235, ..., 0.5590, 0.7340, 0.5167]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 2.8034374713897705 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '374', '-ss', '500000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.481700897216797} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 20, 47, ..., 12499941, + 12499971, 12500000]), + col_indices=tensor([ 37298, 48174, 79945, ..., 425979, 429124, + 477898]), + values=tensor([0.8892, 0.8073, 0.3867, ..., 0.6750, 0.3130, 0.8587]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.8772, 0.7830, 0.9014, ..., 0.3941, 0.0151, 0.6871]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.481700897216797 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 20, 47, ..., 12499941, + 12499971, 12500000]), + col_indices=tensor([ 37298, 48174, 79945, ..., 425979, 429124, + 477898]), + values=tensor([0.8892, 0.8073, 0.3867, ..., 0.6750, 0.3130, 0.8587]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.8772, 0.7830, 0.9014, ..., 0.3941, 0.0151, 0.6871]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.481700897216797 seconds + +[39.77, 39.61, 39.09, 39.46, 39.43, 39.2, 39.07, 38.94, 44.28, 39.72] +[76.84] +16.20487093925476 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 374, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.481700897216797, 'TIME_S_1KI': 28.025938227852397, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.1822829723358, 'W': 76.84} +[39.77, 39.61, 39.09, 39.46, 39.43, 39.2, 39.07, 38.94, 44.28, 39.72, 40.11, 44.49, 39.49, 38.95, 39.21, 39.06, 39.21, 38.96, 39.02, 39.43] +716.9849999999999 +35.84925 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 374, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.481700897216797, 'TIME_S_1KI': 28.025938227852397, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.1822829723358, 'W': 76.84, 'J_1KI': 3329.3643929741597, 'W_1KI': 205.45454545454547, 'W_D': 40.990750000000006, 'J_D': 664.2498134532572, 'W_D_1KI': 109.60093582887701, 'J_D_1KI': 293.0506305584947} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..e80dbeb --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 15655, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.832781791687012, "TIME_S_1KI": 0.691969453317599, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 866.9169475746155, "W": 65.84, "J_1KI": 55.37636202967841, "W_1KI": 4.205685084637497, "W_D": 30.269999999999996, "J_D": 398.5658566689491, "W_D_1KI": 1.933567550303417, "J_D_1KI": 0.12351118175045782} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..da8990f --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.08056378364562988} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 249990, 249996, + 250000]), + col_indices=tensor([28795, 30379, 41102, ..., 5633, 6424, 22447]), + values=tensor([0.9841, 0.4564, 0.4138, ..., 0.4352, 0.7831, 0.6427]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6096, 0.2856, 0.5951, ..., 0.5564, 0.0665, 0.9869]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.08056378364562988 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '13033', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.741129398345947} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 13, ..., 249987, 249991, + 250000]), + col_indices=tensor([ 370, 1086, 2786, ..., 43615, 44396, 45243]), + values=tensor([0.9664, 0.8693, 0.7422, ..., 0.8293, 0.8225, 0.1476]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.9116, 0.5607, 0.8635, ..., 0.8139, 0.6651, 0.4589]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 8.741129398345947 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15655', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.832781791687012} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 10, ..., 249991, 249998, + 250000]), + col_indices=tensor([12466, 31687, 41380, ..., 43099, 30794, 44210]), + values=tensor([0.5535, 0.2801, 0.3869, ..., 0.8607, 0.0342, 0.7001]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.1141, 0.0022, 0.7559, ..., 0.9683, 0.9705, 0.8203]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.832781791687012 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 10, ..., 249991, 249998, + 250000]), + col_indices=tensor([12466, 31687, 41380, ..., 43099, 30794, 44210]), + values=tensor([0.5535, 0.2801, 0.3869, ..., 0.8607, 0.0342, 0.7001]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.1141, 0.0022, 0.7559, ..., 0.9683, 0.9705, 0.8203]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.832781791687012 seconds + +[39.71, 38.91, 39.08, 39.03, 39.04, 38.89, 39.49, 39.37, 39.35, 39.31] +[65.84] +13.167025327682495 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15655, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.832781791687012, 'TIME_S_1KI': 0.691969453317599, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 866.9169475746155, 'W': 65.84} +[39.71, 38.91, 39.08, 39.03, 39.04, 38.89, 39.49, 39.37, 39.35, 39.31, 40.75, 39.44, 39.59, 39.09, 39.39, 39.11, 38.98, 38.78, 44.38, 39.19] +711.4000000000001 +35.57000000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 15655, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.832781791687012, 'TIME_S_1KI': 0.691969453317599, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 866.9169475746155, 'W': 65.84, 'J_1KI': 55.37636202967841, 'W_1KI': 4.205685084637497, 'W_D': 30.269999999999996, 'J_D': 398.5658566689491, 'W_D_1KI': 1.933567550303417, 'J_D_1KI': 0.12351118175045782} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..4e67596 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3401, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.135668277740479, "TIME_S_1KI": 2.9802023751074618, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 929.8197621202468, "W": 70.57, "J_1KI": 273.39599003829665, "W_1KI": 20.74977947662452, "W_D": 35.09774999999999, "J_D": 462.44270307433595, "W_D_1KI": 10.319832402234633, 
"J_D_1KI": 3.0343523676079487} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..73dd96f --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.3087193965911865} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 47, 100, ..., 2499908, + 2499955, 2500000]), + col_indices=tensor([ 1811, 3820, 5210, ..., 47398, 47518, 48036]), + values=tensor([0.8154, 0.8090, 0.3024, ..., 0.4722, 0.9116, 0.7561]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4260, 0.8795, 0.3202, ..., 0.3159, 0.0406, 0.9752]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 0.3087193965911865 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3401', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.135668277740479} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 41, 88, ..., 2499883, + 2499951, 2500000]), + col_indices=tensor([ 638, 2365, 2400, ..., 44467, 46636, 49496]), + values=tensor([0.8518, 0.8769, 0.3572, ..., 0.1360, 0.1673, 0.1097]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0408, 0.6133, 0.1624, ..., 0.7272, 0.2583, 0.9038]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.135668277740479 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 41, 88, ..., 2499883, + 2499951, 2500000]), + col_indices=tensor([ 638, 2365, 2400, ..., 44467, 46636, 49496]), + values=tensor([0.8518, 0.8769, 0.3572, ..., 0.1360, 0.1673, 0.1097]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0408, 0.6133, 0.1624, ..., 0.7272, 0.2583, 0.9038]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.135668277740479 seconds + +[40.86, 44.19, 38.96, 38.91, 39.18, 38.84, 39.26, 38.86, 39.41, 39.32] +[70.57] +13.17585039138794 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.135668277740479, 'TIME_S_1KI': 2.9802023751074618, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 929.8197621202468, 'W': 70.57} +[40.86, 44.19, 38.96, 38.91, 39.18, 38.84, 39.26, 38.86, 39.41, 39.32, 40.03, 39.02, 38.84, 38.98, 38.82, 40.3, 38.82, 38.8, 38.79, 38.72] +709.445 +35.47225 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3401, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.135668277740479, 'TIME_S_1KI': 2.9802023751074618, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 929.8197621202468, 'W': 70.57, 'J_1KI': 273.39599003829665, 'W_1KI': 20.74977947662452, 'W_D': 35.09774999999999, 'J_D': 462.44270307433595, 'W_D_1KI': 10.319832402234633, 'J_D_1KI': 3.0343523676079487} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..05ef34d --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 277, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.601917266845703, "TIME_S_1KI": 38.27406955539965, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1502.4378191399574, "W": 76.47, "J_1KI": 5423.963245992626, "W_1KI": 276.0649819494585, "W_D": 40.946749999999994, "J_D": 804.497786986649, "W_D_1KI": 147.82220216606495, "J_D_1KI": 533.6541594442779} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..e414a83 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.789712905883789} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 551, 1060, ..., 24998953, + 24999494, 25000000]), + col_indices=tensor([ 18, 53, 90, ..., 49926, 49944, 49970]), + values=tensor([0.6546, 0.1735, 0.7966, ..., 0.3203, 0.8871, 0.0598]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.4855, 0.9619, 0.0930, ..., 0.6959, 0.6112, 0.3764]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 3.789712905883789 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '277', '-ss', '50000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.601917266845703} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 523, 1012, ..., 24998990, + 24999509, 25000000]), + col_indices=tensor([ 171, 246, 332, ..., 49640, 49825, 49863]), + values=tensor([0.1620, 0.2511, 0.7784, ..., 0.0916, 0.2856, 0.2435]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.9840, 0.8151, 0.6106, ..., 0.4542, 0.6992, 0.9833]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.601917266845703 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 523, 1012, ..., 24998990, + 24999509, 25000000]), + col_indices=tensor([ 171, 246, 332, ..., 49640, 49825, 49863]), + values=tensor([0.1620, 0.2511, 0.7784, ..., 0.0916, 0.2856, 0.2435]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.9840, 0.8151, 0.6106, ..., 0.4542, 0.6992, 0.9833]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.601917266845703 seconds + +[39.75, 38.88, 39.04, 38.94, 38.9, 44.6, 39.87, 39.01, 39.47, 38.87] +[76.47] +19.647414922714233 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 277, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.601917266845703, 'TIME_S_1KI': 38.27406955539965, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.4378191399574, 'W': 76.47} +[39.75, 38.88, 39.04, 38.94, 38.9, 44.6, 39.87, 39.01, 39.47, 38.87, 39.65, 39.46, 38.97, 39.37, 39.36, 38.88, 39.18, 38.94, 39.03, 38.86] +710.465 +35.523250000000004 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 277, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.601917266845703, 'TIME_S_1KI': 38.27406955539965, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.4378191399574, 'W': 76.47, 'J_1KI': 5423.963245992626, 'W_1KI': 276.0649819494585, 'W_D': 40.946749999999994, 'J_D': 804.497786986649, 'W_D_1KI': 147.82220216606495, 'J_D_1KI': 533.6541594442779} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..41355a8 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 35925, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.468754291534424, "TIME_S_1KI": 0.29140582579079816, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.3263425445557, "W": 65.12, "J_1KI": 23.808666459138642, "W_1KI": 1.8126652748782186, "W_D": 29.625000000000007, "J_D": 389.1130666136743, "W_D_1KI": 0.8246346555323593, "J_D_1KI": 0.022954339750378826} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..77e4f27 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.04216480255126953} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR 
tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 24999, 25000]), + col_indices=tensor([ 8605, 8537, 29290, ..., 9179, 13978, 1469]), + values=tensor([0.2780, 0.8342, 0.8502, ..., 0.9082, 0.5496, 0.9536]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7685, 0.9033, 0.7153, ..., 0.8654, 0.8274, 0.9503]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.04216480255126953 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '24902', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.278246164321899} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([37031, 32096, 18727, ..., 44552, 41451, 6296]), + values=tensor([0.0751, 0.3287, 0.1662, ..., 0.5788, 0.0483, 0.4147]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5341, 0.4675, 0.3327, ..., 0.1193, 0.3106, 0.6128]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 7.278246164321899 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35925', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.468754291534424} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 24998, 24998, 25000]), + col_indices=tensor([44391, 10770, 45928, ..., 5594, 4079, 17032]), + values=tensor([0.6882, 0.4791, 0.4331, ..., 0.9470, 0.6037, 0.4941]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1889, 0.2559, 0.4696, ..., 0.5278, 0.4768, 0.4458]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.468754291534424 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 24998, 24998, 25000]), + col_indices=tensor([44391, 10770, 45928, ..., 5594, 4079, 17032]), + values=tensor([0.6882, 0.4791, 0.4331, ..., 0.9470, 0.6037, 0.4941]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1889, 0.2559, 0.4696, ..., 0.5278, 0.4768, 0.4458]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.468754291534424 seconds + +[39.59, 39.87, 39.3, 39.3, 39.78, 38.89, 39.35, 40.52, 39.13, 38.83] +[65.12] +13.134618282318115 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35925, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.468754291534424, 'TIME_S_1KI': 0.29140582579079816, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.3263425445557, 'W': 65.12} +[39.59, 39.87, 39.3, 39.3, 39.78, 38.89, 39.35, 40.52, 39.13, 38.83, 39.59, 39.23, 39.11, 38.91, 39.77, 40.62, 39.49, 39.29, 38.91, 38.85] +709.9 +35.495 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 35925, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.468754291534424, 'TIME_S_1KI': 0.29140582579079816, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.3263425445557, 'W': 65.12, 'J_1KI': 23.808666459138642, 'W_1KI': 1.8126652748782186, 'W_D': 29.625000000000007, 'J_D': 389.1130666136743, 'W_D_1KI': 0.8246346555323593, 'J_D_1KI': 0.022954339750378826} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..54cfc66 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 18331, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.45155644416809, "TIME_S_1KI": 0.570157462449844, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 852.8894222688675, "W": 65.62, "J_1KI": 46.52716285357414, "W_1KI": 3.5797283290600626, "W_D": 29.885000000000005, "J_D": 388.42731460690504, "W_D_1KI": 1.6302984016147513, "J_D_1KI": 0.08893668657545967} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..bef46d2 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, 
"MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.0693967342376709} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 5, ..., 124992, 124996, + 125000]), + col_indices=tensor([17438, 29688, 13553, ..., 36532, 44163, 44855]), + values=tensor([0.2242, 0.0224, 0.1461, ..., 0.6740, 0.5355, 0.8238]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.2153, 0.3458, 0.3359, ..., 0.7412, 0.9011, 0.6249]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.0693967342376709 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15130', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.666174411773682} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 4, ..., 124994, 124997, + 125000]), + col_indices=tensor([17329, 36001, 38373, ..., 475, 21379, 35295]), + values=tensor([0.0803, 0.2135, 0.1853, ..., 0.6523, 0.5299, 0.1396]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.4435, 0.4362, 0.5554, ..., 0.5263, 0.8506, 0.5178]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 8.666174411773682 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18331', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.45155644416809} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 124997, 124997, + 125000]), + col_indices=tensor([14185, 16264, 7088, ..., 33383, 46641, 46645]), + values=tensor([0.3059, 0.0880, 0.9320, ..., 0.7602, 0.8512, 0.4645]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.5025, 0.7968, 0.2806, ..., 0.1024, 0.8091, 0.6972]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.45155644416809 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 124997, 124997, + 125000]), + col_indices=tensor([14185, 16264, 7088, ..., 33383, 46641, 46645]), + values=tensor([0.3059, 0.0880, 0.9320, ..., 0.7602, 0.8512, 0.4645]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.5025, 0.7968, 0.2806, ..., 0.1024, 0.8091, 0.6972]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.45155644416809 seconds + +[39.86, 38.92, 44.85, 39.01, 39.6, 39.39, 38.97, 39.8, 41.08, 39.48] +[65.62] +12.997400522232056 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 18331, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.45155644416809, 'TIME_S_1KI': 0.570157462449844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.8894222688675, 'W': 65.62} +[39.86, 38.92, 44.85, 39.01, 39.6, 39.39, 38.97, 39.8, 41.08, 39.48, 40.67, 39.02, 39.09, 39.05, 39.17, 38.87, 39.48, 39.3, 39.44, 39.31] +714.7 +35.735 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 18331, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.45155644416809, 'TIME_S_1KI': 0.570157462449844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 852.8894222688675, 'W': 65.62, 'J_1KI': 46.52716285357414, 'W_1KI': 3.5797283290600626, 'W_D': 29.885000000000005, 'J_D': 388.42731460690504, 'W_D_1KI': 1.6302984016147513, 'J_D_1KI': 0.08893668657545967} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..2d402b4 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 461205, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.546576023101807, "TIME_S_1KI": 0.022867436439548153, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 854.1005449390411, "W": 65.22, "J_1KI": 1.8518891706270337, "W_1KI": 0.1414121702930367, "W_D": 29.99125, "J_D": 392.75594861090184, "W_D_1KI": 0.065028024414306, "J_D_1KI": 0.0001409959224516343} diff --git 
a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..a788520 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0131683349609375} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), + col_indices=tensor([1743, 190, 2771, ..., 2075, 3388, 3957]), + values=tensor([0.1814, 0.3494, 0.7591, ..., 0.1503, 0.3935, 0.5274]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3288, 0.8511, 0.6239, ..., 0.9211, 0.6649, 0.5940]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.0131683349609375 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '79736', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.8153021335601807} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), + col_indices=tensor([3060, 1065, 3686, ..., 959, 268, 4999]), + values=tensor([0.4441, 0.5663, 0.4237, ..., 0.7927, 0.1815, 0.5098]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.5827, 0.3654, 0.8856, ..., 0.0793, 0.3634, 0.4632]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 1.8153021335601807 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '461205', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.546576023101807} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), + col_indices=tensor([ 400, 983, 3289, ..., 2520, 735, 710]), + values=tensor([0.5782, 0.5847, 0.2189, ..., 0.6822, 0.4901, 0.5172]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8041, 0.1477, 0.3593, ..., 0.2372, 0.8803, 0.6540]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.546576023101807 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), + col_indices=tensor([ 400, 983, 3289, ..., 2520, 735, 710]), + values=tensor([0.5782, 0.5847, 0.2189, ..., 0.6822, 0.4901, 0.5172]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.8041, 0.1477, 0.3593, ..., 0.2372, 0.8803, 0.6540]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.546576023101807 seconds + +[42.91, 38.91, 39.2, 38.78, 38.91, 38.88, 39.21, 39.32, 39.19, 38.86] +[65.22] +13.09568452835083 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 461205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.546576023101807, 'TIME_S_1KI': 0.022867436439548153, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 854.1005449390411, 'W': 65.22} +[42.91, 38.91, 39.2, 38.78, 38.91, 38.88, 39.21, 39.32, 39.19, 38.86, 39.94, 38.72, 39.13, 38.84, 39.27, 38.74, 39.16, 38.84, 39.21, 38.82] +704.5749999999999 +35.22875 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 461205, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.546576023101807, 'TIME_S_1KI': 0.022867436439548153, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 854.1005449390411, 'W': 65.22, 'J_1KI': 1.8518891706270337, 'W_1KI': 0.1414121702930367, 'W_D': 29.99125, 'J_D': 392.75594861090184, 'W_D_1KI': 0.065028024414306, 'J_D_1KI': 0.0001409959224516343} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..89f214d --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 244335, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.417778253555298, "TIME_S_1KI": 0.04263727363478543, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.0210603523253, "W": 66.32, "J_1KI": 3.4993801966657467, "W_1KI": 0.27143061779933286, "W_D": 30.753, "J_D": 396.4786289055347, "W_D_1KI": 0.1258640800540242, "J_D_1KI": 
0.0005151291466798624} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..b5c1026 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.01594710350036621} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 12, ..., 24992, 24999, 25000]), + col_indices=tensor([ 675, 1985, 2689, ..., 3047, 3313, 3022]), + values=tensor([0.4390, 0.7079, 0.1777, ..., 0.9145, 0.2520, 0.4929]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8163, 0.8334, 0.7201, ..., 0.8897, 0.9022, 0.1765]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.01594710350036621 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '65842', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.8294758796691895} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 9, ..., 24993, 24997, 25000]), + col_indices=tensor([ 33, 1908, 3594, ..., 299, 386, 4209]), + values=tensor([0.2193, 0.5619, 0.9883, ..., 0.1847, 0.2793, 0.9697]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6444, 0.1854, 0.4415, ..., 0.6088, 0.2901, 0.0299]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 2.8294758796691895 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '244335', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.417778253555298} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 8, ..., 24990, 24997, 25000]), + col_indices=tensor([2557, 3235, 4228, ..., 486, 3364, 4712]), + values=tensor([0.2079, 0.9240, 0.7430, ..., 0.9071, 0.8940, 0.1396]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3103, 0.6909, 0.1567, ..., 0.3064, 0.4398, 0.1480]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.417778253555298 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 8, ..., 24990, 24997, 25000]), + col_indices=tensor([2557, 3235, 4228, ..., 486, 3364, 4712]), + values=tensor([0.2079, 0.9240, 0.7430, ..., 0.9071, 0.8940, 0.1396]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3103, 0.6909, 0.1567, ..., 0.3064, 0.4398, 0.1480]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.417778253555298 seconds + +[45.51, 38.78, 39.51, 38.99, 39.48, 39.2, 39.32, 39.33, 39.41, 38.75] +[66.32] +12.892356157302856 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 244335, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.417778253555298, 'TIME_S_1KI': 0.04263727363478543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.0210603523253, 'W': 66.32} +[45.51, 38.78, 39.51, 38.99, 39.48, 39.2, 39.32, 39.33, 39.41, 38.75, 39.41, 38.8, 39.14, 39.28, 40.7, 39.8, 39.35, 39.07, 39.32, 40.05] +711.3399999999999 +35.56699999999999 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 244335, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.417778253555298, 'TIME_S_1KI': 0.04263727363478543, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.0210603523253, 'W': 66.32, 'J_1KI': 3.4993801966657467, 'W_1KI': 0.27143061779933286, 'W_D': 30.753, 'J_D': 396.4786289055347, 'W_D_1KI': 0.1258640800540242, 'J_D_1KI': 0.0005151291466798624} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..d1e9eee --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 41575, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.472304821014404, "TIME_S_1KI": 0.2518894725439424, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 875.4065453624726, "W": 66.47, "J_1KI": 
21.056080465723934, "W_1KI": 1.5987973541791942, "W_D": 30.839750000000002, "J_D": 406.1579510657788, "W_D_1KI": 0.7417859290438966, "J_D_1KI": 0.017842114949943394} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..9d6f728 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.03741025924682617} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 50, 95, ..., 249917, 249960, + 250000]), + col_indices=tensor([ 56, 131, 133, ..., 4645, 4665, 4841]), + values=tensor([0.2594, 0.3669, 0.3309, ..., 0.9204, 0.7750, 0.3008]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7286, 0.8651, 0.2961, ..., 0.7120, 0.4132, 0.7079]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.03741025924682617 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28067', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.088342666625977} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
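A note for readers tracing these logs: each ['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', ...] list is the argv of one benchmark run, pinned to a single core through the OpenMP environment variables. A minimal way to drive such a run from Python is sketched below; the helper function and its name are assumptions for illustration, not the actual pytorch/batch.py code.

import subprocess

def run_spmv(image, iterations, size, density, cores=1):
    # Hypothetical wrapper mirroring the argv lists echoed in these logs.
    cmd = [
        "apptainer", "run",
        "--env", "OMP_PROC_BIND=true",
        "--env", "OMP_PLACES={0:1}",
        image, "python3", "spmv.py", "synthetic", "csr",
        str(iterations), "-ss", str(size), "-sd", str(density), "-c", str(cores),
    ]
    # Capture stdout so the JSON summary line can be parsed afterwards.
    return subprocess.run(cmd, capture_output=True, text=True, check=True)

# e.g. run_spmv("pytorch-epyc_7313p.sif", 100, 5000, 0.01)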
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 51, 92, ..., 249888, 249951, + 250000]), + col_indices=tensor([ 195, 232, 275, ..., 4637, 4801, 4910]), + values=tensor([0.9933, 0.3255, 0.9817, ..., 0.2679, 0.2640, 0.0554]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.2297, 0.2173, 0.6945, ..., 0.5761, 0.5521, 0.3650]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 7.088342666625977 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '41575', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.472304821014404} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 48, 88, ..., 249902, 249950, + 250000]), + col_indices=tensor([ 75, 83, 302, ..., 4746, 4941, 4952]), + values=tensor([0.9930, 0.3893, 0.6584, ..., 0.0382, 0.8338, 0.2904]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5807, 0.7893, 0.2250, ..., 0.2178, 0.8594, 0.2155]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.472304821014404 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
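The echoed matrix = matrix.to_sparse_csr().type(torch.float32) line and the TIME_S fields correspond to converting a synthetic matrix to CSR and timing repeated sparse matrix-vector products. The snippet below is a minimal sketch of that pattern under assumed details (random index generation, @ for the product, wall-clock timing); it is not the actual spmv.py.

import time
import torch

def make_synthetic_csr(n=5000, density=0.001):
    # Random COO entries at the requested density, then CSR conversion.
    # Duplicate indices get coalesced, so nnz can come out slightly low.
    nnz = int(n * n * density)
    indices = torch.randint(0, n, (2, nnz))
    values = torch.rand(nnz)
    coo = torch.sparse_coo_tensor(indices, values, (n, n)).coalesce()
    return coo.to_sparse_csr().type(torch.float32)

def time_spmv(matrix, iterations):
    vec = torch.rand(matrix.shape[1])
    start = time.time()
    for _ in range(iterations):
        y = matrix @ vec   # CSR sparse matrix times dense vector
    return time.time() - start

# e.g. time_spmv(make_synthetic_csr(5000, 0.001), 100) -> a TIME_S-style number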
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 48, 88, ..., 249902, 249950, + 250000]), + col_indices=tensor([ 75, 83, 302, ..., 4746, 4941, 4952]), + values=tensor([0.9930, 0.3893, 0.6584, ..., 0.0382, 0.8338, 0.2904]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5807, 0.7893, 0.2250, ..., 0.2178, 0.8594, 0.2155]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.472304821014404 seconds + +[39.83, 38.85, 39.09, 38.79, 39.53, 38.8, 39.37, 39.26, 39.29, 38.81] +[66.47] +13.169949531555176 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 41575, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.472304821014404, 'TIME_S_1KI': 0.2518894725439424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 875.4065453624726, 'W': 66.47} +[39.83, 38.85, 39.09, 38.79, 39.53, 38.8, 39.37, 39.26, 39.29, 38.81, 39.98, 38.99, 38.83, 39.07, 38.9, 38.8, 38.86, 39.29, 47.91, 39.33] +712.6049999999999 +35.63025 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 41575, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.472304821014404, 'TIME_S_1KI': 0.2518894725439424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 875.4065453624726, 'W': 66.47, 'J_1KI': 21.056080465723934, 'W_1KI': 1.5987973541791942, 'W_D': 30.839750000000002, 'J_D': 406.1579510657788, 'W_D_1KI': 0.7417859290438966, 'J_D_1KI': 0.017842114949943394} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..e407283 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 8178, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.511467218399048, "TIME_S_1KI": 1.2853347051111579, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 880.7755374526977, "W": 66.16, "J_1KI": 107.70060374818999, "W_1KI": 8.089997554414282, "W_D": 30.69249999999999, "J_D": 408.60343384623513, "W_D_1KI": 3.7530569821472226, "J_D_1KI": 0.4589211276775767} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..b9e58d8 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.14169001579284668} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 264, 521, ..., 1249500, + 1249758, 1250000]), + col_indices=tensor([ 22, 35, 54, ..., 4954, 4963, 4982]), + values=tensor([0.3715, 0.6699, 0.1465, ..., 0.9132, 0.2376, 0.1878]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.3853, 0.7833, 0.5244, ..., 0.5756, 0.3818, 0.8103]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.14169001579284668 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '7410', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.513462543487549} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 245, 477, ..., 1249530, + 1249759, 1250000]), + col_indices=tensor([ 15, 78, 164, ..., 4968, 4978, 4992]), + values=tensor([0.5947, 0.7215, 0.2123, ..., 0.7493, 0.6227, 0.0355]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4245, 0.9857, 0.9704, ..., 0.1643, 0.0459, 0.0545]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 9.513462543487549 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '8178', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.511467218399048} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 243, 531, ..., 1249480, + 1249728, 1250000]), + col_indices=tensor([ 10, 29, 31, ..., 4961, 4980, 4981]), + values=tensor([0.2878, 0.6436, 0.4714, ..., 0.0074, 0.1096, 0.3758]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4226, 0.4894, 0.7354, ..., 0.5546, 0.7888, 0.4627]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.511467218399048 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 243, 531, ..., 1249480, + 1249728, 1250000]), + col_indices=tensor([ 10, 29, 31, ..., 4961, 4980, 4981]), + values=tensor([0.2878, 0.6436, 0.4714, ..., 0.0074, 0.1096, 0.3758]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4226, 0.4894, 0.7354, ..., 0.5546, 0.7888, 0.4627]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.511467218399048 seconds + +[40.21, 39.14, 39.56, 39.02, 39.43, 41.29, 39.49, 39.07, 39.13, 39.19] +[66.16] +13.31281042098999 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 8178, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.511467218399048, 'TIME_S_1KI': 1.2853347051111579, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 880.7755374526977, 'W': 66.16} +[40.21, 39.14, 39.56, 39.02, 39.43, 41.29, 39.49, 39.07, 39.13, 39.19, 40.14, 39.04, 40.64, 38.87, 39.1, 38.93, 39.37, 39.03, 38.8, 39.34] +709.3500000000001 +35.46750000000001 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 8178, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.511467218399048, 'TIME_S_1KI': 1.2853347051111579, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 880.7755374526977, 'W': 66.16, 'J_1KI': 107.70060374818999, 'W_1KI': 8.089997554414282, 'W_D': 30.69249999999999, 'J_D': 408.60343384623513, 'W_D_1KI': 3.7530569821472226, 'J_D_1KI': 0.4589211276775767} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..cc784c7 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 3463, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.092161417007446, "TIME_S_1KI": 2.9142828232767677, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 888.6800115585327, "W": 67.72, "J_1KI": 256.6214298465298, "W_1KI": 19.555298873808837, "W_D": 32.353, "J_D": 424.56385726451873, "W_D_1KI": 9.34247762056021, "J_D_1KI": 2.697798908622642} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..8f92a77 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 
2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.30318737030029297} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 515, 1018, ..., 2498999, + 2499493, 2500000]), + col_indices=tensor([ 4, 21, 24, ..., 4955, 4957, 4967]), + values=tensor([0.5325, 0.9134, 0.0336, ..., 0.9679, 0.3618, 0.7033]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8211, 0.4248, 0.8998, ..., 0.9656, 0.3613, 0.2992]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.30318737030029297 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3463', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.092161417007446} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 498, 1000, ..., 2499016, + 2499523, 2500000]), + col_indices=tensor([ 2, 43, 50, ..., 4978, 4986, 4997]), + values=tensor([0.9352, 0.1649, 0.8628, ..., 0.6965, 0.7003, 0.5455]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0499, 0.6757, 0.4614, ..., 0.0542, 0.7848, 0.0169]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.092161417007446 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 498, 1000, ..., 2499016, + 2499523, 2500000]), + col_indices=tensor([ 2, 43, 50, ..., 4978, 4986, 4997]), + values=tensor([0.9352, 0.1649, 0.8628, ..., 0.6965, 0.7003, 0.5455]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0499, 0.6757, 0.4614, ..., 0.0542, 0.7848, 0.0169]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.092161417007446 seconds + +[40.21, 38.8, 38.93, 38.71, 39.03, 38.9, 38.74, 39.19, 39.25, 38.74] +[67.72] +13.122859001159668 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3463, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.092161417007446, 'TIME_S_1KI': 2.9142828232767677, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.6800115585327, 'W': 67.72} +[40.21, 38.8, 38.93, 38.71, 39.03, 38.9, 38.74, 39.19, 39.25, 38.74, 40.21, 38.79, 39.15, 38.78, 38.85, 38.73, 44.44, 39.11, 38.88, 38.96] +707.3399999999999 +35.367 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 3463, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.092161417007446, 'TIME_S_1KI': 2.9142828232767677, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 888.6800115585327, 'W': 67.72, 'J_1KI': 256.6214298465298, 'W_1KI': 19.555298873808837, 'W_D': 32.353, 'J_D': 424.56385726451873, 'W_D_1KI': 9.34247762056021, 'J_D_1KI': 2.697798908622642} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..b65b863 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1741, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.298704862594604, "TIME_S_1KI": 5.915396245028492, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1027.8759766101837, "W": 74.46, "J_1KI": 590.3940129868946, "W_1KI": 42.76852383687535, "W_D": 39.113499999999995, "J_D": 539.9385846245289, "W_D_1KI": 22.466111430212518, "J_D_1KI": 12.904142119593635} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..987a685 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.6030943393707275} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1017, 2015, ..., 4997969, + 4998981, 5000000]), + col_indices=tensor([ 0, 31, 34, ..., 4984, 4989, 4995]), + values=tensor([0.9331, 0.7841, 0.5601, ..., 0.4749, 0.8668, 0.1575]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.1492, 0.3030, 0.4894, ..., 0.2440, 0.4033, 0.0238]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 0.6030943393707275 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1741', '-ss', '5000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.298704862594604} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 961, 1935, ..., 4998003, + 4999013, 5000000]), + col_indices=tensor([ 0, 3, 5, ..., 4968, 4970, 4989]), + values=tensor([0.3710, 0.5603, 0.2176, ..., 0.5072, 0.2694, 0.8987]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.6029, 0.2962, 0.8345, ..., 0.0907, 0.1121, 0.2666]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.298704862594604 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
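Each entry closes with a power/energy summary: a list of idle wattage samples, a single loaded wattage reading, the elapsed wall time of the measured run, and a dict of derived values. The arithmetic below reproduces the derived fields of the synthetic_5000_0.1 entry above; the formulas are inferred from the logged numbers rather than taken from batch.py, so treat them as assumptions.

# Reproducing the derived fields of the synthetic_5000_0.1 entry above.
# Formulas are inferred from the logged values, not from batch.py.
idle_samples = [40.21, 38.8, 38.93, 38.71, 39.03, 38.9, 38.74, 39.19, 39.25, 38.74,
                40.21, 38.79, 39.15, 38.78, 38.85, 38.73, 44.44, 39.11, 38.88, 38.96]
loaded_watts = 67.72                  # the single "[67.72]" reading
elapsed_s = 13.122859001159668        # wall time of the measured run
iterations = 3463
time_s = 10.092161417007446           # TIME_S reported by spmv.py

per_1k = iterations / 1000.0
idle_watts = sum(idle_samples) / len(idle_samples)   # ~35.367 W

J = loaded_watts * elapsed_s          # ~888.68, matches "J"
W_D = loaded_watts - idle_watts       # ~32.353, matches "W_D"
J_D = W_D * elapsed_s                 # ~424.56, matches "J_D"
TIME_S_1KI = time_s / per_1k          # ~2.9143
W_1KI, J_1KI = loaded_watts / per_1k, J / per_1k     # ~19.56, ~256.62
W_D_1KI = W_D / per_1k                # ~9.342
# The logged J_D_1KI (~2.698) equals W_D_1KI / per_1k rather than
# J_D / per_1k; that may be worth double-checking in the metric code.
J_D_1KI = W_D_1KI / per_1k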
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 961, 1935, ..., 4998003, + 4999013, 5000000]), + col_indices=tensor([ 0, 3, 5, ..., 4968, 4970, 4989]), + values=tensor([0.3710, 0.5603, 0.2176, ..., 0.5072, 0.2694, 0.8987]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.6029, 0.2962, 0.8345, ..., 0.0907, 0.1121, 0.2666]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.298704862594604 seconds + +[39.56, 39.38, 38.97, 38.88, 38.82, 39.21, 39.23, 39.16, 39.31, 39.25] +[74.46] +13.804404735565186 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1741, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.298704862594604, 'TIME_S_1KI': 5.915396245028492, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1027.8759766101837, 'W': 74.46} +[39.56, 39.38, 38.97, 38.88, 38.82, 39.21, 39.23, 39.16, 39.31, 39.25, 40.48, 40.05, 39.23, 38.85, 39.62, 39.32, 38.87, 39.04, 39.64, 39.41] +706.93 +35.3465 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1741, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.298704862594604, 'TIME_S_1KI': 5.915396245028492, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1027.8759766101837, 'W': 74.46, 'J_1KI': 590.3940129868946, 'W_1KI': 42.76852383687535, 'W_D': 39.113499999999995, 'J_D': 539.9385846245289, 'W_D_1KI': 22.466111430212518, 'J_D_1KI': 12.904142119593635} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..20f95b6 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 1166, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.334495306015015, "TIME_S_1KI": 8.8632035214537, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1095.1619911956786, "W": 75.64, "J_1KI": 939.2469907338582, "W_1KI": 64.8713550600343, "W_D": 40.43425, "J_D": 585.4316993985176, "W_D_1KI": 34.67774442538593, "J_D_1KI": 29.74077566499651} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..be3ca1f --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.9000704288482666} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1542, 2985, ..., 7497037, + 7498525, 7500000]), + col_indices=tensor([ 0, 1, 6, ..., 4988, 4994, 4997]), + values=tensor([0.4057, 0.2930, 0.9549, ..., 0.4574, 0.5414, 0.6416]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.4314, 0.0098, 0.8060, ..., 0.6655, 0.0522, 0.0757]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 0.9000704288482666 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1166', '-ss', '5000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.334495306015015} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1525, 3027, ..., 7497015, + 7498498, 7500000]), + col_indices=tensor([ 12, 15, 18, ..., 4991, 4994, 4995]), + values=tensor([0.8648, 0.2387, 0.0206, ..., 0.7504, 0.0755, 0.9898]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.0126, 0.2581, 0.5840, ..., 0.5862, 0.5778, 0.3525]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.334495306015015 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1525, 3027, ..., 7497015, + 7498498, 7500000]), + col_indices=tensor([ 12, 15, 18, ..., 4991, 4994, 4995]), + values=tensor([0.8648, 0.2387, 0.0206, ..., 0.7504, 0.0755, 0.9898]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.0126, 0.2581, 0.5840, ..., 0.5862, 0.5778, 0.3525]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.334495306015015 seconds + +[41.12, 38.83, 38.85, 38.96, 39.38, 38.75, 39.14, 38.88, 38.85, 38.77] +[75.64] +14.478609085083008 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1166, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.334495306015015, 'TIME_S_1KI': 8.8632035214537, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1095.1619911956786, 'W': 75.64} +[41.12, 38.83, 38.85, 38.96, 39.38, 38.75, 39.14, 38.88, 38.85, 38.77, 40.18, 38.79, 39.32, 38.91, 39.22, 38.92, 39.3, 39.28, 39.3, 38.8] +704.115 +35.20575 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 1166, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.334495306015015, 'TIME_S_1KI': 8.8632035214537, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1095.1619911956786, 'W': 75.64, 'J_1KI': 939.2469907338582, 'W_1KI': 64.8713550600343, 'W_D': 40.43425, 'J_D': 585.4316993985176, 'W_D_1KI': 34.67774442538593, 'J_D_1KI': 29.74077566499651} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..973144c --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 704, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.429930925369263, "TIME_S_1KI": 14.815242791717703, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1157.2951932907106, "W": 76.7, "J_1KI": 1643.8852177424865, "W_1KI": 108.94886363636364, "W_D": 41.2785, "J_D": 622.8345454530717, "W_D_1KI": 58.63423295454546, "J_D_1KI": 83.2872627195248} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..0f653eb --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 1.4912726879119873} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1951, 3944, ..., 9995965, + 9997988, 10000000]), + col_indices=tensor([ 6, 14, 15, ..., 4996, 4997, 4998]), + values=tensor([0.7071, 0.0905, 0.6037, ..., 0.4689, 0.3914, 0.3516]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7487, 0.9804, 0.4485, ..., 0.3781, 0.3818, 0.0598]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 1.4912726879119873 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '704', '-ss', '5000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.429930925369263} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1962, 3997, ..., 9995983, + 9998031, 10000000]), + col_indices=tensor([ 2, 3, 4, ..., 4989, 4991, 4999]), + values=tensor([0.3156, 0.5704, 0.4238, ..., 0.9413, 0.7746, 0.6098]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3215, 0.3275, 0.6323, ..., 0.0024, 0.8893, 0.0654]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.429930925369263 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1962, 3997, ..., 9995983, + 9998031, 10000000]), + col_indices=tensor([ 2, 3, 4, ..., 4989, 4991, 4999]), + values=tensor([0.3156, 0.5704, 0.4238, ..., 0.9413, 0.7746, 0.6098]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3215, 0.3275, 0.6323, ..., 0.0024, 0.8893, 0.0654]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.429930925369263 seconds + +[40.15, 39.37, 39.68, 38.87, 39.22, 39.13, 41.47, 38.91, 39.0, 39.01] +[76.7] +15.088594436645508 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 704, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.429930925369263, 'TIME_S_1KI': 14.815242791717703, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1157.2951932907106, 'W': 76.7} +[40.15, 39.37, 39.68, 38.87, 39.22, 39.13, 41.47, 38.91, 39.0, 39.01, 40.04, 39.56, 38.96, 39.33, 38.96, 38.95, 39.21, 38.91, 38.9, 40.8] +708.4300000000001 +35.4215 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 704, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.429930925369263, 'TIME_S_1KI': 14.815242791717703, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1157.2951932907106, 'W': 76.7, 'J_1KI': 1643.8852177424865, 'W_1KI': 108.94886363636364, 'W_D': 41.2785, 'J_D': 622.8345454530717, 'W_D_1KI': 58.63423295454546, 'J_D_1KI': 83.2872627195248} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..70ebf3a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 569, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.483191013336182, "TIME_S_1KI": 18.42388578793705, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1197.8375442028046, "W": 76.6, "J_1KI": 2105.162643590166, "W_1KI": 134.62214411247803, "W_D": 41.0435, "J_D": 641.8204340142012, "W_D_1KI": 72.13268892794376, "J_D_1KI": 126.77098229867093} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..d4faa36 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 1.8439099788665771} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2531, 5020, ..., 12494917, + 12497450, 12500000]), + col_indices=tensor([ 0, 1, 9, ..., 4992, 4994, 4999]), + values=tensor([0.6676, 0.2754, 0.2712, ..., 0.4447, 0.2547, 0.8500]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9338, 0.0413, 0.5968, ..., 0.6366, 0.2029, 0.7249]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 1.8439099788665771 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '569', '-ss', '5000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.483191013336182} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2491, 4957, ..., 12495037, + 12497545, 12500000]), + col_indices=tensor([ 0, 2, 5, ..., 4995, 4998, 4999]), + values=tensor([0.5758, 0.7291, 0.3910, ..., 0.8483, 0.9816, 0.9388]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0835, 0.8623, 0.0534, ..., 0.1116, 0.3605, 0.8512]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.483191013336182 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2491, 4957, ..., 12495037, + 12497545, 12500000]), + col_indices=tensor([ 0, 2, 5, ..., 4995, 4998, 4999]), + values=tensor([0.5758, 0.7291, 0.3910, ..., 0.8483, 0.9816, 0.9388]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0835, 0.8623, 0.0534, ..., 0.1116, 0.3605, 0.8512]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.483191013336182 seconds + +[39.92, 38.82, 39.41, 39.19, 39.02, 39.24, 39.27, 38.82, 38.86, 38.89] +[76.6] +15.637565851211548 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 569, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.483191013336182, 'TIME_S_1KI': 18.42388578793705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1197.8375442028046, 'W': 76.6} +[39.92, 38.82, 39.41, 39.19, 39.02, 39.24, 39.27, 38.82, 38.86, 38.89, 40.74, 39.81, 39.32, 39.32, 39.28, 44.23, 39.36, 38.92, 39.11, 38.75] +711.1299999999999 +35.55649999999999 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 569, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.483191013336182, 'TIME_S_1KI': 18.42388578793705, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1197.8375442028046, 'W': 76.6, 'J_1KI': 2105.162643590166, 'W_1KI': 134.62214411247803, 'W_D': 41.0435, 'J_D': 641.8204340142012, 'W_D_1KI': 72.13268892794376, 'J_D_1KI': 126.77098229867093} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..c3ed634 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 569391, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.146198511123657, "TIME_S_1KI": 0.017819386873209546, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 837.5037681818008, "W": 65.05, "J_1KI": 1.4708763717406856, "W_1KI": 0.11424486864035434, "W_D": 29.76475, "J_D": 383.2143010605574, "W_D_1KI": 0.05227471105092985, "J_D_1KI": 9.180810910416543e-05} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..6482c19 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.025912761688232422} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([2955, 4682, 3449, 1969, 3854, 251, 2319, 668, 498, + 3572, 3854, 2322, 4794, 541, 1204, 3498, 557, 2286, + 3599, 1633, 263, 572, 401, 2359, 4315, 911, 3580, + 647, 4452, 4876, 2180, 2875, 4244, 3344, 2209, 559, + 1303, 3655, 2944, 2633, 4973, 1628, 4101, 143, 4999, + 4517, 2519, 3173, 1189, 2853, 1263, 1640, 2218, 2187, + 500, 639, 1141, 4234, 755, 459, 531, 3206, 4041, + 4859, 2211, 4404, 1761, 2224, 3743, 1719, 4799, 2055, + 4619, 4316, 4729, 1778, 2613, 3964, 2169, 739, 4154, + 1973, 1793, 4308, 801, 3310, 342, 3095, 520, 3764, + 4097, 3032, 1897, 3411, 2128, 1565, 3684, 2810, 2598, + 1157, 2540, 3590, 4555, 145, 3631, 3914, 308, 84, + 3223, 2928, 1083, 1157, 2416, 2092, 1789, 4234, 2339, + 866, 1332, 582, 480, 407, 2460, 594, 1184, 1302, + 4554, 3210, 212, 2368, 3876, 3742, 2272, 1393, 406, + 80, 3371, 4169, 2118, 452, 2986, 932, 49, 3160, + 2327, 713, 2877, 3484, 2257, 182, 3066, 1789, 610, + 3257, 2119, 4757, 577, 310, 2782, 116, 2689, 2876, + 2666, 144, 4643, 3955, 517, 1439, 3012, 4289, 3484, + 1701, 3447, 1824, 1951, 3681, 4272, 2740, 4790, 1804, + 3833, 1146, 672, 4424, 3920, 2173, 2155, 2090, 4174, + 4084, 560, 3425, 3143, 3470, 1712, 3489, 1811, 1766, + 4872, 3288, 771, 4485, 3269, 2695, 4584, 535, 200, + 2606, 3547, 3737, 102, 696, 271, 2048, 2470, 1740, + 2219, 829, 4785, 1264, 2283, 4517, 1344, 1726, 844, + 2384, 4310, 42, 3459, 2017, 2199, 3093, 4339, 313, + 1332, 303, 3733, 2074, 4094, 4006, 429, 1830, 3447, + 3735, 3446, 1158, 2156, 2171, 2458, 989]), + values=tensor([0.1546, 0.0338, 0.3832, 0.6845, 0.5359, 0.2115, 0.5134, + 0.5236, 0.8368, 0.0286, 0.8691, 0.3848, 0.7412, 0.7376, + 0.0492, 0.2561, 0.4285, 0.6403, 0.4264, 0.9823, 0.2432, + 0.9496, 0.9648, 0.1994, 0.5806, 0.9987, 0.8946, 0.3796, + 0.9742, 0.5421, 0.7745, 0.0707, 0.4271, 0.7771, 0.6813, + 0.4916, 0.5119, 0.7062, 0.2752, 0.9391, 0.5947, 0.6220, + 0.1073, 0.5082, 0.8956, 0.6658, 0.1040, 0.0454, 0.1560, + 0.7150, 0.4739, 0.5494, 0.1706, 0.6723, 0.3127, 0.2460, + 0.1585, 0.6938, 0.6937, 0.2200, 0.6734, 0.5021, 0.7544, + 0.1720, 0.3861, 0.8179, 0.3810, 0.0801, 0.6567, 0.1205, + 0.2925, 0.1255, 0.7882, 0.3753, 0.2290, 0.4877, 0.9183, + 0.7718, 0.3047, 0.6194, 0.2851, 0.3419, 0.7256, 0.2356, + 0.5102, 0.4050, 0.2487, 0.5983, 0.2252, 0.4330, 0.1968, + 0.1306, 0.4006, 0.3092, 0.2645, 0.4635, 0.3587, 0.7536, + 0.7019, 0.0792, 0.1678, 0.7485, 0.9817, 0.9065, 0.9994, + 0.3911, 0.8833, 0.5411, 0.3679, 0.9711, 0.9853, 0.6437, + 0.7861, 0.5048, 0.2591, 0.7726, 0.6174, 0.1004, 0.1489, + 0.6017, 0.8836, 0.5571, 0.7423, 0.4796, 0.5887, 0.7010, + 0.6198, 0.7601, 0.9790, 0.0717, 0.8254, 0.1983, 0.6497, + 0.9092, 0.9977, 0.7279, 0.3767, 0.5564, 0.8901, 0.2896, + 0.4914, 0.7288, 0.7282, 0.1569, 0.7491, 0.2024, 0.5103, + 0.2006, 0.2616, 0.3680, 0.8784, 0.5095, 0.3100, 0.5468, + 0.7473, 0.8067, 0.1566, 0.7922, 0.7836, 0.0734, 0.2895, + 0.0658, 0.3948, 0.1442, 0.9166, 0.4276, 0.4709, 0.0305, + 0.5373, 0.3415, 0.0906, 0.7116, 0.3303, 0.4381, 0.5263, + 0.3654, 0.9325, 0.0370, 0.6385, 0.6709, 0.3766, 0.7549, + 0.4298, 0.9883, 0.6034, 0.5195, 0.8067, 0.8173, 0.5194, + 0.5329, 0.4999, 0.5357, 0.5843, 0.4652, 0.6589, 0.6127, + 0.9625, 0.8533, 0.0618, 0.2250, 0.1341, 0.3851, 0.9176, + 0.6106, 0.0281, 
0.2538, 0.5580, 0.0137, 0.4927, 0.5743, + 0.6268, 0.5818, 0.7719, 0.1711, 0.1084, 0.6064, 0.1367, + 0.6312, 0.8778, 0.2960, 0.3372, 0.8224, 0.9699, 0.6070, + 0.2907, 0.4693, 0.5694, 0.7710, 0.6091, 0.5452, 0.3569, + 0.0226, 0.4986, 0.6727, 0.5738, 0.8629, 0.9155, 0.9081, + 0.9105, 0.9222, 0.7776, 0.3699, 0.9402, 0.5035, 0.4769, + 0.4797, 0.1466, 0.6411, 0.6861, 0.6601]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.1758, 0.5990, 0.4260, ..., 0.6457, 0.1523, 0.4408]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.025912761688232422 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '40520', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.7472190856933594} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4979, 4303, 4566, 4762, 4549, 190, 2004, 2509, 751, + 928, 1907, 2345, 3485, 628, 2908, 2256, 1744, 3960, + 753, 741, 1691, 3236, 822, 110, 1716, 2413, 2905, + 1494, 3365, 3162, 4499, 3337, 4292, 2749, 1497, 1083, + 571, 2576, 1389, 2374, 4678, 2621, 3191, 2850, 4442, + 3038, 2838, 3147, 1692, 3297, 1369, 316, 44, 3096, + 1795, 1502, 4078, 1530, 382, 1493, 2347, 369, 4086, + 3463, 1734, 324, 4159, 222, 2163, 3883, 2624, 3179, + 1955, 2358, 4353, 3311, 2924, 323, 448, 10, 1850, + 3616, 4600, 4760, 641, 4296, 2902, 1005, 4495, 3758, + 2177, 1780, 1919, 576, 3690, 2414, 4915, 960, 2888, + 4117, 1616, 3504, 3045, 3420, 3788, 3250, 4185, 565, + 1378, 1958, 1079, 3142, 700, 3775, 2755, 2658, 4044, + 3937, 2159, 2450, 691, 2810, 2854, 3907, 15, 3162, + 4347, 4859, 392, 4662, 1450, 2377, 2601, 2733, 2929, + 1056, 3918, 1597, 324, 1218, 4516, 2665, 4847, 4488, + 1656, 2761, 652, 3181, 916, 67, 4641, 3028, 215, + 2829, 990, 4480, 3427, 3715, 1658, 1471, 2613, 915, + 4016, 2089, 4, 4230, 2407, 4012, 246, 2239, 359, + 1734, 4596, 4776, 1554, 2314, 1733, 3775, 4475, 4112, + 4261, 4903, 3913, 101, 2681, 3353, 525, 3858, 185, + 877, 1621, 4340, 4135, 463, 2930, 2999, 1174, 3548, + 1690, 2905, 3325, 3911, 711, 2787, 2453, 456, 3056, + 3218, 3802, 3733, 687, 2791, 1304, 4779, 2332, 3970, + 155, 2041, 2588, 2371, 3243, 3760, 181, 1764, 909, + 39, 3595, 2215, 3497, 2575, 3579, 257, 3215, 1268, + 660, 3425, 4720, 1730, 3977, 302, 2922, 4097, 2384, + 2222, 3777, 3527, 253, 2444, 2627, 880]), + values=tensor([0.6428, 0.9934, 0.2143, 0.2775, 0.8801, 0.9209, 0.7937, + 0.3950, 0.9558, 0.4336, 0.7173, 0.5611, 0.9142, 0.7220, + 0.9288, 0.0252, 0.1643, 0.3238, 0.3899, 0.9050, 0.8167, + 0.0316, 0.4642, 0.2775, 0.6749, 0.3101, 0.4910, 0.2421, + 0.2427, 0.1619, 0.6985, 0.1109, 0.5934, 0.6019, 0.5879, + 0.2363, 0.8960, 0.8107, 0.6787, 0.6439, 0.2015, 0.5182, + 0.3918, 0.2235, 0.7280, 0.1655, 0.1523, 0.7307, 0.2277, + 0.0713, 0.3285, 0.3448, 0.9658, 0.6123, 0.3458, 0.8019, + 0.7467, 0.1797, 0.0926, 0.5584, 0.6018, 0.7448, 0.7637, + 
0.6467, 0.4119, 0.4865, 0.3304, 0.8004, 0.4028, 0.3316, + 0.4346, 0.2111, 0.8264, 0.5751, 0.1845, 0.5351, 0.6490, + 0.1782, 0.3206, 0.0372, 0.4209, 0.6069, 0.2848, 0.1110, + 0.4496, 0.7402, 0.9814, 0.1676, 0.9158, 0.3694, 0.5027, + 0.9759, 0.3630, 0.0452, 0.6637, 0.4099, 0.7193, 0.9349, + 0.4031, 0.4884, 0.0588, 0.8852, 0.4143, 0.6287, 0.2603, + 0.0585, 0.6828, 0.0068, 0.4013, 0.8395, 0.5456, 0.8367, + 0.2039, 0.1976, 0.9018, 0.5362, 0.0977, 0.9421, 0.0954, + 0.0477, 0.2774, 0.5744, 0.7438, 0.6477, 0.2957, 0.5835, + 0.7384, 0.3101, 0.2011, 0.7432, 0.1000, 0.7629, 0.0287, + 0.1747, 0.7484, 0.6466, 0.2628, 0.6546, 0.9448, 0.6789, + 0.3641, 0.3340, 0.2888, 0.6351, 0.9463, 0.7934, 0.2568, + 0.0882, 0.7783, 0.9130, 0.2947, 0.2702, 0.1584, 0.1104, + 0.9967, 0.4045, 0.5340, 0.4029, 0.5998, 0.3784, 0.8814, + 0.3307, 0.4333, 0.0862, 0.8030, 0.7150, 0.6042, 0.4672, + 0.1420, 0.3126, 0.5565, 0.5063, 0.1371, 0.4110, 0.5830, + 0.3460, 0.8459, 0.9932, 0.3370, 0.3110, 0.0982, 0.6162, + 0.8866, 0.9938, 0.9137, 0.2599, 0.1340, 0.1968, 0.4654, + 0.2563, 0.6561, 0.7845, 0.2903, 0.6104, 0.9219, 0.6603, + 0.0736, 0.2080, 0.8730, 0.0269, 0.5148, 0.9185, 0.0914, + 0.7374, 0.6494, 0.0513, 0.2661, 0.6485, 0.3876, 0.3399, + 0.5727, 0.6526, 0.6545, 0.8063, 0.5866, 0.7739, 0.1262, + 0.2849, 0.1051, 0.1115, 0.2427, 0.0104, 0.2599, 0.3134, + 0.0451, 0.7262, 0.6349, 0.5852, 0.2103, 0.4468, 0.0131, + 0.9703, 0.2087, 0.9981, 0.0746, 0.6323, 0.0776, 0.3216, + 0.8062, 0.8168, 0.8982, 0.6078, 0.4816, 0.3037, 0.1198, + 0.4605, 0.7585, 0.3262, 0.3531, 0.6379]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.7653, 0.7067, 0.4116, ..., 0.4300, 0.1070, 0.3611]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.7472190856933594 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '569391', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.146198511123657} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
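Each .output file records a short calibration sweep before the measured run: a 100-iteration probe, an intermediate count, and a final count chosen so TIME_S lands at roughly 10 seconds (here 100 -> 40520 -> 569391). The logged counts are consistent with rescaling by target/measured time plus a small safety margin; the sketch below illustrates that inferred rule, with the 1.05 factor being a guess that happens to match these numbers.

# Inferred iteration-scaling rule (matches the 100 -> 40520 -> 569391
# progression in this file; the 1.05 margin is a guess, not confirmed).
TARGET_S = 10.0
MARGIN = 1.05

def next_iteration_count(current_iters, measured_s):
    """Scale the iteration count so the next run takes roughly TARGET_S."""
    return int(current_iters * TARGET_S / measured_s * MARGIN)

print(next_iteration_count(100, 0.025912761688232422))   # -> 40520
print(next_iteration_count(40520, 0.7472190856933594))    # -> ~569391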
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 250, 250, 250]), + col_indices=tensor([ 989, 4634, 172, 296, 437, 988, 4701, 40, 3459, + 2902, 284, 2223, 3489, 379, 2336, 3854, 3801, 4913, + 1784, 189, 1121, 2886, 4344, 1044, 1507, 1629, 4228, + 950, 3157, 372, 4392, 249, 3355, 4991, 61, 3311, + 365, 3749, 2426, 4689, 420, 1130, 2303, 3276, 2058, + 3417, 2635, 1997, 4469, 149, 3640, 2817, 310, 1358, + 4005, 314, 2266, 836, 2231, 2153, 4590, 1083, 2752, + 2577, 2539, 3832, 170, 4839, 1662, 908, 3409, 160, + 1208, 2792, 1394, 3839, 404, 2657, 1041, 2651, 3377, + 3822, 3581, 2353, 3591, 2000, 4401, 4545, 4324, 3328, + 3711, 2291, 2328, 732, 536, 660, 2140, 2401, 566, + 1414, 1235, 4049, 1072, 4129, 3797, 3825, 3260, 1333, + 2653, 3617, 58, 3265, 1036, 4854, 392, 4867, 4701, + 3576, 771, 2398, 4330, 1034, 4559, 2708, 409, 3139, + 2114, 3463, 923, 763, 2766, 4868, 1142, 1459, 3024, + 2321, 1511, 1594, 1553, 98, 954, 4757, 1367, 2284, + 321, 4282, 4827, 311, 3480, 705, 1128, 255, 1664, + 653, 1381, 1987, 2729, 634, 2582, 4911, 3144, 1242, + 3821, 2906, 2900, 547, 195, 264, 1462, 3048, 2738, + 753, 4689, 302, 1125, 2387, 532, 854, 131, 4228, + 2001, 3802, 1432, 364, 2122, 3, 492, 481, 3737, + 3945, 2016, 4040, 4587, 3047, 678, 2781, 1044, 3568, + 1574, 3813, 2876, 1656, 4200, 1707, 1113, 3551, 4496, + 1942, 1480, 4429, 3975, 2412, 3934, 2906, 952, 4773, + 1043, 3314, 572, 4511, 1843, 4636, 1964, 2523, 457, + 3459, 2009, 1681, 9, 2459, 3710, 4173, 1493, 3773, + 2982, 4418, 4646, 1091, 541, 4902, 4735, 4604, 3735, + 3670, 955, 687, 2373, 4360, 1850, 1893]), + values=tensor([0.2345, 0.0289, 0.8583, 0.9123, 0.0874, 0.7501, 0.2033, + 0.8326, 0.8469, 0.1882, 0.3285, 0.3183, 0.8931, 0.0457, + 0.8868, 0.7189, 0.4379, 0.1462, 0.4719, 0.1691, 0.1099, + 0.8022, 0.0756, 0.2871, 0.6213, 0.4582, 0.2170, 0.3357, + 0.7252, 0.0149, 0.2470, 0.4898, 0.0035, 0.1331, 0.4871, + 0.7295, 0.2640, 0.3186, 0.3619, 0.0774, 0.2757, 0.9917, + 0.3749, 0.2825, 0.4846, 0.8782, 0.2242, 0.0584, 0.4269, + 0.3007, 0.5193, 0.9227, 0.9773, 0.6304, 0.0725, 0.4260, + 0.4518, 0.5456, 0.3019, 0.2067, 0.3845, 0.8768, 0.2863, + 0.4471, 0.0208, 0.9135, 0.0548, 0.1836, 0.9804, 0.3038, + 0.5045, 0.8119, 0.2476, 0.4867, 0.9780, 0.3338, 0.2853, + 0.7670, 0.4677, 0.5075, 0.3848, 0.5236, 0.0031, 0.3726, + 0.6233, 0.1936, 0.1739, 0.4139, 0.1871, 0.5920, 0.8457, + 0.8536, 0.8234, 0.3531, 0.8514, 0.1766, 0.5797, 0.3086, + 0.0545, 0.2101, 0.0864, 0.3338, 0.2356, 0.3200, 0.7401, + 0.4108, 0.5013, 0.5320, 0.4414, 0.7825, 0.0249, 0.2494, + 0.0429, 0.7080, 0.9162, 0.6423, 0.2821, 0.2742, 0.5289, + 0.2928, 0.0848, 0.8315, 0.7088, 0.8269, 0.3671, 0.5127, + 0.2282, 0.7407, 0.1379, 0.8288, 0.2763, 0.1471, 0.0918, + 0.7196, 0.6693, 0.6326, 0.9413, 0.1511, 0.6888, 0.3336, + 0.2545, 0.9984, 0.8005, 0.8337, 0.2430, 0.7476, 0.3204, + 0.0554, 0.5080, 0.0854, 0.1850, 0.7747, 0.5775, 0.2057, + 0.7868, 0.8337, 0.6964, 0.9562, 0.1725, 0.3223, 0.4786, + 0.5641, 0.5075, 0.5871, 0.6849, 0.6564, 0.2437, 0.1937, + 0.6389, 0.0952, 0.9817, 0.1000, 0.7393, 0.9387, 0.8443, + 0.9838, 0.1009, 0.7329, 0.9758, 0.9984, 0.0689, 0.6045, + 0.3081, 0.8442, 0.7079, 0.3197, 0.6314, 0.2885, 0.9946, + 0.0894, 0.3380, 0.0723, 0.8864, 0.2114, 0.6387, 0.7774, + 0.5705, 0.9374, 0.3114, 0.6458, 0.5623, 0.1687, 0.3946, + 0.8120, 0.4227, 0.8777, 0.4345, 0.8346, 0.0514, 0.7320, + 0.0137, 0.2630, 0.1970, 0.0196, 0.2035, 0.6052, 0.7403, + 0.6899, 0.2449, 0.2769, 0.3900, 0.8664, 0.9461, 0.5286, + 0.0997, 0.7438, 0.0400, 0.7885, 0.5277, 0.1693, 0.7534, 
+ 0.3649, 0.5259, 0.9420, 0.2968, 0.8974, 0.5468, 0.5308, + 0.9748, 0.7021, 0.7026, 0.1970, 0.7386, 0.9856, 0.8826, + 0.6766, 0.7905, 0.8999, 0.3805, 0.8437]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.6631, 0.6256, 0.9086, ..., 0.3830, 0.1647, 0.1472]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.146198511123657 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 250, 250, 250]), + col_indices=tensor([ 989, 4634, 172, 296, 437, 988, 4701, 40, 3459, + 2902, 284, 2223, 3489, 379, 2336, 3854, 3801, 4913, + 1784, 189, 1121, 2886, 4344, 1044, 1507, 1629, 4228, + 950, 3157, 372, 4392, 249, 3355, 4991, 61, 3311, + 365, 3749, 2426, 4689, 420, 1130, 2303, 3276, 2058, + 3417, 2635, 1997, 4469, 149, 3640, 2817, 310, 1358, + 4005, 314, 2266, 836, 2231, 2153, 4590, 1083, 2752, + 2577, 2539, 3832, 170, 4839, 1662, 908, 3409, 160, + 1208, 2792, 1394, 3839, 404, 2657, 1041, 2651, 3377, + 3822, 3581, 2353, 3591, 2000, 4401, 4545, 4324, 3328, + 3711, 2291, 2328, 732, 536, 660, 2140, 2401, 566, + 1414, 1235, 4049, 1072, 4129, 3797, 3825, 3260, 1333, + 2653, 3617, 58, 3265, 1036, 4854, 392, 4867, 4701, + 3576, 771, 2398, 4330, 1034, 4559, 2708, 409, 3139, + 2114, 3463, 923, 763, 2766, 4868, 1142, 1459, 3024, + 2321, 1511, 1594, 1553, 98, 954, 4757, 1367, 2284, + 321, 4282, 4827, 311, 3480, 705, 1128, 255, 1664, + 653, 1381, 1987, 2729, 634, 2582, 4911, 3144, 1242, + 3821, 2906, 2900, 547, 195, 264, 1462, 3048, 2738, + 753, 4689, 302, 1125, 2387, 532, 854, 131, 4228, + 2001, 3802, 1432, 364, 2122, 3, 492, 481, 3737, + 3945, 2016, 4040, 4587, 3047, 678, 2781, 1044, 3568, + 1574, 3813, 2876, 1656, 4200, 1707, 1113, 3551, 4496, + 1942, 1480, 4429, 3975, 2412, 3934, 2906, 952, 4773, + 1043, 3314, 572, 4511, 1843, 4636, 1964, 2523, 457, + 3459, 2009, 1681, 9, 2459, 3710, 4173, 1493, 3773, + 2982, 4418, 4646, 1091, 541, 4902, 4735, 4604, 3735, + 3670, 955, 687, 2373, 4360, 1850, 1893]), + values=tensor([0.2345, 0.0289, 0.8583, 0.9123, 0.0874, 0.7501, 0.2033, + 0.8326, 0.8469, 0.1882, 0.3285, 0.3183, 0.8931, 0.0457, + 0.8868, 0.7189, 0.4379, 0.1462, 0.4719, 0.1691, 0.1099, + 0.8022, 0.0756, 0.2871, 0.6213, 0.4582, 0.2170, 0.3357, + 0.7252, 0.0149, 0.2470, 0.4898, 0.0035, 0.1331, 0.4871, + 0.7295, 0.2640, 0.3186, 0.3619, 0.0774, 0.2757, 0.9917, + 0.3749, 0.2825, 0.4846, 0.8782, 0.2242, 0.0584, 0.4269, + 0.3007, 0.5193, 0.9227, 0.9773, 0.6304, 0.0725, 0.4260, + 0.4518, 0.5456, 0.3019, 0.2067, 0.3845, 0.8768, 0.2863, + 0.4471, 0.0208, 0.9135, 0.0548, 0.1836, 0.9804, 0.3038, + 0.5045, 0.8119, 0.2476, 0.4867, 0.9780, 0.3338, 0.2853, + 0.7670, 0.4677, 0.5075, 0.3848, 0.5236, 0.0031, 0.3726, + 0.6233, 0.1936, 0.1739, 0.4139, 0.1871, 0.5920, 0.8457, + 0.8536, 0.8234, 0.3531, 0.8514, 0.1766, 0.5797, 0.3086, + 0.0545, 0.2101, 0.0864, 0.3338, 0.2356, 0.3200, 0.7401, + 0.4108, 0.5013, 0.5320, 0.4414, 0.7825, 0.0249, 0.2494, + 0.0429, 0.7080, 0.9162, 0.6423, 0.2821, 0.2742, 0.5289, + 0.2928, 0.0848, 0.8315, 0.7088, 0.8269, 0.3671, 0.5127, + 0.2282, 0.7407, 0.1379, 0.8288, 0.2763, 0.1471, 0.0918, + 0.7196, 0.6693, 0.6326, 0.9413, 0.1511, 0.6888, 
0.3336, + 0.2545, 0.9984, 0.8005, 0.8337, 0.2430, 0.7476, 0.3204, + 0.0554, 0.5080, 0.0854, 0.1850, 0.7747, 0.5775, 0.2057, + 0.7868, 0.8337, 0.6964, 0.9562, 0.1725, 0.3223, 0.4786, + 0.5641, 0.5075, 0.5871, 0.6849, 0.6564, 0.2437, 0.1937, + 0.6389, 0.0952, 0.9817, 0.1000, 0.7393, 0.9387, 0.8443, + 0.9838, 0.1009, 0.7329, 0.9758, 0.9984, 0.0689, 0.6045, + 0.3081, 0.8442, 0.7079, 0.3197, 0.6314, 0.2885, 0.9946, + 0.0894, 0.3380, 0.0723, 0.8864, 0.2114, 0.6387, 0.7774, + 0.5705, 0.9374, 0.3114, 0.6458, 0.5623, 0.1687, 0.3946, + 0.8120, 0.4227, 0.8777, 0.4345, 0.8346, 0.0514, 0.7320, + 0.0137, 0.2630, 0.1970, 0.0196, 0.2035, 0.6052, 0.7403, + 0.6899, 0.2449, 0.2769, 0.3900, 0.8664, 0.9461, 0.5286, + 0.0997, 0.7438, 0.0400, 0.7885, 0.5277, 0.1693, 0.7534, + 0.3649, 0.5259, 0.9420, 0.2968, 0.8974, 0.5468, 0.5308, + 0.9748, 0.7021, 0.7026, 0.1970, 0.7386, 0.9856, 0.8826, + 0.6766, 0.7905, 0.8999, 0.3805, 0.8437]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.6631, 0.6256, 0.9086, ..., 0.3830, 0.1647, 0.1472]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.146198511123657 seconds + +[39.27, 38.69, 38.63, 39.39, 38.61, 38.66, 38.59, 38.96, 38.73, 38.93] +[65.05] +12.874769687652588 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 569391, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.146198511123657, 'TIME_S_1KI': 0.017819386873209546, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 837.5037681818008, 'W': 65.05} +[39.27, 38.69, 38.63, 39.39, 38.61, 38.66, 38.59, 38.96, 38.73, 38.93, 39.57, 38.76, 39.0, 38.65, 39.12, 39.14, 39.15, 44.31, 38.75, 39.36] +705.7049999999999 +35.28525 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 569391, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.146198511123657, 'TIME_S_1KI': 0.017819386873209546, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 837.5037681818008, 'W': 65.05, 'J_1KI': 1.4708763717406856, 'W_1KI': 0.11424486864035434, 'W_D': 29.76475, 'J_D': 383.2143010605574, 'W_D_1KI': 0.05227471105092985, 'J_D_1KI': 9.180810910416543e-05} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..1f26acc --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 1, "ITERATIONS": 520646, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.21049165725708, "TIME_S_1KI": 0.01961119773753583, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 853.8924551653862, "W": 65.17, "J_1KI": 1.6400634119255428, "W_1KI": 0.1251714216569416, "W_D": 29.995000000000005, "J_D": 393.0106520283223, "W_D_1KI": 0.05761112156820566, "J_D_1KI": 0.0001106531531370752} diff --git a/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..65d6ab4 
--- /dev/null +++ b/pytorch/output_synthetic_1core_old2/epyc_7313p_1_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.012744903564453125} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1249, 1250]), + col_indices=tensor([1621, 4974, 1997, ..., 3786, 4849, 461]), + values=tensor([0.2109, 0.3256, 0.0266, ..., 0.3581, 0.6264, 0.0778]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.8357, 0.0383, 0.1188, ..., 0.4462, 0.9461, 0.1099]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.012744903564453125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '82385', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.6614789962768555} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([1213, 1571, 1960, ..., 2843, 4867, 4843]), + values=tensor([0.3029, 0.3061, 0.3000, ..., 0.6016, 0.9759, 0.4960]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.1072, 0.2899, 0.3055, ..., 0.7146, 0.5978, 0.9959]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 1.6614789962768555 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '520646', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.21049165725708} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([ 472, 3691, 4268, ..., 1601, 3041, 533]), + values=tensor([0.9317, 0.8516, 0.8376, ..., 0.6191, 0.8435, 0.3776]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.6520, 0.6755, 0.7512, ..., 0.2262, 0.3599, 0.0025]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.21049165725708 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([ 472, 3691, 4268, ..., 1601, 3041, 533]), + values=tensor([0.9317, 0.8516, 0.8376, ..., 0.6191, 0.8435, 0.3776]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.6520, 0.6755, 0.7512, ..., 0.2262, 0.3599, 0.0025]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.21049165725708 seconds + +[39.48, 39.4, 39.17, 39.09, 40.79, 38.97, 38.82, 38.72, 38.81, 38.71] +[65.17] +13.102538824081421 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 520646, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.21049165725708, 'TIME_S_1KI': 0.01961119773753583, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 853.8924551653862, 'W': 65.17} +[39.48, 39.4, 39.17, 39.09, 40.79, 38.97, 38.82, 38.72, 38.81, 38.71, 39.49, 39.17, 38.98, 38.86, 39.13, 38.88, 38.85, 38.81, 38.77, 38.88] +703.5 +35.175 +{'CPU': 'Epyc 7313P', 'CORES': 1, 'ITERATIONS': 520646, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.21049165725708, 'TIME_S_1KI': 0.01961119773753583, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 853.8924551653862, 'W': 65.17, 'J_1KI': 1.6400634119255428, 'W_1KI': 0.1251714216569416, 'W_D': 29.995000000000005, 'J_D': 393.0106520283223, 'W_D_1KI': 0.05761112156820566, 'J_D_1KI': 0.0001106531531370752} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..3a94ff3 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 3659, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.513276815414429, "TIME_S_1KI": 2.8732650493070317, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 699.5738222312928, "W": 48.46, "J_1KI": 191.19262701046534, "W_1KI": 13.244055752937962, "W_D": 31.944000000000003, "J_D": 461.147052772522, "W_D_1KI": 8.73025416780541, "J_D_1KI": 2.3859672500151436} diff --git 
a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..3030a8d --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.3051443099975586} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 20, ..., 999978, + 999988, 1000000]), + col_indices=tensor([10874, 16180, 25759, ..., 85120, 90595, 97571]), + values=tensor([0.6980, 0.1450, 0.2222, ..., 0.0442, 0.2876, 0.2305]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.9393, 0.9771, 0.4381, ..., 0.2097, 0.8256, 0.0395]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.3051443099975586 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3440', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.870911598205566} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 16, 29, ..., 999980, + 999991, 1000000]), + col_indices=tensor([ 5523, 13716, 16446, ..., 89337, 96388, 97674]), + values=tensor([0.9883, 0.0360, 0.0063, ..., 0.9506, 0.8956, 0.5971]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2735, 0.3671, 0.2374, ..., 0.9952, 0.6404, 0.3809]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 9.870911598205566 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '3659', '-ss', '100000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.513276815414429} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 15, 23, ..., 999980, + 999992, 1000000]), + col_indices=tensor([ 3420, 12508, 17596, ..., 74140, 75972, 84324]), + values=tensor([0.2457, 0.5369, 0.4041, ..., 0.2835, 0.7746, 0.0854]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.1485, 0.5045, 0.7097, ..., 0.5250, 0.4505, 0.4467]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.513276815414429 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 15, 23, ..., 999980, + 999992, 1000000]), + col_indices=tensor([ 3420, 12508, 17596, ..., 74140, 75972, 84324]), + values=tensor([0.2457, 0.5369, 0.4041, ..., 0.2835, 0.7746, 0.0854]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.1485, 0.5045, 0.7097, ..., 0.5250, 0.4505, 0.4467]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.513276815414429 seconds + +[19.03, 17.89, 18.11, 18.19, 17.92, 19.96, 20.76, 18.13, 18.16, 18.25] +[48.46] +14.436108589172363 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3659, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.513276815414429, 'TIME_S_1KI': 2.8732650493070317, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 699.5738222312928, 'W': 48.46} +[19.03, 17.89, 18.11, 18.19, 17.92, 19.96, 20.76, 18.13, 18.16, 18.25, 18.14, 18.17, 18.05, 17.87, 18.21, 18.32, 17.88, 17.89, 18.02, 18.16] +330.31999999999994 +16.516 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 3659, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.513276815414429, 'TIME_S_1KI': 2.8732650493070317, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 699.5738222312928, 'W': 48.46, 'J_1KI': 191.19262701046534, 'W_1KI': 13.244055752937962, 'W_D': 31.944000000000003, 'J_D': 461.147052772522, 'W_D_1KI': 8.73025416780541, 'J_D_1KI': 2.3859672500151436} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..253d722 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 376, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.408750772476196, "TIME_S_1KI": 27.68284779913882, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 864.391910111904, "W": 48.41, "J_1KI": 2298.914654552936, "W_1KI": 128.75, "W_D": 32.1215, "J_D": 573.5501908832788, "W_D_1KI": 85.42952127659574, "J_D_1KI": 227.20617360796737} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..8c93db0 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.7904272079467773} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 83, 173, ..., 9999788, + 9999895, 10000000]), + col_indices=tensor([ 687, 1990, 2832, ..., 93491, 98909, 99713]), + values=tensor([0.2182, 0.4312, 0.1873, ..., 0.5994, 0.5663, 0.3895]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6218, 0.2935, 0.0099, ..., 0.4944, 0.2399, 0.4191]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 2.7904272079467773 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '376', '-ss', '100000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.408750772476196} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 101, 179, ..., 9999785, + 9999885, 10000000]), + col_indices=tensor([ 974, 1017, 1175, ..., 97865, 98322, 99037]), + values=tensor([0.8598, 0.2680, 0.8943, ..., 0.1763, 0.5676, 0.4916]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4049, 0.5573, 0.2557, ..., 0.0615, 0.7671, 0.4849]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.408750772476196 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 101, 179, ..., 9999785, + 9999885, 10000000]), + col_indices=tensor([ 974, 1017, 1175, ..., 97865, 98322, 99037]), + values=tensor([0.8598, 0.2680, 0.8943, ..., 0.1763, 0.5676, 0.4916]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4049, 0.5573, 0.2557, ..., 0.0615, 0.7671, 0.4849]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.408750772476196 seconds + +[18.19, 17.86, 18.15, 17.76, 18.57, 17.97, 17.88, 17.91, 18.5, 18.33] +[48.41] +17.855647802352905 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 376, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.408750772476196, 'TIME_S_1KI': 27.68284779913882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 864.391910111904, 'W': 48.41} +[18.19, 17.86, 18.15, 17.76, 18.57, 17.97, 17.88, 17.91, 18.5, 18.33, 18.68, 17.76, 17.89, 18.01, 18.9, 17.73, 18.37, 17.97, 18.0, 17.88] +325.77 +16.2885 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 376, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.408750772476196, 'TIME_S_1KI': 27.68284779913882, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 864.391910111904, 'W': 48.41, 'J_1KI': 2298.914654552936, 'W_1KI': 128.75, 'W_D': 32.1215, 'J_D': 573.5501908832788, 'W_D_1KI': 85.42952127659574, 'J_D_1KI': 227.20617360796737} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..9c4b03d --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 8087, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.519959688186646, "TIME_S_1KI": 1.300848236451916, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 670.7503997087479, "W": 46.91, "J_1KI": 82.94180780372794, "W_1KI": 5.800667738345492, "W_D": 30.164499999999997, "J_D": 431.3120961844921, "W_D_1KI": 3.729998763447508, "J_D_1KI": 0.46123392648046346} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..cc342ed --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.14809489250183105} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 99993, 99994, + 100000]), + col_indices=tensor([11566, 2001, 14819, ..., 49184, 52555, 95716]), + values=tensor([0.6903, 0.1382, 0.4591, ..., 0.3067, 0.8088, 0.6364]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.9563, 0.6034, 0.0890, ..., 0.8548, 0.6115, 0.7911]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.14809489250183105 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7090', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.205029487609863} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 3, ..., 99999, 99999, + 100000]), + col_indices=tensor([ 7711, 16815, 22150, ..., 77554, 50594, 27282]), + values=tensor([0.3735, 0.5582, 0.2278, ..., 0.2317, 0.9623, 0.5188]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8847, 0.2063, 0.0570, ..., 0.4149, 0.1346, 0.4208]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 9.205029487609863 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8087', '-ss', '100000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.519959688186646} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 100000, 100000, + 100000]), + col_indices=tensor([95443, 50058, 77222, ..., 43317, 3451, 10339]), + values=tensor([0.1078, 0.8522, 0.7935, ..., 0.8133, 0.3945, 0.6126]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8231, 0.3876, 0.1205, ..., 0.1479, 0.8608, 0.1605]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.519959688186646 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 100000, 100000, + 100000]), + col_indices=tensor([95443, 50058, 77222, ..., 43317, 3451, 10339]), + values=tensor([0.1078, 0.8522, 0.7935, ..., 0.8133, 0.3945, 0.6126]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8231, 0.3876, 0.1205, ..., 0.1479, 0.8608, 0.1605]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.519959688186646 seconds + +[18.45, 18.35, 18.06, 17.99, 17.93, 18.06, 18.08, 17.93, 22.44, 18.22] +[46.91] +14.298665523529053 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8087, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.519959688186646, 'TIME_S_1KI': 1.300848236451916, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 670.7503997087479, 'W': 46.91} +[18.45, 18.35, 18.06, 17.99, 17.93, 18.06, 18.08, 17.93, 22.44, 18.22, 19.6, 17.81, 18.45, 17.9, 22.14, 18.02, 18.39, 17.96, 18.34, 17.85] +334.90999999999997 +16.7455 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 8087, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.519959688186646, 'TIME_S_1KI': 1.300848236451916, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 670.7503997087479, 'W': 46.91, 'J_1KI': 82.94180780372794, 'W_1KI': 5.800667738345492, 'W_D': 30.164499999999997, 'J_D': 431.3120961844921, 'W_D_1KI': 3.729998763447508, 'J_D_1KI': 0.46123392648046346} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..0e52b1a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4760, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.509625434875488, "TIME_S_1KI": 2.2079045031251026, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 685.1951848602295, "W": 48.04, "J_1KI": 143.94856824794735, "W_1KI": 10.092436974789916, "W_D": 31.590249999999997, "J_D": 450.57217295026777, "W_D_1KI": 6.636607142857142, 
"J_D_1KI": 1.3942451980792314} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..e5c487f --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.2381742000579834} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 499988, 499996, + 500000]), + col_indices=tensor([ 7829, 21471, 22951, ..., 29509, 41224, 66852]), + values=tensor([0.9739, 0.6225, 0.8607, ..., 0.0619, 0.3093, 0.0510]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.6694, 0.1094, 0.7903, ..., 0.4860, 0.7386, 0.2172]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.2381742000579834 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4408', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.722268104553223} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 8, ..., 499991, 499997, + 500000]), + col_indices=tensor([ 8468, 11831, 46487, ..., 65418, 70471, 71020]), + values=tensor([0.5611, 0.2625, 0.0139, ..., 0.7643, 0.0263, 0.5630]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.6021, 0.3259, 0.4454, ..., 0.1291, 0.3066, 0.3093]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 9.722268104553223 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4760', '-ss', '100000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.509625434875488} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 499986, 499991, + 500000]), + col_indices=tensor([37454, 51813, 86506, ..., 62954, 73906, 92773]), + values=tensor([0.8256, 0.7091, 0.8154, ..., 0.4160, 0.7952, 0.1689]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.1880, 0.2475, 0.0895, ..., 0.2917, 0.5906, 0.9519]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.509625434875488 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 7, ..., 499986, 499991, + 500000]), + col_indices=tensor([37454, 51813, 86506, ..., 62954, 73906, 92773]), + values=tensor([0.8256, 0.7091, 0.8154, ..., 0.4160, 0.7952, 0.1689]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.1880, 0.2475, 0.0895, ..., 0.2917, 0.5906, 0.9519]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.509625434875488 seconds + +[18.07, 18.03, 18.14, 17.98, 17.8, 18.31, 18.47, 18.23, 18.16, 17.92] +[48.04] +14.26301383972168 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4760, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.509625434875488, 'TIME_S_1KI': 2.2079045031251026, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 685.1951848602295, 'W': 48.04} +[18.07, 18.03, 18.14, 17.98, 17.8, 18.31, 18.47, 18.23, 18.16, 17.92, 18.33, 17.81, 18.24, 18.05, 18.09, 17.75, 17.78, 21.35, 18.67, 17.95] +328.995 +16.44975 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4760, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.509625434875488, 'TIME_S_1KI': 2.2079045031251026, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 685.1951848602295, 'W': 48.04, 'J_1KI': 143.94856824794735, 'W_1KI': 10.092436974789916, 'W_D': 31.590249999999997, 'J_D': 450.57217295026777, 'W_D_1KI': 6.636607142857142, 'J_D_1KI': 1.3942451980792314} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..20b7d93 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 84718, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.541555881500244, "TIME_S_1KI": 0.12443112303761, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 661.3758532905579, "W": 46.44, "J_1KI": 7.80679257407585, "W_1KI": 0.5481715810099388, "W_D": 30.123999999999995, "J_D": 429.01133084678645, "W_D_1KI": 0.3555796879057579, "J_D_1KI": 0.004197215325028422} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..79fd885 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.028100967407226562} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 10000, 10000]), + col_indices=tensor([1696, 2591, 3015, ..., 1730, 8585, 3790]), + values=tensor([0.6837, 0.7697, 0.7550, ..., 0.1323, 0.4514, 0.4553]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.1557, 0.9230, 0.6401, ..., 0.3725, 0.8926, 0.6402]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.028100967407226562 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '37365', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.631014823913574} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 4, ..., 9998, 9998, 10000]), + col_indices=tensor([2386, 8388, 9261, ..., 1344, 2569, 4425]), + values=tensor([0.6000, 0.2415, 0.7139, ..., 0.1197, 0.4001, 0.0791]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.9194, 0.6903, 0.0708, ..., 0.1917, 0.6424, 0.6800]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 4.631014823913574 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '84718', '-ss', '10000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.541555881500244} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9999, 10000]), + col_indices=tensor([6630, 3375, 966, ..., 5451, 76, 624]), + values=tensor([0.0314, 0.6841, 0.2123, ..., 0.3011, 0.8872, 0.9156]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.4846, 0.1841, 0.4323, ..., 0.0718, 0.1957, 0.5902]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.541555881500244 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9999, 10000]), + col_indices=tensor([6630, 3375, 966, ..., 5451, 76, 624]), + values=tensor([0.0314, 0.6841, 0.2123, ..., 0.3011, 0.8872, 0.9156]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.4846, 0.1841, 0.4323, ..., 0.0718, 0.1957, 0.5902]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.541555881500244 seconds + +[18.42, 18.02, 18.15, 18.67, 18.24, 18.06, 18.05, 17.99, 18.11, 18.07] +[46.44] +14.241512775421143 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 84718, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.541555881500244, 'TIME_S_1KI': 0.12443112303761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.3758532905579, 'W': 46.44} +[18.42, 18.02, 18.15, 18.67, 18.24, 18.06, 18.05, 17.99, 18.11, 18.07, 18.36, 18.1, 17.99, 18.16, 18.02, 18.13, 18.12, 18.03, 18.02, 18.07] +326.32000000000005 +16.316000000000003 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 84718, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.541555881500244, 'TIME_S_1KI': 0.12443112303761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.3758532905579, 'W': 46.44, 'J_1KI': 7.80679257407585, 'W_1KI': 0.5481715810099388, 'W_D': 30.123999999999995, 'J_D': 429.01133084678645, 'W_D_1KI': 0.3555796879057579, 'J_D_1KI': 0.004197215325028422} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..e07fffc --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 34651, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.471668004989624, "TIME_S_1KI": 0.30220391922281103, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 677.0323536157608, "W": 47.45, "J_1KI": 19.53860937969354, "W_1KI": 1.369368849383856, "W_D": 30.828750000000007, "J_D": 439.8748402851821, "W_D_1KI": 0.8896929381547433, "J_D_1KI": 0.025675822866720825} diff --git 
a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..ff06711 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.04611039161682129} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 22, ..., 99983, 99992, + 100000]), + col_indices=tensor([ 845, 1153, 1508, ..., 8313, 9367, 9854]), + values=tensor([0.8746, 0.4039, 0.9243, ..., 0.5657, 0.2713, 0.6449]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.3540, 0.2628, 0.4314, ..., 0.0912, 0.3507, 0.8651]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.04611039161682129 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22771', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 6.900022268295288} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 19, ..., 99978, 99991, + 100000]), + col_indices=tensor([ 917, 1959, 2965, ..., 8075, 8263, 9058]), + values=tensor([0.2271, 0.8712, 0.9636, ..., 0.2167, 0.1262, 0.7253]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8029, 0.1325, 0.2655, ..., 0.5832, 0.4718, 0.3144]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 6.900022268295288 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '34651', '-ss', '10000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.471668004989624} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 99994, 99997, + 100000]), + col_indices=tensor([ 24, 1396, 2236, ..., 5590, 6310, 9874]), + values=tensor([0.4982, 0.6812, 0.1465, ..., 0.3747, 0.0311, 0.9162]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.7533, 0.0703, 0.6276, ..., 0.6008, 0.2603, 0.3256]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.471668004989624 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 99994, 99997, + 100000]), + col_indices=tensor([ 24, 1396, 2236, ..., 5590, 6310, 9874]), + values=tensor([0.4982, 0.6812, 0.1465, ..., 0.3747, 0.0311, 0.9162]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.7533, 0.0703, 0.6276, ..., 0.6008, 0.2603, 0.3256]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.471668004989624 seconds + +[18.34, 18.19, 18.22, 18.02, 22.16, 18.64, 18.12, 17.87, 17.95, 18.81] +[47.45] +14.26833200454712 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34651, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.471668004989624, 'TIME_S_1KI': 0.30220391922281103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 677.0323536157608, 'W': 47.45} +[18.34, 18.19, 18.22, 18.02, 22.16, 18.64, 18.12, 17.87, 17.95, 18.81, 22.38, 17.93, 18.41, 17.77, 18.07, 17.88, 18.52, 17.88, 18.1, 17.86] +332.42499999999995 +16.621249999999996 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 34651, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.471668004989624, 'TIME_S_1KI': 0.30220391922281103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 677.0323536157608, 'W': 47.45, 'J_1KI': 19.53860937969354, 'W_1KI': 1.369368849383856, 'W_D': 30.828750000000007, 'J_D': 439.8748402851821, 'W_D_1KI': 0.8896929381547433, 'J_D_1KI': 0.025675822866720825} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..89c422b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 5583, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.513006925582886, "TIME_S_1KI": 1.8830390337780558, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 692.6418607521057, "W": 47.919999999999995, "J_1KI": 124.06266536845884, "W_1KI": 8.58319899695504, "W_D": 31.175249999999995, "J_D": 450.611084503591, "W_D_1KI": 5.583960236432024, "J_D_1KI": 1.0001719929127753} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..d152642 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.2040116786956787} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 106, 193, ..., 999811, + 999905, 1000000]), + col_indices=tensor([ 107, 139, 344, ..., 9485, 9560, 9767]), + values=tensor([0.2657, 0.7219, 0.2773, ..., 0.6022, 0.5377, 0.2291]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8604, 0.1859, 0.3719, ..., 0.6286, 0.9460, 0.3185]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.2040116786956787 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5146', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.677676439285278} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 90, 186, ..., 999811, + 999907, 1000000]), + col_indices=tensor([ 74, 208, 311, ..., 9654, 9863, 9976]), + values=tensor([0.0395, 0.4059, 0.0831, ..., 0.6188, 0.9591, 0.8953]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.6360, 0.9265, 0.4313, ..., 0.6926, 0.7242, 0.0651]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 9.677676439285278 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5583', '-ss', '10000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.513006925582886} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 104, 207, ..., 999792, + 999898, 1000000]), + col_indices=tensor([ 168, 206, 240, ..., 9827, 9842, 9996]), + values=tensor([0.8276, 0.5768, 0.6424, ..., 0.0752, 0.7475, 0.3129]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.7571, 0.4178, 0.1860, ..., 0.0563, 0.6255, 0.7203]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.513006925582886 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 104, 207, ..., 999792, + 999898, 1000000]), + col_indices=tensor([ 168, 206, 240, ..., 9827, 9842, 9996]), + values=tensor([0.8276, 0.5768, 0.6424, ..., 0.0752, 0.7475, 0.3129]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.7571, 0.4178, 0.1860, ..., 0.0563, 0.6255, 0.7203]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.513006925582886 seconds + +[18.38, 17.86, 18.24, 17.87, 19.06, 17.88, 18.29, 21.74, 18.54, 18.08] +[47.92] +14.454128980636597 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5583, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.513006925582886, 'TIME_S_1KI': 1.8830390337780558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.6418607521057, 'W': 47.919999999999995} +[18.38, 17.86, 18.24, 17.87, 19.06, 17.88, 18.29, 21.74, 18.54, 18.08, 18.58, 17.99, 17.96, 22.09, 18.22, 17.97, 17.93, 18.49, 18.13, 18.23] +334.895 +16.74475 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 5583, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.513006925582886, 'TIME_S_1KI': 1.8830390337780558, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 692.6418607521057, 'W': 47.919999999999995, 'J_1KI': 124.06266536845884, 'W_1KI': 8.58319899695504, 'W_D': 31.175249999999995, 'J_D': 450.611084503591, 'W_D_1KI': 5.583960236432024, 'J_D_1KI': 1.0001719929127753} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..20cc2bc --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 959, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.294347047805786, "TIME_S_1KI": 10.73445990386422, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 792.9360737204552, "W": 48.35, "J_1KI": 826.8363646720074, "W_1KI": 50.417101147028156, "W_D": 31.9585, "J_D": 524.1168047982454, "W_D_1KI": 33.324817518248175, "J_D_1KI": 34.74954902841311} 
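The raw lines at the end of each .output file (a list of idle-power samples, a single package-power reading, and an elapsed time) feed the derived keys in the final dict. The helper below is a minimal sketch of that arithmetic as it appears in these logs; derived_metrics is an illustrative name, not code from this repository, and the field names simply mirror the JSON keys.

    # Sketch only: relate the derived JSON fields to the raw measurements above them.
    def derived_metrics(result, idle_samples, package_w, elapsed_s):
        """Fill in J, W_D, J_D and the *_1KI fields from the measured quantities."""
        per_ki = result["ITERATIONS"] / 1000.0           # "per 1000 iterations" scale
        idle_w = sum(idle_samples) / len(idle_samples)   # e.g. 332.425 / 20 = 16.62125 W
        result["W"] = package_w
        result["J"] = package_w * elapsed_s              # energy over the measured window
        result["TIME_S_1KI"] = result["TIME_S"] / per_ki
        result["J_1KI"] = result["J"] / per_ki
        result["W_1KI"] = package_w / per_ki
        result["W_D"] = package_w - idle_w               # power above the idle baseline
        result["J_D"] = result["W_D"] * elapsed_s
        result["W_D_1KI"] = result["W_D"] / per_ki
        result["J_D_1KI"] = result["J_D"] / per_ki
        return result

As a check against the density-0.05 run that follows: W_D = 48.35 - 16.3915 = 31.9585 W and J = 48.35 x 16.3999 s = 792.94 J, matching the values recorded in its dict.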
diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..643494f --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.0941581726074219} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 514, 1009, ..., 4998988, + 4999478, 5000000]), + col_indices=tensor([ 8, 23, 83, ..., 9969, 9982, 9990]), + values=tensor([0.1393, 0.4453, 0.1108, ..., 0.3215, 0.7885, 0.8444]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.3317, 0.0622, 0.5595, ..., 0.2290, 0.2268, 0.9236]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 1.0941581726074219 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '959', '-ss', '10000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.294347047805786} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 514, 994, ..., 4999000, + 4999500, 5000000]), + col_indices=tensor([ 13, 16, 23, ..., 9955, 9988, 9993]), + values=tensor([0.2414, 0.9977, 0.9772, ..., 0.9200, 0.6029, 0.8714]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0530, 0.1017, 0.7510, ..., 0.2543, 0.0728, 0.9686]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.294347047805786 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 514, 994, ..., 4999000, + 4999500, 5000000]), + col_indices=tensor([ 13, 16, 23, ..., 9955, 9988, 9993]), + values=tensor([0.2414, 0.9977, 0.9772, ..., 0.9200, 0.6029, 0.8714]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0530, 0.1017, 0.7510, ..., 0.2543, 0.0728, 0.9686]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.294347047805786 seconds + +[18.47, 18.05, 17.89, 17.81, 18.54, 18.75, 17.99, 17.87, 18.49, 18.81] +[48.35] +16.399918794631958 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 959, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.294347047805786, 'TIME_S_1KI': 10.73445990386422, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.9360737204552, 'W': 48.35} +[18.47, 18.05, 17.89, 17.81, 18.54, 18.75, 17.99, 17.87, 18.49, 18.81, 18.47, 17.8, 17.9, 18.12, 18.16, 18.55, 18.0, 18.63, 18.45, 17.91] +327.83 +16.3915 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 959, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.294347047805786, 'TIME_S_1KI': 10.73445990386422, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.9360737204552, 'W': 48.35, 'J_1KI': 826.8363646720074, 'W_1KI': 50.417101147028156, 'W_D': 31.9585, 'J_D': 524.1168047982454, 'W_D_1KI': 33.324817518248175, 'J_D_1KI': 34.74954902841311} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..a3bba53 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 389, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.411779165267944, "TIME_S_1KI": 26.765499139506282, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 876.1174001097679, "W": 47.85, "J_1KI": 2252.2298203335936, "W_1KI": 123.00771208226222, "W_D": 31.457250000000002, "J_D": 575.971663210094, "W_D_1KI": 80.86696658097686, "J_D_1KI": 207.88423285598165} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..086f941 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.693603992462158} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta 
state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1004, 2049, ..., 9997955, + 9998949, 10000000]), + col_indices=tensor([ 1, 3, 35, ..., 9984, 9987, 9993]), + values=tensor([0.3631, 0.8073, 0.7190, ..., 0.1286, 0.7057, 0.1104]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7885, 0.1169, 0.1101, ..., 0.4416, 0.5822, 0.3212]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 2.693603992462158 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '389', '-ss', '10000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.411779165267944} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 966, 1941, ..., 9997989, + 9998973, 10000000]), + col_indices=tensor([ 2, 11, 26, ..., 9956, 9965, 9978]), + values=tensor([0.0342, 0.9218, 0.6993, ..., 0.4506, 0.1146, 0.2093]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6037, 0.3505, 0.1319, ..., 0.1315, 0.2126, 0.8791]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.411779165267944 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 966, 1941, ..., 9997989, + 9998973, 10000000]), + col_indices=tensor([ 2, 11, 26, ..., 9956, 9965, 9978]), + values=tensor([0.0342, 0.9218, 0.6993, ..., 0.4506, 0.1146, 0.2093]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6037, 0.3505, 0.1319, ..., 0.1315, 0.2126, 0.8791]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.411779165267944 seconds + +[18.87, 18.17, 18.37, 17.93, 17.97, 18.29, 18.55, 17.92, 18.13, 17.84] +[47.85] +18.30966353416443 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 389, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.411779165267944, 'TIME_S_1KI': 26.765499139506282, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 876.1174001097679, 'W': 47.85} +[18.87, 18.17, 18.37, 17.93, 17.97, 18.29, 18.55, 17.92, 18.13, 17.84, 19.36, 18.07, 17.99, 17.91, 18.82, 18.49, 18.1, 18.04, 18.09, 17.96] +327.855 +16.39275 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 389, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.411779165267944, 'TIME_S_1KI': 26.765499139506282, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 876.1174001097679, 'W': 47.85, 'J_1KI': 2252.2298203335936, 'W_1KI': 123.00771208226222, 'W_D': 31.457250000000002, 'J_D': 575.971663210094, 'W_D_1KI': 80.86696658097686, 'J_D_1KI': 207.88423285598165} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..9e5a000 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 193, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.419133424758911, "TIME_S_1KI": 53.98514727854358, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1480.8600453448294, "W": 45.41, "J_1KI": 7672.849975879945, "W_1KI": 235.28497409326422, "W_D": 29.142749999999996, "J_D": 950.3707132013438, "W_D_1KI": 150.9987046632124, "J_D_1KI": 782.3767080995461} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..f051b42 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.42013144493103} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2043, 4027, ..., 19995921, + 19998006, 20000000]), + col_indices=tensor([ 9, 15, 16, ..., 9989, 9991, 9993]), + values=tensor([0.8685, 0.2737, 0.0800, ..., 0.3440, 0.3550, 0.7008]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.2841, 0.6439, 0.8852, ..., 0.0124, 0.9656, 0.3759]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 5.42013144493103 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '193', '-ss', '10000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.419133424758911} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1925, 3946, ..., 19996050, + 19998038, 20000000]), + col_indices=tensor([ 4, 9, 22, ..., 9994, 9995, 9997]), + values=tensor([0.4143, 0.0158, 0.3991, ..., 0.4975, 0.2189, 0.5132]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.4618, 0.1550, 0.6479, ..., 0.8342, 0.8619, 0.7737]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.419133424758911 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1925, 3946, ..., 19996050, + 19998038, 20000000]), + col_indices=tensor([ 4, 9, 22, ..., 9994, 9995, 9997]), + values=tensor([0.4143, 0.0158, 0.3991, ..., 0.4975, 0.2189, 0.5132]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.4618, 0.1550, 0.6479, ..., 0.8342, 0.8619, 0.7737]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.419133424758911 seconds + +[18.6, 18.13, 18.25, 17.88, 17.94, 17.94, 18.23, 18.23, 18.37, 17.79] +[45.41] +32.61087965965271 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.419133424758911, 'TIME_S_1KI': 53.98514727854358, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1480.8600453448294, 'W': 45.41} +[18.6, 18.13, 18.25, 17.88, 17.94, 17.94, 18.23, 18.23, 18.37, 17.79, 18.29, 17.94, 17.98, 17.82, 18.04, 18.23, 17.96, 17.98, 18.11, 17.95] +325.345 +16.26725 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 193, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.419133424758911, 'TIME_S_1KI': 53.98514727854358, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1480.8600453448294, 'W': 45.41, 'J_1KI': 7672.849975879945, 'W_1KI': 235.28497409326422, 'W_D': 29.142749999999996, 'J_D': 950.3707132013438, 'W_D_1KI': 150.9987046632124, 'J_D_1KI': 782.3767080995461} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..28c2d57 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.80802845954895, "TIME_S_1KI": 108.0802845954895, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3840.7128683352466, "W": 37.19, "J_1KI": 38407.128683352465, "W_1KI": 371.9, "W_D": 20.811749999999996, "J_D": 2149.286260757625, "W_D_1KI": 208.11749999999995, "J_D_1KI": 2081.1749999999997} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..5b24363 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,45 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.80802845954895} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2964, 5988, ..., 29994017, + 29996992, 30000000]), + col_indices=tensor([ 4, 9, 14, ..., 9990, 9993, 9995]), + values=tensor([0.5510, 0.0884, 0.7125, ..., 0.7844, 0.3492, 0.1801]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.7648, 0.2589, 0.8570, ..., 0.0438, 0.7014, 0.5513]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.80802845954895 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2964, 5988, ..., 29994017, + 29996992, 30000000]), + col_indices=tensor([ 4, 9, 14, ..., 9990, 9993, 9995]), + values=tensor([0.5510, 0.0884, 0.7125, ..., 0.7844, 0.3492, 0.1801]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.7648, 0.2589, 0.8570, ..., 0.0438, 0.7014, 0.5513]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.80802845954895 seconds + +[18.36, 18.76, 18.08, 17.8, 18.22, 18.13, 18.92, 17.85, 18.59, 18.05] +[37.19] +103.27273106575012 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.80802845954895, 'TIME_S_1KI': 108.0802845954895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3840.7128683352466, 'W': 37.19} +[18.36, 18.76, 18.08, 17.8, 18.22, 18.13, 18.92, 17.85, 18.59, 18.05, 18.39, 18.01, 18.68, 17.8, 18.23, 17.94, 17.9, 17.92, 18.33, 18.01] +327.56500000000005 +16.37825 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.80802845954895, 'TIME_S_1KI': 108.0802845954895, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3840.7128683352466, 'W': 37.19, 'J_1KI': 38407.128683352465, 'W_1KI': 371.9, 'W_D': 20.811749999999996, 'J_D': 2149.286260757625, 'W_D_1KI': 208.11749999999995, 'J_D_1KI': 2081.1749999999997} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..d3d85cb --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 223318, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.272708654403687, "TIME_S_1KI": 0.046000361163917314, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, 
"J": 661.7633163213729, "W": 46.54, "J_1KI": 2.963322778823798, "W_1KI": 0.2084023679237679, "W_D": 29.777749999999997, "J_D": 423.41690143078563, "W_D_1KI": 0.13334236380408207, "J_D_1KI": 0.0005970963549919042} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..404ad45 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1307 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.02031111717224121} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([4137, 7377, 7043, 6108, 9323, 897, 9654, 227, 2578, + 4698, 9915, 4015, 9949, 2349, 8008, 1966, 4875, 4040, + 6979, 2450, 7633, 6619, 9795, 2725, 5746, 5264, 4390, + 2139, 8315, 3676, 8258, 2550, 7703, 6861, 8246, 1325, + 6548, 4010, 3106, 8161, 3532, 2108, 7993, 386, 7921, + 2704, 9957, 7073, 3331, 9744, 4980, 6346, 1498, 1404, + 8824, 1275, 9681, 2862, 4082, 1465, 5274, 7840, 5924, + 9483, 98, 2679, 7843, 1423, 2659, 4200, 7882, 4396, + 8968, 5460, 7766, 9614, 1590, 8686, 7557, 1225, 5913, + 5155, 9823, 7515, 4242, 147, 5927, 2787, 5542, 7655, + 5944, 2866, 5695, 9408, 3998, 7348, 7456, 5035, 7898, + 6129, 2807, 3798, 4059, 6217, 4055, 6985, 8048, 2902, + 2337, 461, 3316, 7537, 42, 5645, 9082, 1247, 3998, + 9775, 8363, 2735, 93, 476, 9970, 9170, 4926, 3860, + 2000, 8258, 8990, 1304, 1955, 2898, 4931, 9660, 749, + 7970, 1430, 136, 1728, 3493, 5173, 838, 9924, 9475, + 9208, 3918, 2507, 3992, 2000, 7754, 5632, 87, 7861, + 1028, 9056, 3445, 6453, 2164, 8030, 8837, 3400, 1991, + 5976, 7138, 4278, 6917, 8431, 3928, 8200, 9576, 544, + 3683, 3997, 1043, 2590, 5349, 7018, 58, 9350, 6692, + 1721, 3408, 5604, 655, 7107, 7982, 4650, 1645, 8377, + 5270, 5374, 9226, 6822, 6438, 8163, 2878, 5855, 3643, + 4380, 8884, 7438, 20, 8087, 7305, 2720, 1905, 9868, + 6635, 715, 5790, 888, 2709, 5700, 3461, 8946, 8488, + 9562, 8957, 6895, 1530, 8225, 2482, 9848, 6363, 60, + 9048, 4834, 584, 7029, 2291, 6622, 5454, 1482, 6564, + 7253, 4134, 4215, 7307, 255, 2256, 1308, 8832, 24, + 420, 4060, 3545, 3071, 2732, 6945, 8909, 1551, 7964, + 4210, 6238, 8532, 2620, 6983, 8405, 3644, 369, 8495, + 2851, 3884, 8614, 2495, 5435, 1497, 6514, 5937, 4865, + 7874, 7518, 9410, 726, 6738, 5623, 3151, 6922, 7926, + 2790, 1189, 762, 7808, 8573, 5170, 8000, 1707, 6332, + 7054, 4243, 6417, 3948, 1132, 1131, 6589, 5174, 60, + 223, 7683, 4637, 4586, 8258, 6780, 8920, 2046, 1612, + 5178, 8919, 7992, 5147, 6177, 3273, 9105, 8454, 3669, + 6494, 4848, 1427, 8432, 5755, 6577, 3626, 331, 3914, + 3636, 8285, 5491, 2909, 3115, 3372, 4727, 2186, 9534, + 5635, 7951, 3172, 9667, 3745, 
5270, 428, 6615, 7258, + 1963, 3714, 6510, 2116, 2898, 5537, 481, 5467, 1168, + 6070, 9569, 7666, 9538, 6782, 3677, 4998, 780, 3405, + 8522, 5807, 9775, 4531, 4145, 9593, 484, 6152, 5639, + 6746, 3067, 8640, 5539, 6804, 5763, 6878, 8527, 9567, + 2461, 88, 3812, 7527, 1732, 2995, 1302, 5999, 4401, + 1636, 9368, 9766, 8129, 7690, 5557, 6058, 8415, 2316, + 3836, 7654, 4129, 8130, 662, 8048, 4078, 5310, 3394, + 697, 8499, 7954, 2695, 9511, 7314, 7366, 7641, 6823, + 6140, 7617, 8756, 7529, 926, 8480, 7592, 9330, 5786, + 5489, 124, 1928, 9569, 8032, 4423, 7047, 765, 9603, + 6041, 2668, 1487, 2665, 9551, 1633, 6181, 84, 7547, + 8082, 7482, 3259, 9213, 5803, 3882, 5394, 1979, 2400, + 6749, 5983, 1058, 6137, 7246, 773, 5714, 4565, 7822, + 6878, 4417, 3086, 1569, 1719, 5174, 5169, 6031, 5251, + 9640, 5409, 14, 2545, 7747, 5114, 5492, 9898, 1409, + 3523, 4063, 6729, 80, 9337, 6181, 8917, 261, 8381, + 6760, 9320, 5295, 3220, 4010, 4744, 8609, 1010, 5305, + 9260, 3490, 8283, 6887, 1747, 9772, 4685, 6659, 290, + 7030, 1170, 5710, 5634, 940, 2186, 3989, 889, 5333, + 6260, 8885, 2061, 5442, 4830, 8951, 3636, 8790, 578, + 5354, 9275, 1915, 5033, 4205, 7287, 2495, 1602, 3870, + 5486, 8600, 8275, 4198, 1084, 888, 495, 9803, 1292, + 8077, 5480, 1678, 5774, 3124, 3887, 332, 2678, 6175, + 409, 8310, 3436, 8252, 4352, 7590, 1442, 1255, 228, + 1152, 5560, 3824, 6610, 6637, 709, 2283, 8512, 6994, + 8985, 8735, 2350, 5740, 1930, 8458, 1361, 5384, 1444, + 7861, 6037, 7951, 3248, 8488, 1791, 1333, 177, 1355, + 6950, 3609, 6501, 8409, 1484, 7136, 5644, 3183, 9492, + 6917, 4022, 2331, 6796, 6370, 8934, 8277, 944, 5653, + 3069, 1456, 5189, 4084, 6941, 6557, 5802, 9989, 2527, + 6957, 9803, 14, 7896, 3797, 8217, 7379, 6116, 3815, + 9642, 9413, 2717, 6241, 1490, 1253, 3463, 2175, 3779, + 4863, 9031, 9205, 1690, 6995, 924, 43, 1394, 9054, + 1581, 3438, 2069, 4045, 5727, 2738, 4333, 5292, 7000, + 5376, 3015, 7111, 9030, 4819, 3485, 7384, 3097, 9362, + 8900, 2958, 8391, 6412, 1332, 5703, 9571, 6546, 2733, + 1890, 2159, 872, 1550, 305, 8578, 3297, 2050, 7548, + 3782, 1940, 640, 686, 8521, 6710, 9047, 3672, 8803, + 8238, 8882, 3472, 9261, 8047, 8763, 3448, 6714, 2416, + 9982, 2400, 3564, 3852, 8717, 2790, 8247, 2277, 887, + 5222, 7805, 9772, 5998, 1226, 9045, 8024, 5, 1965, + 946, 1956, 9244, 9275, 4790, 1278, 551, 2586, 35, + 6129, 7167, 9018, 9329, 5117, 4282, 2002, 7861, 3835, + 5057, 4040, 5353, 9894, 1447, 6457, 4072, 9973, 7775, + 2060, 8382, 3220, 1353, 1529, 3443, 132, 4532, 2405, + 9547, 5941, 254, 7773, 7543, 3043, 7390, 5242, 1337, + 8815, 2460, 9378, 1204, 7745, 4525, 8861, 8564, 8327, + 688, 8999, 6101, 1066, 660, 3165, 3845, 8997, 7684, + 9043, 8631, 6390, 6701, 9914, 2834, 7659, 8364, 6501, + 353, 1688, 3567, 3604, 5656, 5920, 1486, 9285, 8314, + 230, 4186, 6805, 6534, 5779, 6201, 1337, 619, 1368, + 4028, 9239, 5361, 9670, 3165, 8764, 8451, 1749, 5354, + 4757, 9433, 8330, 4044, 8195, 6400, 7150, 969, 9788, + 7121, 3276, 6802, 4876, 1470, 6334, 7325, 2295, 3504, + 4757, 93, 7166, 9475, 6294, 7298, 7786, 8426, 8073, + 3016, 7988, 8621, 7220, 3188, 7756, 7413, 4578, 7672, + 2046, 3507, 519, 6069, 2781, 1914, 7615, 4950, 7118, + 7131, 3707, 5379, 1378, 4426, 7265, 6448, 6606, 9476, + 5509, 2898, 947, 2096, 8223, 9521, 3781, 8358, 8383, + 2204, 5656, 5777, 3613, 5194, 5301, 7557, 8773, 1256, + 6364, 6538, 4220, 9100, 2360, 3056, 9805, 5261, 8211, + 2932, 9519, 1171, 205, 9123, 4449, 7847, 546, 6585, + 7811, 3911, 2070, 1743, 7688, 4267, 306, 7417, 1851, + 1865, 9232, 5014, 5024, 5064, 6511, 2993, 9070, 2950, + 557, 
3789, 8751, 5349, 7134, 7831, 8803, 1422, 8903, + 7709, 2862, 91, 1213, 8347, 7430, 5358, 7802, 8452, + 8492, 6556, 5314, 3562, 3630, 6367, 543, 1027, 4551, + 8, 5834, 388, 6891, 1469, 6519, 5988, 3495, 5974, + 8821, 728, 3788, 2391, 8571, 3972, 9896, 1211, 4134, + 5009, 417, 3154, 4927, 8513, 4238, 3565, 1465, 8832, + 3710, 8052, 4595, 350, 943, 174, 7051, 2274, 4617, + 7712, 6029, 6405, 9644, 16, 4952, 1792, 7406, 3238, + 9821, 8222, 3824, 1039, 5851, 9963, 5374, 3591, 6018, + 1911]), + values=tensor([5.8156e-01, 2.8002e-01, 5.9905e-01, 3.3753e-01, + 2.5754e-01, 1.6141e-01, 7.2303e-01, 4.8284e-01, + 3.7468e-02, 3.4012e-01, 4.4953e-01, 4.5698e-01, + 2.9457e-01, 5.1227e-01, 5.2810e-01, 7.8482e-01, + 3.4909e-01, 8.6832e-01, 2.8544e-01, 4.3171e-01, + 4.6706e-01, 6.8312e-01, 4.0593e-01, 5.5164e-01, + 4.3321e-01, 5.3334e-01, 4.0228e-01, 6.6474e-01, + 5.9923e-02, 4.9814e-01, 3.1823e-01, 2.7113e-01, + 9.4443e-01, 6.0224e-01, 7.4800e-01, 8.6611e-01, + 9.1513e-01, 3.6846e-01, 2.0848e-01, 6.1374e-01, + 9.2960e-01, 6.5074e-01, 9.9559e-01, 2.8175e-01, + 4.7907e-01, 2.3395e-01, 4.2341e-01, 3.1732e-01, + 7.4312e-02, 7.5262e-01, 3.5443e-01, 1.0318e-02, + 2.2228e-01, 8.9822e-01, 4.1342e-01, 5.8355e-01, + 1.7611e-01, 1.6604e-02, 4.8518e-01, 8.7768e-01, + 7.1511e-01, 3.0722e-02, 4.5888e-01, 5.7548e-01, + 9.0778e-01, 4.3445e-02, 3.5396e-01, 4.8016e-01, + 4.4230e-02, 9.9890e-01, 3.9145e-01, 2.9800e-01, + 6.1930e-01, 3.9548e-01, 3.4855e-01, 8.7740e-01, + 3.8543e-01, 6.6978e-01, 9.4785e-01, 4.1594e-01, + 8.4757e-01, 8.1133e-01, 5.6867e-02, 5.5226e-01, + 5.8939e-02, 5.7339e-01, 1.1663e-01, 6.6088e-02, + 1.8693e-01, 9.0155e-01, 5.9654e-01, 3.1837e-01, + 5.0504e-01, 6.4980e-01, 9.5165e-01, 7.1873e-01, + 4.5242e-01, 8.6374e-01, 4.6175e-01, 2.2611e-01, + 1.8382e-01, 3.6584e-01, 6.7886e-02, 7.5200e-02, + 8.3070e-01, 3.0320e-01, 9.8572e-01, 9.5629e-01, + 8.1226e-01, 9.5778e-01, 1.0918e-01, 4.0952e-01, + 7.0520e-01, 8.3725e-02, 7.8669e-01, 6.6612e-01, + 4.0295e-01, 4.8172e-01, 6.7617e-01, 8.8344e-01, + 9.7812e-01, 4.7845e-01, 1.0589e-01, 9.0709e-01, + 9.5599e-01, 6.2792e-01, 8.6374e-01, 5.8811e-01, + 6.0978e-01, 5.2608e-01, 9.0717e-01, 8.6568e-01, + 7.4107e-01, 1.1724e-01, 5.3682e-01, 2.8650e-01, + 8.3774e-01, 2.3658e-01, 7.3055e-01, 4.1747e-01, + 3.8409e-01, 8.0411e-01, 7.2906e-01, 8.5363e-01, + 2.0820e-01, 8.0698e-01, 5.4203e-01, 4.8744e-01, + 5.9718e-01, 9.5159e-01, 5.9867e-01, 6.4359e-01, + 2.5597e-01, 3.0317e-01, 6.6206e-01, 5.1466e-01, + 9.4856e-01, 2.4915e-01, 7.6198e-01, 1.5861e-02, + 2.1708e-01, 1.1645e-02, 7.9476e-01, 9.2575e-01, + 6.7664e-01, 4.2133e-01, 3.3009e-01, 7.6266e-01, + 1.6946e-01, 1.6240e-01, 9.3099e-01, 7.5739e-01, + 4.9141e-01, 6.4062e-02, 3.2170e-02, 3.2530e-01, + 1.2979e-01, 2.9704e-01, 3.2789e-01, 2.5535e-01, + 9.4932e-01, 5.1161e-01, 2.7674e-01, 5.8229e-01, + 6.3609e-01, 9.9252e-01, 9.0421e-02, 2.4892e-01, + 5.9265e-01, 9.3719e-01, 3.9770e-01, 3.8402e-01, + 1.3360e-01, 8.5620e-01, 3.1692e-01, 5.1660e-01, + 9.5131e-01, 3.8815e-01, 7.3377e-01, 2.1604e-01, + 8.0984e-01, 5.4329e-01, 8.9375e-01, 9.7503e-01, + 7.6707e-01, 3.0482e-01, 1.3642e-01, 1.5375e-01, + 1.1664e-01, 6.0092e-01, 3.7722e-01, 6.7558e-01, + 9.1306e-01, 4.7166e-01, 4.9053e-01, 4.7589e-01, + 7.3640e-01, 1.8433e-01, 8.1768e-01, 7.1966e-01, + 4.5083e-01, 3.3424e-02, 1.6627e-01, 2.2704e-02, + 3.6462e-01, 8.1434e-01, 3.7133e-01, 8.8699e-01, + 2.4828e-01, 6.5800e-01, 8.5523e-01, 3.6767e-01, + 1.2518e-01, 5.6608e-03, 9.2298e-01, 5.8367e-01, + 2.1481e-01, 5.0742e-01, 1.9039e-01, 2.3495e-01, + 5.4404e-01, 3.6909e-01, 4.9871e-01, 
9.8443e-01, + 8.3432e-01, 3.3951e-01, 9.1294e-01, 6.4420e-01, + 3.3950e-01, 5.7430e-01, 9.6796e-01, 8.2992e-01, + 3.6682e-01, 5.1895e-01, 4.5942e-01, 1.0992e-01, + 1.6148e-01, 7.7366e-01, 1.0734e-01, 1.5092e-01, + 5.9359e-01, 5.9202e-01, 6.6171e-01, 9.7101e-01, + 2.3146e-01, 6.7431e-01, 2.2316e-01, 7.9158e-01, + 6.0714e-01, 8.9981e-01, 5.6367e-01, 5.8087e-02, + 3.3026e-01, 3.8922e-01, 4.8605e-01, 4.9868e-01, + 2.3291e-01, 7.2357e-01, 9.8898e-01, 6.9764e-01, + 7.7170e-01, 3.4277e-02, 2.2283e-01, 9.1702e-01, + 6.5624e-02, 6.0656e-01, 3.2293e-01, 3.1288e-01, + 8.9296e-01, 8.0019e-01, 1.5753e-01, 1.6827e-01, + 9.2928e-04, 7.8682e-01, 7.2607e-01, 1.3459e-01, + 1.7782e-01, 4.9229e-01, 7.3527e-01, 6.0846e-01, + 5.9970e-01, 5.9203e-01, 5.0594e-02, 8.4256e-01, + 2.0525e-01, 2.4114e-01, 6.6307e-01, 5.7161e-01, + 3.0589e-01, 6.9218e-01, 2.2955e-01, 5.3125e-01, + 5.4542e-01, 7.7805e-01, 9.8328e-01, 1.5098e-01, + 8.6051e-01, 3.0543e-01, 8.4604e-01, 2.0176e-01, + 4.4216e-01, 6.7534e-01, 4.6552e-01, 1.4717e-01, + 1.3506e-01, 5.8764e-01, 9.3422e-01, 6.4074e-02, + 3.3773e-01, 5.7824e-01, 7.9107e-01, 1.3758e-01, + 4.4704e-02, 5.2611e-01, 3.7652e-01, 5.3796e-01, + 9.8105e-01, 3.4618e-01, 7.2700e-01, 9.4524e-01, + 7.1978e-02, 1.4573e-01, 5.6411e-01, 7.1441e-01, + 1.9767e-01, 1.6079e-01, 2.3638e-01, 3.6422e-01, + 3.9279e-01, 9.1192e-01, 8.2524e-01, 4.6723e-01, + 9.0043e-01, 1.3223e-03, 4.2797e-01, 1.8204e-01, + 3.7267e-01, 6.6259e-01, 7.9631e-01, 3.0493e-01, + 5.0885e-01, 8.5562e-01, 1.1331e-01, 9.8719e-01, + 1.7280e-01, 8.7981e-01, 5.5505e-01, 7.7485e-01, + 4.0930e-01, 7.3886e-01, 5.6965e-02, 4.9206e-01, + 2.7155e-01, 1.5581e-01, 2.3386e-01, 4.3266e-01, + 7.7529e-01, 7.5067e-02, 2.5397e-01, 9.4814e-01, + 9.6142e-01, 8.3097e-01, 5.0218e-01, 9.1265e-01, + 2.8372e-01, 3.9980e-01, 5.4897e-01, 2.1594e-01, + 9.3294e-01, 5.4688e-01, 8.6351e-01, 5.3989e-01, + 8.0554e-01, 9.0137e-01, 9.3937e-01, 7.9889e-01, + 5.0169e-01, 9.8315e-01, 4.6786e-01, 8.1309e-01, + 5.9596e-01, 4.7187e-01, 6.3017e-01, 9.6252e-01, + 8.2351e-01, 1.7638e-01, 3.9039e-02, 5.8827e-01, + 4.1792e-01, 7.3715e-01, 4.2459e-01, 1.6410e-01, + 7.9534e-01, 8.3015e-01, 4.4691e-01, 8.4333e-02, + 1.7140e-01, 9.2049e-01, 8.4713e-01, 9.8495e-01, + 8.7326e-01, 8.9186e-01, 8.9461e-01, 7.6165e-01, + 5.1863e-01, 9.0112e-01, 6.7544e-01, 2.1724e-01, + 4.6683e-01, 2.3202e-01, 9.8523e-01, 8.8973e-02, + 6.0117e-01, 4.6704e-01, 9.9637e-01, 1.6636e-01, + 3.5168e-01, 7.7266e-01, 9.0598e-01, 9.0368e-01, + 4.4043e-01, 4.7120e-01, 7.6647e-01, 1.1084e-01, + 2.3819e-01, 9.6001e-01, 9.8689e-01, 2.3260e-01, + 5.2772e-01, 2.2360e-01, 9.2191e-01, 8.0228e-01, + 8.4431e-01, 5.3234e-01, 2.9951e-02, 6.1181e-02, + 3.6272e-01, 9.8281e-01, 9.9138e-01, 5.9192e-01, + 9.2206e-01, 3.7819e-01, 3.1175e-01, 8.8393e-01, + 7.9849e-01, 5.1811e-01, 2.9878e-01, 7.0659e-03, + 5.3577e-01, 5.7861e-01, 9.5654e-01, 8.4606e-01, + 7.8324e-01, 6.0075e-01, 8.1177e-01, 8.4371e-01, + 3.4033e-01, 9.2437e-01, 4.5828e-01, 5.0331e-01, + 4.4795e-01, 7.0522e-01, 6.9947e-01, 2.2760e-01, + 6.6935e-01, 1.7435e-01, 4.4039e-01, 1.6653e-01, + 4.7153e-02, 9.0149e-02, 7.3259e-01, 6.9735e-01, + 7.5150e-01, 8.6004e-01, 6.1248e-01, 8.5985e-01, + 8.8200e-01, 4.8501e-01, 7.3720e-01, 8.0633e-01, + 4.8185e-01, 6.4752e-01, 5.6419e-01, 3.9386e-01, + 8.0603e-01, 5.0349e-01, 1.2778e-01, 2.0548e-01, + 1.5821e-01, 1.6556e-01, 7.1631e-01, 4.0624e-01, + 1.3606e-01, 6.1183e-01, 3.9555e-01, 1.0561e-01, + 2.8675e-01, 5.2567e-02, 7.5976e-01, 7.9129e-02, + 7.3730e-01, 8.3241e-01, 7.1875e-01, 1.1856e-01, + 8.6258e-02, 5.6471e-01, 2.5785e-02, 
8.1635e-01, + 6.3949e-02, 1.8129e-01, 6.7175e-01, 3.8091e-02, + 2.2187e-01, 7.4485e-01, 7.9801e-01, 5.3716e-01, + 4.1435e-02, 9.8612e-01, 5.7251e-01, 7.8581e-01, + 2.8949e-01, 7.0273e-01, 8.5312e-01, 6.0211e-01, + 4.8109e-01, 7.5446e-01, 1.5074e-01, 8.6119e-01, + 6.0038e-01, 9.8757e-01, 4.5262e-03, 4.2661e-01, + 4.7783e-02, 4.0508e-01, 3.0956e-01, 9.5069e-01, + 5.6196e-01, 7.6732e-01, 9.4765e-01, 2.0531e-01, + 6.1458e-01, 5.0798e-02, 5.9745e-01, 5.1827e-01, + 8.9350e-01, 6.1221e-01, 8.1354e-01, 6.6283e-01, + 2.1505e-01, 8.9438e-01, 9.4276e-01, 7.0349e-01, + 3.8506e-01, 4.0799e-01, 3.0782e-02, 5.2799e-01, + 5.3057e-01, 2.7077e-02, 7.8361e-01, 7.3240e-01, + 3.5031e-01, 6.4726e-01, 2.2415e-01, 6.0831e-01, + 2.4833e-02, 3.5753e-01, 6.0782e-01, 5.4172e-02, + 4.9722e-01, 5.4081e-01, 9.9252e-01, 1.6883e-01, + 7.4341e-01, 5.9899e-01, 5.2520e-01, 6.9027e-01, + 3.9066e-01, 2.0380e-01, 6.7595e-01, 1.5434e-01, + 5.9520e-01, 1.2884e-01, 6.0210e-01, 3.2402e-01, + 8.5293e-01, 7.4237e-01, 1.0054e-02, 2.4596e-01, + 7.4130e-01, 5.2280e-01, 8.0440e-01, 6.1296e-01, + 2.0068e-01, 2.5059e-01, 9.8719e-01, 9.6879e-02, + 7.7269e-01, 3.4500e-01, 6.1866e-02, 4.6024e-01, + 2.0564e-01, 4.1027e-01, 6.0301e-01, 4.6657e-01, + 6.1033e-01, 4.8262e-01, 8.8721e-01, 5.7207e-01, + 3.8954e-01, 9.6602e-01, 9.8296e-01, 3.0700e-01, + 4.7009e-01, 6.1393e-01, 3.6798e-01, 4.6415e-01, + 5.5804e-01, 7.3157e-01, 3.9789e-02, 5.7521e-01, + 2.3897e-01, 2.7612e-01, 3.6370e-01, 2.3572e-01, + 7.5048e-01, 7.4924e-01, 5.2436e-01, 4.1265e-01, + 1.0055e-01, 6.8976e-01, 9.0139e-01, 5.4326e-01, + 5.6119e-01, 8.8100e-01, 4.3479e-01, 1.6981e-01, + 5.4168e-01, 4.0741e-02, 3.7419e-01, 6.0348e-01, + 4.7511e-02, 9.1892e-02, 2.9757e-01, 7.9735e-01, + 4.0023e-02, 8.0662e-01, 3.5945e-01, 4.4613e-01, + 4.0985e-01, 6.4909e-01, 8.2913e-01, 5.6092e-01, + 6.1720e-01, 2.2445e-01, 9.2177e-01, 5.4056e-01, + 9.9170e-02, 4.0349e-01, 1.0757e-01, 4.6385e-01, + 9.1243e-01, 9.6524e-01, 1.5630e-02, 1.2372e-02, + 6.8277e-01, 6.6230e-01, 4.2663e-01, 1.1331e-01, + 1.1843e-01, 8.7492e-01, 3.2309e-01, 1.8141e-01, + 4.6109e-01, 9.2364e-01, 3.4677e-01, 9.5054e-01, + 9.0661e-01, 2.2803e-01, 2.8323e-01, 1.1518e-01, + 3.3279e-01, 7.6139e-01, 3.8271e-01, 2.4706e-02, + 3.9825e-01, 9.4575e-02, 1.2221e-01, 3.4147e-01, + 8.3339e-01, 5.0504e-02, 2.8865e-01, 2.4651e-01, + 3.6862e-01, 2.4803e-01, 2.1618e-01, 6.2499e-01, + 3.4456e-01, 6.0018e-02, 2.1179e-02, 1.1625e-01, + 4.2536e-01, 6.7639e-01, 3.6221e-01, 1.9357e-01, + 6.0271e-01, 7.0261e-01, 1.1726e-01, 6.0181e-01, + 8.9599e-01, 8.1322e-01, 3.7877e-01, 4.3523e-01, + 4.3914e-01, 8.8124e-01, 3.0515e-01, 6.7038e-01, + 6.0127e-01, 7.6653e-01, 2.9249e-02, 2.2096e-01, + 9.4668e-01, 8.5878e-01, 4.5109e-01, 4.7936e-01, + 6.5717e-01, 6.8862e-01, 7.6992e-01, 9.3225e-01, + 8.5394e-03, 4.3708e-01, 7.2233e-01, 1.6987e-01, + 5.4738e-02, 2.4823e-02, 6.7652e-01, 1.7166e-01, + 1.1735e-01, 2.5575e-01, 7.2176e-01, 3.0234e-01, + 7.1567e-01, 4.3413e-01, 8.1045e-01, 4.8844e-01, + 1.4022e-01, 7.3062e-01, 4.2223e-01, 4.7326e-01, + 1.9276e-02, 4.8944e-01, 4.0502e-01, 8.2884e-01, + 9.5804e-01, 1.5109e-01, 6.8776e-01, 3.4887e-02, + 6.7033e-01, 7.8144e-01, 1.8732e-01, 3.9434e-01, + 7.8051e-01, 5.3829e-01, 4.1271e-01, 2.6616e-01, + 1.6403e-01, 5.4211e-01, 4.1375e-01, 3.8938e-01, + 3.9980e-01, 8.9495e-01, 9.2956e-01, 5.7320e-01, + 4.4129e-01, 7.3360e-01, 4.8990e-01, 7.0508e-01, + 9.3550e-01, 2.6797e-01, 1.4940e-01, 2.8127e-01, + 5.0946e-01, 8.7169e-01, 5.9653e-01, 5.9401e-01, + 2.2895e-01, 7.8885e-01, 9.8315e-01, 8.8748e-01, + 7.5977e-01, 6.6367e-01, 2.3954e-01, 
3.7040e-01, + 1.1940e-01, 4.6258e-01, 9.2222e-01, 4.4173e-01, + 7.7064e-01, 2.1576e-01, 2.7241e-01, 1.4843e-01, + 6.5290e-01, 3.1076e-01, 4.9139e-01, 2.5075e-02, + 9.6030e-01, 9.1738e-01, 6.8115e-01, 8.9410e-01, + 9.2336e-01, 3.2098e-01, 9.9084e-01, 2.9840e-01, + 9.3566e-01, 8.7476e-01, 7.3520e-01, 7.4897e-01, + 5.0484e-01, 2.8760e-01, 5.5015e-01, 3.5047e-01, + 9.0398e-01, 4.8261e-01, 4.4223e-02, 2.8056e-01, + 2.3169e-01, 3.8516e-01, 5.1483e-01, 3.1543e-01, + 6.3024e-01, 9.4733e-02, 4.7810e-01, 5.5539e-01, + 5.7289e-02, 3.2199e-01, 5.0710e-01, 7.9558e-01, + 1.6270e-01, 7.9383e-01, 1.4537e-02, 8.9211e-01, + 3.7452e-01, 2.0368e-01, 5.9692e-01, 2.5907e-01, + 9.5016e-01, 6.1173e-01, 5.1952e-01, 5.1345e-01, + 8.7632e-01, 1.7394e-01, 2.4675e-01, 7.4916e-01, + 9.8520e-02, 4.5166e-01, 9.0804e-01, 8.2228e-01, + 1.9968e-01, 8.0784e-01, 9.4766e-01, 7.3145e-01, + 4.4231e-01, 5.8073e-01, 9.6792e-01, 1.5536e-01, + 1.6897e-01, 4.6301e-02, 6.9308e-01, 6.5970e-01, + 7.7933e-01, 7.7180e-01, 7.6291e-01, 6.9430e-01, + 2.8253e-01, 9.3597e-01, 5.5534e-01, 7.0535e-01, + 5.7773e-01, 1.7953e-01, 8.2295e-02, 6.2018e-01, + 5.0550e-01, 7.1159e-01, 1.0988e-01, 4.4791e-01, + 9.1384e-01, 9.0103e-01, 3.6527e-01, 6.0135e-02, + 2.6150e-02, 4.6299e-01, 2.4526e-01, 3.1743e-01, + 4.2068e-01, 3.1271e-01, 7.5573e-01, 2.9495e-01, + 8.0565e-01, 2.3590e-01, 9.1131e-01, 2.8576e-01, + 2.0058e-01, 5.4518e-01, 6.9041e-01, 5.0889e-01, + 7.8903e-01, 6.8462e-01, 1.1413e-01, 2.1967e-01, + 1.0436e-01, 5.2052e-03, 5.2858e-01, 7.2315e-01, + 5.8394e-01, 7.4019e-01, 1.9528e-01, 9.0704e-01, + 5.3717e-01, 5.1037e-01, 9.7610e-01, 6.4359e-01, + 6.7128e-01, 4.9653e-01, 8.6929e-01, 7.7971e-01, + 8.0348e-01, 9.0766e-01, 7.7183e-01, 3.4928e-01, + 3.9630e-01, 3.8015e-01, 3.9147e-01, 8.9063e-01, + 7.3734e-01, 6.4280e-01, 2.0891e-01, 7.2230e-01, + 6.9356e-01, 2.6885e-01, 7.5346e-01, 1.8015e-02, + 9.7342e-01, 7.5843e-01, 3.9930e-02, 5.3224e-01, + 2.4238e-01, 7.0185e-01, 4.2115e-01, 7.0334e-01, + 9.0349e-02, 6.6656e-01, 3.1757e-01, 6.7208e-01, + 5.6411e-01, 4.9627e-01, 5.3507e-01, 9.0409e-01, + 6.4014e-01, 8.4199e-01, 5.6660e-01, 1.3654e-01, + 7.8412e-01, 4.1112e-01, 2.8374e-01, 6.5040e-01, + 9.6597e-01, 1.1932e-01, 1.4265e-01, 5.1651e-01, + 6.2252e-01, 5.9412e-01, 5.8400e-01, 4.7015e-01, + 8.7865e-01, 2.1175e-01, 4.5409e-01, 9.5729e-01, + 6.2691e-01, 4.0234e-01, 8.7349e-01, 9.0944e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.3846, 0.0429, 0.8390, ..., 0.4444, 0.4671, 0.5693]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.02031111717224121 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '51695', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.430596351623535} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([2944, 4631, 7838, 64, 7093, 5206, 1028, 310, 9606, + 1018, 6902, 7057, 1823, 4013, 4610, 9989, 4269, 4529, + 6204, 2190, 9040, 4125, 4186, 8485, 9100, 1208, 6942, + 5613, 5410, 982, 7663, 9703, 3365, 643, 8709, 5149, + 1560, 3811, 7285, 2169, 4837, 435, 8158, 77, 5709, + 4392, 8477, 214, 5447, 1230, 2352, 7618, 2372, 1855, + 9698, 8138, 9945, 2370, 5687, 685, 4433, 106, 4886, + 6383, 6506, 8296, 2570, 6015, 7735, 8565, 6870, 6825, + 6162, 5406, 357, 4114, 3650, 9189, 4601, 615, 2109, + 9546, 583, 5240, 2983, 5584, 3664, 6211, 9643, 427, + 3253, 1697, 3784, 7720, 8331, 144, 5167, 9371, 3050, + 8120, 311, 7247, 526, 2016, 559, 5116, 960, 9939, + 4437, 7192, 3190, 5204, 1362, 3918, 6955, 3497, 7938, + 3584, 768, 9858, 357, 7248, 4890, 3019, 5465, 2250, + 6450, 6315, 1323, 7946, 4804, 9385, 2390, 5192, 6218, + 6486, 9725, 8061, 6921, 2589, 581, 1321, 8748, 5258, + 8932, 3600, 1815, 3765, 4806, 1864, 8606, 4321, 8281, + 1210, 8564, 1395, 944, 9269, 4578, 851, 2516, 3733, + 4122, 5377, 8566, 3856, 7526, 2900, 7004, 3137, 4286, + 5951, 6434, 855, 2440, 3924, 4064, 8268, 6266, 7614, + 8695, 6065, 890, 1123, 3892, 8571, 852, 7199, 2977, + 6191, 6117, 9295, 9139, 1914, 8396, 8082, 1888, 5920, + 3434, 2684, 5852, 8569, 776, 8843, 6510, 1633, 8834, + 8526, 1580, 8173, 282, 6642, 7312, 3586, 1518, 1005, + 4706, 4163, 5932, 3672, 2153, 7059, 4002, 6969, 6411, + 6065, 7536, 1968, 2781, 6687, 7194, 6920, 2851, 1789, + 9756, 3005, 9630, 99, 2131, 1722, 5637, 9061, 5906, + 8732, 6783, 9448, 5100, 3577, 9652, 6175, 6234, 6704, + 7274, 4940, 771, 1465, 3148, 2594, 9088, 4863, 4452, + 4262, 3763, 6315, 2297, 9967, 167, 5404, 7440, 1234, + 4491, 9830, 2721, 8188, 3813, 5248, 4965, 9264, 2428, + 8783, 9884, 7556, 5232, 1866, 4881, 4240, 444, 3067, + 767, 9531, 8720, 9797, 8425, 5431, 8011, 9312, 4880, + 8821, 3698, 2971, 5, 3673, 267, 2436, 5203, 2055, + 6531, 9865, 8191, 8816, 5113, 1324, 2406, 853, 5434, + 2987, 1271, 1838, 833, 6506, 1146, 2917, 8166, 7909, + 3648, 1782, 1802, 2838, 4143, 631, 2015, 75, 9460, + 3849, 2112, 456, 3786, 8968, 9520, 6835, 7144, 7621, + 2315, 8703, 1731, 2031, 7103, 9099, 6237, 2016, 8915, + 5251, 258, 3215, 1913, 1648, 6070, 6349, 537, 3231, + 9264, 7547, 5721, 2677, 3735, 2594, 2432, 4830, 5593, + 5054, 5952, 3001, 8697, 8593, 1261, 9838, 9624, 9968, + 7941, 1741, 9156, 5234, 2249, 5886, 5882, 6287, 151, + 4253, 9286, 5777, 3328, 1300, 7552, 1150, 698, 5598, + 6779, 6880, 7828, 8409, 1847, 1177, 9423, 5339, 8040, + 2933, 226, 8668, 9532, 829, 7683, 1021, 5184, 1989, + 6222, 4187, 646, 8577, 348, 4530, 3057, 139, 8413, + 9470, 3528, 8527, 8781, 9727, 2925, 7182, 2238, 8597, + 1505, 4920, 6681, 6866, 4885, 4952, 1302, 8353, 6525, + 4239, 278, 7723, 9708, 2951, 4329, 9499, 847, 1705, + 7179, 4713, 4933, 6431, 7397, 1171, 1797, 9375, 2124, + 9648, 2158, 151, 8529, 5910, 7872, 8196, 6347, 4852, + 6362, 8322, 3066, 4255, 1267, 8971, 9505, 2218, 8058, + 7670, 2180, 5402, 5997, 6533, 932, 8160, 4883, 4362, + 4475, 5441, 4918, 5223, 9438, 3376, 2793, 964, 8748, + 1984, 1432, 1708, 8439, 2947, 4968, 6368, 6713, 6795, + 3148, 9913, 217, 5345, 6001, 8332, 1552, 1199, 7206, + 1308, 7825, 5277, 8289, 85, 2333, 5765, 226, 8927, + 8002, 7029, 1600, 6557, 5595, 3577, 8420, 953, 281, + 5437, 1674, 7978, 3961, 278, 6180, 7438, 6902, 7475, + 9302, 154, 2101, 2559, 9637, 8412, 7907, 1203, 2401, + 9205, 9268, 671, 2239, 1644, 2845, 8071, 5971, 7992, + 5268, 5658, 
3402, 9075, 117, 9708, 7839, 6237, 1366, + 4248, 5405, 3450, 2823, 2678, 8128, 3077, 7890, 5525, + 352, 7953, 8994, 2792, 9843, 6836, 2444, 23, 8606, + 5482, 538, 7248, 2671, 2636, 4875, 6392, 1634, 9039, + 206, 8557, 4262, 6345, 2787, 5186, 9690, 3313, 7196, + 8187, 4688, 3636, 5699, 5851, 2918, 4441, 2561, 4610, + 2129, 7033, 6, 654, 3222, 6073, 7625, 8473, 6191, + 2880, 70, 1031, 6689, 6971, 2004, 7000, 9676, 3644, + 2140, 5806, 8059, 6633, 4320, 1663, 4526, 3203, 4616, + 869, 2766, 1163, 1210, 2331, 4282, 6403, 6555, 6365, + 7000, 9544, 3550, 4307, 1368, 6969, 8833, 3383, 9978, + 1612, 6654, 7073, 7110, 9566, 3445, 7597, 360, 3236, + 7275, 5924, 226, 8020, 320, 4244, 2760, 6608, 2555, + 7919, 984, 1478, 3569, 1644, 9945, 9097, 9180, 2886, + 7713, 3094, 1356, 6989, 8789, 3839, 5615, 1182, 7409, + 5000, 430, 5040, 7926, 3097, 6697, 4245, 2151, 950, + 4330, 6071, 639, 3674, 5561, 6529, 7043, 7697, 1166, + 9926, 5506, 4062, 141, 6776, 1000, 5889, 1811, 3805, + 4499, 742, 9756, 6422, 3115, 1421, 2148, 4577, 849, + 2703, 9046, 5675, 6060, 9740, 1043, 1133, 2568, 6264, + 8365, 2819, 7233, 5911, 5860, 7890, 540, 224, 40, + 3677, 6635, 2186, 2545, 1144, 5999, 7948, 1275, 7035, + 6767, 440, 2108, 7801, 2969, 8350, 7659, 9002, 317, + 3973, 8437, 443, 2678, 8935, 6175, 9836, 8183, 4682, + 9089, 9110, 5267, 8976, 8306, 1349, 7138, 7607, 1143, + 8943, 8080, 3638, 5861, 5984, 1101, 7747, 9364, 6794, + 9386, 9795, 7275, 4821, 451, 5323, 9773, 4378, 5864, + 5634, 5713, 173, 9097, 1287, 4073, 5068, 8982, 1727, + 8774, 1428, 3104, 3715, 575, 7243, 3651, 6707, 5075, + 8914, 3157, 1446, 1023, 3445, 7439, 3411, 6822, 8426, + 4690, 8269, 6866, 4650, 7633, 3301, 1775, 9688, 3595, + 2053, 2622, 3250, 7912, 6773, 6316, 177, 2930, 6156, + 1293, 577, 1247, 6081, 5439, 1662, 4881, 8932, 4199, + 138, 9186, 2308, 2400, 770, 4059, 6081, 3117, 8087, + 5005, 7215, 4647, 6804, 6284, 198, 3580, 7361, 1611, + 7627, 5464, 1293, 4984, 2254, 7286, 5884, 9703, 5740, + 9960, 7501, 6278, 972, 9723, 1341, 1549, 9994, 8760, + 3100, 1070, 3050, 126, 7718, 9489, 9768, 6233, 8322, + 198, 1114, 1745, 6052, 5501, 54, 1988, 844, 5891, + 7468, 5693, 6195, 1125, 8757, 6530, 960, 1088, 5817, + 2609, 2322, 5466, 178, 2328, 4211, 8288, 7077, 2606, + 6127, 1116, 3683, 2034, 9765, 3690, 5390, 5302, 9888, + 373, 5882, 1973, 5371, 3576, 9266, 7825, 1187, 1677, + 5997, 3264, 2770, 7555, 2180, 1025, 5640, 6520, 351, + 7466, 7331, 398, 4140, 6895, 6471, 5153, 2278, 5771, + 9295, 4997, 1365, 9802, 7556, 5796, 7043, 4565, 8844, + 8073, 4187, 3352, 7057, 3992, 5284, 8234, 8721, 6018, + 7438, 1774, 1947, 6669, 1070, 6008, 9524, 9578, 5651, + 9208, 8515, 3069, 4213, 1533, 3772, 203, 9477, 7854, + 4029]), + values=tensor([2.3738e-01, 2.6402e-02, 5.3203e-01, 6.2427e-01, + 6.7786e-01, 6.5310e-01, 3.6557e-01, 9.7123e-01, + 8.9972e-01, 9.5830e-01, 3.4232e-01, 1.0001e-01, + 8.4545e-01, 1.7529e-01, 5.2093e-01, 2.8574e-01, + 7.7667e-01, 4.8853e-01, 8.8174e-02, 3.8671e-01, + 6.4054e-01, 1.4478e-01, 1.4161e-01, 1.3033e-01, + 5.8032e-01, 3.2700e-01, 5.7588e-01, 6.4111e-01, + 9.3574e-01, 9.8711e-01, 2.9326e-01, 5.1547e-01, + 5.5550e-01, 9.9624e-01, 6.9869e-01, 8.7238e-01, + 1.4140e-01, 7.8895e-01, 8.0717e-01, 8.4288e-01, + 7.5389e-01, 6.4603e-01, 4.7889e-01, 2.0452e-01, + 6.3552e-01, 7.5543e-01, 4.2326e-01, 1.2044e-01, + 6.4432e-01, 4.9972e-02, 1.5813e-01, 5.4805e-01, + 7.7746e-01, 9.6780e-01, 8.1895e-01, 7.0802e-01, + 2.0264e-01, 4.3775e-01, 8.5595e-01, 2.7169e-01, + 4.0346e-01, 4.0533e-01, 1.4484e-01, 3.1479e-01, + 2.3174e-01, 2.5230e-01, 5.5513e-01, 3.5842e-01, 
+ 8.2852e-01, 8.2638e-01, 9.5400e-01, 1.2552e-01, + 9.9323e-01, 1.6458e-01, 3.0800e-01, 4.9640e-01, + 8.8731e-01, 9.2485e-02, 3.0189e-02, 7.0178e-01, + 3.9376e-01, 6.4217e-01, 3.1370e-01, 5.0653e-01, + 4.3611e-01, 5.4295e-01, 4.0541e-01, 6.4571e-01, + 2.9565e-01, 2.7763e-01, 8.2797e-01, 7.0112e-01, + 1.8712e-01, 6.7060e-01, 2.6354e-01, 5.6298e-01, + 2.5183e-01, 9.8374e-01, 1.1535e-01, 5.6613e-01, + 6.5427e-01, 5.3770e-01, 7.1824e-01, 2.6174e-01, + 7.6309e-01, 7.0956e-01, 3.4060e-01, 8.8854e-01, + 6.3975e-01, 1.3395e-01, 1.6898e-01, 3.7819e-01, + 8.5999e-01, 8.8412e-01, 7.3320e-01, 7.1138e-01, + 2.4100e-01, 9.2249e-01, 8.4111e-01, 2.0516e-02, + 3.2336e-01, 3.0900e-01, 4.7411e-01, 4.1779e-01, + 2.4793e-01, 4.4443e-01, 7.6111e-01, 2.5346e-01, + 9.3721e-01, 9.5365e-01, 1.6967e-01, 9.2786e-01, + 2.4943e-01, 1.3252e-01, 6.9868e-01, 7.1942e-01, + 6.5647e-01, 4.3431e-01, 9.3856e-01, 5.0608e-01, + 3.8007e-01, 6.6370e-01, 3.5720e-01, 8.2733e-01, + 9.5105e-01, 5.6232e-01, 1.4629e-02, 4.8497e-01, + 1.7643e-01, 6.8013e-01, 8.6450e-01, 6.5699e-01, + 7.1222e-02, 2.5273e-01, 4.9082e-01, 3.7325e-01, + 3.1209e-03, 2.1298e-02, 4.6607e-01, 3.9429e-01, + 3.7773e-02, 4.3848e-02, 4.0839e-01, 3.0784e-01, + 2.5508e-01, 3.4852e-01, 7.1756e-01, 7.2894e-01, + 3.6554e-01, 9.2825e-01, 3.6757e-01, 5.1756e-01, + 2.1632e-01, 3.3479e-01, 9.3681e-01, 4.2513e-01, + 9.5757e-01, 9.7212e-01, 3.7930e-01, 7.9385e-01, + 7.5330e-01, 4.3880e-01, 3.0507e-02, 8.3967e-01, + 2.0001e-01, 2.5357e-01, 2.5983e-01, 4.3218e-01, + 4.0356e-02, 9.8588e-01, 1.8766e-01, 9.5399e-02, + 1.9176e-01, 4.3947e-01, 1.3454e-01, 5.5070e-01, + 2.9283e-01, 7.3724e-01, 7.5999e-01, 6.5391e-01, + 3.3144e-01, 2.0963e-01, 7.8419e-02, 3.2595e-01, + 4.8398e-01, 7.0621e-01, 9.4064e-01, 5.0536e-01, + 8.0281e-01, 2.0884e-01, 1.8452e-01, 4.2045e-01, + 7.5716e-01, 4.8000e-01, 1.3992e-01, 9.2003e-01, + 2.9919e-01, 9.8505e-02, 3.8274e-01, 4.3847e-01, + 6.4077e-01, 8.0841e-01, 3.7080e-01, 1.8314e-01, + 5.0345e-01, 6.5621e-01, 9.9426e-01, 2.6686e-01, + 5.2414e-03, 1.2980e-01, 6.4376e-01, 7.5228e-02, + 8.7038e-01, 6.2767e-01, 8.2595e-02, 7.1176e-01, + 2.3989e-01, 3.3127e-01, 2.3020e-02, 9.7489e-01, + 8.6149e-01, 1.2530e-01, 4.6233e-01, 2.9601e-01, + 5.5479e-01, 8.5166e-01, 4.3046e-01, 4.1313e-01, + 2.6780e-01, 5.4984e-01, 4.1855e-01, 4.9210e-01, + 6.2682e-01, 7.4517e-01, 6.6285e-01, 9.3085e-01, + 5.3350e-01, 2.8974e-01, 4.4394e-02, 2.8930e-01, + 7.8976e-01, 4.3678e-01, 7.6157e-01, 3.7944e-01, + 3.3591e-01, 1.2626e-01, 7.2858e-01, 9.3352e-02, + 6.7802e-01, 8.5292e-01, 7.5938e-01, 1.8850e-01, + 9.9956e-01, 5.6141e-01, 7.0078e-01, 9.1202e-01, + 8.7269e-01, 4.9572e-01, 9.6328e-01, 5.5369e-01, + 4.1234e-01, 1.2004e-01, 2.3986e-01, 9.6465e-01, + 4.2990e-01, 1.6000e-01, 4.0186e-01, 2.1829e-01, + 9.7558e-01, 2.7825e-01, 2.9610e-01, 9.7411e-01, + 8.6444e-01, 5.5775e-01, 7.9402e-02, 5.2234e-01, + 7.9853e-01, 1.9558e-01, 9.4287e-01, 1.0306e-01, + 2.8658e-01, 4.0821e-01, 7.9293e-01, 5.1165e-01, + 8.5048e-01, 3.7074e-01, 8.3318e-01, 5.7708e-01, + 5.9855e-01, 1.9828e-01, 3.4368e-01, 4.7604e-01, + 9.6257e-01, 7.8500e-01, 1.6163e-02, 1.5176e-01, + 3.1112e-01, 3.7056e-02, 7.2264e-01, 3.0160e-02, + 4.1949e-01, 9.7850e-01, 1.0112e-01, 8.4497e-01, + 7.3483e-01, 6.4117e-01, 5.4885e-01, 4.8799e-01, + 2.4303e-01, 3.3166e-02, 8.0967e-02, 5.5332e-01, + 9.0394e-01, 4.8740e-01, 5.7113e-01, 1.8688e-01, + 2.5461e-01, 8.0556e-01, 4.2261e-01, 8.4350e-01, + 8.9809e-01, 6.0672e-01, 5.2776e-01, 2.0777e-02, + 2.9352e-01, 3.8160e-01, 5.0311e-01, 7.4170e-01, + 4.7393e-01, 5.2493e-01, 1.4382e-01, 8.3941e-01, + 
1.4398e-01, 7.4231e-01, 6.4705e-01, 3.0748e-01, + 9.8149e-01, 8.3684e-01, 4.1167e-01, 9.1083e-01, + 6.8831e-01, 4.9856e-01, 8.1105e-01, 8.6579e-01, + 6.1778e-01, 3.3383e-01, 5.5148e-01, 6.1391e-01, + 4.8355e-01, 5.2801e-01, 3.1579e-01, 4.0657e-01, + 8.3102e-01, 8.8214e-01, 6.2846e-01, 7.7912e-02, + 7.1251e-01, 1.5134e-01, 3.7764e-01, 9.3226e-01, + 9.0073e-01, 4.0255e-01, 8.3469e-01, 5.5163e-02, + 5.5679e-01, 7.7247e-01, 9.9926e-01, 7.1631e-01, + 1.4688e-01, 8.5901e-01, 9.5820e-01, 8.7402e-01, + 3.3750e-01, 8.3379e-01, 1.7997e-01, 3.4024e-02, + 6.0026e-01, 7.2408e-01, 1.6327e-01, 8.5342e-01, + 6.8883e-01, 7.5934e-01, 4.2426e-01, 4.2108e-01, + 7.9901e-01, 3.7324e-01, 1.4921e-01, 3.4700e-02, + 6.9781e-01, 5.9434e-01, 6.0246e-01, 5.8023e-01, + 2.8967e-01, 1.9480e-01, 6.2027e-01, 8.0618e-01, + 3.4684e-03, 2.8542e-01, 7.7883e-01, 9.8518e-01, + 2.6924e-01, 3.0555e-01, 3.6124e-01, 7.4453e-01, + 1.8155e-01, 6.4206e-01, 2.7114e-01, 4.7901e-01, + 8.7451e-01, 9.2856e-02, 3.4326e-01, 2.1555e-01, + 3.0832e-01, 4.0096e-01, 1.5544e-01, 6.8396e-01, + 6.8827e-01, 2.2804e-01, 6.6620e-01, 8.9734e-01, + 3.3590e-01, 9.3187e-01, 9.3009e-01, 3.5177e-01, + 6.8568e-01, 5.5609e-01, 2.0487e-01, 8.8749e-01, + 6.9496e-01, 4.7259e-01, 9.6056e-01, 2.6776e-01, + 8.7342e-01, 9.2249e-01, 1.3092e-01, 5.5780e-01, + 4.7137e-01, 5.0339e-01, 1.1579e-01, 4.4049e-01, + 6.2910e-01, 4.8928e-01, 6.7993e-01, 2.2566e-01, + 8.9579e-01, 3.8693e-01, 1.3706e-01, 7.8526e-01, + 7.7892e-01, 4.7453e-01, 5.5368e-01, 7.7885e-01, + 5.6439e-01, 5.8212e-01, 1.2176e-01, 5.9040e-01, + 8.3832e-01, 4.8913e-01, 3.9403e-01, 7.9724e-01, + 1.9041e-01, 3.0558e-01, 7.3543e-02, 3.8543e-01, + 3.8432e-01, 1.5301e-01, 1.9464e-01, 7.6242e-01, + 8.5603e-01, 2.4348e-01, 3.3823e-01, 1.3497e-01, + 2.9285e-01, 3.1222e-01, 4.8630e-01, 7.9517e-01, + 2.0854e-01, 5.5934e-01, 3.1609e-01, 9.9551e-01, + 3.6527e-01, 3.4714e-01, 6.7307e-01, 2.8007e-01, + 3.8271e-01, 3.9288e-01, 6.5768e-01, 5.8630e-01, + 5.4392e-01, 5.8597e-02, 6.5879e-01, 7.0779e-01, + 8.2889e-01, 7.8725e-01, 2.3580e-01, 1.2659e-01, + 4.6340e-01, 7.1524e-01, 1.4355e-01, 9.3213e-01, + 6.7358e-01, 9.9362e-01, 3.3661e-01, 8.1383e-01, + 6.3340e-01, 5.5372e-01, 9.3278e-01, 4.5087e-01, + 9.2927e-01, 5.3649e-01, 8.7350e-01, 2.7214e-01, + 9.4851e-01, 5.9748e-01, 3.1403e-02, 6.1772e-01, + 2.6300e-01, 6.6770e-01, 1.9508e-01, 6.6179e-01, + 2.5318e-02, 3.1166e-01, 5.9892e-01, 9.4355e-01, + 2.1253e-01, 2.3988e-01, 8.9296e-01, 7.7257e-01, + 6.2487e-01, 9.1523e-01, 9.7362e-01, 5.4365e-01, + 7.0272e-02, 8.3501e-01, 3.0732e-01, 4.5696e-01, + 6.9966e-01, 5.5054e-01, 8.9687e-01, 4.4966e-01, + 4.7031e-01, 3.6367e-01, 7.4112e-01, 1.0620e-01, + 7.1418e-01, 9.5953e-03, 9.8052e-01, 4.1101e-01, + 1.8228e-01, 8.9274e-01, 7.5847e-01, 7.1856e-01, + 5.6637e-01, 5.7611e-01, 3.8877e-01, 4.2294e-01, + 6.2557e-01, 5.3124e-01, 1.6826e-02, 2.3507e-01, + 8.1644e-01, 1.5435e-01, 3.3081e-01, 5.6503e-01, + 1.9146e-01, 6.8300e-01, 2.5627e-02, 1.7776e-01, + 2.7126e-01, 9.1810e-03, 2.4871e-01, 3.6057e-02, + 5.8490e-01, 3.2513e-01, 6.0523e-01, 3.8111e-01, + 9.1639e-01, 2.9765e-01, 7.2000e-01, 2.5948e-01, + 2.5582e-01, 2.4864e-01, 3.7189e-01, 1.0205e-01, + 3.8670e-01, 4.0786e-02, 8.8019e-01, 6.7285e-01, + 7.3613e-01, 3.4637e-01, 5.4532e-01, 1.9835e-02, + 9.5883e-01, 2.3990e-01, 1.1367e-01, 2.3155e-01, + 9.8695e-02, 9.9085e-01, 8.2365e-01, 1.9166e-02, + 1.0994e-01, 7.0415e-01, 7.0864e-01, 3.8621e-01, + 2.6239e-01, 7.7842e-01, 1.3817e-01, 7.9101e-02, + 2.1852e-01, 6.2569e-01, 2.2697e-01, 6.0997e-01, + 5.8456e-01, 2.7736e-02, 9.9867e-01, 1.3290e-01, + 
1.4762e-01, 1.4758e-02, 9.2467e-03, 9.5847e-02, + 5.6896e-01, 7.7772e-01, 4.3486e-01, 9.6895e-01, + 5.7190e-01, 9.8043e-01, 5.9680e-02, 1.3507e-01, + 1.1531e-01, 7.4566e-01, 8.1640e-01, 6.3706e-01, + 1.5904e-01, 6.7155e-01, 4.7947e-01, 6.7198e-01, + 8.0175e-01, 2.8197e-02, 4.2399e-04, 8.8622e-01, + 2.9810e-01, 6.2710e-01, 5.3580e-01, 7.0656e-01, + 1.9115e-01, 2.1864e-01, 8.2521e-01, 9.7825e-01, + 3.2694e-01, 4.5699e-01, 9.2332e-01, 8.1085e-01, + 8.6842e-01, 2.6298e-01, 9.5236e-01, 9.6902e-01, + 3.7078e-02, 3.2802e-01, 6.1465e-01, 1.9061e-01, + 7.4225e-02, 9.0832e-01, 6.7882e-01, 9.1376e-01, + 1.8754e-01, 5.8402e-01, 4.0463e-01, 1.6950e-02, + 6.0488e-01, 7.0416e-01, 8.9438e-01, 6.3537e-01, + 9.3311e-01, 7.1219e-01, 1.8304e-01, 2.0294e-01, + 2.4156e-01, 2.5246e-02, 4.7333e-01, 3.6382e-01, + 7.7323e-02, 6.9307e-01, 6.1588e-02, 8.9470e-01, + 3.1037e-01, 8.2865e-01, 6.5366e-01, 1.6570e-01, + 5.7658e-01, 4.8139e-01, 7.1975e-01, 5.1300e-01, + 8.9715e-01, 4.2038e-01, 5.8014e-01, 6.9148e-01, + 3.0599e-01, 9.3319e-02, 4.7731e-01, 2.1704e-01, + 6.4481e-01, 2.4996e-01, 6.0429e-01, 5.5205e-01, + 2.2135e-01, 8.8346e-01, 7.0709e-01, 8.2152e-01, + 9.6879e-01, 9.9632e-01, 2.4221e-01, 4.5972e-01, + 9.6493e-01, 1.4195e-01, 3.1842e-01, 1.4820e-01, + 1.9437e-01, 9.3361e-01, 7.2369e-01, 8.1980e-02, + 3.0563e-01, 8.3087e-01, 9.6210e-01, 3.8570e-01, + 3.8999e-01, 2.6426e-01, 3.8497e-01, 6.2371e-01, + 3.4436e-02, 5.7200e-01, 9.1103e-03, 3.4663e-01, + 7.5139e-01, 8.0402e-02, 1.4851e-01, 5.6103e-01, + 1.4194e-01, 3.7747e-01, 1.2583e-01, 3.4934e-01, + 7.8245e-01, 4.4202e-02, 2.3814e-02, 5.0043e-01, + 4.0549e-02, 9.2123e-01, 6.4456e-01, 5.5668e-01, + 2.2637e-01, 4.5866e-01, 6.5612e-01, 4.8747e-01, + 3.5998e-01, 9.6531e-01, 2.2855e-01, 8.7165e-01, + 2.1451e-01, 6.0932e-01, 1.3079e-01, 8.7426e-01, + 8.1725e-01, 4.2196e-01, 7.1141e-01, 4.3740e-01, + 5.3629e-01, 9.9116e-01, 2.5936e-01, 6.2397e-02, + 4.8627e-01, 7.9972e-01, 5.9392e-01, 1.2708e-01, + 7.6474e-01, 7.6568e-01, 7.2778e-01, 7.1216e-01, + 7.5908e-01, 6.9230e-01, 8.6691e-01, 1.5589e-01, + 9.6845e-01, 3.9806e-01, 5.2987e-01, 4.8572e-02, + 6.1088e-01, 8.4502e-01, 8.7595e-01, 7.9081e-01, + 2.2328e-01, 7.3453e-02, 2.9361e-01, 7.2018e-01, + 8.6362e-01, 5.8239e-01, 9.8886e-01, 9.8207e-01, + 4.9789e-02, 9.0883e-01, 4.6588e-01, 7.8227e-01, + 5.6496e-01, 4.4322e-01, 6.0243e-01, 7.9633e-01, + 1.5407e-01, 4.7116e-01, 4.6135e-02, 7.9439e-01, + 9.4742e-01, 3.0537e-01, 7.4954e-01, 6.1486e-01, + 4.0816e-01, 5.7772e-02, 4.8874e-02, 6.5603e-01, + 6.5378e-01, 1.4657e-01, 7.5423e-01, 2.1719e-01, + 6.1274e-01, 7.2810e-01, 5.9787e-01, 8.5114e-01, + 8.4574e-01, 4.4836e-01, 1.1812e-01, 1.6302e-02, + 2.8183e-01, 2.8454e-01, 7.8868e-01, 5.6188e-01, + 3.7106e-01, 9.5184e-01, 4.6233e-01, 3.0721e-01, + 4.6652e-01, 4.5518e-01, 3.9625e-01, 3.1047e-01, + 6.1431e-01, 7.7994e-03, 5.0223e-01, 8.8821e-01, + 6.2450e-01, 1.5123e-02, 1.6467e-01, 3.7877e-02, + 3.3093e-01, 4.8842e-01, 1.4406e-01, 3.7447e-01, + 4.1940e-01, 4.0252e-01, 1.5874e-01, 9.2837e-01, + 6.5660e-02, 3.7370e-01, 1.6842e-01, 3.0803e-01, + 6.5644e-01, 7.7941e-01, 7.2378e-01, 5.4227e-01, + 1.4685e-01, 2.4221e-01, 2.9616e-01, 1.1769e-01, + 7.8889e-01, 4.7046e-01, 5.6810e-01, 9.0070e-02, + 9.6025e-01, 8.4650e-01, 5.7994e-01, 9.2239e-01, + 5.9694e-01, 7.0729e-01, 9.6887e-01, 6.6587e-01, + 4.3626e-01, 8.5358e-01, 3.8961e-01, 7.2852e-01, + 4.6255e-01, 8.4622e-01, 7.8245e-01, 6.5110e-01, + 6.2516e-01, 5.3600e-01, 4.1244e-01, 2.4712e-01, + 9.1064e-01, 7.1727e-01, 8.3599e-03, 4.9574e-01, + 6.2521e-02, 6.8479e-01, 5.4408e-02, 7.0926e-01, + 
9.2773e-01, 4.3862e-01, 1.9592e-02, 8.3484e-01, + 6.2885e-01, 4.3876e-01, 7.8552e-01, 1.0172e-01, + 6.3309e-01, 3.7608e-01, 2.7818e-01, 4.2463e-01, + 5.4210e-01, 8.8253e-01, 7.3945e-02, 9.1054e-03, + 5.1831e-01, 5.1131e-01, 5.9108e-02, 5.0222e-01, + 4.4171e-02, 7.9161e-01, 4.9498e-01, 8.3120e-01, + 8.8602e-01, 1.6136e-01, 6.8591e-01, 5.5565e-01, + 3.7967e-01, 1.6486e-02, 8.2781e-01, 5.2694e-01, + 8.7141e-01, 9.6245e-01, 4.4308e-01, 4.7436e-01, + 9.5535e-01, 4.0917e-01, 6.3676e-01, 2.5075e-01, + 6.4306e-01, 1.4736e-01, 9.1328e-01, 9.5108e-01, + 5.5093e-01, 1.0660e-01, 1.0531e-01, 8.3265e-01, + 5.7798e-01, 9.3816e-01, 7.4004e-01, 2.6624e-01, + 4.6151e-01, 7.3546e-01, 2.5614e-01, 8.3512e-01, + 8.1670e-01, 5.0614e-01, 9.7279e-01, 3.1225e-01, + 7.8442e-01, 7.3051e-01, 5.8183e-01, 3.3468e-01, + 8.6277e-01, 1.6129e-01, 3.9534e-01, 5.0412e-01, + 9.3144e-02, 6.8435e-01, 1.5667e-01, 4.1079e-01, + 2.4207e-01, 4.8373e-01, 4.2507e-01, 6.1906e-01, + 2.8594e-01, 9.8090e-01, 8.5909e-01, 7.8064e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8755, 0.4171, 0.4007, ..., 0.3638, 0.0663, 0.0983]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 2.430596351623535 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '223318', '-ss', '10000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.272708654403687} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([1701, 5200, 151, 7106, 6780, 2055, 1375, 8242, 376, + 4556, 1864, 1124, 4828, 55, 5866, 9752, 5516, 7381, + 1233, 540, 636, 7075, 5707, 6021, 4061, 4372, 9110, + 8043, 6636, 2721, 2135, 174, 4881, 658, 3469, 6307, + 6633, 7241, 1540, 9161, 3585, 3028, 1940, 352, 9272, + 3623, 3390, 9183, 6840, 1293, 3398, 5823, 704, 5011, + 1738, 9159, 5732, 4747, 2802, 1249, 5171, 5364, 7066, + 4818, 4723, 9883, 1229, 9311, 6671, 7348, 8536, 5413, + 6, 4030, 8060, 4147, 5081, 6166, 1683, 8447, 8806, + 8357, 9243, 6546, 2055, 6450, 9246, 3143, 856, 7551, + 1646, 9323, 5361, 3834, 1319, 9746, 7840, 8141, 2671, + 5949, 5626, 8150, 3730, 796, 1535, 4730, 6915, 6972, + 2766, 9860, 7270, 8106, 2537, 797, 5364, 6621, 5648, + 8548, 5257, 7801, 1229, 8693, 4025, 9202, 2593, 7960, + 5369, 7148, 7940, 9714, 2580, 5784, 7297, 5127, 6802, + 622, 3354, 4226, 9817, 7738, 681, 3311, 7124, 4060, + 4152, 2763, 5861, 4574, 2518, 5568, 7103, 2604, 1355, + 4689, 7527, 1325, 8159, 152, 4720, 4594, 5586, 2562, + 6272, 3785, 7905, 2125, 7915, 9420, 6886, 4693, 2166, + 91, 6490, 4806, 3118, 2672, 66, 1234, 1981, 8691, + 253, 7553, 230, 3625, 559, 8846, 3213, 4483, 4148, + 6841, 4073, 5125, 2223, 9103, 1924, 720, 5412, 4803, + 3085, 8563, 9540, 9315, 3139, 4726, 2626, 5785, 2038, + 6901, 621, 2004, 8854, 1914, 4754, 192, 7935, 5989, + 7322, 8869, 9629, 1813, 2620, 6175, 6750, 6796, 7909, + 5597, 7944, 723, 3767, 4917, 8478, 1868, 384, 772, + 4519, 5478, 4767, 9866, 5553, 2050, 2777, 5275, 9876, + 2088, 6106, 2118, 5471, 7481, 4915, 1002, 5183, 2482, + 7207, 9343, 7965, 4201, 3702, 8145, 2757, 9560, 9718, + 7886, 52, 9777, 985, 9786, 8980, 8450, 853, 4486, + 1238, 3703, 3895, 824, 2368, 4761, 7044, 6976, 7259, + 3408, 4627, 1088, 2233, 2531, 9316, 3007, 7830, 1611, + 2937, 2309, 6160, 3027, 5106, 8960, 6283, 8248, 1806, + 338, 2652, 6414, 8464, 1253, 2007, 5826, 1700, 5189, + 3184, 9274, 7910, 2408, 2889, 1106, 4992, 8140, 123, + 5004, 6911, 4693, 4844, 2907, 9739, 1307, 4690, 6578, + 2490, 1409, 2982, 4236, 7452, 258, 9946, 1594, 9084, + 7506, 349, 1142, 8943, 4241, 4711, 6656, 2419, 7301, + 2041, 4288, 2852, 4987, 6067, 2342, 5031, 1240, 3326, + 809, 1883, 7214, 5948, 4514, 7898, 6737, 2244, 7695, + 5774, 7074, 4380, 338, 3037, 5989, 4689, 2151, 4935, + 8239, 8199, 8742, 8379, 8464, 7648, 7511, 9789, 542, + 2321, 4702, 7444, 4179, 5713, 3500, 440, 3372, 1553, + 7066, 237, 6333, 4983, 9001, 6220, 9958, 7810, 6862, + 5158, 12, 3298, 176, 3276, 228, 4205, 1817, 6762, + 3628, 9393, 8101, 3855, 4143, 1454, 9776, 905, 9823, + 5862, 2192, 4474, 7500, 7608, 7951, 8118, 1256, 8656, + 8920, 6853, 1192, 8163, 2750, 1899, 3066, 8521, 9238, + 1236, 5960, 5040, 5370, 4579, 9179, 1422, 3868, 5989, + 5883, 5467, 8069, 2067, 4039, 8465, 7018, 1691, 4312, + 4117, 4240, 6107, 9816, 943, 9542, 8621, 1040, 2403, + 8739, 7827, 6446, 9590, 3659, 3297, 3557, 6979, 1879, + 3579, 2959, 2874, 7456, 8140, 5191, 5810, 753, 5881, + 2926, 3160, 7447, 9128, 3369, 9643, 866, 1094, 7856, + 2939, 2771, 903, 4019, 1481, 8141, 3059, 5105, 54, + 5797, 8704, 6592, 9683, 238, 941, 9073, 1570, 8149, + 620, 9506, 8170, 935, 287, 4429, 4319, 2568, 1551, + 2824, 4536, 8659, 8268, 4581, 7494, 1661, 2098, 5451, + 7560, 9224, 2281, 7897, 5655, 6711, 6313, 5649, 3624, + 1113, 3337, 9567, 6293, 4554, 4615, 390, 6190, 9206, + 2980, 9994, 298, 8200, 7196, 3147, 82, 7032, 1909, + 6954, 4345, 2438, 2065, 2556, 5426, 8978, 7465, 5220, + 5604, 
788, 8414, 2663, 5742, 4974, 462, 1147, 9967, + 2553, 6062, 5166, 3507, 4599, 8067, 9145, 6263, 358, + 6192, 9886, 1258, 1355, 8678, 8303, 9017, 8920, 3248, + 9675, 9017, 2019, 2107, 8167, 8733, 3179, 4669, 528, + 6951, 1546, 9515, 8164, 7191, 1458, 7943, 2239, 8218, + 160, 1727, 1954, 2526, 1135, 2201, 9078, 3991, 6935, + 519, 208, 8883, 3452, 5310, 4150, 3705, 3822, 7401, + 8546, 6404, 3490, 1861, 743, 6663, 7115, 426, 6265, + 5338, 4421, 8479, 4579, 7854, 952, 4160, 5970, 2820, + 6175, 9201, 3918, 8342, 1561, 8841, 9347, 6312, 8811, + 802, 3751, 1504, 9841, 3459, 4709, 7585, 9176, 402, + 8777, 5093, 5366, 1341, 94, 1213, 3151, 5186, 6190, + 9472, 8769, 8920, 5876, 9844, 8088, 3443, 3158, 1522, + 2239, 5911, 1843, 878, 3231, 9219, 9744, 5314, 5187, + 7781, 4935, 1116, 2698, 2909, 7609, 34, 6226, 271, + 7563, 6665, 2928, 7844, 7394, 1971, 1334, 8649, 7028, + 9162, 7777, 8221, 7806, 7876, 9654, 9678, 288, 6495, + 8153, 8652, 8277, 3375, 9963, 69, 2407, 1311, 3891, + 6745, 2980, 7523, 832, 7750, 7534, 9660, 5289, 3385, + 484, 2425, 9377, 7231, 951, 3144, 5429, 1186, 4354, + 946, 3543, 2792, 7235, 7094, 6477, 4712, 3170, 5325, + 6937, 7529, 2618, 9824, 1671, 9430, 3905, 2384, 1668, + 2134, 5903, 8606, 6210, 736, 9653, 2364, 46, 8839, + 3680, 8561, 7610, 2537, 5694, 4491, 3070, 102, 7674, + 94, 8566, 9307, 6073, 5078, 5944, 6543, 2837, 5013, + 5856, 26, 4035, 1232, 2905, 644, 1632, 730, 2714, + 5211, 7999, 7356, 9942, 3050, 3488, 2030, 3261, 990, + 2503, 9947, 5773, 2097, 8736, 2427, 814, 8208, 7952, + 1425, 763, 3478, 2559, 2030, 7355, 2403, 8133, 1521, + 2534, 4093, 6128, 650, 4011, 373, 5526, 1945, 3671, + 9189, 5214, 508, 4155, 9541, 9972, 2781, 9076, 2880, + 3611, 4734, 9885, 7739, 6719, 4377, 7240, 3082, 5164, + 2874, 6457, 2759, 7393, 5423, 443, 9684, 5344, 4129, + 7342, 9307, 5758, 5216, 2857, 6451, 1753, 5670, 3430, + 7468, 6794, 5048, 2729, 9118, 9764, 1919, 5665, 767, + 1418, 1135, 8997, 7779, 2429, 9649, 5229, 7038, 811, + 1832, 5395, 8070, 6294, 826, 5781, 6439, 3995, 1648, + 5102, 6547, 1824, 6293, 2074, 1908, 2741, 7323, 5768, + 7042, 3737, 1819, 2188, 1660, 4954, 7523, 3959, 9961, + 6052, 7983, 9534, 9061, 2300, 6304, 2454, 8016, 2821, + 1085, 1757, 9862, 5100, 1573, 6827, 5866, 5759, 3861, + 1208, 5621, 2294, 4759, 9995, 7707, 978, 9925, 5367, + 9804, 9907, 9336, 1138, 7783, 5709, 9686, 6759, 5257, + 3612, 9813, 741, 965, 354, 5561, 3584, 1206, 483, + 5051, 7676, 6562, 4835, 7136, 2052, 4373, 7923, 9493, + 3447, 2194, 3934, 1186, 6674, 4835, 7761, 3711, 2508, + 8473, 8257, 9236, 9392, 9771, 837, 8611, 4349, 8338, + 9309, 8829, 4296, 7751, 6181, 8890, 2833, 7799, 5871, + 3122, 6337, 1417, 1292, 5004, 6019, 1196, 5382, 6389, + 3275]), + values=tensor([0.2237, 0.0937, 0.9630, 0.1477, 0.5729, 0.1998, 0.9498, + 0.4379, 0.8034, 0.0235, 0.9902, 0.3432, 0.0700, 0.1825, + 0.5442, 0.2482, 0.6281, 0.6943, 0.3994, 0.1696, 0.8276, + 0.4876, 0.7936, 0.7452, 0.0998, 0.2679, 0.8349, 0.3267, + 0.0044, 0.0261, 0.7294, 0.7269, 0.9705, 0.1244, 0.6999, + 0.0989, 0.9010, 0.3293, 0.8054, 0.8993, 0.1777, 0.0858, + 0.0842, 0.5815, 0.0073, 0.8773, 0.5984, 0.9584, 0.9585, + 0.9084, 0.8010, 0.3890, 0.7561, 0.2065, 0.0869, 0.1566, + 0.5409, 0.0893, 0.4037, 0.9105, 0.3894, 0.1083, 0.0753, + 0.9158, 0.4006, 0.0324, 0.5094, 0.9774, 0.7974, 0.7971, + 0.4057, 0.2885, 0.0520, 0.2695, 0.5617, 0.7978, 0.7923, + 0.0655, 0.7159, 0.9957, 0.9205, 0.7614, 0.5249, 0.9947, + 0.7384, 0.2115, 0.5030, 0.2376, 0.3351, 0.0950, 0.7426, + 0.9408, 0.1119, 0.8775, 0.3131, 0.2215, 0.5071, 0.5270, + 0.2205, 0.8988, 0.4070, 
0.6511, 0.6751, 0.8449, 0.8296, + 0.8009, 0.0238, 0.1850, 0.1927, 0.3420, 0.8088, 0.0148, + 0.6612, 0.4105, 0.1426, 0.8882, 0.2834, 0.4158, 0.8305, + 0.3284, 0.6229, 0.4603, 0.3207, 0.5603, 0.9335, 0.2077, + 0.3713, 0.2711, 0.3356, 0.1915, 0.1831, 0.6739, 0.8426, + 0.8846, 0.4385, 0.3712, 0.1660, 0.5165, 0.4995, 0.8937, + 0.3826, 0.4064, 0.9294, 0.5277, 0.4615, 0.3664, 0.3226, + 0.2012, 0.4698, 0.4989, 0.3815, 0.0360, 0.2844, 0.1905, + 0.6394, 0.1553, 0.2179, 0.9991, 0.4322, 0.8990, 0.8663, + 0.5714, 0.2616, 0.9118, 0.1532, 0.0794, 0.4487, 0.6562, + 0.8275, 0.3748, 0.9786, 0.2334, 0.4221, 0.0891, 0.4672, + 0.7959, 0.9325, 0.2602, 0.6239, 0.2789, 0.5279, 0.9698, + 0.2620, 0.9710, 0.0659, 0.7009, 0.4749, 0.0914, 0.8769, + 0.1261, 0.4952, 0.9824, 0.3145, 0.3912, 0.7378, 0.2369, + 0.7117, 0.3375, 0.4187, 0.9390, 0.2536, 0.2477, 0.8490, + 0.6862, 0.8212, 0.4774, 0.7214, 0.2067, 0.2441, 0.9886, + 0.6487, 0.5109, 0.5780, 0.0528, 0.8446, 0.2500, 0.0099, + 0.3498, 0.0603, 0.6328, 0.1396, 0.3979, 0.0437, 0.4013, + 0.4045, 0.6436, 0.0264, 0.5833, 0.5292, 0.4239, 0.4451, + 0.5547, 0.9082, 0.0133, 0.9732, 0.3574, 0.5837, 0.2932, + 0.7709, 0.0813, 0.8692, 0.3324, 0.5254, 0.2501, 0.7664, + 0.8569, 0.9977, 0.7986, 0.2734, 0.2131, 0.2578, 0.0864, + 0.4157, 0.7639, 0.4495, 0.8317, 0.0808, 0.3465, 0.3449, + 0.1795, 0.3111, 0.3583, 0.5361, 0.7493, 0.6307, 0.5749, + 0.7117, 0.5264, 0.3319, 0.4855, 0.7740, 0.0389, 0.7199, + 0.1757, 0.7320, 0.3881, 0.8473, 0.7211, 0.0089, 0.8715, + 0.8121, 0.6785, 0.3229, 0.7947, 0.1252, 0.4363, 0.7100, + 0.8129, 0.7275, 0.7882, 0.1837, 0.4836, 0.9665, 0.3166, + 0.8373, 0.4110, 0.9919, 0.7071, 0.1476, 0.4264, 0.6374, + 0.1843, 0.0355, 0.4476, 0.5128, 0.6553, 0.9636, 0.4657, + 0.3003, 0.5021, 0.2604, 0.1725, 0.4826, 0.8980, 0.9472, + 0.5359, 0.5329, 0.7276, 0.9162, 0.7128, 0.3214, 0.4430, + 0.0268, 0.4336, 0.6726, 0.3183, 0.6498, 0.8979, 0.4377, + 0.4508, 0.7071, 0.7374, 0.6664, 0.3207, 0.9118, 0.0607, + 0.6305, 0.1649, 0.7631, 0.3880, 0.1389, 0.3349, 0.7098, + 0.2548, 0.1348, 0.7807, 0.3721, 0.5610, 0.2096, 0.6681, + 0.4693, 0.1901, 0.1930, 0.1622, 0.2963, 0.0603, 0.2890, + 0.1230, 0.6505, 0.5223, 0.4052, 0.7992, 0.2060, 0.6551, + 0.9600, 0.8356, 0.2101, 0.1915, 0.5095, 0.9710, 0.1888, + 0.1645, 0.3590, 0.3490, 0.1788, 0.3894, 0.3501, 0.3981, + 0.8373, 0.9445, 0.9392, 0.0706, 0.5984, 0.1523, 0.8809, + 0.3914, 0.7705, 0.3109, 0.6448, 0.4013, 0.0518, 0.5764, + 0.9274, 0.9661, 0.6301, 0.2194, 0.3082, 0.6486, 0.9235, + 0.3097, 0.0386, 0.1441, 0.2865, 0.6143, 0.4563, 0.3660, + 0.2231, 0.3622, 0.4918, 0.2906, 0.1025, 0.3484, 0.1783, + 0.0793, 0.4740, 0.8350, 0.2433, 0.0513, 0.4263, 0.6836, + 0.8954, 0.2130, 0.9199, 0.7013, 0.7526, 0.7990, 0.3316, + 0.3824, 0.6560, 0.0665, 0.3473, 0.7352, 0.3958, 0.6717, + 0.1787, 0.9112, 0.3996, 0.1342, 0.8973, 0.7673, 0.3783, + 0.7610, 0.5775, 0.8738, 0.8225, 0.6836, 0.9352, 0.4659, + 0.0218, 0.6016, 0.7349, 0.5865, 0.4088, 0.3716, 0.3419, + 0.4366, 0.2265, 0.1625, 0.3329, 0.2090, 0.9832, 0.8807, + 0.9078, 0.0817, 0.5180, 0.2440, 0.2482, 0.7292, 0.3941, + 0.8401, 0.0141, 0.7489, 0.3723, 0.0350, 0.2835, 0.3314, + 0.1685, 0.1121, 0.6204, 0.8138, 0.4395, 0.6725, 0.3304, + 0.8152, 0.9086, 0.9942, 0.4917, 0.4382, 0.2414, 0.3708, + 0.4786, 0.6864, 0.9521, 0.6060, 0.5707, 0.3741, 0.7770, + 0.9669, 0.3737, 0.9687, 0.3704, 0.3850, 0.7958, 0.7808, + 0.1612, 0.9230, 0.8525, 0.6799, 0.0805, 0.9623, 0.0798, + 0.1563, 0.7621, 0.9897, 0.4134, 0.9079, 0.9743, 0.2876, + 0.0876, 0.7910, 0.9308, 0.0610, 0.2187, 0.5615, 0.3229, + 0.9375, 0.5980, 0.9792, 0.8630, 0.9130, 
0.2219, 0.2254, + 0.9540, 0.2937, 0.2448, 0.2621, 0.5306, 0.1461, 0.7337, + 0.4278, 0.4789, 0.9525, 0.8487, 0.1678, 0.8083, 0.8602, + 0.5427, 0.9150, 0.6338, 0.0596, 0.3932, 0.3519, 0.0682, + 0.2049, 0.6992, 0.1087, 0.4717, 0.7743, 0.1069, 0.9390, + 0.7215, 0.0549, 0.2173, 0.5676, 0.3363, 0.3647, 0.6589, + 0.8482, 0.0780, 0.4024, 0.0875, 0.0825, 0.2565, 0.7274, + 0.2689, 0.2341, 0.8282, 0.6484, 0.5740, 0.2458, 0.3418, + 0.6679, 0.0928, 0.8043, 0.0375, 0.0455, 0.1021, 0.0727, + 0.6034, 0.7073, 0.9501, 0.1827, 0.6546, 0.0622, 0.8697, + 0.6037, 0.6145, 0.7007, 0.9749, 0.2250, 0.9919, 0.6048, + 0.0930, 0.2305, 0.6915, 0.5382, 0.1474, 0.1571, 0.4897, + 0.4500, 0.3177, 0.8426, 0.5450, 0.6520, 0.4501, 0.7247, + 0.0507, 0.2850, 0.0212, 0.9359, 0.6125, 0.4131, 0.8209, + 0.5987, 0.8486, 0.6476, 0.7153, 0.8008, 0.6473, 0.9929, + 0.7390, 0.7887, 0.4013, 0.3755, 0.5104, 0.8562, 0.2127, + 0.2174, 0.7440, 0.3993, 0.8984, 0.4550, 0.9304, 0.9089, + 0.9769, 0.3961, 0.4588, 0.7478, 0.4077, 0.7220, 0.1553, + 0.0217, 0.3009, 0.6585, 0.6058, 0.6903, 0.5106, 0.3638, + 0.8147, 0.1664, 0.2008, 0.2597, 0.0618, 0.8442, 0.8484, + 0.5244, 0.1691, 0.8820, 0.2008, 0.5312, 0.1231, 0.2789, + 0.2030, 0.9130, 0.3220, 0.9544, 0.0942, 0.3181, 0.3072, + 0.8830, 0.5897, 0.4002, 0.4521, 0.5237, 0.1548, 0.2054, + 0.1146, 0.0603, 0.1840, 0.5799, 0.0520, 0.7941, 0.5931, + 0.8282, 0.2801, 0.8827, 0.2904, 0.6095, 0.1708, 0.2367, + 0.4340, 0.5936, 0.4575, 0.2879, 0.8225, 0.0562, 0.7618, + 0.0424, 0.3732, 0.7233, 0.7772, 0.4983, 0.8623, 0.4285, + 0.4221, 0.1088, 0.3304, 0.1568, 0.5143, 0.6215, 0.5503, + 0.7690, 0.3756, 0.5348, 0.7357, 0.8109, 0.3130, 0.0158, + 0.0023, 0.6162, 0.8535, 0.2750, 0.7217, 0.5469, 0.3265, + 0.1222, 0.4043, 0.3551, 0.1335, 0.2431, 0.0804, 0.8806, + 0.9161, 0.7825, 0.7689, 0.7354, 0.8643, 0.4190, 0.5343, + 0.3840, 0.7807, 0.6542, 0.2807, 0.0621, 0.5952, 0.8649, + 0.8056, 0.2886, 0.8492, 0.0497, 0.6903, 0.0986, 0.3481, + 0.9830, 0.1513, 0.7150, 0.5003, 0.6462, 0.3091, 0.5104, + 0.1090, 0.4875, 0.2487, 0.1973, 0.1005, 0.5965, 0.6503, + 0.6776, 0.1194, 0.6968, 0.4165, 0.3997, 0.2356, 0.9948, + 0.2469, 0.5798, 0.3444, 0.2015, 0.9011, 0.6742, 0.3975, + 0.8612, 0.7831, 0.0862, 0.4379, 0.0809, 0.7931, 0.0607, + 0.3830, 0.8447, 0.6462, 0.9840, 0.5433, 0.4101, 0.8260, + 0.5601, 0.6978, 0.1804, 0.4108, 0.0158, 0.7436, 0.8390, + 0.9475, 0.4933, 0.1487, 0.4644, 0.5877, 0.7798, 0.6315, + 0.5300, 0.7588, 0.5234, 0.0393, 0.3814, 0.5279, 0.6528, + 0.9869, 0.9540, 0.6270, 0.7013, 0.4305, 0.2085, 0.9008, + 0.4866, 0.4271, 0.1271, 0.3216, 0.3653, 0.9917, 0.2290, + 0.3330, 0.6020, 0.4543, 0.6559, 0.8582, 0.2937, 0.1913, + 0.3080, 0.3895, 0.3059, 0.0633, 0.6122, 0.1175, 0.6572, + 0.2524, 0.7704, 0.8210, 0.4794, 0.7538, 0.4031, 0.0275, + 0.5311, 0.0815, 0.4034, 0.6148, 0.1185, 0.7439, 0.4016, + 0.5608, 0.0706, 0.4835, 0.2505, 0.9363, 0.3741, 0.3124, + 0.4634, 0.9844, 0.4189, 0.3458, 0.6504, 0.0883, 0.9154, + 0.1987, 0.8102, 0.2403, 0.6352, 0.9134, 0.0740, 0.6217, + 0.7461, 0.9307, 0.2340, 0.9526, 0.2394, 0.1661, 0.5903, + 0.3531, 0.3579, 0.3335, 0.4941, 0.0937, 0.0190, 0.7400, + 0.6048, 0.0736, 0.8480, 0.7056, 0.9624, 0.8952, 0.2590, + 0.2918, 0.9369, 0.3316, 0.8127, 0.4530, 0.8583, 0.8824, + 0.6823, 0.0554, 0.6007, 0.8647, 0.0307, 0.2993, 0.2476, + 0.7318, 0.8917, 0.9643, 0.6157, 0.2184, 0.8408, 0.3345, + 0.0712, 0.8159, 0.2459, 0.0991, 0.7444, 0.2222, 0.0014, + 0.1305, 0.8914, 0.0089, 0.5321, 0.7917, 0.7163, 0.9580, + 0.3624, 0.0142, 0.8937, 0.5115, 0.5049, 0.8434, 0.7234, + 0.7161, 0.2634, 0.8592, 0.3961, 0.5586, 0.2620, 0.0375, 
+ 0.1665, 0.2915, 0.9139, 0.7009, 0.5095, 0.4519, 0.1213, + 0.3561, 0.0066, 0.4379, 0.3522, 0.6225, 0.6900, 0.8216, + 0.8841, 0.6553, 0.8193, 0.7688, 0.5104, 0.3926, 0.7388, + 0.4735, 0.1897, 0.7788, 0.8825, 0.9103, 0.2988, 0.1239, + 0.1792, 0.1266, 0.4818, 0.8893, 0.6604, 0.1883, 0.9700, + 0.5469, 0.0958, 0.2762, 0.2054, 0.3215, 0.7664]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.5980, 0.9421, 0.9493, ..., 0.6518, 0.5202, 0.4457]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.272708654403687 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([1701, 5200, 151, 7106, 6780, 2055, 1375, 8242, 376, + 4556, 1864, 1124, 4828, 55, 5866, 9752, 5516, 7381, + 1233, 540, 636, 7075, 5707, 6021, 4061, 4372, 9110, + 8043, 6636, 2721, 2135, 174, 4881, 658, 3469, 6307, + 6633, 7241, 1540, 9161, 3585, 3028, 1940, 352, 9272, + 3623, 3390, 9183, 6840, 1293, 3398, 5823, 704, 5011, + 1738, 9159, 5732, 4747, 2802, 1249, 5171, 5364, 7066, + 4818, 4723, 9883, 1229, 9311, 6671, 7348, 8536, 5413, + 6, 4030, 8060, 4147, 5081, 6166, 1683, 8447, 8806, + 8357, 9243, 6546, 2055, 6450, 9246, 3143, 856, 7551, + 1646, 9323, 5361, 3834, 1319, 9746, 7840, 8141, 2671, + 5949, 5626, 8150, 3730, 796, 1535, 4730, 6915, 6972, + 2766, 9860, 7270, 8106, 2537, 797, 5364, 6621, 5648, + 8548, 5257, 7801, 1229, 8693, 4025, 9202, 2593, 7960, + 5369, 7148, 7940, 9714, 2580, 5784, 7297, 5127, 6802, + 622, 3354, 4226, 9817, 7738, 681, 3311, 7124, 4060, + 4152, 2763, 5861, 4574, 2518, 5568, 7103, 2604, 1355, + 4689, 7527, 1325, 8159, 152, 4720, 4594, 5586, 2562, + 6272, 3785, 7905, 2125, 7915, 9420, 6886, 4693, 2166, + 91, 6490, 4806, 3118, 2672, 66, 1234, 1981, 8691, + 253, 7553, 230, 3625, 559, 8846, 3213, 4483, 4148, + 6841, 4073, 5125, 2223, 9103, 1924, 720, 5412, 4803, + 3085, 8563, 9540, 9315, 3139, 4726, 2626, 5785, 2038, + 6901, 621, 2004, 8854, 1914, 4754, 192, 7935, 5989, + 7322, 8869, 9629, 1813, 2620, 6175, 6750, 6796, 7909, + 5597, 7944, 723, 3767, 4917, 8478, 1868, 384, 772, + 4519, 5478, 4767, 9866, 5553, 2050, 2777, 5275, 9876, + 2088, 6106, 2118, 5471, 7481, 4915, 1002, 5183, 2482, + 7207, 9343, 7965, 4201, 3702, 8145, 2757, 9560, 9718, + 7886, 52, 9777, 985, 9786, 8980, 8450, 853, 4486, + 1238, 3703, 3895, 824, 2368, 4761, 7044, 6976, 7259, + 3408, 4627, 1088, 2233, 2531, 9316, 3007, 7830, 1611, + 2937, 2309, 6160, 3027, 5106, 8960, 6283, 8248, 1806, + 338, 2652, 6414, 8464, 1253, 2007, 5826, 1700, 5189, + 3184, 9274, 7910, 2408, 2889, 1106, 4992, 8140, 123, + 5004, 6911, 4693, 4844, 2907, 9739, 1307, 4690, 6578, + 2490, 1409, 2982, 4236, 7452, 258, 9946, 1594, 9084, + 7506, 349, 1142, 8943, 4241, 4711, 6656, 2419, 7301, + 2041, 4288, 2852, 4987, 6067, 2342, 5031, 1240, 3326, + 809, 1883, 7214, 5948, 4514, 7898, 6737, 2244, 7695, + 5774, 7074, 4380, 338, 3037, 5989, 4689, 2151, 4935, + 8239, 8199, 8742, 8379, 8464, 7648, 7511, 9789, 542, + 2321, 4702, 7444, 4179, 5713, 3500, 440, 3372, 1553, + 7066, 237, 6333, 4983, 9001, 6220, 9958, 7810, 6862, + 5158, 12, 3298, 176, 3276, 228, 4205, 1817, 
6762, + 3628, 9393, 8101, 3855, 4143, 1454, 9776, 905, 9823, + 5862, 2192, 4474, 7500, 7608, 7951, 8118, 1256, 8656, + 8920, 6853, 1192, 8163, 2750, 1899, 3066, 8521, 9238, + 1236, 5960, 5040, 5370, 4579, 9179, 1422, 3868, 5989, + 5883, 5467, 8069, 2067, 4039, 8465, 7018, 1691, 4312, + 4117, 4240, 6107, 9816, 943, 9542, 8621, 1040, 2403, + 8739, 7827, 6446, 9590, 3659, 3297, 3557, 6979, 1879, + 3579, 2959, 2874, 7456, 8140, 5191, 5810, 753, 5881, + 2926, 3160, 7447, 9128, 3369, 9643, 866, 1094, 7856, + 2939, 2771, 903, 4019, 1481, 8141, 3059, 5105, 54, + 5797, 8704, 6592, 9683, 238, 941, 9073, 1570, 8149, + 620, 9506, 8170, 935, 287, 4429, 4319, 2568, 1551, + 2824, 4536, 8659, 8268, 4581, 7494, 1661, 2098, 5451, + 7560, 9224, 2281, 7897, 5655, 6711, 6313, 5649, 3624, + 1113, 3337, 9567, 6293, 4554, 4615, 390, 6190, 9206, + 2980, 9994, 298, 8200, 7196, 3147, 82, 7032, 1909, + 6954, 4345, 2438, 2065, 2556, 5426, 8978, 7465, 5220, + 5604, 788, 8414, 2663, 5742, 4974, 462, 1147, 9967, + 2553, 6062, 5166, 3507, 4599, 8067, 9145, 6263, 358, + 6192, 9886, 1258, 1355, 8678, 8303, 9017, 8920, 3248, + 9675, 9017, 2019, 2107, 8167, 8733, 3179, 4669, 528, + 6951, 1546, 9515, 8164, 7191, 1458, 7943, 2239, 8218, + 160, 1727, 1954, 2526, 1135, 2201, 9078, 3991, 6935, + 519, 208, 8883, 3452, 5310, 4150, 3705, 3822, 7401, + 8546, 6404, 3490, 1861, 743, 6663, 7115, 426, 6265, + 5338, 4421, 8479, 4579, 7854, 952, 4160, 5970, 2820, + 6175, 9201, 3918, 8342, 1561, 8841, 9347, 6312, 8811, + 802, 3751, 1504, 9841, 3459, 4709, 7585, 9176, 402, + 8777, 5093, 5366, 1341, 94, 1213, 3151, 5186, 6190, + 9472, 8769, 8920, 5876, 9844, 8088, 3443, 3158, 1522, + 2239, 5911, 1843, 878, 3231, 9219, 9744, 5314, 5187, + 7781, 4935, 1116, 2698, 2909, 7609, 34, 6226, 271, + 7563, 6665, 2928, 7844, 7394, 1971, 1334, 8649, 7028, + 9162, 7777, 8221, 7806, 7876, 9654, 9678, 288, 6495, + 8153, 8652, 8277, 3375, 9963, 69, 2407, 1311, 3891, + 6745, 2980, 7523, 832, 7750, 7534, 9660, 5289, 3385, + 484, 2425, 9377, 7231, 951, 3144, 5429, 1186, 4354, + 946, 3543, 2792, 7235, 7094, 6477, 4712, 3170, 5325, + 6937, 7529, 2618, 9824, 1671, 9430, 3905, 2384, 1668, + 2134, 5903, 8606, 6210, 736, 9653, 2364, 46, 8839, + 3680, 8561, 7610, 2537, 5694, 4491, 3070, 102, 7674, + 94, 8566, 9307, 6073, 5078, 5944, 6543, 2837, 5013, + 5856, 26, 4035, 1232, 2905, 644, 1632, 730, 2714, + 5211, 7999, 7356, 9942, 3050, 3488, 2030, 3261, 990, + 2503, 9947, 5773, 2097, 8736, 2427, 814, 8208, 7952, + 1425, 763, 3478, 2559, 2030, 7355, 2403, 8133, 1521, + 2534, 4093, 6128, 650, 4011, 373, 5526, 1945, 3671, + 9189, 5214, 508, 4155, 9541, 9972, 2781, 9076, 2880, + 3611, 4734, 9885, 7739, 6719, 4377, 7240, 3082, 5164, + 2874, 6457, 2759, 7393, 5423, 443, 9684, 5344, 4129, + 7342, 9307, 5758, 5216, 2857, 6451, 1753, 5670, 3430, + 7468, 6794, 5048, 2729, 9118, 9764, 1919, 5665, 767, + 1418, 1135, 8997, 7779, 2429, 9649, 5229, 7038, 811, + 1832, 5395, 8070, 6294, 826, 5781, 6439, 3995, 1648, + 5102, 6547, 1824, 6293, 2074, 1908, 2741, 7323, 5768, + 7042, 3737, 1819, 2188, 1660, 4954, 7523, 3959, 9961, + 6052, 7983, 9534, 9061, 2300, 6304, 2454, 8016, 2821, + 1085, 1757, 9862, 5100, 1573, 6827, 5866, 5759, 3861, + 1208, 5621, 2294, 4759, 9995, 7707, 978, 9925, 5367, + 9804, 9907, 9336, 1138, 7783, 5709, 9686, 6759, 5257, + 3612, 9813, 741, 965, 354, 5561, 3584, 1206, 483, + 5051, 7676, 6562, 4835, 7136, 2052, 4373, 7923, 9493, + 3447, 2194, 3934, 1186, 6674, 4835, 7761, 3711, 2508, + 8473, 8257, 9236, 9392, 9771, 837, 8611, 4349, 8338, + 9309, 8829, 4296, 7751, 6181, 
8890, 2833, 7799, 5871, + 3122, 6337, 1417, 1292, 5004, 6019, 1196, 5382, 6389, + 3275]), + values=tensor([0.2237, 0.0937, 0.9630, 0.1477, 0.5729, 0.1998, 0.9498, + 0.4379, 0.8034, 0.0235, 0.9902, 0.3432, 0.0700, 0.1825, + 0.5442, 0.2482, 0.6281, 0.6943, 0.3994, 0.1696, 0.8276, + 0.4876, 0.7936, 0.7452, 0.0998, 0.2679, 0.8349, 0.3267, + 0.0044, 0.0261, 0.7294, 0.7269, 0.9705, 0.1244, 0.6999, + 0.0989, 0.9010, 0.3293, 0.8054, 0.8993, 0.1777, 0.0858, + 0.0842, 0.5815, 0.0073, 0.8773, 0.5984, 0.9584, 0.9585, + 0.9084, 0.8010, 0.3890, 0.7561, 0.2065, 0.0869, 0.1566, + 0.5409, 0.0893, 0.4037, 0.9105, 0.3894, 0.1083, 0.0753, + 0.9158, 0.4006, 0.0324, 0.5094, 0.9774, 0.7974, 0.7971, + 0.4057, 0.2885, 0.0520, 0.2695, 0.5617, 0.7978, 0.7923, + 0.0655, 0.7159, 0.9957, 0.9205, 0.7614, 0.5249, 0.9947, + 0.7384, 0.2115, 0.5030, 0.2376, 0.3351, 0.0950, 0.7426, + 0.9408, 0.1119, 0.8775, 0.3131, 0.2215, 0.5071, 0.5270, + 0.2205, 0.8988, 0.4070, 0.6511, 0.6751, 0.8449, 0.8296, + 0.8009, 0.0238, 0.1850, 0.1927, 0.3420, 0.8088, 0.0148, + 0.6612, 0.4105, 0.1426, 0.8882, 0.2834, 0.4158, 0.8305, + 0.3284, 0.6229, 0.4603, 0.3207, 0.5603, 0.9335, 0.2077, + 0.3713, 0.2711, 0.3356, 0.1915, 0.1831, 0.6739, 0.8426, + 0.8846, 0.4385, 0.3712, 0.1660, 0.5165, 0.4995, 0.8937, + 0.3826, 0.4064, 0.9294, 0.5277, 0.4615, 0.3664, 0.3226, + 0.2012, 0.4698, 0.4989, 0.3815, 0.0360, 0.2844, 0.1905, + 0.6394, 0.1553, 0.2179, 0.9991, 0.4322, 0.8990, 0.8663, + 0.5714, 0.2616, 0.9118, 0.1532, 0.0794, 0.4487, 0.6562, + 0.8275, 0.3748, 0.9786, 0.2334, 0.4221, 0.0891, 0.4672, + 0.7959, 0.9325, 0.2602, 0.6239, 0.2789, 0.5279, 0.9698, + 0.2620, 0.9710, 0.0659, 0.7009, 0.4749, 0.0914, 0.8769, + 0.1261, 0.4952, 0.9824, 0.3145, 0.3912, 0.7378, 0.2369, + 0.7117, 0.3375, 0.4187, 0.9390, 0.2536, 0.2477, 0.8490, + 0.6862, 0.8212, 0.4774, 0.7214, 0.2067, 0.2441, 0.9886, + 0.6487, 0.5109, 0.5780, 0.0528, 0.8446, 0.2500, 0.0099, + 0.3498, 0.0603, 0.6328, 0.1396, 0.3979, 0.0437, 0.4013, + 0.4045, 0.6436, 0.0264, 0.5833, 0.5292, 0.4239, 0.4451, + 0.5547, 0.9082, 0.0133, 0.9732, 0.3574, 0.5837, 0.2932, + 0.7709, 0.0813, 0.8692, 0.3324, 0.5254, 0.2501, 0.7664, + 0.8569, 0.9977, 0.7986, 0.2734, 0.2131, 0.2578, 0.0864, + 0.4157, 0.7639, 0.4495, 0.8317, 0.0808, 0.3465, 0.3449, + 0.1795, 0.3111, 0.3583, 0.5361, 0.7493, 0.6307, 0.5749, + 0.7117, 0.5264, 0.3319, 0.4855, 0.7740, 0.0389, 0.7199, + 0.1757, 0.7320, 0.3881, 0.8473, 0.7211, 0.0089, 0.8715, + 0.8121, 0.6785, 0.3229, 0.7947, 0.1252, 0.4363, 0.7100, + 0.8129, 0.7275, 0.7882, 0.1837, 0.4836, 0.9665, 0.3166, + 0.8373, 0.4110, 0.9919, 0.7071, 0.1476, 0.4264, 0.6374, + 0.1843, 0.0355, 0.4476, 0.5128, 0.6553, 0.9636, 0.4657, + 0.3003, 0.5021, 0.2604, 0.1725, 0.4826, 0.8980, 0.9472, + 0.5359, 0.5329, 0.7276, 0.9162, 0.7128, 0.3214, 0.4430, + 0.0268, 0.4336, 0.6726, 0.3183, 0.6498, 0.8979, 0.4377, + 0.4508, 0.7071, 0.7374, 0.6664, 0.3207, 0.9118, 0.0607, + 0.6305, 0.1649, 0.7631, 0.3880, 0.1389, 0.3349, 0.7098, + 0.2548, 0.1348, 0.7807, 0.3721, 0.5610, 0.2096, 0.6681, + 0.4693, 0.1901, 0.1930, 0.1622, 0.2963, 0.0603, 0.2890, + 0.1230, 0.6505, 0.5223, 0.4052, 0.7992, 0.2060, 0.6551, + 0.9600, 0.8356, 0.2101, 0.1915, 0.5095, 0.9710, 0.1888, + 0.1645, 0.3590, 0.3490, 0.1788, 0.3894, 0.3501, 0.3981, + 0.8373, 0.9445, 0.9392, 0.0706, 0.5984, 0.1523, 0.8809, + 0.3914, 0.7705, 0.3109, 0.6448, 0.4013, 0.0518, 0.5764, + 0.9274, 0.9661, 0.6301, 0.2194, 0.3082, 0.6486, 0.9235, + 0.3097, 0.0386, 0.1441, 0.2865, 0.6143, 0.4563, 0.3660, + 0.2231, 0.3622, 0.4918, 0.2906, 0.1025, 0.3484, 0.1783, + 0.0793, 0.4740, 0.8350, 
0.2433, 0.0513, 0.4263, 0.6836, + 0.8954, 0.2130, 0.9199, 0.7013, 0.7526, 0.7990, 0.3316, + 0.3824, 0.6560, 0.0665, 0.3473, 0.7352, 0.3958, 0.6717, + 0.1787, 0.9112, 0.3996, 0.1342, 0.8973, 0.7673, 0.3783, + 0.7610, 0.5775, 0.8738, 0.8225, 0.6836, 0.9352, 0.4659, + 0.0218, 0.6016, 0.7349, 0.5865, 0.4088, 0.3716, 0.3419, + 0.4366, 0.2265, 0.1625, 0.3329, 0.2090, 0.9832, 0.8807, + 0.9078, 0.0817, 0.5180, 0.2440, 0.2482, 0.7292, 0.3941, + 0.8401, 0.0141, 0.7489, 0.3723, 0.0350, 0.2835, 0.3314, + 0.1685, 0.1121, 0.6204, 0.8138, 0.4395, 0.6725, 0.3304, + 0.8152, 0.9086, 0.9942, 0.4917, 0.4382, 0.2414, 0.3708, + 0.4786, 0.6864, 0.9521, 0.6060, 0.5707, 0.3741, 0.7770, + 0.9669, 0.3737, 0.9687, 0.3704, 0.3850, 0.7958, 0.7808, + 0.1612, 0.9230, 0.8525, 0.6799, 0.0805, 0.9623, 0.0798, + 0.1563, 0.7621, 0.9897, 0.4134, 0.9079, 0.9743, 0.2876, + 0.0876, 0.7910, 0.9308, 0.0610, 0.2187, 0.5615, 0.3229, + 0.9375, 0.5980, 0.9792, 0.8630, 0.9130, 0.2219, 0.2254, + 0.9540, 0.2937, 0.2448, 0.2621, 0.5306, 0.1461, 0.7337, + 0.4278, 0.4789, 0.9525, 0.8487, 0.1678, 0.8083, 0.8602, + 0.5427, 0.9150, 0.6338, 0.0596, 0.3932, 0.3519, 0.0682, + 0.2049, 0.6992, 0.1087, 0.4717, 0.7743, 0.1069, 0.9390, + 0.7215, 0.0549, 0.2173, 0.5676, 0.3363, 0.3647, 0.6589, + 0.8482, 0.0780, 0.4024, 0.0875, 0.0825, 0.2565, 0.7274, + 0.2689, 0.2341, 0.8282, 0.6484, 0.5740, 0.2458, 0.3418, + 0.6679, 0.0928, 0.8043, 0.0375, 0.0455, 0.1021, 0.0727, + 0.6034, 0.7073, 0.9501, 0.1827, 0.6546, 0.0622, 0.8697, + 0.6037, 0.6145, 0.7007, 0.9749, 0.2250, 0.9919, 0.6048, + 0.0930, 0.2305, 0.6915, 0.5382, 0.1474, 0.1571, 0.4897, + 0.4500, 0.3177, 0.8426, 0.5450, 0.6520, 0.4501, 0.7247, + 0.0507, 0.2850, 0.0212, 0.9359, 0.6125, 0.4131, 0.8209, + 0.5987, 0.8486, 0.6476, 0.7153, 0.8008, 0.6473, 0.9929, + 0.7390, 0.7887, 0.4013, 0.3755, 0.5104, 0.8562, 0.2127, + 0.2174, 0.7440, 0.3993, 0.8984, 0.4550, 0.9304, 0.9089, + 0.9769, 0.3961, 0.4588, 0.7478, 0.4077, 0.7220, 0.1553, + 0.0217, 0.3009, 0.6585, 0.6058, 0.6903, 0.5106, 0.3638, + 0.8147, 0.1664, 0.2008, 0.2597, 0.0618, 0.8442, 0.8484, + 0.5244, 0.1691, 0.8820, 0.2008, 0.5312, 0.1231, 0.2789, + 0.2030, 0.9130, 0.3220, 0.9544, 0.0942, 0.3181, 0.3072, + 0.8830, 0.5897, 0.4002, 0.4521, 0.5237, 0.1548, 0.2054, + 0.1146, 0.0603, 0.1840, 0.5799, 0.0520, 0.7941, 0.5931, + 0.8282, 0.2801, 0.8827, 0.2904, 0.6095, 0.1708, 0.2367, + 0.4340, 0.5936, 0.4575, 0.2879, 0.8225, 0.0562, 0.7618, + 0.0424, 0.3732, 0.7233, 0.7772, 0.4983, 0.8623, 0.4285, + 0.4221, 0.1088, 0.3304, 0.1568, 0.5143, 0.6215, 0.5503, + 0.7690, 0.3756, 0.5348, 0.7357, 0.8109, 0.3130, 0.0158, + 0.0023, 0.6162, 0.8535, 0.2750, 0.7217, 0.5469, 0.3265, + 0.1222, 0.4043, 0.3551, 0.1335, 0.2431, 0.0804, 0.8806, + 0.9161, 0.7825, 0.7689, 0.7354, 0.8643, 0.4190, 0.5343, + 0.3840, 0.7807, 0.6542, 0.2807, 0.0621, 0.5952, 0.8649, + 0.8056, 0.2886, 0.8492, 0.0497, 0.6903, 0.0986, 0.3481, + 0.9830, 0.1513, 0.7150, 0.5003, 0.6462, 0.3091, 0.5104, + 0.1090, 0.4875, 0.2487, 0.1973, 0.1005, 0.5965, 0.6503, + 0.6776, 0.1194, 0.6968, 0.4165, 0.3997, 0.2356, 0.9948, + 0.2469, 0.5798, 0.3444, 0.2015, 0.9011, 0.6742, 0.3975, + 0.8612, 0.7831, 0.0862, 0.4379, 0.0809, 0.7931, 0.0607, + 0.3830, 0.8447, 0.6462, 0.9840, 0.5433, 0.4101, 0.8260, + 0.5601, 0.6978, 0.1804, 0.4108, 0.0158, 0.7436, 0.8390, + 0.9475, 0.4933, 0.1487, 0.4644, 0.5877, 0.7798, 0.6315, + 0.5300, 0.7588, 0.5234, 0.0393, 0.3814, 0.5279, 0.6528, + 0.9869, 0.9540, 0.6270, 0.7013, 0.4305, 0.2085, 0.9008, + 0.4866, 0.4271, 0.1271, 0.3216, 0.3653, 0.9917, 0.2290, + 0.3330, 0.6020, 0.4543, 0.6559, 0.8582, 
0.2937, 0.1913, + 0.3080, 0.3895, 0.3059, 0.0633, 0.6122, 0.1175, 0.6572, + 0.2524, 0.7704, 0.8210, 0.4794, 0.7538, 0.4031, 0.0275, + 0.5311, 0.0815, 0.4034, 0.6148, 0.1185, 0.7439, 0.4016, + 0.5608, 0.0706, 0.4835, 0.2505, 0.9363, 0.3741, 0.3124, + 0.4634, 0.9844, 0.4189, 0.3458, 0.6504, 0.0883, 0.9154, + 0.1987, 0.8102, 0.2403, 0.6352, 0.9134, 0.0740, 0.6217, + 0.7461, 0.9307, 0.2340, 0.9526, 0.2394, 0.1661, 0.5903, + 0.3531, 0.3579, 0.3335, 0.4941, 0.0937, 0.0190, 0.7400, + 0.6048, 0.0736, 0.8480, 0.7056, 0.9624, 0.8952, 0.2590, + 0.2918, 0.9369, 0.3316, 0.8127, 0.4530, 0.8583, 0.8824, + 0.6823, 0.0554, 0.6007, 0.8647, 0.0307, 0.2993, 0.2476, + 0.7318, 0.8917, 0.9643, 0.6157, 0.2184, 0.8408, 0.3345, + 0.0712, 0.8159, 0.2459, 0.0991, 0.7444, 0.2222, 0.0014, + 0.1305, 0.8914, 0.0089, 0.5321, 0.7917, 0.7163, 0.9580, + 0.3624, 0.0142, 0.8937, 0.5115, 0.5049, 0.8434, 0.7234, + 0.7161, 0.2634, 0.8592, 0.3961, 0.5586, 0.2620, 0.0375, + 0.1665, 0.2915, 0.9139, 0.7009, 0.5095, 0.4519, 0.1213, + 0.3561, 0.0066, 0.4379, 0.3522, 0.6225, 0.6900, 0.8216, + 0.8841, 0.6553, 0.8193, 0.7688, 0.5104, 0.3926, 0.7388, + 0.4735, 0.1897, 0.7788, 0.8825, 0.9103, 0.2988, 0.1239, + 0.1792, 0.1266, 0.4818, 0.8893, 0.6604, 0.1883, 0.9700, + 0.5469, 0.0958, 0.2762, 0.2054, 0.3215, 0.7664]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.5980, 0.9421, 0.9493, ..., 0.6518, 0.5202, 0.4457]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.272708654403687 seconds + +[18.39, 17.85, 18.29, 17.9, 18.11, 17.8, 18.16, 22.08, 18.79, 18.14] +[46.54] +14.219237565994263 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 223318, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.272708654403687, 'TIME_S_1KI': 0.046000361163917314, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.7633163213729, 'W': 46.54} +[18.39, 17.85, 18.29, 17.9, 18.11, 17.8, 18.16, 22.08, 18.79, 18.14, 20.06, 18.33, 18.25, 21.91, 18.14, 18.0, 18.29, 18.13, 17.88, 18.08] +335.245 +16.76225 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 223318, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.272708654403687, 'TIME_S_1KI': 0.046000361163917314, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 661.7633163213729, 'W': 46.54, 'J_1KI': 2.963322778823798, 'W_1KI': 0.2084023679237679, 'W_D': 29.777749999999997, 'J_D': 423.41690143078563, 'W_D_1KI': 0.13334236380408207, 'J_D_1KI': 0.0005970963549919042} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..d34e261 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 115566, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.362370014190674, "TIME_S_1KI": 0.08966625144238508, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 657.2445391845703, "W": 46.58, "J_1KI": 5.687179094063741, "W_1KI": 
0.40305972344807295, "W_D": 30.2825, "J_D": 427.28655555725095, "W_D_1KI": 0.2620364120935223, "J_D_1KI": 0.002267417857272228} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..30badbd --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.024602413177490234} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 3, ..., 4997, 4999, 5000]), + col_indices=tensor([5115, 9337, 5262, ..., 1244, 4227, 2124]), + values=tensor([0.7036, 0.8839, 0.8989, ..., 0.3409, 0.8377, 0.4572]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.4155, 0.9580, 0.1653, ..., 0.8843, 0.0512, 0.6581]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.024602413177490234 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '42678', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.8775720596313477} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 4999, 5000]), + col_indices=tensor([ 717, 6679, 2344, ..., 3928, 4219, 6236]), + values=tensor([0.9595, 0.5891, 0.3421, ..., 0.3760, 0.2961, 0.2336]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.6496, 0.5288, 0.3835, ..., 0.2038, 0.3313, 0.3083]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 3.8775720596313477 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '115566', '-ss', '10000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.362370014190674} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([6593, 2332, 3653, ..., 6447, 6839, 4175]), + values=tensor([0.4277, 0.1691, 0.2657, ..., 0.2731, 0.4419, 0.1553]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.3155, 0.6085, 0.7514, ..., 0.0185, 0.1956, 0.3828]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.362370014190674 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([6593, 2332, 3653, ..., 6447, 6839, 4175]), + values=tensor([0.4277, 0.1691, 0.2657, ..., 0.2731, 0.4419, 0.1553]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.3155, 0.6085, 0.7514, ..., 0.0185, 0.1956, 0.3828]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.362370014190674 seconds + +[18.44, 17.79, 18.13, 18.16, 18.19, 17.86, 18.01, 17.82, 18.05, 18.06] +[46.58] +14.110015869140625 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 115566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.362370014190674, 'TIME_S_1KI': 0.08966625144238508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 657.2445391845703, 'W': 46.58} +[18.44, 17.79, 18.13, 18.16, 18.19, 17.86, 18.01, 17.82, 18.05, 18.06, 18.35, 17.92, 18.08, 18.77, 17.97, 18.23, 18.59, 17.91, 18.14, 17.81] +325.95 +16.2975 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 115566, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.362370014190674, 'TIME_S_1KI': 0.08966625144238508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 657.2445391845703, 'W': 46.58, 'J_1KI': 5.687179094063741, 'W_1KI': 0.40305972344807295, 'W_D': 30.2825, 'J_D': 427.28655555725095, 'W_D_1KI': 0.2620364120935223, 'J_D_1KI': 0.002267417857272228} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..5b0e976 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 15.447505474090576, "TIME_S_1KI": 154.47505474090576, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2621.8117760252953, "W": 44.61, "J_1KI": 26218.117760252953, "W_1KI": 446.1, "W_D": 28.050250000000002, "J_D": 1648.56480095166, "W_D_1KI": 280.50250000000005, "J_D_1KI": 2805.0250000000005} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..55f3dfa --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,47 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 15.447505474090576} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 102, ..., 24999899, + 24999940, 25000000]), + col_indices=tensor([ 5577, 28835, 47310, ..., 481805, 486701, + 494412]), + values=tensor([0.4828, 0.8396, 0.7554, ..., 0.0896, 0.7495, 0.8303]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.9266, 0.8773, 0.4193, ..., 0.0231, 0.6267, 0.1934]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 15.447505474090576 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 102, ..., 24999899, + 24999940, 25000000]), + col_indices=tensor([ 5577, 28835, 47310, ..., 481805, 486701, + 494412]), + values=tensor([0.4828, 0.8396, 0.7554, ..., 0.0896, 0.7495, 0.8303]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.9266, 0.8773, 0.4193, ..., 0.0231, 0.6267, 0.1934]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 15.447505474090576 seconds + +[18.81, 17.9, 18.4, 17.9, 18.2, 17.84, 18.01, 22.17, 18.54, 18.17] +[44.61] +58.77183985710144 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 15.447505474090576, 'TIME_S_1KI': 154.47505474090576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2621.8117760252953, 'W': 44.61} +[18.81, 17.9, 18.4, 17.9, 18.2, 17.84, 18.01, 22.17, 18.54, 18.17, 18.44, 18.17, 17.99, 18.08, 18.12, 18.6, 18.11, 18.22, 18.18, 18.11] +331.19499999999994 +16.559749999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 15.447505474090576, 'TIME_S_1KI': 154.47505474090576, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2621.8117760252953, 'W': 44.61, 'J_1KI': 26218.117760252953, 'W_1KI': 446.1, 'W_D': 28.050250000000002, 'J_D': 1648.56480095166, 'W_D_1KI': 280.50250000000005, 'J_D_1KI': 2805.0250000000005} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..8a43ad7 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 751, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 
1e-05, "TIME_S": 10.154199123382568, "TIME_S_1KI": 13.520904292120598, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 705.406801700592, "W": 48.84, "J_1KI": 939.290015580016, "W_1KI": 65.03328894806924, "W_D": 32.38000000000001, "J_D": 467.67142176628124, "W_D_1KI": 43.115845539280976, "J_D_1KI": 57.41124572474165} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..9480055 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.3980276584625244} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 2499987, + 2499996, 2500000]), + col_indices=tensor([ 56026, 195485, 327540, ..., 74351, 467081, + 495492]), + values=tensor([0.8691, 0.8234, 0.1160, ..., 0.0380, 0.6115, 0.8262]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8652, 0.4148, 0.1413, ..., 0.7873, 0.1950, 0.8001]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 1.3980276584625244 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '751', '-ss', '500000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.154199123382568} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 11, ..., 2499987, + 2499993, 2500000]), + col_indices=tensor([159259, 352180, 455738, ..., 361655, 368506, + 421546]), + values=tensor([0.7015, 0.3878, 0.3559, ..., 0.2417, 0.3895, 0.7278]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4879, 0.7909, 0.7587, ..., 0.1983, 0.9582, 0.5253]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.154199123382568 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 11, ..., 2499987, + 2499993, 2500000]), + col_indices=tensor([159259, 352180, 455738, ..., 361655, 368506, + 421546]), + values=tensor([0.7015, 0.3878, 0.3559, ..., 0.2417, 0.3895, 0.7278]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4879, 0.7909, 0.7587, ..., 0.1983, 0.9582, 0.5253]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.154199123382568 seconds + +[18.79, 17.69, 18.39, 21.58, 18.07, 18.02, 18.17, 17.87, 17.87, 18.03] +[48.84] +14.44321870803833 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 751, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.154199123382568, 'TIME_S_1KI': 13.520904292120598, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 705.406801700592, 'W': 48.84} +[18.79, 17.69, 18.39, 21.58, 18.07, 18.02, 18.17, 17.87, 17.87, 18.03, 18.42, 17.82, 17.95, 18.61, 17.96, 18.22, 17.97, 18.42, 18.04, 17.86] +329.19999999999993 +16.459999999999997 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 751, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.154199123382568, 'TIME_S_1KI': 13.520904292120598, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 705.406801700592, 'W': 48.84, 'J_1KI': 939.290015580016, 'W_1KI': 65.03328894806924, 'W_D': 32.38000000000001, 'J_D': 467.67142176628124, 'W_D_1KI': 43.115845539280976, 'J_D_1KI': 57.41124572474165} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..e9ce521 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 147, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.563251495361328, "TIME_S_1KI": 71.85885370994102, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 973.9529043245316, "W": 47.49, "J_1KI": 6625.529961391371, "W_1KI": 323.0612244897959, "W_D": 30.996250000000003, "J_D": 635.6893600898982, "W_D_1KI": 210.858843537415, "J_D_1KI": 1434.4139016150682} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..b78ba4c --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05', '-c', '1'] 
+{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.09878396987915} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 30, 58, ..., 12499941, + 12499962, 12500000]), + col_indices=tensor([ 1470, 2567, 5271, ..., 471166, 490246, + 499700]), + values=tensor([0.1668, 0.7788, 0.4321, ..., 0.7966, 0.9450, 0.5105]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9128, 0.0898, 0.7303, ..., 0.7724, 0.8343, 0.8680]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 7.09878396987915 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '147', '-ss', '500000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.563251495361328} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 16, 39, ..., 12499961, + 12499982, 12500000]), + col_indices=tensor([ 42415, 50722, 59820, ..., 419133, 436999, + 480407]), + values=tensor([0.4848, 0.7890, 0.2846, ..., 0.4428, 0.7066, 0.1150]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.7955, 0.8202, 0.0668, ..., 0.2866, 0.4586, 0.4680]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.563251495361328 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 16, 39, ..., 12499961, + 12499982, 12500000]), + col_indices=tensor([ 42415, 50722, 59820, ..., 419133, 436999, + 480407]), + values=tensor([0.4848, 0.7890, 0.2846, ..., 0.4428, 0.7066, 0.1150]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.7955, 0.8202, 0.0668, ..., 0.2866, 0.4586, 0.4680]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.563251495361328 seconds + +[18.64, 21.68, 17.93, 17.89, 17.96, 18.08, 18.05, 17.94, 17.92, 17.72] +[47.49] +20.508589267730713 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.563251495361328, 'TIME_S_1KI': 71.85885370994102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 973.9529043245316, 'W': 47.49} +[18.64, 21.68, 17.93, 17.89, 17.96, 18.08, 18.05, 17.94, 17.92, 17.72, 18.36, 17.99, 18.51, 17.9, 18.05, 18.98, 18.56, 18.01, 18.0, 18.13] +329.875 +16.49375 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.563251495361328, 'TIME_S_1KI': 71.85885370994102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 973.9529043245316, 'W': 47.49, 'J_1KI': 6625.529961391371, 'W_1KI': 323.0612244897959, 'W_D': 30.996250000000003, 'J_D': 635.6893600898982, 'W_D_1KI': 210.858843537415, 'J_D_1KI': 1434.4139016150682} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..4d243f2 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 9147, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.50059986114502, "TIME_S_1KI": 1.1479829300475588, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 681.5257016324997, "W": 47.63, "J_1KI": 74.50811212774677, "W_1KI": 5.207171750300645, "W_D": 31.135, "J_D": 445.50289146184923, "W_D_1KI": 3.4038482562588825, "J_D_1KI": 0.37212728285327235} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..2f48a00 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.13143563270568848} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 12, ..., 249990, 249995, + 250000]), + col_indices=tensor([33764, 781, 3609, ..., 16676, 21435, 31146]), + values=tensor([0.7781, 0.6572, 0.9120, ..., 0.8330, 0.7571, 0.7121]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7934, 0.6307, 0.2590, ..., 0.3547, 0.1547, 0.8460]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.13143563270568848 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7988', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 9.168656826019287} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 18, ..., 249992, 249995, + 250000]), + col_indices=tensor([ 6728, 8437, 8523, ..., 40465, 44043, 46138]), + values=tensor([0.4640, 0.7108, 0.7346, ..., 0.1761, 0.2770, 0.7056]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0571, 0.2747, 0.4590, ..., 0.7273, 0.2570, 0.3128]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 9.168656826019287 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9147', '-ss', '50000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.50059986114502} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 10, ..., 249987, 249996, + 250000]), + col_indices=tensor([ 4811, 5188, 33226, ..., 17568, 20020, 26384]), + values=tensor([0.5580, 0.6578, 0.5141, ..., 0.8482, 0.1339, 0.2046]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.2505, 0.9140, 0.1873, ..., 0.2385, 0.4644, 0.1302]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.50059986114502 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 10, ..., 249987, 249996, + 250000]), + col_indices=tensor([ 4811, 5188, 33226, ..., 17568, 20020, 26384]), + values=tensor([0.5580, 0.6578, 0.5141, ..., 0.8482, 0.1339, 0.2046]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.2505, 0.9140, 0.1873, ..., 0.2385, 0.4644, 0.1302]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.50059986114502 seconds + +[18.33, 18.14, 21.59, 19.0, 18.11, 18.16, 17.98, 18.16, 18.03, 18.31] +[47.63] +14.308748722076416 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 9147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.50059986114502, 'TIME_S_1KI': 1.1479829300475588, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 681.5257016324997, 'W': 47.63} +[18.33, 18.14, 21.59, 19.0, 18.11, 18.16, 17.98, 18.16, 18.03, 18.31, 18.43, 17.74, 17.82, 18.07, 18.59, 18.18, 17.93, 18.01, 17.93, 17.85] +329.90000000000003 +16.495 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 9147, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.50059986114502, 'TIME_S_1KI': 1.1479829300475588, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 681.5257016324997, 'W': 47.63, 'J_1KI': 74.50811212774677, 'W_1KI': 5.207171750300645, 'W_D': 31.135, 'J_D': 445.50289146184923, 'W_D_1KI': 3.4038482562588825, 'J_D_1KI': 0.37212728285327235} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..db8a72e --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 1997, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.44909930229187, "TIME_S_1KI": 5.232398248518714, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 733.6282246422767, "W": 48.73, "J_1KI": 367.3651600612302, "W_1KI": 24.40160240360541, "W_D": 32.179249999999996, "J_D": 484.4573373244404, "W_D_1KI": 16.113795693540307, "J_D_1KI": 8.069001348793345} diff 
--git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..a863f3a --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.5546464920043945} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 55, 117, ..., 2499903, + 2499953, 2500000]), + col_indices=tensor([ 566, 1603, 2858, ..., 47622, 48780, 49985]), + values=tensor([0.9915, 0.7849, 0.7900, ..., 0.9170, 0.4625, 0.4875]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6866, 0.0421, 0.1446, ..., 0.6566, 0.6603, 0.7026]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 0.5546464920043945 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1893', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.948294878005981} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 33, 68, ..., 2499891, + 2499938, 2500000]), + col_indices=tensor([ 3534, 3824, 4376, ..., 49368, 49484, 49571]), + values=tensor([0.2824, 0.5783, 0.2215, ..., 0.8826, 0.1249, 0.2741]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5004, 0.8496, 0.6985, ..., 0.2602, 0.0299, 0.5346]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 9.948294878005981 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1997', '-ss', '50000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.44909930229187} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 60, 107, ..., 2499904, + 2499948, 2500000]), + col_indices=tensor([ 1519, 4331, 6515, ..., 42103, 42230, 49135]), + values=tensor([0.6099, 0.9393, 0.8647, ..., 0.8575, 0.6331, 0.4704]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1131, 0.9150, 0.5556, ..., 0.6033, 0.7715, 0.6124]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.44909930229187 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 60, 107, ..., 2499904, + 2499948, 2500000]), + col_indices=tensor([ 1519, 4331, 6515, ..., 42103, 42230, 49135]), + values=tensor([0.6099, 0.9393, 0.8647, ..., 0.8575, 0.6331, 0.4704]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1131, 0.9150, 0.5556, ..., 0.6033, 0.7715, 0.6124]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.44909930229187 seconds + +[18.64, 18.18, 18.44, 22.12, 18.9, 18.07, 17.86, 17.86, 18.53, 18.0] +[48.73] +15.054960489273071 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1997, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.44909930229187, 'TIME_S_1KI': 5.232398248518714, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 733.6282246422767, 'W': 48.73} +[18.64, 18.18, 18.44, 22.12, 18.9, 18.07, 17.86, 17.86, 18.53, 18.0, 18.34, 18.09, 18.08, 17.94, 18.14, 18.44, 18.04, 17.91, 17.9, 18.05] +331.015 +16.55075 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 1997, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.44909930229187, 'TIME_S_1KI': 5.232398248518714, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 733.6282246422767, 'W': 48.73, 'J_1KI': 367.3651600612302, 'W_1KI': 24.40160240360541, 'W_D': 32.179249999999996, 'J_D': 484.4573373244404, 'W_D_1KI': 16.113795693540307, 'J_D_1KI': 8.069001348793345} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..981b4ab --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 134, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.514585256576538, "TIME_S_1KI": 78.46705415355626, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2494.187549471855, "W": 43.5, "J_1KI": 18613.339921431754, "W_1KI": 324.6268656716418, "W_D": 26.962, "J_D": 1545.9375795140265, "W_D_1KI": 201.2089552238806, "J_D_1KI": 1501.5593673423925} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..7744b5c --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.827495813369751} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 509, 1029, ..., 24999021, + 24999525, 25000000]), + col_indices=tensor([ 77, 168, 174, ..., 49716, 49743, 49917]), + values=tensor([0.0871, 0.3865, 0.3717, ..., 0.4376, 0.0483, 0.0994]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.1988, 0.8388, 0.2584, ..., 0.5965, 0.5005, 0.7795]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 7.827495813369751 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '134', '-ss', '50000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.514585256576538} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 525, 1042, ..., 24999041, + 24999512, 25000000]), + col_indices=tensor([ 163, 320, 387, ..., 49821, 49828, 49920]), + values=tensor([0.8765, 0.3303, 0.5777, ..., 0.2129, 0.9852, 0.0873]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2121, 0.5034, 0.7106, ..., 0.6677, 0.7232, 0.0645]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.514585256576538 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 525, 1042, ..., 24999041, + 24999512, 25000000]), + col_indices=tensor([ 163, 320, 387, ..., 49821, 49828, 49920]), + values=tensor([0.8765, 0.3303, 0.5777, ..., 0.2129, 0.9852, 0.0873]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2121, 0.5034, 0.7106, ..., 0.6677, 0.7232, 0.0645]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.514585256576538 seconds + +[18.5, 18.14, 19.17, 17.93, 18.04, 17.95, 18.11, 18.93, 18.07, 18.37] +[43.5] +57.337644815444946 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 134, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.514585256576538, 'TIME_S_1KI': 78.46705415355626, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2494.187549471855, 'W': 43.5} +[18.5, 18.14, 19.17, 17.93, 18.04, 17.95, 18.11, 18.93, 18.07, 18.37, 22.44, 18.33, 18.23, 18.16, 18.38, 17.89, 18.61, 17.96, 18.23, 17.95] +330.76 +16.538 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 134, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.514585256576538, 'TIME_S_1KI': 78.46705415355626, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2494.187549471855, 'W': 43.5, 'J_1KI': 18613.339921431754, 'W_1KI': 324.6268656716418, 'W_D': 26.962, 'J_D': 1545.9375795140265, 'W_D_1KI': 201.2089552238806, 'J_D_1KI': 1501.5593673423925} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..01c9d2b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 22242, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.517975091934204, "TIME_S_1KI": 0.472888008809199, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 668.1220734119415, "W": 47.0, "J_1KI": 30.03875880819807, "W_1KI": 2.113119323801816, "W_D": 30.704, "J_D": 436.4685136604309, "W_D_1KI": 1.3804513982555526, "J_D_1KI": 0.062065075004745646} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..f23093c --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.06578350067138672} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([16918, 1143, 1724, ..., 48553, 41363, 39308]), + values=tensor([0.9238, 0.1195, 0.2813, ..., 0.9276, 0.6113, 0.0798]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0373, 0.8428, 0.6841, ..., 0.3333, 0.7324, 0.6824]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.06578350067138672 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15961', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.534844875335693} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24999, 25000, 25000]), + col_indices=tensor([29190, 43986, 25006, ..., 44362, 15421, 8070]), + values=tensor([0.3395, 0.8970, 0.1159, ..., 0.8275, 0.4942, 0.3559]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7532, 0.7737, 0.3401, ..., 0.4031, 0.1788, 0.5939]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 7.534844875335693 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22242', '-ss', '50000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.517975091934204} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 25000, 25000, 25000]), + col_indices=tensor([22981, 23025, 47875, ..., 28752, 43497, 8642]), + values=tensor([0.4359, 0.4232, 0.6349, ..., 0.3036, 0.5759, 0.5327]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8882, 0.3621, 0.9798, ..., 0.1733, 0.2748, 0.2728]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.517975091934204 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 25000, 25000, 25000]), + col_indices=tensor([22981, 23025, 47875, ..., 28752, 43497, 8642]), + values=tensor([0.4359, 0.4232, 0.6349, ..., 0.3036, 0.5759, 0.5327]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8882, 0.3621, 0.9798, ..., 0.1733, 0.2748, 0.2728]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.517975091934204 seconds + +[18.31, 17.89, 18.04, 17.91, 18.19, 18.28, 17.95, 17.98, 18.92, 18.1] +[47.0] +14.215363264083862 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 22242, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.517975091934204, 'TIME_S_1KI': 0.472888008809199, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 668.1220734119415, 'W': 47.0} +[18.31, 17.89, 18.04, 17.91, 18.19, 18.28, 17.95, 17.98, 18.92, 18.1, 18.24, 18.64, 18.01, 17.83, 18.02, 18.06, 18.04, 17.88, 17.99, 17.93] +325.92 +16.296 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 22242, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.517975091934204, 'TIME_S_1KI': 0.472888008809199, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 668.1220734119415, 'W': 47.0, 'J_1KI': 30.03875880819807, 'W_1KI': 2.113119323801816, 'W_D': 30.704, 'J_D': 436.4685136604309, 'W_D_1KI': 1.3804513982555526, 'J_D_1KI': 0.062065075004745646} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..7de68b9 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 11256, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.481152534484863, "TIME_S_1KI": 0.9311613836607022, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 671.3460792446136, "W": 47.13, "J_1KI": 59.64339723210853, "W_1KI": 4.187100213219616, "W_D": 30.78675, "J_D": 438.54368566060066, "W_D_1KI": 2.735141257995736, "J_D_1KI": 0.24299407053977753} diff --git 
a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..d1f10c6 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.1096796989440918} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 124996, 124997, + 125000]), + col_indices=tensor([38708, 28625, 11454, ..., 884, 22723, 30800]), + values=tensor([0.0038, 0.3289, 0.1581, ..., 0.9719, 0.8303, 0.9998]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.0207, 0.8237, 0.8176, ..., 0.3561, 0.4550, 0.3366]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.1096796989440918 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9573', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 8.929835081100464} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 4, ..., 124997, 124998, + 125000]), + col_indices=tensor([ 5176, 42593, 37500, ..., 3219, 12793, 38658]), + values=tensor([0.6803, 0.7664, 0.9859, ..., 0.5422, 0.9603, 0.0980]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.2299, 0.8541, 0.6200, ..., 0.4981, 0.6521, 0.8502]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 8.929835081100464 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '11256', '-ss', '50000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.481152534484863} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 124996, 124999, + 125000]), + col_indices=tensor([ 3517, 32781, 39284, ..., 16837, 28625, 12663]), + values=tensor([0.4051, 0.3118, 0.0683, ..., 0.4752, 0.1421, 0.6822]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.2287, 0.1097, 0.9835, ..., 0.8729, 0.0701, 0.5217]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.481152534484863 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 124996, 124999, + 125000]), + col_indices=tensor([ 3517, 32781, 39284, ..., 16837, 28625, 12663]), + values=tensor([0.4051, 0.3118, 0.0683, ..., 0.4752, 0.1421, 0.6822]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.2287, 0.1097, 0.9835, ..., 0.8729, 0.0701, 0.5217]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.481152534484863 seconds + +[18.46, 17.73, 18.38, 18.1, 18.29, 18.18, 18.04, 18.01, 18.16, 17.9] +[47.13] +14.244559288024902 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11256, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.481152534484863, 'TIME_S_1KI': 0.9311613836607022, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 671.3460792446136, 'W': 47.13} +[18.46, 17.73, 18.38, 18.1, 18.29, 18.18, 18.04, 18.01, 18.16, 17.9, 18.94, 17.91, 17.96, 17.99, 18.41, 18.07, 18.2, 17.95, 18.46, 18.75] +326.865 +16.34325 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 11256, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.481152534484863, 'TIME_S_1KI': 0.9311613836607022, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 671.3460792446136, 'W': 47.13, 'J_1KI': 59.64339723210853, 'W_1KI': 4.187100213219616, 'W_D': 30.78675, 'J_D': 438.54368566060066, 'W_D_1KI': 2.735141257995736, 'J_D_1KI': 0.24299407053977753} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..b6b292e --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 220904, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.611992359161377, "TIME_S_1KI": 0.04803893256419701, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 660.6772035217285, "W": 46.78, "J_1KI": 2.990788774860249, "W_1KI": 0.211766197081085, "W_D": 30.2935, "J_D": 427.83721387100223, "W_D_1KI": 0.13713423025386592, "J_D_1KI": 0.0006207865419090009} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..bcbbe28 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.021222829818725586} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 2499, 2499, 2500]), + col_indices=tensor([2865, 4172, 3505, ..., 1471, 1829, 2284]), + values=tensor([0.9472, 0.7106, 0.6508, ..., 0.6327, 0.4564, 0.0632]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.0811, 0.3767, 0.7595, ..., 0.7571, 0.6856, 0.3676]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.021222829818725586 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '49475', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.351640462875366} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([1365, 3411, 751, ..., 1715, 4182, 3544]), + values=tensor([0.2168, 0.4073, 0.8209, ..., 0.1504, 0.5765, 0.8829]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.3766, 0.6993, 0.2098, ..., 0.7754, 0.7068, 0.6832]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 2.351640462875366 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '220904', '-ss', '5000', '-sd', '0.0001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.611992359161377} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([ 469, 3066, 4238, ..., 2570, 4418, 4413]), + values=tensor([0.6778, 0.7938, 0.7053, ..., 0.2703, 0.2957, 0.5133]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.1744, 0.7493, 0.4982, ..., 0.1073, 0.6650, 0.9357]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.611992359161377 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([ 469, 3066, 4238, ..., 2570, 4418, 4413]), + values=tensor([0.6778, 0.7938, 0.7053, ..., 0.2703, 0.2957, 0.5133]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.1744, 0.7493, 0.4982, ..., 0.1073, 0.6650, 0.9357]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.611992359161377 seconds + +[18.28, 17.78, 18.16, 18.31, 18.21, 17.89, 17.98, 18.38, 18.01, 17.96] +[46.78] +14.123069763183594 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 220904, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.611992359161377, 'TIME_S_1KI': 0.04803893256419701, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 660.6772035217285, 'W': 46.78} +[18.28, 17.78, 18.16, 18.31, 18.21, 17.89, 17.98, 18.38, 18.01, 17.96, 18.63, 18.15, 17.86, 18.23, 18.11, 18.07, 18.06, 17.82, 21.74, 19.07] +329.73 +16.4865 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 220904, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.611992359161377, 'TIME_S_1KI': 0.04803893256419701, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 660.6772035217285, 'W': 46.78, 'J_1KI': 2.990788774860249, 'W_1KI': 0.211766197081085, 'W_D': 30.2935, 'J_D': 427.83721387100223, 'W_D_1KI': 0.13713423025386592, 'J_D_1KI': 0.0006207865419090009} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..96eeba5 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 111257, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.7375807762146, "TIME_S_1KI": 0.09651150737674573, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 673.9428442955017, "W": 47.23, "J_1KI": 6.057532059065961, "W_1KI": 0.42451261493658826, "W_D": 30.839999999999996, "J_D": 440.0676967620849, "W_D_1KI": 0.2771960415973826, "J_D_1KI": 0.0024914930440096588} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..1589984 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, 
"MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.025800704956054688} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 11, ..., 24988, 24993, 25000]), + col_indices=tensor([ 36, 564, 3279, ..., 4511, 4767, 4922]), + values=tensor([0.2797, 0.2996, 0.9239, ..., 0.1899, 0.3417, 0.3512]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3254, 0.3602, 0.2662, ..., 0.3074, 0.8226, 0.4658]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.025800704956054688 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '40696', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.8407018184661865} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 11, ..., 24987, 24992, 25000]), + col_indices=tensor([ 298, 713, 1200, ..., 1957, 3799, 4153]), + values=tensor([0.8486, 0.9770, 0.8154, ..., 0.4467, 0.7513, 0.9966]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8767, 0.4273, 0.1763, ..., 0.9403, 0.3580, 0.5902]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 3.8407018184661865 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '111257', '-ss', '5000', '-sd', '0.001', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.7375807762146} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 24991, 24996, 25000]), + col_indices=tensor([ 575, 1907, 4405, ..., 1224, 3086, 3740]), + values=tensor([0.1597, 0.6483, 0.2533, ..., 0.7760, 0.1307, 0.6720]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0745, 0.7131, 0.6004, ..., 0.2535, 0.5073, 0.4932]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.7375807762146 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 24991, 24996, 25000]), + col_indices=tensor([ 575, 1907, 4405, ..., 1224, 3086, 3740]), + values=tensor([0.1597, 0.6483, 0.2533, ..., 0.7760, 0.1307, 0.6720]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0745, 0.7131, 0.6004, ..., 0.2535, 0.5073, 0.4932]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.7375807762146 seconds + +[18.26, 17.96, 17.92, 18.07, 18.39, 18.16, 18.05, 18.66, 19.34, 18.38] +[47.23] +14.269380569458008 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 111257, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.7375807762146, 'TIME_S_1KI': 0.09651150737674573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.9428442955017, 'W': 47.23} +[18.26, 17.96, 17.92, 18.07, 18.39, 18.16, 18.05, 18.66, 19.34, 18.38, 18.78, 18.12, 17.96, 17.92, 17.93, 18.7, 18.05, 17.87, 17.97, 18.04] +327.8 +16.39 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 111257, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.7375807762146, 'TIME_S_1KI': 0.09651150737674573, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 673.9428442955017, 'W': 47.23, 'J_1KI': 6.057532059065961, 'W_1KI': 0.42451261493658826, 'W_D': 30.839999999999996, 'J_D': 440.0676967620849, 'W_D_1KI': 0.2771960415973826, 'J_D_1KI': 0.0024914930440096588} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..a469b78 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 21150, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.43382978439331, "TIME_S_1KI": 0.4933252853141046, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 678.8009396362305, "W": 47.74, "J_1KI": 32.09460707499908, "W_1KI": 2.257210401891253, "W_D": 31.383250000000004, "J_D": 446.22914932632455, "W_D_1KI": 1.483841607565012, "J_D_1KI": 0.0701579956295514} diff --git 
a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..c2c0006 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.06486701965332031} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 47, 88, ..., 249915, 249957, + 250000]), + col_indices=tensor([ 72, 118, 180, ..., 4779, 4849, 4984]), + values=tensor([0.8923, 0.3860, 0.0290, ..., 0.0532, 0.0516, 0.8464]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.1487, 0.9450, 0.3254, ..., 0.6866, 0.3989, 0.7268]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.06486701965332031 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '16186', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.035360336303711} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 44, 82, ..., 249908, 249962, + 250000]), + col_indices=tensor([ 36, 43, 78, ..., 4796, 4867, 4932]), + values=tensor([0.3758, 0.9832, 0.1983, ..., 0.0743, 0.8633, 0.0592]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5985, 0.2492, 0.1240, ..., 0.8930, 0.7764, 0.3200]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 8.035360336303711 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '21150', '-ss', '5000', '-sd', '0.01', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.43382978439331} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 47, 95, ..., 249894, 249948, + 250000]), + col_indices=tensor([ 129, 143, 228, ..., 4613, 4768, 4965]), + values=tensor([0.2601, 0.0327, 0.3118, ..., 0.8257, 0.2689, 0.3965]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3768, 0.4725, 0.8369, ..., 0.3357, 0.4139, 0.5546]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.43382978439331 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 47, 95, ..., 249894, 249948, + 250000]), + col_indices=tensor([ 129, 143, 228, ..., 4613, 4768, 4965]), + values=tensor([0.2601, 0.0327, 0.3118, ..., 0.8257, 0.2689, 0.3965]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3768, 0.4725, 0.8369, ..., 0.3357, 0.4139, 0.5546]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.43382978439331 seconds + +[18.56, 17.95, 18.07, 18.36, 18.1, 18.2, 18.17, 18.16, 18.01, 17.83] +[47.74] +14.218704223632812 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21150, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.43382978439331, 'TIME_S_1KI': 0.4933252853141046, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 678.8009396362305, 'W': 47.74} +[18.56, 17.95, 18.07, 18.36, 18.1, 18.2, 18.17, 18.16, 18.01, 17.83, 19.27, 18.21, 17.98, 17.93, 18.07, 18.27, 18.29, 17.78, 18.3, 18.91] +327.135 +16.356749999999998 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 21150, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.43382978439331, 'TIME_S_1KI': 0.4933252853141046, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 678.8009396362305, 'W': 47.74, 'J_1KI': 32.09460707499908, 'W_1KI': 2.257210401891253, 'W_D': 31.383250000000004, 'J_D': 446.22914932632455, 'W_D_1KI': 1.483841607565012, 'J_D_1KI': 0.0701579956295514} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..c9a64dd --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 4516, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.427689790725708, "TIME_S_1KI": 2.309054426644311, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 694.9637099742889, "W": 47.9, "J_1KI": 
153.88921832911623, "W_1KI": 10.606731620903455, "W_D": 31.338749999999997, "J_D": 454.682546262145, "W_D_1KI": 6.939492914083259, "J_D_1KI": 1.5366459065729094} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..ce3acc5 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.2486708164215088} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 247, 481, ..., 1249493, + 1249753, 1250000]), + col_indices=tensor([ 6, 31, 64, ..., 4955, 4959, 4978]), + values=tensor([0.8259, 0.7056, 0.5562, ..., 0.4513, 0.5248, 0.2272]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.7144, 0.3655, 0.7208, ..., 0.9456, 0.6678, 0.5049]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.2486708164215088 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4222', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.815936088562012} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 269, 519, ..., 1249537, + 1249769, 1250000]), + col_indices=tensor([ 42, 49, 74, ..., 4955, 4973, 4990]), + values=tensor([0.0021, 0.9721, 0.1598, ..., 0.6170, 0.8086, 0.1248]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.9473, 0.7157, 0.7204, ..., 0.2702, 0.4361, 0.8753]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 9.815936088562012 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '4516', '-ss', '5000', '-sd', '0.05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.427689790725708} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 262, 521, ..., 1249547, + 1249780, 1250000]), + col_indices=tensor([ 5, 16, 32, ..., 4965, 4966, 4994]), + values=tensor([0.6294, 0.1213, 0.8577, ..., 0.8057, 0.3565, 0.7731]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.5561, 0.2020, 0.8277, ..., 0.0800, 0.4571, 0.4718]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.427689790725708 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
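The three command lines of this record follow the same calibration pattern seen throughout these outputs: a 100-iteration probe (0.2487 s at density 0.05), after which the count is scaled up until the kernel fills roughly the 10-second BASELINE_TIME_S window (4222 iterations reached 9.82 s, 4516 reached 10.43 s). The rule below is inferred from the logged counts and is only a sketch of what the driver presumably does, not its verbatim logic.

TARGET_S = 10.0

def calibrate(run_benchmark, iterations: int = 100) -> int:
    # run_benchmark(n) runs the spmv kernel n times and returns elapsed seconds.
    elapsed = run_benchmark(iterations)                  # 0.2487 s at 100 iterations
    while elapsed < TARGET_S:
        # Scale toward the target with a small overshoot factor; for this
        # record the inferred factor reproduces 100 -> 4222 -> 4516.
        iterations = int(iterations * TARGET_S * 1.05 / elapsed)
        elapsed = run_benchmark(iterations)              # 9.82 s, then 10.43 s
    return iterations                                    # logged as ITERATIONS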
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 262, 521, ..., 1249547, + 1249780, 1250000]), + col_indices=tensor([ 5, 16, 32, ..., 4965, 4966, 4994]), + values=tensor([0.6294, 0.1213, 0.8577, ..., 0.8057, 0.3565, 0.7731]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.5561, 0.2020, 0.8277, ..., 0.0800, 0.4571, 0.4718]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.427689790725708 seconds + +[18.38, 21.41, 18.53, 17.98, 18.13, 17.94, 18.18, 18.19, 18.64, 17.87] +[47.9] +14.508636951446533 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.427689790725708, 'TIME_S_1KI': 2.309054426644311, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 694.9637099742889, 'W': 47.9} +[18.38, 21.41, 18.53, 17.98, 18.13, 17.94, 18.18, 18.19, 18.64, 17.87, 18.41, 18.3, 18.02, 18.19, 18.18, 18.28, 18.35, 17.94, 18.16, 18.95] +331.225 +16.56125 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 4516, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.427689790725708, 'TIME_S_1KI': 2.309054426644311, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 694.9637099742889, 'W': 47.9, 'J_1KI': 153.88921832911623, 'W_1KI': 10.606731620903455, 'W_D': 31.338749999999997, 'J_D': 454.682546262145, 'W_D_1KI': 6.939492914083259, 'J_D_1KI': 1.5366459065729094} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..6d226de --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 2077, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.487555503845215, "TIME_S_1KI": 5.04937674715706, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 724.931280415058, "W": 48.23, "J_1KI": 349.0280599013279, "W_1KI": 23.220991815117955, "W_D": 31.779249999999998, "J_D": 477.6647811140418, "W_D_1KI": 15.300553683196917, "J_D_1KI": 7.366660415597938} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..ec991c2 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.53643798828125} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 485, 953, ..., 2499027, + 2499524, 2500000]), + col_indices=tensor([ 4, 6, 18, ..., 4982, 4984, 4999]), + values=tensor([0.6950, 0.7335, 0.8547, ..., 0.7303, 0.2740, 0.2643]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4725, 0.9871, 0.6689, ..., 0.5705, 0.1526, 0.5563]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.53643798828125 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1957', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.889901638031006} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 453, 921, ..., 2498978, + 2499478, 2500000]), + col_indices=tensor([ 4, 20, 34, ..., 4974, 4986, 4991]), + values=tensor([0.7939, 0.2865, 0.3388, ..., 0.3715, 0.9532, 0.1224]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8979, 0.8998, 0.6031, ..., 0.0686, 0.1119, 0.6753]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 9.889901638031006 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2077', '-ss', '5000', '-sd', '0.1', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.487555503845215} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 534, 1011, ..., 2499001, + 2499492, 2500000]), + col_indices=tensor([ 12, 16, 26, ..., 4984, 4992, 4995]), + values=tensor([0.3129, 0.5758, 0.2112, ..., 0.6208, 0.5668, 0.8482]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7114, 0.1437, 0.5452, ..., 0.6795, 0.1114, 0.8178]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.487555503845215 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 534, 1011, ..., 2499001, + 2499492, 2500000]), + col_indices=tensor([ 12, 16, 26, ..., 4984, 4992, 4995]), + values=tensor([0.3129, 0.5758, 0.2112, ..., 0.6208, 0.5668, 0.8482]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7114, 0.1437, 0.5452, ..., 0.6795, 0.1114, 0.8178]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.487555503845215 seconds + +[18.45, 17.83, 21.85, 18.4, 18.31, 17.94, 17.97, 18.32, 18.18, 17.83] +[48.23] +15.030712842941284 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.487555503845215, 'TIME_S_1KI': 5.04937674715706, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 724.931280415058, 'W': 48.23} +[18.45, 17.83, 21.85, 18.4, 18.31, 17.94, 17.97, 18.32, 18.18, 17.83, 18.49, 17.82, 18.18, 18.17, 18.15, 17.82, 18.07, 17.68, 17.99, 17.9] +329.015 +16.45075 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 2077, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.487555503845215, 'TIME_S_1KI': 5.04937674715706, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 724.931280415058, 'W': 48.23, 'J_1KI': 349.0280599013279, 'W_1KI': 23.220991815117955, 'W_D': 31.779249999999998, 'J_D': 477.6647811140418, 'W_D_1KI': 15.300553683196917, 'J_D_1KI': 7.366660415597938} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..e6d6ed2 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 962, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.386102437973022, "TIME_S_1KI": 10.796364280637237, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 760.2223528313637, "W": 48.37, "J_1KI": 790.2519260201285, "W_1KI": 50.280665280665275, "W_D": 31.813249999999996, "J_D": 500.00297221857306, "W_D_1KI": 33.069906444906444, "J_D_1KI": 34.376202125682376} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..c8b98a6 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 
25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 1.090480089187622} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 979, 1989, ..., 4997947, + 4998978, 5000000]), + col_indices=tensor([ 0, 1, 2, ..., 4989, 4991, 4992]), + values=tensor([0.4629, 0.8349, 0.1230, ..., 0.3254, 0.2010, 0.4262]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.8820, 0.9283, 0.2134, ..., 0.8569, 0.5183, 0.3465]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 1.090480089187622 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '962', '-ss', '5000', '-sd', '0.2', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.386102437973022} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1020, 2003, ..., 4997983, + 4998972, 5000000]), + col_indices=tensor([ 2, 12, 14, ..., 4982, 4988, 4994]), + values=tensor([0.5268, 0.0953, 0.2601, ..., 0.2366, 0.8226, 0.2641]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.6710, 0.0398, 0.1150, ..., 0.1943, 0.5070, 0.9802]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.386102437973022 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1020, 2003, ..., 4997983, + 4998972, 5000000]), + col_indices=tensor([ 2, 12, 14, ..., 4982, 4988, 4994]), + values=tensor([0.5268, 0.0953, 0.2601, ..., 0.2366, 0.8226, 0.2641]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.6710, 0.0398, 0.1150, ..., 0.1943, 0.5070, 0.9802]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.386102437973022 seconds + +[18.34, 18.12, 18.35, 17.91, 17.99, 18.03, 18.28, 18.28, 17.95, 18.09] +[48.37] +15.71681523323059 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 962, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.386102437973022, 'TIME_S_1KI': 10.796364280637237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 760.2223528313637, 'W': 48.37} +[18.34, 18.12, 18.35, 17.91, 17.99, 18.03, 18.28, 18.28, 17.95, 18.09, 18.27, 18.18, 18.12, 17.89, 18.15, 21.61, 18.11, 17.8, 18.1, 21.83] +331.13500000000005 +16.55675 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 962, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.386102437973022, 'TIME_S_1KI': 10.796364280637237, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 760.2223528313637, 'W': 48.37, 'J_1KI': 790.2519260201285, 'W_1KI': 50.280665280665275, 'W_D': 31.813249999999996, 'J_D': 500.00297221857306, 'W_D_1KI': 33.069906444906444, 'J_D_1KI': 34.376202125682376} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..6efa366 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 640, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.40328574180603, "TIME_S_1KI": 16.255133971571922, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 792.4070657157898, "W": 48.54, "J_1KI": 1238.1360401809216, "W_1KI": 75.84375, "W_D": 32.294250000000005, "J_D": 527.1980198185445, "W_D_1KI": 50.45976562500001, "J_D_1KI": 78.84338378906251} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..e11bc1c --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.6401786804199219} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1479, 2972, ..., 7496976, + 7498517, 7500000]), + col_indices=tensor([ 4, 10, 12, ..., 4987, 4989, 4997]), + values=tensor([0.8092, 0.2326, 0.1918, ..., 0.7537, 0.2703, 0.9406]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.6079, 0.4244, 0.8803, ..., 0.9929, 0.8834, 0.4182]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 1.6401786804199219 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '640', '-ss', '5000', '-sd', '0.3', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.40328574180603} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1469, 2947, ..., 7496962, + 7498492, 7500000]), + col_indices=tensor([ 5, 8, 10, ..., 4979, 4981, 4995]), + values=tensor([0.5097, 0.4133, 0.1946, ..., 0.6762, 0.7827, 0.5941]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.3256, 0.8526, 0.3288, ..., 0.0837, 0.1622, 0.2040]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.40328574180603 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1469, 2947, ..., 7496962, + 7498492, 7500000]), + col_indices=tensor([ 5, 8, 10, ..., 4979, 4981, 4995]), + values=tensor([0.5097, 0.4133, 0.1946, ..., 0.6762, 0.7827, 0.5941]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.3256, 0.8526, 0.3288, ..., 0.0837, 0.1622, 0.2040]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.40328574180603 seconds + +[18.61, 18.25, 18.17, 17.83, 17.98, 17.89, 17.97, 17.91, 17.89, 18.07] +[48.54] +16.32482624053955 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 640, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.40328574180603, 'TIME_S_1KI': 16.255133971571922, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.4070657157898, 'W': 48.54} +[18.61, 18.25, 18.17, 17.83, 17.98, 17.89, 17.97, 17.91, 17.89, 18.07, 18.46, 18.32, 17.92, 18.02, 17.98, 18.33, 17.98, 17.9, 18.02, 17.97] +324.91499999999996 +16.245749999999997 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 640, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.40328574180603, 'TIME_S_1KI': 16.255133971571922, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 792.4070657157898, 'W': 48.54, 'J_1KI': 1238.1360401809216, 'W_1KI': 75.84375, 'W_D': 32.294250000000005, 'J_D': 527.1980198185445, 'W_D_1KI': 50.45976562500001, 'J_D_1KI': 78.84338378906251} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..679e37e --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 393, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.361774921417236, "TIME_S_1KI": 26.365839494700346, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 830.257935385704, "W": 47.87, "J_1KI": 2112.615611668458, "W_1KI": 121.80661577608141, "W_D": 31.368499999999997, "J_D": 544.0556934645175, "W_D_1KI": 79.81806615776081, "J_D_1KI": 203.09940498157965} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..4eb279b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 2.6700339317321777} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1981, 4013, ..., 9996053, + 9998014, 10000000]), + col_indices=tensor([ 1, 2, 4, ..., 4993, 4995, 4997]), + values=tensor([0.1269, 0.6137, 0.4927, ..., 0.6127, 0.1027, 0.1107]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1274, 0.2875, 0.5158, ..., 0.6638, 0.6368, 0.8182]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 2.6700339317321777 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '393', '-ss', '5000', '-sd', '0.4', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.361774921417236} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2035, 3994, ..., 9995956, + 9997978, 10000000]), + col_indices=tensor([ 2, 3, 4, ..., 4989, 4993, 4999]), + values=tensor([0.0874, 0.4595, 0.0218, ..., 0.9380, 0.2756, 0.2464]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1585, 0.7775, 0.6260, ..., 0.0357, 0.4122, 0.0843]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.361774921417236 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
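Every run in these outputs is launched through the same wrapper visible in the command lines: a single OMP place with binding enabled, numactl restricting both CPU and memory to node 0, and the iteration count, matrix size, and density forwarded to spmv.py inside the pytorch-xeon_4216.sif container. A hypothetical launcher that reconstructs exactly these argument lists (the actual driver script is not shown in this section) could look like the sketch below.

import subprocess

def launch(matrix_format: str, iterations: int, size: int, density: float):
    # Argument list copied from the logged command lines; only the four
    # parameters vary between runs.
    cmd = ['apptainer', 'run',
           '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}',
           'pytorch-xeon_4216.sif',
           'numactl', '--cpunodebind=0', '--membind=0',
           'python3', 'spmv.py', 'synthetic', matrix_format, str(iterations),
           '-ss', str(size), '-sd', str(density), '-c', '1']
    return subprocess.run(cmd, capture_output=True, text=True)

# e.g. launch('csr', 100, 5000, 0.4) reproduces the 100-iteration probe logged above.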
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2035, 3994, ..., 9995956, + 9997978, 10000000]), + col_indices=tensor([ 2, 3, 4, ..., 4989, 4993, 4999]), + values=tensor([0.0874, 0.4595, 0.0218, ..., 0.9380, 0.2756, 0.2464]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1585, 0.7775, 0.6260, ..., 0.0357, 0.4122, 0.0843]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.361774921417236 seconds + +[18.09, 21.49, 19.13, 18.18, 17.87, 18.12, 18.49, 17.91, 18.18, 17.86] +[47.87] +17.344013690948486 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 393, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.361774921417236, 'TIME_S_1KI': 26.365839494700346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 830.257935385704, 'W': 47.87} +[18.09, 21.49, 19.13, 18.18, 17.87, 18.12, 18.49, 17.91, 18.18, 17.86, 18.63, 18.02, 17.95, 17.88, 17.98, 18.08, 17.96, 17.78, 18.35, 18.74] +330.03 +16.5015 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 393, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.361774921417236, 'TIME_S_1KI': 26.365839494700346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 830.257935385704, 'W': 47.87, 'J_1KI': 2112.615611668458, 'W_1KI': 121.80661577608141, 'W_D': 31.368499999999997, 'J_D': 544.0556934645175, 'W_D_1KI': 79.81806615776081, 'J_D_1KI': 203.09940498157965} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..afa179f --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 307, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.394981145858765, "TIME_S_1KI": 33.85987343927936, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 930.0089691495895, "W": 47.58, "J_1KI": 3029.3451763830276, "W_1KI": 154.98371335504885, "W_D": 31.056999999999995, "J_D": 607.0468380596636, "W_D_1KI": 101.16286644951138, "J_D_1KI": 329.5207376205583} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..91aa70e --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,65 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 3.415400505065918} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2477, 4964, ..., 12494964, + 12497482, 12500000]), + col_indices=tensor([ 1, 2, 4, ..., 4993, 4994, 4997]), + values=tensor([0.5791, 0.4301, 0.3570, ..., 0.1858, 0.4639, 0.9573]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0093, 0.1244, 0.8882, ..., 0.7606, 0.5225, 0.2163]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 3.415400505065918 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '307', '-ss', '5000', '-sd', '0.5', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.394981145858765} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2499, 4994, ..., 12495082, + 12497507, 12500000]), + col_indices=tensor([ 0, 1, 6, ..., 4995, 4997, 4999]), + values=tensor([0.6585, 0.9778, 0.9803, ..., 0.8325, 0.0849, 0.1040]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.3486, 0.4233, 0.3644, ..., 0.4149, 0.2376, 0.4812]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.394981145858765 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2499, 4994, ..., 12495082, + 12497507, 12500000]), + col_indices=tensor([ 0, 1, 6, ..., 4995, 4997, 4999]), + values=tensor([0.6585, 0.9778, 0.9803, ..., 0.8325, 0.0849, 0.1040]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.3486, 0.4233, 0.3644, ..., 0.4149, 0.2376, 0.4812]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.394981145858765 seconds + +[18.48, 21.56, 18.9, 18.32, 18.22, 17.83, 18.09, 17.98, 18.55, 18.34] +[47.58] +19.546216249465942 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 307, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.394981145858765, 'TIME_S_1KI': 33.85987343927936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 930.0089691495895, 'W': 47.58} +[18.48, 21.56, 18.9, 18.32, 18.22, 17.83, 18.09, 17.98, 18.55, 18.34, 18.35, 17.93, 17.78, 17.82, 18.22, 18.23, 18.5, 17.76, 18.34, 17.69] +330.46000000000004 +16.523000000000003 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 307, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.394981145858765, 'TIME_S_1KI': 33.85987343927936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 930.0089691495895, 'W': 47.58, 'J_1KI': 3029.3451763830276, 'W_1KI': 154.98371335504885, 'W_D': 31.056999999999995, 'J_D': 607.0468380596636, 'W_D_1KI': 101.16286644951138, 'J_D_1KI': 329.5207376205583} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..ca05b98 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 355542, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.588425874710083, "TIME_S_1KI": 0.02978108317641821, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 655.0935684347153, "W": 46.57, "J_1KI": 1.8425209073322286, "W_1KI": 0.13098311873140162, "W_D": 30.1475, "J_D": 424.08059597134593, "W_D_1KI": 0.08479307648604104, "J_D_1KI": 0.00023848962003375422} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..a05082b --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.022435426712036133} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR 
tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1292, 659, 4365, 3710, 1440, 2896, 123, 3649, 2612, + 927, 1659, 4214, 4385, 2636, 1869, 118, 1932, 2570, + 3752, 4154, 1992, 3194, 1579, 4685, 2991, 2783, 3487, + 3211, 2142, 3564, 4650, 3661, 3610, 3388, 1017, 3880, + 4125, 2970, 4559, 2499, 1693, 43, 2397, 1040, 4017, + 1828, 4674, 1496, 1775, 719, 2677, 389, 3154, 2997, + 3651, 773, 1883, 4184, 1034, 1972, 2681, 2277, 1527, + 4621, 238, 4365, 4905, 2103, 4738, 2026, 4073, 1797, + 3650, 3259, 4594, 2607, 1806, 3417, 2906, 213, 1348, + 1781, 4696, 1901, 709, 1550, 1968, 4490, 826, 4326, + 374, 3405, 4945, 642, 3326, 217, 437, 4704, 4681, + 3950, 1228, 3052, 3026, 3269, 1740, 2853, 2996, 1978, + 359, 3246, 3815, 3422, 1128, 733, 749, 4078, 2740, + 127, 2401, 240, 391, 4079, 575, 1175, 4439, 4510, + 2136, 1342, 880, 2183, 3085, 3808, 4189, 2436, 1877, + 140, 1033, 744, 134, 1457, 133, 407, 2079, 2372, + 2867, 1110, 602, 2915, 3299, 4776, 1097, 3465, 774, + 2845, 4963, 619, 3626, 3224, 3032, 4984, 3635, 1115, + 1431, 229, 362, 2520, 4880, 2306, 2092, 2949, 3111, + 141, 801, 4774, 1268, 4702, 3013, 4053, 4202, 2338, + 3307, 2339, 1627, 3991, 2211, 3208, 4859, 2254, 850, + 2555, 416, 3498, 2761, 1743, 3828, 3909, 4942, 4647, + 2857, 399, 2142, 1173, 2936, 2739, 3524, 2473, 2398, + 3617, 4358, 1503, 3513, 1560, 2497, 176, 1685, 851, + 2706, 2662, 1211, 466, 3647, 2835, 1798, 4560, 4189, + 74, 1919, 4892, 1659, 1504, 1873, 179, 4512, 1622, + 131, 802, 3776, 894, 98, 1072, 3715, 1448, 4255, + 4226, 676, 4655, 4974, 2293, 491, 1924]), + values=tensor([0.9106, 0.6103, 0.8018, 0.6726, 0.7831, 0.8787, 0.8641, + 0.0319, 0.4873, 0.6079, 0.6438, 0.5806, 0.1055, 0.2960, + 0.2595, 0.4592, 0.2559, 0.3932, 0.7042, 0.8694, 0.4660, + 0.4246, 0.4675, 0.7217, 0.9048, 0.6757, 0.7971, 0.3444, + 0.9040, 0.2589, 0.8383, 0.9787, 0.5364, 0.5478, 0.4280, + 0.9375, 0.9169, 0.6011, 0.6510, 0.3645, 0.9595, 0.3413, + 0.1561, 0.3706, 0.5420, 0.2194, 0.4928, 0.9365, 0.2372, + 0.4934, 0.8170, 0.4062, 0.4573, 0.8424, 0.2137, 0.2198, + 0.8285, 0.9490, 0.8645, 0.5816, 0.3427, 0.8902, 0.3651, + 0.7666, 0.8408, 0.8585, 0.8931, 0.5551, 0.8982, 0.6356, + 0.4250, 0.1088, 0.6737, 0.3958, 0.4828, 0.5186, 0.8805, + 0.2395, 0.2572, 0.2532, 0.6717, 0.2414, 0.7893, 0.8437, + 0.3171, 0.1858, 0.6604, 0.8284, 0.5385, 0.2314, 0.5114, + 0.2593, 0.8363, 0.9654, 0.7652, 0.9942, 0.9048, 0.6526, + 0.7743, 0.0670, 0.4879, 0.0500, 0.2026, 0.0553, 0.2990, + 0.2738, 0.8845, 0.6958, 0.2567, 0.3351, 0.1957, 0.2099, + 0.3337, 0.5048, 0.9817, 0.1630, 0.6715, 0.7671, 0.0645, + 0.2446, 0.2884, 0.8150, 0.9791, 0.9499, 0.4039, 0.4962, + 0.6049, 0.6707, 0.4315, 0.8269, 0.1062, 0.0634, 0.9597, + 0.8898, 0.1177, 0.9543, 0.8326, 0.6160, 0.9716, 0.8673, + 0.7943, 0.1918, 0.0735, 0.7498, 0.7051, 0.7537, 0.5409, + 0.9422, 0.7547, 0.3930, 0.7287, 0.3187, 0.8163, 0.1055, + 0.0953, 0.5157, 0.5484, 0.2625, 0.0877, 0.4823, 0.2711, + 0.4063, 0.7443, 0.2411, 0.7149, 0.2424, 0.1102, 0.7648, + 0.9164, 0.7435, 0.3343, 0.4014, 0.3868, 0.7585, 0.7825, + 0.9665, 0.6243, 0.8999, 0.5120, 0.3172, 0.9824, 0.0450, + 0.4103, 0.8334, 0.8361, 0.7898, 0.9067, 0.7235, 0.2233, + 0.3637, 0.9009, 0.8914, 0.3259, 0.8165, 0.9365, 0.9274, + 0.5741, 0.2639, 0.6520, 0.7150, 
0.2093, 0.3816, 0.4707, + 0.4201, 0.9190, 0.5078, 0.8874, 0.9120, 0.2753, 0.7359, + 0.5812, 0.5682, 0.7646, 0.6267, 0.4102, 0.8266, 0.8853, + 0.7018, 0.9169, 0.0053, 0.7880, 0.6418, 0.6555, 0.9720, + 0.3526, 0.6341, 0.5088, 0.2195, 0.0203, 0.5525, 0.7633, + 0.6606, 0.4333, 0.8817, 0.0693, 0.9617, 0.5559, 0.9634, + 0.6048, 0.0232, 0.1068, 0.9352, 0.6002, 0.6363, 0.5154, + 0.1116, 0.5347, 0.0671, 0.7793, 0.1196]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.9677, 0.1967, 0.0087, ..., 0.2565, 0.6584, 0.6200]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.022435426712036133 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46800', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.382112741470337} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 249, 250]), + col_indices=tensor([4150, 2888, 4184, 1530, 678, 479, 1107, 471, 4285, + 3837, 1975, 2514, 511, 1660, 2068, 1448, 4627, 3695, + 4646, 2830, 2653, 1667, 2953, 3899, 1002, 4696, 4142, + 2682, 2648, 3794, 1459, 982, 980, 1746, 3339, 149, + 1718, 4640, 314, 53, 2403, 1141, 3357, 1085, 466, + 1619, 2477, 3859, 2168, 947, 4059, 1003, 2781, 2708, + 1717, 3009, 1864, 3007, 3896, 3680, 4372, 3309, 2254, + 2203, 4715, 1069, 4309, 2391, 3090, 3258, 952, 4475, + 4160, 3612, 4789, 3335, 819, 1827, 2260, 3171, 3323, + 4626, 3362, 7, 972, 4803, 364, 2649, 4177, 4599, + 2900, 3224, 1640, 4077, 3701, 2791, 1433, 655, 2314, + 3198, 317, 850, 1087, 611, 645, 558, 726, 1381, + 90, 1884, 2477, 176, 4078, 600, 1776, 1815, 4980, + 3290, 976, 3882, 4218, 3337, 4340, 4550, 1601, 376, + 2443, 2180, 1347, 4274, 3578, 2389, 1349, 3996, 4180, + 3976, 1026, 1825, 1698, 4427, 2513, 1604, 114, 2995, + 2989, 1072, 3384, 2975, 4300, 3198, 3255, 1005, 1851, + 4373, 2417, 1761, 1977, 1033, 304, 4563, 572, 4037, + 3427, 1513, 75, 468, 3187, 2009, 1764, 1805, 1467, + 3749, 4166, 2128, 4824, 3213, 2655, 2007, 1437, 1298, + 483, 971, 2056, 2156, 2263, 607, 4650, 771, 456, + 2047, 3920, 3689, 2454, 4552, 1948, 4918, 2583, 4601, + 1062, 3584, 2635, 2071, 2042, 2779, 1369, 1671, 4485, + 2542, 4111, 1550, 2280, 3307, 1653, 1055, 571, 3882, + 2132, 941, 2447, 3838, 493, 2724, 4427, 2495, 491, + 348, 2552, 3299, 317, 1166, 2830, 4896, 4608, 3014, + 670, 2086, 2508, 2837, 2920, 612, 4090, 2710, 1095, + 1628, 1220, 274, 3831, 1535, 1786, 4549]), + values=tensor([0.5255, 0.0182, 0.6174, 0.1076, 0.3535, 0.7090, 0.2797, + 0.4131, 0.9644, 0.6573, 0.3774, 0.2463, 0.8634, 0.5392, + 0.6180, 0.7460, 0.0840, 0.6919, 0.2395, 0.6380, 0.3064, + 0.4299, 0.2434, 0.7003, 0.1509, 0.6268, 0.3419, 0.0217, + 0.6724, 0.4826, 0.7793, 0.9245, 0.4498, 0.8997, 0.8789, + 0.2006, 0.9117, 0.6104, 0.9445, 0.6803, 0.5546, 0.0430, + 0.8599, 0.6166, 0.9366, 0.0741, 0.0108, 0.4102, 0.8063, + 0.1147, 0.2712, 
0.9101, 0.6498, 0.4997, 0.5120, 0.1408, + 0.5873, 0.5440, 0.2130, 0.6524, 0.1914, 0.2027, 0.3598, + 0.1760, 0.9961, 0.4064, 0.1145, 0.4074, 0.8942, 0.9988, + 0.7396, 0.3520, 0.8007, 0.2689, 0.7383, 0.4192, 0.4738, + 0.5964, 0.1917, 0.1869, 0.3576, 0.9988, 0.6764, 0.1906, + 0.7629, 0.4501, 0.8709, 0.2468, 0.5177, 0.2466, 0.2197, + 0.1446, 0.0928, 0.2356, 0.4535, 0.4306, 0.8108, 0.4445, + 0.2001, 0.2909, 0.2893, 0.9446, 0.2722, 0.1526, 0.7522, + 0.5034, 0.0891, 0.6792, 0.6980, 0.8787, 0.8816, 0.0939, + 0.0544, 0.5728, 0.3453, 0.6599, 0.1401, 0.4967, 0.8703, + 0.0012, 0.1313, 0.5851, 0.4868, 0.8996, 0.7538, 0.7366, + 0.3299, 0.6412, 0.9032, 0.8207, 0.4202, 0.9740, 0.5987, + 0.7801, 0.2814, 0.4031, 0.0887, 0.7346, 0.5935, 0.7540, + 0.2319, 0.3570, 0.1145, 0.5888, 0.6276, 0.7231, 0.7135, + 0.9613, 0.8035, 0.6211, 0.0088, 0.0973, 0.8083, 0.1435, + 0.0594, 0.5423, 0.4477, 0.3960, 0.6871, 0.1103, 0.2807, + 0.9626, 0.5226, 0.3908, 0.2801, 0.5699, 0.2801, 0.6331, + 0.2050, 0.6787, 0.0958, 0.2630, 0.1454, 0.4463, 0.8988, + 0.0901, 0.2439, 0.8477, 0.1410, 0.2267, 0.5153, 0.7658, + 0.4023, 0.6070, 0.4869, 0.6448, 0.8450, 0.9154, 0.1431, + 0.8925, 0.4147, 0.0491, 0.3877, 0.8712, 0.5490, 0.2553, + 0.4929, 0.0602, 0.3093, 0.9867, 0.9857, 0.7010, 0.9249, + 0.3952, 0.9763, 0.0416, 0.7299, 0.7590, 0.5814, 0.9861, + 0.2685, 0.2403, 0.0997, 0.7290, 0.0363, 0.1796, 0.0573, + 0.1340, 0.1547, 0.5881, 0.5516, 0.6658, 0.9991, 0.5590, + 0.3010, 0.2004, 0.5300, 0.9600, 0.5439, 0.0253, 0.1689, + 0.7972, 0.3164, 0.6988, 0.4588, 0.6168, 0.9056, 0.7303, + 0.2798, 0.7978, 0.1100, 0.0574, 0.5151, 0.8940, 0.9058, + 0.1406, 0.2261, 0.0686, 0.3738, 0.0528]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.9505, 0.7309, 0.0755, ..., 0.0952, 0.7737, 0.4268]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.382112741470337 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '355542', '-ss', '5000', '-sd', '1e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.588425874710083} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
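Each record in this sweep also lands as a one-line JSON summary (the *_synthetic_5000_*.json files added alongside the .output logs), which makes the runs easy to compare after the fact. A hypothetical post-processing helper, not part of the patch, that tabulates the per-1000-iteration metrics across densities:

import glob
import json

# Assumed layout: the one-line JSON summaries added under
# pytorch/output_synthetic_1core_old2/ for the 5000x5000 synthetic matrices.
pattern = 'pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_*.json'

records = [json.load(open(path)) for path in sorted(glob.glob(pattern))]
for rec in sorted(records, key=lambda r: r['MATRIX_DENSITY']):
    print(f"density={rec['MATRIX_DENSITY']:<7} nnz={rec['MATRIX_NNZ']:<9} "
          f"TIME_S_1KI={rec['TIME_S_1KI']:.4f} J_1KI={rec['J_1KI']:.2f}")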
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4976, 2999, 3531, 729, 4293, 4703, 527, 519, 1087, + 2588, 2503, 4621, 623, 2508, 4018, 2181, 4566, 129, + 1882, 141, 241, 4234, 3011, 3475, 1507, 1753, 1592, + 16, 1837, 2881, 4935, 1126, 1212, 1843, 3769, 1406, + 4716, 3662, 2501, 4960, 3438, 3187, 281, 2582, 4408, + 1833, 1310, 2092, 1021, 4933, 3776, 3565, 3765, 4067, + 3036, 4650, 3192, 1618, 3142, 599, 3093, 1420, 3804, + 496, 2492, 4465, 4733, 3170, 4826, 4434, 3018, 360, + 3823, 3346, 2952, 3585, 1580, 4171, 3383, 4158, 1729, + 4780, 335, 3992, 4542, 2789, 4029, 416, 4212, 913, + 792, 2949, 3671, 2278, 3431, 4080, 1970, 3530, 1625, + 1846, 123, 3801, 179, 2650, 2793, 3609, 4951, 1904, + 3327, 3068, 797, 2882, 3831, 1946, 4674, 2611, 3167, + 3799, 820, 4595, 2920, 4476, 339, 16, 1057, 3858, + 1996, 2480, 211, 316, 2763, 1963, 1855, 789, 4091, + 3327, 1560, 1579, 4983, 674, 1259, 1259, 1614, 4040, + 4463, 3393, 619, 1723, 4630, 933, 1161, 3845, 4268, + 4840, 4044, 3897, 1211, 4242, 540, 433, 3144, 2336, + 4751, 151, 2282, 2250, 2548, 3515, 851, 1606, 4107, + 209, 996, 4881, 3062, 1226, 876, 3179, 583, 2155, + 304, 1823, 35, 692, 3702, 4106, 1336, 1789, 1359, + 3986, 1095, 312, 3819, 955, 1411, 4676, 1229, 4251, + 263, 3639, 4785, 3043, 1158, 1138, 2259, 2515, 3611, + 4898, 3676, 269, 509, 3101, 1147, 409, 3191, 1984, + 2546, 1127, 3710, 4610, 4906, 997, 3399, 3628, 347, + 847, 2034, 588, 2961, 2738, 4337, 3827, 1890, 1227, + 1635, 1728, 2759, 1429, 4176, 4191, 4862, 4968, 1336, + 119, 3406, 554, 532, 1507, 1917, 2750]), + values=tensor([0.5199, 0.0559, 0.9006, 0.7777, 0.2843, 0.6207, 0.6975, + 0.8689, 0.5742, 0.4737, 0.6755, 0.5958, 0.4384, 0.4390, + 0.8599, 0.7635, 0.7028, 0.4511, 0.3241, 0.0806, 0.8242, + 0.9053, 0.3849, 0.6596, 0.0472, 0.0762, 0.5653, 0.2663, + 0.8433, 0.4091, 0.7837, 0.1469, 0.0020, 0.4944, 0.8738, + 0.1320, 0.2431, 0.2650, 0.7193, 0.2483, 0.4028, 0.5091, + 0.3427, 0.6757, 0.9747, 0.5603, 0.1148, 0.1932, 0.5560, + 0.7920, 0.7790, 0.7814, 0.5929, 0.3079, 0.4008, 0.4233, + 0.4893, 0.9629, 0.6575, 0.2210, 0.9465, 0.6464, 0.0795, + 0.6735, 0.0196, 0.9297, 0.4843, 0.7360, 0.6757, 0.8217, + 0.8318, 0.9782, 0.6754, 0.1098, 0.6940, 0.8548, 0.6163, + 0.6934, 0.6171, 0.1041, 0.7511, 0.4878, 0.3635, 0.8620, + 0.3905, 0.3542, 0.8549, 0.4722, 0.9206, 0.2789, 0.9568, + 0.1952, 0.7173, 0.8016, 0.5436, 0.6025, 0.2628, 0.4776, + 0.0648, 0.4119, 0.1873, 0.5768, 0.6229, 0.4612, 0.3781, + 0.9685, 0.2051, 0.1702, 0.4419, 0.4538, 0.4515, 0.1665, + 0.0292, 0.6419, 0.1217, 0.0198, 0.2563, 0.3495, 0.0936, + 0.2664, 0.5259, 0.3038, 0.7533, 0.2522, 0.7253, 0.1463, + 0.2087, 0.7896, 0.0364, 0.6152, 0.7986, 0.8112, 0.7556, + 0.0756, 0.4969, 0.3790, 0.1978, 0.1043, 0.8605, 0.5011, + 0.8762, 0.2381, 0.0660, 0.2738, 0.0392, 0.5623, 0.1390, + 0.4257, 0.6102, 0.9603, 0.2834, 0.0200, 0.2511, 0.8990, + 0.6324, 0.9167, 0.3730, 0.6817, 0.4652, 0.8996, 0.9136, + 0.8631, 0.9770, 0.2565, 0.6496, 0.1799, 0.4018, 0.4388, + 0.9314, 0.6833, 0.7914, 0.9455, 0.7655, 0.3170, 0.6697, + 0.9757, 0.4151, 0.9576, 0.1567, 0.8861, 0.0170, 0.7822, + 0.2221, 0.7184, 0.0581, 0.4824, 0.4815, 0.2522, 0.1559, + 0.1892, 0.1146, 0.0347, 0.4792, 0.0555, 0.6999, 0.5092, + 0.0919, 0.7115, 0.9111, 0.4498, 0.4923, 0.5300, 0.3126, + 0.8103, 0.3276, 0.8722, 0.4823, 0.2311, 0.5957, 0.0760, + 0.2892, 0.3555, 0.6604, 0.7686, 0.1412, 0.8595, 0.6702, + 0.1119, 0.1550, 0.8493, 0.8158, 0.3714, 0.8983, 0.3484, + 0.6611, 0.8110, 0.2241, 0.8267, 0.9605, 
0.8151, 0.1779, + 0.2906, 0.1723, 0.3272, 0.4678, 0.1292, 0.9514, 0.6369, + 0.8054, 0.2983, 0.3742, 0.8673, 0.0274, 0.1851, 0.9052, + 0.8742, 0.4529, 0.2266, 0.4410, 0.5323]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4269, 0.0147, 0.1392, ..., 0.1737, 0.9746, 0.9710]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.588425874710083 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4976, 2999, 3531, 729, 4293, 4703, 527, 519, 1087, + 2588, 2503, 4621, 623, 2508, 4018, 2181, 4566, 129, + 1882, 141, 241, 4234, 3011, 3475, 1507, 1753, 1592, + 16, 1837, 2881, 4935, 1126, 1212, 1843, 3769, 1406, + 4716, 3662, 2501, 4960, 3438, 3187, 281, 2582, 4408, + 1833, 1310, 2092, 1021, 4933, 3776, 3565, 3765, 4067, + 3036, 4650, 3192, 1618, 3142, 599, 3093, 1420, 3804, + 496, 2492, 4465, 4733, 3170, 4826, 4434, 3018, 360, + 3823, 3346, 2952, 3585, 1580, 4171, 3383, 4158, 1729, + 4780, 335, 3992, 4542, 2789, 4029, 416, 4212, 913, + 792, 2949, 3671, 2278, 3431, 4080, 1970, 3530, 1625, + 1846, 123, 3801, 179, 2650, 2793, 3609, 4951, 1904, + 3327, 3068, 797, 2882, 3831, 1946, 4674, 2611, 3167, + 3799, 820, 4595, 2920, 4476, 339, 16, 1057, 3858, + 1996, 2480, 211, 316, 2763, 1963, 1855, 789, 4091, + 3327, 1560, 1579, 4983, 674, 1259, 1259, 1614, 4040, + 4463, 3393, 619, 1723, 4630, 933, 1161, 3845, 4268, + 4840, 4044, 3897, 1211, 4242, 540, 433, 3144, 2336, + 4751, 151, 2282, 2250, 2548, 3515, 851, 1606, 4107, + 209, 996, 4881, 3062, 1226, 876, 3179, 583, 2155, + 304, 1823, 35, 692, 3702, 4106, 1336, 1789, 1359, + 3986, 1095, 312, 3819, 955, 1411, 4676, 1229, 4251, + 263, 3639, 4785, 3043, 1158, 1138, 2259, 2515, 3611, + 4898, 3676, 269, 509, 3101, 1147, 409, 3191, 1984, + 2546, 1127, 3710, 4610, 4906, 997, 3399, 3628, 347, + 847, 2034, 588, 2961, 2738, 4337, 3827, 1890, 1227, + 1635, 1728, 2759, 1429, 4176, 4191, 4862, 4968, 1336, + 119, 3406, 554, 532, 1507, 1917, 2750]), + values=tensor([0.5199, 0.0559, 0.9006, 0.7777, 0.2843, 0.6207, 0.6975, + 0.8689, 0.5742, 0.4737, 0.6755, 0.5958, 0.4384, 0.4390, + 0.8599, 0.7635, 0.7028, 0.4511, 0.3241, 0.0806, 0.8242, + 0.9053, 0.3849, 0.6596, 0.0472, 0.0762, 0.5653, 0.2663, + 0.8433, 0.4091, 0.7837, 0.1469, 0.0020, 0.4944, 0.8738, + 0.1320, 0.2431, 0.2650, 0.7193, 0.2483, 0.4028, 0.5091, + 0.3427, 0.6757, 0.9747, 0.5603, 0.1148, 0.1932, 0.5560, + 0.7920, 0.7790, 0.7814, 0.5929, 0.3079, 0.4008, 0.4233, + 0.4893, 0.9629, 0.6575, 0.2210, 0.9465, 0.6464, 0.0795, + 0.6735, 0.0196, 0.9297, 0.4843, 0.7360, 0.6757, 0.8217, + 0.8318, 0.9782, 0.6754, 0.1098, 0.6940, 0.8548, 0.6163, + 0.6934, 0.6171, 0.1041, 0.7511, 0.4878, 0.3635, 0.8620, + 0.3905, 0.3542, 0.8549, 0.4722, 0.9206, 0.2789, 0.9568, + 0.1952, 0.7173, 0.8016, 0.5436, 0.6025, 0.2628, 0.4776, + 0.0648, 0.4119, 0.1873, 0.5768, 0.6229, 0.4612, 0.3781, + 0.9685, 0.2051, 0.1702, 0.4419, 0.4538, 0.4515, 0.1665, + 0.0292, 0.6419, 0.1217, 0.0198, 0.2563, 0.3495, 0.0936, + 0.2664, 0.5259, 0.3038, 0.7533, 0.2522, 0.7253, 0.1463, + 0.2087, 0.7896, 0.0364, 0.6152, 0.7986, 0.8112, 0.7556, + 0.0756, 0.4969, 
0.3790, 0.1978, 0.1043, 0.8605, 0.5011, + 0.8762, 0.2381, 0.0660, 0.2738, 0.0392, 0.5623, 0.1390, + 0.4257, 0.6102, 0.9603, 0.2834, 0.0200, 0.2511, 0.8990, + 0.6324, 0.9167, 0.3730, 0.6817, 0.4652, 0.8996, 0.9136, + 0.8631, 0.9770, 0.2565, 0.6496, 0.1799, 0.4018, 0.4388, + 0.9314, 0.6833, 0.7914, 0.9455, 0.7655, 0.3170, 0.6697, + 0.9757, 0.4151, 0.9576, 0.1567, 0.8861, 0.0170, 0.7822, + 0.2221, 0.7184, 0.0581, 0.4824, 0.4815, 0.2522, 0.1559, + 0.1892, 0.1146, 0.0347, 0.4792, 0.0555, 0.6999, 0.5092, + 0.0919, 0.7115, 0.9111, 0.4498, 0.4923, 0.5300, 0.3126, + 0.8103, 0.3276, 0.8722, 0.4823, 0.2311, 0.5957, 0.0760, + 0.2892, 0.3555, 0.6604, 0.7686, 0.1412, 0.8595, 0.6702, + 0.1119, 0.1550, 0.8493, 0.8158, 0.3714, 0.8983, 0.3484, + 0.6611, 0.8110, 0.2241, 0.8267, 0.9605, 0.8151, 0.1779, + 0.2906, 0.1723, 0.3272, 0.4678, 0.1292, 0.9514, 0.6369, + 0.8054, 0.2983, 0.3742, 0.8673, 0.0274, 0.1851, 0.9052, + 0.8742, 0.4529, 0.2266, 0.4410, 0.5323]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4269, 0.0147, 0.1392, ..., 0.1737, 0.9746, 0.9710]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.588425874710083 seconds + +[18.34, 17.96, 17.95, 19.17, 18.44, 18.12, 18.19, 19.28, 18.14, 18.1] +[46.57] +14.066857814788818 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 355542, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.588425874710083, 'TIME_S_1KI': 0.02978108317641821, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 655.0935684347153, 'W': 46.57} +[18.34, 17.96, 17.95, 19.17, 18.44, 18.12, 18.19, 19.28, 18.14, 18.1, 18.29, 18.16, 18.13, 17.89, 18.15, 18.08, 18.32, 17.91, 18.06, 18.27] +328.45 +16.4225 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 355542, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.588425874710083, 'TIME_S_1KI': 0.02978108317641821, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 655.0935684347153, 'W': 46.57, 'J_1KI': 1.8425209073322286, 'W_1KI': 0.13098311873140162, 'W_D': 30.1475, 'J_D': 424.08059597134593, 'W_D_1KI': 0.08479307648604104, 'J_D_1KI': 0.00023848962003375422} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..0d245c9 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 1, "ITERATIONS": 303638, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.314902782440186, "TIME_S_1KI": 0.033971053631100805, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 652.3264142632485, "W": 46.59, "J_1KI": 2.148368828220606, "W_1KI": 0.1534392928421344, "W_D": 30.048000000000002, "J_D": 420.71483356475835, "W_D_1KI": 0.0989599457248434, "J_D_1KI": 0.0003259142324901475} diff --git a/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 
0000000..2579376 --- /dev/null +++ b/pytorch/output_synthetic_1core_old2/xeon_4216_1_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.018550395965576172} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([2148, 2186, 2653, ..., 4713, 108, 1050]), + values=tensor([0.2240, 0.7824, 0.4591, ..., 0.0832, 0.2125, 0.9204]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.9989, 0.0104, 0.9920, ..., 0.8791, 0.9452, 0.9082]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.018550395965576172 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '56602', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9573328495025635} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 1249, 1250, 1250]), + col_indices=tensor([ 143, 2910, 3407, ..., 360, 1598, 1598]), + values=tensor([0.9185, 0.3997, 0.2489, ..., 0.0567, 0.3179, 0.0072]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.5677, 0.2086, 0.3083, ..., 0.5984, 0.8633, 0.3307]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 1.9573328495025635 seconds + +['apptainer', 'run', '--env', 'OMP_PROC_BIND=true', '--env', 'OMP_PLACES={0:1}', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '303638', '-ss', '5000', '-sd', '5e-05', '-c', '1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.314902782440186} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([4078, 67, 3564, ..., 1146, 2529, 4353]), + values=tensor([0.7614, 0.2386, 0.2238, ..., 0.8351, 0.5866, 0.5164]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.2149, 0.3426, 0.8577, ..., 0.2235, 0.1553, 0.5182]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.314902782440186 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([4078, 67, 3564, ..., 1146, 2529, 4353]), + values=tensor([0.7614, 0.2386, 0.2238, ..., 0.8351, 0.5866, 0.5164]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.2149, 0.3426, 0.8577, ..., 0.2235, 0.1553, 0.5182]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.314902782440186 seconds + +[18.14, 22.27, 18.32, 18.01, 17.87, 18.05, 18.11, 18.01, 18.55, 17.98] +[46.59] +14.001425504684448 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 303638, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.314902782440186, 'TIME_S_1KI': 0.033971053631100805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 652.3264142632485, 'W': 46.59} +[18.14, 22.27, 18.32, 18.01, 17.87, 18.05, 18.11, 18.01, 18.55, 17.98, 18.58, 17.96, 18.17, 18.69, 18.11, 18.0, 18.22, 18.0, 18.06, 18.18] +330.84000000000003 +16.542 +{'CPU': 'Xeon 4216', 'CORES': 1, 'ITERATIONS': 303638, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.314902782440186, 'TIME_S_1KI': 0.033971053631100805, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 652.3264142632485, 'W': 46.59, 'J_1KI': 2.148368828220606, 'W_1KI': 0.1534392928421344, 'W_D': 30.048000000000002, 'J_D': 420.71483356475835, 'W_D_1KI': 0.0989599457248434, 'J_D_1KI': 0.0003259142324901475} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..60ceca9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 271, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 13.764680624008179, "TIME_S_1KI": 50.79217942438442, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 708.3959098911284, "W": 52.59794170063422, "J_1KI": 2614.0070475687394, "W_1KI": 194.08834575879789, "W_D": 33.72694170063423, "J_D": 454.2388310525417, "W_D_1KI": 124.45365941193442, "J_D_1KI": 459.2385956159942} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..3f5c857 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,74 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.8156983852386475} + +tensor(indices=tensor([[87675, 12759, 51005, ..., 2425, 41762, 29389], + [45918, 64834, 60882, ..., 3878, 87168, 85462]]), + values=tensor([0.8155, 0.0162, 0.6356, ..., 0.2182, 0.2053, 0.5055]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.1922, 0.3096, 0.1781, ..., 0.3622, 0.7702, 0.3772]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.8156983852386475 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 128 -ss 100000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.945516109466553} + +tensor(indices=tensor([[48382, 90202, 56319, ..., 51472, 45013, 24927], + [ 9625, 87352, 54913, ..., 87014, 15190, 70778]]), + values=tensor([2.9508e-04, 5.3837e-01, 2.3044e-01, ..., + 7.8739e-01, 2.0008e-01, 6.1077e-01]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.1999, 0.8074, 0.7307, ..., 0.2500, 0.5226, 0.2995]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 4.945516109466553 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 271 -ss 100000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 13.764680624008179} + +tensor(indices=tensor([[66316, 95038, 72364, ..., 73501, 98405, 57084], + [28561, 3708, 81350, ..., 68253, 50641, 67993]]), + values=tensor([0.0455, 0.3371, 0.7609, ..., 0.8030, 0.1462, 0.3187]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.3763, 0.9250, 0.8921, ..., 0.1108, 0.7157, 0.5685]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 13.764680624008179 seconds + +tensor(indices=tensor([[66316, 95038, 72364, ..., 73501, 98405, 57084], + [28561, 3708, 81350, ..., 68253, 50641, 67993]]), + values=tensor([0.0455, 0.3371, 0.7609, ..., 0.8030, 0.1462, 0.3187]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.3763, 0.9250, 0.8921, ..., 0.1108, 0.7157, 0.5685]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 13.764680624008179 seconds + +[20.92, 20.8, 20.44, 20.52, 20.8, 20.96, 21.32, 21.28, 21.44, 21.12] 
+[21.12, 20.92, 20.92, 22.0, 23.28, 35.6, 50.2, 69.52, 84.52, 96.84, 95.28, 92.36, 89.24] +13.468129873275757 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 271, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 13.764680624008179, 'TIME_S_1KI': 50.79217942438442, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 708.3959098911284, 'W': 52.59794170063422} +[20.92, 20.8, 20.44, 20.52, 20.8, 20.96, 21.32, 21.28, 21.44, 21.12, 20.92, 21.04, 20.96, 20.96, 20.96, 21.24, 21.16, 20.96, 20.8, 20.6] +377.41999999999996 +18.871 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 271, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 13.764680624008179, 'TIME_S_1KI': 50.79217942438442, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 708.3959098911284, 'W': 52.59794170063422, 'J_1KI': 2614.0070475687394, 'W_1KI': 194.08834575879789, 'W_D': 33.72694170063423, 'J_D': 454.2388310525417, 'W_D_1KI': 124.45365941193442, 'J_D_1KI': 459.2385956159942} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..e22cb60 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 32, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.193521499633789, "TIME_S_1KI": 318.5475468635559, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 800.2320325660704, "W": 45.6654704340178, "J_1KI": 25007.2510176897, "W_1KI": 1427.0459510630562, "W_D": 26.934470434017797, "J_D": 471.9939555346964, "W_D_1KI": 841.7022010630561, "J_D_1KI": 26303.193783220504} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..6441d67 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.276198148727417} + +tensor(indices=tensor([[ 9271, 3520, 84372, ..., 69208, 85337, 10954], + [92692, 94391, 59106, ..., 52928, 14707, 1304]]), + values=tensor([0.4952, 0.0287, 0.0135, ..., 0.7782, 0.3099, 0.9948]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.6617, 0.1769, 0.5475, ..., 0.2510, 0.2331, 0.9029]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 3.276198148727417 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 32 -ss 100000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": 
"coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.193521499633789} + +tensor(indices=tensor([[75264, 47872, 55415, ..., 19097, 10593, 87775], + [90533, 17061, 42308, ..., 10695, 74233, 93542]]), + values=tensor([0.3597, 0.1443, 0.1824, ..., 0.5042, 0.8186, 0.7909]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9072, 0.8205, 0.6569, ..., 0.2116, 0.6400, 0.9604]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.193521499633789 seconds + +tensor(indices=tensor([[75264, 47872, 55415, ..., 19097, 10593, 87775], + [90533, 17061, 42308, ..., 10695, 74233, 93542]]), + values=tensor([0.3597, 0.1443, 0.1824, ..., 0.5042, 0.8186, 0.7909]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9072, 0.8205, 0.6569, ..., 0.2116, 0.6400, 0.9604]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.193521499633789 seconds + +[20.88, 20.92, 20.92, 21.0, 21.2, 20.96, 20.76, 20.56, 20.8, 20.72] +[20.92, 20.88, 24.56, 26.16, 28.56, 33.56, 33.56, 44.52, 49.88, 60.4, 67.44, 69.84, 70.84, 66.44, 65.92, 64.72, 61.04] +17.52378821372986 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 32, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.193521499633789, 'TIME_S_1KI': 318.5475468635559, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 800.2320325660704, 'W': 45.6654704340178} +[20.88, 20.92, 20.92, 21.0, 21.2, 20.96, 20.76, 20.56, 20.8, 20.72, 20.96, 20.8, 20.64, 20.68, 20.68, 20.68, 20.68, 20.8, 20.88, 20.76] +374.62 +18.731 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 32, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.193521499633789, 'TIME_S_1KI': 318.5475468635559, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 800.2320325660704, 'W': 45.6654704340178, 'J_1KI': 25007.2510176897, 'W_1KI': 1427.0459510630562, 'W_D': 26.934470434017797, 'J_D': 471.9939555346964, 'W_D_1KI': 841.7022010630561, 'J_D_1KI': 26303.193783220504} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..30b9174 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 31.283674955368042, "TIME_S_1KI": 3128.367495536804, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2065.684582710266, "W": 29.080338518534504, "J_1KI": 206568.4582710266, "W_1KI": 2908.0338518534504, "W_D": 10.482338518534501, "J_D": 744.5994844484327, "W_D_1KI": 1048.2338518534502, "J_D_1KI": 104823.38518534502} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.01.output 
b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..c07cd28 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 31.283674955368042} + +tensor(indices=tensor([[39756, 56280, 8316, ..., 2685, 54729, 61793], + [13279, 85477, 14865, ..., 46128, 20132, 4621]]), + values=tensor([0.9595, 0.6864, 0.2705, ..., 0.5992, 0.1879, 0.1775]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.2852, 0.8402, 0.2843, ..., 0.8163, 0.4481, 0.7594]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 31.283674955368042 seconds + +tensor(indices=tensor([[39756, 56280, 8316, ..., 2685, 54729, 61793], + [13279, 85477, 14865, ..., 46128, 20132, 4621]]), + values=tensor([0.9595, 0.6864, 0.2705, ..., 0.5992, 0.1879, 0.1775]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.2852, 0.8402, 0.2843, ..., 0.8163, 0.4481, 0.7594]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 31.283674955368042 seconds + +[20.64, 20.76, 20.88, 20.64, 20.64, 20.96, 20.72, 20.68, 20.64, 20.64] +[20.76, 21.12, 22.04, 23.12, 25.0, 25.4, 26.04, 26.04, 25.72, 25.88, 26.12, 27.96, 29.68, 30.28, 30.04, 28.48, 27.16, 25.6, 26.88, 28.84, 28.84, 37.4, 42.44, 41.44, 41.24, 40.84, 27.8, 27.92, 30.0, 28.04, 30.2, 30.48, 27.8, 27.8, 30.44, 30.44, 28.2, 30.84, 30.88, 27.88, 30.2, 27.8, 30.24, 30.6, 28.52, 30.96, 30.96, 30.68, 28.04, 30.36, 30.12, 27.84, 40.24, 40.0, 40.64, 41.0, 35.0, 29.04, 28.6, 28.6, 28.32, 27.68, 28.04, 28.04, 28.92, 29.48, 29.28, 29.04, 29.16, 29.2] +71.03371858596802 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 31.283674955368042, 'TIME_S_1KI': 3128.367495536804, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2065.684582710266, 'W': 29.080338518534504} +[20.64, 20.76, 20.88, 20.64, 20.64, 20.96, 20.72, 20.68, 20.64, 20.64, 20.68, 20.56, 20.6, 20.6, 20.52, 20.52, 20.48, 20.56, 20.8, 20.84] +371.96000000000004 +18.598000000000003 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 31.283674955368042, 'TIME_S_1KI': 3128.367495536804, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2065.684582710266, 'W': 29.080338518534504, 'J_1KI': 206568.4582710266, 'W_1KI': 2908.0338518534504, 'W_D': 10.482338518534501, 'J_D': 744.5994844484327, 'W_D_1KI': 1048.2338518534502, 'J_D_1KI': 104823.38518534502} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..2455b73 
--- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1832, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.071704387664795, "TIME_S_1KI": 5.497655233441482, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 976.8089994430543, "W": 58.91607362719782, "J_1KI": 533.19268528551, "W_1KI": 32.15942883580667, "W_D": 39.98607362719782, "J_D": 662.9558652973176, "W_D_1KI": 21.826459403492258, "J_D_1KI": 11.914006224613678} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..6325e2c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.06184816360473633} + +tensor(indices=tensor([[75304, 38597, 21108, ..., 63266, 13593, 40311], + [ 3074, 52932, 99179, ..., 39992, 22754, 12119]]), + values=tensor([0.2157, 0.4966, 0.6862, ..., 0.6410, 0.7889, 0.4866]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.4290, 0.6506, 0.7947, ..., 0.0732, 0.3256, 0.4046]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.06184816360473633 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1697 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.723018407821655} + +tensor(indices=tensor([[32948, 43371, 82870, ..., 10594, 53145, 32060], + [ 1904, 74630, 78746, ..., 93228, 32178, 83466]]), + values=tensor([0.8749, 0.1860, 0.3958, ..., 0.8630, 0.3346, 0.9677]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9391, 0.7132, 0.5247, ..., 0.8437, 0.9708, 0.9628]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 9.723018407821655 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1832 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.071704387664795} + +tensor(indices=tensor([[26971, 40953, 39814, ..., 48771, 70000, 19437], + [45426, 84946, 82027, ..., 12344, 23550, 53977]]), + values=tensor([0.9311, 0.0175, 0.1352, ..., 0.9589, 0.9058, 0.3457]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.8777, 0.3638, 0.4402, ..., 0.0749, 0.6330, 0.9975]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 
10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.071704387664795 seconds + +tensor(indices=tensor([[26971, 40953, 39814, ..., 48771, 70000, 19437], + [45426, 84946, 82027, ..., 12344, 23550, 53977]]), + values=tensor([0.9311, 0.0175, 0.1352, ..., 0.9589, 0.9058, 0.3457]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.8777, 0.3638, 0.4402, ..., 0.0749, 0.6330, 0.9975]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.071704387664795 seconds + +[20.88, 20.92, 20.68, 20.64, 20.64, 20.92, 20.84, 20.84, 20.88, 20.76] +[20.6, 20.52, 23.0, 23.0, 23.92, 36.84, 51.4, 69.2, 80.84, 92.96, 91.76, 88.68, 88.6, 89.12, 88.16, 91.24] +16.57966899871826 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1832, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.071704387664795, 'TIME_S_1KI': 5.497655233441482, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 976.8089994430543, 'W': 58.91607362719782} +[20.88, 20.92, 20.68, 20.64, 20.64, 20.92, 20.84, 20.84, 20.88, 20.76, 21.32, 21.36, 21.24, 21.28, 21.28, 21.28, 21.44, 21.32, 20.96, 21.2] +378.6 +18.93 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1832, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.071704387664795, 'TIME_S_1KI': 5.497655233441482, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 976.8089994430543, 'W': 58.91607362719782, 'J_1KI': 533.19268528551, 'W_1KI': 32.15942883580667, 'W_D': 39.98607362719782, 'J_D': 662.9558652973176, 'W_D_1KI': 21.826459403492258, 'J_D_1KI': 11.914006224613678} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..c96f39e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 394, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 16.222769498825073, "TIME_S_1KI": 41.17454187519054, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 708.5154049015046, "W": 52.95461197417597, "J_1KI": 1798.2624489885902, "W_1KI": 134.40256846237554, "W_D": 33.928611974175965, "J_D": 453.95374178838733, "W_D_1KI": 86.1132283608527, "J_D_1KI": 218.5614933016566} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..92e7e49 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 100000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.3051793575286865} + +tensor(indices=tensor([[88403, 87548, 52192, 
..., 59728, 48491, 94162], + [67388, 80472, 816, ..., 95142, 91120, 76887]]), + values=tensor([0.3114, 0.0870, 0.9383, ..., 0.5238, 0.1913, 0.2103]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.2968, 0.9416, 0.8998, ..., 0.5714, 0.3998, 0.4329]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.3051793575286865 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 344 -ss 100000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.166104316711426} + +tensor(indices=tensor([[20909, 9803, 70840, ..., 90513, 81461, 46810], + [85565, 84050, 62844, ..., 50848, 41376, 28879]]), + values=tensor([0.5769, 0.3783, 0.4085, ..., 0.9544, 0.5084, 0.7093]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.9432, 0.0308, 0.7174, ..., 0.2088, 0.9069, 0.2391]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 9.166104316711426 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 394 -ss 100000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 16.222769498825073} + +tensor(indices=tensor([[20567, 75374, 98664, ..., 13464, 76560, 82237], + [39142, 19176, 53733, ..., 65264, 55675, 79644]]), + values=tensor([0.1789, 0.8134, 0.2991, ..., 0.8210, 0.2089, 0.9370]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.1532, 0.5314, 0.9651, ..., 0.7016, 0.2350, 0.5567]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 16.222769498825073 seconds + +tensor(indices=tensor([[20567, 75374, 98664, ..., 13464, 76560, 82237], + [39142, 19176, 53733, ..., 65264, 55675, 79644]]), + values=tensor([0.1789, 0.8134, 0.2991, ..., 0.8210, 0.2089, 0.9370]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.1532, 0.5314, 0.9651, ..., 0.7016, 0.2350, 0.5567]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 16.222769498825073 seconds + +[21.44, 21.4, 21.4, 21.04, 20.8, 21.04, 21.12, 21.12, 21.12, 21.28] +[21.4, 21.28, 21.48, 25.68, 27.88, 41.68, 58.0, 68.92, 83.68, 90.36, 89.76, 89.96, 89.96] +13.37967324256897 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 394, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 16.222769498825073, 'TIME_S_1KI': 41.17454187519054, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 708.5154049015046, 'W': 52.95461197417597} +[21.44, 21.4, 21.4, 21.04, 20.8, 21.04, 21.12, 21.12, 21.12, 21.28, 21.28, 21.28, 21.24, 21.12, 21.16, 21.2, 20.96, 21.0, 21.04, 20.96] +380.52 +19.026 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 394, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 
100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 16.222769498825073, 'TIME_S_1KI': 41.17454187519054, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 708.5154049015046, 'W': 52.95461197417597, 'J_1KI': 1798.2624489885902, 'W_1KI': 134.40256846237554, 'W_D': 33.928611974175965, 'J_D': 453.95374178838733, 'W_D_1KI': 86.1132283608527, 'J_D_1KI': 218.5614933016566} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..8cbcc03 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 46668, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.390840530395508, "TIME_S_1KI": 0.2226545069511337, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 319.462904214859, "W": 22.4082438518141, "J_1KI": 6.845438077801898, "W_1KI": 0.4801629350264442, "W_D": 3.910243851814098, "J_D": 55.74635234022139, "W_D_1KI": 0.08378854572328143, "J_D_1KI": 0.0017954175392834797} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..48cfbd1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,74 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.008530378341674805} + +tensor(indices=tensor([[4260, 7467, 1843, ..., 3581, 5591, 9302], + [2225, 6552, 2549, ..., 7794, 6939, 2246]]), + values=tensor([0.7011, 0.8027, 0.2229, ..., 0.7766, 0.6417, 0.3726]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.2531, 0.3956, 0.6385, ..., 0.3478, 0.4814, 0.1613]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.008530378341674805 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 12308 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.7692008018493652} + +tensor(indices=tensor([[3014, 923, 1786, ..., 902, 8732, 6670], + [3903, 6521, 536, ..., 774, 4513, 6944]]), + values=tensor([7.9449e-01, 6.8308e-01, 5.0985e-04, ..., + 9.0327e-01, 9.1565e-01, 1.2359e-01]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.3017, 0.9286, 0.0426, ..., 0.4292, 0.7149, 0.3663]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 2.7692008018493652 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 46668 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.390840530395508} + +tensor(indices=tensor([[3451, 9326, 4891, ..., 4078, 2637, 3370], + [6115, 6303, 3658, ..., 6924, 9859, 9331]]), + values=tensor([0.0295, 0.2400, 0.5735, ..., 0.1153, 0.3220, 0.9341]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.0544, 0.2230, 0.6888, ..., 0.3717, 0.9136, 0.5477]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.390840530395508 seconds + +tensor(indices=tensor([[3451, 9326, 4891, ..., 4078, 2637, 3370], + [6115, 6303, 3658, ..., 6924, 9859, 9331]]), + values=tensor([0.0295, 0.2400, 0.5735, ..., 0.1153, 0.3220, 0.9341]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.0544, 0.2230, 0.6888, ..., 0.3717, 0.9136, 0.5477]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.390840530395508 seconds + +[20.44, 20.4, 20.12, 20.24, 20.28, 20.56, 20.6, 20.44, 20.6, 20.64] +[20.4, 20.4, 20.32, 23.88, 25.0, 26.8, 27.68, 28.0, 25.0, 23.6, 23.6, 23.64, 23.52, 23.56] +14.256489992141724 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 46668, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.390840530395508, 'TIME_S_1KI': 0.2226545069511337, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 319.462904214859, 'W': 22.4082438518141} +[20.44, 20.4, 20.12, 20.24, 20.28, 20.56, 20.6, 20.44, 20.6, 20.64, 20.48, 20.48, 20.56, 20.76, 20.72, 20.84, 20.72, 20.8, 20.72, 20.68] +369.96000000000004 +18.498 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 46668, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.390840530395508, 'TIME_S_1KI': 0.2226545069511337, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 319.462904214859, 'W': 22.4082438518141, 'J_1KI': 6.845438077801898, 'W_1KI': 0.4801629350264442, 'W_D': 3.910243851814098, 'J_D': 55.74635234022139, 'W_D_1KI': 0.08378854572328143, 'J_D_1KI': 0.0017954175392834797} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..70aa208 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4771, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.456588745117188, "TIME_S_1KI": 2.1916974942605716, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 345.3197015190124, "W": 24.216936772507907, "J_1KI": 72.37889363215518, "W_1KI": 5.075861826138736, "W_D": 5.9229367725079065, "J_D": 84.45769907283783, "W_D_1KI": 1.2414455612047592, "J_D_1KI": 0.26020657329800023} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 
index 0000000..d1d5c26 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.03788900375366211} + +tensor(indices=tensor([[9546, 1087, 4155, ..., 8348, 2935, 3579], + [1888, 2547, 9842, ..., 6533, 4579, 5585]]), + values=tensor([0.1027, 0.7460, 0.0607, ..., 0.2436, 0.7855, 0.7171]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.7497, 0.9777, 0.9514, ..., 0.3250, 0.2323, 0.1579]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.03788900375366211 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 2771 -ss 10000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 6.097935914993286} + +tensor(indices=tensor([[6106, 136, 2523, ..., 8889, 9274, 3723], + [5590, 6300, 4306, ..., 7685, 3210, 6936]]), + values=tensor([0.9665, 0.3671, 0.5854, ..., 0.9200, 0.9533, 0.4205]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6909, 0.7993, 0.7158, ..., 0.8001, 0.5021, 0.0727]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 6.097935914993286 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 4771 -ss 10000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.456588745117188} + +tensor(indices=tensor([[9978, 815, 5642, ..., 2306, 6116, 2391], + [8813, 7900, 5878, ..., 4817, 5059, 8644]]), + values=tensor([0.5302, 0.7735, 0.1040, ..., 0.2245, 0.8566, 0.6063]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6286, 0.3523, 0.9674, ..., 0.1237, 0.2337, 0.3825]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.456588745117188 seconds + +tensor(indices=tensor([[9978, 815, 5642, ..., 2306, 6116, 2391], + [8813, 7900, 5878, ..., 4817, 5059, 8644]]), + values=tensor([0.5302, 0.7735, 0.1040, ..., 0.2245, 0.8566, 0.6063]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6286, 0.3523, 0.9674, ..., 0.1237, 0.2337, 0.3825]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.456588745117188 seconds + +[19.96, 19.84, 19.92, 19.76, 20.24, 20.24, 20.44, 20.6, 20.72, 20.56] +[20.36, 20.28, 20.96, 23.4, 27.08, 30.76, 31.68, 30.12, 29.76, 29.76, 24.24, 24.28, 24.32, 24.36] +14.259429454803467 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4771, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 
0.001, 'TIME_S': 10.456588745117188, 'TIME_S_1KI': 2.1916974942605716, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 345.3197015190124, 'W': 24.216936772507907} +[19.96, 19.84, 19.92, 19.76, 20.24, 20.24, 20.44, 20.6, 20.72, 20.56, 20.44, 20.12, 20.28, 20.36, 20.44, 20.36, 20.72, 20.64, 20.48, 20.48] +365.88 +18.294 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4771, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.456588745117188, 'TIME_S_1KI': 2.1916974942605716, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 345.3197015190124, 'W': 24.216936772507907, 'J_1KI': 72.37889363215518, 'W_1KI': 5.075861826138736, 'W_D': 5.9229367725079065, 'J_D': 84.45769907283783, 'W_D_1KI': 1.2414455612047592, 'J_D_1KI': 0.26020657329800023} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..6d256d1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 482, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.787339210510254, "TIME_S_1KI": 22.38037180603787, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 325.30498919487, "W": 22.873398379914686, "J_1KI": 674.9066165868672, "W_1KI": 47.45518336081885, "W_D": 4.648398379914685, "J_D": 66.10942369103431, "W_D_1KI": 9.643980041316773, "J_D_1KI": 20.008257347130236} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..b720bb4 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.24234294891357422} + +tensor(indices=tensor([[6662, 3221, 7897, ..., 532, 1571, 7483], + [6421, 9924, 1728, ..., 2998, 4696, 8454]]), + values=tensor([0.3997, 0.7558, 0.1388, ..., 0.9978, 0.9847, 0.4915]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2367, 0.8301, 0.6234, ..., 0.1068, 0.8744, 0.1789]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.24234294891357422 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 433 -ss 10000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.420110702514648} + +tensor(indices=tensor([[8814, 7878, 4438, ..., 7793, 1664, 4067], + [4175, 7327, 9728, ..., 8525, 2648, 5059]]), + values=tensor([0.6624, 0.1479, 0.0268, ..., 0.3446, 0.2815, 0.6822]), + size=(10000, 10000), nnz=1000000, 
layout=torch.sparse_coo) +tensor([0.3623, 0.7529, 0.4046, ..., 0.5406, 0.5705, 0.6638]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 9.420110702514648 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 482 -ss 10000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.787339210510254} + +tensor(indices=tensor([[7165, 886, 8232, ..., 9245, 7171, 3937], + [2750, 5776, 3871, ..., 7360, 5711, 8012]]), + values=tensor([0.9030, 0.7860, 0.3138, ..., 0.7945, 0.6629, 0.9283]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.5331, 0.7024, 0.7644, ..., 0.4663, 0.2246, 0.1004]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.787339210510254 seconds + +tensor(indices=tensor([[7165, 886, 8232, ..., 9245, 7171, 3937], + [2750, 5776, 3871, ..., 7360, 5711, 8012]]), + values=tensor([0.9030, 0.7860, 0.3138, ..., 0.7945, 0.6629, 0.9283]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.5331, 0.7024, 0.7644, ..., 0.4663, 0.2246, 0.1004]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.787339210510254 seconds + +[20.12, 20.2, 20.2, 20.2, 20.08, 20.2, 20.12, 20.16, 20.28, 20.2] +[20.2, 20.48, 21.2, 22.08, 25.56, 26.24, 26.24, 27.08, 27.08, 26.76, 24.76, 24.76, 24.8, 25.16] +14.221978902816772 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.787339210510254, 'TIME_S_1KI': 22.38037180603787, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.30498919487, 'W': 22.873398379914686} +[20.12, 20.2, 20.2, 20.2, 20.08, 20.2, 20.12, 20.16, 20.28, 20.2, 20.04, 20.16, 20.32, 20.32, 20.44, 20.36, 20.36, 20.36, 20.4, 20.32] +364.5 +18.225 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.787339210510254, 'TIME_S_1KI': 22.38037180603787, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.30498919487, 'W': 22.873398379914686, 'J_1KI': 674.9066165868672, 'W_1KI': 47.45518336081885, 'W_D': 4.648398379914685, 'J_D': 66.10942369103431, 'W_D_1KI': 9.643980041316773, 'J_D_1KI': 20.008257347130236} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..cdb5ffb --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 93, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.05737018585205, "TIME_S_1KI": 108.14376543926936, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 
373.22606477737423, "W": 24.433023960718955, "J_1KI": 4013.183492229831, "W_1KI": 262.7206877496662, "W_D": 6.007023960718957, "J_D": 91.76014878416062, "W_D_1KI": 64.59165549160168, "J_D_1KI": 694.5339300172224} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..e80daea --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 1.1215338706970215} + +tensor(indices=tensor([[9734, 2448, 6356, ..., 4917, 9802, 3270], + [9826, 4299, 6896, ..., 8201, 7630, 8237]]), + values=tensor([0.5595, 0.2399, 0.7407, ..., 0.9554, 0.1670, 0.2830]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.8054, 0.5531, 0.8915, ..., 0.9134, 0.0499, 0.3252]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 1.1215338706970215 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 93 -ss 10000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.05737018585205} + +tensor(indices=tensor([[9900, 3881, 7981, ..., 4026, 5068, 8385], + [3269, 2600, 1720, ..., 664, 3017, 89]]), + values=tensor([0.9872, 0.0396, 0.9040, ..., 0.9380, 0.9571, 0.8866]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.3769, 0.2213, 0.8969, ..., 0.3864, 0.9817, 0.7398]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.05737018585205 seconds + +tensor(indices=tensor([[9900, 3881, 7981, ..., 4026, 5068, 8385], + [3269, 2600, 1720, ..., 664, 3017, 89]]), + values=tensor([0.9872, 0.0396, 0.9040, ..., 0.9380, 0.9571, 0.8866]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.3769, 0.2213, 0.8969, ..., 0.3864, 0.9817, 0.7398]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.05737018585205 seconds + +[20.04, 20.2, 20.28, 20.4, 20.52, 20.52, 20.12, 20.24, 20.08, 20.16] +[20.4, 20.56, 20.56, 24.08, 25.0, 30.4, 31.68, 32.28, 29.84, 28.04, 25.2, 25.2, 25.12, 25.4, 25.36] +15.27547574043274 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 93, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.05737018585205, 'TIME_S_1KI': 108.14376543926936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 373.22606477737423, 'W': 24.433023960718955} +[20.04, 20.2, 20.28, 20.4, 20.52, 20.52, 20.12, 20.24, 20.08, 20.16, 20.36, 20.36, 20.36, 20.32, 20.8, 20.88, 21.12, 21.0, 20.92, 20.24] +368.52 +18.426 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 93, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 
'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.05737018585205, 'TIME_S_1KI': 108.14376543926936, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 373.22606477737423, 'W': 24.433023960718955, 'J_1KI': 4013.183492229831, 'W_1KI': 262.7206877496662, 'W_D': 6.007023960718957, 'J_D': 91.76014878416062, 'W_D_1KI': 64.59165549160168, 'J_D_1KI': 694.5339300172224} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..573238c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 48, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.678550004959106, "TIME_S_1KI": 222.46979176998138, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 425.31584163665775, "W": 24.57520335684497, "J_1KI": 8860.746700763704, "W_1KI": 511.9834032676035, "W_D": 5.9762033568449695, "J_D": 103.42839990377426, "W_D_1KI": 124.50423660093686, "J_D_1KI": 2593.838262519518} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..987220e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 2.180788278579712} + +tensor(indices=tensor([[7020, 9750, 6426, ..., 787, 3178, 3390], + [ 795, 78, 6603, ..., 8745, 2373, 9995]]), + values=tensor([0.1634, 0.1032, 0.5646, ..., 0.7997, 0.2164, 0.4007]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8075, 0.9053, 0.9449, ..., 0.1857, 0.8558, 0.5072]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 2.180788278579712 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 48 -ss 10000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.678550004959106} + +tensor(indices=tensor([[8095, 9416, 2882, ..., 7380, 2196, 3263], + [8948, 6866, 3950, ..., 8270, 8493, 8265]]), + values=tensor([0.1962, 0.2056, 0.3047, ..., 0.7796, 0.5955, 0.4389]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.7710, 0.9411, 0.3029, ..., 0.1351, 0.4651, 0.1903]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.678550004959106 seconds + +tensor(indices=tensor([[8095, 9416, 2882, ..., 7380, 2196, 3263], + [8948, 6866, 3950, ..., 8270, 8493, 8265]]), + values=tensor([0.1962, 0.2056, 0.3047, ..., 0.7796, 0.5955, 0.4389]), + 
size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.7710, 0.9411, 0.3029, ..., 0.1351, 0.4651, 0.1903]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.678550004959106 seconds + +[20.84, 20.8, 21.2, 21.16, 21.0, 20.84, 20.84, 20.96, 20.92, 20.92] +[20.96, 20.92, 21.08, 21.92, 23.68, 27.48, 30.76, 31.2, 30.36, 29.88, 29.88, 24.72, 24.8, 24.72, 24.88, 25.0, 29.2] +17.30670690536499 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.678550004959106, 'TIME_S_1KI': 222.46979176998138, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 425.31584163665775, 'W': 24.57520335684497} +[20.84, 20.8, 21.2, 21.16, 21.0, 20.84, 20.84, 20.96, 20.92, 20.92, 20.16, 20.36, 20.4, 20.24, 20.52, 20.4, 20.36, 20.36, 20.36, 20.6] +371.98 +18.599 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.678550004959106, 'TIME_S_1KI': 222.46979176998138, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 425.31584163665775, 'W': 24.57520335684497, 'J_1KI': 8860.746700763704, 'W_1KI': 511.9834032676035, 'W_D': 5.9762033568449695, 'J_D': 103.42839990377426, 'W_D_1KI': 124.50423660093686, 'J_D_1KI': 2593.838262519518} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..3af4a2d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 24, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.624644994735718, "TIME_S_1KI": 442.6935414473216, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 518.5183028793334, "W": 25.44127487819161, "J_1KI": 21604.929286638893, "W_1KI": 1060.0531199246504, "W_D": 6.432274878191613, "J_D": 131.0961132829189, "W_D_1KI": 268.0114532579839, "J_D_1KI": 11167.143885749327} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..c24e550 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 4.365953207015991} + +tensor(indices=tensor([[ 99, 2326, 2539, ..., 4365, 6270, 8280], + [5551, 6794, 1853, ..., 2090, 5489, 112]]), + values=tensor([0.0738, 0.6009, 0.0674, ..., 0.2677, 0.6451, 0.5010]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.7928, 0.9854, 0.6657, ..., 0.9983, 0.2925, 0.1386]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 4.365953207015991 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 24 -ss 10000 -sd 0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.624644994735718} + +tensor(indices=tensor([[ 88, 1641, 2149, ..., 1236, 5843, 5202], + [9639, 9310, 7087, ..., 9751, 3863, 4367]]), + values=tensor([0.3768, 0.6802, 0.9278, ..., 0.8025, 0.2272, 0.4561]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.0615, 0.7028, 0.9390, ..., 0.7443, 0.5986, 0.3258]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.624644994735718 seconds + +tensor(indices=tensor([[ 88, 1641, 2149, ..., 1236, 5843, 5202], + [9639, 9310, 7087, ..., 9751, 3863, 4367]]), + values=tensor([0.3768, 0.6802, 0.9278, ..., 0.8025, 0.2272, 0.4561]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.0615, 0.7028, 0.9390, ..., 0.7443, 0.5986, 0.3258]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.624644994735718 seconds + +[20.28, 20.24, 20.48, 20.76, 20.64, 20.6, 20.56, 20.32, 20.32, 20.44] +[20.44, 20.56, 21.0, 22.44, 23.28, 24.96, 30.76, 30.76, 30.88, 30.08, 29.68, 25.0, 25.0, 24.96, 25.04, 25.0, 24.84, 32.12, 32.76, 33.12] +20.380987405776978 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 24, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.624644994735718, 'TIME_S_1KI': 442.6935414473216, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 518.5183028793334, 'W': 25.44127487819161} +[20.28, 20.24, 20.48, 20.76, 20.64, 20.6, 20.56, 20.32, 20.32, 20.44, 23.68, 23.0, 21.72, 21.2, 20.76, 20.84, 21.56, 21.56, 22.12, 22.6] +380.17999999999995 +19.008999999999997 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 24, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.624644994735718, 'TIME_S_1KI': 442.6935414473216, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 518.5183028793334, 'W': 25.44127487819161, 'J_1KI': 21604.929286638893, 'W_1KI': 1060.0531199246504, 'W_D': 6.432274878191613, 'J_D': 131.0961132829189, 'W_D_1KI': 268.0114532579839, 'J_D_1KI': 11167.143885749327} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..05e572b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 16, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.421019077301025, "TIME_S_1KI": 651.3136923313141, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 587.9804890060425, "W": 26.20605496671889, "J_1KI": 36748.780562877655, "W_1KI": 
1637.8784354199306, "W_D": 7.64405496671889, "J_D": 171.5082710094452, "W_D_1KI": 477.75343541993067, "J_D_1KI": 29859.589713745667} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..3df6353 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.48048210144043} + +tensor(indices=tensor([[ 852, 8454, 9427, ..., 9165, 5261, 694], + [9596, 8684, 4910, ..., 4257, 2424, 1009]]), + values=tensor([0.9528, 0.4086, 0.1813, ..., 0.8890, 0.1647, 0.4658]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.0108, 0.4453, 0.6905, ..., 0.6137, 0.6470, 0.8002]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 6.48048210144043 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 10000 -sd 0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.421019077301025} + +tensor(indices=tensor([[7206, 2, 5429, ..., 8763, 1098, 3639], + [3192, 8766, 6974, ..., 6071, 1653, 2948]]), + values=tensor([0.9744, 0.3006, 0.2871, ..., 0.6358, 0.0072, 0.0285]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.0154, 0.9798, 0.3106, ..., 0.3384, 0.2682, 0.4514]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.421019077301025 seconds + +tensor(indices=tensor([[7206, 2, 5429, ..., 8763, 1098, 3639], + [3192, 8766, 6974, ..., 6071, 1653, 2948]]), + values=tensor([0.9744, 0.3006, 0.2871, ..., 0.6358, 0.0072, 0.0285]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.0154, 0.9798, 0.3106, ..., 0.3384, 0.2682, 0.4514]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.421019077301025 seconds + +[20.48, 20.56, 20.48, 20.36, 20.32, 20.36, 20.4, 20.56, 21.0, 20.84] +[20.84, 20.88, 20.88, 24.16, 25.28, 27.52, 28.68, 34.4, 31.72, 30.48, 30.4, 29.28, 24.92, 25.0, 25.12, 25.08, 25.08, 25.2, 30.04, 31.44, 31.76, 32.28] +22.43681812286377 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 16, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.421019077301025, 'TIME_S_1KI': 651.3136923313141, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 587.9804890060425, 'W': 26.20605496671889} +[20.48, 20.56, 20.48, 20.36, 20.32, 20.36, 20.4, 20.56, 21.0, 20.84, 20.8, 20.88, 20.72, 20.72, 20.68, 20.6, 20.8, 20.72, 20.72, 20.6] +371.24 +18.562 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 16, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 
10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.421019077301025, 'TIME_S_1KI': 651.3136923313141, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 587.9804890060425, 'W': 26.20605496671889, 'J_1KI': 36748.780562877655, 'W_1KI': 1637.8784354199306, 'W_D': 7.64405496671889, 'J_D': 171.5082710094452, 'W_D_1KI': 477.75343541993067, 'J_D_1KI': 29859.589713745667} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..b6f44f3 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 12, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.651156663894653, "TIME_S_1KI": 887.5963886578878, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 679.2371678161621, "W": 26.61789400659298, "J_1KI": 56603.09731801351, "W_1KI": 2218.1578338827485, "W_D": 7.888894006592977, "J_D": 201.30931548953052, "W_D_1KI": 657.4078338827482, "J_D_1KI": 54783.98615689568} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..700d83b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,58 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 8.748601913452148} + +tensor(indices=tensor([[9814, 862, 3411, ..., 142, 4949, 2967], + [1718, 5091, 4646, ..., 2250, 6496, 2288]]), + values=tensor([0.4131, 0.4291, 0.7890, ..., 0.6343, 0.9085, 0.8753]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.6637, 0.4070, 0.3620, ..., 0.4265, 0.6034, 0.2691]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 8.748601913452148 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 12 -ss 10000 -sd 0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.651156663894653} + +tensor(indices=tensor([[4239, 9035, 8971, ..., 6883, 8308, 3042], + [6578, 5910, 3542, ..., 641, 8094, 2809]]), + values=tensor([0.0724, 0.6155, 0.4483, ..., 0.2306, 0.4346, 0.3652]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([1.9211e-04, 5.6409e-01, 9.6984e-01, ..., 8.2036e-01, 6.2265e-01, + 8.9305e-01]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.651156663894653 seconds + +tensor(indices=tensor([[4239, 9035, 8971, ..., 6883, 8308, 3042], + [6578, 5910, 3542, ..., 641, 8094, 2809]]), + values=tensor([0.0724, 0.6155, 0.4483, ..., 0.2306, 0.4346, 0.3652]), + size=(10000, 10000), 
nnz=40000000, layout=torch.sparse_coo) +tensor([1.9211e-04, 5.6409e-01, 9.6984e-01, ..., 8.2036e-01, 6.2265e-01, + 8.9305e-01]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.651156663894653 seconds + +[20.4, 20.44, 20.76, 21.12, 21.12, 20.92, 20.72, 20.52, 20.24, 20.24] +[20.36, 20.24, 20.96, 21.84, 23.4, 24.68, 26.24, 27.6, 32.28, 32.28, 31.36, 31.32, 31.08, 25.16, 25.0, 24.88, 24.88, 24.88, 27.4, 32.8, 33.0, 33.0, 33.12, 33.64, 27.52] +25.518065690994263 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 12, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.651156663894653, 'TIME_S_1KI': 887.5963886578878, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 679.2371678161621, 'W': 26.61789400659298} +[20.4, 20.44, 20.76, 21.12, 21.12, 20.92, 20.72, 20.52, 20.24, 20.24, 20.72, 20.84, 20.64, 20.76, 20.84, 21.0, 21.04, 21.28, 21.16, 21.0] +374.58000000000004 +18.729000000000003 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 12, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.651156663894653, 'TIME_S_1KI': 887.5963886578878, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 679.2371678161621, 'W': 26.61789400659298, 'J_1KI': 56603.09731801351, 'W_1KI': 2218.1578338827485, 'W_D': 7.888894006592977, 'J_D': 201.30931548953052, 'W_D_1KI': 657.4078338827482, 'J_D_1KI': 54783.98615689568} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..2cdeddc --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.939157962799072, "TIME_S_1KI": 1093.9157962799072, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 772.4580437278748, "W": 27.105212052696082, "J_1KI": 77245.80437278749, "W_1KI": 2710.5212052696083, "W_D": 8.568212052696083, "J_D": 244.1812411429883, "W_D_1KI": 856.8212052696083, "J_D_1KI": 85682.12052696083} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..fc040ba --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.939157962799072} + +tensor(indices=tensor([[2819, 6810, 1468, ..., 3087, 6551, 2497], + [6022, 1330, 2101, ..., 7279, 1779, 9798]]), + values=tensor([0.9764, 0.3420, 0.6138, ..., 0.3178, 0.7752, 0.7221]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.5434, 0.5258, 0.5249, 
..., 0.3573, 0.2980, 0.4188]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.939157962799072 seconds + +tensor(indices=tensor([[2819, 6810, 1468, ..., 3087, 6551, 2497], + [6022, 1330, 2101, ..., 7279, 1779, 9798]]), + values=tensor([0.9764, 0.3420, 0.6138, ..., 0.3178, 0.7752, 0.7221]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.5434, 0.5258, 0.5249, ..., 0.3573, 0.2980, 0.4188]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.939157962799072 seconds + +[20.56, 20.36, 20.4, 20.44, 20.44, 20.48, 20.6, 20.8, 20.64, 20.8] +[20.92, 20.52, 23.88, 23.88, 25.48, 27.24, 28.48, 29.12, 27.44, 31.2, 31.16, 31.32, 31.24, 27.52, 25.36, 25.48, 25.48, 24.84, 24.76, 24.92, 31.52, 32.64, 33.44, 34.56, 35.24, 29.0, 28.24, 28.08] +28.49850583076477 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.939157962799072, 'TIME_S_1KI': 1093.9157962799072, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 772.4580437278748, 'W': 27.105212052696082} +[20.56, 20.36, 20.4, 20.44, 20.44, 20.48, 20.6, 20.8, 20.64, 20.8, 20.36, 20.4, 20.48, 20.48, 20.68, 20.68, 20.84, 20.84, 20.8, 21.04] +370.73999999999995 +18.537 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.939157962799072, 'TIME_S_1KI': 1093.9157962799072, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 772.4580437278748, 'W': 27.105212052696082, 'J_1KI': 77245.80437278749, 'W_1KI': 2710.5212052696083, 'W_D': 8.568212052696083, 'J_D': 244.1812411429883, 'W_D_1KI': 856.8212052696083, 'J_D_1KI': 85682.12052696083} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..5049a60 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 322612, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.590221643447876, "TIME_S_1KI": 0.0328264963592423, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 312.4309623336792, "W": 21.94978314858273, "J_1KI": 0.9684418506865188, "W_1KI": 0.06803771449475757, "W_D": 3.480783148582727, "J_D": 49.54511037421225, "W_D_1KI": 0.01078937903296445, "J_D_1KI": 3.344382426247149e-05} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..9cc8c51 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,855 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": 
[10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0063915252685546875} + +tensor(indices=tensor([[6531, 5009, 6226, ..., 7299, 5897, 3103], + [1509, 1798, 8047, ..., 7873, 8465, 9893]]), + values=tensor([1.8274e-01, 1.3171e-01, 9.4703e-01, 6.9894e-01, + 8.8842e-01, 2.8949e-01, 4.8613e-01, 9.4468e-01, + 3.1966e-01, 3.8973e-01, 8.7980e-02, 1.8208e-01, + 8.0635e-01, 4.3709e-01, 7.1457e-01, 1.7143e-03, + 3.8915e-01, 7.0611e-01, 2.7473e-01, 4.9061e-01, + 3.6810e-01, 5.8967e-01, 7.0480e-01, 5.3766e-01, + 4.0913e-01, 9.6549e-01, 6.1571e-01, 8.3093e-01, + 7.3101e-01, 6.1409e-01, 8.1888e-01, 4.3830e-01, + 6.0785e-01, 8.1728e-01, 5.6484e-01, 4.5429e-01, + 1.3598e-02, 9.4445e-01, 6.2773e-01, 8.4316e-02, + 9.9915e-01, 4.1969e-01, 2.3328e-01, 5.1494e-01, + 5.8779e-01, 4.7868e-01, 8.7937e-01, 2.6790e-01, + 5.7259e-01, 4.2882e-01, 3.1565e-01, 3.2625e-01, + 9.0146e-01, 8.0874e-02, 2.4090e-01, 2.6131e-01, + 2.0288e-01, 3.8924e-01, 7.4831e-01, 6.3677e-01, + 8.1293e-01, 2.6238e-01, 3.2585e-01, 7.3452e-01, + 5.6416e-01, 5.6782e-02, 4.2030e-01, 8.0623e-01, + 1.4971e-01, 3.0893e-01, 3.9527e-01, 1.6948e-01, + 4.7552e-01, 1.7000e-01, 8.7623e-01, 5.2396e-01, + 1.2589e-01, 1.8006e-03, 4.1573e-01, 1.3443e-01, + 7.3887e-01, 6.5673e-01, 8.5907e-02, 7.5865e-01, + 6.6659e-01, 1.0121e-01, 6.5711e-01, 5.9729e-02, + 6.8244e-01, 6.5463e-02, 8.2005e-01, 9.0804e-01, + 7.9953e-01, 9.4009e-01, 2.3069e-02, 8.2841e-01, + 5.8589e-01, 3.1697e-01, 7.4998e-01, 6.8794e-01, + 7.6612e-01, 9.2560e-01, 6.6761e-02, 4.8006e-01, + 9.4050e-01, 3.5698e-01, 9.8101e-01, 5.4168e-01, + 7.6054e-01, 3.1150e-01, 8.6051e-03, 9.3417e-01, + 7.5034e-01, 8.8645e-02, 6.7571e-01, 8.8507e-01, + 9.5099e-01, 9.3354e-01, 5.1056e-01, 9.0909e-01, + 7.8455e-01, 3.7142e-01, 9.8336e-01, 7.5787e-01, + 6.2696e-01, 8.3049e-01, 4.5069e-01, 7.9309e-01, + 7.3250e-01, 8.4018e-01, 4.1472e-01, 9.1284e-01, + 4.6630e-01, 8.6820e-01, 7.8861e-01, 6.5541e-01, + 3.5383e-01, 2.9937e-01, 9.3731e-01, 2.1894e-01, + 4.4383e-02, 5.9312e-01, 3.2496e-02, 7.2981e-01, + 3.1451e-01, 8.7708e-01, 9.1672e-01, 2.3202e-01, + 2.9589e-01, 5.9435e-01, 3.4765e-01, 9.0311e-01, + 1.8333e-01, 1.1106e-01, 4.4052e-01, 5.9000e-01, + 7.9852e-02, 2.6718e-01, 3.4857e-02, 9.1684e-01, + 7.6505e-01, 8.8619e-01, 8.0262e-01, 2.5563e-01, + 7.2263e-01, 9.3396e-01, 7.3929e-01, 3.8702e-01, + 1.3226e-01, 5.7927e-01, 5.4189e-01, 4.3096e-01, + 3.4655e-01, 3.9122e-01, 7.5965e-02, 6.5337e-01, + 2.1138e-01, 8.4234e-01, 8.7264e-01, 2.2717e-01, + 9.0429e-02, 5.7103e-01, 6.2882e-01, 7.3698e-01, + 6.3623e-01, 6.3982e-01, 1.9739e-01, 8.4060e-01, + 3.3003e-01, 4.9660e-01, 1.9799e-01, 7.0780e-01, + 6.3067e-01, 2.9768e-01, 5.6642e-01, 9.7123e-02, + 8.9369e-01, 2.8787e-01, 5.1051e-01, 4.9204e-01, + 2.4530e-01, 6.3961e-01, 3.5993e-01, 6.7177e-02, + 3.7201e-01, 9.4129e-01, 6.9687e-01, 5.1933e-01, + 6.7800e-01, 5.0805e-01, 1.2808e-01, 9.4823e-01, + 6.9286e-01, 4.9013e-01, 6.7241e-03, 8.7866e-01, + 6.1656e-02, 6.4661e-01, 8.6992e-01, 8.4496e-01, + 9.4215e-01, 4.4664e-01, 4.9955e-01, 9.9948e-01, + 4.9480e-01, 5.4024e-01, 4.6492e-01, 2.5849e-01, + 3.8974e-01, 6.2627e-01, 5.7653e-01, 4.2389e-01, + 7.6787e-01, 7.5789e-01, 2.4184e-01, 2.8465e-01, + 5.5595e-01, 7.3342e-02, 7.1497e-01, 9.6346e-01, + 9.7746e-01, 7.9472e-01, 8.7652e-02, 1.8715e-01, + 4.5932e-01, 3.4563e-01, 7.7006e-01, 6.5611e-01, + 1.1189e-01, 1.4374e-01, 6.1668e-01, 6.9152e-01, + 1.7084e-01, 2.5926e-01, 7.8839e-01, 3.0434e-01, + 1.4793e-02, 8.5789e-02, 9.8418e-01, 4.2752e-01, + 9.2107e-01, 5.9224e-01, 
3.9420e-02, 4.8915e-01, + 4.4386e-01, 2.0829e-01, 7.3444e-01, 8.1903e-01, + 2.6290e-01, 6.6537e-01, 8.4000e-01, 2.5595e-01, + 4.0731e-01, 6.8829e-01, 4.3187e-01, 2.5161e-01, + 5.7778e-02, 4.2142e-01, 3.4661e-01, 9.5717e-03, + 8.2014e-01, 9.9152e-01, 7.9126e-01, 5.5851e-01, + 3.9898e-01, 5.4644e-01, 3.6736e-01, 7.5146e-01, + 1.3484e-02, 8.3191e-01, 4.3689e-01, 5.7221e-01, + 7.4722e-01, 3.4696e-01, 5.7035e-01, 6.9266e-01, + 3.9067e-01, 2.6615e-01, 8.6375e-02, 5.2389e-01, + 2.3607e-01, 3.8171e-02, 4.6422e-01, 4.2960e-01, + 7.6993e-01, 7.2438e-01, 6.6914e-01, 2.8809e-01, + 5.9982e-01, 9.3564e-01, 6.7298e-01, 9.8455e-01, + 2.1397e-01, 4.6357e-01, 3.5856e-01, 4.4183e-01, + 2.7721e-01, 3.0327e-03, 7.4110e-01, 2.2977e-01, + 8.7459e-01, 1.0703e-01, 7.9397e-01, 4.5549e-01, + 3.9910e-01, 6.3637e-01, 4.3441e-01, 3.9538e-01, + 1.2420e-01, 4.6351e-01, 8.3468e-01, 8.4438e-01, + 7.6945e-02, 5.5673e-01, 8.8493e-01, 7.4308e-01, + 6.1880e-01, 6.9064e-01, 6.7795e-01, 5.4500e-01, + 7.5239e-01, 2.0050e-01, 3.4931e-01, 6.2398e-01, + 9.7196e-01, 6.3820e-01, 7.3089e-01, 3.4582e-01, + 8.0866e-01, 3.5258e-01, 6.5847e-01, 8.8414e-01, + 4.1696e-01, 1.8678e-01, 9.2396e-01, 5.9620e-01, + 1.6077e-01, 4.2120e-01, 1.8918e-01, 5.2123e-01, + 1.7851e-01, 3.4184e-01, 5.4679e-01, 1.3234e-01, + 9.1826e-02, 7.9099e-01, 8.8189e-01, 9.0175e-01, + 1.7123e-01, 6.0683e-01, 4.2738e-01, 5.6278e-01, + 4.9255e-01, 7.9049e-01, 8.7944e-01, 9.2670e-01, + 8.9780e-01, 1.0749e-01, 4.4563e-01, 2.6063e-01, + 2.5849e-02, 1.1129e-01, 3.1267e-02, 2.2027e-01, + 5.6144e-01, 2.9170e-01, 1.8039e-01, 1.5903e-01, + 3.4717e-01, 4.0336e-01, 7.2706e-01, 1.7974e-02, + 2.3456e-02, 8.2536e-02, 3.5354e-01, 1.7750e-01, + 9.4682e-01, 8.3936e-01, 2.2513e-01, 4.4961e-01, + 2.3696e-01, 8.5964e-01, 9.8494e-01, 1.7563e-02, + 5.3791e-02, 3.0311e-01, 1.8444e-01, 7.8937e-01, + 1.2962e-01, 8.5763e-01, 5.9738e-01, 8.3023e-01, + 4.2043e-01, 8.1808e-01, 5.2683e-02, 7.7283e-01, + 1.1802e-01, 4.8761e-02, 9.4686e-01, 4.9158e-01, + 6.8594e-01, 8.6413e-01, 6.3360e-01, 7.0435e-01, + 4.4241e-01, 5.4223e-01, 9.2170e-02, 1.2718e-01, + 6.9363e-01, 2.5608e-01, 7.5010e-01, 6.3292e-01, + 5.9086e-01, 3.2914e-01, 3.1103e-01, 8.9677e-01, + 1.0121e-01, 4.3241e-01, 4.2980e-01, 2.8694e-01, + 4.0966e-01, 8.1106e-01, 3.8040e-01, 8.5407e-01, + 9.6007e-01, 9.5141e-01, 3.4374e-01, 8.3399e-02, + 7.3088e-02, 4.4979e-02, 5.9427e-02, 9.9277e-01, + 5.6203e-02, 7.0467e-01, 3.8642e-01, 7.3438e-01, + 1.9552e-01, 7.4537e-01, 6.5169e-01, 4.3856e-01, + 5.5461e-02, 2.3476e-01, 6.7478e-02, 5.0204e-01, + 7.5056e-01, 8.3286e-01, 7.1863e-01, 8.6393e-01, + 1.7863e-01, 4.9429e-01, 3.4442e-01, 3.2541e-01, + 2.5106e-01, 1.9425e-01, 1.8020e-01, 5.1441e-01, + 3.5170e-01, 8.7807e-01, 8.6473e-01, 1.9229e-01, + 6.5321e-01, 6.9682e-01, 1.7086e-01, 8.7784e-01, + 4.0662e-01, 7.2317e-01, 5.0809e-01, 9.2901e-01, + 8.4092e-01, 8.6549e-01, 6.3197e-01, 2.3633e-01, + 3.0594e-01, 4.2200e-01, 7.4541e-01, 1.2018e-01, + 7.5112e-02, 1.3092e-01, 9.2154e-01, 8.8811e-02, + 8.7578e-01, 9.0517e-01, 5.6398e-01, 6.2378e-01, + 4.4698e-02, 9.5865e-01, 2.8098e-01, 4.1172e-01, + 8.0593e-01, 7.9595e-01, 4.1944e-01, 3.3432e-01, + 6.3504e-01, 3.8222e-01, 5.3025e-01, 7.0158e-02, + 8.8135e-01, 2.2467e-01, 6.2387e-01, 3.4397e-01, + 9.6666e-01, 9.5846e-01, 6.7847e-02, 6.2782e-01, + 7.9968e-01, 6.7155e-01, 7.8801e-01, 9.7779e-01, + 9.0575e-01, 9.2961e-01, 9.4715e-01, 7.0690e-01, + 6.8618e-01, 2.3570e-01, 9.0781e-01, 5.8170e-02, + 7.3564e-01, 8.2641e-01, 1.5290e-01, 1.5723e-01, + 8.4923e-01, 8.1570e-02, 2.8130e-01, 9.2513e-01, + 4.3182e-01, 6.6054e-01, 
2.4382e-01, 7.2432e-01, + 3.4434e-01, 3.4177e-01, 7.4884e-01, 9.1796e-01, + 9.1789e-02, 3.4813e-01, 6.5085e-01, 7.2365e-01, + 2.4657e-01, 1.3651e-03, 8.5138e-01, 8.8796e-01, + 9.7451e-02, 3.2607e-01, 1.4216e-01, 1.8386e-01, + 9.3178e-01, 9.7611e-01, 3.3144e-01, 5.6880e-01, + 6.8426e-01, 3.6420e-01, 5.4294e-01, 9.2576e-01, + 4.8927e-01, 2.7722e-01, 9.8398e-01, 1.6117e-02, + 8.0649e-01, 9.9951e-01, 8.5810e-01, 3.4273e-01, + 9.5921e-01, 6.3507e-02, 4.7643e-02, 8.7609e-02, + 6.8921e-02, 9.8403e-01, 4.2887e-01, 2.0880e-01, + 2.9444e-01, 5.8552e-01, 9.7816e-01, 1.1658e-01, + 6.1893e-01, 4.5843e-01, 6.4214e-01, 1.8712e-01, + 7.3632e-01, 8.8423e-02, 5.5959e-01, 2.3478e-01, + 5.1437e-01, 8.0746e-01, 3.8025e-01, 2.3011e-01, + 7.1813e-01, 4.0158e-01, 5.9806e-02, 9.3487e-01, + 5.9841e-02, 4.4228e-01, 7.5495e-01, 4.2698e-01, + 2.4272e-01, 5.5055e-01, 3.0665e-01, 7.9681e-01, + 1.4962e-01, 3.1573e-01, 9.3048e-02, 3.6544e-01, + 8.9697e-01, 8.6956e-01, 7.4429e-01, 3.8395e-02, + 1.7191e-01, 5.7807e-02, 2.4042e-01, 3.1787e-01, + 7.5392e-01, 8.7488e-01, 1.0773e-01, 6.1583e-01, + 8.2142e-01, 4.0587e-01, 7.4155e-01, 8.8869e-01, + 3.7847e-02, 4.3624e-01, 6.5675e-01, 3.2238e-01, + 7.0351e-01, 5.2367e-01, 1.5838e-01, 5.5461e-01, + 2.9382e-01, 2.8515e-01, 2.3704e-01, 7.2705e-01, + 2.0832e-01, 2.6590e-02, 6.7537e-01, 2.0931e-01, + 5.8498e-01, 2.8330e-01, 4.6502e-02, 3.6790e-01, + 4.5451e-01, 7.7444e-01, 2.3394e-01, 4.9206e-01, + 8.0746e-01, 3.1398e-01, 6.6716e-01, 8.0770e-01, + 6.9697e-01, 2.4173e-02, 8.8831e-02, 7.9790e-01, + 3.3161e-01, 3.8369e-02, 6.9336e-01, 4.0335e-01, + 2.2317e-01, 1.0822e-01, 1.6387e-01, 5.5667e-01, + 5.7041e-01, 7.1765e-01, 3.9602e-01, 3.9177e-02, + 4.8498e-01, 1.3937e-02, 4.0747e-01, 8.6653e-01, + 8.1159e-01, 1.1919e-01, 9.8707e-01, 9.1502e-01, + 7.3238e-01, 3.5334e-02, 4.4768e-01, 6.2685e-01, + 1.6206e-01, 6.8830e-02, 3.1334e-01, 1.4842e-01, + 6.9137e-01, 8.1889e-01, 6.7155e-01, 6.4816e-01, + 5.8655e-01, 2.2397e-01, 6.9494e-01, 1.5247e-01, + 3.8091e-01, 6.1703e-01, 3.7711e-01, 8.0704e-01, + 5.7216e-01, 1.5334e-01, 4.5103e-01, 1.1769e-01, + 2.7928e-01, 4.6722e-02, 1.5661e-01, 2.0047e-02, + 2.7392e-01, 1.7883e-04, 9.7608e-01, 3.1084e-01, + 3.9239e-01, 4.2157e-01, 6.1009e-01, 1.1239e-02, + 7.3249e-01, 5.5877e-01, 7.8178e-01, 7.5887e-01, + 1.0220e-01, 1.9925e-01, 6.4808e-02, 4.7862e-01, + 1.4682e-01, 3.2238e-01, 8.1676e-01, 8.9943e-02, + 9.5661e-01, 1.2316e-01, 3.9183e-01, 1.5529e-01, + 3.3565e-02, 4.1912e-01, 1.5265e-01, 1.6918e-01, + 8.4822e-01, 5.6437e-02, 4.3839e-01, 3.5378e-01, + 2.3601e-01, 4.6402e-01, 6.4736e-01, 4.1283e-01, + 9.3776e-01, 5.5137e-01, 2.4268e-01, 2.9512e-01, + 9.8574e-01, 8.5497e-01, 8.7782e-01, 9.8983e-01, + 7.8944e-01, 2.0972e-01, 8.2484e-01, 6.9684e-02, + 7.0552e-01, 8.0297e-01, 5.3394e-01, 8.4794e-01, + 9.3146e-01, 2.3750e-01, 3.6112e-01, 6.3799e-01, + 4.0536e-02, 6.5030e-01, 3.9558e-01, 1.3887e-01, + 4.3541e-01, 3.5313e-01, 6.8191e-01, 7.4065e-01, + 7.8729e-01, 6.4817e-01, 1.7835e-01, 5.8161e-01, + 7.6717e-01, 5.7891e-01, 4.5927e-01, 8.3266e-01, + 2.0825e-03, 6.8615e-01, 9.4770e-01, 5.2583e-01, + 5.0315e-01, 9.5751e-01, 9.7359e-01, 4.9156e-01, + 9.6610e-01, 4.0715e-01, 4.2464e-01, 1.1841e-01, + 6.4372e-01, 2.8158e-01, 9.9665e-01, 7.3244e-01, + 5.6274e-01, 3.3760e-01, 8.2306e-01, 3.7732e-01, + 7.2901e-01, 2.2468e-01, 1.5846e-01, 6.4550e-01, + 5.8227e-01, 8.0545e-01, 2.0274e-01, 6.4922e-01, + 9.3740e-01, 5.6141e-01, 2.4653e-01, 7.0960e-01, + 1.6331e-01, 5.8355e-01, 7.6376e-01, 4.7350e-02, + 6.6533e-01, 4.7537e-01, 3.6943e-02, 4.9880e-01, + 4.0803e-01, 9.1669e-01, 
9.4891e-01, 9.1845e-01, + 9.4092e-01, 2.1117e-01, 6.9429e-01, 9.6603e-01, + 6.4735e-02, 6.5162e-01, 1.7921e-01, 2.6392e-02, + 2.6736e-01, 9.2894e-01, 7.2489e-01, 9.3865e-01, + 4.7631e-01, 9.8383e-01, 7.2129e-01, 4.7492e-02, + 7.3194e-01, 2.4074e-01, 8.3825e-01, 5.9133e-01, + 2.7701e-01, 5.1323e-01, 6.8115e-01, 4.1453e-01, + 4.4066e-01, 2.6814e-01, 4.0186e-01, 8.2413e-01, + 7.3857e-01, 3.1618e-01, 2.5082e-01, 3.8493e-01, + 9.8323e-01, 8.5277e-01, 6.9363e-02, 8.1748e-01, + 9.6895e-02, 9.5787e-01, 6.5146e-01, 9.4993e-01, + 2.1052e-01, 7.8882e-01, 2.4942e-01, 2.0535e-01, + 1.5358e-01, 4.6167e-01, 8.4672e-01, 4.3349e-01, + 4.2356e-02, 9.9978e-01, 7.0084e-01, 1.2268e-01, + 9.2513e-01, 7.7630e-01, 8.5385e-01, 5.6577e-01, + 8.3986e-01, 2.0309e-01, 5.1098e-01, 7.4229e-01, + 3.1814e-01, 9.0746e-01, 7.2630e-01, 4.3664e-01, + 7.4381e-01, 5.9753e-02, 6.0605e-02, 6.7539e-01, + 6.5869e-01, 9.9780e-01, 6.6974e-01, 7.2630e-02, + 5.5134e-01, 3.1435e-01, 2.7913e-01, 3.2911e-01, + 8.8194e-01, 1.6839e-01, 7.6567e-01, 9.0047e-01, + 7.2171e-02, 6.6165e-01, 7.9677e-01, 8.0414e-01, + 6.9870e-02, 6.6994e-01, 3.8281e-01, 8.2249e-01, + 9.7607e-01, 3.8803e-01, 7.6559e-01, 7.2852e-01, + 5.4892e-01, 3.5755e-01, 6.6625e-01, 3.1938e-01, + 5.7765e-01, 2.8918e-01, 5.3040e-01, 8.5150e-02, + 1.6498e-01, 4.6298e-01, 9.9212e-01, 8.9814e-01, + 6.6018e-01, 1.3632e-01, 7.1850e-01, 6.5697e-01, + 3.1891e-02, 9.0172e-02, 6.3434e-01, 8.3883e-01, + 9.3126e-01, 1.8100e-01, 8.6503e-01, 3.1589e-01, + 3.9466e-01, 1.4804e-01, 1.7795e-01, 1.7245e-01, + 2.3928e-01, 3.2795e-01, 7.3689e-02, 7.9092e-01, + 8.6769e-01, 4.4039e-01, 1.1558e-01, 9.6351e-01, + 8.9935e-02, 9.4641e-01, 1.1469e-01, 7.7539e-01, + 2.6163e-01, 6.5319e-01, 3.3449e-01, 5.4947e-01, + 9.6249e-01, 1.3386e-01, 5.7449e-01, 8.4729e-01, + 1.0842e-01, 5.0652e-01, 9.1868e-02, 4.4581e-01, + 8.7897e-01, 1.8840e-02, 4.9435e-01, 3.9386e-01, + 7.2086e-01, 5.5196e-01, 7.8325e-01, 7.1420e-01, + 9.7010e-02, 9.0129e-01, 2.2216e-01, 1.5383e-01, + 2.7085e-01, 3.9669e-01, 8.1358e-01, 3.7652e-01, + 7.1104e-02, 9.7852e-01, 5.8186e-01, 4.4514e-01, + 7.8695e-02, 8.5210e-01, 4.0394e-01, 5.9823e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.5617, 0.4615, 0.0922, ..., 0.2381, 0.0524, 0.5912]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.0063915252685546875 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16428 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.5346786975860596} + +tensor(indices=tensor([[ 590, 3456, 8102, ..., 618, 6836, 4427], + [2868, 8373, 7234, ..., 6879, 5303, 9502]]), + values=tensor([9.3980e-01, 2.0924e-01, 7.3761e-01, 1.9972e-01, + 8.8011e-01, 5.3812e-01, 3.6872e-01, 2.6021e-01, + 3.7716e-01, 9.5151e-01, 9.8074e-01, 1.5150e-01, + 7.7941e-01, 8.1355e-01, 8.0630e-01, 2.3829e-02, + 6.7859e-01, 7.9427e-01, 8.7623e-01, 4.9791e-01, + 9.3403e-01, 8.2135e-01, 1.1547e-01, 4.4194e-01, + 1.4630e-01, 1.4365e-01, 7.5453e-01, 3.9005e-02, + 5.9209e-02, 5.1300e-01, 7.5768e-01, 6.0731e-01, + 9.4481e-01, 4.9780e-01, 7.4903e-01, 5.9665e-01, + 7.9405e-01, 9.4519e-01, 7.0891e-01, 6.1723e-01, + 3.9285e-02, 7.6031e-01, 3.9363e-03, 1.2753e-01, + 7.2318e-01, 7.1139e-01, 5.6133e-01, 9.2760e-01, + 3.1576e-01, 4.8976e-01, 4.3192e-04, 8.6321e-01, 
+ 6.8771e-01, 2.6251e-03, 2.6515e-01, 6.8177e-01, + 5.0286e-01, 6.2614e-01, 1.8234e-01, 4.3005e-01, + 6.4523e-01, 2.5698e-01, 7.1845e-01, 7.6252e-01, + 7.6869e-01, 6.3299e-01, 1.4320e-01, 4.4766e-01, + 7.5569e-01, 8.6703e-01, 4.1720e-02, 8.2502e-01, + 4.7042e-01, 8.1217e-02, 3.6532e-01, 5.8516e-01, + 9.0020e-01, 9.0098e-01, 2.1208e-01, 5.4736e-01, + 3.6057e-01, 8.8637e-01, 5.3047e-01, 3.4990e-01, + 7.0358e-01, 7.3448e-01, 8.1761e-01, 6.6041e-01, + 5.6312e-01, 9.6780e-01, 6.3649e-01, 6.0757e-01, + 1.4517e-01, 3.0991e-01, 5.3668e-01, 2.1221e-01, + 8.0099e-01, 2.6108e-01, 8.2474e-01, 9.8639e-01, + 6.9699e-01, 2.0547e-01, 5.1843e-01, 8.3567e-01, + 7.4015e-02, 6.6994e-01, 6.8314e-01, 3.4188e-01, + 6.8842e-01, 4.4340e-01, 9.8937e-02, 3.4438e-01, + 9.4861e-01, 5.9279e-01, 8.0433e-01, 1.6296e-01, + 6.3474e-01, 9.6061e-01, 2.3902e-01, 7.9784e-02, + 7.8187e-01, 7.0597e-02, 7.1477e-01, 8.4488e-01, + 2.6930e-01, 2.4181e-01, 6.1844e-01, 9.3043e-01, + 7.6764e-01, 7.6906e-02, 1.1995e-01, 6.4774e-01, + 2.6039e-01, 9.2128e-01, 9.4832e-01, 7.2107e-01, + 7.8156e-01, 7.0943e-01, 2.2407e-01, 1.7009e-01, + 4.5425e-01, 9.8882e-01, 2.0275e-01, 3.9760e-01, + 3.5605e-01, 2.1628e-01, 6.8576e-01, 4.0826e-01, + 5.7426e-01, 4.3358e-01, 3.9176e-01, 3.9978e-01, + 7.4437e-01, 9.5539e-01, 3.4715e-01, 2.1925e-01, + 5.2156e-01, 6.1222e-01, 8.1708e-02, 4.1750e-01, + 4.2883e-01, 4.7256e-01, 6.5101e-01, 8.5399e-01, + 3.5991e-01, 5.1742e-02, 3.3819e-01, 2.6176e-01, + 7.6724e-01, 3.7901e-01, 4.1881e-01, 8.4313e-02, + 5.0658e-02, 5.3520e-01, 7.2586e-01, 8.2698e-01, + 1.0644e-01, 5.0536e-01, 3.0711e-01, 4.8387e-01, + 2.3270e-01, 4.5620e-03, 9.3661e-01, 5.8240e-01, + 3.2272e-01, 9.8497e-01, 2.2491e-01, 4.1813e-01, + 4.3934e-01, 5.0139e-01, 9.2963e-01, 6.8480e-02, + 3.2874e-01, 9.0476e-01, 3.4815e-01, 9.7508e-01, + 1.1288e-01, 3.4680e-01, 6.2537e-01, 6.7785e-02, + 6.9303e-01, 8.9571e-01, 2.0214e-01, 4.0385e-01, + 8.0907e-01, 3.2271e-01, 5.9968e-01, 4.2663e-01, + 6.8223e-01, 2.9274e-01, 2.0836e-01, 7.0047e-01, + 4.0327e-01, 5.8008e-01, 7.5424e-01, 6.1483e-01, + 2.3986e-01, 6.5611e-01, 3.9618e-01, 2.6681e-01, + 2.1049e-01, 2.0221e-01, 2.0416e-01, 2.9563e-02, + 4.0333e-01, 2.4612e-02, 5.4181e-01, 4.0342e-01, + 6.2244e-02, 1.8140e-01, 9.7965e-01, 1.3469e-02, + 9.7332e-01, 5.7635e-01, 6.8757e-01, 2.0644e-01, + 5.9989e-01, 9.6148e-01, 9.0623e-01, 1.6101e-01, + 4.4778e-01, 6.3850e-01, 2.3347e-01, 4.1197e-01, + 7.6711e-01, 1.4207e-02, 3.5486e-01, 5.0936e-01, + 1.5066e-01, 4.2368e-01, 2.0567e-01, 2.6686e-01, + 8.4834e-02, 8.7106e-01, 6.3452e-01, 9.6244e-01, + 9.6771e-01, 6.2916e-01, 1.8488e-02, 4.3862e-01, + 6.9388e-01, 2.1128e-01, 2.0427e-01, 7.4735e-01, + 7.0126e-02, 9.7224e-01, 5.2410e-01, 5.1732e-01, + 6.7039e-01, 4.1901e-01, 9.9751e-01, 4.4852e-01, + 5.2623e-01, 3.6131e-01, 7.5008e-01, 5.2582e-01, + 7.4389e-01, 5.4081e-01, 6.2039e-01, 9.8166e-01, + 6.0986e-01, 1.1631e-01, 3.6934e-01, 9.3659e-01, + 9.5344e-01, 2.7991e-01, 7.6728e-01, 3.1339e-01, + 6.0029e-01, 7.3935e-01, 7.6507e-01, 8.2490e-01, + 6.5697e-01, 7.3675e-01, 4.4749e-02, 4.3369e-01, + 6.9442e-01, 6.7207e-01, 3.7249e-01, 2.8771e-01, + 2.1778e-01, 4.8918e-02, 8.8046e-01, 4.1791e-01, + 4.2864e-01, 5.6302e-01, 8.7765e-01, 6.4109e-01, + 2.5129e-01, 8.5674e-01, 8.1933e-01, 3.9898e-02, + 2.1281e-01, 8.6252e-02, 9.2912e-01, 4.3768e-02, + 6.2567e-01, 2.9476e-01, 3.1776e-01, 1.6157e-01, + 5.2626e-01, 6.7294e-01, 9.9571e-02, 3.2149e-01, + 6.0565e-01, 2.0051e-01, 5.3948e-01, 2.2110e-01, + 6.5181e-01, 1.3607e-01, 8.1922e-01, 4.3484e-01, + 9.0847e-01, 7.9633e-01, 9.6716e-01, 3.0420e-01, + 
9.3449e-01, 5.2731e-01, 2.1143e-01, 2.5964e-01, + 7.9793e-01, 7.3743e-02, 1.8333e-01, 7.9498e-02, + 1.2570e-01, 5.0718e-01, 6.8423e-01, 5.8465e-01, + 4.4492e-01, 9.7480e-01, 1.1621e-01, 8.4759e-02, + 6.6532e-01, 8.5833e-01, 6.1099e-01, 6.1318e-01, + 4.6774e-01, 6.5572e-01, 1.1246e-01, 6.5799e-01, + 7.3757e-01, 8.0903e-01, 8.4420e-01, 6.8300e-01, + 8.8714e-02, 3.5052e-01, 7.7174e-01, 9.3813e-01, + 6.6640e-02, 5.4909e-02, 5.8354e-01, 7.3905e-01, + 9.3700e-01, 4.8255e-01, 3.8641e-01, 9.7132e-01, + 2.9239e-01, 2.0816e-01, 8.8409e-02, 4.1596e-01, + 8.4402e-02, 8.8392e-01, 6.4239e-03, 1.6515e-01, + 4.5023e-01, 4.6982e-01, 6.2998e-01, 9.1901e-01, + 4.1998e-01, 2.0298e-01, 2.2412e-01, 5.9850e-01, + 7.3177e-01, 9.0420e-01, 9.1783e-01, 2.5563e-01, + 5.7369e-01, 5.7293e-01, 8.0127e-01, 5.3121e-01, + 4.6511e-01, 4.0007e-01, 8.4263e-01, 7.5862e-01, + 7.3671e-01, 4.0494e-01, 2.2754e-01, 2.5182e-02, + 4.1241e-01, 7.4259e-01, 6.4058e-01, 5.2645e-01, + 6.8409e-01, 6.8910e-01, 4.4102e-02, 5.3711e-01, + 2.8712e-01, 6.4244e-01, 4.8348e-01, 2.7068e-01, + 2.3391e-01, 6.3533e-01, 3.4888e-01, 3.5401e-01, + 3.2461e-01, 6.4913e-02, 4.0404e-01, 2.3171e-01, + 5.5817e-01, 2.5780e-01, 9.0908e-02, 3.1808e-01, + 1.4225e-01, 4.0033e-01, 5.0123e-01, 7.4963e-01, + 3.9496e-01, 3.4057e-01, 4.8016e-01, 2.1764e-01, + 1.7029e-01, 5.8525e-03, 4.1291e-01, 1.9772e-01, + 3.1794e-01, 2.7539e-01, 7.3945e-01, 2.2492e-02, + 1.0103e-01, 9.2650e-01, 9.3859e-01, 8.1887e-01, + 1.6835e-01, 1.9378e-01, 8.2436e-01, 8.2590e-02, + 9.2226e-01, 7.6395e-01, 6.5565e-01, 9.5683e-01, + 3.5304e-01, 3.4652e-01, 2.1621e-01, 9.2533e-01, + 2.4449e-01, 7.1480e-01, 4.8689e-02, 6.0518e-01, + 9.5858e-01, 1.2337e-01, 6.4475e-01, 6.2304e-01, + 4.5522e-01, 7.4843e-01, 1.6163e-01, 2.8430e-01, + 6.2974e-01, 7.7638e-01, 1.2383e-01, 5.2313e-01, + 2.5453e-01, 9.3537e-01, 5.1703e-01, 5.4193e-03, + 3.8220e-01, 4.8464e-01, 8.1847e-01, 9.9370e-01, + 9.4112e-01, 5.3811e-01, 1.3544e-01, 4.6620e-01, + 2.0205e-01, 9.2009e-01, 8.3337e-02, 5.8811e-01, + 8.5878e-01, 3.1165e-01, 7.5100e-01, 2.2693e-01, + 5.2868e-01, 2.9134e-01, 5.5545e-01, 6.7822e-01, + 3.6759e-02, 3.7069e-01, 6.3774e-01, 6.0078e-01, + 9.7159e-01, 4.2871e-01, 2.6610e-01, 3.2708e-01, + 2.1172e-01, 8.7167e-01, 2.8950e-01, 4.4122e-01, + 4.0410e-01, 8.6006e-01, 9.1488e-01, 4.6654e-01, + 8.6394e-01, 3.0126e-02, 8.9756e-01, 3.8992e-01, + 1.0344e-01, 7.7566e-01, 8.0606e-01, 5.5617e-01, + 2.6525e-01, 1.2010e-01, 3.1965e-01, 7.3579e-03, + 7.9680e-01, 6.6109e-01, 8.2379e-02, 6.4722e-01, + 4.4352e-01, 4.8975e-01, 7.3198e-01, 6.1094e-01, + 9.0506e-01, 4.5773e-01, 7.3352e-01, 6.9339e-02, + 7.8723e-01, 1.2409e-02, 3.5525e-01, 2.3384e-01, + 7.7318e-01, 9.5560e-02, 4.0047e-01, 5.1668e-01, + 8.9112e-02, 3.2035e-02, 6.3501e-02, 1.7982e-01, + 1.2049e-01, 2.3441e-01, 4.4011e-01, 8.6877e-01, + 9.9417e-01, 2.3683e-02, 6.9238e-01, 7.9711e-01, + 7.7115e-01, 5.0210e-01, 6.0058e-01, 1.0517e-01, + 9.2353e-01, 2.7779e-01, 5.1551e-01, 7.4438e-01, + 3.4330e-01, 8.5408e-02, 6.0693e-01, 6.4999e-01, + 4.7929e-01, 7.3575e-02, 3.5041e-01, 6.0948e-01, + 1.1754e-01, 2.5063e-01, 7.9128e-01, 2.2900e-01, + 4.8583e-01, 6.6217e-01, 4.4931e-01, 2.7376e-01, + 1.1922e-01, 2.0092e-01, 1.6614e-01, 8.9307e-01, + 8.3700e-01, 5.8193e-01, 7.0433e-01, 5.4257e-01, + 1.1886e-01, 3.9171e-01, 5.3060e-01, 3.7601e-01, + 8.2193e-01, 8.3044e-02, 7.7999e-02, 5.3588e-01, + 9.4752e-01, 3.4832e-01, 4.2878e-01, 8.2430e-01, + 8.4775e-02, 8.4050e-01, 3.3625e-02, 4.6800e-02, + 4.5717e-02, 1.2502e-01, 6.9747e-01, 8.2760e-02, + 4.3675e-02, 5.1765e-01, 2.9155e-01, 9.5369e-01, + 
2.1084e-01, 8.7205e-03, 2.6614e-01, 5.9801e-01, + 3.9840e-01, 4.2313e-01, 8.3958e-01, 8.5297e-01, + 3.3050e-01, 9.6740e-01, 9.4362e-01, 7.5655e-01, + 1.7804e-01, 6.5795e-01, 9.4822e-01, 4.4202e-01, + 8.8292e-01, 7.8517e-01, 4.6982e-01, 1.7460e-01, + 7.0291e-01, 1.5445e-01, 5.4453e-01, 7.1619e-01, + 7.9068e-02, 4.4015e-01, 5.8691e-01, 5.7842e-01, + 5.8982e-01, 6.1497e-01, 1.4333e-02, 4.0597e-01, + 5.6445e-01, 3.5353e-01, 6.9029e-01, 4.0400e-01, + 3.2173e-02, 8.8586e-01, 1.5402e-01, 7.6567e-01, + 1.4192e-01, 3.3261e-01, 1.0210e-01, 1.6159e-02, + 4.8590e-01, 3.7349e-01, 8.0871e-01, 5.0577e-01, + 6.4183e-01, 4.2971e-01, 6.5274e-01, 9.2009e-01, + 5.3137e-01, 8.0186e-01, 6.8814e-01, 2.9441e-01, + 8.9687e-01, 3.8130e-01, 4.4378e-02, 2.8861e-01, + 7.2112e-01, 2.4761e-01, 6.4441e-01, 8.6971e-01, + 1.1301e-01, 9.3368e-01, 5.9198e-02, 6.7704e-02, + 8.7530e-01, 4.9697e-01, 9.8205e-01, 8.7931e-02, + 3.9454e-01, 8.9717e-01, 6.0296e-01, 7.1658e-01, + 2.1582e-01, 2.9941e-01, 4.3789e-01, 9.1507e-02, + 4.7146e-01, 2.1934e-01, 2.1347e-01, 4.5240e-01, + 9.7703e-01, 6.9867e-01, 4.0753e-01, 4.4012e-01, + 2.8410e-01, 5.6495e-01, 5.2300e-01, 1.5739e-01, + 9.3214e-01, 3.0601e-01, 9.2538e-01, 3.8376e-01, + 2.4815e-01, 1.3798e-01, 7.5809e-01, 7.3823e-01, + 3.9704e-01, 3.2335e-01, 6.5221e-01, 9.6252e-01, + 1.0270e-01, 3.2566e-01, 2.7898e-02, 4.6397e-01, + 1.7873e-01, 3.0688e-02, 8.2924e-01, 9.9677e-01, + 8.5376e-01, 3.5844e-01, 7.9983e-01, 8.4292e-01, + 3.2699e-02, 5.4083e-01, 6.6945e-01, 6.7251e-01, + 7.2591e-01, 5.0909e-01, 5.2797e-01, 5.4113e-01, + 7.1775e-01, 1.4615e-01, 8.2571e-01, 7.1694e-01, + 6.8237e-01, 5.2386e-01, 5.1072e-01, 8.4710e-01, + 6.5595e-01, 9.3020e-01, 7.4390e-01, 4.2245e-03, + 4.1648e-01, 6.0174e-01, 4.9503e-01, 1.6676e-02, + 5.4810e-01, 3.1518e-01, 4.6989e-01, 8.2236e-01, + 4.1851e-01, 7.2530e-01, 5.7207e-01, 3.1228e-01, + 7.6644e-01, 3.4942e-01, 6.4856e-01, 4.2391e-01, + 3.3566e-01, 7.2411e-01, 4.2159e-02, 5.2709e-02, + 6.6183e-01, 8.0725e-01, 8.5621e-01, 6.0339e-01, + 3.6345e-01, 4.1402e-01, 9.6011e-01, 8.8824e-01, + 6.0628e-01, 6.6483e-01, 9.1385e-01, 4.0484e-01, + 5.5665e-01, 8.0204e-01, 6.2053e-01, 7.6860e-03, + 8.3237e-01, 2.3141e-01, 3.1934e-01, 8.0291e-01, + 8.4203e-02, 5.3623e-01, 9.9059e-01, 1.0051e-01, + 6.9937e-01, 5.9176e-01, 9.2248e-01, 1.5353e-01, + 8.4769e-01, 6.8652e-01, 8.8694e-02, 8.7145e-01, + 4.2706e-01, 3.4751e-01, 7.4366e-01, 2.2356e-01, + 4.7699e-01, 9.3733e-01, 2.1662e-01, 1.1339e-01, + 4.0523e-01, 1.7736e-01, 1.6895e-01, 2.5806e-01, + 7.0695e-01, 9.2181e-01, 5.4170e-01, 3.8203e-01, + 7.4046e-01, 6.5818e-01, 5.1090e-01, 8.7862e-01, + 1.3123e-01, 7.5126e-01, 5.3021e-01, 1.8405e-01, + 1.1610e-01, 9.3532e-01, 8.0448e-01, 7.7876e-01, + 9.3350e-01, 6.2264e-01, 2.2977e-01, 3.8149e-01, + 7.5633e-01, 2.8235e-01, 6.2190e-01, 6.6984e-02, + 4.5040e-01, 6.0331e-01, 8.7119e-01, 2.9878e-01, + 1.7939e-01, 4.7227e-01, 2.2740e-01, 8.7627e-01, + 3.4452e-01, 7.4579e-01, 8.9393e-01, 8.8555e-01, + 6.6373e-01, 3.0609e-03, 9.6868e-01, 1.9425e-02, + 9.0681e-02, 7.3980e-01, 3.0558e-01, 5.0586e-01, + 5.8262e-01, 8.2588e-02, 7.8022e-01, 3.6200e-01, + 8.3576e-01, 2.6981e-01, 6.7916e-01, 8.1272e-01, + 1.8667e-01, 9.1346e-01, 2.1154e-01, 1.3605e-01, + 9.2174e-01, 9.3586e-01, 9.7918e-01, 1.2226e-01, + 6.1391e-01, 5.5224e-01, 8.8976e-02, 8.4942e-01, + 2.1126e-01, 1.7147e-01, 1.4787e-01, 4.1747e-01, + 7.1002e-01, 7.1192e-01, 5.9617e-01, 2.2752e-01, + 4.8150e-01, 2.7610e-01, 6.5350e-01, 1.0340e-01, + 8.2399e-01, 1.1799e-02, 7.5394e-01, 5.4959e-01, + 1.1376e-01, 9.6653e-01, 4.5771e-01, 8.6853e-02, + 
6.6322e-01, 9.2775e-01, 4.2260e-01, 9.6262e-01, + 8.0052e-01, 9.4482e-01, 9.4995e-01, 9.6294e-01, + 8.3859e-01, 3.3024e-01, 8.2911e-01, 7.4175e-01, + 1.7354e-01, 9.9180e-01, 2.9791e-01, 2.7445e-01, + 3.6295e-01, 4.9000e-01, 7.3120e-01, 5.1286e-01, + 1.1173e-01, 9.2494e-01, 4.1786e-01, 7.3458e-02, + 1.6572e-01, 7.1465e-01, 4.0893e-01, 8.3752e-01, + 8.4768e-01, 8.6468e-01, 2.0049e-01, 9.3350e-01, + 9.5348e-02, 9.1665e-01, 3.9016e-01, 7.8405e-01, + 7.3052e-02, 2.0218e-01, 6.8118e-01, 3.7576e-01, + 7.5355e-01, 9.3269e-01, 3.0466e-02, 6.3949e-01, + 7.9125e-01, 6.0585e-01, 8.5763e-01, 2.5345e-01, + 8.5740e-01, 8.0033e-01, 9.4737e-01, 7.3777e-01, + 2.1790e-01, 1.7226e-01, 3.9799e-02, 9.2216e-01, + 5.3208e-03, 3.1461e-01, 1.3466e-02, 2.1634e-01, + 7.3006e-01, 8.2773e-01, 5.0593e-01, 6.1746e-01, + 2.1383e-01, 5.0275e-01, 4.3739e-01, 1.4458e-01, + 8.7042e-01, 9.7655e-01, 7.8033e-01, 3.6769e-01, + 5.6001e-01, 5.6403e-02, 6.1109e-01, 8.9023e-02, + 4.3435e-01, 5.9807e-01, 2.3826e-02, 1.3670e-01, + 1.5891e-01, 8.3424e-01, 1.6111e-01, 7.1805e-01, + 3.5689e-01, 2.2476e-01, 1.9888e-01, 6.4235e-02, + 3.9536e-01, 9.1705e-01, 2.1074e-01, 1.9189e-01, + 2.2442e-01, 3.0201e-01, 1.7353e-01, 8.7424e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.5368, 0.1666, 0.6527, ..., 0.6727, 0.4401, 0.9207]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.5346786975860596 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 322612 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.590221643447876} + +tensor(indices=tensor([[3057, 2976, 3212, ..., 9822, 1166, 9335], + [ 811, 4589, 846, ..., 9298, 3868, 7199]]), + values=tensor([0.0543, 0.4302, 0.0994, 0.5488, 0.0095, 0.6929, 0.3444, + 0.9559, 0.6179, 0.6351, 0.3136, 0.2412, 0.7863, 0.0035, + 0.0801, 0.2634, 0.2833, 0.7907, 0.7373, 0.8129, 0.9055, + 0.5812, 0.7675, 0.5935, 0.5298, 0.0284, 0.1388, 0.3543, + 0.5153, 0.4481, 0.6456, 0.3929, 0.3690, 0.5234, 0.9876, + 0.9517, 0.3214, 0.6952, 0.1402, 0.6578, 0.8320, 0.1407, + 0.3417, 0.4438, 0.0031, 0.2123, 0.4657, 0.0306, 0.9865, + 0.9067, 0.3028, 0.6568, 0.7977, 0.0040, 0.3076, 0.5366, + 0.3071, 0.5876, 0.6461, 0.5779, 0.0339, 0.0736, 0.5228, + 0.8795, 0.7699, 0.9676, 0.6550, 0.0287, 0.8302, 0.7925, + 0.6213, 0.8483, 0.1677, 0.9946, 0.9677, 0.7123, 0.4367, + 0.4094, 0.8703, 0.7347, 0.8350, 0.9220, 0.0975, 0.6237, + 0.2012, 0.9885, 0.8532, 0.1699, 0.4943, 0.5338, 0.6809, + 0.9606, 0.0904, 0.8257, 0.5328, 0.5336, 0.1099, 0.3730, + 0.4777, 0.1041, 0.0772, 0.4630, 0.5995, 0.8288, 0.0291, + 0.6636, 0.2286, 0.0912, 0.7715, 0.0978, 0.7364, 0.4267, + 0.8934, 0.5542, 0.3134, 0.2763, 0.1593, 0.0290, 0.5532, + 0.8920, 0.0087, 0.8365, 0.3980, 0.0810, 0.9834, 0.7835, + 0.2949, 0.7568, 0.2087, 0.7973, 0.9922, 0.5651, 0.7804, + 0.8745, 0.1661, 0.6457, 0.3021, 0.8795, 0.7138, 0.9751, + 0.2134, 0.9652, 0.3028, 0.5445, 0.9952, 0.5792, 0.3788, + 0.8996, 0.3669, 0.0946, 0.9229, 0.6031, 0.7818, 0.4133, + 0.1061, 0.2570, 0.4691, 0.4308, 0.1253, 0.0043, 0.6358, + 0.5025, 0.3965, 0.6940, 0.4634, 0.0589, 0.3967, 0.1940, + 0.9202, 0.6443, 0.9260, 0.8764, 0.0393, 0.4896, 0.9637, + 0.0113, 0.1036, 0.0099, 0.5306, 0.3922, 0.4121, 0.1224, + 0.4047, 0.1325, 0.8563, 0.2615, 0.9893, 0.8912, 0.2571, + 0.0249, 
0.1633, 0.9798, 0.8757, 0.8333, 0.2900, 0.0045, + 0.8367, 0.9595, 0.1064, 0.3069, 0.5171, 0.8252, 0.8110, + 0.2575, 0.1600, 0.8806, 0.1561, 0.2472, 0.9271, 0.4886, + 0.4986, 0.1100, 0.4345, 0.2759, 0.5313, 0.1771, 0.7999, + 0.0488, 0.0390, 0.5093, 0.1548, 0.6464, 0.1627, 0.6558, + 0.1572, 0.2495, 0.9322, 0.7157, 0.4668, 0.4148, 0.7636, + 0.0653, 0.8070, 0.7042, 0.4959, 0.5314, 0.1657, 0.6758, + 0.8910, 0.3838, 0.4981, 0.0952, 0.7370, 0.7102, 0.0200, + 0.6492, 0.6555, 0.3976, 0.6479, 0.0871, 0.5706, 0.6223, + 0.3503, 0.9693, 0.9745, 0.3258, 0.5500, 0.0900, 0.6838, + 0.4921, 0.6932, 0.6845, 0.4719, 0.7011, 0.6402, 0.2161, + 0.4239, 0.3922, 0.5004, 0.1089, 0.5987, 0.3018, 0.4712, + 0.5419, 0.7595, 0.1429, 0.5908, 0.9497, 0.1830, 0.9497, + 0.7674, 0.9819, 0.3494, 0.7847, 0.3924, 0.9068, 0.9749, + 0.4761, 0.8134, 0.0121, 0.1843, 0.9743, 0.3074, 0.3445, + 0.4895, 0.8854, 0.0320, 0.0049, 0.8569, 0.3226, 0.6765, + 0.8386, 0.2483, 0.1704, 0.4367, 0.5819, 0.0869, 0.1097, + 0.4418, 0.1178, 0.1821, 0.5152, 0.6587, 0.5174, 0.2799, + 0.3870, 0.0117, 0.1531, 0.9385, 0.9391, 0.1302, 0.2140, + 0.3724, 0.8879, 0.0938, 0.3708, 0.5522, 0.4417, 0.3964, + 0.7463, 0.9558, 0.0669, 0.5548, 0.5234, 0.0853, 0.1138, + 0.8678, 0.3310, 0.0152, 0.6624, 0.3980, 0.6678, 0.4432, + 0.0494, 0.8963, 0.9662, 0.7101, 0.6310, 0.6613, 0.9957, + 0.1886, 0.5126, 0.0689, 0.1803, 0.8117, 0.2930, 0.2743, + 0.6428, 0.8360, 0.9771, 0.6295, 0.8942, 0.3782, 0.4390, + 0.8376, 0.6518, 0.1859, 0.1078, 0.6418, 0.6277, 0.2182, + 0.8285, 0.0201, 0.4175, 0.6579, 0.2120, 0.2868, 0.8944, + 0.1727, 0.5733, 0.9648, 0.9956, 0.7652, 0.2757, 0.5883, + 0.2875, 0.1045, 0.3302, 0.1487, 0.3985, 0.5759, 0.8840, + 0.2145, 0.6726, 0.8027, 0.9264, 0.3630, 0.6595, 0.3812, + 0.8326, 0.4214, 0.8528, 0.2157, 0.1357, 0.8286, 0.5692, + 0.2532, 0.6737, 0.5221, 0.6093, 0.3457, 0.4801, 0.5835, + 0.3095, 0.0045, 0.2857, 0.7371, 0.7701, 0.0392, 0.1498, + 0.5585, 0.0969, 0.1630, 0.2354, 0.3416, 0.2065, 0.7283, + 0.9993, 0.3639, 0.6140, 0.4648, 0.0651, 0.7698, 0.3888, + 0.3916, 0.9008, 0.0126, 0.2140, 0.5975, 0.0739, 0.1880, + 0.4070, 0.0785, 0.5808, 0.5739, 0.9085, 0.1848, 0.7995, + 0.8002, 0.3984, 0.1396, 0.7672, 0.8570, 0.7280, 0.7760, + 0.2240, 0.2431, 0.0847, 0.6985, 0.9078, 0.0350, 0.4763, + 0.5933, 0.2319, 0.9181, 0.0037, 0.8922, 0.7625, 0.6346, + 0.0908, 0.8897, 0.3477, 0.8710, 0.9691, 0.6099, 0.5974, + 0.6382, 0.2652, 0.2413, 0.7611, 0.0153, 0.1354, 0.3637, + 0.3144, 0.6894, 0.1509, 0.1727, 0.5222, 0.8677, 0.2286, + 0.6188, 0.6767, 0.2371, 0.1726, 0.2276, 0.6774, 0.3111, + 0.0428, 0.8712, 0.6122, 0.0447, 0.6916, 0.1738, 0.3061, + 0.2475, 0.3205, 0.3913, 0.6528, 0.5473, 0.4414, 0.0212, + 0.3370, 0.1674, 0.1422, 0.1986, 0.3611, 0.5059, 0.5348, + 0.6571, 0.3835, 0.7609, 0.0718, 0.3221, 0.1383, 0.3737, + 0.7746, 0.1251, 0.2567, 0.5684, 0.3247, 0.3971, 0.6010, + 0.0648, 0.8633, 0.0805, 0.7282, 0.2766, 0.7059, 0.5954, + 0.2439, 0.7674, 0.9377, 0.6609, 0.7542, 0.7704, 0.8950, + 0.4678, 0.2067, 0.9129, 0.8857, 0.2682, 0.6849, 0.8430, + 0.7571, 0.4670, 0.5771, 0.9897, 0.2146, 0.7750, 0.7194, + 0.9839, 0.5309, 0.4060, 0.8810, 0.0874, 0.4896, 0.8400, + 0.5489, 0.5344, 0.1942, 0.6990, 0.3000, 0.4960, 0.1662, + 0.7224, 0.1836, 0.2489, 0.8673, 0.8266, 0.2073, 0.0891, + 0.8615, 0.9136, 0.5767, 0.1424, 0.9666, 0.8107, 0.2354, + 0.0249, 0.1228, 0.6666, 0.6277, 0.2367, 0.8992, 0.6632, + 0.7700, 0.1096, 0.6942, 0.0784, 0.6168, 0.6651, 0.2052, + 0.4103, 0.1794, 0.2624, 0.4852, 0.6366, 0.6205, 0.0349, + 0.0396, 0.4378, 0.0257, 0.1618, 0.3357, 0.2269, 0.6556, + 0.3935, 0.5176, 0.1662, 
0.0655, 0.9820, 0.6530, 0.6810, + 0.9312, 0.3764, 0.4398, 0.2752, 0.0637, 0.9123, 0.1534, + 0.6800, 0.4242, 0.7719, 0.8485, 0.8966, 0.2986, 0.8242, + 0.8799, 0.3515, 0.0906, 0.6228, 0.8342, 0.7353, 0.1487, + 0.5462, 0.2079, 0.1419, 0.7258, 0.7592, 0.2242, 0.4110, + 0.5101, 0.3140, 0.9107, 0.3592, 0.8343, 0.5521, 0.1178, + 0.0907, 0.1036, 0.0028, 0.8893, 0.7144, 0.5346, 0.3686, + 0.2751, 0.9585, 0.4984, 0.9242, 0.8648, 0.9938, 0.3833, + 0.1089, 0.7648, 0.1953, 0.6647, 0.0302, 0.3662, 0.0095, + 0.4888, 0.0873, 0.5189, 0.5623, 0.1224, 0.7313, 0.6615, + 0.4534, 0.1238, 0.0257, 0.2610, 0.8201, 0.7477, 0.4531, + 0.4351, 0.5829, 0.4299, 0.7520, 0.1382, 0.6662, 0.2282, + 0.1907, 0.5049, 0.7379, 0.8600, 0.4110, 0.5450, 0.3224, + 0.1691, 0.3173, 0.7439, 0.5773, 0.3617, 0.6346, 0.8756, + 0.8984, 0.0937, 0.6091, 0.2869, 0.3253, 0.2277, 0.6989, + 0.7174, 0.4293, 0.2867, 0.6426, 0.8525, 0.6727, 0.0675, + 0.9922, 0.8372, 0.3156, 0.5992, 0.8322, 0.4425, 0.0788, + 0.2144, 0.3252, 0.7054, 0.1044, 0.4587, 0.0290, 0.6455, + 0.7487, 0.5060, 0.3105, 0.9988, 0.9062, 0.2249, 0.2928, + 0.5862, 0.4877, 0.0749, 0.7155, 0.2128, 0.5260, 0.4031, + 0.0514, 0.5837, 0.8876, 0.4657, 0.2657, 0.1463, 0.4145, + 0.9150, 0.3793, 0.5812, 0.7100, 0.9129, 0.3491, 0.4390, + 0.9625, 0.5903, 0.4477, 0.8325, 0.8136, 0.9609, 0.2855, + 0.4284, 0.4214, 0.5452, 0.9364, 0.5131, 0.4127, 0.1449, + 0.4879, 0.4025, 0.1576, 0.0267, 0.1292, 0.5989, 0.6623, + 0.4965, 0.3628, 0.7831, 0.6126, 0.3342, 0.0172, 0.0586, + 0.1396, 0.8514, 0.0545, 0.2630, 0.6067, 0.3610, 0.1828, + 0.5973, 0.0987, 0.0989, 0.2277, 0.3142, 0.8938, 0.5887, + 0.8845, 0.8102, 0.5811, 0.4668, 0.4184, 0.2472, 0.6212, + 0.5556, 0.9449, 0.6479, 0.0902, 0.9831, 0.7467, 0.3481, + 0.0601, 0.2696, 0.7574, 0.7561, 0.4606, 0.5930, 0.5553, + 0.6355, 0.5461, 0.2260, 0.1554, 0.4139, 0.9252, 0.0786, + 0.7852, 0.5235, 0.0331, 0.8317, 0.9051, 0.4651, 0.8714, + 0.2347, 0.5858, 0.9607, 0.1518, 0.9595, 0.2867, 0.1757, + 0.6518, 0.5655, 0.0714, 0.9620, 0.4652, 0.8860, 0.3676, + 0.3260, 0.7763, 0.9967, 0.7842, 0.7057, 0.6560, 0.7265, + 0.2137, 0.6320, 0.5385, 0.4604, 0.1886, 0.4078, 0.9142, + 0.8475, 0.6468, 0.2898, 0.4649, 0.6873, 0.6762, 0.4567, + 0.6818, 0.8820, 0.9396, 0.2772, 0.8861, 0.8306, 0.9998, + 0.1239, 0.2711, 0.3528, 0.5631, 0.0741, 0.0942, 0.2558, + 0.3961, 0.8867, 0.5671, 0.9161, 0.5934, 0.7912, 0.2832, + 0.0464, 0.6601, 0.4288, 0.3823, 0.9581, 0.3930, 0.9660, + 0.8338, 0.4459, 0.1839, 0.3388, 0.8211, 0.5045, 0.4105, + 0.6358, 0.3087, 0.5392, 0.1460, 0.3012, 0.4985, 0.0642, + 0.2927, 0.2785, 0.1014, 0.9256, 0.1279, 0.3891, 0.1804, + 0.6268, 0.9135, 0.6018, 0.6878, 0.3519, 0.5529, 0.3220, + 0.1780, 0.5402, 0.9309, 0.7029, 0.9767, 0.9777, 0.5648, + 0.8962, 0.0457, 0.0885, 0.4595, 0.0983, 0.2514, 0.2495, + 0.0871, 0.5298, 0.1401, 0.6797, 0.5621, 0.4042, 0.5547, + 0.9098, 0.1095, 0.0025, 0.5011, 0.2508, 0.9018, 0.0612, + 0.1938, 0.1131, 0.5572, 0.8075, 0.3773, 0.3623, 0.2733, + 0.9519, 0.2171, 0.1693, 0.2932, 0.9407, 0.6946, 0.1658, + 0.6643, 0.8098, 0.7816, 0.6001, 0.6480, 0.2716, 0.8095, + 0.6576, 0.2628, 0.8066, 0.1108, 0.2999, 0.3048, 0.1355, + 0.3870, 0.5332, 0.6001, 0.7245, 0.6916, 0.5084]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.4966, 0.1351, 0.7679, ..., 0.2452, 0.1476, 0.9159]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.590221643447876 seconds + +tensor(indices=tensor([[3057, 2976, 3212, ..., 9822, 1166, 9335], + [ 811, 4589, 846, ..., 9298, 3868, 
7199]]), + values=tensor([0.0543, 0.4302, 0.0994, 0.5488, 0.0095, 0.6929, 0.3444, + 0.9559, 0.6179, 0.6351, 0.3136, 0.2412, 0.7863, 0.0035, + 0.0801, 0.2634, 0.2833, 0.7907, 0.7373, 0.8129, 0.9055, + 0.5812, 0.7675, 0.5935, 0.5298, 0.0284, 0.1388, 0.3543, + 0.5153, 0.4481, 0.6456, 0.3929, 0.3690, 0.5234, 0.9876, + 0.9517, 0.3214, 0.6952, 0.1402, 0.6578, 0.8320, 0.1407, + 0.3417, 0.4438, 0.0031, 0.2123, 0.4657, 0.0306, 0.9865, + 0.9067, 0.3028, 0.6568, 0.7977, 0.0040, 0.3076, 0.5366, + 0.3071, 0.5876, 0.6461, 0.5779, 0.0339, 0.0736, 0.5228, + 0.8795, 0.7699, 0.9676, 0.6550, 0.0287, 0.8302, 0.7925, + 0.6213, 0.8483, 0.1677, 0.9946, 0.9677, 0.7123, 0.4367, + 0.4094, 0.8703, 0.7347, 0.8350, 0.9220, 0.0975, 0.6237, + 0.2012, 0.9885, 0.8532, 0.1699, 0.4943, 0.5338, 0.6809, + 0.9606, 0.0904, 0.8257, 0.5328, 0.5336, 0.1099, 0.3730, + 0.4777, 0.1041, 0.0772, 0.4630, 0.5995, 0.8288, 0.0291, + 0.6636, 0.2286, 0.0912, 0.7715, 0.0978, 0.7364, 0.4267, + 0.8934, 0.5542, 0.3134, 0.2763, 0.1593, 0.0290, 0.5532, + 0.8920, 0.0087, 0.8365, 0.3980, 0.0810, 0.9834, 0.7835, + 0.2949, 0.7568, 0.2087, 0.7973, 0.9922, 0.5651, 0.7804, + 0.8745, 0.1661, 0.6457, 0.3021, 0.8795, 0.7138, 0.9751, + 0.2134, 0.9652, 0.3028, 0.5445, 0.9952, 0.5792, 0.3788, + 0.8996, 0.3669, 0.0946, 0.9229, 0.6031, 0.7818, 0.4133, + 0.1061, 0.2570, 0.4691, 0.4308, 0.1253, 0.0043, 0.6358, + 0.5025, 0.3965, 0.6940, 0.4634, 0.0589, 0.3967, 0.1940, + 0.9202, 0.6443, 0.9260, 0.8764, 0.0393, 0.4896, 0.9637, + 0.0113, 0.1036, 0.0099, 0.5306, 0.3922, 0.4121, 0.1224, + 0.4047, 0.1325, 0.8563, 0.2615, 0.9893, 0.8912, 0.2571, + 0.0249, 0.1633, 0.9798, 0.8757, 0.8333, 0.2900, 0.0045, + 0.8367, 0.9595, 0.1064, 0.3069, 0.5171, 0.8252, 0.8110, + 0.2575, 0.1600, 0.8806, 0.1561, 0.2472, 0.9271, 0.4886, + 0.4986, 0.1100, 0.4345, 0.2759, 0.5313, 0.1771, 0.7999, + 0.0488, 0.0390, 0.5093, 0.1548, 0.6464, 0.1627, 0.6558, + 0.1572, 0.2495, 0.9322, 0.7157, 0.4668, 0.4148, 0.7636, + 0.0653, 0.8070, 0.7042, 0.4959, 0.5314, 0.1657, 0.6758, + 0.8910, 0.3838, 0.4981, 0.0952, 0.7370, 0.7102, 0.0200, + 0.6492, 0.6555, 0.3976, 0.6479, 0.0871, 0.5706, 0.6223, + 0.3503, 0.9693, 0.9745, 0.3258, 0.5500, 0.0900, 0.6838, + 0.4921, 0.6932, 0.6845, 0.4719, 0.7011, 0.6402, 0.2161, + 0.4239, 0.3922, 0.5004, 0.1089, 0.5987, 0.3018, 0.4712, + 0.5419, 0.7595, 0.1429, 0.5908, 0.9497, 0.1830, 0.9497, + 0.7674, 0.9819, 0.3494, 0.7847, 0.3924, 0.9068, 0.9749, + 0.4761, 0.8134, 0.0121, 0.1843, 0.9743, 0.3074, 0.3445, + 0.4895, 0.8854, 0.0320, 0.0049, 0.8569, 0.3226, 0.6765, + 0.8386, 0.2483, 0.1704, 0.4367, 0.5819, 0.0869, 0.1097, + 0.4418, 0.1178, 0.1821, 0.5152, 0.6587, 0.5174, 0.2799, + 0.3870, 0.0117, 0.1531, 0.9385, 0.9391, 0.1302, 0.2140, + 0.3724, 0.8879, 0.0938, 0.3708, 0.5522, 0.4417, 0.3964, + 0.7463, 0.9558, 0.0669, 0.5548, 0.5234, 0.0853, 0.1138, + 0.8678, 0.3310, 0.0152, 0.6624, 0.3980, 0.6678, 0.4432, + 0.0494, 0.8963, 0.9662, 0.7101, 0.6310, 0.6613, 0.9957, + 0.1886, 0.5126, 0.0689, 0.1803, 0.8117, 0.2930, 0.2743, + 0.6428, 0.8360, 0.9771, 0.6295, 0.8942, 0.3782, 0.4390, + 0.8376, 0.6518, 0.1859, 0.1078, 0.6418, 0.6277, 0.2182, + 0.8285, 0.0201, 0.4175, 0.6579, 0.2120, 0.2868, 0.8944, + 0.1727, 0.5733, 0.9648, 0.9956, 0.7652, 0.2757, 0.5883, + 0.2875, 0.1045, 0.3302, 0.1487, 0.3985, 0.5759, 0.8840, + 0.2145, 0.6726, 0.8027, 0.9264, 0.3630, 0.6595, 0.3812, + 0.8326, 0.4214, 0.8528, 0.2157, 0.1357, 0.8286, 0.5692, + 0.2532, 0.6737, 0.5221, 0.6093, 0.3457, 0.4801, 0.5835, + 0.3095, 0.0045, 0.2857, 0.7371, 0.7701, 0.0392, 0.1498, + 0.5585, 0.0969, 0.1630, 0.2354, 0.3416, 0.2065, 
0.7283, + 0.9993, 0.3639, 0.6140, 0.4648, 0.0651, 0.7698, 0.3888, + 0.3916, 0.9008, 0.0126, 0.2140, 0.5975, 0.0739, 0.1880, + 0.4070, 0.0785, 0.5808, 0.5739, 0.9085, 0.1848, 0.7995, + 0.8002, 0.3984, 0.1396, 0.7672, 0.8570, 0.7280, 0.7760, + 0.2240, 0.2431, 0.0847, 0.6985, 0.9078, 0.0350, 0.4763, + 0.5933, 0.2319, 0.9181, 0.0037, 0.8922, 0.7625, 0.6346, + 0.0908, 0.8897, 0.3477, 0.8710, 0.9691, 0.6099, 0.5974, + 0.6382, 0.2652, 0.2413, 0.7611, 0.0153, 0.1354, 0.3637, + 0.3144, 0.6894, 0.1509, 0.1727, 0.5222, 0.8677, 0.2286, + 0.6188, 0.6767, 0.2371, 0.1726, 0.2276, 0.6774, 0.3111, + 0.0428, 0.8712, 0.6122, 0.0447, 0.6916, 0.1738, 0.3061, + 0.2475, 0.3205, 0.3913, 0.6528, 0.5473, 0.4414, 0.0212, + 0.3370, 0.1674, 0.1422, 0.1986, 0.3611, 0.5059, 0.5348, + 0.6571, 0.3835, 0.7609, 0.0718, 0.3221, 0.1383, 0.3737, + 0.7746, 0.1251, 0.2567, 0.5684, 0.3247, 0.3971, 0.6010, + 0.0648, 0.8633, 0.0805, 0.7282, 0.2766, 0.7059, 0.5954, + 0.2439, 0.7674, 0.9377, 0.6609, 0.7542, 0.7704, 0.8950, + 0.4678, 0.2067, 0.9129, 0.8857, 0.2682, 0.6849, 0.8430, + 0.7571, 0.4670, 0.5771, 0.9897, 0.2146, 0.7750, 0.7194, + 0.9839, 0.5309, 0.4060, 0.8810, 0.0874, 0.4896, 0.8400, + 0.5489, 0.5344, 0.1942, 0.6990, 0.3000, 0.4960, 0.1662, + 0.7224, 0.1836, 0.2489, 0.8673, 0.8266, 0.2073, 0.0891, + 0.8615, 0.9136, 0.5767, 0.1424, 0.9666, 0.8107, 0.2354, + 0.0249, 0.1228, 0.6666, 0.6277, 0.2367, 0.8992, 0.6632, + 0.7700, 0.1096, 0.6942, 0.0784, 0.6168, 0.6651, 0.2052, + 0.4103, 0.1794, 0.2624, 0.4852, 0.6366, 0.6205, 0.0349, + 0.0396, 0.4378, 0.0257, 0.1618, 0.3357, 0.2269, 0.6556, + 0.3935, 0.5176, 0.1662, 0.0655, 0.9820, 0.6530, 0.6810, + 0.9312, 0.3764, 0.4398, 0.2752, 0.0637, 0.9123, 0.1534, + 0.6800, 0.4242, 0.7719, 0.8485, 0.8966, 0.2986, 0.8242, + 0.8799, 0.3515, 0.0906, 0.6228, 0.8342, 0.7353, 0.1487, + 0.5462, 0.2079, 0.1419, 0.7258, 0.7592, 0.2242, 0.4110, + 0.5101, 0.3140, 0.9107, 0.3592, 0.8343, 0.5521, 0.1178, + 0.0907, 0.1036, 0.0028, 0.8893, 0.7144, 0.5346, 0.3686, + 0.2751, 0.9585, 0.4984, 0.9242, 0.8648, 0.9938, 0.3833, + 0.1089, 0.7648, 0.1953, 0.6647, 0.0302, 0.3662, 0.0095, + 0.4888, 0.0873, 0.5189, 0.5623, 0.1224, 0.7313, 0.6615, + 0.4534, 0.1238, 0.0257, 0.2610, 0.8201, 0.7477, 0.4531, + 0.4351, 0.5829, 0.4299, 0.7520, 0.1382, 0.6662, 0.2282, + 0.1907, 0.5049, 0.7379, 0.8600, 0.4110, 0.5450, 0.3224, + 0.1691, 0.3173, 0.7439, 0.5773, 0.3617, 0.6346, 0.8756, + 0.8984, 0.0937, 0.6091, 0.2869, 0.3253, 0.2277, 0.6989, + 0.7174, 0.4293, 0.2867, 0.6426, 0.8525, 0.6727, 0.0675, + 0.9922, 0.8372, 0.3156, 0.5992, 0.8322, 0.4425, 0.0788, + 0.2144, 0.3252, 0.7054, 0.1044, 0.4587, 0.0290, 0.6455, + 0.7487, 0.5060, 0.3105, 0.9988, 0.9062, 0.2249, 0.2928, + 0.5862, 0.4877, 0.0749, 0.7155, 0.2128, 0.5260, 0.4031, + 0.0514, 0.5837, 0.8876, 0.4657, 0.2657, 0.1463, 0.4145, + 0.9150, 0.3793, 0.5812, 0.7100, 0.9129, 0.3491, 0.4390, + 0.9625, 0.5903, 0.4477, 0.8325, 0.8136, 0.9609, 0.2855, + 0.4284, 0.4214, 0.5452, 0.9364, 0.5131, 0.4127, 0.1449, + 0.4879, 0.4025, 0.1576, 0.0267, 0.1292, 0.5989, 0.6623, + 0.4965, 0.3628, 0.7831, 0.6126, 0.3342, 0.0172, 0.0586, + 0.1396, 0.8514, 0.0545, 0.2630, 0.6067, 0.3610, 0.1828, + 0.5973, 0.0987, 0.0989, 0.2277, 0.3142, 0.8938, 0.5887, + 0.8845, 0.8102, 0.5811, 0.4668, 0.4184, 0.2472, 0.6212, + 0.5556, 0.9449, 0.6479, 0.0902, 0.9831, 0.7467, 0.3481, + 0.0601, 0.2696, 0.7574, 0.7561, 0.4606, 0.5930, 0.5553, + 0.6355, 0.5461, 0.2260, 0.1554, 0.4139, 0.9252, 0.0786, + 0.7852, 0.5235, 0.0331, 0.8317, 0.9051, 0.4651, 0.8714, + 0.2347, 0.5858, 0.9607, 0.1518, 0.9595, 0.2867, 0.1757, + 
0.6518, 0.5655, 0.0714, 0.9620, 0.4652, 0.8860, 0.3676, + 0.3260, 0.7763, 0.9967, 0.7842, 0.7057, 0.6560, 0.7265, + 0.2137, 0.6320, 0.5385, 0.4604, 0.1886, 0.4078, 0.9142, + 0.8475, 0.6468, 0.2898, 0.4649, 0.6873, 0.6762, 0.4567, + 0.6818, 0.8820, 0.9396, 0.2772, 0.8861, 0.8306, 0.9998, + 0.1239, 0.2711, 0.3528, 0.5631, 0.0741, 0.0942, 0.2558, + 0.3961, 0.8867, 0.5671, 0.9161, 0.5934, 0.7912, 0.2832, + 0.0464, 0.6601, 0.4288, 0.3823, 0.9581, 0.3930, 0.9660, + 0.8338, 0.4459, 0.1839, 0.3388, 0.8211, 0.5045, 0.4105, + 0.6358, 0.3087, 0.5392, 0.1460, 0.3012, 0.4985, 0.0642, + 0.2927, 0.2785, 0.1014, 0.9256, 0.1279, 0.3891, 0.1804, + 0.6268, 0.9135, 0.6018, 0.6878, 0.3519, 0.5529, 0.3220, + 0.1780, 0.5402, 0.9309, 0.7029, 0.9767, 0.9777, 0.5648, + 0.8962, 0.0457, 0.0885, 0.4595, 0.0983, 0.2514, 0.2495, + 0.0871, 0.5298, 0.1401, 0.6797, 0.5621, 0.4042, 0.5547, + 0.9098, 0.1095, 0.0025, 0.5011, 0.2508, 0.9018, 0.0612, + 0.1938, 0.1131, 0.5572, 0.8075, 0.3773, 0.3623, 0.2733, + 0.9519, 0.2171, 0.1693, 0.2932, 0.9407, 0.6946, 0.1658, + 0.6643, 0.8098, 0.7816, 0.6001, 0.6480, 0.2716, 0.8095, + 0.6576, 0.2628, 0.8066, 0.1108, 0.2999, 0.3048, 0.1355, + 0.3870, 0.5332, 0.6001, 0.7245, 0.6916, 0.5084]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.4966, 0.1351, 0.7679, ..., 0.2452, 0.1476, 0.9159]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.590221643447876 seconds + +[20.96, 21.04, 21.04, 20.56, 20.64, 20.64, 20.68, 20.84, 21.0, 21.48] +[21.96, 21.64, 21.92, 22.44, 23.44, 23.96, 24.84, 24.56, 24.4, 24.4, 24.0, 24.08, 24.04, 24.16] +14.23389744758606 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 322612, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.590221643447876, 'TIME_S_1KI': 0.0328264963592423, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.4309623336792, 'W': 21.94978314858273} +[20.96, 21.04, 21.04, 20.56, 20.64, 20.64, 20.68, 20.84, 21.0, 21.48, 20.36, 20.08, 20.24, 20.08, 20.12, 20.08, 20.2, 20.2, 20.36, 20.36] +369.38 +18.469 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 322612, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.590221643447876, 'TIME_S_1KI': 0.0328264963592423, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.4309623336792, 'W': 21.94978314858273, 'J_1KI': 0.9684418506865188, 'W_1KI': 0.06803771449475757, 'W_D': 3.480783148582727, 'J_D': 49.54511037421225, 'W_D_1KI': 0.01078937903296445, 'J_D_1KI': 3.344382426247149e-05} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..d4c2625 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 90138, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.656882047653198, "TIME_S_1KI": 0.1182285168037143, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 319.2859945774078, "W": 22.370679958106543, "J_1KI": 
3.542190802740329, "W_1KI": 0.24818256404742223, "W_D": 3.960679958106539, "J_D": 56.52888700723637, "W_D_1KI": 0.04394018014718031, "J_D_1KI": 0.0004874767594930031} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..4303f77 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,74 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 10000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.007491111755371094} + +tensor(indices=tensor([[2756, 9344, 4426, ..., 3268, 4228, 9757], + [3780, 8217, 6825, ..., 118, 2662, 5701]]), + values=tensor([6.2826e-01, 5.5465e-01, 4.4670e-01, ..., + 8.5825e-01, 7.4451e-04, 2.4789e-01]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.5584, 0.1963, 0.8382, ..., 0.7245, 0.9482, 0.1967]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.007491111755371094 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 14016 -ss 10000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.6326839923858643} + +tensor(indices=tensor([[8056, 9983, 6792, ..., 3697, 7248, 264], + [1779, 52, 5806, ..., 8892, 575, 7534]]), + values=tensor([0.3422, 0.8481, 0.1360, ..., 0.4769, 0.1113, 0.4417]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.0067, 0.0062, 0.3907, ..., 0.9039, 0.9687, 0.4816]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 1.6326839923858643 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 90138 -ss 10000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.656882047653198} + +tensor(indices=tensor([[8655, 2913, 2863, ..., 6439, 5860, 9666], + [5257, 5508, 99, ..., 1182, 2584, 5812]]), + values=tensor([0.2932, 0.1402, 0.2307, ..., 0.6916, 0.4523, 0.4866]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.9971, 0.6513, 0.0780, ..., 0.2777, 0.8749, 0.7131]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.656882047653198 seconds + +tensor(indices=tensor([[8655, 2913, 2863, ..., 6439, 5860, 9666], + [5257, 5508, 99, ..., 1182, 2584, 5812]]), + values=tensor([0.2932, 0.1402, 0.2307, ..., 0.6916, 0.4523, 0.4866]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.9971, 0.6513, 0.0780, ..., 0.2777, 0.8749, 0.7131]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.656882047653198 seconds + +[20.08, 20.4, 20.24, 20.16, 20.24, 
20.2, 20.2, 20.2, 20.28, 20.2] +[20.12, 20.16, 20.08, 23.8, 25.88, 26.72, 27.76, 25.36, 25.04, 24.08, 24.08, 24.08, 23.8, 23.8] +14.272520780563354 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 90138, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.656882047653198, 'TIME_S_1KI': 0.1182285168037143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 319.2859945774078, 'W': 22.370679958106543} +[20.08, 20.4, 20.24, 20.16, 20.24, 20.2, 20.2, 20.2, 20.28, 20.2, 20.52, 20.6, 20.4, 20.16, 20.08, 20.08, 20.04, 21.04, 22.24, 22.48] +368.20000000000005 +18.410000000000004 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 90138, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.656882047653198, 'TIME_S_1KI': 0.1182285168037143, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 319.2859945774078, 'W': 22.370679958106543, 'J_1KI': 3.542190802740329, 'W_1KI': 0.24818256404742223, 'W_D': 3.960679958106539, 'J_D': 56.52888700723637, 'W_D_1KI': 0.04394018014718031, 'J_D_1KI': 0.0004874767594930031} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..34d30a8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 10, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.5594961643219, "TIME_S_1KI": 1255.94961643219, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 827.1928561782838, "W": 32.43919728285898, "J_1KI": 82719.28561782837, "W_1KI": 3243.9197282858977, "W_D": 13.937197282858975, "J_D": 355.3956630616189, "W_D_1KI": 1393.7197282858974, "J_D_1KI": 139371.97282858976} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..3b3c81a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.5594961643219} + +tensor(indices=tensor([[410496, 464269, 190919, ..., 205291, 263754, 480479], + [165636, 118584, 230457, ..., 297377, 39670, 364887]]), + values=tensor([0.2919, 0.2085, 0.3748, ..., 0.1736, 0.8936, 0.7600]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.9977, 0.7496, 0.0514, ..., 0.6361, 0.8488, 0.3956]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 12.5594961643219 seconds + +tensor(indices=tensor([[410496, 464269, 190919, ..., 205291, 263754, 480479], + [165636, 118584, 230457, ..., 297377, 39670, 
364887]]), + values=tensor([0.2919, 0.2085, 0.3748, ..., 0.1736, 0.8936, 0.7600]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.9977, 0.7496, 0.0514, ..., 0.6361, 0.8488, 0.3956]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 12.5594961643219 seconds + +[20.72, 20.88, 20.76, 20.6, 20.48, 20.36, 20.08, 20.08, 20.28, 20.56] +[20.56, 20.56, 21.16, 25.36, 27.08, 28.12, 30.16, 28.12, 33.72, 36.28, 36.76, 36.76, 39.24, 41.52, 38.88, 38.36, 38.72, 38.56, 38.4, 35.68, 37.48, 39.4, 37.16, 34.72, 34.72] +25.499794244766235 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.5594961643219, 'TIME_S_1KI': 1255.94961643219, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 827.1928561782838, 'W': 32.43919728285898} +[20.72, 20.88, 20.76, 20.6, 20.48, 20.36, 20.08, 20.08, 20.28, 20.56, 20.56, 20.56, 20.76, 20.84, 20.68, 20.56, 20.72, 20.6, 20.52, 20.72] +370.0400000000001 +18.502000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 10, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.5594961643219, 'TIME_S_1KI': 1255.94961643219, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 827.1928561782838, 'W': 32.43919728285898, 'J_1KI': 82719.28561782837, 'W_1KI': 3243.9197282858977, 'W_D': 13.937197282858975, 'J_D': 355.3956630616189, 'W_D_1KI': 1393.7197282858974, 'J_D_1KI': 139371.97282858976} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..2ec83c9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 62, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.657742977142334, "TIME_S_1KI": 171.89908027648926, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 697.9486754226685, "W": 52.139699435514764, "J_1KI": 11257.236700365622, "W_1KI": 840.9628941212059, "W_D": 33.18969943551477, "J_D": 444.28155531168, "W_D_1KI": 535.3177328308833, "J_D_1KI": 8634.15698114328} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..7bcebef --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,90 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.6742308139801025} + +tensor(indices=tensor([[161326, 494680, 393905, ..., 207864, 78513, 309703], + [219586, 334379, 219336, ..., 379906, 197784, 255225]]), + 
values=tensor([0.6337, 0.1106, 0.6983, ..., 0.8742, 0.6120, 0.2840]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.2434, 0.2246, 0.4390, ..., 0.3419, 0.9968, 0.7529]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 2.6742308139801025 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 39 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.555628538131714} + +tensor(indices=tensor([[ 10452, 198718, 405227, ..., 131891, 206160, 322148], + [ 23653, 385606, 184282, ..., 18909, 34144, 365286]]), + values=tensor([0.8067, 0.6427, 0.5793, ..., 0.6924, 0.8071, 0.5423]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.6707, 0.1395, 0.9760, ..., 0.2643, 0.0599, 0.8111]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 7.555628538131714 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 54 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.143649339675903} + +tensor(indices=tensor([[ 60271, 246052, 264786, ..., 260607, 118685, 192993], + [278453, 247524, 291781, ..., 283148, 420625, 118178]]), + values=tensor([0.3336, 0.1850, 0.7733, ..., 0.2218, 0.9336, 0.7316]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.0502, 0.0017, 0.6628, ..., 0.9914, 0.9247, 0.6148]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 9.143649339675903 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 62 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.657742977142334} + +tensor(indices=tensor([[ 83041, 219945, 460873, ..., 56262, 366202, 23559], + [395987, 215905, 324839, ..., 240654, 328297, 323708]]), + values=tensor([0.5588, 0.0317, 0.7367, ..., 0.6753, 0.2369, 0.9061]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4409, 0.8767, 0.4241, ..., 0.4264, 0.0387, 0.4233]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.657742977142334 seconds + +tensor(indices=tensor([[ 83041, 219945, 460873, ..., 56262, 366202, 23559], + [395987, 215905, 324839, ..., 240654, 328297, 323708]]), + values=tensor([0.5588, 0.0317, 0.7367, ..., 0.6753, 0.2369, 0.9061]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4409, 0.8767, 0.4241, ..., 0.4264, 0.0387, 0.4233]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.657742977142334 seconds + +[21.12, 20.92, 21.16, 21.32, 
21.2, 21.2, 21.28, 21.36, 21.16, 21.16] +[21.36, 21.0, 20.84, 24.44, 25.76, 37.32, 51.2, 65.36, 78.84, 91.84, 93.16, 94.0, 94.24] +13.386127710342407 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 62, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.657742977142334, 'TIME_S_1KI': 171.89908027648926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 697.9486754226685, 'W': 52.139699435514764} +[21.12, 20.92, 21.16, 21.32, 21.2, 21.2, 21.28, 21.36, 21.16, 21.16, 20.96, 20.64, 20.64, 21.04, 21.0, 21.04, 21.04, 20.96, 20.88, 21.08] +378.99999999999994 +18.949999999999996 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 62, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.657742977142334, 'TIME_S_1KI': 171.89908027648926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 697.9486754226685, 'W': 52.139699435514764, 'J_1KI': 11257.236700365622, 'W_1KI': 840.9628941212059, 'W_D': 33.18969943551477, 'J_D': 444.28155531168, 'W_D_1KI': 535.3177328308833, 'J_D_1KI': 8634.15698114328} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..3d4e91c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 14, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.483234643936157, "TIME_S_1KI": 748.8024745668683, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 607.1887491989137, "W": 34.911636506462884, "J_1KI": 43370.62494277955, "W_1KI": 2493.688321890206, "W_D": 16.024636506462883, "J_D": 278.70303344059, "W_D_1KI": 1144.6168933187776, "J_D_1KI": 81758.34952276981} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..1329062 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 500000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.247653961181641} + +tensor(indices=tensor([[221783, 259565, 444479, ..., 146029, 192668, 6817], + [446504, 62710, 453058, ..., 22163, 324735, 243752]]), + values=tensor([0.7520, 0.2563, 0.6285, ..., 0.1981, 0.9331, 0.8017]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3260, 0.1176, 0.3592, ..., 0.0044, 0.6512, 0.2894]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 7.247653961181641 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 14 -ss 500000 
-sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.483234643936157} + +tensor(indices=tensor([[379716, 307160, 143738, ..., 419131, 213426, 299454], + [436618, 415373, 49567, ..., 221735, 478258, 260712]]), + values=tensor([0.9471, 0.2552, 0.1617, ..., 0.1831, 0.6071, 0.2097]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.9206, 0.6942, 0.0426, ..., 0.6642, 0.5765, 0.2233]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.483234643936157 seconds + +tensor(indices=tensor([[379716, 307160, 143738, ..., 419131, 213426, 299454], + [436618, 415373, 49567, ..., 221735, 478258, 260712]]), + values=tensor([0.9471, 0.2552, 0.1617, ..., 0.1831, 0.6071, 0.2097]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.9206, 0.6942, 0.0426, ..., 0.6642, 0.5765, 0.2233]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.483234643936157 seconds + +[21.0, 21.12, 21.36, 21.32, 21.2, 21.2, 21.32, 21.0, 21.0, 20.96] +[20.8, 20.64, 20.76, 22.56, 23.24, 25.12, 30.96, 36.92, 41.0, 47.16, 49.44, 47.76, 47.08, 47.08, 48.0, 47.28, 46.84] +17.392159461975098 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 14, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.483234643936157, 'TIME_S_1KI': 748.8024745668683, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 607.1887491989137, 'W': 34.911636506462884} +[21.0, 21.12, 21.36, 21.32, 21.2, 21.2, 21.32, 21.0, 21.0, 20.96, 20.72, 20.84, 21.0, 21.04, 20.68, 20.56, 20.76, 20.68, 20.8, 21.04] +377.74 +18.887 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 14, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.483234643936157, 'TIME_S_1KI': 748.8024745668683, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 607.1887491989137, 'W': 34.911636506462884, 'J_1KI': 43370.62494277955, 'W_1KI': 2493.688321890206, 'W_D': 16.024636506462883, 'J_D': 278.70303344059, 'W_D_1KI': 1144.6168933187776, 'J_D_1KI': 81758.34952276981} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..e169a34 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1180, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.234981060028076, "TIME_S_1KI": 18.843204288159388, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 812.2502558135985, "W": 56.25240817885772, "J_1KI": 688.3476744183038, "W_1KI": 47.67153235496417, "W_D": 37.299408178857725, "J_D": 538.5805659847259, "W_D_1KI": 31.609667948184512, "J_D_1KI": 26.787854193376706} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..f9f7017 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.23186945915222168} + +tensor(indices=tensor([[ 3093, 22047, 49113, ..., 38914, 7091, 44894], + [ 6576, 5943, 18822, ..., 22025, 47491, 1726]]), + values=tensor([0.8331, 0.6040, 0.7551, ..., 0.6148, 0.6329, 0.7368]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.8419, 0.1603, 0.2730, ..., 0.5403, 0.7243, 0.9266]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.23186945915222168 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 452 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.021474838256836} + +tensor(indices=tensor([[39035, 14538, 293, ..., 47266, 35252, 9380], + [35924, 22373, 21491, ..., 22617, 18549, 19328]]), + values=tensor([0.1327, 0.3596, 0.0822, ..., 0.9232, 0.7600, 0.9574]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.0874, 0.5358, 0.1953, ..., 0.8529, 0.4692, 0.1863]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 4.021474838256836 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1180 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.234981060028076} + +tensor(indices=tensor([[ 7225, 39241, 7218, ..., 36218, 12414, 43773], + [ 6536, 40429, 27964, ..., 8502, 5031, 39426]]), + values=tensor([0.7528, 0.7569, 0.4875, ..., 0.9272, 0.2313, 0.9474]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.7003, 0.8492, 0.9196, ..., 0.2459, 0.0471, 0.8297]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 22.234981060028076 seconds + +tensor(indices=tensor([[ 7225, 39241, 7218, ..., 36218, 12414, 43773], + [ 6536, 40429, 27964, ..., 8502, 5031, 39426]]), + values=tensor([0.7528, 0.7569, 0.4875, ..., 0.9272, 0.2313, 0.9474]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.7003, 0.8492, 0.9196, ..., 0.2459, 0.0471, 0.8297]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 22.234981060028076 seconds + +[21.44, 21.2, 21.16, 21.08, 21.04, 21.04, 21.0, 21.16, 21.24, 21.72] +[21.84, 21.88, 21.96, 26.0, 30.32, 44.36, 60.28, 71.52, 86.2, 89.96, 89.96, 89.44, 88.4, 
86.28] +14.439386367797852 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1180, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.234981060028076, 'TIME_S_1KI': 18.843204288159388, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 812.2502558135985, 'W': 56.25240817885772} +[21.44, 21.2, 21.16, 21.08, 21.04, 21.04, 21.0, 21.16, 21.24, 21.72, 21.32, 21.16, 21.08, 20.96, 21.0, 20.88, 20.84, 20.84, 20.8, 20.68] +379.05999999999995 +18.952999999999996 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1180, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.234981060028076, 'TIME_S_1KI': 18.843204288159388, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 812.2502558135985, 'W': 56.25240817885772, 'J_1KI': 688.3476744183038, 'W_1KI': 47.67153235496417, 'W_D': 37.299408178857725, 'J_D': 538.5805659847259, 'W_D_1KI': 31.609667948184512, 'J_D_1KI': 26.787854193376706} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..f053fdb --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 111, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 14.219213724136353, "TIME_S_1KI": 128.10102454176894, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 433.3483148097992, "W": 38.30053612271195, "J_1KI": 3904.038872160353, "W_1KI": 345.04987497938686, "W_D": 19.167536122711947, "J_D": 216.86953548693657, "W_D_1KI": 172.68050561001755, "J_D_1KI": 1555.6802307208789} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..cbb8b97 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 1.3785991668701172} + +tensor(indices=tensor([[21177, 37175, 20723, ..., 498, 13377, 18964], + [24409, 46791, 3897, ..., 8940, 28313, 48867]]), + values=tensor([0.5577, 0.2856, 0.2330, ..., 0.8925, 0.3888, 0.9425]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5781, 0.9034, 0.2965, ..., 0.0438, 0.1171, 0.8306]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 1.3785991668701172 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 76 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 
2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.165635347366333} + +tensor(indices=tensor([[20377, 19312, 47932, ..., 44889, 24132, 22051], + [39465, 1659, 15194, ..., 33009, 12235, 17289]]), + values=tensor([0.0567, 0.4823, 0.2608, ..., 0.3508, 0.7064, 0.8620]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7398, 0.1222, 0.1802, ..., 0.7764, 0.0168, 0.6856]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 7.165635347366333 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 111 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 14.219213724136353} + +tensor(indices=tensor([[ 4966, 38343, 1551, ..., 1254, 8295, 9155], + [ 5867, 7318, 7839, ..., 42498, 3367, 42083]]), + values=tensor([0.4466, 0.7296, 0.2151, ..., 0.3988, 0.4040, 0.0822]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7842, 0.9146, 0.9564, ..., 0.8472, 0.2228, 0.9312]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 14.219213724136353 seconds + +tensor(indices=tensor([[ 4966, 38343, 1551, ..., 1254, 8295, 9155], + [ 5867, 7318, 7839, ..., 42498, 3367, 42083]]), + values=tensor([0.4466, 0.7296, 0.2151, ..., 0.3988, 0.4040, 0.0822]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7842, 0.9146, 0.9564, ..., 0.8472, 0.2228, 0.9312]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 14.219213724136353 seconds + +[21.56, 21.56, 21.16, 21.64, 21.68, 21.44, 21.12, 21.04, 21.04, 21.08] +[21.24, 21.2, 22.0, 23.12, 23.12, 26.08, 41.28, 53.12, 66.32, 78.24, 83.8] +11.314419031143188 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 111, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 14.219213724136353, 'TIME_S_1KI': 128.10102454176894, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 433.3483148097992, 'W': 38.30053612271195} +[21.56, 21.56, 21.16, 21.64, 21.68, 21.44, 21.12, 21.04, 21.04, 21.08, 21.08, 20.96, 20.84, 20.96, 21.12, 21.2, 21.4, 21.48, 21.44, 21.44] +382.65999999999997 +19.133 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 111, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 14.219213724136353, 'TIME_S_1KI': 128.10102454176894, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 433.3483148097992, 'W': 38.30053612271195, 'J_1KI': 3904.038872160353, 'W_1KI': 345.04987497938686, 'W_D': 19.167536122711947, 'J_D': 216.86953548693657, 'W_D_1KI': 172.68050561001755, 'J_D_1KI': 1555.6802307208789} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..5919928 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 
80, "ITERATIONS": 17, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.405381917953491, "TIME_S_1KI": 612.0812892913818, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 794.4304734420776, "W": 33.90466266931279, "J_1KI": 46731.20432012222, "W_1KI": 1994.3919217242817, "W_D": 15.214662669312794, "J_D": 356.4993931818009, "W_D_1KI": 894.9801570183996, "J_D_1KI": 52645.89158931762} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..e1aebfa --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 6.292728424072266} + +tensor(indices=tensor([[46599, 29541, 11569, ..., 49621, 42011, 22905], + [48688, 48104, 46343, ..., 36933, 17531, 12574]]), + values=tensor([0.8638, 0.4863, 0.3352, ..., 0.8202, 0.6548, 0.6620]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.4864, 0.1279, 0.6321, ..., 0.9253, 0.6589, 0.5359]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 6.292728424072266 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 16 -ss 50000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.705764532089233} + +tensor(indices=tensor([[39660, 29189, 2678, ..., 17024, 22178, 24835], + [16877, 43070, 49060, ..., 48562, 32992, 29018]]), + values=tensor([0.7525, 0.1851, 0.1280, ..., 0.8552, 0.6501, 0.4578]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.8308, 0.6898, 0.7229, ..., 0.2179, 0.4033, 0.1076]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 9.705764532089233 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 17 -ss 50000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.405381917953491} + +tensor(indices=tensor([[42594, 14994, 45859, ..., 8801, 38969, 41867], + [35203, 18039, 42339, ..., 25677, 49612, 40248]]), + values=tensor([0.0796, 0.4528, 0.5170, ..., 0.7249, 0.0378, 0.5268]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.8033, 0.6281, 0.0420, ..., 0.8993, 0.6223, 0.8136]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.405381917953491 seconds + +tensor(indices=tensor([[42594, 14994, 45859, ..., 8801, 38969, 41867], + [35203, 18039, 42339, ..., 25677, 49612, 
40248]]), + values=tensor([0.0796, 0.4528, 0.5170, ..., 0.7249, 0.0378, 0.5268]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.8033, 0.6281, 0.0420, ..., 0.8993, 0.6223, 0.8136]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.405381917953491 seconds + +[20.88, 20.32, 20.24, 20.28, 20.16, 20.68, 20.6, 20.96, 21.0, 21.12] +[21.12, 21.2, 20.96, 24.16, 25.16, 26.4, 27.92, 29.32, 27.44, 34.4, 36.64, 39.56, 43.44, 42.4, 42.4, 44.48, 44.2, 44.08, 44.28, 47.44, 44.08, 40.16, 36.24] +23.431304454803467 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.405381917953491, 'TIME_S_1KI': 612.0812892913818, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 794.4304734420776, 'W': 33.90466266931279} +[20.88, 20.32, 20.24, 20.28, 20.16, 20.68, 20.6, 20.96, 21.0, 21.12, 20.56, 20.64, 20.72, 21.04, 21.08, 21.2, 21.2, 21.12, 20.84, 20.88] +373.79999999999995 +18.689999999999998 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.405381917953491, 'TIME_S_1KI': 612.0812892913818, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 794.4304734420776, 'W': 33.90466266931279, 'J_1KI': 46731.20432012222, 'W_1KI': 1994.3919217242817, 'W_D': 15.214662669312794, 'J_D': 356.4993931818009, 'W_D_1KI': 894.9801570183996, 'J_D_1KI': 52645.89158931762} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..62236f4 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 7473, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.878392457962036, "TIME_S_1KI": 1.4556928218870648, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1044.8769326019287, "W": 59.73144197874582, "J_1KI": 139.82027734536715, "W_1KI": 7.992966944834179, "W_D": 40.891441978745824, "J_D": 715.3104470443727, "W_D_1KI": 5.471891071690863, "J_D_1KI": 0.7322214735301571} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..d4fd0f2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.02452564239501953} + +tensor(indices=tensor([[38907, 4961, 13232, ..., 47145, 11952, 32147], + [49116, 43019, 5516, ..., 10304, 29950, 20531]]), + values=tensor([0.9076, 0.6540, 0.6740, ..., 0.6286, 0.5251, 
0.7125]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5564, 0.4570, 0.2159, ..., 0.8596, 0.6015, 0.7709]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.02452564239501953 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 4281 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.014506101608276} + +tensor(indices=tensor([[23889, 23218, 28748, ..., 37746, 21839, 27005], + [42283, 41234, 21702, ..., 23289, 45765, 25426]]), + values=tensor([0.1631, 0.8201, 0.5446, ..., 0.6305, 0.2811, 0.8469]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6325, 0.8451, 0.5648, ..., 0.1693, 0.4427, 0.2648]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 6.014506101608276 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 7473 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.878392457962036} + +tensor(indices=tensor([[41277, 18080, 34130, ..., 49656, 5714, 22684], + [14897, 38024, 47851, ..., 47472, 30197, 49103]]), + values=tensor([0.1440, 0.3554, 0.8017, ..., 0.4832, 0.6586, 0.8061]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9500, 0.6112, 0.2016, ..., 0.4442, 0.9073, 0.1163]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.878392457962036 seconds + +tensor(indices=tensor([[41277, 18080, 34130, ..., 49656, 5714, 22684], + [14897, 38024, 47851, ..., 47472, 30197, 49103]]), + values=tensor([0.1440, 0.3554, 0.8017, ..., 0.4832, 0.6586, 0.8061]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9500, 0.6112, 0.2016, ..., 0.4442, 0.9073, 0.1163]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.878392457962036 seconds + +[20.88, 20.88, 20.68, 20.84, 20.72, 20.68, 20.72, 20.68, 20.56, 20.68] +[20.68, 20.72, 20.6, 22.0, 23.84, 35.52, 50.08, 67.08, 79.28, 91.44, 91.4, 89.76, 89.28, 89.28, 88.48, 88.48, 87.92] +17.492913246154785 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 7473, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.878392457962036, 'TIME_S_1KI': 1.4556928218870648, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1044.8769326019287, 'W': 59.73144197874582} +[20.88, 20.88, 20.68, 20.84, 20.72, 20.68, 20.72, 20.68, 20.56, 20.68, 21.32, 21.32, 21.24, 21.04, 21.12, 21.0, 21.04, 21.12, 21.16, 21.12] +376.79999999999995 +18.839999999999996 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 7473, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.878392457962036, 'TIME_S_1KI': 
1.4556928218870648, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1044.8769326019287, 'W': 59.73144197874582, 'J_1KI': 139.82027734536715, 'W_1KI': 7.992966944834179, 'W_D': 40.891441978745824, 'J_D': 715.3104470443727, 'W_D_1KI': 5.471891071690863, 'J_D_1KI': 0.7322214735301571} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..e3bfa50 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 2771, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 16.318060398101807, "TIME_S_1KI": 5.888870587550273, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1258.8953846740724, "W": 64.44745463401114, "J_1KI": 454.31085697368184, "W_1KI": 23.257832780227766, "W_D": 45.63045463401114, "J_D": 891.3302948200704, "W_D_1KI": 16.467143498380057, "J_D_1KI": 5.942671778556499} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..a8463c6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,90 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.09811758995056152} + +tensor(indices=tensor([[ 7729, 34145, 36289, ..., 43305, 32754, 32079], + [30511, 41923, 46928, ..., 27665, 9853, 3265]]), + values=tensor([0.3620, 0.4251, 0.2141, ..., 0.7926, 0.4925, 0.4046]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.3934, 0.5542, 0.8621, ..., 0.3731, 0.8789, 0.7592]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.09811758995056152 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1070 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.211747169494629} + +tensor(indices=tensor([[19982, 25773, 3803, ..., 32566, 16139, 10198], + [ 2092, 30161, 6970, ..., 32857, 34517, 41616]]), + values=tensor([0.8500, 0.0484, 0.6700, ..., 0.1838, 0.4331, 0.9793]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.5935, 0.9838, 0.2936, ..., 0.6047, 0.7506, 0.5025]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 6.211747169494629 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1808 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, 
"MATRIX_DENSITY": 5e-05, "TIME_S": 6.8491294384002686} + +tensor(indices=tensor([[49469, 13147, 20830, ..., 36303, 49705, 33401], + [ 1713, 23815, 29340, ..., 10279, 14575, 1295]]), + values=tensor([0.1023, 0.2361, 0.2272, ..., 0.0541, 0.0946, 0.8773]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.6456, 0.9103, 0.5121, ..., 0.6946, 0.4718, 0.4688]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 6.8491294384002686 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 2771 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 16.318060398101807} + +tensor(indices=tensor([[ 5328, 30154, 24559, ..., 45152, 30750, 34408], + [33414, 20638, 7492, ..., 12450, 35607, 33873]]), + values=tensor([0.6157, 0.8729, 0.6805, ..., 0.7439, 0.9809, 0.3606]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.6901, 0.1728, 0.8105, ..., 0.4035, 0.3773, 0.0751]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 16.318060398101807 seconds + +tensor(indices=tensor([[ 5328, 30154, 24559, ..., 45152, 30750, 34408], + [33414, 20638, 7492, ..., 12450, 35607, 33873]]), + values=tensor([0.6157, 0.8729, 0.6805, ..., 0.7439, 0.9809, 0.3606]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.6901, 0.1728, 0.8105, ..., 0.4035, 0.3773, 0.0751]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 16.318060398101807 seconds + +[21.2, 21.24, 21.04, 21.04, 20.8, 20.6, 20.52, 20.68, 20.76, 21.0] +[21.0, 21.24, 21.36, 22.88, 23.76, 36.36, 51.44, 68.16, 82.08, 95.84, 94.96, 94.04, 92.92, 91.4, 91.4, 89.84, 89.56, 89.2, 88.96] +19.533671140670776 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 2771, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 16.318060398101807, 'TIME_S_1KI': 5.888870587550273, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1258.8953846740724, 'W': 64.44745463401114} +[21.2, 21.24, 21.04, 21.04, 20.8, 20.6, 20.52, 20.68, 20.76, 21.0, 20.96, 21.2, 21.08, 20.92, 20.84, 20.88, 20.68, 20.88, 21.04, 21.12] +376.34000000000003 +18.817 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 2771, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 16.318060398101807, 'TIME_S_1KI': 5.888870587550273, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1258.8953846740724, 'W': 64.44745463401114, 'J_1KI': 454.31085697368184, 'W_1KI': 23.257832780227766, 'W_D': 45.63045463401114, 'J_D': 891.3302948200704, 'W_D_1KI': 16.467143498380057, 'J_D_1KI': 5.942671778556499} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..4a68163 --- /dev/null +++ 
b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 168738, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.485045194625854, "TIME_S_1KI": 0.06213801985697267, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 310.91162288665765, "W": 21.843671537148175, "J_1KI": 1.8425702739552303, "W_1KI": 0.1294531850392216, "W_D": 3.3166715371481743, "J_D": 47.20780242657653, "W_D_1KI": 0.019655747591817933, "J_D_1KI": 0.00011648678775271683} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..c6c4b62 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.007000446319580078} + +tensor(indices=tensor([[1325, 1497, 480, ..., 3500, 3478, 2116], + [4719, 1979, 2610, ..., 176, 2285, 1304]]), + values=tensor([0.1366, 0.7326, 0.5451, ..., 0.1112, 0.4479, 0.6593]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.9564, 0.6336, 0.4132, ..., 0.6517, 0.0119, 0.5457]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.007000446319580078 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 14999 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.9333367347717285} + +tensor(indices=tensor([[3373, 3356, 2049, ..., 630, 3185, 4638], + [2608, 27, 2482, ..., 1785, 2528, 195]]), + values=tensor([0.1742, 0.5283, 0.9682, ..., 0.3574, 0.8555, 0.6831]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.0960, 0.5170, 0.0071, ..., 0.4377, 0.3597, 0.2283]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.9333367347717285 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 168738 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.485045194625854} + +tensor(indices=tensor([[ 737, 1696, 697, ..., 2524, 3932, 1456], + [3112, 608, 3160, ..., 3432, 4304, 4310]]), + values=tensor([0.4453, 0.3883, 0.3697, ..., 0.6281, 0.7959, 0.8251]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.1120, 0.1935, 0.0241, ..., 0.7250, 0.1547, 0.1567]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.485045194625854 seconds + +tensor(indices=tensor([[ 737, 1696, 697, ..., 2524, 3932, 
1456], + [3112, 608, 3160, ..., 3432, 4304, 4310]]), + values=tensor([0.4453, 0.3883, 0.3697, ..., 0.6281, 0.7959, 0.8251]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.1120, 0.1935, 0.0241, ..., 0.7250, 0.1547, 0.1567]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.485045194625854 seconds + +[20.48, 20.6, 20.6, 20.76, 20.92, 20.8, 20.84, 20.72, 20.64, 20.72] +[20.6, 20.48, 21.28, 23.0, 24.0, 24.0, 24.72, 25.48, 24.84, 24.24, 23.88, 23.68, 23.76, 23.52] +14.233487367630005 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 168738, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.485045194625854, 'TIME_S_1KI': 0.06213801985697267, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 310.91162288665765, 'W': 21.843671537148175} +[20.48, 20.6, 20.6, 20.76, 20.92, 20.8, 20.84, 20.72, 20.64, 20.72, 20.48, 20.6, 20.56, 20.52, 20.52, 20.52, 20.28, 20.4, 20.28, 20.28] +370.54 +18.527 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 168738, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.485045194625854, 'TIME_S_1KI': 0.06213801985697267, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 310.91162288665765, 'W': 21.843671537148175, 'J_1KI': 1.8425702739552303, 'W_1KI': 0.1294531850392216, 'W_D': 3.3166715371481743, 'J_D': 47.20780242657653, 'W_D_1KI': 0.019655747591817933, 'J_D_1KI': 0.00011648678775271683} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..ced88f1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 19199, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.324403285980225, "TIME_S_1KI": 0.5377573460065745, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 317.08369788169864, "W": 22.21914334536831, "J_1KI": 16.51563612071976, "W_1KI": 1.1573073256611444, "W_D": 3.6761433453683097, "J_D": 52.461299149751724, "W_D_1KI": 0.1914757719343877, "J_D_1KI": 0.009973215893243799} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..1dfeeb8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.011479377746582031} + +tensor(indices=tensor([[1452, 3904, 3193, ..., 823, 1975, 3091], + [4638, 3330, 3560, ..., 47, 1573, 246]]), + values=tensor([0.3894, 0.5016, 0.2854, ..., 0.3081, 0.2629, 0.3399]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) 
+tensor([0.1493, 0.7474, 0.2368, ..., 0.4590, 0.8438, 0.4019]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.011479377746582031 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 9146 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 5.001960515975952} + +tensor(indices=tensor([[ 709, 2727, 2357, ..., 3324, 1671, 1352], + [ 652, 4578, 2312, ..., 2968, 74, 2460]]), + values=tensor([0.7523, 0.8276, 0.4835, ..., 0.4561, 0.9480, 0.9648]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.7147, 0.2348, 0.5775, ..., 0.5765, 0.4946, 0.7943]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 5.001960515975952 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 19199 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.324403285980225} + +tensor(indices=tensor([[ 189, 3026, 3194, ..., 2733, 557, 3535], + [1335, 1189, 1948, ..., 2126, 1727, 3645]]), + values=tensor([0.4443, 0.5993, 0.1976, ..., 0.9174, 0.4711, 0.4087]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.4135, 0.6602, 0.6487, ..., 0.5052, 0.9387, 0.1346]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.324403285980225 seconds + +tensor(indices=tensor([[ 189, 3026, 3194, ..., 2733, 557, 3535], + [1335, 1189, 1948, ..., 2126, 1727, 3645]]), + values=tensor([0.4443, 0.5993, 0.1976, ..., 0.9174, 0.4711, 0.4087]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.4135, 0.6602, 0.6487, ..., 0.5052, 0.9387, 0.1346]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.324403285980225 seconds + +[20.44, 20.2, 20.36, 20.52, 20.4, 20.44, 20.6, 20.6, 20.52, 20.48] +[20.36, 20.28, 20.48, 22.12, 23.12, 26.8, 27.4, 27.08, 26.48, 23.68, 23.72, 23.72, 23.68, 23.8] +14.270743608474731 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 19199, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.324403285980225, 'TIME_S_1KI': 0.5377573460065745, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 317.08369788169864, 'W': 22.21914334536831} +[20.44, 20.2, 20.36, 20.52, 20.4, 20.44, 20.6, 20.6, 20.52, 20.48, 20.56, 20.64, 20.76, 20.68, 20.72, 20.96, 20.88, 20.76, 20.8, 20.56] +370.86 +18.543 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 19199, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.324403285980225, 'TIME_S_1KI': 0.5377573460065745, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 317.08369788169864, 'W': 22.21914334536831, 'J_1KI': 16.51563612071976, 'W_1KI': 1.1573073256611444, 'W_D': 3.6761433453683097, 'J_D': 
52.461299149751724, 'W_D_1KI': 0.1914757719343877, 'J_D_1KI': 0.009973215893243799} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..de325ee --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1914, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.46642279624939, "TIME_S_1KI": 5.468350468259869, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 320.75712821960445, "W": 22.459238592806493, "J_1KI": 167.58470648882155, "W_1KI": 11.734189442427635, "W_D": 4.083238592806495, "J_D": 58.3157741279602, "W_D_1KI": 2.133353496764104, "J_D_1KI": 1.1146047527503156} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..33a3190 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.09189152717590332} + +tensor(indices=tensor([[2980, 2328, 458, ..., 732, 4549, 3438], + [1447, 4197, 1922, ..., 4043, 2433, 2509]]), + values=tensor([0.6066, 0.4082, 0.0041, ..., 0.8757, 0.4243, 0.6669]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.8479, 0.4058, 0.2386, ..., 0.1854, 0.7438, 0.2444]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.09189152717590332 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1142 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 6.2618207931518555} + +tensor(indices=tensor([[4719, 1739, 456, ..., 4893, 4449, 9], + [ 841, 3936, 408, ..., 4329, 1822, 746]]), + values=tensor([0.0640, 0.0924, 0.3454, ..., 0.9454, 0.0700, 0.9850]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.6145, 0.7674, 0.6984, ..., 0.9894, 0.8960, 0.3364]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 6.2618207931518555 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 1914 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.46642279624939} + +tensor(indices=tensor([[2553, 1257, 3594, ..., 2355, 1562, 3224], + [3933, 3858, 4569, ..., 4222, 4125, 3399]]), + values=tensor([0.9785, 0.8002, 0.3740, ..., 0.6273, 0.8465, 0.3459]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) 
+tensor([0.0276, 0.0059, 0.6722, ..., 0.5421, 0.0550, 0.8689]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.46642279624939 seconds + +tensor(indices=tensor([[2553, 1257, 3594, ..., 2355, 1562, 3224], + [3933, 3858, 4569, ..., 4222, 4125, 3399]]), + values=tensor([0.9785, 0.8002, 0.3740, ..., 0.6273, 0.8465, 0.3459]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.0276, 0.0059, 0.6722, ..., 0.5421, 0.0550, 0.8689]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.46642279624939 seconds + +[20.56, 20.32, 20.52, 20.48, 20.48, 20.52, 20.56, 20.4, 20.6, 20.6] +[20.6, 20.6, 20.68, 21.52, 22.44, 26.48, 27.2, 27.56, 27.24, 24.36, 24.36, 24.44, 24.44, 24.48] +14.281745433807373 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1914, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.46642279624939, 'TIME_S_1KI': 5.468350468259869, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 320.75712821960445, 'W': 22.459238592806493} +[20.56, 20.32, 20.52, 20.48, 20.48, 20.52, 20.56, 20.4, 20.6, 20.6, 20.2, 20.12, 20.12, 20.12, 20.36, 20.2, 20.52, 20.68, 20.64, 20.4] +367.52 +18.375999999999998 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1914, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.46642279624939, 'TIME_S_1KI': 5.468350468259869, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 320.75712821960445, 'W': 22.459238592806493, 'J_1KI': 167.58470648882155, 'W_1KI': 11.734189442427635, 'W_D': 4.083238592806495, 'J_D': 58.3157741279602, 'W_D_1KI': 2.133353496764104, 'J_D_1KI': 1.1146047527503156} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..12658d2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 396, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.784477710723877, "TIME_S_1KI": 27.233529572535044, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 339.25271774291997, "W": 23.760931780899526, "J_1KI": 856.6987821790908, "W_1KI": 60.00235298206951, "W_D": 5.452931780899526, "J_D": 77.85561371898656, "W_D_1KI": 13.770029749746277, "J_D_1KI": 34.772802398349185} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..13c9cf8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, 
"TIME_S": 0.2925379276275635} + +tensor(indices=tensor([[3750, 3884, 3492, ..., 3406, 2250, 3102], + [3234, 2665, 4221, ..., 4702, 323, 4674]]), + values=tensor([0.3725, 0.1908, 0.9128, ..., 0.8545, 0.3414, 0.5388]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.7012, 0.4862, 0.3372, ..., 0.3754, 0.0073, 0.8999]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.2925379276275635 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 358 -ss 5000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.48960566520691} + +tensor(indices=tensor([[2986, 602, 764, ..., 3703, 3989, 3355], + [ 21, 4618, 5, ..., 3862, 1934, 584]]), + values=tensor([0.7726, 0.3678, 0.2931, ..., 0.4991, 0.4848, 0.7880]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.7329, 0.8480, 0.6480, ..., 0.4599, 0.5664, 0.1124]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 9.48960566520691 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 396 -ss 5000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.784477710723877} + +tensor(indices=tensor([[3255, 3755, 2705, ..., 3115, 4213, 1938], + [2887, 827, 3861, ..., 750, 1754, 938]]), + values=tensor([0.8045, 0.2373, 0.1615, ..., 0.2583, 0.7570, 0.7810]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.2347, 0.2983, 0.3742, ..., 0.5974, 0.4844, 0.2548]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.784477710723877 seconds + +tensor(indices=tensor([[3255, 3755, 2705, ..., 3115, 4213, 1938], + [2887, 827, 3861, ..., 750, 1754, 938]]), + values=tensor([0.8045, 0.2373, 0.1615, ..., 0.2583, 0.7570, 0.7810]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.2347, 0.2983, 0.3742, ..., 0.5974, 0.4844, 0.2548]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.784477710723877 seconds + +[20.36, 20.32, 20.32, 20.28, 20.16, 20.4, 20.52, 20.28, 20.36, 20.48] +[20.32, 20.4, 23.72, 23.72, 24.72, 28.32, 29.28, 29.92, 27.12, 27.36, 24.96, 25.04, 25.12, 24.96] +14.277753114700317 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 396, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.784477710723877, 'TIME_S_1KI': 27.233529572535044, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 339.25271774291997, 'W': 23.760931780899526} +[20.36, 20.32, 20.32, 20.28, 20.16, 20.4, 20.52, 20.28, 20.36, 20.48, 20.68, 20.6, 20.52, 20.24, 20.24, 20.2, 20.08, 20.2, 20.48, 20.4] +366.15999999999997 +18.308 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 396, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 
'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.784477710723877, 'TIME_S_1KI': 27.233529572535044, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 339.25271774291997, 'W': 23.760931780899526, 'J_1KI': 856.6987821790908, 'W_1KI': 60.00235298206951, 'W_D': 5.452931780899526, 'J_D': 77.85561371898656, 'W_D_1KI': 13.770029749746277, 'J_D_1KI': 34.772802398349185} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..88a0c32 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 197, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.462316274642944, "TIME_S_1KI": 53.1082044398119, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 354.70012279510496, "W": 23.18372200056496, "J_1KI": 1800.508237538604, "W_1KI": 117.68386802317238, "W_D": 4.7517220005649605, "J_D": 72.69912816619873, "W_D_1KI": 24.12041624652264, "J_D_1KI": 122.43866114986113} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..7aa3065 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.5312061309814453} + +tensor(indices=tensor([[1853, 1099, 3271, ..., 1312, 3241, 799], + [3370, 2068, 3045, ..., 4091, 3401, 3622]]), + values=tensor([0.6967, 0.5060, 0.7932, ..., 0.0386, 0.9170, 0.8505]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.4194, 0.3154, 0.0286, ..., 0.7128, 0.7672, 0.7125]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.5312061309814453 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 197 -ss 5000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.462316274642944} + +tensor(indices=tensor([[ 534, 162, 3293, ..., 3876, 3096, 4570], + [2983, 2029, 783, ..., 3180, 4049, 877]]), + values=tensor([0.8944, 0.6534, 0.6975, ..., 0.5364, 0.8432, 0.3549]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7133, 0.2228, 0.0976, ..., 0.2266, 0.3701, 0.1159]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.462316274642944 seconds + +tensor(indices=tensor([[ 534, 162, 3293, ..., 3876, 3096, 4570], + [2983, 2029, 783, ..., 3180, 4049, 877]]), + values=tensor([0.8944, 0.6534, 0.6975, ..., 0.5364, 0.8432, 0.3549]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7133, 0.2228, 0.0976, ..., 0.2266, 0.3701, 0.1159]) 
+Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.462316274642944 seconds + +[20.64, 20.84, 20.96, 21.16, 20.96, 20.92, 20.6, 20.56, 20.52, 20.32] +[20.48, 20.56, 20.56, 21.8, 23.08, 26.4, 27.44, 27.96, 27.52, 27.56, 25.24, 25.2, 25.36, 25.44, 25.44] +15.299533128738403 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.462316274642944, 'TIME_S_1KI': 53.1082044398119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 354.70012279510496, 'W': 23.18372200056496} +[20.64, 20.84, 20.96, 21.16, 20.96, 20.92, 20.6, 20.56, 20.52, 20.32, 19.96, 19.96, 20.04, 20.24, 20.44, 20.16, 20.2, 20.28, 20.16, 20.36] +368.64 +18.432 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.462316274642944, 'TIME_S_1KI': 53.1082044398119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 354.70012279510496, 'W': 23.18372200056496, 'J_1KI': 1800.508237538604, 'W_1KI': 117.68386802317238, 'W_D': 4.7517220005649605, 'J_D': 72.69912816619873, 'W_D_1KI': 24.12041624652264, 'J_D_1KI': 122.43866114986113} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..fc28132 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 98, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.360224723815918, "TIME_S_1KI": 105.71657881444814, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 367.190686454773, "W": 23.931765947540203, "J_1KI": 3746.843739334419, "W_1KI": 244.20169334224698, "W_D": 5.381765947540202, "J_D": 82.57369460105903, "W_D_1KI": 54.91597905653268, "J_D_1KI": 560.3671332299252} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..e41dc4d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 1.0649404525756836} + +tensor(indices=tensor([[3814, 2510, 4754, ..., 1402, 2377, 1325], + [1306, 798, 1013, ..., 3536, 78, 4562]]), + values=tensor([0.5014, 0.2122, 0.1157, ..., 0.1611, 0.3118, 0.8655]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.2317, 0.6495, 0.2388, ..., 0.5848, 0.2642, 0.7839]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 1.0649404525756836 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 
'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 98 -ss 5000 -sd 0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.360224723815918} + +tensor(indices=tensor([[ 109, 771, 1957, ..., 326, 3410, 1109], + [4437, 277, 3391, ..., 1192, 806, 4729]]), + values=tensor([0.7352, 0.5289, 0.0568, ..., 0.2544, 0.0842, 0.8243]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.4082, 0.8634, 0.3411, ..., 0.3915, 0.6123, 0.9531]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.360224723815918 seconds + +tensor(indices=tensor([[ 109, 771, 1957, ..., 326, 3410, 1109], + [4437, 277, 3391, ..., 1192, 806, 4729]]), + values=tensor([0.7352, 0.5289, 0.0568, ..., 0.2544, 0.0842, 0.8243]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.4082, 0.8634, 0.3411, ..., 0.3915, 0.6123, 0.9531]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.360224723815918 seconds + +[20.44, 20.44, 20.32, 20.52, 20.72, 20.68, 20.92, 20.96, 20.68, 20.68] +[20.64, 20.72, 20.56, 21.92, 22.88, 29.16, 29.96, 30.52, 30.28, 29.92, 24.68, 24.84, 25.04, 25.04, 25.32] +15.343234062194824 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 98, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.360224723815918, 'TIME_S_1KI': 105.71657881444814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 367.190686454773, 'W': 23.931765947540203} +[20.44, 20.44, 20.32, 20.52, 20.72, 20.68, 20.92, 20.96, 20.68, 20.68, 21.0, 21.04, 20.64, 20.72, 20.48, 20.48, 20.48, 20.44, 20.24, 20.36] +371.0 +18.55 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 98, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.360224723815918, 'TIME_S_1KI': 105.71657881444814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 367.190686454773, 'W': 23.931765947540203, 'J_1KI': 3746.843739334419, 'W_1KI': 244.20169334224698, 'W_D': 5.381765947540202, 'J_D': 82.57369460105903, 'W_D_1KI': 54.91597905653268, 'J_D_1KI': 560.3671332299252} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..57a3699 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 64, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.220601797103882, "TIME_S_1KI": 159.69690307974815, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 403.9369924163819, "W": 24.765001026128928, "J_1KI": 6311.515506505967, "W_1KI": 386.9531410332645, "W_D": 6.221001026128928, "J_D": 101.46950697326663, "W_D_1KI": 97.2031410332645, "J_D_1KI": 1518.7990786447579} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.3.output 
b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..ec5f6f2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.6283369064331055} + +tensor(indices=tensor([[1279, 3416, 4653, ..., 1483, 1169, 4571], + [2791, 1620, 3013, ..., 1069, 1185, 4438]]), + values=tensor([0.3064, 0.9507, 0.5509, ..., 0.1641, 0.9585, 0.2381]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.5387, 0.1602, 0.4905, ..., 0.0478, 0.9961, 0.4036]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 1.6283369064331055 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 64 -ss 5000 -sd 0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.220601797103882} + +tensor(indices=tensor([[1686, 256, 3950, ..., 1979, 3593, 3339], + [3296, 3703, 2609, ..., 2893, 3290, 3232]]), + values=tensor([0.6617, 0.6880, 0.5955, ..., 0.2432, 0.9292, 0.5425]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.8891, 0.0731, 0.6022, ..., 0.7799, 0.9835, 0.9320]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.220601797103882 seconds + +tensor(indices=tensor([[1686, 256, 3950, ..., 1979, 3593, 3339], + [3296, 3703, 2609, ..., 2893, 3290, 3232]]), + values=tensor([0.6617, 0.6880, 0.5955, ..., 0.2432, 0.9292, 0.5425]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.8891, 0.0731, 0.6022, ..., 0.7799, 0.9835, 0.9320]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.220601797103882 seconds + +[20.52, 20.4, 20.4, 20.32, 20.24, 20.16, 20.36, 20.56, 20.64, 20.56] +[20.64, 20.84, 21.48, 23.16, 25.32, 25.32, 31.0, 32.0, 31.84, 30.84, 27.56, 24.76, 24.8, 25.2, 25.4, 29.72] +16.310800552368164 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 64, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.220601797103882, 'TIME_S_1KI': 159.69690307974815, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 403.9369924163819, 'W': 24.765001026128928} +[20.52, 20.4, 20.4, 20.32, 20.24, 20.16, 20.36, 20.56, 20.64, 20.56, 20.8, 20.64, 20.88, 20.76, 20.76, 20.64, 20.72, 20.88, 20.96, 21.24] +370.88 +18.544 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 64, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.220601797103882, 'TIME_S_1KI': 159.69690307974815, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 403.9369924163819, 'W': 24.765001026128928, 'J_1KI': 6311.515506505967, 'W_1KI': 386.9531410332645, 'W_D': 
6.221001026128928, 'J_D': 101.46950697326663, 'W_D_1KI': 97.2031410332645, 'J_D_1KI': 1518.7990786447579} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..3c160b2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 50, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.600855827331543, "TIME_S_1KI": 212.01711654663086, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 415.4362985801697, "W": 24.028108964441873, "J_1KI": 8308.725971603395, "W_1KI": 480.5621792888375, "W_D": 5.398108964441871, "J_D": 93.33112359523771, "W_D_1KI": 107.96217928883742, "J_D_1KI": 2159.2435857767487} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..91b781e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 2.07926344871521} + +tensor(indices=tensor([[1778, 2259, 3703, ..., 3719, 4026, 598], + [ 690, 3634, 3705, ..., 611, 602, 4123]]), + values=tensor([0.3210, 0.9373, 0.0094, ..., 0.0958, 0.0204, 0.0169]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.4258, 0.7131, 0.4201, ..., 0.1966, 0.1804, 0.5093]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 2.07926344871521 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 50 -ss 5000 -sd 0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.600855827331543} + +tensor(indices=tensor([[2288, 688, 2079, ..., 2242, 4150, 1898], + [1257, 2014, 3731, ..., 4302, 2449, 2264]]), + values=tensor([0.0994, 0.0069, 0.8096, ..., 0.7147, 0.6368, 0.0974]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8795, 0.2397, 0.9683, ..., 0.7944, 0.9967, 0.2444]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.600855827331543 seconds + +tensor(indices=tensor([[2288, 688, 2079, ..., 2242, 4150, 1898], + [1257, 2014, 3731, ..., 4302, 2449, 2264]]), + values=tensor([0.0994, 0.0069, 0.8096, ..., 0.7147, 0.6368, 0.0974]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8795, 0.2397, 0.9683, ..., 0.7944, 0.9967, 0.2444]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.600855827331543 seconds + +[20.8, 20.76, 20.52, 20.4, 20.56, 20.56, 20.48, 20.64, 20.72, 20.8] +[20.76, 20.88, 21.68, 
22.64, 24.04, 27.4, 28.4, 27.88, 27.88, 27.48, 27.0, 25.24, 25.2, 25.24, 25.28, 25.16, 29.96] +17.28959608078003 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 50, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.600855827331543, 'TIME_S_1KI': 212.01711654663086, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 415.4362985801697, 'W': 24.028108964441873} +[20.8, 20.76, 20.52, 20.4, 20.56, 20.56, 20.48, 20.64, 20.72, 20.8, 20.6, 20.64, 20.72, 20.56, 21.04, 21.04, 20.8, 20.96, 20.92, 20.36] +372.6 +18.630000000000003 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 50, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.600855827331543, 'TIME_S_1KI': 212.01711654663086, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 415.4362985801697, 'W': 24.028108964441873, 'J_1KI': 8308.725971603395, 'W_1KI': 480.5621792888375, 'W_D': 5.398108964441871, 'J_D': 93.33112359523771, 'W_D_1KI': 107.96217928883742, 'J_D_1KI': 2159.2435857767487} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..c516020 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 39, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.249986171722412, "TIME_S_1KI": 262.8201582492926, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 448.57725488662726, "W": 25.866452261197104, "J_1KI": 11501.980894528904, "W_1KI": 663.2423656717206, "W_D": 7.232452261197103, "J_D": 125.4255337634087, "W_D_1KI": 185.4474938768488, "J_D_1KI": 4755.063945560226} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..447c49c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 2.6484789848327637} + +tensor(indices=tensor([[2606, 4503, 568, ..., 4002, 1170, 4255], + [ 326, 86, 3578, ..., 1350, 1054, 1712]]), + values=tensor([0.9165, 0.3495, 0.3325, ..., 0.4870, 0.4561, 0.6575]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.1516, 0.9580, 0.8544, ..., 0.9817, 0.5121, 0.4328]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 2.6484789848327637 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 39 -ss 5000 -sd 0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 
12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.249986171722412} + +tensor(indices=tensor([[3439, 237, 4370, ..., 2920, 4672, 4476], + [2193, 2292, 215, ..., 299, 4749, 1213]]), + values=tensor([0.5546, 0.6374, 0.2752, ..., 0.5437, 0.9306, 0.9193]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.2963, 0.0823, 0.0479, ..., 0.1943, 0.5608, 0.1183]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.249986171722412 seconds + +tensor(indices=tensor([[3439, 237, 4370, ..., 2920, 4672, 4476], + [2193, 2292, 215, ..., 299, 4749, 1213]]), + values=tensor([0.5546, 0.6374, 0.2752, ..., 0.5437, 0.9306, 0.9193]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.2963, 0.0823, 0.0479, ..., 0.1943, 0.5608, 0.1183]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.249986171722412 seconds + +[20.64, 20.56, 20.64, 20.68, 20.76, 20.76, 20.68, 20.92, 20.88, 20.84] +[20.8, 20.84, 21.16, 25.48, 27.44, 31.6, 34.96, 32.72, 31.52, 31.52, 30.56, 25.24, 25.08, 25.12, 25.0, 25.08, 29.52] +17.342047929763794 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 39, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.249986171722412, 'TIME_S_1KI': 262.8201582492926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 448.57725488662726, 'W': 25.866452261197104} +[20.64, 20.56, 20.64, 20.68, 20.76, 20.76, 20.68, 20.92, 20.88, 20.84, 20.44, 20.56, 20.68, 20.68, 20.76, 20.76, 20.76, 20.6, 20.56, 20.96] +372.68 +18.634 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 39, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.249986171722412, 'TIME_S_1KI': 262.8201582492926, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 448.57725488662726, 'W': 25.866452261197104, 'J_1KI': 11501.980894528904, 'W_1KI': 663.2423656717206, 'W_D': 7.232452261197103, 'J_D': 125.4255337634087, 'W_D_1KI': 185.4474938768488, 'J_D_1KI': 4755.063945560226} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..28eb4c8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 647809, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.034083366394043, "TIME_S_1KI": 0.015489262060876035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 321.12039955139164, "W": 22.569400922042004, "J_1KI": 0.49570228192475196, "W_1KI": 0.03483959148767925, "W_D": 4.131400922042001, "J_D": 58.78211474800112, "W_D_1KI": 0.006377498494219748, "J_D_1KI": 9.844720425649764e-06} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..aa624bc --- /dev/null +++ 
b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,429 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.00798487663269043} + +tensor(indices=tensor([[3331, 2594, 128, 418, 605, 954, 4150, 3376, 4525, + 1730, 1055, 4528, 2131, 2082, 2044, 3925, 775, 4132, + 3544, 29, 1555, 4892, 2277, 2301, 1417, 3746, 2415, + 2852, 3029, 3530, 1464, 704, 2336, 523, 1025, 3770, + 3336, 4184, 3428, 1858, 4031, 618, 1596, 741, 4715, + 576, 2062, 962, 2955, 4966, 1253, 1125, 1920, 2461, + 1, 3350, 3672, 1904, 1002, 557, 3061, 249, 3440, + 3945, 522, 2609, 38, 1155, 4450, 3697, 4742, 2677, + 2646, 229, 3301, 785, 1548, 4280, 2396, 167, 4966, + 2669, 1852, 1140, 1287, 4213, 4486, 2690, 1752, 1491, + 2894, 4058, 2107, 1662, 1872, 4314, 3611, 4103, 4627, + 4502, 688, 4203, 2550, 4295, 1296, 2042, 4018, 571, + 2387, 4981, 3300, 2029, 4633, 3978, 3391, 4743, 1452, + 1138, 1840, 1245, 2243, 788, 1287, 3827, 3691, 1234, + 3704, 1895, 3294, 3571, 2918, 4993, 1712, 2778, 4525, + 1319, 3523, 4464, 1964, 4767, 4371, 1266, 2686, 693, + 4818, 195, 1228, 1769, 2259, 3320, 1320, 4251, 1867, + 3359, 358, 2977, 4045, 4475, 4438, 1731, 4811, 1243, + 3414, 4368, 3439, 122, 2620, 681, 4471, 1202, 2151, + 4575, 3928, 1381, 1417, 1573, 2580, 3982, 4022, 779, + 3763, 3562, 4657, 2106, 2013, 3719, 2603, 2516, 216, + 2035, 224, 833, 4182, 3961, 1977, 2209, 375, 3454, + 2854, 4132, 2211, 657, 2285, 2465, 4701, 284, 4448, + 942, 3692, 2598, 3597, 31, 1998, 2998, 92, 4580, + 3650, 839, 1510, 4809, 1402, 3726, 1460, 1686, 535, + 4064, 3510, 2203, 2978, 1839, 2020, 3234, 1604, 1266, + 4129, 2108, 2450, 1828, 2825, 3220, 4030, 4969, 623, + 200, 436, 1894, 2702, 859, 2175, 3789], + [3102, 2800, 1646, 4106, 2112, 56, 4309, 755, 2023, + 380, 782, 195, 4086, 2594, 3370, 1746, 2994, 3693, + 703, 2276, 260, 309, 1025, 266, 2030, 1507, 3163, + 4780, 2763, 577, 320, 2501, 2592, 4514, 4087, 246, + 930, 4338, 2547, 2749, 3374, 1565, 599, 4521, 2121, + 1136, 3684, 3132, 4701, 1018, 1149, 1472, 203, 3277, + 769, 3162, 960, 2118, 1526, 4493, 1824, 663, 4806, + 4000, 650, 490, 2588, 3845, 3735, 1817, 4153, 1428, + 4548, 941, 1433, 581, 3691, 1000, 4786, 290, 773, + 4164, 2652, 4619, 560, 2742, 83, 4235, 4807, 4264, + 736, 2760, 1550, 2797, 4761, 3672, 1904, 1894, 1881, + 816, 2822, 4401, 4952, 4914, 558, 1013, 1024, 1697, + 3189, 3618, 293, 28, 192, 2739, 1766, 2630, 1669, + 4041, 4574, 593, 3722, 4699, 3275, 860, 3019, 2075, + 4040, 2006, 1730, 1600, 2402, 2157, 166, 1378, 1399, + 4422, 2137, 3744, 3893, 1225, 132, 2068, 3579, 4654, + 4658, 1938, 3823, 1633, 906, 2949, 2081, 64, 3990, + 4190, 4793, 4733, 2599, 1883, 3927, 1080, 2064, 4486, + 894, 1620, 1830, 4509, 4597, 986, 4979, 1131, 3910, + 1225, 4551, 2611, 3926, 3556, 1897, 1735, 837, 2583, + 813, 3120, 3046, 163, 909, 830, 632, 109, 4602, + 4676, 560, 484, 3978, 2049, 2919, 706, 4157, 600, + 2853, 3554, 272, 4205, 2167, 4248, 2607, 549, 3952, + 2295, 4681, 4270, 4479, 315, 2467, 4848, 2610, 4300, + 4932, 1560, 3947, 3044, 4275, 2912, 2152, 1365, 1899, + 3049, 1991, 347, 79, 4601, 1263, 2954, 3568, 690, + 3571, 4152, 1840, 588, 4396, 62, 4102, 965, 2300, + 3260, 2051, 3578, 2790, 3601, 992, 4790]]), + values=tensor([0.5308, 0.2935, 0.7566, 0.3585, 0.6475, 0.7047, 0.7578, + 
0.0058, 0.1019, 0.7860, 0.7099, 0.2212, 0.5501, 0.7611, + 0.7826, 0.7019, 0.5660, 0.5041, 0.4862, 0.1996, 0.4100, + 0.1832, 0.2303, 0.5758, 0.2716, 0.2313, 0.3258, 0.3250, + 0.4346, 0.8707, 0.1212, 0.3646, 0.4672, 0.0928, 0.6034, + 0.9696, 0.1838, 0.5850, 0.1931, 0.7914, 0.1435, 0.6869, + 0.2555, 0.2296, 0.6732, 0.4803, 0.6716, 0.1286, 0.4803, + 0.3739, 0.6806, 0.9985, 0.3470, 0.7165, 0.8930, 0.9574, + 0.8466, 0.0725, 0.5125, 0.7071, 0.2493, 0.5607, 0.3224, + 0.2639, 0.1702, 0.4337, 0.5616, 0.5870, 0.2922, 0.5830, + 0.7712, 0.2778, 0.0836, 0.3419, 0.8571, 0.6071, 0.9182, + 0.8482, 0.3759, 0.2984, 0.2401, 0.3440, 0.5708, 0.1727, + 0.2654, 0.4365, 0.7598, 0.2484, 0.1329, 0.2592, 0.5136, + 0.0516, 0.3122, 0.3578, 0.8714, 0.2378, 0.3500, 0.6926, + 0.5177, 0.8250, 0.8117, 0.8502, 0.0624, 0.2303, 0.7681, + 0.5578, 0.9518, 0.3780, 0.1286, 0.5249, 0.2306, 0.2275, + 0.8762, 0.4346, 0.7283, 0.6473, 0.6748, 0.5830, 0.2499, + 0.2885, 0.0497, 0.1526, 0.6170, 0.7048, 0.2794, 0.5729, + 0.6707, 0.7002, 0.9461, 0.8027, 0.4204, 0.2761, 0.7738, + 0.4041, 0.0639, 0.4465, 0.2435, 0.4246, 0.4965, 0.6159, + 0.9463, 0.4218, 0.4811, 0.3576, 0.9963, 0.0688, 0.2500, + 0.9904, 0.4861, 0.3198, 0.6051, 0.9546, 0.2932, 0.0618, + 0.5689, 0.6875, 0.8475, 0.4694, 0.6435, 0.3629, 0.7615, + 0.5834, 0.0204, 0.4318, 0.3116, 0.9681, 0.6884, 0.1458, + 0.4788, 0.6817, 0.2977, 0.6534, 0.7315, 0.3829, 0.3298, + 0.6474, 0.1355, 0.8518, 0.9891, 0.1276, 0.0283, 0.9595, + 0.2393, 0.2944, 0.4158, 0.4899, 0.2124, 0.4958, 0.6456, + 0.2877, 0.8585, 0.4953, 0.5966, 0.3054, 0.4804, 0.3215, + 0.7145, 0.8982, 0.7747, 0.3501, 0.2130, 0.5478, 0.4679, + 0.0838, 0.9072, 0.5287, 0.4470, 0.1881, 0.2730, 0.2766, + 0.7074, 0.1165, 0.3840, 0.2007, 0.5302, 0.0052, 0.4464, + 0.5485, 0.5976, 0.4999, 0.4585, 0.8889, 0.1692, 0.4592, + 0.1729, 0.5465, 0.8347, 0.5595, 0.6711, 0.0224, 0.8734, + 0.4647, 0.2825, 0.0027, 0.5251, 0.8309, 0.8781, 0.8539, + 0.9184, 0.5766, 0.9439, 0.9132, 0.9601, 0.8376, 0.0736, + 0.5607, 0.9104, 0.4847, 0.3939, 0.6502]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.4720, 0.6573, 0.3034, ..., 0.1111, 0.9271, 0.5454]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.00798487663269043 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 13149 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.21312499046325684} + +tensor(indices=tensor([[2068, 1175, 283, 1592, 3855, 4432, 1875, 1654, 17, + 393, 1567, 465, 1456, 2518, 366, 4783, 2983, 95, + 293, 1239, 766, 660, 3317, 467, 2270, 1132, 2363, + 2856, 4620, 1162, 1868, 2915, 3465, 2815, 952, 2357, + 4811, 2366, 2735, 4366, 2991, 549, 3090, 4716, 653, + 4406, 2500, 3293, 3365, 4783, 4624, 4809, 3884, 2879, + 2122, 309, 1298, 4418, 878, 2676, 4294, 2276, 1724, + 4855, 1745, 1281, 117, 2665, 3616, 864, 3284, 2188, + 784, 1173, 4917, 3653, 1561, 3019, 4259, 1888, 2456, + 1413, 2484, 496, 511, 4778, 2701, 4452, 3922, 4148, + 3787, 2657, 3769, 3997, 4688, 2, 1753, 4389, 1283, + 778, 2190, 783, 1240, 1510, 2672, 1725, 2782, 3695, + 1587, 3452, 2628, 1517, 1385, 1862, 4164, 4291, 2328, + 3450, 4200, 3785, 3396, 798, 1530, 352, 2835, 4263, + 1645, 3982, 930, 2640, 2427, 1264, 846, 2692, 58, + 3236, 764, 4551, 2876, 4726, 1164, 600, 1651, 4633, + 2903, 248, 4660, 
3696, 889, 3239, 9, 2931, 4235, + 439, 2427, 1542, 664, 3278, 3093, 935, 3844, 914, + 2918, 3398, 4523, 4195, 565, 360, 819, 4657, 3975, + 1701, 4903, 308, 135, 356, 3626, 1050, 895, 1833, + 4209, 267, 3606, 1170, 4706, 2812, 2824, 1650, 1850, + 3246, 4955, 3111, 4037, 2461, 1374, 1002, 1584, 406, + 2301, 901, 4998, 4363, 2991, 4588, 3389, 2795, 2216, + 2933, 757, 159, 60, 1042, 2922, 2083, 3257, 4804, + 197, 1527, 3945, 558, 4760, 436, 4774, 3292, 4920, + 800, 3, 808, 2092, 4283, 3829, 3310, 4040, 3812, + 3287, 2375, 3865, 1922, 3713, 3119, 197, 2774, 1902, + 2429, 1896, 3600, 513, 52, 174, 3115], + [3418, 875, 3316, 4137, 142, 448, 1399, 4737, 2814, + 1042, 3660, 2501, 1808, 1390, 1447, 4692, 1694, 4232, + 4881, 1913, 1269, 1434, 447, 4711, 4433, 2748, 4752, + 2001, 1206, 1130, 1700, 2832, 623, 2109, 2780, 2075, + 4315, 2547, 3681, 2389, 109, 3641, 834, 2210, 229, + 1222, 2820, 972, 1477, 1761, 3272, 3789, 4379, 1300, + 3757, 1941, 202, 3235, 1387, 4890, 1878, 1757, 2279, + 308, 2190, 52, 104, 4937, 2445, 2971, 4323, 494, + 2783, 138, 2717, 1399, 109, 3384, 745, 4430, 2774, + 3386, 1245, 918, 760, 4050, 1611, 1219, 4391, 4689, + 1732, 3131, 1062, 4251, 1022, 900, 530, 4362, 2888, + 4943, 2587, 4439, 1701, 2822, 2922, 4090, 1230, 4156, + 4974, 4379, 4552, 2477, 917, 2342, 1133, 3378, 2884, + 3914, 1896, 4241, 955, 860, 4290, 1383, 678, 3599, + 4810, 1006, 265, 1738, 2052, 1963, 3124, 3050, 11, + 1116, 1544, 3166, 2291, 4594, 4297, 4099, 2992, 1435, + 1421, 1727, 964, 4857, 2882, 1919, 3506, 1404, 1302, + 2929, 4416, 915, 3932, 2552, 2473, 3037, 31, 2188, + 3234, 541, 3350, 802, 4270, 4772, 654, 132, 1739, + 4076, 4187, 2367, 4364, 180, 4302, 4580, 3975, 2984, + 1504, 203, 4440, 1645, 1141, 2603, 2608, 837, 3424, + 4910, 95, 4800, 4229, 1475, 3334, 4157, 4811, 3524, + 427, 3127, 3076, 583, 2803, 4393, 3000, 4919, 3573, + 3178, 2387, 4588, 2844, 3531, 2136, 2027, 3445, 1203, + 927, 136, 4736, 4245, 788, 405, 1639, 4319, 197, + 4273, 2123, 1251, 3398, 897, 1913, 3910, 4739, 3376, + 821, 3693, 2224, 1991, 4175, 1594, 3691, 485, 3745, + 2746, 622, 4329, 439, 1813, 4090, 976]]), + values=tensor([0.6445, 0.5564, 0.4133, 0.2667, 0.5401, 0.6325, 0.6807, + 0.3740, 0.4861, 0.6345, 0.2392, 0.4229, 0.0860, 0.6708, + 0.8917, 0.1643, 0.5194, 0.5865, 0.9413, 0.9167, 0.1803, + 0.8271, 0.2797, 0.4889, 0.6838, 0.2254, 0.8048, 0.3600, + 0.1098, 0.6457, 0.8961, 0.6077, 0.7915, 0.9805, 0.5690, + 0.0971, 0.0821, 0.2981, 0.6005, 0.2327, 0.0641, 0.4201, + 0.8347, 0.0340, 0.0895, 0.4225, 0.5387, 0.7279, 0.7755, + 0.9683, 0.7746, 0.1072, 0.7370, 0.8597, 0.6586, 0.6629, + 0.0704, 0.1725, 0.5275, 0.3573, 0.8667, 0.3164, 0.4444, + 0.4762, 0.2609, 0.4331, 0.4681, 0.4993, 0.3439, 0.5745, + 0.4234, 0.9043, 0.8217, 0.7960, 0.6251, 0.7407, 0.8441, + 0.6047, 0.9484, 0.3858, 0.4823, 0.6985, 0.5389, 0.7834, + 0.6923, 0.3515, 0.7091, 0.7292, 0.1273, 0.0501, 0.5241, + 0.6713, 0.7050, 0.2840, 0.3237, 0.3932, 0.0665, 0.8082, + 0.8933, 0.0657, 0.3727, 0.6318, 0.1964, 0.7228, 0.5099, + 0.3533, 0.6887, 0.3319, 0.6284, 0.8302, 0.5065, 0.1181, + 0.5505, 0.1921, 0.0692, 0.7826, 0.4135, 0.6018, 0.2224, + 0.0366, 0.8823, 0.3243, 0.7645, 0.8418, 0.4068, 0.5612, + 0.4019, 0.4322, 0.0488, 0.6099, 0.8740, 0.8644, 0.0570, + 0.9712, 0.4353, 0.9271, 0.7816, 0.5894, 0.2605, 0.3148, + 0.5806, 0.5995, 0.3096, 0.6551, 0.3508, 0.2511, 0.2050, + 0.0651, 0.7459, 0.4501, 0.1863, 0.9783, 0.0249, 0.4558, + 0.1240, 0.5923, 0.5497, 0.1635, 0.5564, 0.6237, 0.2292, + 0.1680, 0.0265, 0.8977, 0.2207, 0.9811, 0.1516, 0.8246, + 0.4875, 0.3850, 0.3296, 0.4790, 
0.7536, 0.9991, 0.6052, + 0.6281, 0.1426, 0.4474, 0.8788, 0.7047, 0.0412, 0.8001, + 0.7827, 0.7265, 0.5733, 0.1696, 0.4186, 0.8441, 0.9932, + 0.0587, 0.0882, 0.7996, 0.3527, 0.6326, 0.9704, 0.6735, + 0.9025, 0.5447, 0.5788, 0.9686, 0.3286, 0.2455, 0.4213, + 0.0151, 0.8286, 0.9135, 0.2171, 0.7071, 0.9648, 0.2608, + 0.3151, 0.1599, 0.7815, 0.3125, 0.0833, 0.5846, 0.2182, + 0.4320, 0.7787, 0.8163, 0.1476, 0.5709, 0.2803, 0.6676, + 0.4426, 0.2713, 0.2312, 0.0550, 0.2774, 0.0930, 0.7021, + 0.7750, 0.4753, 0.4557, 0.6503, 0.4704, 0.2421, 0.9248, + 0.8122, 0.9297, 0.8012, 0.5587, 0.5683, 0.3376, 0.1363, + 0.2368, 0.4533, 0.5475, 0.6795, 0.9440]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.8798, 0.0634, 0.3872, ..., 0.1248, 0.6610, 0.5536]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.21312499046325684 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 647809 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.034083366394043} + +tensor(indices=tensor([[ 121, 1134, 1108, 3563, 409, 1965, 451, 3718, 4663, + 3094, 2133, 4774, 447, 3847, 357, 632, 78, 1982, + 936, 674, 2118, 215, 3206, 1145, 2646, 3424, 4973, + 282, 3067, 42, 2240, 103, 3404, 2262, 849, 1071, + 1521, 3713, 3008, 878, 4751, 355, 1699, 3164, 3886, + 3302, 3687, 983, 23, 1628, 786, 4372, 455, 4994, + 4972, 2511, 549, 1789, 4396, 2366, 1121, 2216, 540, + 1649, 3329, 4672, 393, 1213, 3563, 2484, 1274, 4302, + 4049, 3236, 4932, 1076, 974, 4225, 3263, 3057, 797, + 751, 2199, 3471, 4646, 406, 2376, 1791, 4229, 418, + 4629, 710, 567, 2634, 1834, 3130, 743, 4370, 144, + 4097, 1381, 2666, 3436, 4777, 3003, 3839, 921, 2460, + 1414, 976, 2289, 1284, 2885, 3615, 3050, 2445, 947, + 1972, 4741, 3950, 2256, 1419, 4618, 3656, 42, 3720, + 2537, 1372, 3468, 863, 165, 3044, 4733, 2483, 3772, + 2056, 296, 920, 472, 78, 1731, 3153, 4231, 4883, + 1769, 3849, 3530, 211, 2726, 3781, 765, 1904, 469, + 4696, 2806, 4652, 43, 4446, 2539, 4034, 558, 1090, + 3073, 4279, 2456, 2375, 53, 2275, 2987, 2330, 4613, + 2035, 3261, 4038, 4751, 2803, 2056, 1488, 4964, 1152, + 3486, 3518, 1830, 800, 2919, 3858, 827, 405, 1037, + 3033, 4437, 2565, 1325, 812, 2019, 4375, 3284, 4232, + 1064, 2413, 3648, 1839, 530, 2825, 279, 3601, 1949, + 3747, 2720, 2092, 3314, 3379, 2546, 3259, 327, 2754, + 3083, 533, 1056, 3993, 2052, 3615, 579, 2143, 792, + 4456, 4003, 2797, 494, 4506, 2229, 1125, 4565, 4703, + 4414, 120, 955, 3823, 3227, 3490, 4878, 1773, 4762, + 2281, 3721, 1484, 2991, 1686, 422, 3420], + [ 254, 4893, 767, 2200, 1114, 4260, 4239, 348, 2618, + 4825, 3555, 2649, 2285, 1677, 3861, 1158, 1825, 2426, + 2725, 515, 905, 1883, 4397, 2556, 4318, 279, 2513, + 1917, 4400, 2264, 997, 3127, 29, 2793, 3709, 3058, + 1621, 437, 2090, 3604, 4884, 2934, 709, 1514, 1528, + 3927, 2652, 2322, 1120, 4205, 1770, 1789, 3500, 1271, + 613, 2274, 3302, 4222, 1296, 2556, 4236, 4749, 503, + 2498, 1369, 2771, 145, 2690, 2120, 438, 4899, 3477, + 360, 4362, 4081, 4303, 432, 3384, 1827, 1522, 2185, + 2631, 1534, 908, 689, 594, 2425, 2135, 1201, 1718, + 3505, 2805, 2705, 3957, 1560, 678, 2831, 2699, 345, + 3373, 4748, 3500, 910, 1447, 4957, 1979, 4376, 4201, + 1510, 3259, 2114, 1172, 3823, 2499, 3911, 4481, 1543, + 100, 2612, 3434, 1981, 3456, 978, 2614, 4301, 
4520, + 791, 4692, 417, 1112, 2363, 1270, 4978, 3050, 2007, + 368, 4025, 2483, 818, 3991, 2953, 666, 767, 3990, + 3630, 3990, 4974, 2624, 1878, 4561, 3807, 1966, 3699, + 483, 1262, 4959, 1176, 287, 3198, 4955, 4494, 3603, + 4673, 1760, 2464, 1359, 4839, 4212, 293, 642, 438, + 3573, 1804, 3204, 1247, 4763, 1898, 1887, 2931, 249, + 1171, 1571, 1317, 2323, 2747, 2091, 3293, 951, 1397, + 3005, 3299, 2315, 667, 3134, 3023, 4985, 3548, 2758, + 714, 236, 1453, 4933, 1858, 486, 3565, 1582, 4709, + 2400, 108, 4770, 3636, 4519, 2214, 551, 2064, 4915, + 1456, 1570, 2524, 2398, 4520, 4116, 2507, 59, 3112, + 355, 4858, 2838, 2986, 10, 491, 2819, 2599, 1131, + 3432, 2344, 3187, 2898, 1737, 3409, 295, 1100, 1399, + 208, 3654, 1138, 3779, 1971, 3265, 75]]), + values=tensor([0.3432, 0.9070, 0.7177, 0.4621, 0.1772, 0.3603, 0.6409, + 0.1173, 0.2547, 0.0481, 0.7629, 0.9290, 0.4552, 0.0478, + 0.3687, 0.4587, 0.5707, 0.4695, 0.5772, 0.2076, 0.9113, + 0.0515, 0.6159, 0.5417, 0.7875, 0.5165, 0.1322, 0.3322, + 0.9266, 0.9568, 0.3405, 0.7746, 0.2883, 0.9298, 0.9025, + 0.1334, 0.5550, 0.8117, 0.6279, 0.3449, 0.0243, 0.1229, + 0.9611, 0.1874, 0.1658, 0.2885, 0.8268, 0.2574, 0.6107, + 0.2002, 0.9192, 0.3921, 0.8484, 0.3827, 0.7423, 0.2848, + 0.0035, 0.9043, 0.9321, 0.5788, 0.7712, 0.5319, 0.6982, + 0.1756, 0.4030, 0.4438, 0.2942, 0.4649, 0.2292, 0.6423, + 0.7195, 0.2790, 0.0687, 0.0143, 0.1979, 0.1425, 0.5878, + 0.6472, 0.6386, 0.5533, 0.2421, 0.9610, 0.6525, 0.3424, + 0.1032, 0.8260, 0.5753, 0.5679, 0.3964, 0.0736, 0.1785, + 0.9597, 0.1690, 0.6128, 0.9492, 0.5273, 0.8584, 0.7174, + 0.8218, 0.1864, 0.1570, 0.1062, 0.3101, 0.4385, 0.1763, + 0.7406, 0.0742, 0.1294, 0.4697, 0.3390, 0.1168, 0.7284, + 0.0881, 0.4031, 0.5976, 0.7480, 0.9746, 0.6784, 0.1715, + 0.0343, 0.5531, 0.3940, 0.2529, 0.8111, 0.1221, 0.7053, + 0.2359, 0.7700, 0.0328, 0.4181, 0.4534, 0.3087, 0.6532, + 0.1721, 0.2822, 0.5211, 0.1748, 0.1064, 0.4268, 0.1492, + 0.7787, 0.8925, 0.4263, 0.6584, 0.3680, 0.6540, 0.0074, + 0.4741, 0.9439, 0.1620, 0.4818, 0.3863, 0.6675, 0.1834, + 0.9545, 0.8397, 0.1610, 0.2541, 0.4147, 0.4165, 0.3677, + 0.4380, 0.4202, 0.8723, 0.5223, 0.9512, 0.5945, 0.2059, + 0.2868, 0.3277, 0.1297, 0.7623, 0.4239, 0.1388, 0.1046, + 0.5244, 0.0718, 0.0844, 0.7335, 0.5597, 0.7750, 0.7483, + 0.4119, 0.2415, 0.0673, 0.2374, 0.4232, 0.8744, 0.7790, + 0.2419, 0.5519, 0.7006, 0.8827, 0.2686, 0.1595, 0.6097, + 0.2538, 0.5961, 0.8895, 0.0692, 0.6139, 0.2809, 0.9917, + 0.5446, 0.0495, 0.9018, 0.4905, 0.4142, 0.3006, 0.7093, + 0.4632, 0.8864, 0.7590, 0.6438, 0.0933, 0.7017, 0.4753, + 0.9850, 0.8030, 0.7347, 0.7125, 0.5472, 0.6968, 0.5733, + 0.3327, 0.0741, 0.8108, 0.0779, 0.5405, 0.9621, 0.9310, + 0.8746, 0.5794, 0.5021, 0.3896, 0.6361, 0.4835, 0.3480, + 0.6176, 0.0477, 0.5665, 0.7604, 0.5747, 0.7509, 0.5836, + 0.7729, 0.4839, 0.7593, 0.1163, 0.1495]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.9300, 0.7841, 0.7110, ..., 0.9115, 0.2040, 0.4111]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.034083366394043 seconds + +tensor(indices=tensor([[ 121, 1134, 1108, 3563, 409, 1965, 451, 3718, 4663, + 3094, 2133, 4774, 447, 3847, 357, 632, 78, 1982, + 936, 674, 2118, 215, 3206, 1145, 2646, 3424, 4973, + 282, 3067, 42, 2240, 103, 3404, 2262, 849, 1071, + 1521, 3713, 3008, 878, 4751, 355, 1699, 3164, 3886, + 3302, 3687, 983, 23, 1628, 786, 4372, 455, 4994, + 4972, 2511, 549, 1789, 4396, 2366, 1121, 2216, 540, + 1649, 3329, 4672, 393, 1213, 3563, 
2484, 1274, 4302, + 4049, 3236, 4932, 1076, 974, 4225, 3263, 3057, 797, + 751, 2199, 3471, 4646, 406, 2376, 1791, 4229, 418, + 4629, 710, 567, 2634, 1834, 3130, 743, 4370, 144, + 4097, 1381, 2666, 3436, 4777, 3003, 3839, 921, 2460, + 1414, 976, 2289, 1284, 2885, 3615, 3050, 2445, 947, + 1972, 4741, 3950, 2256, 1419, 4618, 3656, 42, 3720, + 2537, 1372, 3468, 863, 165, 3044, 4733, 2483, 3772, + 2056, 296, 920, 472, 78, 1731, 3153, 4231, 4883, + 1769, 3849, 3530, 211, 2726, 3781, 765, 1904, 469, + 4696, 2806, 4652, 43, 4446, 2539, 4034, 558, 1090, + 3073, 4279, 2456, 2375, 53, 2275, 2987, 2330, 4613, + 2035, 3261, 4038, 4751, 2803, 2056, 1488, 4964, 1152, + 3486, 3518, 1830, 800, 2919, 3858, 827, 405, 1037, + 3033, 4437, 2565, 1325, 812, 2019, 4375, 3284, 4232, + 1064, 2413, 3648, 1839, 530, 2825, 279, 3601, 1949, + 3747, 2720, 2092, 3314, 3379, 2546, 3259, 327, 2754, + 3083, 533, 1056, 3993, 2052, 3615, 579, 2143, 792, + 4456, 4003, 2797, 494, 4506, 2229, 1125, 4565, 4703, + 4414, 120, 955, 3823, 3227, 3490, 4878, 1773, 4762, + 2281, 3721, 1484, 2991, 1686, 422, 3420], + [ 254, 4893, 767, 2200, 1114, 4260, 4239, 348, 2618, + 4825, 3555, 2649, 2285, 1677, 3861, 1158, 1825, 2426, + 2725, 515, 905, 1883, 4397, 2556, 4318, 279, 2513, + 1917, 4400, 2264, 997, 3127, 29, 2793, 3709, 3058, + 1621, 437, 2090, 3604, 4884, 2934, 709, 1514, 1528, + 3927, 2652, 2322, 1120, 4205, 1770, 1789, 3500, 1271, + 613, 2274, 3302, 4222, 1296, 2556, 4236, 4749, 503, + 2498, 1369, 2771, 145, 2690, 2120, 438, 4899, 3477, + 360, 4362, 4081, 4303, 432, 3384, 1827, 1522, 2185, + 2631, 1534, 908, 689, 594, 2425, 2135, 1201, 1718, + 3505, 2805, 2705, 3957, 1560, 678, 2831, 2699, 345, + 3373, 4748, 3500, 910, 1447, 4957, 1979, 4376, 4201, + 1510, 3259, 2114, 1172, 3823, 2499, 3911, 4481, 1543, + 100, 2612, 3434, 1981, 3456, 978, 2614, 4301, 4520, + 791, 4692, 417, 1112, 2363, 1270, 4978, 3050, 2007, + 368, 4025, 2483, 818, 3991, 2953, 666, 767, 3990, + 3630, 3990, 4974, 2624, 1878, 4561, 3807, 1966, 3699, + 483, 1262, 4959, 1176, 287, 3198, 4955, 4494, 3603, + 4673, 1760, 2464, 1359, 4839, 4212, 293, 642, 438, + 3573, 1804, 3204, 1247, 4763, 1898, 1887, 2931, 249, + 1171, 1571, 1317, 2323, 2747, 2091, 3293, 951, 1397, + 3005, 3299, 2315, 667, 3134, 3023, 4985, 3548, 2758, + 714, 236, 1453, 4933, 1858, 486, 3565, 1582, 4709, + 2400, 108, 4770, 3636, 4519, 2214, 551, 2064, 4915, + 1456, 1570, 2524, 2398, 4520, 4116, 2507, 59, 3112, + 355, 4858, 2838, 2986, 10, 491, 2819, 2599, 1131, + 3432, 2344, 3187, 2898, 1737, 3409, 295, 1100, 1399, + 208, 3654, 1138, 3779, 1971, 3265, 75]]), + values=tensor([0.3432, 0.9070, 0.7177, 0.4621, 0.1772, 0.3603, 0.6409, + 0.1173, 0.2547, 0.0481, 0.7629, 0.9290, 0.4552, 0.0478, + 0.3687, 0.4587, 0.5707, 0.4695, 0.5772, 0.2076, 0.9113, + 0.0515, 0.6159, 0.5417, 0.7875, 0.5165, 0.1322, 0.3322, + 0.9266, 0.9568, 0.3405, 0.7746, 0.2883, 0.9298, 0.9025, + 0.1334, 0.5550, 0.8117, 0.6279, 0.3449, 0.0243, 0.1229, + 0.9611, 0.1874, 0.1658, 0.2885, 0.8268, 0.2574, 0.6107, + 0.2002, 0.9192, 0.3921, 0.8484, 0.3827, 0.7423, 0.2848, + 0.0035, 0.9043, 0.9321, 0.5788, 0.7712, 0.5319, 0.6982, + 0.1756, 0.4030, 0.4438, 0.2942, 0.4649, 0.2292, 0.6423, + 0.7195, 0.2790, 0.0687, 0.0143, 0.1979, 0.1425, 0.5878, + 0.6472, 0.6386, 0.5533, 0.2421, 0.9610, 0.6525, 0.3424, + 0.1032, 0.8260, 0.5753, 0.5679, 0.3964, 0.0736, 0.1785, + 0.9597, 0.1690, 0.6128, 0.9492, 0.5273, 0.8584, 0.7174, + 0.8218, 0.1864, 0.1570, 0.1062, 0.3101, 0.4385, 0.1763, + 0.7406, 0.0742, 0.1294, 0.4697, 0.3390, 0.1168, 0.7284, + 0.0881, 0.4031, 
0.5976, 0.7480, 0.9746, 0.6784, 0.1715, + 0.0343, 0.5531, 0.3940, 0.2529, 0.8111, 0.1221, 0.7053, + 0.2359, 0.7700, 0.0328, 0.4181, 0.4534, 0.3087, 0.6532, + 0.1721, 0.2822, 0.5211, 0.1748, 0.1064, 0.4268, 0.1492, + 0.7787, 0.8925, 0.4263, 0.6584, 0.3680, 0.6540, 0.0074, + 0.4741, 0.9439, 0.1620, 0.4818, 0.3863, 0.6675, 0.1834, + 0.9545, 0.8397, 0.1610, 0.2541, 0.4147, 0.4165, 0.3677, + 0.4380, 0.4202, 0.8723, 0.5223, 0.9512, 0.5945, 0.2059, + 0.2868, 0.3277, 0.1297, 0.7623, 0.4239, 0.1388, 0.1046, + 0.5244, 0.0718, 0.0844, 0.7335, 0.5597, 0.7750, 0.7483, + 0.4119, 0.2415, 0.0673, 0.2374, 0.4232, 0.8744, 0.7790, + 0.2419, 0.5519, 0.7006, 0.8827, 0.2686, 0.1595, 0.6097, + 0.2538, 0.5961, 0.8895, 0.0692, 0.6139, 0.2809, 0.9917, + 0.5446, 0.0495, 0.9018, 0.4905, 0.4142, 0.3006, 0.7093, + 0.4632, 0.8864, 0.7590, 0.6438, 0.0933, 0.7017, 0.4753, + 0.9850, 0.8030, 0.7347, 0.7125, 0.5472, 0.6968, 0.5733, + 0.3327, 0.0741, 0.8108, 0.0779, 0.5405, 0.9621, 0.9310, + 0.8746, 0.5794, 0.5021, 0.3896, 0.6361, 0.4835, 0.3480, + 0.6176, 0.0477, 0.5665, 0.7604, 0.5747, 0.7509, 0.5836, + 0.7729, 0.4839, 0.7593, 0.1163, 0.1495]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.9300, 0.7841, 0.7110, ..., 0.9115, 0.2040, 0.4111]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.034083366394043 seconds + +[20.2, 20.16, 20.08, 20.08, 20.16, 20.24, 20.36, 20.4, 20.64, 20.64] +[20.84, 20.88, 23.88, 24.6, 25.64, 26.2, 26.2, 27.0, 24.0, 23.92, 24.0, 23.52, 23.56, 23.76] +14.228131294250488 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 647809, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.034083366394043, 'TIME_S_1KI': 0.015489262060876035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 321.12039955139164, 'W': 22.569400922042004} +[20.2, 20.16, 20.08, 20.08, 20.16, 20.24, 20.36, 20.4, 20.64, 20.64, 20.88, 20.56, 20.52, 20.6, 20.68, 20.72, 20.0, 21.0, 21.12, 21.16] +368.76000000000005 +18.438000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 647809, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.034083366394043, 'TIME_S_1KI': 0.015489262060876035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 321.12039955139164, 'W': 22.569400922042004, 'J_1KI': 0.49570228192475196, 'W_1KI': 0.03483959148767925, 'W_D': 4.131400922042001, 'J_D': 58.78211474800112, 'W_D_1KI': 0.006377498494219748, 'J_D_1KI': 9.844720425649764e-06} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..569d057 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 289181, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.686526536941528, "TIME_S_1KI": 0.03695445598757016, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 324.68591009140016, "W": 22.814226029486633, "J_1KI": 1.1227774649489426, "W_1KI": 0.07889254836758512, "W_D": 4.440226029486631, "J_D": 
63.1920989794731, "W_D_1KI": 0.015354487429971649, "J_D_1KI": 5.309646010620216e-05} diff --git a/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..0c12568 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 10 -ss 5000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.0067293643951416016} + +tensor(indices=tensor([[4742, 824, 4219, ..., 2208, 2692, 1106], + [2227, 2640, 905, ..., 2494, 4908, 3090]]), + values=tensor([0.5330, 0.4399, 0.1637, ..., 0.1800, 0.7845, 0.3210]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.5676, 0.2274, 0.7445, ..., 0.8344, 0.2887, 0.3063]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.0067293643951416016 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 15603 -ss 5000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.566535234451294} + +tensor(indices=tensor([[1815, 2596, 437, ..., 1883, 1495, 3601], + [ 662, 1366, 4123, ..., 2867, 17, 4303]]), + values=tensor([0.2537, 0.9612, 0.4196, ..., 0.7819, 0.3756, 0.7330]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.4565, 0.9887, 0.5185, ..., 0.0050, 0.0496, 0.5640]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.566535234451294 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic coo 289181 -ss 5000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.686526536941528} + +tensor(indices=tensor([[3774, 4104, 1716, ..., 2959, 3358, 810], + [2642, 4754, 2109, ..., 867, 2120, 3002]]), + values=tensor([0.8150, 0.9729, 0.8881, ..., 0.8163, 0.5997, 0.7733]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.6444, 0.5658, 0.7442, ..., 0.5847, 0.4527, 0.5143]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.686526536941528 seconds + +tensor(indices=tensor([[3774, 4104, 1716, ..., 2959, 3358, 810], + [2642, 4754, 2109, ..., 867, 2120, 3002]]), + values=tensor([0.8150, 0.9729, 0.8881, ..., 0.8163, 0.5997, 0.7733]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.6444, 0.5658, 0.7442, ..., 0.5847, 0.4527, 0.5143]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.686526536941528 seconds + +[20.16, 20.28, 20.16, 20.28, 20.4, 20.24, 20.28, 20.52, 20.68, 20.8] +[21.0, 21.0, 20.8, 23.84, 25.6, 27.36, 28.12, 28.64, 25.52, 23.88, 23.96, 24.08, 24.0, 23.76] +14.231730222702026 
+{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 289181, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.686526536941528, 'TIME_S_1KI': 0.03695445598757016, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.68591009140016, 'W': 22.814226029486633} +[20.16, 20.28, 20.16, 20.28, 20.4, 20.24, 20.28, 20.52, 20.68, 20.8, 20.4, 20.4, 20.6, 20.6, 20.56, 20.4, 20.16, 20.32, 20.56, 20.72] +367.48 +18.374000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 289181, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.686526536941528, 'TIME_S_1KI': 0.03695445598757016, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 324.68591009140016, 'W': 22.814226029486633, 'J_1KI': 1.1227774649489426, 'W_1KI': 0.07889254836758512, 'W_D': 4.440226029486631, 'J_D': 63.1920989794731, 'W_D_1KI': 0.015354487429971649, 'J_D_1KI': 5.309646010620216e-05} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.json index b76a936..5a829d2 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 690, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 15.003246307373047, "TIME_S_1KI": 21.74383522807688, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 736.2026811027528, "W": 51.180846035126734, "J_1KI": 1066.960407395294, "W_1KI": 74.1751391813431, "W_D": 35.27984603512673, "J_D": 507.4772937932015, "W_D_1KI": 51.1302116451112, "J_D_1KI": 74.10175600740754} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 681, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.767336130142212, "TIME_S_1KI": 18.74792383280795, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1572.8213312911987, "W": 66.33395904463319, "J_1KI": 2309.576110559763, "W_1KI": 97.40669463235417, "W_D": 47.20295904463319, "J_D": 1119.2128730852603, "W_D_1KI": 69.3141836191383, "J_D_1KI": 101.78294217200926} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.output index 175c45d..e2fad62 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.520033359527588} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 
-ss 100000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.1540968418121338} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 21, ..., 999972, - 999987, 1000000]), - col_indices=tensor([ 5448, 9227, 13530, ..., 69235, 78462, 82074]), - values=tensor([0.3723, 0.4802, 0.5484, ..., 0.8279, 0.7827, 0.0235]), +tensor(crow_indices=tensor([ 0, 9, 17, ..., 999985, + 999991, 1000000]), + col_indices=tensor([17175, 31207, 43376, ..., 77066, 82322, 87573]), + values=tensor([0.5674, 0.0287, 0.4379, ..., 0.7433, 0.7100, 0.4913]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9343, 0.5585, 0.5996, ..., 0.1233, 0.3217, 0.3044]) +tensor([0.0850, 0.1151, 0.7763, ..., 0.6909, 0.3596, 0.4186]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 1.520033359527588 seconds +Time: 0.1540968418121338 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 690 -ss 100000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 15.003246307373047} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 681 -ss 100000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 12.767336130142212} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 20, ..., 999982, - 999992, 1000000]), - col_indices=tensor([ 122, 3109, 10697, ..., 57820, 90383, 91253]), - values=tensor([0.9003, 0.9806, 0.3302, ..., 0.9034, 0.0249, 0.7877]), +tensor(crow_indices=tensor([ 0, 13, 35, ..., 999978, + 999995, 1000000]), + col_indices=tensor([11566, 25133, 27464, ..., 53113, 69352, 69486]), + values=tensor([0.0574, 0.1837, 0.1783, ..., 0.8977, 0.9517, 0.8812]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6863, 0.9695, 0.8316, ..., 0.5738, 0.3295, 0.1413]) +tensor([0.1378, 0.0464, 0.2618, ..., 0.0793, 0.9401, 0.6278]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 15.003246307373047 seconds +Time: 12.767336130142212 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 20, ..., 999982, - 999992, 1000000]), - col_indices=tensor([ 122, 3109, 10697, ..., 57820, 90383, 91253]), - values=tensor([0.9003, 0.9806, 0.3302, ..., 0.9034, 0.0249, 0.7877]), +tensor(crow_indices=tensor([ 0, 13, 35, ..., 999978, + 999995, 1000000]), + col_indices=tensor([11566, 25133, 27464, ..., 53113, 69352, 69486]), + values=tensor([0.0574, 0.1837, 0.1783, ..., 0.8977, 0.9517, 0.8812]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6863, 0.9695, 0.8316, ..., 0.5738, 0.3295, 0.1413]) +tensor([0.1378, 0.0464, 0.2618, ..., 0.0793, 0.9401, 0.6278]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 15.003246307373047 seconds +Time: 12.767336130142212 seconds -[17.56, 17.56, 17.72, 17.72, 17.72, 17.92, 17.92, 17.64, 17.48, 17.56] -[17.24, 17.28, 18.12, 18.12, 20.28, 29.36, 44.04, 63.04, 74.68, 91.04, 91.28, 91.4, 88.68, 90.76] -14.384339809417725 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 690, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 15.003246307373047, 'TIME_S_1KI': 21.74383522807688, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 736.2026811027528, 'W': 51.180846035126734} -[17.56, 17.56, 17.72, 17.72, 17.72, 17.92, 17.92, 17.64, 17.48, 17.56, 17.84, 17.84, 17.6, 17.92, 17.8, 17.8, 17.68, 17.52, 17.16, 17.08] -318.02000000000004 -15.901000000000002 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 690, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 15.003246307373047, 'TIME_S_1KI': 21.74383522807688, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 736.2026811027528, 'W': 51.180846035126734, 'J_1KI': 1066.960407395294, 'W_1KI': 74.1751391813431, 'W_D': 35.27984603512673, 'J_D': 507.4772937932015, 'W_D_1KI': 51.1302116451112, 'J_D_1KI': 74.10175600740754} +[21.08, 21.24, 21.48, 21.4, 21.28, 21.32, 21.36, 21.36, 
21.08, 20.88] +[20.96, 20.76, 20.76, 21.96, 23.16, 31.64, 49.56, 62.28, 78.52, 91.0, 89.12, 89.6, 88.56, 88.56, 88.4, 87.48, 87.84, 87.84, 88.64, 87.08, 87.56, 86.84, 86.88] +23.71065068244934 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 681, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.767336130142212, 'TIME_S_1KI': 18.74792383280795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1572.8213312911987, 'W': 66.33395904463319} +[21.08, 21.24, 21.48, 21.4, 21.28, 21.32, 21.36, 21.36, 21.08, 20.88, 21.6, 21.4, 21.16, 20.96, 20.84, 20.92, 21.2, 21.48, 21.64, 21.44] +382.62 +19.131 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 681, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 12.767336130142212, 'TIME_S_1KI': 18.74792383280795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1572.8213312911987, 'W': 66.33395904463319, 'J_1KI': 2309.576110559763, 'W_1KI': 97.40669463235417, 'W_D': 47.20295904463319, 'J_D': 1119.2128730852603, 'W_D_1KI': 69.3141836191383, 'J_D_1KI': 101.78294217200926} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.json index a46b44b..c18ffcf 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 14.453001976013184, "TIME_S_1KI": 144.53001976013184, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1049.84748960495, "W": 50.98554654857658, "J_1KI": 10498.4748960495, "W_1KI": 509.85546548576576, "W_D": 35.17954654857658, "J_D": 724.3848723731041, "W_D_1KI": 351.7954654857658, "J_D_1KI": 3517.954654857658} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 90, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 14.579551696777344, "TIME_S_1KI": 161.9950188530816, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1234.5135778713227, "W": 56.902850471974645, "J_1KI": 13716.817531903585, "W_1KI": 632.2538941330516, "W_D": 37.990850471974646, "J_D": 824.2156650066377, "W_D_1KI": 422.12056079971825, "J_D_1KI": 4690.228453330203} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.output index f3d22b6..e9c004f 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,14 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 
10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 14.453001976013184} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 100000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 1.1569969654083252} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 101, 216, ..., 9999796, - 9999906, 10000000]), - col_indices=tensor([ 2079, 2370, 2404, ..., 91560, 92604, 94393]), - values=tensor([0.9041, 0.3243, 0.4250, ..., 0.8376, 0.1046, 0.5896]), +tensor(crow_indices=tensor([ 0, 107, 211, ..., 9999805, + 9999901, 10000000]), + col_indices=tensor([ 835, 870, 1184, ..., 95475, 95735, 97798]), + values=tensor([0.3341, 0.5436, 0.0587, ..., 0.4495, 0.2234, 0.1531]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3984, 0.6112, 0.9728, ..., 0.2358, 0.4123, 0.2200]) +tensor([0.0457, 0.8936, 0.6535, ..., 0.4171, 0.3043, 0.4939]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,16 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 14.453001976013184 seconds +Time: 1.1569969654083252 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 90 -ss 100000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 14.579551696777344} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 101, 216, ..., 9999796, - 9999906, 10000000]), - col_indices=tensor([ 2079, 2370, 2404, ..., 91560, 92604, 94393]), - values=tensor([0.9041, 0.3243, 0.4250, ..., 0.8376, 0.1046, 0.5896]), +tensor(crow_indices=tensor([ 0, 86, 187, ..., 9999810, + 9999912, 10000000]), + col_indices=tensor([ 586, 606, 2288, ..., 97134, 98615, 99354]), + values=tensor([0.0352, 0.3503, 0.4758, ..., 0.8606, 0.4637, 0.4341]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.3984, 0.6112, 0.9728, ..., 0.2358, 0.4123, 0.2200]) +tensor([0.1404, 0.2606, 0.7055, ..., 0.0689, 0.1209, 0.4700]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -33,13 +36,30 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 14.453001976013184 seconds +Time: 14.579551696777344 seconds -[17.32, 17.28, 17.28, 17.36, 17.64, 17.72, 17.68, 17.92, 17.88, 17.72] -[17.76, 17.96, 18.52, 20.36, 21.52, 25.36, 25.36, 31.2, 31.76, 45.88, 58.28, 65.88, 77.32, 84.48, 83.76, 84.4, 84.16, 84.96, 85.8, 86.16] -20.5910804271698 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 14.453001976013184, 'TIME_S_1KI': 144.53001976013184, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.84748960495, 'W': 50.98554654857658} -[17.32, 17.28, 17.28, 17.36, 17.64, 17.72, 17.68, 17.92, 17.88, 17.72, 17.16, 17.24, 17.32, 17.32, 17.44, 17.8, 17.76, 17.76, 17.72, 17.8] -316.12 -15.806000000000001 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 14.453001976013184, 'TIME_S_1KI': 144.53001976013184, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.84748960495, 'W': 50.98554654857658, 'J_1KI': 10498.4748960495, 'W_1KI': 509.85546548576576, 'W_D': 35.17954654857658, 'J_D': 724.3848723731041, 'W_D_1KI': 351.7954654857658, 'J_D_1KI': 3517.954654857658} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 86, 187, ..., 9999810, + 9999912, 10000000]), + col_indices=tensor([ 586, 606, 2288, ..., 97134, 98615, 99354]), + values=tensor([0.0352, 0.3503, 0.4758, ..., 0.8606, 0.4637, 0.4341]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1404, 0.2606, 0.7055, ..., 0.0689, 0.1209, 0.4700]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 14.579551696777344 seconds + +[21.08, 21.12, 21.04, 21.04, 20.92, 20.92, 21.04, 20.8, 21.0, 21.24] +[21.24, 21.04, 21.08, 24.52, 26.88, 28.96, 37.92, 39.28, 42.44, 52.92, 63.52, 70.12, 81.96, 84.36, 86.44, 88.32, 87.92, 87.92, 88.28, 87.76, 85.56] +21.695109605789185 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 90, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 14.579551696777344, 'TIME_S_1KI': 161.9950188530816, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1234.5135778713227, 'W': 56.902850471974645} +[21.08, 21.12, 21.04, 21.04, 20.92, 20.92, 21.04, 20.8, 21.0, 21.24, 20.92, 21.04, 20.88, 20.88, 20.84, 20.96, 20.92, 21.08, 21.36, 21.56] +378.24 +18.912 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 90, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 14.579551696777344, 'TIME_S_1KI': 161.9950188530816, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1234.5135778713227, 'W': 56.902850471974645, 'J_1KI': 13716.817531903585, 'W_1KI': 632.2538941330516, 'W_D': 37.990850471974646, 'J_D': 824.2156650066377, 'W_D_1KI': 422.12056079971825, 'J_D_1KI': 4690.228453330203} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..89d2769 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 15, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.035902976989746, "TIME_S_1KI": 669.060198465983, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1645.6200160980222, "W": 31.002306455007126, "J_1KI": 109708.00107320149, "W_1KI": 2066.8204303338084, "W_D": 12.343306455007127, "J_D": 655.1897097291944, "W_D_1KI": 822.8870970004751, "J_D_1KI": 54859.13980003168} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..debf383 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 100000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 6.8235437870025635} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1028, 2081, ..., + 99997896, 99998957, 100000000]), + col_indices=tensor([ 124, 166, 183, ..., 99846, 99876, 99948]), + values=tensor([0.8465, 0.2478, 0.2733, ..., 0.4852, 0.2351, 0.0898]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.2002, 0.6847, 0.0655, ..., 0.6857, 0.4421, 0.6041]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 6.8235437870025635 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 15 -ss 100000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.035902976989746} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1018, 2029, ..., + 99998012, 99999042, 100000000]), + col_indices=tensor([ 135, 301, 599, ..., 99852, 99911, 99921]), + values=tensor([0.1942, 0.0224, 0.2187, ..., 0.1366, 0.5257, 0.1941]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.8770, 0.9277, 0.4956, ..., 0.2234, 0.3583, 0.8131]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.035902976989746 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1018, 2029, ..., + 99998012, 99999042, 100000000]), + col_indices=tensor([ 135, 301, 599, ..., 99852, 99911, 99921]), + values=tensor([0.1942, 0.0224, 0.2187, ..., 0.1366, 0.5257, 0.1941]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.8770, 0.9277, 0.4956, ..., 0.2234, 0.3583, 0.8131]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.035902976989746 seconds + +[20.64, 20.72, 20.76, 20.84, 20.84, 20.64, 20.72, 20.72, 20.52, 20.36] +[20.56, 20.72, 20.88, 23.44, 24.24, 25.24, 26.4, 26.8, 25.64, 25.92, 27.36, 27.36, 29.2, 31.08, 30.84, 30.0, 28.04, 26.32, 26.08, 26.72, 29.6, 42.32, 51.04, 50.16, 49.24, 44.16, 44.16, 28.4, 28.12, 28.12, 27.28, 27.0, 26.12, 25.4, 25.28, 25.32, 25.4, 25.56, 25.56, 25.84, 26.12, 26.16, 26.2, 30.2, 33.96, 41.44, 47.56, 51.68, 53.48, 55.56, 53.04, 57.84] +53.080567359924316 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 15, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.035902976989746, 'TIME_S_1KI': 669.060198465983, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1645.6200160980222, 'W': 31.002306455007126} +[20.64, 20.72, 20.76, 20.84, 20.84, 20.64, 20.72, 20.72, 20.52, 20.36, 20.8, 20.68, 20.68, 20.48, 20.52, 20.56, 20.64, 21.16, 21.16, 21.28] +373.18 +18.659 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 15, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.035902976989746, 'TIME_S_1KI': 669.060198465983, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1645.6200160980222, 'W': 31.002306455007126, 'J_1KI': 109708.00107320149, 'W_1KI': 2066.8204303338084, 'W_D': 12.343306455007127, 'J_D': 655.1897097291944, 'W_D_1KI': 822.8870970004751, 'J_D_1KI': 54859.13980003168} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.json index c8834ef..078e111 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4862, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 16.123133897781372, "TIME_S_1KI": 3.316152591069801, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1103.8810626983643, "W": 59.41987873604025, "J_1KI": 227.04258796757802, "W_1KI": 12.221283162492854, "W_D": 43.51287873604025, "J_D": 808.3665574879647, "W_D_1KI": 8.949584273146904, "J_D_1KI": 1.8407207472535796} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4377, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.716068029403687, "TIME_S_1KI": 2.6767347565464217, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 931.1554330062867, "W": 56.32022952696994, "J_1KI": 212.73827576108903, "W_1KI": 
12.867313120166767, "W_D": 37.50822952696994, "J_D": 620.1322686328889, "W_D_1KI": 8.569392169744102, "J_D_1KI": 1.9578232053333566} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.output index 7c14ca6..abd8fec 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.3613910675048828} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.028092145919799805} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 99999, 100000, +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99999, 99999, 100000]), - col_indices=tensor([27177, 91140, 35351, ..., 36842, 63353, 40213]), - values=tensor([0.8085, 0.0220, 0.0238, ..., 0.9528, 0.8072, 0.8356]), + col_indices=tensor([91606, 44856, 81980, ..., 58749, 11667, 65996]), + values=tensor([0.6228, 0.5426, 0.7214, ..., 0.2159, 0.4095, 0.9061]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5173, 0.6337, 0.4770, ..., 0.8095, 0.3804, 0.7829]) +tensor([0.3666, 0.3385, 0.5084, ..., 0.0188, 0.4462, 0.5866]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.3613910675048828 seconds +Time: 0.028092145919799805 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2905 -ss 100000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.272582054138184} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3737 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.963433027267456} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99997, +tensor(crow_indices=tensor([ 0, 0, 3, ..., 99998, 99999, 100000]), - col_indices=tensor([57428, 26674, 73957, ..., 55311, 85675, 87326]), - values=tensor([0.1133, 0.1214, 0.0575, ..., 0.3604, 0.8021, 0.3274]), + col_indices=tensor([12625, 71568, 82165, ..., 56450, 55484, 67350]), + values=tensor([0.8626, 0.1693, 0.6451, ..., 0.6303, 0.1406, 0.5137]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7862, 0.8717, 0.9240, ..., 0.8758, 0.8236, 0.0748]) +tensor([0.7143, 0.3777, 0.4972, ..., 0.3298, 0.4245, 0.8777]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 6.272582054138184 seconds +Time: 8.963433027267456 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4862 -ss 100000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 16.123133897781372} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4377 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.716068029403687} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 99999, 100000, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99999, 100000]), - col_indices=tensor([36017, 48507, 97216, ..., 44545, 10809, 6488]), - values=tensor([0.2196, 0.1379, 0.1607, ..., 0.1720, 0.9833, 0.1649]), + col_indices=tensor([14042, 63263, 67415, ..., 5690, 28440, 2947]), + values=tensor([0.6549, 0.8223, 0.8286, ..., 0.0757, 0.1191, 0.8433]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6606, 0.9945, 0.6366, ..., 0.7352, 0.9827, 0.2989]) +tensor([0.4605, 0.0591, 0.4857, ..., 0.9698, 0.5094, 0.1166]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 16.123133897781372 seconds +Time: 11.716068029403687 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 99999, 100000, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99998, 99999, 100000]), - col_indices=tensor([36017, 48507, 97216, ..., 44545, 10809, 6488]), - values=tensor([0.2196, 0.1379, 0.1607, ..., 0.1720, 0.9833, 0.1649]), + col_indices=tensor([14042, 63263, 67415, ..., 5690, 28440, 2947]), + values=tensor([0.6549, 0.8223, 0.8286, ..., 0.0757, 0.1191, 0.8433]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6606, 0.9945, 0.6366, ..., 0.7352, 0.9827, 0.2989]) +tensor([0.4605, 0.0591, 0.4857, ..., 0.9698, 0.5094, 0.1166]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 16.123133897781372 seconds +Time: 11.716068029403687 seconds -[17.72, 17.76, 17.68, 17.88, 17.72, 17.52, 17.6, 17.48, 17.6, 17.6] -[17.72, 17.72, 17.8, 21.08, 22.76, 35.32, 49.56, 66.44, 76.68, 89.56, 88.0, 87.44, 86.52, 85.84, 85.76, 86.6, 86.04, 85.28] -18.57763910293579 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4862, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 16.123133897781372, 'TIME_S_1KI': 3.316152591069801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1103.8810626983643, 'W': 59.41987873604025} -[17.72, 17.76, 17.68, 17.88, 17.72, 17.52, 17.6, 17.48, 17.6, 17.6, 17.8, 17.8, 17.76, 17.68, 17.76, 17.88, 17.72, 17.48, 17.52, 17.48] -318.14 -15.907 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4862, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 16.123133897781372, 'TIME_S_1KI': 3.316152591069801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1103.8810626983643, 'W': 59.41987873604025, 'J_1KI': 227.04258796757802, 'W_1KI': 12.221283162492854, 'W_D': 43.51287873604025, 'J_D': 808.3665574879647, 'W_D_1KI': 8.949584273146904, 'J_D_1KI': 1.8407207472535796} +[20.72, 20.72, 21.04, 21.12, 21.0, 20.84, 20.76, 20.64, 20.76, 20.88] +[20.68, 20.6, 21.76, 23.12, 29.64, 29.64, 46.92, 60.56, 75.84, 88.72, 89.44, 88.68, 87.76, 85.84, 84.76, 84.72] +16.53323221206665 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4377, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.716068029403687, 'TIME_S_1KI': 2.6767347565464217, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 931.1554330062867, 'W': 56.32022952696994} +[20.72, 20.72, 21.04, 21.12, 21.0, 20.84, 20.76, 20.64, 20.76, 20.88, 21.2, 21.0, 21.0, 20.96, 20.88, 20.88, 21.0, 20.84, 20.96, 20.88] +376.24 +18.812 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4377, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.716068029403687, 'TIME_S_1KI': 2.6767347565464217, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 931.1554330062867, 'W': 56.32022952696994, 'J_1KI': 212.73827576108903, 'W_1KI': 12.867313120166767, 'W_D': 37.50822952696994, 'J_D': 620.1322686328889, 'W_D_1KI': 8.569392169744102, 'J_D_1KI': 1.9578232053333566} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_5e-05.json index 0831cb8..a8d877c 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1295, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 13.363306522369385, "TIME_S_1KI": 10.319155615729255, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1148.1477067947387, "W": 58.62479671787461, "J_1KI": 886.600545787443, "W_1KI": 45.27011329565607, "W_D": 42.857796717874606, "J_D": 839.3561048357486, "W_D_1KI": 33.09482372036649, "J_D_1KI": 25.55584843271544} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 978, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.565504789352417, "TIME_S_1KI": 10.80317463123969, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1068.5367103862764, "W": 60.39816254634489, "J_1KI": 1092.5733235033501, "W_1KI": 61.75681241957555, "W_D": 41.59016254634489, "J_D": 735.7941632347108, "W_D_1KI": 42.52572857499478, "J_D_1KI": 43.482340056231884} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_5e-05.output index fe6c43d..7837d89 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.8107287883758545} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 100000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.14230060577392578} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 12, ..., 499988, 499996, +tensor(crow_indices=tensor([ 0, 8, 11, ..., 499993, 499997, 500000]), - col_indices=tensor([ 2924, 5581, 32898, ..., 25573, 35176, 44980]), - values=tensor([0.4338, 0.2090, 0.8667, ..., 0.7968, 0.4161, 0.0285]), + col_indices=tensor([ 9796, 23171, 67855, ..., 67066, 76742, 83085]), + values=tensor([0.7201, 0.1273, 0.8027, ..., 0.9756, 0.6707, 0.8283]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.7311, 0.8750, 0.4611, ..., 0.2473, 0.3600, 0.7684]) +tensor([0.4403, 0.8898, 0.0117, ..., 0.2918, 0.2383, 0.8005]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 0.8107287883758545 seconds +Time: 0.14230060577392578 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1295 -ss 100000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 13.363306522369385} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 737 -ss 100000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.9050374031066895} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 499992, 499997, +tensor(crow_indices=tensor([ 0, 6, 10, ..., 499991, 499994, 500000]), - col_indices=tensor([20312, 45798, 57469, ..., 9915, 72511, 98823]), - values=tensor([0.2450, 0.9842, 0.2161, ..., 0.8948, 0.9103, 0.6478]), + col_indices=tensor([ 6674, 21503, 46805, ..., 26719, 29980, 87585]), + values=tensor([0.5980, 0.3045, 0.5949, ..., 0.7519, 0.5339, 0.5740]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.2448, 0.6389, 0.3272, ..., 0.3843, 0.8480, 0.1017]) +tensor([0.4906, 0.6526, 0.2791, ..., 0.1176, 0.6105, 0.9789]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 13.363306522369385 seconds +Time: 7.9050374031066895 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 978 -ss 100000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.565504789352417} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 499992, 499997, +tensor(crow_indices=tensor([ 0, 6, 10, ..., 499990, 499995, 500000]), - col_indices=tensor([20312, 45798, 57469, ..., 9915, 72511, 98823]), - values=tensor([0.2450, 0.9842, 0.2161, ..., 0.8948, 0.9103, 0.6478]), + col_indices=tensor([ 8342, 20491, 53690, ..., 28124, 41534, 52404]), + values=tensor([0.2581, 0.1900, 0.2922, ..., 0.1401, 0.2546, 0.0818]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.2448, 0.6389, 0.3272, ..., 0.3843, 0.8480, 0.1017]) +tensor([0.6836, 0.9613, 0.4530, ..., 0.7394, 0.7530, 0.5065]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +56,30 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 13.363306522369385 seconds +Time: 10.565504789352417 seconds -[17.48, 17.6, 17.56, 17.72, 17.52, 17.48, 17.56, 17.6, 17.6, 17.8] -[18.0, 17.84, 17.76, 18.88, 19.6, 35.4, 49.36, 64.12, 77.28, 86.8, 84.84, 85.28, 83.64, 83.64, 82.96, 82.4, 81.96, 80.44, 80.16] -19.58467698097229 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1295, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 13.363306522369385, 'TIME_S_1KI': 10.319155615729255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1148.1477067947387, 'W': 58.62479671787461} -[17.48, 17.6, 17.56, 17.72, 17.52, 17.48, 17.56, 17.6, 17.6, 17.8, 17.64, 17.44, 17.36, 17.52, 17.36, 17.2, 17.52, 17.44, 17.6, 17.6] -315.34 -15.767 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1295, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 13.363306522369385, 'TIME_S_1KI': 10.319155615729255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1148.1477067947387, 'W': 58.62479671787461, 'J_1KI': 886.600545787443, 'W_1KI': 45.27011329565607, 'W_D': 42.857796717874606, 'J_D': 839.3561048357486, 'W_D_1KI': 33.09482372036649, 'J_D_1KI': 25.55584843271544} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 10, ..., 499990, 499995, + 500000]), + col_indices=tensor([ 8342, 20491, 53690, ..., 28124, 41534, 52404]), + values=tensor([0.2581, 0.1900, 0.2922, ..., 0.1401, 0.2546, 0.0818]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.6836, 0.9613, 0.4530, ..., 0.7394, 0.7530, 0.5065]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.565504789352417 seconds + +[20.72, 20.72, 20.84, 20.8, 20.96, 21.12, 21.04, 21.04, 21.08, 21.0] +[21.04, 21.12, 24.28, 25.44, 29.92, 47.24, 47.24, 60.52, 73.88, 86.68, 91.28, 89.52, 88.64, 89.4, 88.52, 88.48, 88.84] +17.691543340682983 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 978, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.565504789352417, 'TIME_S_1KI': 10.80317463123969, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1068.5367103862764, 'W': 60.39816254634489} +[20.72, 20.72, 20.84, 20.8, 20.96, 21.12, 21.04, 21.04, 21.08, 21.0, 20.96, 20.96, 20.8, 20.56, 20.76, 20.92, 20.8, 20.8, 21.04, 21.16] +376.16 +18.808 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 978, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.565504789352417, 'TIME_S_1KI': 10.80317463123969, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1068.5367103862764, 'W': 60.39816254634489, 'J_1KI': 1092.5733235033501, 'W_1KI': 61.75681241957555, 'W_D': 41.59016254634489, 'J_D': 735.7941632347108, 'W_D_1KI': 42.52572857499478, 'J_D_1KI': 43.482340056231884} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.json index 30708d9..d2c9447 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 33188, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.848255395889282, "TIME_S_1KI": 0.3268728274041606, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 283.2797647285461, "W": 19.966696619139725, "J_1KI": 8.535608193580394, "W_1KI": 0.6016239791231688, "W_D": 4.899696619139723, "J_D": 69.51499950075144, "W_D_1KI": 0.14763458536638915, "J_D_1KI": 0.0044484327276843785} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 32642, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.62722635269165, "TIME_S_1KI": 0.32556909358163255, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 316.55393952369684, "W": 22.276847086365876, "J_1KI": 9.697749510559918, "W_1KI": 0.6824596252179975, "W_D": 3.922847086365877, "J_D": 55.743646958827945, "W_D_1KI": 0.12017790228435381, "J_D_1KI": 0.0036816954317858528} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.output 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.output index ce5f5e4..6982470 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.039971351623535156} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.040006160736083984} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9999, 9999, 10000]), - col_indices=tensor([5709, 5957, 5382, ..., 8260, 3428, 9778]), - values=tensor([0.1410, 0.8327, 0.5618, ..., 0.6127, 0.7950, 0.7693]), +tensor(crow_indices=tensor([ 0, 1, 3, ..., 9999, 9999, 10000]), + col_indices=tensor([1340, 1915, 9033, ..., 503, 5965, 6883]), + values=tensor([0.0083, 0.0540, 0.2304, ..., 0.4985, 0.4495, 0.5188]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9182, 0.7141, 0.2679, ..., 0.7671, 0.9231, 0.7252]) +tensor([0.8249, 0.8459, 0.0607, ..., 0.1481, 0.6545, 0.4281]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.039971351623535156 seconds +Time: 0.040006160736083984 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 26268 -ss 10000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.310471773147583} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 26245 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.442145824432373} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 9999, 9999, 10000]), - col_indices=tensor([6011, 6260, 4075, ..., 5576, 1824, 8975]), - values=tensor([0.9641, 0.0766, 0.9967, ..., 0.9539, 0.3769, 0.8002]), +tensor(crow_indices=tensor([ 0, 1, 3, ..., 9997, 9998, 10000]), + col_indices=tensor([2860, 3463, 6973, ..., 9469, 1415, 8396]), + values=tensor([0.6085, 0.7024, 0.8682, ..., 0.8356, 0.2567, 0.3000]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.0022, 0.8912, 0.7670, ..., 0.3905, 0.8453, 0.1961]) +tensor([0.0965, 0.5015, 0.6181, ..., 0.3988, 0.7058, 0.2579]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 8.310471773147583 seconds +Time: 8.442145824432373 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 33188 -ss 10000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.848255395889282} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 32642 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.62722635269165} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 9995, 9997, 10000]), - col_indices=tensor([1948, 7195, 8876, ..., 111, 6612, 9607]), - values=tensor([0.9860, 0.8888, 0.2852, ..., 0.2300, 0.3266, 0.6773]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9998, 9999, 10000]), + col_indices=tensor([9643, 3533, 4391, ..., 7997, 4934, 5111]), + values=tensor([0.3528, 0.0869, 0.9026, ..., 0.8307, 0.9649, 0.1279]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.8303, 0.1186, 0.7718, ..., 0.9103, 0.1807, 0.5186]) +tensor([0.0038, 0.4961, 0.2678, ..., 0.2768, 0.1846, 0.1025]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.848255395889282 seconds +Time: 10.62722635269165 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 9995, 9997, 10000]), - col_indices=tensor([1948, 7195, 8876, ..., 111, 6612, 9607]), - values=tensor([0.9860, 0.8888, 0.2852, ..., 0.2300, 0.3266, 0.6773]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9998, 9999, 10000]), + col_indices=tensor([9643, 3533, 4391, ..., 7997, 4934, 5111]), + values=tensor([0.3528, 0.0869, 0.9026, ..., 0.8307, 0.9649, 0.1279]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.8303, 0.1186, 0.7718, ..., 0.9103, 0.1807, 0.5186]) +tensor([0.0038, 0.4961, 0.2678, ..., 0.2768, 0.1846, 0.1025]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.848255395889282 seconds +Time: 10.62722635269165 seconds -[17.16, 17.24, 17.28, 17.28, 17.12, 16.8, 16.4, 16.48, 16.32, 16.6] -[16.76, 16.84, 20.04, 22.12, 23.96, 24.76, 24.76, 25.32, 22.2, 20.84, 19.88, 20.16, 20.24, 20.04] -14.187613010406494 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 33188, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.848255395889282, 'TIME_S_1KI': 0.3268728274041606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 283.2797647285461, 'W': 19.966696619139725} -[17.16, 17.24, 17.28, 17.28, 17.12, 16.8, 16.4, 16.48, 16.32, 16.6, 16.72, 16.48, 16.8, 16.64, 16.64, 16.68, 16.68, 16.56, 16.44, 16.52] -301.34000000000003 -15.067000000000002 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 33188, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.848255395889282, 'TIME_S_1KI': 0.3268728274041606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 283.2797647285461, 'W': 19.966696619139725, 'J_1KI': 8.535608193580394, 'W_1KI': 0.6016239791231688, 'W_D': 4.899696619139723, 'J_D': 69.51499950075144, 'W_D_1KI': 0.14763458536638915, 'J_D_1KI': 0.0044484327276843785} +[20.44, 20.4, 20.28, 20.6, 20.64, 20.8, 20.64, 20.64, 20.44, 20.08] +[19.88, 19.96, 20.4, 23.88, 26.4, 27.16, 27.92, 25.4, 24.92, 23.48, 23.48, 23.52, 23.48, 23.4] +14.209997415542603 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 32642, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.62722635269165, 'TIME_S_1KI': 0.32556909358163255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.55393952369684, 'W': 22.276847086365876} +[20.44, 20.4, 20.28, 20.6, 20.64, 20.8, 20.64, 20.64, 20.44, 20.08, 20.36, 20.2, 20.16, 20.24, 20.28, 20.16, 20.28, 20.28, 20.4, 20.4] +367.08 +18.354 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 32642, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.62722635269165, 'TIME_S_1KI': 0.32556909358163255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 316.55393952369684, 'W': 22.276847086365876, 'J_1KI': 9.697749510559918, 'W_1KI': 0.6824596252179975, 'W_D': 3.922847086365877, 'J_D': 55.743646958827945, 'W_D_1KI': 0.12017790228435381, 'J_D_1KI': 0.0036816954317858528} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.json index 9b3c069..58fd1a6 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4682, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.500732183456421, "TIME_S_1KI": 2.2427877367484883, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 298.49395805358887, "W": 21.02945948615313, "J_1KI": 63.753515175905356, "W_1KI": 4.491554781322753, "W_D": 5.974459486153128, "J_D": 84.80199219703674, "W_D_1KI": 1.2760485873885365, "J_D_1KI": 0.27254348299627007} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4524, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.524961471557617, "TIME_S_1KI": 2.326472473819102, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 325.2464212036133, "W": 24.58634437533153, "J_1KI": 71.89355022184202, "W_1KI": 5.434647297818641, "W_D": 6.037344375331532, "J_D": 79.86647472572331, "W_D_1KI": 1.334514671823946, "J_D_1KI": 0.29498555964278206} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.output index faf5980..f9d776f 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2965726852416992} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2320706844329834} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 21, ..., 99978, 99991, +tensor(crow_indices=tensor([ 0, 7, 19, ..., 99974, 99985, 100000]), - col_indices=tensor([ 670, 2215, 2340, ..., 5626, 6766, 7426]), - values=tensor([0.8454, 0.9971, 0.8602, ..., 0.7286, 0.6853, 0.9650]), + col_indices=tensor([1906, 2047, 2463, ..., 7748, 7892, 9833]), + values=tensor([0.5643, 0.8413, 0.3457, ..., 0.8525, 0.1641, 0.7374]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8368, 0.0517, 0.8880, ..., 0.1583, 0.3761, 0.3465]) +tensor([0.0072, 0.2945, 0.2791, ..., 0.1876, 0.3163, 0.8664]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.2965726852416992 seconds +Time: 0.2320706844329834 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3540 -ss 10000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.9380598068237305} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4524 -ss 10000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.524961471557617} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 19, ..., 99981, 99990, +tensor(crow_indices=tensor([ 0, 11, 23, ..., 99980, 99990, 100000]), - col_indices=tensor([ 218, 1968, 1988, ..., 6580, 8417, 9626]), - values=tensor([0.6495, 0.9774, 0.8878, ..., 0.7618, 0.8151, 0.2290]), + col_indices=tensor([1014, 1917, 2238, ..., 8617, 8839, 8958]), + values=tensor([0.4207, 0.3514, 0.4674, ..., 0.3563, 0.0399, 0.1482]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.1753, 0.6932, 0.8031, ..., 0.2090, 0.4929, 0.1708]) +tensor([0.4775, 0.1329, 0.4678, ..., 0.3641, 0.6182, 0.5714]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 7.9380598068237305 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4682 -ss 10000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.500732183456421} +Time: 10.524961471557617 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 20, ..., 99977, 99983, +tensor(crow_indices=tensor([ 0, 11, 23, ..., 99980, 99990, 100000]), - col_indices=tensor([ 32, 224, 1507, ..., 8865, 9626, 9660]), - values=tensor([0.3062, 0.4385, 0.1947, ..., 0.8116, 0.9937, 0.3510]), + col_indices=tensor([1014, 1917, 2238, ..., 8617, 8839, 8958]), + values=tensor([0.4207, 0.3514, 0.4674, ..., 0.3563, 0.0399, 0.1482]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0637, 0.2734, 0.9054, ..., 0.1369, 0.8787, 0.2539]) +tensor([0.4775, 0.1329, 0.4678, ..., 0.3641, 0.6182, 0.5714]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,30 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.500732183456421 seconds +Time: 10.524961471557617 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 20, ..., 99977, 99983, - 100000]), - col_indices=tensor([ 32, 224, 1507, ..., 8865, 9626, 9660]), - values=tensor([0.3062, 0.4385, 0.1947, ..., 0.8116, 0.9937, 0.3510]), - size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0637, 0.2734, 0.9054, ..., 0.1369, 0.8787, 0.2539]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 100000 -Density: 0.001 -Time: 10.500732183456421 seconds - -[16.56, 16.28, 16.36, 16.32, 16.4, 16.32, 16.36, 16.44, 16.36, 16.28] -[16.24, 15.96, 19.32, 19.32, 21.08, 28.0, 29.28, 30.04, 27.12, 25.96, 20.44, 20.04, 19.96, 19.96] -14.194086074829102 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.500732183456421, 'TIME_S_1KI': 2.2427877367484883, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.49395805358887, 'W': 21.02945948615313} -[16.56, 16.28, 16.36, 16.32, 16.4, 16.32, 16.36, 16.44, 16.36, 16.28, 16.52, 16.8, 17.04, 17.04, 17.16, 17.48, 17.4, 17.4, 16.84, 16.84] -301.1 -15.055000000000001 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.500732183456421, 'TIME_S_1KI': 2.2427877367484883, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.49395805358887, 'W': 21.02945948615313, 'J_1KI': 63.753515175905356, 'W_1KI': 4.491554781322753, 'W_D': 5.974459486153128, 'J_D': 84.80199219703674, 'W_D_1KI': 1.2760485873885365, 'J_D_1KI': 0.27254348299627007} +[20.36, 20.56, 20.56, 20.52, 20.68, 20.68, 20.56, 20.36, 20.8, 20.8] +[20.64, 20.76, 20.76, 24.36, 25.84, 32.36, 33.28, 33.76, 30.6, 26.92, 24.52, 24.16, 24.16] +13.228742599487305 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4524, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.524961471557617, 'TIME_S_1KI': 
2.326472473819102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.2464212036133, 'W': 24.58634437533153} +[20.36, 20.56, 20.56, 20.52, 20.68, 20.68, 20.56, 20.36, 20.8, 20.8, 20.8, 20.8, 20.68, 20.72, 20.68, 20.52, 20.52, 20.56, 20.48, 20.64] +370.98 +18.549 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4524, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.524961471557617, 'TIME_S_1KI': 2.326472473819102, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 325.2464212036133, 'W': 24.58634437533153, 'J_1KI': 71.89355022184202, 'W_1KI': 5.434647297818641, 'W_D': 6.037344375331532, 'J_D': 79.86647472572331, 'W_D_1KI': 1.334514671823946, 'J_D_1KI': 0.29498555964278206} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.json index b135810..4a3134d 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 481, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.463550806045532, "TIME_S_1KI": 21.75374387951254, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 332.4283616256714, "W": 23.410421194984455, "J_1KI": 691.1192549390258, "W_1KI": 48.67031433468701, "W_D": 6.792421194984453, "J_D": 96.45249141454698, "W_D_1KI": 14.121457785830463, "J_D_1KI": 29.35854009528163} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 478, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.185055017471313, "TIME_S_1KI": 21.307646480065507, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 359.0441580104829, "W": 25.246291403734826, "J_1KI": 751.1384058796713, "W_1KI": 52.81650921283437, "W_D": 6.905291403734825, "J_D": 98.20470255303393, "W_D_1KI": 14.446216325805073, "J_D_1KI": 30.222209886621492} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.output index bf59b23..7774b9b 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.1813583374023438} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.1943562030792236} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 110, 205, ..., 999812, - 999909, 1000000]), - col_indices=tensor([ 238, 361, 384, ..., 9612, 9634, 9698]), - values=tensor([0.5750, 0.8151, 0.3516, ..., 0.9302, 0.8544, 0.3311]), +tensor(crow_indices=tensor([ 0, 86, 175, ..., 999809, + 999908, 1000000]), + col_indices=tensor([ 216, 253, 262, ..., 9562, 9600, 9698]), + values=tensor([0.0465, 0.0516, 0.1303, ..., 0.2073, 0.6303, 0.1025]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.6330, 0.0586, 0.5281, ..., 0.1634, 0.9727, 0.9265]) +tensor([0.5186, 0.3626, 0.2367, ..., 0.6864, 0.2085, 0.5807]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 2.1813583374023438 seconds +Time: 2.1943562030792236 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 481 -ss 10000 -sd 0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.463550806045532} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 478 -ss 10000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.185055017471313} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 92, 212, ..., 999818, - 999919, 1000000]), - col_indices=tensor([ 159, 190, 205, ..., 9628, 9649, 9961]), - values=tensor([0.6162, 0.4289, 0.4486, ..., 0.9461, 0.6549, 0.6632]), +tensor(crow_indices=tensor([ 0, 86, 179, ..., 999806, + 999912, 1000000]), + col_indices=tensor([ 78, 134, 234, ..., 9468, 9789, 9980]), + values=tensor([0.0445, 0.5511, 0.1857, ..., 0.1042, 0.0198, 0.6991]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8230, 0.2220, 0.9348, ..., 0.2779, 0.8915, 0.3439]) +tensor([0.2366, 0.8915, 0.1308, ..., 0.5580, 0.2955, 0.4245]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.463550806045532 seconds +Time: 10.185055017471313 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 92, 212, ..., 999818, - 999919, 1000000]), - col_indices=tensor([ 159, 190, 205, ..., 9628, 9649, 9961]), - values=tensor([0.6162, 0.4289, 0.4486, ..., 0.9461, 0.6549, 0.6632]), +tensor(crow_indices=tensor([ 0, 86, 179, ..., 999806, + 999912, 1000000]), + col_indices=tensor([ 78, 134, 234, ..., 9468, 9789, 9980]), + values=tensor([0.0445, 0.5511, 0.1857, ..., 0.1042, 0.0198, 0.6991]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8230, 0.2220, 0.9348, ..., 0.2779, 0.8915, 0.3439]) +tensor([0.2366, 0.8915, 0.1308, ..., 0.5580, 0.2955, 0.4245]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.463550806045532 seconds +Time: 10.185055017471313 seconds -[20.24, 19.08, 17.84, 18.08, 19.12, 19.88, 20.84, 22.4, 22.4, 22.88] -[22.68, 22.68, 21.44, 22.2, 23.24, 31.92, 32.08, 31.76, 30.52, 23.92, 21.88, 21.88, 22.0, 22.24] -14.200016260147095 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 481, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.463550806045532, 'TIME_S_1KI': 21.75374387951254, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.4283616256714, 'W': 23.410421194984455} -[20.24, 19.08, 17.84, 18.08, 19.12, 19.88, 20.84, 22.4, 22.4, 22.88, 16.72, 17.0, 17.08, 16.92, 17.04, 16.84, 16.64, 16.56, 16.56, 16.32] -332.36 -16.618000000000002 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 481, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.463550806045532, 'TIME_S_1KI': 21.75374387951254, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.4283616256714, 'W': 23.410421194984455, 'J_1KI': 691.1192549390258, 'W_1KI': 48.67031433468701, 'W_D': 6.792421194984453, 'J_D': 96.45249141454698, 'W_D_1KI': 14.121457785830463, 'J_D_1KI': 29.35854009528163} +[20.68, 20.56, 20.6, 20.68, 20.88, 20.68, 20.68, 20.72, 20.52, 20.56] +[20.84, 20.88, 24.12, 26.0, 26.0, 33.56, 34.08, 34.64, 30.68, 29.36, 23.6, 24.0, 24.24, 24.2] +14.221659421920776 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 478, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.185055017471313, 'TIME_S_1KI': 21.307646480065507, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 359.0441580104829, 'W': 25.246291403734826} +[20.68, 20.56, 20.6, 20.68, 20.88, 20.68, 20.68, 20.72, 20.52, 20.56, 20.08, 19.96, 20.2, 20.28, 20.28, 20.04, 19.96, 20.0, 20.0, 20.24] +366.82 +18.341 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 478, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.185055017471313, 'TIME_S_1KI': 21.307646480065507, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 359.0441580104829, 'W': 25.246291403734826, 'J_1KI': 751.1384058796713, 'W_1KI': 52.81650921283437, 'W_D': 6.905291403734825, 'J_D': 98.20470255303393, 'W_D_1KI': 14.446216325805073, 'J_D_1KI': 30.222209886621492} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.json index 18a5242..3f166c3 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 11.54654312133789, "TIME_S_1KI": 115.4654312133789, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 353.263671875, "W": 21.71035974754772, "J_1KI": 3532.63671875, "W_1KI": 217.1035974754772, "W_D": 6.712359747547719, "J_D": 109.22126021575927, "W_D_1KI": 67.1235974754772, "J_D_1KI": 671.235974754772} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.613454580307007, "TIME_S_1KI": 106.13454580307007, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 404.4998287200927, "W": 24.764895943428325, "J_1KI": 4044.998287200927, "W_1KI": 247.64895943428326, "W_D": 6.426895943428324, "J_D": 104.97432794618597, "W_D_1KI": 64.26895943428323, "J_D_1KI": 642.6895943428324} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.output index d49136b..1d6440a 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 11.54654312133789} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.613454580307007} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 487, 979, ..., 4998949, - 4999474, 5000000]), - col_indices=tensor([ 0, 2, 9, ..., 9960, 9969, 9987]), - values=tensor([0.4699, 0.5377, 0.9444, ..., 0.8781, 0.2092, 0.8180]), +tensor(crow_indices=tensor([ 0, 466, 940, ..., 4998981, + 4999488, 5000000]), + col_indices=tensor([ 7, 62, 67, ..., 9933, 9947, 9995]), + values=tensor([0.3280, 0.5982, 0.7566, ..., 0.0214, 0.1608, 0.2417]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7212, 0.1994, 0.8974, ..., 0.2164, 0.6888, 0.0461]) +tensor([0.6482, 0.9620, 0.9641, ..., 0.2268, 0.6878, 0.0467]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 11.54654312133789 seconds +Time: 10.613454580307007 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 487, 979, ..., 4998949, - 4999474, 5000000]), - col_indices=tensor([ 0, 2, 9, ..., 9960, 9969, 9987]), - values=tensor([0.4699, 0.5377, 0.9444, ..., 0.8781, 0.2092, 0.8180]), +tensor(crow_indices=tensor([ 0, 466, 940, ..., 4998981, + 4999488, 5000000]), + col_indices=tensor([ 7, 62, 67, ..., 9933, 9947, 9995]), + values=tensor([0.3280, 0.5982, 0.7566, ..., 0.0214, 0.1608, 0.2417]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7212, 0.1994, 0.8974, ..., 0.2164, 0.6888, 0.0461]) +tensor([0.6482, 0.9620, 0.9641, ..., 0.2268, 0.6878, 0.0467]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 11.54654312133789 seconds +Time: 10.613454580307007 seconds -[16.56, 16.48, 16.72, 16.72, 16.8, 16.8, 16.64, 16.52, 16.52, 16.48] -[16.44, 16.64, 17.04, 18.24, 19.64, 27.0, 30.2, 30.0, 30.0, 30.08, 26.08, 22.8, 20.4, 20.48, 20.64, 20.48] -16.271663665771484 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 11.54654312133789, 'TIME_S_1KI': 115.4654312133789, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.263671875, 'W': 21.71035974754772} -[16.56, 16.48, 16.72, 16.72, 16.8, 16.8, 16.64, 16.52, 16.52, 16.48, 16.52, 16.36, 16.6, 16.84, 16.68, 16.68, 16.68, 16.96, 16.76, 16.84] -299.96000000000004 -14.998000000000001 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 11.54654312133789, 'TIME_S_1KI': 115.4654312133789, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.263671875, 'W': 21.71035974754772, 'J_1KI': 3532.63671875, 'W_1KI': 217.1035974754772, 'W_D': 6.712359747547719, 'J_D': 109.22126021575927, 'W_D_1KI': 67.1235974754772, 'J_D_1KI': 671.235974754772} +[20.4, 20.44, 20.2, 20.08, 20.12, 20.08, 20.2, 20.16, 20.4, 20.56] +[20.8, 20.8, 20.88, 21.88, 22.92, 26.0, 32.08, 34.68, 34.76, 34.4, 
27.36, 24.32, 24.48, 24.36, 24.64, 24.64] +16.33359694480896 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.613454580307007, 'TIME_S_1KI': 106.13454580307007, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 404.4998287200927, 'W': 24.764895943428325} +[20.4, 20.44, 20.2, 20.08, 20.12, 20.08, 20.2, 20.16, 20.4, 20.56, 20.68, 20.52, 20.68, 20.6, 20.4, 20.4, 20.32, 20.24, 20.64, 20.92] +366.76 +18.338 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.613454580307007, 'TIME_S_1KI': 106.13454580307007, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 404.4998287200927, 'W': 24.764895943428325, 'J_1KI': 4044.998287200927, 'W_1KI': 247.64895943428326, 'W_D': 6.426895943428324, 'J_D': 104.97432794618597, 'W_D_1KI': 64.26895943428323, 'J_D_1KI': 642.6895943428324} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.json index 94990e6..33437d6 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.129345178604126, "TIME_S_1KI": 211.29345178604126, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 619.1563564968109, "W": 21.836067076186225, "J_1KI": 6191.563564968109, "W_1KI": 218.36067076186225, "W_D": 6.581067076186224, "J_D": 186.6045519340038, "W_D_1KI": 65.81067076186224, "J_D_1KI": 658.1067076186224} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.31971502304077, "TIME_S_1KI": 213.19715023040771, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 743.9782962799072, "W": 25.26275875228295, "J_1KI": 7439.782962799072, "W_1KI": 252.6275875228295, "W_D": 6.6447587522829465, "J_D": 195.68552841711036, "W_D_1KI": 66.44758752282947, "J_D_1KI": 664.4758752282946} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.output index 12caf81..fb5c8df 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.129345178604126} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 
100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.31971502304077} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 944, 1951, ..., 9997982, - 9998990, 10000000]), - col_indices=tensor([ 12, 17, 18, ..., 9973, 9979, 9988]), - values=tensor([0.0454, 0.8748, 0.1892, ..., 0.1417, 0.8974, 0.6226]), +tensor(crow_indices=tensor([ 0, 1001, 2011, ..., 9998036, + 9998985, 10000000]), + col_indices=tensor([ 6, 7, 22, ..., 9960, 9965, 9984]), + values=tensor([0.5702, 0.0939, 0.6315, ..., 0.0656, 0.0225, 0.3527]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7518, 0.8666, 0.4972, ..., 0.4338, 0.0856, 0.8852]) +tensor([0.2410, 0.7808, 0.4106, ..., 0.9776, 0.6480, 0.7308]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 21.129345178604126 seconds +Time: 21.31971502304077 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 944, 1951, ..., 9997982, - 9998990, 10000000]), - col_indices=tensor([ 12, 17, 18, ..., 9973, 9979, 9988]), - values=tensor([0.0454, 0.8748, 0.1892, ..., 0.1417, 0.8974, 0.6226]), +tensor(crow_indices=tensor([ 0, 1001, 2011, ..., 9998036, + 9998985, 10000000]), + col_indices=tensor([ 6, 7, 22, ..., 9960, 9965, 9984]), + values=tensor([0.5702, 0.0939, 0.6315, ..., 0.0656, 0.0225, 0.3527]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.7518, 0.8666, 0.4972, ..., 0.4338, 0.0856, 0.8852]) +tensor([0.2410, 0.7808, 0.4106, ..., 0.9776, 0.6480, 0.7308]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 21.129345178604126 seconds +Time: 21.31971502304077 seconds -[16.8, 16.76, 16.76, 16.76, 16.88, 16.92, 16.92, 17.0, 17.04, 17.04] -[16.88, 17.04, 20.52, 21.6, 23.68, 23.68, 29.08, 30.56, 30.32, 30.08, 28.48, 23.96, 23.32, 20.92, 20.92, 21.08, 21.44, 21.44, 20.92, 20.72, 20.64, 20.4, 20.4, 20.56, 20.44, 20.36, 20.48, 20.68] -28.354756116867065 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.129345178604126, 'TIME_S_1KI': 211.29345178604126, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 619.1563564968109, 'W': 21.836067076186225} -[16.8, 16.76, 16.76, 16.76, 16.88, 16.92, 16.92, 17.0, 17.04, 17.04, 17.12, 17.28, 17.24, 17.24, 16.84, 16.64, 16.96, 16.72, 17.12, 17.08] -305.1 -15.255 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 
'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.129345178604126, 'TIME_S_1KI': 211.29345178604126, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 619.1563564968109, 'W': 21.836067076186225, 'J_1KI': 6191.563564968109, 'W_1KI': 218.36067076186225, 'W_D': 6.581067076186224, 'J_D': 186.6045519340038, 'W_D_1KI': 65.81067076186224, 'J_D_1KI': 658.1067076186224} +[20.36, 20.52, 20.36, 20.48, 20.68, 21.0, 21.24, 21.24, 21.16, 20.84] +[20.52, 20.32, 20.44, 21.76, 22.64, 26.96, 33.24, 37.24, 37.04, 37.08, 28.88, 28.88, 27.96, 25.08, 25.04, 25.0, 24.72, 24.48, 24.16, 24.2, 24.12, 24.08, 24.36, 24.36, 24.52, 24.6, 24.56, 24.6, 24.64] +29.44960618019104 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.31971502304077, 'TIME_S_1KI': 213.19715023040771, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 743.9782962799072, 'W': 25.26275875228295} +[20.36, 20.52, 20.36, 20.48, 20.68, 21.0, 21.24, 21.24, 21.16, 20.84, 20.68, 20.64, 20.8, 20.8, 20.6, 20.48, 20.4, 20.24, 20.52, 20.52] +372.36 +18.618000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.31971502304077, 'TIME_S_1KI': 213.19715023040771, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 743.9782962799072, 'W': 25.26275875228295, 'J_1KI': 7439.782962799072, 'W_1KI': 252.6275875228295, 'W_D': 6.6447587522829465, 'J_D': 195.68552841711036, 'W_D_1KI': 66.44758752282947, 'J_D_1KI': 664.4758752282946} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.2.json index 364c8ea..68787ce 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.2.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.021928787231445, "TIME_S_1KI": 420.21928787231445, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1136.679587249756, "W": 21.603080734566593, "J_1KI": 11366.79587249756, "W_1KI": 216.03080734566592, "W_D": 6.324080734566593, "J_D": 332.751312992096, "W_D_1KI": 63.24080734566592, "J_D_1KI": 632.4080734566592} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.423949003219604, "TIME_S_1KI": 424.23949003219604, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1297.7673851776126, "W": 25.077883657212237, "J_1KI": 12977.673851776126, "W_1KI": 250.77883657212234, "W_D": 6.708883657212237, "J_D": 347.18122630643876, "W_D_1KI": 67.08883657212236, "J_D_1KI": 670.8883657212235} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.2.output index b23f94c..add5f7d 100644 --- 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.021928787231445} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.423949003219604} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1977, 4005, ..., 19996092, - 19998061, 20000000]), - col_indices=tensor([ 0, 21, 29, ..., 9987, 9990, 9994]), - values=tensor([0.7851, 0.1514, 0.2063, ..., 0.8497, 0.4761, 0.4899]), +tensor(crow_indices=tensor([ 0, 1994, 3991, ..., 19995993, + 19997995, 20000000]), + col_indices=tensor([ 0, 1, 3, ..., 9992, 9994, 9999]), + values=tensor([0.6041, 0.5243, 0.8741, ..., 0.1407, 0.3932, 0.5137]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.7703, 0.2799, 0.3874, ..., 0.1897, 0.4899, 0.7472]) +tensor([0.3259, 0.3007, 0.5756, ..., 0.3180, 0.3051, 0.2793]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 42.021928787231445 seconds +Time: 42.423949003219604 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1977, 4005, ..., 19996092, - 19998061, 20000000]), - col_indices=tensor([ 0, 21, 29, ..., 9987, 9990, 9994]), - values=tensor([0.7851, 0.1514, 0.2063, ..., 0.8497, 0.4761, 0.4899]), +tensor(crow_indices=tensor([ 0, 1994, 3991, ..., 19995993, + 19997995, 20000000]), + col_indices=tensor([ 0, 1, 3, ..., 9992, 9994, 9999]), + values=tensor([0.6041, 0.5243, 0.8741, ..., 0.1407, 0.3932, 0.5137]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.7703, 0.2799, 0.3874, ..., 0.1897, 0.4899, 0.7472]) +tensor([0.3259, 0.3007, 0.5756, ..., 0.3180, 0.3051, 0.2793]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 42.021928787231445 seconds +Time: 42.423949003219604 seconds -[16.84, 16.68, 16.68, 16.88, 16.96, 17.08, 17.12, 17.24, 17.0, 17.0] -[16.76, 16.8, 19.84, 20.68, 22.88, 22.88, 24.36, 33.76, 31.92, 32.48, 31.68, 31.64, 25.4, 24.92, 23.64, 20.2, 20.04, 20.04, 20.04, 19.84, 20.12, 20.36, 20.72, 20.76, 20.48, 20.32, 20.4, 20.28, 20.76, 20.84, 20.84, 20.92, 21.0, 20.96, 20.8, 20.8, 20.84, 20.6, 20.56, 20.56, 20.84, 20.76, 20.76, 20.8, 21.0, 21.08, 20.8, 20.96, 20.72, 20.72, 20.72, 20.76] -52.61655044555664 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.021928787231445, 'TIME_S_1KI': 420.21928787231445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1136.679587249756, 'W': 21.603080734566593} -[16.84, 16.68, 16.68, 16.88, 16.96, 17.08, 17.12, 17.24, 17.0, 17.0, 17.2, 17.16, 17.08, 16.84, 16.84, 16.8, 17.0, 17.04, 17.16, 17.0] -305.58 -15.279 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.021928787231445, 'TIME_S_1KI': 420.21928787231445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1136.679587249756, 'W': 21.603080734566593, 'J_1KI': 11366.79587249756, 'W_1KI': 216.03080734566592, 'W_D': 6.324080734566593, 'J_D': 332.751312992096, 'W_D_1KI': 63.24080734566592, 'J_D_1KI': 632.4080734566592} +[20.44, 20.56, 20.28, 20.28, 20.12, 20.0, 20.16, 20.32, 20.52, 20.6] +[20.52, 20.28, 20.92, 22.92, 23.88, 25.76, 34.6, 34.6, 35.28, 35.44, 36.72, 34.64, 28.64, 28.32, 26.96, 24.44, 24.28, 24.44, 24.32, 24.16, 24.16, 24.2, 24.36, 24.32, 24.52, 24.44, 24.44, 24.44, 24.28, 24.16, 24.2, 24.28, 24.28, 24.56, 24.52, 24.4, 24.44, 24.32, 24.16, 24.28, 24.28, 24.04, 23.88, 23.64, 23.68, 23.68, 24.12, 24.08, 24.32, 24.24, 24.16] +51.74947786331177 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.423949003219604, 'TIME_S_1KI': 424.23949003219604, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1297.7673851776126, 'W': 25.077883657212237} +[20.44, 20.56, 20.28, 20.28, 20.12, 20.0, 20.16, 20.32, 20.52, 20.6, 20.12, 20.16, 20.28, 20.44, 20.72, 20.68, 20.64, 20.72, 20.72, 20.4] +367.38 +18.369 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 
'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.423949003219604, 'TIME_S_1KI': 424.23949003219604, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1297.7673851776126, 'W': 25.077883657212237, 'J_1KI': 12977.673851776126, 'W_1KI': 250.77883657212234, 'W_D': 6.708883657212237, 'J_D': 347.18122630643876, 'W_D_1KI': 67.08883657212236, 'J_D_1KI': 670.8883657212235} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.3.json index f2ba0ed..87a99e2 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.3.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 63.62427043914795, "TIME_S_1KI": 636.2427043914795, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1670.8393529319762, "W": 21.725499941161722, "J_1KI": 16708.393529319765, "W_1KI": 217.2549994116172, "W_D": 6.518499941161723, "J_D": 501.3171735184193, "W_D_1KI": 65.18499941161723, "J_D_1KI": 651.8499941161723} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 63.51457595825195, "TIME_S_1KI": 635.1457595825195, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1889.5348065662383, "W": 24.82306850434245, "J_1KI": 18895.348065662383, "W_1KI": 248.23068504342453, "W_D": 6.464068504342453, "J_D": 492.04563202357303, "W_D_1KI": 64.64068504342453, "J_D_1KI": 646.4068504342453} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.3.output index 6258d91..197fdaa 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 63.62427043914795} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 63.51457595825195} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2940, 5882, ..., 29993962, - 29997042, 30000000]), - col_indices=tensor([ 8, 14, 15, ..., 9977, 9993, 9996]), - values=tensor([0.8256, 0.2654, 0.0882, ..., 0.9659, 0.5243, 0.2720]), +tensor(crow_indices=tensor([ 0, 2997, 6036, ..., 29994089, + 29997023, 30000000]), + col_indices=tensor([ 3, 4, 5, ..., 9990, 9992, 9998]), + values=tensor([0.6330, 0.2666, 0.3549, ..., 0.7937, 0.6841, 0.3201]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.4780, 0.6736, 0.3887, ..., 0.7294, 0.7408, 0.5543]) +tensor([0.0444, 0.4833, 0.9897, ..., 0.1872, 0.1999, 0.3106]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,16 +16,16 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 63.62427043914795 seconds +Time: 63.51457595825195 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2940, 5882, ..., 29993962, - 29997042, 30000000]), - col_indices=tensor([ 8, 14, 15, ..., 9977, 9993, 9996]), - values=tensor([0.8256, 0.2654, 0.0882, ..., 0.9659, 0.5243, 0.2720]), +tensor(crow_indices=tensor([ 0, 2997, 6036, ..., 29994089, + 29997023, 30000000]), + col_indices=tensor([ 3, 4, 5, ..., 9990, 9992, 9998]), + values=tensor([0.6330, 0.2666, 0.3549, ..., 0.7937, 0.6841, 0.3201]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.4780, 0.6736, 0.3887, ..., 0.7294, 0.7408, 0.5543]) +tensor([0.0444, 0.4833, 0.9897, ..., 0.1872, 0.1999, 0.3106]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -33,13 +33,13 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 63.62427043914795 seconds +Time: 63.51457595825195 seconds -[16.52, 16.44, 16.52, 16.76, 16.76, 16.68, 17.12, 16.6, 16.6, 16.64] -[16.76, 16.68, 16.6, 20.16, 21.48, 23.48, 24.68, 31.48, 34.68, 34.64, 35.08, 35.08, 34.68, 24.92, 25.84, 25.44, 24.52, 23.52, 20.92, 20.76, 20.76, 20.88, 20.6, 20.8, 20.8, 20.96, 20.88, 20.6, 20.8, 20.56, 20.44, 20.52, 20.48, 20.8, 20.56, 20.6, 20.6, 20.68, 20.72, 20.68, 20.88, 20.92, 20.8, 20.8, 20.52, 20.56, 20.68, 20.96, 20.8, 20.8, 20.92, 20.8, 20.56, 20.68, 20.8, 20.72, 20.8, 21.12, 21.24, 21.16, 21.12, 21.12, 21.04, 20.6, 20.36, 20.2, 20.4, 20.48, 20.52, 20.88, 20.64, 20.32, 20.4, 20.4, 20.4, 20.4] -76.90683102607727 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 63.62427043914795, 'TIME_S_1KI': 636.2427043914795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1670.8393529319762, 'W': 21.725499941161722} -[16.52, 16.44, 16.52, 16.76, 16.76, 16.68, 17.12, 16.6, 16.6, 16.64, 16.32, 16.72, 16.92, 17.2, 17.32, 17.52, 17.16, 17.24, 17.28, 17.12] -304.14 -15.206999999999999 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 63.62427043914795, 
'TIME_S_1KI': 636.2427043914795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1670.8393529319762, 'W': 21.725499941161722, 'J_1KI': 16708.393529319765, 'W_1KI': 217.2549994116172, 'W_D': 6.518499941161723, 'J_D': 501.3171735184193, 'W_D_1KI': 65.18499941161723, 'J_D_1KI': 651.8499941161723} +[20.36, 20.68, 20.6, 20.56, 20.56, 20.4, 20.04, 20.04, 20.04, 20.04] +[20.24, 20.76, 20.96, 24.4, 26.68, 27.72, 29.08, 34.08, 34.68, 33.64, 34.52, 34.48, 34.48, 28.36, 30.48, 29.8, 28.56, 27.48, 24.2, 24.16, 24.04, 23.92, 24.12, 24.32, 24.08, 24.08, 24.2, 23.96, 23.8, 23.96, 24.0, 24.24, 24.4, 24.32, 24.32, 24.48, 24.52, 24.52, 24.56, 24.68, 24.52, 24.44, 24.32, 24.16, 24.36, 24.64, 24.56, 24.68, 24.64, 24.52, 24.52, 24.64, 24.64, 24.44, 24.44, 24.2, 24.16, 24.28, 24.36, 24.36, 24.52, 24.44, 24.32, 24.32, 24.4, 24.52, 24.56, 24.52, 24.48, 24.32, 24.24, 24.44, 24.44, 24.72, 24.6] +76.12011408805847 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 63.51457595825195, 'TIME_S_1KI': 635.1457595825195, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1889.5348065662383, 'W': 24.82306850434245} +[20.36, 20.68, 20.6, 20.56, 20.56, 20.4, 20.04, 20.04, 20.04, 20.04, 21.12, 20.32, 20.32, 20.08, 20.24, 20.36, 20.44, 20.68, 20.76, 20.6] +367.17999999999995 +18.358999999999998 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 63.51457595825195, 'TIME_S_1KI': 635.1457595825195, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1889.5348065662383, 'W': 24.82306850434245, 'J_1KI': 18895.348065662383, 'W_1KI': 248.23068504342453, 'W_D': 6.464068504342453, 'J_D': 492.04563202357303, 'W_D_1KI': 64.64068504342453, 'J_D_1KI': 646.4068504342453} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..3d915b6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 84.6539237499237, "TIME_S_1KI": 846.5392374992371, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2521.516366138459, "W": 24.86361045903338, "J_1KI": 25215.16366138459, "W_1KI": 248.63610459033382, "W_D": 6.453610459033381, "J_D": 654.4859774065027, "W_D_1KI": 64.53610459033382, "J_D_1KI": 645.3610459033382} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..47e9940 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 
40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 84.6539237499237} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3917, 7951, ..., 39991952, + 39996009, 40000000]), + col_indices=tensor([ 1, 2, 4, ..., 9995, 9996, 9999]), + values=tensor([0.0966, 0.2927, 0.1488, ..., 0.0691, 0.2938, 0.1140]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.4209, 0.9243, 0.0966, ..., 0.4873, 0.3754, 0.4036]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 84.6539237499237 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3917, 7951, ..., 39991952, + 39996009, 40000000]), + col_indices=tensor([ 1, 2, 4, ..., 9995, 9996, 9999]), + values=tensor([0.0966, 0.2927, 0.1488, ..., 0.0691, 0.2938, 0.1140]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.4209, 0.9243, 0.0966, ..., 0.4873, 0.3754, 0.4036]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 84.6539237499237 seconds + +[20.0, 20.04, 20.0, 20.0, 20.28, 20.56, 20.52, 20.56, 20.84, 20.64] +[20.52, 20.6, 21.16, 24.48, 26.48, 27.76, 28.64, 27.96, 27.96, 38.92, 38.12, 38.68, 37.92, 32.92, 25.72, 25.88, 25.44, 27.52, 27.12, 26.76, 26.68, 26.68, 24.24, 24.52, 24.56, 24.84, 24.84, 24.6, 24.64, 24.4, 24.24, 24.16, 24.08, 24.08, 24.04, 24.32, 24.32, 24.36, 24.44, 24.32, 24.52, 24.56, 24.64, 24.88, 24.88, 24.68, 24.68, 24.76, 24.76, 24.48, 24.44, 24.4, 24.48, 24.52, 24.72, 24.72, 24.64, 24.52, 24.52, 24.6, 24.36, 24.48, 24.36, 24.36, 24.4, 24.24, 24.36, 24.36, 24.36, 24.36, 24.32, 24.32, 24.36, 24.64, 24.76, 24.76, 24.84, 24.64, 24.68, 24.4, 24.44, 24.32, 24.24, 24.24, 24.28, 24.2, 24.36, 24.28, 24.32, 24.44, 24.44, 24.44, 24.48, 24.52, 24.76, 24.76, 24.76, 24.8, 24.68, 24.44] +101.41392660140991 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 84.6539237499237, 'TIME_S_1KI': 846.5392374992371, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2521.516366138459, 'W': 24.86361045903338} +[20.0, 20.04, 20.0, 20.0, 20.28, 20.56, 20.52, 20.56, 20.84, 20.64, 20.52, 20.52, 20.64, 20.84, 20.72, 20.6, 20.36, 20.56, 20.4, 20.36] +368.2 +18.41 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 84.6539237499237, 'TIME_S_1KI': 846.5392374992371, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2521.516366138459, 'W': 24.86361045903338, 'J_1KI': 25215.16366138459, 'W_1KI': 248.63610459033382, 'W_D': 6.453610459033381, 'J_D': 654.4859774065027, 'W_D_1KI': 64.53610459033382, 'J_D_1KI': 645.3610459033382} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..74b3dc6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 107.38222455978394, "TIME_S_1KI": 1073.8222455978394, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3211.2984370231634, "W": 24.76028863403025, "J_1KI": 32112.984370231636, "W_1KI": 247.6028863403025, "W_D": 6.163288634030248, "J_D": 799.3509061999323, "W_D_1KI": 61.632886340302484, "J_D_1KI": 616.3288634030248} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..80ae590 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 107.38222455978394} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5036, 9995, ..., 49989852, + 49994946, 50000000]), + col_indices=tensor([ 0, 1, 5, ..., 9992, 9993, 9994]), + values=tensor([0.1426, 0.2723, 0.4807, ..., 0.6484, 0.4851, 0.2642]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.1080, 0.5862, 0.7267, ..., 0.6646, 0.4667, 0.9799]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 107.38222455978394 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5036, 9995, ..., 49989852, + 49994946, 50000000]), + col_indices=tensor([ 0, 1, 5, ..., 9992, 9993, 9994]), + values=tensor([0.1426, 0.2723, 0.4807, ..., 0.6484, 0.4851, 0.2642]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.1080, 0.5862, 0.7267, ..., 0.6646, 0.4667, 0.9799]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 107.38222455978394 seconds + +[20.8, 20.8, 20.64, 20.6, 20.6, 20.68, 20.72, 20.6, 20.56, 20.44] +[20.56, 20.56, 20.56, 21.84, 22.92, 24.12, 25.4, 25.92, 27.4, 38.8, 39.56, 40.04, 40.28, 34.76, 27.88, 27.88, 28.28, 28.6, 28.96, 31.0, 29.84, 28.44, 27.6, 26.56, 24.24, 24.32, 24.36, 24.36, 24.24, 24.32, 24.44, 24.44, 24.44, 24.44, 24.32, 24.2, 24.32, 24.44, 24.36, 24.36, 24.36, 24.28, 24.16, 23.88, 23.96, 24.12, 24.12, 24.28, 24.64, 24.6, 24.64, 24.8, 24.8, 24.64, 24.4, 24.36, 24.48, 24.4, 24.28, 24.08, 23.96, 24.0, 24.16, 24.08, 24.4, 24.4, 24.32, 24.6, 24.68, 24.44, 24.44, 24.16, 24.12, 24.24, 24.48, 24.6, 24.64, 24.64, 24.48, 24.4, 24.28, 24.12, 24.2, 24.08, 23.92, 24.08, 23.96, 23.84, 24.0, 24.0, 24.0, 24.0, 24.08, 24.12, 24.2, 24.32, 24.4, 24.32, 24.36, 24.72, 24.6, 24.64, 24.64, 24.48, 24.24, 24.28, 24.24, 24.4, 24.36, 24.36, 24.24, 24.2, 24.12, 24.16, 24.2, 24.2, 24.28, 24.28, 24.28, 24.24, 24.36, 24.2, 24.36, 24.44, 24.76, 24.48, 24.44, 24.44] +129.69551706314087 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 107.38222455978394, 'TIME_S_1KI': 1073.8222455978394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3211.2984370231634, 'W': 24.76028863403025} +[20.8, 20.8, 20.64, 20.6, 20.6, 20.68, 20.72, 20.6, 20.56, 20.44, 20.92, 20.92, 20.92, 20.88, 20.76, 20.4, 20.72, 20.48, 20.44, 20.28] +371.94 +18.597 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 107.38222455978394, 'TIME_S_1KI': 1073.8222455978394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3211.2984370231634, 'W': 24.76028863403025, 'J_1KI': 32112.984370231636, 'W_1KI': 247.6028863403025, 'W_D': 6.163288634030248, 'J_D': 799.3509061999323, 'W_D_1KI': 61.632886340302484, 'J_D_1KI': 616.3288634030248} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.json index 66821f5..1ef843a 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 141552, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.798607587814331, "TIME_S_1KI": 0.07628721309352274, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 269.76709407806396, "W": 18.985490730093364, "J_1KI": 1.9057808725985077, "W_1KI": 0.13412379005661076, "W_D": 4.025490730093365, "J_D": 57.198676185607916, "W_D_1KI": 
0.028438246934648505, "J_D_1KI": 0.0002009031799949736} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 140907, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.631132364273071, "TIME_S_1KI": 0.07544786535994004, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 311.0322494506836, "W": 21.8403623772563, "J_1KI": 2.2073583956133027, "W_1KI": 0.15499842007321354, "W_D": 3.2933623772562974, "J_D": 46.90132383155815, "W_D_1KI": 0.023372595948081342, "J_D_1KI": 0.00016587249709440512} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.output index 2de78b8..52e484b 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,266 +1,373 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.015316009521484375} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.015331745147705078} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([9112, 5292, 3575, 5961, 5092, 9328, 2019, 8963, 6545, - 7422, 818, 4088, 407, 2015, 64, 4328, 8365, 6206, - 9094, 5899, 6513, 5954, 4347, 8726, 6057, 3857, 2262, - 5262, 3242, 3739, 9546, 2710, 9191, 261, 5342, 5031, - 4219, 1193, 8424, 3055, 1118, 7444, 3421, 4719, 884, - 9797, 6413, 7746, 2915, 8061, 1921, 7039, 8615, 6298, - 8962, 4075, 2597, 5310, 5194, 9913, 649, 821, 1630, - 6166, 2303, 666, 4248, 9703, 9421, 933, 8076, 2414, - 2921, 3906, 2279, 4858, 3629, 8129, 746, 1260, 8400, - 3559, 6490, 9630, 9771, 2054, 6368, 4294, 59, 1873, - 8913, 6245, 1629, 8022, 7105, 7514, 7136, 705, 7667, - 598, 5268, 8736, 8255, 3780, 9168, 855, 891, 5701, - 1088, 5566, 9882, 6656, 6138, 1874, 1409, 2224, 9088, - 5917, 2460, 1920, 8927, 3689, 9209, 2413, 7383, 5435, - 1164, 4656, 1145, 9858, 1276, 1566, 4567, 6670, 8019, - 77, 4463, 9904, 1378, 6274, 6811, 7718, 6698, 6133, - 1817, 5750, 9902, 8179, 7778, 9310, 3186, 2889, 6830, - 3732, 9331, 80, 4061, 9725, 2461, 365, 5801, 7394, - 1364, 7916, 4458, 9726, 1384, 8245, 4422, 1021, 8441, - 8635, 9830, 6237, 8296, 9417, 1755, 5150, 8755, 573, - 6482, 6077, 4649, 5929, 3346, 2879, 2567, 9845, 9617, - 7984, 1423, 20, 9389, 7465, 1857, 6041, 81, 3754, - 3443, 6292, 8270, 777, 2884, 6004, 5456, 7516, 4457, - 2299, 9549, 3283, 8815, 7872, 6555, 9764, 3765, 3453, - 3743, 966, 1219, 6368, 7586, 7598, 502, 5829, 8794, - 426, 1814, 9137, 7261, 9402, 2755, 22, 4007, 5976, - 44, 8313, 1900, 8548, 3115, 7074, 1012, 8434, 6097, - 7768, 5045, 3244, 2886, 1954, 541, 5982, 5878, 5999, - 4618, 9582, 4847, 7320, 9835, 5610, 1305, 8884, 6363, - 6478, 839, 5162, 9728, 5499, 1528, 5689, 3183, 7129, - 2488, 4352, 928, 8220, 1382, 3689, 8735, 2195, 9727, - 771, 6448, 8679, 1573, 5638, 2952, 6446, 4389, 8540, - 4711, 8193, 7117, 2236, 5186, 1639, 2122, 9434, 4600, - 6263, 5259, 9546, 979, 6158, 4405, 3909, 3341, 4623, - 4859, 8302, 4199, 6104, 2336, 9661, 8228, 7721, 9724, - 9263, 6964, 9466, 2887, 6905, 9477, 5133, 4559, 533, - 2189, 788, 3859, 2643, 7749, 7409, 8072, 2895, 4935, - 3245, 8702, 7464, 5201, 7428, 20, 9781, 4720, 1259, - 9047, 5360, 2447, 5616, 727, 910, 8863, 9370, 6403, - 6786, 6714, 5315, 5973, 2498, 2170, 4132, 8638, 1133, - 5562, 8468, 1057, 3841, 5439, 5548, 364, 3661, 4774, - 5311, 8396, 2293, 3242, 6666, 9193, 5519, 7639, 9475, - 3599, 3244, 2790, 1000, 3648, 8996, 802, 8274, 5880, - 3912, 7828, 7630, 88, 1120, 4545, 662, 8080, 8593, - 2010, 2001, 5509, 1694, 1693, 9968, 188, 5356, 2456, - 5653, 1100, 9884, 8606, 5679, 1447, 3651, 3652, 4270, - 4855, 4081, 6277, 7485, 8206, 943, 9917, 8822, 8258, - 9850, 3678, 6542, 8398, 7438, 4466, 4288, 3429, 1080, - 3756, 619, 8368, 2943, 5279, 4660, 7902, 7718, 2077, - 4700, 6225, 6141, 3048, 5583, 5672, 4237, 9220, 6789, - 8111, 8353, 6234, 7938, 6008, 8305, 6276, 297, 9555, - 5707, 1837, 2743, 2829, 3042, 4370, 7388, 411, 7128, - 8906, 2135, 2863, 8798, 9039, 1464, 3844, 5839, 7296, - 2402, 5778, 6693, 1937, 753, 3696, 9554, 7527, 2292, - 1677, 6534, 2459, 4599, 1635, 4597, 5136, 8224, 1999, - 8607, 1884, 7826, 4374, 308, 5413, 6530, 4467, 4307, - 1520, 5060, 5277, 5551, 2836, 8168, 4583, 6885, 7397, - 5086, 2166, 9687, 6985, 782, 3027, 3733, 5636, 3222, - 7058, 3585, 5104, 9529, 2646, 8519, 9774, 6453, 1089, - 4686, 8416, 5947, 3116, 5175, 8420, 7199, 8094, 9363, - 5867, 977, 7349, 1138, 8895, 8226, 6801, 1193, 7834, - 270, 5919, 3616, 9787, 5305, 1621, 9604, 7543, 6013, - 1208, 
3164, 2863, 3384, 9301, 2260, 8403, 6920, 6068, - 5765, 6466, 4, 435, 2987, 5160, 1159, 5240, 7665, - 7666, 3496, 5890, 4087, 2953, 5050, 4444, 9104, 2002, - 2471, 8127, 5828, 6674, 2055, 338, 861, 9115, 7320, - 5987, 2695, 7595, 1847, 5469, 5364, 131, 9179, 7641, - 950, 6916, 2403, 4215, 3306, 372, 8899, 6595, 9275, - 9627, 6167, 7426, 8582, 6983, 176, 5262, 6497, 2411, - 5764, 3749, 2701, 4911, 6555, 8022, 3195, 6448, 88, - 3416, 512, 1476, 3734, 9571, 5879, 8312, 9489, 4113, - 9717, 371, 1290, 2779, 4391, 6234, 2856, 3490, 1940, - 9905, 1525, 994, 3707, 929, 5995, 5360, 7010, 5650, - 8880, 951, 6174, 5544, 4925, 6103, 5678, 4768, 3960, - 2589, 2623, 2714, 910, 2437, 5839, 6147, 8012, 6910, - 3761, 374, 344, 2233, 8593, 1259, 791, 3808, 9483, - 3657, 9667, 1835, 262, 6735, 8158, 1317, 3345, 593, - 9900, 4517, 43, 7724, 7756, 8051, 2464, 2382, 9551, - 482, 4411, 9983, 8638, 3604, 5980, 6568, 498, 884, - 9577, 1204, 8829, 2143, 8709, 5032, 8066, 4401, 2060, - 5903, 5267, 8698, 5847, 5943, 7454, 1160, 8740, 9725, - 1839, 5588, 4536, 704, 5542, 6133, 6713, 1191, 4737, - 9399, 4710, 6386, 7107, 8125, 104, 1397, 1398, 5125, - 3131, 2162, 8266, 675, 2153, 3931, 9784, 7854, 3433, - 1129, 6921, 4688, 2676, 7539, 2908, 159, 4716, 1238, - 3207, 1609, 6019, 8773, 1910, 6472, 7137, 9086, 8335, - 696, 7521, 729, 751, 3579, 5449, 8063, 6228, 2390, - 7376, 4836, 8275, 7154, 3050, 2576, 1473, 8343, 2610, - 7357, 3457, 410, 3334, 1324, 5461, 8837, 3534, 9245, - 7741, 3589, 1706, 4804, 9563, 1793, 2579, 2082, 8159, - 5577, 6674, 1427, 6129, 9640, 4057, 5553, 3970, 7282, - 9122, 3611, 9436, 8975, 6478, 8150, 1984, 2504, 4171, - 7462, 3550, 3994, 7069, 7248, 4676, 4904, 254, 8661, - 438, 7287, 9117, 2771, 7521, 2100, 6071, 7282, 9158, - 6042, 7770, 8275, 8513, 4678, 3085, 5802, 2613, 9576, - 3338, 5128, 2956, 544, 9210, 477, 521, 1403, 2461, - 9392, 8604, 1695, 5759, 8186, 8157, 9519, 6561, 9941, - 3767, 3185, 2012, 3106, 9408, 3943, 6742, 6790, 3649, - 2308, 3780, 5471, 9861, 2028, 220, 5009, 5916, 1418, - 7499, 7134, 5634, 2982, 2159, 5007, 6379, 1644, 1585, - 655, 2063, 509, 4928, 4419, 5224, 6728, 1096, 5532, - 7183, 8187, 9649, 6985, 850, 2224, 2125, 5250, 2441, - 2746, 5010, 9796, 7532, 4370, 5895, 816, 964, 9747, - 142, 9638, 2350, 593, 1466, 2605, 5324, 3252, 9029, - 8442, 930, 4343, 9939, 6148, 3972, 3871, 2071, 3696, - 8203, 3524, 2625, 1316, 1646, 6666, 529, 6459, 989, - 5600, 8399, 1285, 8481, 7590, 29, 8642, 6338, 8582, - 762, 2654, 667, 5523, 3773, 4019, 1164, 6494, 6234, - 2145, 3360, 6578, 3672, 4448, 9661, 2981, 1353, 5538, - 5490, 6854, 5331, 4417, 8287, 9963, 1542, 9640, 5716, - 2268, 5783, 3828, 2802, 3884, 6132, 9548, 5307, 8501, - 2200]), - values=tensor([0.9419, 0.1524, 0.6218, 0.8687, 0.7562, 0.7677, 0.3979, - 0.9838, 0.6947, 0.4933, 0.1301, 0.2967, 0.8344, 0.5735, - 0.9834, 0.9958, 0.7271, 0.2308, 0.3268, 0.5366, 0.3581, - 0.1114, 0.7784, 0.4911, 0.1357, 0.4900, 0.8647, 0.2578, - 0.0862, 0.4816, 0.6605, 0.6509, 0.5868, 0.1831, 0.6063, - 0.9861, 0.3291, 0.5581, 0.6974, 0.9991, 0.9943, 0.2495, - 0.7625, 0.5401, 0.7095, 0.7088, 0.7284, 0.6546, 0.9421, - 0.9648, 0.1593, 0.7038, 0.5084, 0.3162, 0.5113, 0.2822, - 0.9359, 0.1984, 0.8329, 0.6721, 0.0232, 0.0056, 0.2146, - 0.4983, 0.4969, 0.9219, 0.1003, 0.5479, 0.3558, 0.7477, - 0.6115, 0.2532, 0.6522, 0.9992, 0.4170, 0.8240, 0.1605, - 0.8999, 0.9924, 0.9449, 0.9921, 0.5871, 0.5119, 0.1034, - 0.9235, 0.5163, 0.6439, 0.4713, 0.2523, 0.6757, 0.9199, - 0.4059, 0.3534, 0.1535, 0.6795, 0.7794, 0.2738, 0.5412, - 0.6588, 0.5045, 0.2670, 0.8080, 
0.1721, 0.4191, 0.2213, - 0.1215, 0.5896, 0.0276, 0.4051, 0.0606, 0.7105, 0.3300, - 0.4106, 0.7694, 0.3908, 0.3300, 0.1863, 0.9625, 0.9792, - 0.7697, 0.4510, 0.7286, 0.3380, 0.0833, 0.2386, 0.2422, - 0.2105, 0.7153, 0.0126, 0.8818, 0.6452, 0.7286, 0.3076, - 0.4200, 0.4807, 0.0386, 0.4801, 0.6112, 0.2188, 0.3858, - 0.6821, 0.3694, 0.1392, 0.6327, 0.5410, 0.3909, 0.7069, - 0.1312, 0.6250, 0.0480, 0.8549, 0.5135, 0.3555, 0.7340, - 0.5811, 0.9088, 0.1032, 0.1489, 0.2774, 0.2296, 0.4495, - 0.1487, 0.9200, 0.5402, 0.0202, 0.1078, 0.2516, 0.0530, - 0.5956, 0.3365, 0.3965, 0.5546, 0.6031, 0.2539, 0.5550, - 0.2459, 0.4351, 0.8734, 0.9592, 0.8627, 0.9645, 0.9888, - 0.0276, 0.4813, 0.8493, 0.9115, 0.1586, 0.0765, 0.6361, - 0.9602, 0.6837, 0.3314, 0.7764, 0.3676, 0.5147, 0.2879, - 0.7930, 0.3046, 0.9136, 0.3619, 0.8794, 0.7331, 0.7243, - 0.5387, 0.8338, 0.0246, 0.0508, 0.2668, 0.8956, 0.4222, - 0.5078, 0.8587, 0.1868, 0.2753, 0.0682, 0.7738, 0.7703, - 0.8842, 0.9299, 0.7459, 0.0157, 0.4309, 0.8926, 0.2625, - 0.8518, 0.5178, 0.5814, 0.1994, 0.2824, 0.5835, 0.0235, - 0.5291, 0.9061, 0.9534, 0.0623, 0.2911, 0.7065, 0.7912, - 0.3245, 0.5227, 0.8463, 0.5932, 0.9274, 0.1290, 0.1611, - 0.8205, 0.7713, 0.8694, 0.8998, 0.4222, 0.2481, 0.8594, - 0.5966, 0.3578, 0.7561, 0.5612, 0.9178, 0.8585, 0.6036, - 0.8103, 0.3002, 0.8454, 0.2318, 0.3982, 0.7981, 0.3046, - 0.4387, 0.6304, 0.9387, 0.6685, 0.0773, 0.6743, 0.7880, - 0.2724, 0.2127, 0.2579, 0.6638, 0.3307, 0.4078, 0.4611, - 0.2895, 0.5460, 0.4074, 0.2836, 0.0658, 0.5132, 0.7184, - 0.0086, 0.2446, 0.0312, 0.3818, 0.3470, 0.9987, 0.8897, - 0.4455, 0.4200, 0.2110, 0.0170, 0.2269, 0.7015, 0.4058, - 0.9687, 0.0766, 0.1367, 0.6704, 0.0153, 0.3212, 0.0176, - 0.4644, 0.2227, 0.7908, 0.0782, 0.2919, 0.8451, 0.5976, - 0.2473, 0.4640, 0.5505, 0.5809, 0.2989, 0.1468, 0.9500, - 0.3546, 0.0940, 0.8787, 0.7992, 0.7177, 0.5259, 0.8114, - 0.0774, 0.0167, 0.2662, 0.3178, 0.3026, 0.5005, 0.5937, - 0.5755, 0.4120, 0.1624, 0.4236, 0.9572, 0.1195, 0.8325, - 0.5589, 0.0781, 0.5692, 0.2217, 0.7537, 0.5700, 0.7103, - 0.7619, 0.2773, 0.0054, 0.4076, 0.0417, 0.9429, 0.6666, - 0.3130, 0.3752, 0.5850, 0.5828, 0.3646, 0.6606, 0.8812, - 0.8699, 0.6326, 0.1294, 0.3652, 0.1063, 0.3989, 0.2296, - 0.2561, 0.2038, 0.4856, 0.2115, 0.2677, 0.5171, 0.1168, - 0.0241, 0.6735, 0.7119, 0.7784, 0.2445, 0.7568, 0.4921, - 0.7068, 0.3231, 0.3789, 0.2044, 0.4127, 0.4285, 0.4637, - 0.9276, 0.2871, 0.0196, 0.6856, 0.2086, 0.8049, 0.5030, - 0.1551, 0.8769, 0.7722, 0.6771, 0.2796, 0.5061, 0.0514, - 0.2069, 0.4519, 0.6146, 0.0401, 0.7884, 0.8928, 0.1753, - 0.7534, 0.6318, 0.5825, 0.1058, 0.3381, 0.6609, 0.8592, - 0.1686, 0.2129, 0.2167, 0.4535, 0.8999, 0.2380, 0.4550, - 0.3194, 0.8715, 0.6805, 0.2977, 0.1180, 0.4624, 0.3632, - 0.0919, 0.3659, 0.3251, 0.3164, 0.3733, 0.2507, 0.2841, - 0.6267, 0.7667, 0.1658, 0.2106, 0.0127, 0.6038, 0.9089, - 0.4336, 0.4538, 0.2798, 0.7512, 0.9252, 0.8121, 0.4723, - 0.7860, 0.4615, 0.2088, 0.9765, 0.2109, 0.9559, 0.5039, - 0.9927, 0.1420, 0.6840, 0.8661, 0.4242, 0.4038, 0.3666, - 0.7210, 0.8886, 0.9968, 0.3170, 0.9198, 0.6665, 0.7540, - 0.6145, 0.7453, 0.6836, 0.0079, 0.6941, 0.0866, 0.0031, - 0.2017, 0.0065, 0.0384, 0.9053, 0.7335, 0.9115, 0.2172, - 0.2567, 0.1205, 0.4361, 0.8961, 0.5130, 0.3240, 0.8733, - 0.7390, 0.2479, 0.0175, 0.9269, 0.1145, 0.4122, 0.4735, - 0.1742, 0.2389, 0.1684, 0.3168, 0.1924, 0.1236, 0.4187, - 0.4623, 0.9975, 0.8678, 0.2889, 0.3517, 0.7091, 0.3332, - 0.9999, 0.5389, 0.2389, 0.1080, 0.5787, 0.9800, 0.8558, - 0.0630, 0.0153, 0.1451, 0.4288, 0.5911, 0.4383, 
0.7038, - 0.9016, 0.9862, 0.5629, 0.0913, 0.5216, 0.0805, 0.8630, - 0.5475, 0.9317, 0.0118, 0.3983, 0.7472, 0.6659, 0.5661, - 0.8338, 0.0422, 0.5743, 0.1600, 0.5976, 0.2809, 0.1125, - 0.4399, 0.7717, 0.4561, 0.7256, 0.1621, 0.0820, 0.8140, - 0.0876, 0.8240, 0.8347, 0.2352, 0.3461, 0.5639, 0.8348, - 0.2411, 0.2358, 0.1766, 0.8525, 0.3548, 0.9754, 0.6037, - 0.4568, 0.0645, 0.2813, 0.1774, 0.7397, 0.4609, 0.8191, - 0.3010, 0.4836, 0.9086, 0.3092, 0.8842, 0.1688, 0.5204, - 0.4125, 0.8982, 0.5637, 0.8321, 0.1566, 0.9723, 0.6668, - 0.9762, 0.3312, 0.8818, 0.1159, 0.9962, 0.9987, 0.3133, - 0.5044, 0.1359, 0.4905, 0.8045, 0.5800, 0.3357, 0.6064, - 0.3912, 0.1467, 0.8994, 0.9625, 0.4036, 0.9552, 0.3418, - 0.0859, 0.0108, 0.4910, 0.3283, 0.5233, 0.5467, 0.3156, - 0.8281, 0.7394, 0.2690, 0.9744, 0.6414, 0.6867, 0.8652, - 0.1889, 0.1663, 0.1405, 0.4948, 0.0840, 0.6008, 0.5443, - 0.4155, 0.4281, 0.2116, 0.4298, 0.8508, 0.1255, 0.4732, - 0.9480, 0.7508, 0.4759, 0.8987, 0.4490, 0.0439, 0.9165, - 0.1613, 0.1714, 0.5982, 0.2093, 0.4536, 0.6336, 0.0431, - 0.5964, 0.6823, 0.9029, 0.0699, 0.3806, 0.0497, 0.7357, - 0.1694, 0.0286, 0.6138, 0.3334, 0.6829, 0.9751, 0.2773, - 0.8377, 0.5984, 0.0468, 0.8367, 0.4483, 0.8092, 0.3294, - 0.4584, 0.5056, 0.4772, 0.6300, 0.2983, 0.0246, 0.2509, - 0.0554, 0.8305, 0.0779, 0.2888, 0.3687, 0.6734, 0.4892, - 0.6403, 0.8594, 0.3164, 0.2151, 0.6638, 0.4833, 0.2775, - 0.8670, 0.3530, 0.3362, 0.0667, 0.0039, 0.2958, 0.4611, - 0.8969, 0.4913, 0.5028, 0.2409, 0.4242, 0.4639, 0.6091, - 0.4342, 0.7204, 0.4175, 0.9777, 0.0265, 0.9729, 0.7386, - 0.0448, 0.2047, 0.9187, 0.1396, 0.6324, 0.1331, 0.4202, - 0.8775, 0.7537, 0.5040, 0.3992, 0.6867, 0.4340, 0.9415, - 0.1384, 0.3816, 0.3686, 0.8722, 0.6098, 0.9188, 0.7850, - 0.8822, 0.4949, 0.0392, 0.6331, 0.0948, 0.8931, 0.8620, - 0.1994, 0.6761, 0.0973, 0.2537, 0.4015, 0.3324, 0.4775, - 0.8446, 0.8239, 0.8515, 0.2371, 0.2909, 0.6105, 0.7363, - 0.1806, 0.2458, 0.4418, 0.9172, 0.1027, 0.3665, 0.7822, - 0.8082, 0.3933, 0.0369, 0.7168, 0.8492, 0.4962, 0.1940, - 0.6763, 0.0429, 0.4543, 0.6495, 0.8034, 0.5999, 0.2784, - 0.9127, 0.6404, 0.0464, 0.5851, 0.6435, 0.9494, 0.8839, - 0.8651, 0.1417, 0.8417, 0.9900, 0.9699, 0.7335, 0.6386, - 0.9418, 0.5679, 0.2693, 0.6588, 0.1472, 0.9922, 0.3796, - 0.8868, 0.1143, 0.8328, 0.3848, 0.1063, 0.7051, 0.4142, - 0.7126, 0.4323, 0.6031, 0.5715, 0.2782, 0.6345, 0.3381, - 0.8477, 0.7664, 0.2106, 0.5225, 0.1520, 0.3002, 0.7031, - 0.9238, 0.4058, 0.1139, 0.1480, 0.9764, 0.4270, 0.2600, - 0.9333, 0.2794, 0.6468, 0.6423, 0.1334, 0.0294, 0.5212, - 0.8696, 0.2381, 0.9706, 0.6807, 0.1174, 0.1191, 0.9581, - 0.2531, 0.5576, 0.1977, 0.3556, 0.0861, 0.6770, 0.3195, - 0.5369, 0.7386, 0.0730, 0.9206, 0.7720, 0.6605, 0.4471, - 0.0248, 0.9663, 0.1581, 0.8167, 0.5993, 0.8833, 0.0649, - 0.0611, 0.8784, 0.2448, 0.4650, 0.9544, 0.9479, 0.1518, - 0.0437, 0.9857, 0.3792, 0.0676, 0.3550, 0.6595, 0.0586, - 0.4581, 0.2105, 0.0507, 0.6273, 0.4066, 0.6590, 0.4389, - 0.7618, 0.7600, 0.5286, 0.2619, 0.2052, 0.1600, 0.6812, - 0.2262, 0.1949, 0.2984, 0.1847, 0.7597, 0.8327, 0.3651, - 0.6843, 0.6989, 0.5748, 0.8175, 0.9017, 0.5440, 0.9000, - 0.8242, 0.3536, 0.6788, 0.4515, 0.6560, 0.7385, 0.1301, - 0.6121, 0.9098, 0.6224, 0.9725, 0.4414, 0.2350, 0.6079, - 0.7587, 0.7617, 0.6482, 0.2990, 0.3517, 0.1562, 0.3369, - 0.3247, 0.5464, 0.7908, 0.7880, 0.4408, 0.8916, 0.2765, - 0.4959, 0.0729, 0.2794, 0.6614, 0.8756, 0.3920, 0.6706, - 0.6288, 0.4740, 0.9397, 0.6608, 0.2196, 0.0473, 0.5088, - 0.0610, 0.9391, 0.7491, 0.7894, 0.7759, 0.1647, 0.6719, - 
0.4100, 0.2124, 0.9261, 0.1591, 0.3181, 0.7087, 0.7252, - 0.2191, 0.3099, 0.6307, 0.3026, 0.4511, 0.2841, 0.7426, - 0.5396, 0.7791, 0.6038, 0.1869, 0.7513, 0.3030, 0.5002, - 0.3280, 0.7800, 0.6094, 0.9292, 0.6542, 0.8481, 0.9804, - 0.8992, 0.6104, 0.2123, 0.5077, 0.0578, 0.7525, 0.6613, - 0.4603, 0.7024, 0.4804, 0.7634, 0.3067, 0.6368]), + col_indices=tensor([8405, 3204, 2972, 1728, 8539, 2449, 4306, 8461, 1900, + 6394, 6467, 3043, 8848, 7120, 1025, 8428, 162, 5645, + 4597, 4341, 6591, 931, 8370, 6722, 5467, 1067, 8460, + 358, 8224, 9869, 799, 1596, 7427, 7388, 1226, 228, + 3271, 3170, 8096, 5302, 5790, 6514, 8748, 5918, 9971, + 197, 1618, 1351, 7802, 8964, 8876, 631, 6847, 7452, + 8667, 4699, 8859, 7586, 3862, 7337, 1550, 2860, 2842, + 7405, 9686, 3858, 5452, 7008, 3461, 4520, 9246, 2432, + 388, 8869, 6514, 9291, 553, 7274, 7695, 1650, 4314, + 5529, 1110, 479, 1479, 6095, 480, 1763, 885, 3827, + 1596, 1833, 9980, 2202, 3640, 2433, 9809, 4417, 42, + 5001, 3874, 5683, 9524, 7821, 9322, 7548, 8980, 1058, + 1834, 2182, 3745, 7455, 5264, 3172, 6552, 2499, 5339, + 669, 7014, 8338, 9574, 8079, 7414, 497, 6413, 6175, + 1745, 1605, 5680, 2366, 3485, 7277, 7049, 5308, 4601, + 6996, 608, 7351, 433, 1200, 3985, 7040, 4499, 5722, + 2838, 3269, 7195, 1460, 3492, 4918, 6475, 2876, 5539, + 9021, 883, 4541, 550, 6907, 7264, 1959, 8534, 2352, + 3967, 8175, 2007, 3391, 161, 8993, 3179, 9912, 3504, + 1358, 9085, 8840, 7327, 2853, 9841, 2747, 8704, 4016, + 1399, 8285, 9608, 6182, 435, 8047, 2340, 7196, 8583, + 4449, 8395, 7191, 6211, 6737, 5071, 1503, 6589, 3001, + 8832, 9591, 6519, 1136, 9584, 173, 6388, 8695, 6693, + 2212, 7406, 5349, 759, 895, 7243, 4123, 5188, 8390, + 5292, 5367, 4589, 9146, 9019, 6770, 4553, 5599, 9976, + 6664, 3656, 5631, 1687, 984, 9730, 1042, 6264, 9526, + 6361, 1307, 3837, 9344, 2770, 2872, 4977, 201, 7501, + 6962, 4107, 8302, 6006, 7694, 3046, 630, 5806, 9311, + 8170, 7137, 2758, 4301, 4845, 9972, 2442, 169, 1529, + 1585, 9103, 6352, 7123, 4320, 7368, 5229, 7599, 7993, + 6455, 3807, 3719, 6097, 8999, 6930, 6317, 8987, 2624, + 3016, 1106, 1053, 2742, 4637, 2308, 9948, 2185, 1116, + 8046, 7353, 4954, 1307, 1393, 558, 333, 4918, 1648, + 9699, 529, 9667, 2372, 9402, 2559, 2928, 3450, 445, + 684, 4219, 4693, 7234, 7033, 7381, 5562, 9329, 5262, + 9841, 3327, 3081, 7086, 5427, 5554, 2209, 4920, 3586, + 1554, 5195, 6450, 5377, 5882, 7767, 8796, 9954, 9884, + 489, 1235, 5988, 7518, 1904, 2937, 1176, 2237, 8892, + 1707, 910, 6150, 6524, 4264, 2947, 7508, 5219, 6441, + 458, 6600, 7271, 938, 2553, 3230, 7362, 1236, 1004, + 8394, 7534, 9345, 534, 8107, 4951, 5678, 8737, 8813, + 8392, 4806, 248, 938, 2781, 9247, 3533, 5836, 7509, + 4314, 2210, 4055, 5120, 7958, 7227, 9610, 7677, 7603, + 6044, 2573, 7763, 8822, 1719, 8414, 2441, 7305, 1404, + 2674, 2159, 2192, 4505, 4495, 3558, 3314, 4373, 5845, + 8736, 1666, 601, 537, 4862, 8467, 9724, 3911, 180, + 8536, 4293, 347, 9155, 8769, 2154, 1519, 1547, 1597, + 4410, 3665, 2136, 6882, 229, 3733, 6221, 6643, 6773, + 9907, 5437, 8634, 1927, 7389, 6061, 9305, 3836, 4165, + 9342, 3182, 2163, 1583, 4012, 3842, 6660, 7019, 4730, + 406, 9927, 337, 9643, 1818, 5513, 6747, 2384, 7976, + 2462, 2918, 305, 5957, 4678, 5407, 5643, 9444, 5111, + 5676, 8986, 7669, 6427, 6280, 8427, 7314, 1189, 2454, + 1095, 6982, 6134, 4607, 6235, 6593, 4655, 2588, 7493, + 2103, 9079, 9839, 8621, 4926, 5268, 9380, 3396, 3865, + 8970, 8710, 5322, 2726, 8902, 8485, 8198, 2214, 7513, + 9890, 1175, 3933, 425, 1493, 1239, 8052, 4759, 4260, + 1277, 6252, 8290, 8383, 4260, 1699, 3566, 8316, 
3964, + 2549, 1378, 1827, 6076, 1532, 6690, 1969, 8556, 8482, + 6938, 5258, 9178, 3497, 7282, 238, 4318, 2879, 5378, + 5412, 9996, 1146, 3590, 7729, 4623, 5410, 962, 895, + 5703, 2681, 8847, 596, 8814, 5480, 5812, 748, 5769, + 305, 818, 6680, 2895, 4560, 8145, 3513, 6385, 175, + 6917, 9786, 3962, 774, 853, 7670, 4741, 5405, 4246, + 3893, 7991, 1474, 3551, 6159, 6090, 4927, 3224, 1640, + 3114, 8015, 2173, 255, 1267, 9667, 1717, 3042, 4507, + 4918, 9570, 8005, 6338, 2115, 6181, 3834, 6104, 4530, + 1148, 5569, 4429, 6800, 7543, 387, 3312, 699, 866, + 2506, 506, 1904, 8789, 6893, 256, 1464, 5551, 3588, + 8688, 2373, 7227, 8928, 4647, 8092, 4687, 72, 7357, + 1578, 4474, 5418, 9053, 1108, 5794, 6803, 3821, 511, + 300, 3108, 8844, 6337, 7387, 4030, 579, 4009, 6440, + 3869, 3064, 993, 7419, 245, 2473, 8269, 8264, 1085, + 4156, 5827, 9876, 5799, 2174, 4305, 259, 9456, 4955, + 4208, 5083, 43, 4387, 9814, 9360, 386, 1752, 4785, + 3446, 5062, 9752, 2311, 8955, 622, 6776, 8574, 9302, + 4790, 655, 4831, 9840, 7127, 9105, 1431, 5726, 3388, + 3657, 4878, 2354, 3578, 2499, 3866, 8509, 1881, 7335, + 6601, 3341, 6254, 2273, 7740, 2637, 1115, 975, 3329, + 1775, 1559, 5718, 7519, 5942, 322, 8045, 1340, 4767, + 5833, 7282, 5554, 2887, 9749, 9558, 8359, 6855, 9584, + 4086, 4753, 5191, 753, 5406, 9244, 9654, 8447, 9082, + 6001, 194, 9993, 4566, 3951, 129, 1731, 4478, 191, + 6836, 5128, 382, 9834, 8875, 9533, 6557, 5498, 2227, + 9403, 5263, 2307, 884, 3491, 7247, 1837, 2469, 267, + 6322, 6447, 6929, 4924, 5796, 9524, 2255, 8155, 2950, + 4640, 7562, 6543, 2933, 8264, 4913, 8011, 9225, 3968, + 1410, 2024, 1530, 4793, 5507, 8763, 3239, 7895, 8148, + 6827, 8356, 1854, 843, 6096, 2804, 1909, 9785, 6387, + 7673, 6705, 8343, 3849, 3174, 5910, 4704, 7394, 474, + 3321, 3717, 2224, 2666, 6865, 6293, 16, 7369, 4709, + 4173, 8279, 9907, 2855, 3867, 9278, 5385, 1832, 8290, + 2955, 7203, 4127, 4752, 1629, 9848, 3826, 5429, 5958, + 68, 4199, 2086, 7910, 7713, 6999, 3504, 5910, 9514, + 7344, 2242, 8520, 7674, 3276, 5682, 3393, 293, 9302, + 6335, 6738, 7598, 2303, 1242, 1489, 1391, 1348, 8026, + 1632, 4225, 9575, 6443, 6778, 7864, 3100, 7895, 7267, + 278, 1598, 6920, 2130, 6708, 2222, 7860, 9683, 5596, + 5433, 5369, 4302, 7757, 5029, 9720, 6442, 3922, 4883, + 8184, 3102, 241, 2060, 2051, 5854, 6466, 1560, 8294, + 8353, 9744, 8274, 50, 4886, 8756, 6979, 3849, 8781, + 3412, 12, 8029, 6788, 2744, 6946, 3497, 5974, 7756, + 1348, 4878, 526, 5253, 256, 9339, 8706, 4347, 7913, + 4622, 5510, 9337, 6024, 8292, 5570, 4984, 2851, 9389, + 3654, 9014, 6218, 1407, 4058, 6675, 7956, 9094, 2413, + 7488, 6792, 9677, 4020, 3105, 1693, 6213, 4371, 6287, + 30, 4725, 6267, 3081, 8936, 9823, 9570, 4733, 6783, + 273, 958, 6582, 615, 4897, 8951, 4570, 1341, 3248, + 1420, 502, 3369, 8369, 6671, 9681, 3054, 3003, 6451, + 9716, 1681, 7101, 39, 5966, 8111, 8216, 6635, 8101, + 2136, 9235, 3576, 8397, 3583, 4224, 9031, 9390, 8092, + 3880]), + values=tensor([6.7284e-01, 2.3900e-01, 6.5201e-01, 9.5280e-01, + 4.5114e-01, 2.9471e-01, 4.5734e-01, 6.8893e-01, + 5.2672e-01, 3.3320e-01, 6.4197e-02, 6.4944e-01, + 2.9724e-01, 7.6196e-01, 1.6004e-01, 2.3942e-01, + 6.5243e-01, 2.4275e-01, 8.8345e-01, 6.6883e-01, + 2.9906e-01, 5.0195e-01, 6.5408e-01, 6.7996e-01, + 5.6406e-01, 7.9788e-01, 5.4481e-01, 4.3555e-01, + 8.6063e-01, 9.1411e-02, 6.0713e-01, 5.7997e-01, + 6.0640e-02, 4.9382e-01, 4.6011e-01, 5.4951e-01, + 4.7349e-02, 2.3234e-02, 1.4015e-01, 2.8620e-01, + 2.1442e-01, 8.9791e-02, 4.5344e-01, 4.3583e-01, + 6.1989e-01, 1.5062e-01, 7.2646e-01, 5.3365e-01, + 8.6184e-01, 
2.0673e-01, 7.6212e-01, 5.8155e-01, + 4.5740e-01, 2.8329e-01, 4.9478e-01, 9.3009e-01, + 1.8286e-01, 5.6947e-02, 6.0118e-01, 4.3733e-01, + 5.5588e-01, 4.2838e-01, 5.6459e-01, 8.4035e-01, + 8.7621e-02, 1.9246e-02, 9.0031e-01, 1.7228e-01, + 5.8043e-02, 8.6147e-01, 5.3967e-01, 1.0401e-01, + 6.4297e-01, 2.3820e-01, 5.0347e-01, 7.6964e-02, + 8.7759e-01, 2.4147e-01, 8.7855e-01, 4.1691e-01, + 6.3752e-01, 7.0382e-01, 9.9326e-01, 4.1063e-01, + 4.4622e-01, 2.4364e-01, 4.0963e-01, 3.1693e-02, + 8.6700e-01, 4.1666e-01, 9.4448e-01, 8.2675e-01, + 4.7751e-01, 9.2695e-01, 6.0314e-01, 9.2138e-01, + 1.4625e-01, 2.5206e-01, 9.0453e-01, 2.4382e-01, + 1.5897e-01, 2.4499e-01, 3.4191e-01, 6.7168e-01, + 4.1488e-02, 9.7071e-01, 5.5967e-02, 2.4740e-01, + 7.2956e-03, 2.7270e-01, 7.5182e-01, 2.0828e-01, + 1.0051e-01, 2.5177e-01, 6.2524e-01, 9.7516e-01, + 8.6156e-01, 5.2042e-01, 8.3215e-01, 8.7360e-01, + 1.5606e-01, 3.2183e-01, 4.2139e-01, 2.0422e-01, + 3.9586e-01, 8.0727e-01, 1.0302e-01, 1.1382e-01, + 5.1004e-01, 3.2795e-01, 8.2137e-01, 3.2261e-01, + 7.0038e-02, 1.7923e-01, 9.5610e-01, 4.6698e-01, + 9.8470e-01, 4.5587e-01, 8.0670e-01, 4.6605e-01, + 1.8780e-01, 6.4039e-01, 9.3114e-01, 3.0272e-01, + 9.8498e-01, 2.2248e-01, 1.9831e-01, 7.5753e-01, + 8.2018e-01, 6.7423e-01, 6.8698e-01, 9.0892e-02, + 2.7236e-01, 5.4472e-01, 3.4483e-02, 6.8456e-02, + 1.1555e-01, 8.6634e-03, 4.1766e-01, 3.1723e-01, + 1.2976e-01, 3.8397e-03, 2.7749e-01, 9.9115e-01, + 7.7752e-01, 5.0544e-01, 2.6415e-01, 6.3775e-01, + 4.7887e-01, 3.7576e-01, 7.9856e-01, 9.4711e-01, + 6.0286e-01, 6.8308e-01, 9.6113e-01, 4.9255e-01, + 3.6775e-01, 5.2263e-01, 6.2766e-01, 7.3977e-01, + 9.4424e-01, 9.8500e-01, 3.9607e-01, 1.6933e-01, + 8.8363e-01, 1.0458e-01, 2.9988e-01, 1.3942e-02, + 4.8825e-01, 1.5640e-01, 5.2429e-01, 3.9715e-01, + 7.5114e-01, 1.6761e-01, 3.3442e-04, 3.6434e-01, + 3.0004e-01, 1.1102e-01, 8.9603e-02, 6.7442e-02, + 2.2614e-01, 7.4541e-03, 4.0538e-01, 8.9735e-01, + 5.3553e-01, 1.6095e-01, 1.0884e-01, 8.0001e-01, + 8.3703e-01, 8.8172e-01, 8.4030e-01, 5.3336e-01, + 7.8335e-01, 8.2011e-01, 5.7604e-01, 7.5220e-01, + 6.6944e-01, 5.5728e-01, 1.0526e-01, 4.8634e-01, + 4.7077e-01, 8.4887e-03, 9.7680e-01, 4.9928e-01, + 7.9986e-01, 8.0485e-01, 2.2445e-01, 2.1596e-01, + 3.3115e-01, 7.4942e-01, 2.2026e-01, 3.5363e-01, + 5.2340e-02, 3.3892e-01, 1.4778e-01, 8.8866e-01, + 3.3457e-01, 5.6666e-01, 4.7224e-02, 5.6940e-01, + 9.2598e-01, 6.5532e-01, 2.1104e-01, 7.0581e-01, + 7.1450e-01, 9.9594e-01, 5.0111e-01, 9.0606e-01, + 7.1402e-01, 1.0611e-01, 1.9124e-01, 4.4886e-01, + 4.1790e-01, 6.3145e-01, 1.3915e-01, 6.5452e-02, + 2.2336e-01, 4.4936e-01, 1.8306e-01, 9.5797e-01, + 7.2508e-01, 7.4360e-01, 2.9959e-01, 3.6232e-01, + 2.6235e-01, 7.3433e-01, 3.7883e-01, 3.1894e-01, + 2.2008e-01, 3.6052e-01, 1.1438e-01, 3.3992e-01, + 7.6666e-01, 4.5329e-01, 6.2006e-01, 1.4753e-01, + 3.5206e-01, 7.6845e-01, 1.5007e-01, 9.4915e-01, + 4.6136e-01, 3.0307e-01, 8.1058e-01, 2.3801e-01, + 5.6631e-01, 1.3771e-01, 3.6326e-01, 9.9517e-01, + 1.9698e-01, 8.0361e-01, 5.5305e-01, 4.6679e-01, + 6.3218e-01, 3.5439e-01, 8.6998e-01, 9.5586e-01, + 2.7042e-02, 5.3393e-01, 3.9862e-02, 5.7918e-01, + 3.3490e-01, 5.5427e-01, 3.3072e-01, 5.8553e-01, + 2.9713e-01, 6.6912e-02, 8.2524e-01, 5.4715e-01, + 8.8384e-01, 5.1390e-01, 1.1852e-01, 1.4378e-01, + 6.4392e-01, 8.2779e-01, 7.3467e-01, 5.8504e-01, + 7.4839e-01, 6.7309e-01, 7.0892e-01, 9.0289e-01, + 7.0032e-01, 1.1911e-01, 4.2782e-01, 6.6641e-01, + 1.4603e-01, 8.8407e-01, 9.8017e-01, 2.0924e-01, + 8.9195e-01, 1.7585e-01, 3.8335e-01, 6.2602e-01, + 6.7197e-01, 
7.2299e-01, 3.1231e-01, 5.4222e-01, + 9.4541e-01, 1.8304e-01, 6.5121e-01, 8.6184e-02, + 7.7775e-01, 8.7839e-01, 2.3229e-01, 5.9250e-01, + 8.6499e-01, 2.1107e-01, 9.0432e-02, 2.7555e-01, + 4.8389e-01, 8.4567e-01, 4.5383e-01, 9.4194e-01, + 5.4357e-01, 1.3152e-01, 4.3346e-01, 7.8606e-01, + 4.2547e-01, 3.4762e-01, 4.7979e-01, 7.8569e-01, + 9.6889e-01, 6.9702e-01, 4.9604e-01, 8.3090e-01, + 4.1678e-01, 5.9352e-01, 3.8124e-01, 4.6377e-01, + 1.0166e-01, 9.1961e-01, 1.5522e-01, 5.5934e-01, + 3.6562e-01, 8.3577e-01, 2.0207e-01, 4.2819e-01, + 1.2792e-01, 7.5772e-01, 8.6249e-01, 9.4417e-01, + 3.9444e-01, 4.0838e-01, 4.9816e-01, 4.3029e-01, + 4.5358e-01, 5.8395e-01, 4.4894e-01, 7.7585e-01, + 9.2699e-01, 8.9653e-02, 9.7928e-01, 8.9817e-03, + 1.8708e-01, 6.8393e-01, 7.2167e-01, 4.3107e-01, + 6.7943e-01, 5.5751e-01, 9.3518e-01, 5.2987e-01, + 6.3457e-01, 3.6471e-01, 4.9452e-01, 7.4895e-01, + 6.8679e-01, 5.5312e-01, 5.2419e-01, 8.5460e-01, + 7.9391e-01, 1.7385e-01, 9.5587e-01, 9.9109e-02, + 7.3728e-02, 2.4872e-01, 9.1148e-01, 5.7418e-01, + 1.7148e-01, 1.8477e-01, 9.7908e-01, 8.8093e-01, + 4.0588e-01, 4.2187e-01, 3.0672e-01, 7.2382e-01, + 9.3326e-01, 2.6402e-01, 9.9244e-01, 5.2118e-01, + 6.1823e-01, 6.8477e-02, 5.8955e-01, 1.6019e-01, + 9.3195e-01, 5.2941e-01, 1.0774e-02, 6.1364e-01, + 7.6914e-01, 1.3894e-01, 1.7362e-01, 6.9496e-01, + 3.5245e-01, 1.1475e-02, 3.3801e-01, 4.4516e-01, + 9.5616e-01, 7.4985e-01, 8.5083e-01, 3.6594e-01, + 1.0235e-01, 7.0746e-01, 9.6979e-01, 1.3543e-02, + 2.8796e-01, 6.1949e-02, 2.6992e-02, 9.1273e-01, + 8.6556e-01, 3.0780e-01, 6.4665e-01, 1.8146e-01, + 7.5969e-01, 1.3735e-01, 8.1153e-01, 6.3759e-01, + 7.3130e-01, 1.5084e-01, 6.0445e-01, 5.2790e-02, + 3.1030e-01, 2.7397e-01, 3.7647e-01, 6.6786e-01, + 2.8898e-01, 5.3802e-01, 2.0706e-01, 9.8232e-01, + 4.0072e-01, 6.5602e-01, 2.5676e-01, 7.2448e-01, + 4.9669e-01, 1.5761e-01, 8.7466e-01, 5.5684e-01, + 8.7040e-02, 7.5995e-01, 1.1106e-01, 2.9085e-01, + 9.8035e-01, 6.7685e-01, 9.4294e-01, 8.9409e-01, + 2.6306e-01, 9.0530e-01, 9.5236e-01, 2.1010e-01, + 6.4626e-02, 7.5613e-01, 7.6627e-01, 8.9898e-01, + 5.6274e-01, 8.1495e-01, 1.5004e-01, 1.8230e-01, + 9.5290e-01, 6.5195e-01, 6.5254e-01, 8.8844e-01, + 1.8875e-01, 2.9033e-01, 6.9334e-01, 6.0301e-01, + 6.6077e-01, 4.4510e-01, 6.6792e-01, 7.1726e-01, + 9.1370e-02, 2.5121e-02, 7.5508e-01, 1.5975e-01, + 3.0899e-01, 9.5319e-01, 7.7881e-01, 7.2161e-01, + 6.4345e-01, 2.9532e-01, 6.8682e-01, 9.7396e-01, + 7.9147e-01, 2.1109e-01, 3.7081e-01, 3.6608e-01, + 4.9962e-01, 7.7670e-01, 1.4282e-01, 1.7623e-01, + 5.5143e-02, 1.5004e-01, 9.1042e-01, 7.6209e-01, + 9.2178e-01, 5.2719e-01, 6.1102e-01, 3.1794e-01, + 3.0555e-01, 6.3926e-01, 4.4607e-01, 8.3964e-01, + 4.8160e-02, 3.3323e-02, 5.0311e-01, 9.8431e-02, + 4.5379e-01, 2.3119e-01, 8.3049e-01, 2.5630e-01, + 6.0934e-02, 7.5036e-01, 4.7201e-01, 4.5936e-01, + 1.9975e-01, 4.9073e-01, 5.3683e-01, 9.7363e-01, + 4.2561e-01, 9.1606e-01, 3.5696e-02, 8.2224e-01, + 3.8126e-01, 1.0790e-01, 2.8611e-01, 2.2295e-02, + 5.3869e-01, 9.0168e-01, 8.7743e-01, 8.1037e-02, + 4.1938e-01, 2.5051e-01, 5.7514e-02, 6.0531e-02, + 7.7808e-01, 7.9379e-01, 9.1350e-01, 3.9917e-01, + 5.4794e-02, 8.1496e-01, 6.6088e-01, 5.8484e-01, + 4.2305e-01, 4.3370e-01, 7.3297e-02, 5.5913e-02, + 8.4045e-01, 7.6105e-01, 8.7528e-01, 4.3202e-01, + 8.9172e-01, 8.6879e-01, 4.1518e-01, 1.7527e-01, + 9.4501e-01, 8.0570e-01, 1.0758e-01, 3.9354e-01, + 3.1546e-01, 6.2608e-01, 7.2196e-01, 1.5551e-01, + 9.9974e-01, 3.9102e-01, 1.6842e-02, 1.9328e-02, + 5.4845e-01, 2.7665e-01, 2.8637e-01, 4.9182e-01, + 6.7504e-01, 
7.2587e-01, 8.8467e-01, 1.0607e-01, + 7.7312e-01, 4.8611e-01, 2.5318e-01, 3.3891e-01, + 7.7335e-01, 6.1049e-01, 6.6088e-01, 8.6657e-01, + 2.0273e-01, 7.8941e-01, 8.9409e-01, 5.7757e-01, + 9.6393e-01, 7.4716e-01, 5.9547e-02, 1.8865e-01, + 6.3501e-01, 8.7404e-01, 4.4095e-01, 3.6506e-01, + 7.2481e-01, 8.9206e-01, 8.4170e-02, 9.4447e-01, + 6.9089e-01, 3.9458e-01, 8.6112e-01, 7.5526e-01, + 9.9067e-01, 2.6171e-01, 8.0898e-01, 5.9769e-01, + 2.7331e-01, 3.2705e-01, 3.9083e-01, 9.6094e-01, + 9.2580e-01, 6.7605e-02, 3.1709e-01, 6.8023e-01, + 4.8974e-01, 7.5589e-02, 9.2221e-01, 7.5469e-01, + 8.8183e-01, 1.6377e-01, 9.6375e-03, 3.3793e-01, + 2.3492e-01, 3.4106e-01, 5.8479e-01, 5.9416e-01, + 6.7823e-01, 8.4072e-02, 9.1279e-01, 3.4327e-01, + 8.9723e-01, 2.0385e-01, 8.4832e-01, 2.4326e-01, + 2.9757e-01, 1.5411e-01, 4.1549e-01, 9.9754e-01, + 8.2215e-02, 1.1129e-01, 3.2030e-01, 7.3338e-01, + 4.3526e-01, 5.5518e-01, 5.3489e-01, 2.7224e-01, + 6.3943e-01, 9.6967e-01, 2.1444e-01, 5.0571e-01, + 1.1652e-01, 5.6648e-01, 5.9982e-01, 2.0260e-01, + 9.3364e-02, 4.0195e-01, 5.0089e-01, 5.9360e-01, + 8.4155e-01, 9.4135e-01, 4.3227e-01, 3.0775e-01, + 2.9982e-01, 5.3979e-01, 7.2040e-01, 3.2556e-01, + 6.4445e-01, 7.0885e-01, 1.5769e-01, 9.7550e-01, + 6.2021e-01, 2.2997e-01, 6.1931e-02, 8.9500e-01, + 3.0950e-02, 3.1290e-01, 1.1484e-01, 8.2680e-01, + 4.0950e-01, 8.7252e-01, 4.5743e-01, 4.9794e-01, + 1.8726e-01, 6.5377e-01, 1.4108e-01, 8.0440e-01, + 6.4258e-01, 5.2127e-01, 7.2231e-01, 1.8995e-01, + 5.9371e-01, 4.1550e-01, 6.8761e-01, 5.0838e-01, + 7.7081e-01, 1.0324e-01, 7.1560e-01, 8.5084e-01, + 6.3916e-01, 3.8309e-01, 9.0713e-01, 9.3477e-01, + 2.3406e-01, 5.1308e-01, 1.9473e-01, 5.5402e-01, + 5.1634e-01, 8.8753e-01, 1.4013e-01, 1.9051e-01, + 2.2020e-01, 8.1057e-01, 7.7292e-01, 6.7562e-01, + 8.2572e-01, 8.8934e-01, 4.1165e-01, 1.4867e-01, + 6.1045e-01, 6.9009e-01, 1.3321e-01, 7.2085e-01, + 9.1150e-01, 5.1073e-01, 2.7697e-01, 8.0230e-01, + 6.6664e-01, 6.0306e-01, 4.0895e-01, 4.3363e-01, + 7.6798e-01, 2.2873e-01, 8.6343e-01, 5.4334e-01, + 1.4972e-01, 7.2367e-01, 6.8744e-01, 4.9620e-01, + 2.0397e-01, 5.8321e-01, 2.6687e-01, 1.4576e-01, + 4.7357e-02, 8.5024e-01, 6.9078e-01, 9.6730e-01, + 7.6547e-02, 6.7814e-02, 5.9450e-01, 6.7249e-01, + 2.7619e-01, 3.7984e-02, 2.5022e-01, 4.9006e-01, + 3.5482e-01, 5.7049e-01, 9.3265e-01, 8.1548e-01, + 9.8389e-01, 1.6947e-01, 3.5708e-01, 7.1355e-01, + 3.4123e-01, 9.8439e-01, 3.9709e-01, 4.5721e-01, + 9.0098e-01, 7.7299e-01, 1.9546e-01, 2.6807e-01, + 6.7456e-01, 9.2008e-02, 1.2200e-01, 7.2725e-01, + 2.9009e-01, 8.8888e-01, 7.9591e-01, 6.2024e-01, + 2.6276e-01, 4.5005e-01, 2.9941e-02, 7.1456e-01, + 1.6561e-01, 6.7509e-01, 6.8661e-01, 3.8384e-03, + 3.4462e-01, 9.7537e-01, 2.7105e-01, 4.9538e-02, + 9.4146e-03, 5.8914e-02, 7.7462e-01, 6.9680e-01, + 5.8943e-01, 5.2246e-02, 5.5718e-01, 1.0008e-01, + 3.8825e-01, 6.2144e-01, 1.3224e-01, 3.6086e-01, + 9.3186e-02, 6.3189e-02, 1.2043e-01, 9.3079e-01, + 2.0862e-01, 8.0978e-01, 3.2035e-01, 4.1287e-01, + 7.7569e-01, 2.7014e-01, 2.7541e-01, 7.7254e-02, + 9.0426e-01, 9.0111e-01, 9.9649e-01, 2.8990e-01, + 9.9242e-01, 8.2893e-01, 7.4374e-01, 1.5144e-02, + 3.9035e-01, 2.3784e-01, 8.0811e-01, 6.5321e-01, + 2.1964e-01, 2.1249e-01, 7.5127e-01, 9.0744e-01, + 1.9959e-01, 5.4802e-01, 8.8538e-01, 9.1395e-01, + 5.3063e-01, 1.2008e-01, 1.7048e-01, 9.1207e-01, + 2.6278e-01, 9.2645e-01, 8.2747e-01, 9.6857e-01, + 9.8322e-02, 4.0264e-01, 6.5218e-01, 2.0678e-01, + 6.2241e-01, 7.8897e-01, 9.6576e-01, 7.5764e-01, + 9.4029e-01, 4.6050e-01, 8.1197e-01, 4.1446e-01, + 7.9137e-01, 
6.5767e-01, 4.6881e-01, 1.6327e-01, + 2.5526e-01, 3.8289e-01, 5.6239e-01, 1.0614e-01, + 1.8257e-01, 5.1021e-01, 2.2861e-03, 4.4588e-01, + 7.0344e-01, 3.1334e-02, 1.9093e-01, 2.4944e-01, + 1.4806e-01, 2.4972e-02, 2.6549e-01, 9.5238e-01, + 9.6324e-01, 6.6426e-01, 1.7107e-02, 7.4529e-02, + 9.3508e-01, 3.8114e-01, 6.4942e-01, 5.7663e-01, + 1.6566e-01, 5.8717e-02, 1.7405e-01, 6.9250e-01, + 6.8469e-01, 6.0282e-01, 8.9204e-01, 9.0987e-01, + 6.5370e-01, 1.3911e-01, 3.8425e-01, 4.3091e-01, + 2.2932e-01, 6.4166e-01, 2.4029e-01, 2.3272e-01, + 3.7573e-02, 5.0533e-01, 3.8266e-01, 3.7061e-01, + 2.7898e-01, 6.0683e-01, 8.0638e-01, 6.6009e-01, + 9.6427e-01, 1.8692e-01, 6.5648e-01, 2.9602e-01, + 8.7292e-01, 6.8054e-01, 4.3611e-01, 9.8152e-01, + 7.6621e-01, 4.0709e-02, 7.4696e-01, 1.7859e-01, + 6.9355e-03, 5.9218e-01, 3.9863e-01, 7.0453e-01, + 6.1117e-01, 6.5703e-01, 3.7845e-01, 1.5070e-01, + 8.6886e-01, 3.6431e-01, 3.6489e-01, 4.1253e-01, + 8.4863e-01, 3.4015e-01, 6.1953e-02, 6.5606e-01, + 5.4749e-01, 4.9369e-01, 9.7731e-01, 7.7142e-01, + 8.5965e-01, 8.2801e-01, 6.5078e-01, 8.7432e-01, + 9.0166e-01, 2.3195e-01, 6.4731e-01, 5.0317e-01, + 6.6529e-01, 6.9366e-01, 6.1387e-01, 9.8014e-01, + 3.6308e-01, 2.2972e-01, 5.2855e-02, 8.9525e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8762, 0.1751, 0.9607, ..., 0.4579, 0.7370, 0.8447]) +tensor([0.2994, 0.2473, 0.7750, ..., 0.9795, 0.5606, 0.1160]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -268,378 +375,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 0.015316009521484375 seconds +Time: 0.015331745147705078 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 68555 -ss 10000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.085230827331543} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 68485 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.103281259536743} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5549, 4311, 3241, 5542, 7620, 1988, 9532, 4491, 7500, - 4898, 3159, 1258, 6868, 335, 9155, 7115, 9938, 3068, - 4095, 7787, 2876, 5241, 3578, 3454, 7989, 7487, 5119, - 9460, 589, 1959, 8998, 1257, 4995, 3427, 1603, 8939, - 5925, 8459, 1032, 7868, 2079, 7633, 5392, 1940, 403, - 1541, 5211, 2249, 6169, 1536, 600, 3741, 1855, 2078, - 9467, 4376, 9886, 140, 7626, 2658, 4941, 2404, 1127, - 9326, 3998, 7203, 3548, 3845, 7541, 6009, 5880, 6410, - 8529, 8533, 3424, 4363, 2126, 3596, 3335, 3666, 9687, - 545, 5692, 9731, 4506, 9205, 961, 2972, 1083, 2410, - 9819, 5938, 1114, 738, 303, 4876, 608, 9920, 1043, - 2303, 3667, 1648, 2528, 6777, 9350, 5247, 2380, 2637, - 2051, 2983, 6090, 8443, 1485, 8407, 3852, 1200, 3501, - 2069, 8613, 311, 7821, 1110, 1230, 771, 2825, 2694, - 2831, 4517, 1613, 165, 4836, 9488, 5628, 1028, 8908, - 4232, 8787, 7402, 313, 3216, 3478, 8545, 1053, 6740, - 4075, 1942, 9361, 3819, 870, 2654, 7876, 1915, 6408, - 4819, 6068, 6165, 3715, 4592, 7086, 5068, 2369, 932, - 4895, 6064, 8025, 9284, 5199, 6337, 8148, 1045, 5028, - 501, 6243, 2043, 2137, 8791, 7547, 7501, 4276, 8489, - 8133, 1778, 2390, 2799, 8745, 7560, 5221, 552, 9809, - 1029, 9837, 3177, 7973, 1442, 4595, 6945, 371, 2047, - 2944, 2906, 6498, 4469, 5406, 5052, 7367, 933, 4447, - 2055, 2312, 5395, 3952, 712, 7636, 4991, 7542, 5071, - 4454, 4646, 9187, 2398, 7029, 242, 1303, 2080, 397, - 3296, 3588, 6026, 6025, 7104, 6897, 6113, 7634, 6734, - 4805, 8677, 4803, 2213, 4955, 4012, 3203, 2270, 8248, - 7769, 3668, 3425, 2655, 6439, 8554, 9308, 0, 7354, - 7253, 2174, 4104, 4444, 2290, 9428, 6233, 1866, 7587, - 3521, 719, 6920, 4896, 9532, 6571, 2230, 8694, 7233, - 1233, 636, 3293, 9047, 165, 7097, 9322, 6128, 7810, - 2149, 2341, 6666, 5136, 9684, 2136, 4547, 1610, 9629, - 4739, 7954, 7857, 5262, 7124, 5253, 140, 5788, 9593, - 808, 975, 2558, 2499, 1436, 2400, 9017, 178, 43, - 8816, 5056, 9697, 5824, 9857, 5803, 7232, 1952, 6200, - 4884, 4917, 8256, 7548, 7004, 880, 6839, 5048, 1085, - 6600, 2059, 1674, 219, 2299, 6249, 353, 8171, 7355, - 8683, 7342, 8967, 524, 9396, 4980, 9008, 5424, 845, - 8258, 3895, 3766, 7658, 6607, 7066, 4890, 6141, 7787, - 112, 7248, 7565, 9127, 1557, 5497, 8834, 1027, 911, - 5589, 8373, 3154, 5794, 7004, 4726, 2964, 5607, 7111, - 2769, 9679, 355, 4618, 1775, 9626, 5328, 1984, 2250, - 2090, 8585, 4747, 7137, 4998, 6039, 3795, 740, 1205, - 1619, 4997, 8185, 8214, 4583, 4093, 9636, 7799, 1874, - 7419, 1200, 8029, 7740, 3308, 5274, 3619, 2357, 899, - 1736, 4072, 415, 4070, 2248, 4814, 2946, 8794, 9445, - 6337, 9432, 4000, 7916, 9175, 9859, 7482, 7810, 6689, - 2981, 9118, 2494, 5004, 7307, 6762, 3643, 6961, 6980, - 2197, 3909, 3079, 438, 6429, 795, 73, 1026, 979, - 8384, 3755, 7922, 411, 4115, 3082, 528, 3768, 7563, - 6437, 4847, 8646, 6464, 6868, 4170, 7703, 1806, 6256, - 5263, 3894, 4106, 4889, 7034, 8952, 1641, 6916, 6302, - 5046, 1726, 68, 4468, 19, 2245, 4083, 3913, 7807, - 2460, 7156, 5834, 7509, 1278, 2636, 9991, 6660, 356, - 7389, 3994, 2360, 5328, 3156, 3549, 1186, 4409, 3178, - 6057, 4081, 3933, 3196, 3467, 6356, 957, 7072, 6056, - 1517, 9932, 8236, 3059, 3518, 7725, 5374, 253, 6103, - 9053, 5451, 3843, 8745, 3424, 2609, 5589, 2776, 2372, - 7125, 6217, 61, 5880, 7087, 9463, 3614, 9996, 51, - 4653, 539, 4104, 147, 7735, 4532, 9631, 6016, 4264, - 7189, 2124, 5360, 5338, 3217, 2096, 2341, 5867, 8239, - 2211, 6275, 6944, 5486, 3694, 8641, 2576, 6016, 3141, - 1, 2692, 
9626, 549, 6556, 2800, 9006, 8047, 8733, - 8001, 8352, 8195, 6963, 4005, 7067, 3512, 4209, 9954, - 4445, 3646, 8905, 4713, 3526, 9003, 7451, 3437, 219, - 1503, 8839, 7882, 345, 6970, 4236, 2208, 5512, 8927, - 9951, 3787, 1901, 5577, 1088, 9910, 3298, 9382, 5591, - 9483, 7472, 9189, 60, 7374, 1511, 1324, 9001, 4608, - 5517, 5004, 5129, 7089, 1404, 7860, 699, 3528, 2226, - 1012, 1401, 8016, 4790, 2726, 6318, 8383, 1848, 6756, - 4556, 3436, 1808, 8982, 5, 3514, 278, 3993, 4715, - 213, 7894, 2785, 3705, 6527, 5420, 6147, 9936, 913, - 5219, 3050, 7066, 9881, 9308, 7445, 5575, 7043, 4840, - 1951, 3401, 1568, 3685, 4270, 197, 386, 2602, 7271, - 2689, 4939, 7521, 9720, 7213, 4350, 8459, 8461, 1245, - 7252, 516, 6211, 441, 2127, 7899, 3164, 1347, 8168, - 1675, 6484, 2975, 6772, 4995, 6275, 7968, 3036, 4636, - 8825, 8994, 4389, 3513, 2409, 7963, 8754, 1654, 9887, - 8968, 1585, 6186, 7899, 7431, 811, 7807, 9296, 9932, - 5090, 5043, 7917, 3680, 8993, 116, 1344, 2602, 6020, - 4409, 3996, 63, 8531, 2822, 1480, 6085, 4912, 9387, - 6576, 1534, 4978, 9762, 1093, 8697, 8777, 5374, 5581, - 7441, 5246, 5968, 1885, 7897, 2975, 4762, 3125, 2488, - 4768, 9657, 6999, 6361, 7556, 908, 266, 4115, 2029, - 6944, 9578, 5384, 6233, 3049, 7347, 2202, 2797, 6742, - 7052, 1750, 5660, 4425, 8800, 5195, 122, 475, 666, - 2091, 3067, 1595, 3773, 1980, 7656, 6000, 4626, 3490, - 8811, 2139, 7508, 1457, 7313, 1673, 2130, 5629, 3544, - 2197, 3987, 2008, 3661, 1996, 5074, 9955, 6834, 9013, - 2610, 61, 6725, 217, 1994, 3874, 7317, 2288, 7046, - 2138, 5148, 3006, 483, 4607, 9598, 1030, 6362, 5693, - 2535, 138, 933, 5602, 5934, 6478, 1869, 48, 1045, - 9153, 7656, 3285, 6621, 9725, 8888, 3411, 4535, 631, - 3055, 1164, 1333, 7342, 831, 3411, 7166, 1880, 6529, - 2636, 9466, 1237, 4517, 8045, 7254, 7616, 5855, 9583, - 196, 4085, 2628, 4971, 2455, 2757, 942, 9895, 7185, - 5273, 4956, 6187, 2588, 7001, 6614, 8140, 5545, 5915, - 3587, 7135, 877, 154, 7304, 9965, 8199, 8045, 2731, - 8307, 7114, 1128, 5242, 3191, 3118, 320, 7211, 7380, - 157, 3611, 5108, 657, 6019, 7596, 6799, 3299, 1717, - 9758, 7744, 1030, 3550, 7635, 8504, 4578, 9913, 3711, - 5056, 5815, 3389, 1558, 3687, 6256, 8891, 9548, 1985, - 544, 1687, 4379, 3053, 2465, 2164, 3167, 3551, 215, - 8811, 373, 6584, 1223, 6182, 3783, 6774, 4105, 4491, - 1691, 2126, 6272, 8878, 5848, 4533, 6683, 2810, 9188, - 1364, 9342, 4748, 6153, 8042, 3464, 8775, 4628, 1962, - 8094, 9114, 1543, 1701, 94, 2356, 9228, 5976, 4299, - 8430, 2075, 4774, 4949, 6412, 5320, 4378, 5355, 8428, - 7672, 9514, 6847, 4732, 9295, 1375, 9467, 4035, 4088, - 8389, 2352, 882, 7081, 3147, 6746, 9986, 2742, 7502, - 309, 8743, 4501, 8545, 4706, 1495, 254, 3125, 2344, - 6817]), - values=tensor([2.8840e-01, 4.4183e-01, 1.3721e-01, 8.1096e-01, - 4.1966e-01, 2.5994e-01, 3.7630e-01, 9.8367e-01, - 4.1568e-01, 2.1859e-01, 3.0153e-01, 3.2703e-01, - 7.8175e-01, 6.1920e-01, 9.9573e-01, 4.3919e-01, - 3.2517e-01, 5.0981e-01, 3.5351e-03, 9.0989e-01, - 6.7808e-01, 3.7911e-02, 4.0401e-01, 9.3286e-01, - 9.7730e-01, 9.8442e-01, 7.7077e-01, 7.3182e-01, - 4.8449e-01, 5.2800e-01, 8.8383e-02, 3.0085e-01, - 4.8370e-01, 1.7831e-01, 1.6859e-01, 4.7312e-01, - 1.7065e-01, 3.5337e-01, 9.7315e-01, 2.8214e-02, - 8.7500e-01, 7.8286e-01, 1.8664e-01, 2.4130e-01, - 1.7065e-01, 8.6065e-01, 6.3363e-01, 9.7779e-01, - 8.0725e-01, 9.9678e-01, 9.1147e-02, 1.8360e-01, - 4.8954e-01, 8.8423e-01, 3.4003e-01, 9.0388e-01, - 4.6861e-01, 1.7894e-01, 1.1389e-01, 4.6669e-02, - 7.7478e-01, 5.4584e-01, 8.6114e-01, 7.9349e-01, - 1.6974e-01, 4.3423e-01, 5.6248e-02, 4.5439e-01, 
- 5.0219e-01, 9.2351e-01, 2.6826e-01, 3.8333e-01, - 9.0307e-01, 7.2118e-01, 7.7457e-01, 1.6562e-01, - 5.4879e-01, 3.3296e-01, 1.4218e-01, 4.3012e-01, - 7.4206e-01, 6.6912e-01, 1.5511e-03, 8.2296e-01, - 8.0557e-01, 1.0555e-01, 2.0850e-01, 4.0289e-01, - 2.4296e-02, 1.4722e-01, 5.2017e-01, 9.0282e-01, - 8.1596e-01, 4.9496e-01, 8.8061e-01, 5.2639e-01, - 7.3326e-01, 5.2719e-01, 2.9275e-02, 2.2815e-01, - 8.3470e-01, 9.7604e-01, 3.8310e-01, 8.6305e-01, - 8.2162e-01, 3.7334e-01, 1.6936e-01, 9.0413e-01, - 2.3517e-01, 8.0772e-01, 6.7720e-01, 7.1841e-01, - 9.7242e-01, 5.7008e-01, 5.2057e-01, 3.3845e-01, - 2.9399e-01, 1.0539e-01, 7.1592e-01, 6.1387e-01, - 8.5398e-01, 9.5324e-01, 1.1651e-01, 2.6991e-01, - 3.6423e-01, 4.1949e-01, 7.9844e-01, 6.9822e-01, - 4.6081e-01, 2.8289e-01, 9.0768e-01, 6.5808e-01, - 9.1438e-01, 7.0070e-01, 4.9658e-01, 5.4039e-01, - 5.5886e-01, 9.2690e-01, 3.3048e-02, 5.1747e-01, - 2.0464e-01, 5.6253e-01, 2.9711e-01, 1.8511e-01, - 8.2642e-01, 8.8128e-01, 4.4636e-01, 4.8270e-01, - 9.8079e-01, 2.7729e-01, 6.7355e-01, 2.7171e-01, - 8.3580e-01, 6.7101e-02, 7.8897e-01, 1.8105e-01, - 8.3328e-01, 9.0933e-01, 2.5938e-02, 8.9679e-01, - 6.6176e-01, 8.6587e-01, 3.0818e-01, 5.4161e-01, - 3.3177e-01, 8.1727e-01, 7.3641e-01, 4.8683e-01, - 6.6534e-01, 2.4498e-01, 1.0098e-01, 5.1743e-01, - 6.1353e-01, 5.1134e-02, 1.3179e-01, 2.8674e-01, - 5.6531e-02, 9.1340e-01, 9.4780e-02, 7.7755e-01, - 9.7366e-01, 9.4401e-01, 9.1712e-01, 7.3659e-02, - 4.5041e-01, 6.0334e-01, 1.7813e-01, 5.8013e-01, - 4.4415e-01, 9.2524e-02, 5.5202e-01, 2.1924e-01, - 8.9768e-02, 8.7639e-01, 3.4106e-01, 8.9521e-01, - 4.3736e-01, 9.6161e-01, 3.0577e-01, 7.2673e-01, - 5.6922e-01, 9.1136e-01, 1.6725e-01, 4.6346e-01, - 5.0976e-01, 5.1232e-01, 7.7783e-01, 9.9038e-01, - 6.3570e-02, 7.5502e-01, 4.3562e-01, 1.6386e-01, - 4.4897e-01, 5.7603e-01, 9.5300e-01, 8.4625e-01, - 7.8549e-01, 7.0669e-01, 8.8842e-01, 4.6161e-01, - 1.0886e-01, 9.9797e-01, 7.7704e-01, 4.4745e-01, - 5.5348e-01, 7.9816e-01, 7.5833e-01, 9.0172e-01, - 6.5109e-01, 3.3934e-01, 9.6855e-01, 3.9876e-01, - 2.7865e-01, 2.3499e-01, 8.3360e-01, 1.2160e-01, - 4.8235e-01, 5.5299e-01, 3.3846e-01, 2.5896e-01, - 1.1429e-01, 5.2068e-01, 4.0576e-01, 1.0800e-01, - 1.1673e-01, 1.9148e-01, 5.3974e-01, 6.1500e-02, - 7.3862e-01, 4.0671e-02, 1.4820e-01, 3.2336e-01, - 5.6085e-01, 2.9156e-01, 2.7142e-01, 5.1415e-01, - 7.3523e-01, 9.0248e-01, 7.5671e-01, 4.9535e-01, - 7.9571e-01, 7.4643e-01, 7.8957e-01, 4.1524e-01, - 4.5184e-01, 6.5274e-02, 3.1191e-01, 7.9157e-01, - 2.7545e-02, 9.2301e-01, 9.9453e-01, 2.4729e-01, - 4.2412e-01, 3.6611e-01, 9.3792e-01, 9.9284e-01, - 5.1807e-01, 7.8474e-01, 1.1153e-01, 2.7679e-01, - 5.1358e-01, 4.8805e-02, 8.5891e-01, 2.0175e-01, - 1.4589e-01, 9.3637e-02, 5.3722e-01, 7.6807e-01, - 6.8017e-01, 5.2222e-01, 5.5103e-01, 3.4505e-01, - 9.7557e-01, 9.3842e-02, 6.2360e-01, 7.1001e-01, - 5.3117e-01, 1.7962e-01, 3.6912e-01, 6.7579e-01, - 1.0135e-01, 6.7780e-01, 1.8499e-01, 9.2320e-01, - 2.0157e-01, 8.5156e-01, 6.2098e-01, 8.7820e-01, - 5.7171e-01, 5.8453e-01, 9.9270e-01, 2.8080e-01, - 1.8687e-02, 8.7414e-01, 1.7689e-01, 2.9132e-01, - 1.9790e-01, 3.0467e-01, 3.5501e-01, 5.7991e-01, - 5.4925e-01, 7.5190e-01, 8.8675e-01, 1.4479e-01, - 8.1647e-01, 8.5290e-01, 9.0125e-01, 1.6470e-01, - 9.9914e-01, 9.4545e-01, 1.9357e-02, 1.8278e-01, - 5.5496e-02, 8.5773e-01, 6.8462e-01, 2.8210e-01, - 9.6506e-01, 7.4561e-01, 9.4014e-01, 6.6528e-02, - 3.4719e-01, 5.7601e-02, 4.6877e-01, 8.0231e-01, - 8.7718e-01, 7.6647e-01, 5.4947e-01, 6.9274e-02, - 2.6517e-01, 7.9298e-01, 7.4926e-01, 3.8426e-01, - 
8.0665e-01, 8.1102e-01, 3.9734e-02, 4.8270e-01, - 2.7288e-01, 8.5640e-01, 9.0925e-01, 9.8226e-01, - 9.3178e-01, 1.3909e-01, 4.3400e-01, 3.4806e-01, - 5.9163e-02, 4.1550e-01, 5.7548e-01, 9.3041e-01, - 5.5564e-01, 8.0414e-01, 4.8781e-01, 8.2555e-01, - 9.5966e-01, 1.1761e-01, 5.4737e-01, 4.5517e-01, - 2.3339e-01, 3.3553e-01, 5.0046e-01, 9.1915e-01, - 2.3385e-01, 8.0373e-01, 8.4412e-01, 5.9031e-01, - 3.0581e-02, 3.1007e-01, 7.9126e-01, 8.8832e-01, - 1.8686e-01, 6.4693e-01, 7.3473e-01, 3.9851e-01, - 2.5276e-01, 7.5542e-01, 5.4021e-01, 7.7912e-01, - 8.0820e-01, 8.3534e-01, 3.3797e-01, 3.1584e-01, - 3.1064e-01, 4.4400e-01, 3.2848e-01, 7.7776e-01, - 8.5737e-01, 3.0283e-01, 5.6505e-01, 1.0186e-01, - 5.9803e-02, 5.5969e-01, 3.5781e-01, 6.9147e-01, - 8.0119e-01, 9.8082e-01, 8.5171e-01, 7.8022e-01, - 3.9640e-01, 9.0353e-01, 9.0919e-01, 9.3838e-01, - 9.7094e-01, 4.5839e-01, 4.4371e-01, 7.0686e-01, - 2.4518e-01, 1.3004e-01, 6.9358e-01, 4.2710e-01, - 6.7932e-01, 4.6308e-01, 8.1491e-01, 1.7000e-01, - 3.0062e-01, 7.7898e-01, 6.1484e-01, 4.0302e-01, - 9.5065e-01, 8.1030e-01, 9.3582e-03, 2.2491e-01, - 6.8609e-03, 1.2118e-01, 2.9171e-03, 9.3268e-01, - 7.7786e-01, 7.5091e-01, 9.5908e-01, 6.1182e-03, - 1.1122e-01, 2.2413e-01, 2.2139e-01, 7.5067e-01, - 9.7482e-01, 6.9779e-01, 3.9209e-02, 4.0097e-01, - 4.2371e-01, 6.4973e-01, 9.5894e-01, 8.5062e-02, - 5.8204e-01, 6.8671e-01, 5.1221e-01, 4.0473e-01, - 7.2026e-01, 1.2949e-01, 2.9672e-01, 9.2554e-01, - 2.0556e-01, 9.6803e-01, 3.5591e-01, 9.3032e-01, - 5.6400e-01, 1.5262e-01, 4.7225e-01, 9.9073e-01, - 8.9405e-01, 9.6195e-01, 9.9314e-01, 2.3625e-01, - 5.1743e-01, 1.1974e-01, 9.0363e-01, 8.6600e-01, - 8.0337e-01, 9.6559e-01, 2.6426e-01, 3.4869e-01, - 2.5160e-01, 9.6810e-02, 9.6466e-02, 4.3477e-01, - 5.1657e-01, 2.9493e-04, 6.7096e-01, 6.4018e-01, - 3.3778e-01, 9.4351e-02, 1.1026e-01, 5.3561e-01, - 3.6227e-01, 3.7598e-01, 8.5462e-01, 5.9498e-02, - 9.2387e-01, 7.9899e-02, 3.4728e-01, 1.0305e-01, - 4.6347e-01, 2.2397e-01, 1.3323e-01, 5.8151e-01, - 7.8188e-01, 7.5203e-01, 2.3923e-01, 8.7475e-01, - 3.0265e-01, 5.6830e-01, 1.2483e-02, 8.4118e-01, - 3.6248e-01, 7.1321e-02, 3.8929e-01, 7.3302e-01, - 2.9683e-02, 3.3447e-04, 3.4344e-01, 7.5402e-01, - 3.3043e-01, 3.5817e-01, 1.0547e-01, 5.2731e-01, - 3.5698e-01, 5.2197e-01, 2.1774e-02, 8.4981e-01, - 3.1434e-01, 4.3985e-01, 3.3751e-01, 8.2447e-01, - 9.0353e-01, 3.5193e-01, 9.9614e-01, 5.6655e-01, - 4.4749e-01, 1.7903e-02, 5.5607e-01, 5.5539e-01, - 4.6695e-01, 1.7420e-01, 6.8828e-01, 5.0139e-01, - 2.8580e-02, 2.6173e-01, 9.4751e-01, 1.9893e-01, - 8.5899e-01, 5.1191e-01, 8.7673e-01, 9.1152e-01, - 2.6874e-01, 4.6410e-01, 8.2734e-01, 6.1037e-01, - 1.6229e-01, 2.8812e-01, 8.8686e-01, 7.5397e-01, - 5.7413e-01, 7.9610e-01, 2.7033e-02, 9.3147e-01, - 1.6480e-01, 8.9252e-01, 6.1944e-01, 2.5837e-01, - 6.7630e-01, 4.4219e-02, 7.2525e-01, 7.9502e-01, - 4.1810e-01, 2.8494e-01, 5.8932e-02, 5.1582e-01, - 1.8311e-01, 4.1472e-01, 4.7158e-02, 4.5326e-01, - 3.3751e-01, 9.6958e-01, 8.9434e-01, 1.4553e-01, - 3.6062e-01, 5.6479e-01, 3.4382e-01, 5.0365e-01, - 6.0594e-01, 6.7806e-02, 1.6118e-01, 8.6570e-01, - 4.0710e-01, 2.4099e-01, 6.6341e-01, 7.4617e-01, - 6.2105e-01, 9.9820e-01, 2.9075e-01, 7.0006e-01, - 2.9853e-01, 2.3004e-02, 1.5685e-01, 3.4050e-01, - 2.7630e-01, 7.4879e-01, 3.9249e-01, 8.5814e-01, - 2.9174e-01, 7.5799e-01, 7.4573e-01, 1.4019e-01, - 6.8477e-01, 3.3049e-01, 5.9636e-01, 3.2202e-01, - 1.4347e-02, 5.9175e-01, 1.7777e-01, 8.4410e-01, - 7.7933e-01, 4.2032e-01, 5.5168e-01, 9.9130e-03, - 7.4954e-02, 1.4053e-01, 6.4507e-01, 8.5374e-01, - 
1.0934e-01, 8.0720e-01, 6.0686e-01, 5.7376e-01, - 4.4972e-01, 7.2877e-01, 8.2845e-01, 5.5441e-01, - 1.8664e-03, 6.9860e-01, 2.3945e-01, 4.9831e-01, - 8.3538e-01, 2.4266e-01, 8.0127e-02, 3.2858e-01, - 7.2302e-02, 3.0432e-01, 3.2243e-01, 2.3603e-02, - 1.9193e-01, 7.7673e-01, 5.5788e-01, 6.9296e-01, - 1.6739e-01, 6.4205e-01, 4.9813e-01, 5.3317e-01, - 4.5772e-01, 1.0799e-01, 8.3504e-01, 7.2689e-01, - 6.4582e-01, 3.5702e-01, 2.6752e-01, 5.0306e-01, - 2.5481e-01, 4.2631e-01, 7.2228e-01, 6.4601e-01, - 4.7649e-01, 7.0767e-02, 5.7134e-01, 9.5651e-01, - 4.8798e-01, 3.5081e-01, 8.1035e-01, 2.2577e-01, - 8.9774e-01, 9.8753e-01, 3.2962e-01, 5.9843e-01, - 4.4286e-01, 8.9385e-01, 8.4978e-01, 9.8700e-01, - 6.9001e-01, 1.0624e-01, 5.2676e-01, 2.1199e-01, - 7.7498e-01, 9.6944e-01, 8.7014e-01, 2.3069e-01, - 4.9641e-01, 3.0380e-01, 2.2540e-01, 6.8408e-01, - 1.6711e-01, 8.1494e-01, 5.3474e-01, 4.7828e-01, - 1.1990e-01, 8.3419e-01, 5.3938e-01, 9.8791e-01, - 6.0981e-01, 2.8847e-01, 2.3305e-01, 2.4698e-01, - 4.4778e-01, 2.1873e-02, 1.3042e-03, 3.7369e-01, - 9.7526e-01, 1.0262e-01, 7.6608e-01, 9.2036e-01, - 8.3530e-01, 6.3795e-01, 4.3448e-01, 2.4097e-01, - 3.5225e-01, 9.3811e-02, 5.2934e-01, 4.5612e-01, - 7.4354e-01, 8.7226e-01, 8.6587e-01, 8.9845e-01, - 3.8513e-01, 3.8296e-02, 9.1128e-01, 8.1981e-01, - 1.4811e-01, 9.0155e-01, 6.4841e-01, 4.4068e-01, - 9.8688e-01, 9.9709e-01, 4.2270e-01, 6.9585e-01, - 2.7717e-01, 5.9451e-02, 9.5034e-01, 6.1396e-01, - 5.8699e-01, 5.6065e-01, 9.7675e-01, 7.6908e-01, - 4.3459e-01, 2.6185e-01, 3.3918e-01, 8.8475e-01, - 9.9469e-01, 5.1719e-01, 1.2536e-02, 7.9953e-01, - 5.8694e-02, 6.8508e-02, 1.3971e-02, 9.1346e-01, - 5.7906e-01, 8.7957e-01, 8.7184e-01, 7.9957e-01, - 6.6740e-01, 8.8910e-01, 2.3765e-01, 1.5346e-01, - 8.2845e-01, 6.1739e-02, 5.9085e-01, 9.9421e-01, - 1.1093e-01, 8.7460e-01, 7.9463e-01, 3.9043e-01, - 4.2107e-01, 9.3567e-01, 5.7116e-01, 9.5562e-01, - 1.4046e-01, 9.5840e-01, 7.2657e-01, 8.9020e-01, - 3.1909e-02, 6.2548e-01, 7.6863e-01, 2.7453e-01, - 5.2984e-01, 2.9711e-01, 4.6701e-01, 5.1813e-02, - 3.6967e-01, 6.3356e-01, 6.8065e-01, 7.5516e-01, - 4.5039e-01, 3.1351e-01, 6.9119e-01, 6.1214e-01, - 5.8392e-01, 8.1954e-02, 5.3556e-02, 6.7371e-01, - 4.6413e-01, 7.7358e-01, 3.2483e-01, 3.2597e-01, - 6.2412e-01, 9.0713e-02, 1.2413e-01, 5.7210e-01, - 8.7935e-01, 2.8617e-01, 3.7725e-01, 2.9843e-01, - 9.4261e-01, 3.6302e-01, 4.7908e-02, 6.4665e-01, - 9.3962e-01, 8.7430e-02, 9.9035e-01, 8.1030e-01, - 9.0691e-02, 7.7773e-01, 5.4666e-02, 4.8836e-01, - 5.3026e-01, 5.7899e-01, 8.0673e-01, 2.0044e-01, - 1.9934e-01, 2.8598e-01, 7.4524e-01, 1.2927e-01, - 9.2808e-01, 9.2045e-01, 3.8985e-01, 5.5247e-01, - 5.7464e-01, 5.7580e-02, 1.4284e-01, 4.7990e-02, - 4.3020e-01, 8.2560e-01, 8.1836e-01, 5.9019e-01, - 8.9344e-01, 8.7895e-01, 3.3983e-01, 5.4129e-01, - 9.0138e-01, 6.8974e-01, 8.1632e-01, 3.1266e-01, - 9.1417e-01, 6.4220e-01, 9.6898e-01, 4.5624e-01, - 7.5150e-01, 8.2712e-01, 3.1443e-01, 3.8511e-01, - 3.0069e-01, 5.4418e-01, 7.7085e-01, 7.1004e-01, - 9.0850e-01, 2.8569e-01, 2.8229e-01, 8.2281e-01, - 3.9838e-01, 5.0278e-01, 8.3763e-02, 9.3758e-01, - 9.9311e-01, 1.9914e-01, 5.0422e-01, 1.3989e-01, - 6.7582e-01, 9.3789e-01, 8.2716e-03, 8.7877e-01, - 4.9822e-01, 2.0486e-01, 2.8594e-01, 6.3313e-01, - 3.0356e-01, 4.3013e-01, 6.1970e-01, 3.6020e-01, - 9.6547e-01, 6.1613e-01, 6.4540e-01, 7.4650e-01, - 6.1805e-01, 8.3533e-01, 8.5825e-01, 1.7147e-02, - 1.2759e-01, 3.6667e-01, 3.3350e-01, 6.2218e-01, - 6.5278e-01, 2.5488e-01, 6.1015e-01, 9.2496e-01, - 8.2075e-02, 6.5632e-01, 7.0234e-01, 7.5650e-01, - 
2.0221e-01, 1.0267e-01, 1.1805e-03, 2.4251e-01, - 9.2195e-01, 7.5362e-01, 9.2722e-01, 8.3398e-01, - 2.6393e-01, 4.1074e-01, 5.8470e-01, 4.7939e-01, - 9.9882e-01, 1.5441e-01, 2.9312e-01, 6.3439e-01, - 5.9746e-01, 4.5249e-01, 7.6343e-01, 6.6838e-01, - 2.6124e-01, 6.3346e-01, 8.9367e-01, 5.0087e-01, - 7.9238e-01, 3.7238e-01, 3.5710e-01, 3.5431e-01, - 4.2045e-01, 2.7507e-01, 6.1473e-01, 9.4034e-01, - 4.8498e-01, 5.8045e-01, 7.1439e-01, 2.6115e-01, - 8.6137e-01, 8.0278e-01, 3.6885e-01, 6.1547e-01, - 3.4485e-01, 8.5126e-01, 9.5304e-01, 2.8787e-01, - 4.7971e-01, 1.8011e-01, 9.1167e-01, 4.1315e-01, - 8.2359e-02, 1.3913e-01, 4.7291e-01, 9.7543e-01, - 4.5087e-01, 4.6225e-01, 8.0787e-01, 7.3710e-02, - 2.2523e-01, 2.4144e-01, 2.3303e-01, 3.6756e-01, - 6.4454e-01, 6.0330e-01, 3.3924e-01, 8.9650e-01, - 3.5891e-01, 7.6267e-02, 8.8689e-01, 1.9321e-01, - 9.8928e-01, 1.6353e-01, 9.4096e-01, 1.6797e-01, - 5.8028e-01, 9.4390e-01, 1.7118e-01, 7.7669e-01, - 1.5969e-01, 1.8190e-01, 7.3046e-01, 7.7660e-01]), + col_indices=tensor([2600, 5231, 4903, 9112, 3021, 8353, 9178, 130, 8574, + 8568, 4172, 3677, 5291, 6332, 7185, 8743, 1050, 4714, + 7487, 3002, 9575, 1127, 8611, 3697, 7547, 4848, 4597, + 8478, 6650, 7918, 2672, 5498, 5868, 1402, 8454, 6503, + 7999, 381, 8700, 6506, 9003, 5098, 1533, 3939, 8694, + 1558, 7980, 7289, 6478, 6969, 5028, 9191, 704, 8233, + 85, 5074, 7407, 9648, 2755, 6041, 9100, 1783, 4364, + 6231, 2515, 2191, 7161, 9117, 2910, 6339, 5040, 701, + 1022, 2764, 3915, 4823, 8854, 4649, 5196, 262, 2509, + 9622, 2752, 9967, 8566, 1481, 3124, 7804, 258, 5053, + 5265, 7868, 1631, 3382, 9274, 80, 952, 2549, 8459, + 3606, 5729, 8370, 6346, 7066, 7655, 8449, 3667, 8314, + 962, 8143, 8454, 3605, 3074, 5478, 3353, 441, 7705, + 7022, 8726, 4961, 4216, 63, 2015, 8506, 3554, 6644, + 5284, 9189, 1076, 3135, 2648, 2852, 4653, 1510, 1251, + 769, 7240, 307, 4328, 4693, 2496, 5907, 7159, 4528, + 2637, 1963, 5300, 1946, 9977, 7226, 8871, 2393, 3185, + 2055, 8996, 4763, 6461, 8679, 551, 1875, 5398, 8796, + 9397, 2010, 3884, 8730, 3738, 1437, 6634, 9859, 5475, + 2314, 3141, 1816, 5466, 8478, 2859, 1302, 6065, 6945, + 6428, 8977, 7430, 806, 5144, 1225, 3950, 1795, 7546, + 9642, 318, 1038, 5295, 3577, 442, 3864, 1076, 3784, + 6040, 7328, 8239, 7926, 8640, 8085, 948, 6343, 381, + 3160, 9460, 2616, 54, 294, 5971, 8748, 6799, 1862, + 805, 6529, 7385, 9996, 8118, 6272, 6767, 7418, 5501, + 7609, 3317, 9334, 8438, 5999, 6373, 4416, 6333, 9698, + 8308, 6027, 4883, 1440, 5439, 1790, 5242, 9132, 3263, + 6271, 8036, 4063, 2967, 6105, 490, 7979, 5860, 7267, + 9364, 1480, 7430, 7086, 9569, 6626, 8327, 3260, 5777, + 4675, 6953, 4646, 1545, 1382, 2646, 4470, 7121, 2939, + 4042, 2216, 3407, 5735, 1494, 2450, 3286, 3517, 2367, + 6450, 2059, 7415, 5762, 4007, 8636, 7393, 9375, 2187, + 2822, 18, 7251, 3435, 1150, 798, 8265, 5306, 278, + 3854, 4468, 9742, 2547, 252, 3510, 9968, 6242, 1564, + 2291, 1367, 4931, 6969, 645, 3856, 3142, 6094, 1663, + 1142, 8256, 2356, 2211, 5672, 2154, 410, 2525, 4023, + 8630, 3832, 5973, 5725, 1247, 5639, 730, 3630, 230, + 5717, 6063, 4879, 2227, 4466, 6629, 6951, 5863, 3365, + 9946, 7502, 8193, 2933, 4330, 982, 5209, 5098, 5451, + 3069, 5297, 6244, 7409, 6057, 225, 8229, 3118, 5943, + 8606, 3015, 5985, 6655, 768, 3896, 9498, 7530, 8714, + 8694, 4039, 4341, 8137, 3320, 1958, 6206, 8607, 5474, + 9977, 5768, 8688, 455, 4607, 8183, 3139, 3382, 5994, + 9106, 170, 4651, 7203, 8800, 5892, 6565, 711, 2073, + 2768, 7125, 780, 2767, 5423, 4502, 5306, 3607, 6804, + 7976, 804, 3573, 7010, 3928, 3285, 7510, 4481, 250, + 
887, 5966, 6750, 3813, 7639, 4571, 1937, 8346, 2102, + 5164, 871, 4218, 1699, 1955, 1859, 8782, 4105, 1147, + 6027, 8549, 5013, 1855, 434, 6804, 7217, 2108, 4741, + 8764, 5639, 9213, 2332, 2204, 567, 361, 9186, 8226, + 4590, 8585, 6151, 1853, 9272, 931, 6723, 7769, 397, + 4017, 9475, 947, 5641, 5272, 9775, 5694, 8649, 7773, + 6704, 6739, 8898, 2839, 7941, 3069, 7043, 4156, 1657, + 5111, 492, 2671, 7947, 5235, 1303, 7210, 5113, 7326, + 5901, 3113, 8474, 5313, 4231, 7306, 7073, 8081, 6155, + 7321, 5958, 4320, 5990, 5980, 6648, 7740, 4016, 9308, + 261, 7587, 4523, 4203, 8744, 1113, 2608, 8358, 2573, + 1531, 7921, 7402, 5653, 7816, 4442, 9625, 1624, 8635, + 3519, 2146, 1219, 5046, 4542, 5759, 9376, 4906, 7085, + 2579, 3158, 6273, 6533, 4081, 4252, 7408, 2324, 2931, + 9186, 8260, 2535, 6928, 6379, 1912, 301, 3325, 708, + 1550, 8745, 4513, 295, 2086, 2883, 5973, 9631, 4793, + 9683, 464, 2498, 7085, 2762, 1565, 2782, 7837, 1616, + 4292, 7324, 3947, 220, 7680, 9430, 7901, 1431, 7300, + 6480, 6056, 337, 9029, 7438, 9968, 1524, 753, 444, + 834, 3240, 5502, 2804, 9961, 1771, 3584, 3086, 207, + 6223, 1563, 5473, 2493, 1805, 6550, 8260, 9959, 7819, + 603, 6758, 1800, 7889, 7475, 3701, 4129, 6909, 4425, + 663, 4845, 2823, 9943, 2367, 6986, 241, 3488, 6028, + 6218, 8353, 48, 7604, 2, 4515, 6091, 1857, 42, + 3017, 5072, 9767, 8838, 875, 1395, 8454, 9127, 1947, + 4114, 2786, 865, 7323, 868, 1430, 108, 914, 900, + 8180, 3798, 774, 7769, 1853, 726, 1381, 265, 1001, + 2509, 8791, 7132, 8838, 9046, 2149, 6061, 905, 9792, + 8383, 1381, 9849, 3163, 7013, 6121, 5930, 8998, 9729, + 2447, 7059, 6540, 3520, 9789, 3794, 5216, 4933, 477, + 6174, 3700, 1864, 2905, 1787, 9892, 4190, 4270, 3402, + 9252, 4167, 6968, 9057, 2777, 8840, 1397, 485, 8194, + 6635, 8509, 2222, 855, 7621, 1716, 5644, 1854, 835, + 1940, 3433, 128, 9592, 7480, 4192, 1665, 3634, 9242, + 6625, 4718, 719, 3784, 9986, 6055, 5985, 213, 5542, + 635, 766, 4151, 5756, 9328, 4479, 8567, 3511, 9781, + 3298, 4963, 8024, 1938, 1046, 2323, 4039, 1321, 7947, + 1649, 2868, 8260, 488, 7570, 213, 6996, 6425, 826, + 1478, 314, 2986, 6475, 4150, 460, 6068, 3508, 7024, + 1426, 9136, 5910, 7813, 6000, 6024, 7524, 1401, 5040, + 4922, 8395, 5715, 9896, 8867, 9091, 4804, 2077, 7512, + 7101, 9479, 3257, 973, 2824, 2885, 8225, 3072, 380, + 6176, 8409, 3488, 6191, 9734, 2809, 9741, 6990, 3892, + 945, 7435, 2152, 8740, 5952, 6695, 7940, 1671, 3020, + 15, 5964, 3508, 1151, 9252, 6174, 1692, 1718, 9424, + 7302, 9817, 6106, 1360, 5641, 6897, 26, 3535, 930, + 4261, 3482, 4024, 7098, 9435, 1230, 3220, 3675, 3190, + 3331, 4389, 4488, 2175, 9831, 2601, 1122, 2559, 8675, + 1849, 6508, 6031, 6363, 1826, 5945, 3447, 6509, 4897, + 7706, 3947, 491, 6760, 9059, 6278, 8828, 5967, 2563, + 6789, 1085, 6073, 6434, 8946, 2745, 2922, 2350, 231, + 7938, 4916, 1773, 2219, 4669, 9442, 6941, 763, 6207, + 4821, 8158, 1830, 8402, 2113, 9283, 9883, 9382, 1105, + 10, 7386, 5532, 5867, 9867, 6588, 5464, 4484, 4330, + 2580, 7403, 6398, 3848, 7634, 1183, 9704, 1468, 5476, + 5791, 7578, 8280, 612, 3298, 3916, 3841, 9170, 1478, + 266, 6986, 1963, 1665, 1478, 1000, 4131, 6466, 2924, + 3512, 3906, 6195, 2246, 3267, 9842, 2166, 5954, 5374, + 8297, 4253, 260, 209, 2565, 2289, 1705, 2909, 1652, + 2938, 8341, 8294, 8641, 5826, 8001, 3307, 9083, 9375, + 8216, 2647, 761, 2906, 6501, 6354, 5386, 18, 8077, + 4168, 4578, 1635, 2720, 3137, 5656, 953, 8779, 9671, + 2134, 2867, 4373, 2608, 2028, 6006, 6457, 2957, 9881, + 9542, 5291, 6312, 575, 5310, 655, 9818, 1563, 3843, + 7682, 1682, 883, 1618, 2590, 4064, 4563, 2543, 
3540, + 348]), + values=tensor([7.8246e-01, 2.3595e-01, 1.5596e-01, 1.3324e-01, + 3.5975e-01, 1.3864e-01, 3.3398e-01, 3.3426e-01, + 6.5371e-01, 2.1365e-01, 6.4688e-01, 8.9320e-01, + 1.4264e-01, 7.2912e-01, 5.6899e-01, 6.1770e-01, + 1.0075e-01, 8.1499e-02, 3.5519e-01, 8.6810e-01, + 6.5121e-01, 1.2629e-01, 5.2786e-01, 8.6830e-01, + 8.4677e-01, 7.3480e-01, 8.8555e-02, 4.3203e-01, + 6.0372e-01, 5.5090e-01, 8.9181e-01, 1.4938e-01, + 5.9344e-01, 8.5791e-01, 3.0043e-01, 6.3547e-01, + 3.8101e-01, 5.5988e-01, 6.8361e-01, 7.3448e-01, + 1.8616e-01, 6.7206e-01, 3.5459e-01, 3.8084e-01, + 4.8088e-01, 2.3744e-01, 1.8088e-03, 4.9690e-01, + 8.2795e-01, 7.9128e-01, 3.3057e-01, 8.9113e-01, + 5.4547e-01, 5.2657e-01, 1.6707e-01, 1.7261e-01, + 9.4872e-01, 2.9374e-01, 4.8638e-01, 7.5425e-01, + 6.9408e-01, 4.2174e-01, 4.3439e-01, 2.8370e-02, + 7.4152e-01, 8.5934e-01, 2.9298e-01, 8.5329e-01, + 3.8014e-01, 2.1854e-01, 2.1548e-01, 6.5745e-01, + 7.9623e-01, 5.8057e-01, 2.9081e-01, 9.8173e-01, + 6.0964e-01, 8.9575e-01, 4.9794e-01, 6.0210e-01, + 3.2884e-01, 2.6866e-01, 6.1081e-03, 9.7721e-01, + 8.0519e-01, 4.7070e-01, 7.8138e-03, 7.2483e-01, + 9.9633e-01, 4.2919e-01, 3.5796e-01, 2.6967e-01, + 2.1524e-02, 1.3160e-01, 3.5480e-01, 5.9259e-02, + 6.1098e-01, 6.4580e-01, 7.5812e-01, 9.9300e-01, + 6.2299e-01, 8.4524e-01, 5.0397e-01, 2.4435e-01, + 8.4559e-01, 4.3966e-01, 8.6919e-01, 9.5407e-01, + 3.9762e-01, 1.2054e-01, 4.5601e-01, 9.0332e-01, + 6.9750e-01, 7.7232e-01, 1.2349e-01, 5.7490e-01, + 8.7631e-01, 5.6629e-01, 3.5034e-01, 9.2630e-01, + 6.1068e-01, 2.8973e-01, 7.2836e-02, 7.1680e-01, + 7.2171e-01, 1.6608e-01, 7.1736e-01, 1.5011e-01, + 1.9258e-01, 4.7540e-01, 1.2623e-01, 5.4691e-01, + 3.1216e-01, 7.9172e-01, 8.1675e-01, 8.0643e-01, + 5.3432e-01, 1.6374e-02, 8.6053e-01, 2.4244e-01, + 9.1974e-01, 4.3023e-01, 2.7241e-02, 6.0449e-01, + 8.0286e-01, 4.3504e-02, 3.2913e-01, 2.5231e-01, + 1.8334e-01, 4.5909e-01, 4.0428e-01, 8.5720e-01, + 4.8212e-01, 6.9230e-01, 1.5970e-01, 4.7273e-01, + 1.9216e-01, 6.4464e-01, 3.5132e-01, 6.7396e-01, + 2.7547e-01, 4.3671e-01, 4.7036e-01, 2.7506e-01, + 6.5240e-01, 8.9075e-01, 9.8417e-01, 9.6379e-01, + 6.5690e-01, 3.3018e-01, 8.4863e-01, 5.4156e-01, + 3.3978e-01, 1.2904e-01, 9.6353e-01, 2.3847e-02, + 4.3514e-01, 8.2010e-01, 4.9946e-01, 8.1152e-01, + 8.4783e-01, 6.9276e-01, 3.8143e-01, 2.4622e-01, + 2.7254e-01, 8.2344e-01, 5.1953e-02, 3.1952e-01, + 9.6208e-01, 8.1073e-01, 2.0146e-01, 1.4808e-01, + 6.6333e-02, 9.1530e-02, 2.3463e-01, 4.5059e-01, + 3.1984e-01, 2.6712e-03, 7.5853e-01, 7.7848e-01, + 5.8728e-02, 8.8800e-01, 8.9732e-01, 7.0222e-01, + 8.0840e-01, 7.7232e-02, 8.3591e-01, 1.4898e-02, + 5.2645e-02, 4.3454e-01, 5.7785e-01, 7.4946e-02, + 7.0466e-01, 5.4613e-02, 7.2026e-01, 2.1416e-01, + 6.2011e-01, 3.7102e-01, 8.1549e-02, 3.4583e-01, + 9.8627e-01, 3.8176e-01, 7.0585e-01, 6.3774e-01, + 3.0407e-01, 4.9411e-01, 5.3641e-01, 8.7269e-01, + 3.2044e-01, 9.9363e-01, 7.8009e-01, 5.0334e-01, + 8.8030e-02, 4.3475e-01, 8.8989e-01, 8.5230e-01, + 2.2098e-01, 3.8829e-01, 8.6076e-01, 2.5414e-01, + 6.2980e-02, 9.1373e-01, 4.3328e-01, 5.0450e-01, + 1.2935e-01, 9.2979e-01, 4.8364e-01, 5.9625e-01, + 8.4292e-01, 5.8689e-01, 3.1227e-01, 1.8234e-01, + 8.5157e-01, 3.0334e-01, 7.6983e-01, 6.3715e-01, + 6.4977e-01, 6.7812e-01, 8.7250e-01, 9.3836e-01, + 5.9727e-01, 5.4649e-01, 8.2999e-03, 8.9994e-01, + 5.8971e-01, 1.7571e-01, 4.2725e-01, 4.2743e-01, + 6.7930e-01, 4.1239e-01, 7.2830e-01, 9.0333e-01, + 1.4575e-02, 1.3958e-01, 1.2802e-01, 2.2352e-02, + 6.2753e-01, 9.8401e-01, 2.6866e-01, 8.4549e-01, + 6.3621e-01, 
1.3622e-01, 1.1825e-01, 1.6935e-01, + 8.0968e-01, 7.2794e-01, 6.0600e-01, 6.3835e-01, + 7.0075e-01, 9.5177e-01, 3.9760e-01, 4.6679e-02, + 5.5392e-01, 8.3430e-01, 6.4781e-02, 4.4178e-01, + 8.5842e-01, 1.2844e-01, 1.4042e-01, 9.9142e-01, + 8.8575e-01, 1.1920e-01, 4.9462e-01, 3.3885e-01, + 8.6737e-01, 5.5141e-01, 4.6633e-01, 4.9962e-01, + 4.1515e-01, 2.3735e-01, 8.7407e-01, 6.4269e-01, + 2.5199e-02, 7.5966e-01, 8.7166e-01, 9.9289e-01, + 6.8726e-01, 1.7632e-01, 3.5292e-02, 1.8296e-01, + 5.8990e-01, 3.0367e-01, 1.9937e-01, 4.2755e-01, + 3.5678e-01, 7.1431e-01, 8.1820e-01, 1.8679e-01, + 6.4666e-01, 5.5054e-01, 6.5137e-01, 5.0743e-02, + 4.0434e-01, 6.5070e-02, 6.5028e-01, 1.5087e-01, + 4.7091e-01, 8.7475e-01, 8.5853e-01, 3.4273e-01, + 1.5462e-01, 3.5063e-02, 5.7511e-01, 4.9131e-01, + 3.0079e-01, 2.0709e-01, 3.6057e-01, 6.3567e-01, + 6.8409e-02, 8.9877e-02, 9.9039e-02, 1.3843e-01, + 9.0567e-01, 8.0548e-01, 4.2303e-01, 5.3174e-01, + 8.4286e-01, 2.1463e-01, 3.2691e-01, 8.3047e-02, + 4.8146e-01, 2.2127e-01, 2.9311e-01, 6.4648e-01, + 2.2833e-01, 3.2931e-01, 1.0493e-01, 3.5890e-01, + 1.2917e-01, 4.0978e-01, 5.7782e-01, 5.0569e-02, + 3.2021e-01, 8.2385e-01, 3.9169e-01, 1.8586e-01, + 3.8588e-01, 6.0899e-01, 3.4886e-01, 2.1382e-01, + 8.3535e-01, 3.5128e-01, 6.1500e-01, 7.9708e-02, + 1.1538e-01, 5.0411e-01, 9.3582e-01, 5.4800e-01, + 7.0448e-01, 2.2045e-02, 7.5175e-01, 6.4875e-01, + 8.1017e-01, 4.3169e-01, 4.4460e-01, 2.6180e-01, + 8.7328e-01, 7.6743e-01, 7.8735e-01, 4.6738e-01, + 9.6189e-01, 2.8717e-01, 9.2960e-01, 1.9109e-01, + 3.7028e-01, 5.6161e-01, 3.6894e-01, 3.6198e-01, + 3.2918e-01, 5.7667e-02, 1.0207e-02, 9.2071e-01, + 3.1454e-01, 4.1115e-01, 1.0862e-01, 5.2641e-01, + 8.0394e-01, 6.6571e-01, 4.8358e-02, 8.1520e-01, + 6.3662e-01, 2.9600e-01, 3.2779e-01, 8.9674e-01, + 9.9743e-02, 6.1643e-01, 5.1554e-01, 7.0243e-01, + 2.3874e-01, 7.0855e-01, 3.6668e-01, 4.1051e-01, + 6.7852e-01, 2.1936e-01, 3.9380e-02, 5.9537e-01, + 7.3656e-01, 8.3381e-01, 9.3346e-01, 3.8423e-01, + 2.0000e-01, 1.8021e-01, 3.6918e-01, 7.0276e-01, + 5.8744e-01, 7.2461e-01, 7.5015e-02, 4.9881e-01, + 3.7240e-01, 1.9849e-01, 9.1491e-01, 6.1737e-02, + 7.7286e-01, 5.0051e-01, 8.1823e-01, 1.0679e-01, + 3.5539e-01, 1.4076e-01, 4.9475e-01, 6.9463e-01, + 5.3637e-01, 8.5349e-01, 8.1877e-01, 4.0704e-01, + 2.6188e-01, 1.2887e-02, 8.6145e-01, 3.1114e-01, + 4.5799e-01, 1.4071e-01, 6.5588e-01, 2.2405e-01, + 4.5578e-01, 8.0548e-01, 7.7317e-01, 2.7677e-01, + 5.0230e-03, 5.2026e-01, 4.8114e-01, 8.6761e-01, + 6.3646e-01, 3.0802e-01, 9.7501e-01, 6.1702e-01, + 4.2876e-01, 7.5840e-01, 3.6739e-01, 8.5991e-01, + 4.5013e-01, 6.7846e-01, 2.9783e-01, 6.4458e-01, + 2.9371e-01, 9.1762e-01, 4.1420e-01, 3.0070e-01, + 3.7932e-01, 3.5872e-01, 5.3674e-01, 4.2499e-01, + 1.9526e-01, 1.1185e-01, 7.8643e-01, 2.4631e-01, + 8.7986e-01, 8.0195e-01, 8.7098e-01, 2.9181e-03, + 7.6462e-01, 5.1701e-01, 7.4836e-01, 7.1422e-01, + 8.9158e-01, 3.2634e-02, 8.3780e-01, 5.0178e-01, + 4.6650e-01, 5.6664e-01, 6.5009e-01, 9.8255e-01, + 6.8280e-01, 2.3946e-01, 7.1952e-01, 9.1209e-01, + 3.5961e-01, 9.3318e-01, 1.5645e-01, 2.3178e-01, + 8.6692e-01, 6.9005e-02, 9.0529e-01, 8.7445e-01, + 9.5951e-01, 2.5697e-01, 4.2690e-01, 6.9626e-02, + 6.0911e-01, 6.1096e-01, 8.6617e-01, 2.4256e-01, + 6.3662e-02, 7.6996e-01, 6.5914e-01, 4.8079e-01, + 6.3483e-01, 5.9418e-02, 4.1139e-02, 2.5581e-01, + 1.7531e-01, 9.5334e-02, 5.9598e-01, 3.8733e-02, + 3.9532e-01, 8.7378e-01, 8.9086e-01, 3.2202e-01, + 8.3359e-01, 5.7557e-01, 1.1533e-01, 8.6420e-01, + 9.9545e-01, 9.1420e-01, 2.6416e-02, 4.2307e-01, + 1.0066e-01, 
2.9426e-01, 7.9233e-01, 1.5421e-01, + 4.8560e-01, 4.4030e-01, 8.4016e-01, 9.6702e-01, + 9.1754e-01, 9.1238e-01, 7.7488e-01, 6.8367e-01, + 1.0599e-01, 2.6778e-01, 2.2029e-02, 5.1490e-01, + 9.9870e-01, 5.4678e-01, 8.7014e-01, 3.4645e-01, + 5.0016e-01, 8.9480e-01, 1.4469e-02, 5.5451e-01, + 6.7168e-01, 9.0138e-01, 3.6657e-01, 7.0062e-01, + 8.9909e-01, 9.1201e-01, 4.7640e-01, 5.9457e-01, + 8.9323e-01, 3.4244e-01, 8.7984e-01, 2.3517e-01, + 8.4980e-01, 2.1681e-01, 1.9758e-04, 6.9511e-02, + 3.8274e-02, 9.4853e-02, 9.0618e-01, 3.0139e-01, + 8.9355e-01, 4.6760e-01, 1.4096e-01, 8.2591e-01, + 4.4598e-01, 2.8877e-01, 6.7195e-01, 7.5599e-01, + 2.9840e-01, 7.8140e-01, 8.9954e-02, 9.2088e-01, + 8.5295e-01, 8.9580e-01, 2.7642e-01, 5.8053e-01, + 8.9167e-01, 2.7810e-01, 5.9503e-02, 8.0781e-04, + 7.4389e-01, 8.9743e-01, 3.2512e-01, 3.7630e-01, + 2.6254e-01, 1.9455e-01, 3.5616e-01, 7.2963e-01, + 3.7282e-01, 7.7497e-01, 3.2324e-01, 7.8404e-01, + 8.8770e-01, 4.7263e-01, 3.2362e-02, 7.4668e-01, + 8.1883e-01, 1.7464e-01, 2.2685e-01, 3.3782e-01, + 9.1224e-01, 7.2529e-01, 7.6500e-01, 1.8841e-01, + 9.3134e-01, 2.8109e-01, 9.5078e-01, 8.8750e-01, + 9.3460e-01, 4.2478e-01, 4.2110e-01, 1.9916e-01, + 4.8994e-01, 3.5883e-01, 3.5431e-01, 8.8017e-01, + 7.2542e-01, 6.4231e-01, 8.1128e-02, 1.0298e-01, + 8.6559e-01, 4.0653e-01, 6.4333e-01, 4.8698e-01, + 3.4835e-02, 4.2394e-01, 5.8007e-01, 9.5857e-01, + 8.7677e-01, 4.8312e-01, 1.3213e-01, 4.5485e-01, + 2.1528e-01, 9.2603e-01, 2.3899e-01, 7.8235e-01, + 7.8966e-01, 1.7745e-01, 7.1781e-01, 2.3690e-01, + 4.7774e-03, 3.0594e-01, 8.0674e-01, 7.3595e-01, + 8.4502e-02, 5.5691e-01, 2.5024e-01, 6.9358e-02, + 4.9706e-01, 4.3730e-01, 7.8166e-01, 1.6935e-01, + 1.6508e-01, 2.7327e-01, 9.0574e-01, 5.2528e-01, + 8.9068e-01, 7.8645e-02, 7.2571e-01, 3.2557e-01, + 7.0667e-02, 9.4967e-01, 6.3316e-01, 2.9000e-02, + 3.4708e-01, 6.6396e-01, 9.0666e-01, 3.5206e-01, + 6.7177e-01, 5.7557e-01, 1.8779e-01, 6.9343e-01, + 3.9372e-01, 5.9109e-01, 6.7368e-01, 2.2778e-01, + 7.1807e-01, 5.4674e-01, 5.3182e-01, 2.0904e-01, + 8.4413e-01, 6.0459e-01, 1.1344e-01, 7.5947e-01, + 1.0212e-01, 5.0148e-01, 4.2838e-01, 9.3970e-01, + 1.0145e-01, 8.1058e-01, 2.0145e-01, 6.5984e-01, + 8.4074e-01, 2.9148e-01, 8.8836e-01, 3.5675e-01, + 3.3781e-03, 3.4694e-01, 8.8142e-01, 1.2057e-01, + 7.5534e-01, 6.6374e-01, 2.9146e-01, 4.9017e-01, + 7.9016e-01, 8.1144e-01, 9.4202e-01, 6.7169e-01, + 1.3500e-01, 4.9304e-01, 8.3060e-01, 3.7731e-02, + 9.5146e-01, 3.9467e-01, 7.1969e-01, 3.7580e-01, + 8.3433e-01, 1.5332e-01, 1.0273e-01, 8.3539e-01, + 8.6309e-01, 9.1852e-01, 1.6511e-01, 4.7218e-01, + 1.6903e-02, 3.5493e-01, 1.1011e-01, 2.2466e-01, + 6.0511e-01, 4.2601e-01, 2.6126e-01, 8.0575e-01, + 9.0862e-01, 4.9018e-01, 5.4674e-01, 4.7682e-01, + 4.1996e-01, 8.2935e-01, 4.3385e-01, 3.4773e-01, + 3.3128e-01, 4.9912e-01, 4.9471e-02, 6.4172e-01, + 1.1662e-01, 6.6203e-01, 9.8112e-02, 9.2938e-01, + 2.9718e-02, 9.8190e-01, 6.3086e-01, 8.7214e-01, + 5.3547e-01, 6.7579e-01, 2.0165e-02, 6.0576e-01, + 9.6922e-01, 7.6144e-01, 9.1006e-01, 1.4672e-02, + 9.6140e-01, 8.0479e-01, 7.1496e-01, 2.3834e-01, + 8.9398e-01, 9.0521e-01, 2.9766e-01, 6.9856e-01, + 8.5344e-03, 7.9688e-01, 8.0142e-01, 8.8869e-01, + 3.4227e-01, 2.1357e-01, 6.2048e-01, 7.9537e-01, + 6.0477e-01, 9.2039e-01, 7.4622e-01, 4.9977e-01, + 1.9398e-01, 6.2467e-01, 5.9633e-01, 1.5733e-01, + 4.9534e-01, 7.9245e-01, 3.2054e-01, 4.8535e-01, + 8.4652e-01, 3.1971e-01, 6.0611e-01, 7.5468e-01, + 1.6312e-01, 7.5539e-01, 4.2907e-01, 7.0602e-01, + 5.3821e-01, 3.9985e-01, 1.5314e-01, 8.6026e-01, + 2.4601e-01, 
6.7286e-01, 5.1079e-01, 4.1859e-02, + 3.8473e-01, 5.5873e-01, 8.3452e-01, 1.1368e-01, + 9.2685e-01, 9.1260e-01, 9.7249e-02, 8.4021e-02, + 8.4798e-01, 3.0291e-01, 6.0838e-01, 9.6725e-01, + 7.0402e-01, 2.9535e-01, 5.4417e-01, 4.6542e-01, + 3.7483e-01, 6.3094e-01, 4.0316e-01, 4.4070e-01, + 2.6808e-01, 2.7701e-01, 2.6057e-01, 5.9947e-02, + 8.0296e-01, 7.8043e-02, 9.0563e-01, 6.3495e-01, + 4.2526e-01, 7.3881e-01, 1.5869e-01, 8.4296e-01, + 4.3391e-01, 9.4310e-01, 6.8542e-01, 8.4341e-01, + 8.6949e-01, 4.4998e-01, 4.4610e-01, 4.2581e-01, + 3.2566e-01, 2.1422e-01, 5.7861e-01, 5.4453e-01, + 9.0283e-01, 4.8108e-01, 2.2328e-01, 2.1700e-01, + 9.5599e-03, 3.6690e-01, 8.1444e-01, 1.1487e-01, + 7.0918e-01, 6.1090e-02, 9.5540e-02, 7.4000e-01, + 1.0984e-01, 4.2988e-01, 2.5680e-01, 9.2592e-01, + 2.5509e-02, 8.1388e-01, 3.2157e-01, 5.0467e-01, + 8.2294e-01, 4.5031e-03, 5.7156e-02, 9.7650e-01, + 6.1624e-01, 9.8700e-01, 3.0159e-01, 4.4716e-02, + 7.0071e-01, 2.2741e-01, 5.8226e-01, 4.6503e-01, + 6.9925e-01, 6.0856e-01, 8.4729e-01, 5.2405e-01, + 6.2983e-01, 8.6326e-01, 2.6933e-01, 7.9482e-01, + 1.5775e-01, 1.7148e-02, 2.8225e-01, 7.3105e-01, + 5.6952e-01, 1.8160e-01, 1.4063e-01, 9.5017e-02, + 9.1195e-01, 1.8641e-01, 3.3720e-01, 2.9270e-02, + 8.3750e-01, 2.7987e-01, 8.9777e-01, 4.6594e-01, + 6.5160e-01, 2.3072e-01, 1.4245e-01, 3.3501e-01, + 1.5265e-01, 4.9406e-01, 3.3783e-01, 1.1797e-01, + 7.6807e-01, 8.7067e-01, 5.7038e-01, 6.1121e-01, + 8.2708e-01, 7.8141e-01, 5.3589e-01, 3.3598e-01, + 6.1544e-01, 5.4266e-01, 7.0345e-01, 3.6353e-01, + 2.7923e-01, 9.6844e-01, 8.3587e-01, 5.7945e-01, + 7.7723e-01, 1.0070e-01, 4.2058e-01, 6.5976e-01, + 3.6488e-01, 6.2612e-01, 6.4766e-01, 3.5445e-01, + 7.8372e-01, 1.1738e-01, 4.7597e-01, 3.8343e-01, + 9.1960e-01, 1.5316e-01, 3.7295e-01, 7.9212e-01, + 4.4000e-01, 6.4553e-01, 6.4485e-01, 5.7470e-01, + 1.9268e-01, 2.6403e-02, 5.3412e-01, 2.2108e-02]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4288, 0.6749, 0.2273, ..., 0.2100, 0.8485, 0.9690]) +tensor([0.4069, 0.2647, 0.7465, ..., 0.2206, 0.0986, 0.2189]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -647,271 +754,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 5.085230827331543 seconds +Time: 5.103281259536743 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 141552 -ss 10000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.798607587814331} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 140907 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.631132364273071} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([3726, 7095, 4684, 2020, 836, 1255, 7107, 2910, 7765, - 3618, 8004, 8576, 4555, 2122, 3631, 3042, 6509, 854, - 5117, 7246, 7468, 4038, 6190, 4788, 6872, 1487, 1270, - 5894, 3327, 4557, 2868, 4373, 8003, 1239, 2993, 4213, - 4878, 8600, 9710, 4771, 8192, 5937, 3800, 5340, 2757, - 7379, 410, 167, 7636, 1048, 2438, 5415, 3035, 8972, - 8984, 1069, 8135, 9320, 5730, 3547, 7645, 6319, 8708, - 4794, 9059, 642, 937, 2015, 2851, 401, 5086, 3408, - 3671, 8044, 1220, 336, 7855, 7629, 930, 2750, 1987, - 9765, 3121, 9993, 829, 9850, 1435, 2979, 5489, 3796, - 4432, 9879, 9991, 2308, 6643, 3959, 9751, 638, 1357, - 4879, 6697, 7016, 2005, 3140, 2355, 1476, 7190, 9157, - 586, 8925, 8359, 7415, 6315, 5275, 7818, 5569, 2433, - 2192, 6764, 8455, 1408, 5314, 6201, 4055, 2442, 1110, - 8696, 5368, 8279, 443, 7835, 3502, 7362, 1330, 775, - 9823, 1517, 4273, 5726, 452, 5679, 1189, 861, 4804, - 3949, 9279, 5272, 3517, 1449, 1605, 1487, 9465, 4077, - 1506, 7136, 8000, 3536, 5423, 6093, 3644, 4106, 578, - 4413, 8078, 3736, 1232, 8907, 611, 6360, 4917, 5521, - 8890, 8861, 4942, 8337, 4548, 4613, 1027, 8700, 6909, - 958, 3790, 2989, 8565, 911, 5630, 6072, 1354, 9245, - 1994, 8552, 1111, 4146, 7268, 4683, 8105, 4378, 9360, - 6653, 8808, 5006, 4328, 5559, 3943, 4417, 7458, 2312, - 116, 5416, 6906, 1303, 5319, 7254, 8596, 3357, 6527, - 8398, 8704, 8210, 823, 4023, 1928, 9697, 2295, 5245, - 8970, 1573, 3796, 4232, 3566, 6537, 4098, 575, 7372, - 366, 6301, 8492, 2459, 7103, 5364, 5367, 4901, 9480, - 278, 5400, 7252, 9075, 5325, 1810, 7648, 324, 2329, - 8, 3227, 8008, 8213, 741, 1909, 4083, 9465, 4927, - 4913, 2356, 2450, 8390, 5767, 5848, 2748, 9467, 3514, - 4253, 2231, 1205, 4867, 6627, 4336, 5974, 7755, 3790, - 221, 6230, 4840, 1706, 8664, 7040, 1728, 4302, 8186, - 7438, 2847, 378, 6914, 377, 2565, 6264, 2119, 4808, - 8605, 8197, 5489, 4175, 5779, 6350, 3342, 6552, 3810, - 3380, 2149, 6531, 4018, 9838, 7109, 6420, 2314, 8276, - 8549, 3270, 8204, 1510, 99, 9666, 881, 9634, 6348, - 5206, 5717, 6256, 9140, 2894, 55, 7420, 6909, 8907, - 9934, 7455, 256, 6416, 3264, 139, 9653, 9104, 120, - 4401, 5358, 2182, 3677, 5160, 5409, 8367, 6563, 4808, - 6260, 2681, 8624, 9491, 9352, 8644, 896, 2705, 7108, - 2245, 6339, 6089, 2552, 2272, 1181, 6435, 563, 8137, - 1459, 7439, 2939, 7035, 867, 6590, 8094, 5026, 4423, - 4770, 7804, 7887, 761, 2953, 9927, 2073, 9826, 6475, - 9197, 874, 7559, 607, 1174, 7682, 3265, 6020, 6386, - 5306, 8737, 3204, 8378, 33, 4395, 8617, 1260, 266, - 2349, 7503, 7582, 3161, 3035, 9262, 8314, 2986, 5564, - 2126, 6464, 7360, 5226, 3325, 1629, 8069, 7692, 7794, - 4182, 2653, 7141, 4784, 5328, 9808, 7261, 1363, 8658, - 9123, 3810, 2620, 9643, 1362, 3134, 7384, 7296, 5209, - 6036, 7751, 7232, 736, 6172, 4771, 6452, 9215, 8373, - 5885, 7654, 3268, 8433, 3826, 1191, 4084, 6339, 3322, - 2278, 9573, 752, 1314, 9267, 6354, 6143, 35, 7253, - 1096, 5121, 7153, 7760, 1966, 6253, 3182, 9955, 1848, - 5514, 2387, 9249, 933, 7029, 186, 2702, 6241, 8466, - 1489, 3322, 5487, 5027, 3772, 1949, 4952, 2162, 9526, - 1901, 4241, 3525, 396, 3698, 5708, 9938, 9685, 4339, - 2948, 209, 6838, 1145, 5442, 7328, 5423, 6485, 1415, - 7421, 1724, 9251, 933, 6123, 4946, 5577, 7971, 9846, - 5274, 2640, 4773, 3064, 9692, 9463, 1802, 2781, 6650, - 4562, 991, 4382, 6448, 7016, 6327, 1852, 5951, 1025, - 7839, 4680, 3410, 4015, 1478, 3864, 5981, 6067, 988, - 2131, 7857, 2239, 375, 7733, 1730, 8801, 1787, 6496, - 1921, 
8121, 6115, 2130, 7431, 2124, 1900, 3400, 3093, - 2034, 8191, 8469, 3659, 2287, 1005, 9444, 6813, 6960, - 3470, 8573, 1717, 3039, 2558, 4596, 4880, 4472, 7262, - 5686, 6336, 2458, 2937, 9212, 6313, 6778, 8810, 9277, - 928, 1704, 6201, 8834, 7081, 7414, 5902, 2525, 6986, - 4360, 144, 652, 1234, 5719, 484, 2482, 5766, 4021, - 4006, 6231, 1726, 4378, 3881, 6930, 8631, 3608, 4246, - 75, 8644, 7232, 8461, 8874, 3157, 9402, 5888, 3476, - 3276, 9018, 5589, 8991, 2990, 7947, 2115, 8859, 6426, - 1953, 3665, 3323, 3802, 5811, 8129, 4154, 4205, 5918, - 2014, 5679, 7180, 635, 5363, 4653, 1897, 3249, 6982, - 678, 2068, 9858, 2498, 9298, 382, 644, 7370, 2418, - 8329, 5775, 8507, 7061, 9961, 4182, 287, 2359, 5919, - 8303, 3664, 2342, 4924, 2424, 8480, 1042, 4823, 3665, - 6553, 4427, 9234, 7736, 4966, 935, 6525, 8224, 4229, - 311, 9329, 7173, 6315, 1432, 9741, 5248, 4687, 1042, - 3524, 3804, 6875, 7125, 7161, 3553, 195, 2516, 9006, - 3709, 8580, 57, 2078, 1413, 9823, 6439, 5603, 3945, - 4348, 2406, 4610, 6846, 5704, 6321, 9398, 4952, 1295, - 738, 1206, 9447, 1290, 7633, 6389, 2334, 2897, 916, - 3131, 3092, 1954, 7671, 2004, 8145, 1483, 4726, 1441, - 9782, 9165, 5535, 6096, 1612, 3414, 7082, 3936, 7625, - 116, 6121, 9652, 4539, 5938, 5687, 6276, 1101, 1633, - 6510, 9508, 5331, 5376, 9358, 2109, 2723, 7391, 4555, - 2365, 6412, 3706, 6937, 5899, 2966, 8411, 6231, 9569, - 2849, 6339, 4931, 6281, 2950, 4951, 973, 1306, 6662, - 8109, 5095, 9232, 9872, 443, 8647, 7506, 8104, 8756, - 9936, 686, 4823, 2682, 3740, 3137, 923, 9193, 8324, - 4020, 1627, 6261, 9840, 304, 4142, 1317, 5620, 8274, - 3594, 572, 272, 1611, 5193, 347, 416, 442, 6874, - 8019, 1732, 1301, 6279, 8538, 2543, 4932, 6155, 3059, - 3059, 4499, 9216, 1298, 4595, 9507, 1178, 62, 1121, - 2558, 3614, 1585, 9579, 2799, 9710, 2223, 2927, 6219, - 1930, 1468, 3790, 7212, 2706, 1924, 107, 5637, 3603, - 8882, 6287, 8286, 8516, 3947, 4130, 4658, 4839, 7052, - 5285, 9433, 2234, 8542, 478, 1484, 8586, 9538, 899, - 6365, 3422, 1846, 4728, 2876, 7973, 1396, 5267, 8176, - 201, 8298, 5151, 9727, 4317, 6828, 3372, 2733, 3465, - 1632, 1856, 2211, 4987, 6790, 3737, 9878, 8936, 3754, - 4531, 7583, 4001, 4220, 5379, 7676, 9403, 5140, 5873, - 7088, 8903, 2296, 2171, 9394, 1645, 9743, 4507, 9272, - 2477, 8320, 7982, 3456, 5868, 9002, 5854, 753, 6254, - 29, 7699, 1129, 8991, 4514, 3222, 4361, 7378, 3047, - 9627, 6076, 4663, 3561, 5068, 9330, 6752, 8500, 1957, - 9072, 5138, 8731, 1067, 4146, 1100, 4646, 1538, 8687, - 8992, 6143, 9364, 7127, 9769, 2434, 7246, 2305, 7691, - 4140, 198, 608, 1773, 6904, 7875, 2110, 7719, 282, - 4734, 4064, 2354, 1595, 7483, 992, 7146, 9086, 2639, - 12, 4356, 2592, 9295, 444, 3493, 9301, 5028, 8522, - 1719]), - values=tensor([0.8600, 0.5616, 0.7072, 0.3175, 0.5762, 0.8898, 0.7613, - 0.6444, 0.1843, 0.4299, 0.9701, 0.9160, 0.9531, 0.4047, - 0.1402, 0.4728, 0.0686, 0.5538, 0.1157, 0.4298, 0.8825, - 0.1354, 0.8143, 0.5913, 0.0480, 0.3359, 0.5316, 0.6111, - 0.8620, 0.8841, 0.0457, 0.2228, 0.0040, 0.0383, 0.6512, - 0.9426, 0.9603, 0.6779, 0.3036, 0.2358, 0.8460, 0.4662, - 0.3348, 0.0817, 0.8411, 0.0542, 0.1380, 0.3042, 0.1998, - 0.2999, 0.4133, 0.8056, 0.7595, 0.1627, 0.9131, 0.7276, - 0.6625, 0.1656, 0.8899, 0.5354, 0.8460, 0.1291, 0.5991, - 0.0805, 0.4357, 0.4936, 0.4524, 0.3191, 0.7007, 0.0375, - 0.6902, 0.4198, 0.3498, 0.7962, 0.5312, 0.3669, 0.5804, - 0.4017, 0.9393, 0.3637, 0.5425, 0.4731, 0.0877, 0.9094, - 0.7214, 0.9414, 0.9950, 0.2474, 0.5511, 0.8001, 0.0442, - 0.9555, 0.3790, 0.0597, 0.0816, 0.0893, 0.0727, 0.0855, - 0.8418, 0.0900, 0.4889, 
0.1537, 0.1078, 0.3076, 0.1132, - 0.3499, 0.2464, 0.3119, 0.7397, 0.5203, 0.7296, 0.3076, - 0.0908, 0.5180, 0.8820, 0.2955, 0.7721, 0.2443, 0.4412, - 0.0643, 0.1279, 0.8697, 0.6469, 0.3773, 0.3600, 0.5487, - 0.3518, 0.6432, 0.2385, 0.7556, 0.7224, 0.2014, 0.8943, - 0.1852, 0.9558, 0.6299, 0.3671, 0.9896, 0.6391, 0.1768, - 0.0298, 0.7972, 0.7124, 0.7051, 0.9680, 0.5269, 0.0532, - 0.0232, 0.6522, 0.9276, 0.9199, 0.4637, 0.4411, 0.9074, - 0.0961, 0.7283, 0.8054, 0.0583, 0.2110, 0.5960, 0.2328, - 0.9165, 0.5817, 0.5985, 0.0950, 0.4392, 0.8056, 0.8382, - 0.3675, 0.5339, 0.7866, 0.5051, 0.9951, 0.7743, 0.5281, - 0.2536, 0.4500, 0.8310, 0.7085, 0.2637, 0.3306, 0.5037, - 0.3989, 0.2457, 0.9774, 0.4426, 0.8629, 0.9169, 0.4122, - 0.5182, 0.4459, 0.2392, 0.3915, 0.8545, 0.1335, 0.2459, - 0.0435, 0.8607, 0.2092, 0.1152, 0.2264, 0.8530, 0.0610, - 0.5909, 0.8510, 0.9060, 0.1898, 0.9730, 0.5210, 0.5104, - 0.4607, 0.9295, 0.4482, 0.7724, 0.0968, 0.9352, 0.3537, - 0.8985, 0.5128, 0.0067, 0.2582, 0.7387, 0.7979, 0.7165, - 0.3443, 0.7560, 0.5700, 0.8397, 0.1771, 0.8402, 0.3091, - 0.9997, 0.1542, 0.0685, 0.8646, 0.3574, 0.0226, 0.7519, - 0.6524, 0.0072, 0.1165, 0.5114, 0.0917, 0.6314, 0.9212, - 0.5712, 0.7090, 0.0393, 0.1201, 0.8493, 0.2898, 0.8259, - 0.4117, 0.6401, 0.8024, 0.9462, 0.0945, 0.6655, 0.6495, - 0.4999, 0.2014, 0.1871, 0.0303, 0.7432, 0.6428, 0.9133, - 0.2792, 0.0556, 0.1388, 0.0015, 0.6331, 0.2758, 0.8367, - 0.8769, 0.0407, 0.6240, 0.7762, 0.1332, 0.5250, 0.1198, - 0.5519, 0.3094, 0.1346, 0.0647, 0.4742, 0.5731, 0.8318, - 0.2796, 0.5726, 0.3119, 0.9865, 0.9941, 0.5736, 0.5420, - 0.2171, 0.5354, 0.9426, 0.6173, 0.1128, 0.0283, 0.7768, - 0.9444, 0.6839, 0.6100, 0.7668, 0.0394, 0.8433, 0.0499, - 0.7761, 0.2587, 0.7427, 0.8822, 0.5529, 0.7552, 0.8520, - 0.8179, 0.9600, 0.0466, 0.5899, 0.0884, 0.9116, 0.1822, - 0.4571, 0.9885, 0.1750, 0.1838, 0.7352, 0.3448, 0.7858, - 0.2306, 0.7624, 0.8788, 0.3283, 0.6481, 0.6658, 0.3363, - 0.3090, 0.0330, 0.1595, 0.9700, 0.3235, 0.4224, 0.0628, - 0.0681, 0.2514, 0.1198, 0.5855, 0.6518, 0.4456, 0.5212, - 0.5722, 0.1297, 0.2828, 0.7370, 0.0325, 0.1233, 0.8781, - 0.1529, 0.3239, 0.4868, 0.4461, 0.0181, 0.9715, 0.6327, - 0.0701, 0.9788, 0.7771, 0.3935, 0.2992, 0.4526, 0.2252, - 0.9195, 0.1472, 0.6317, 0.3246, 0.5631, 0.7653, 0.3490, - 0.2479, 0.8149, 0.9333, 0.5535, 0.7152, 0.7420, 0.8502, - 0.5933, 0.4264, 0.3490, 0.4412, 0.9800, 0.1953, 0.5818, - 0.4513, 0.1650, 0.3902, 0.4948, 0.7564, 0.9652, 0.1048, - 0.4943, 0.8495, 0.3903, 0.7487, 0.4664, 0.0427, 0.1690, - 0.7279, 0.3082, 0.6150, 0.2912, 0.2094, 0.9971, 0.3723, - 0.7357, 0.3658, 0.4009, 0.9999, 0.6043, 0.9117, 0.7334, - 0.5824, 0.0506, 0.7330, 0.0230, 0.4406, 0.0807, 0.6942, - 0.2245, 0.2485, 0.5748, 0.8330, 0.7840, 0.7060, 0.2348, - 0.0035, 0.5364, 0.0187, 0.6599, 0.9337, 0.6053, 0.1831, - 0.4356, 0.0679, 0.0908, 0.3894, 0.8661, 0.9286, 0.5274, - 0.2198, 0.1337, 0.5968, 0.2616, 0.9505, 0.9894, 0.6963, - 0.4705, 0.2413, 0.0355, 0.0514, 0.8694, 0.1783, 0.6758, - 0.2101, 0.3192, 0.4910, 0.5302, 0.4899, 0.5359, 0.1925, - 0.2343, 0.0188, 0.3525, 0.6944, 0.1704, 0.1291, 0.7572, - 0.5869, 0.4575, 0.5402, 0.3083, 0.7036, 0.7430, 0.3675, - 0.5465, 0.0686, 0.3712, 0.7453, 0.8458, 0.5033, 0.3961, - 0.8154, 0.6781, 0.6892, 0.5106, 0.8864, 0.2284, 0.3130, - 0.1335, 0.9835, 0.3056, 0.3963, 0.3066, 0.8333, 0.1173, - 0.1717, 0.1906, 0.5577, 0.8909, 0.5702, 0.9202, 0.0098, - 0.9034, 0.1638, 0.7871, 0.0602, 0.1046, 0.7108, 0.1792, - 0.5907, 0.9144, 0.7471, 0.9590, 0.3509, 0.9697, 0.2362, - 0.7800, 0.2561, 0.6928, 0.6592, 0.6715, 
0.3954, 0.9109, - 0.8543, 0.2309, 0.9931, 0.3675, 0.9653, 0.9348, 0.8702, - 0.8780, 0.0545, 0.0095, 0.1323, 0.2713, 0.1104, 0.0360, - 0.7644, 0.3254, 0.2794, 0.7010, 0.6247, 0.7531, 0.2093, - 0.4077, 0.7399, 0.9364, 0.9070, 0.3612, 0.1375, 0.2106, - 0.6888, 0.0838, 0.3848, 0.1242, 0.2979, 0.7983, 0.3405, - 0.6686, 0.9463, 0.7854, 0.8071, 0.4226, 0.2682, 0.4134, - 0.9403, 0.2631, 0.8116, 0.3477, 0.7616, 0.0356, 0.3800, - 0.4796, 0.9664, 0.9730, 0.3146, 0.3811, 0.3444, 0.8364, - 0.4593, 0.2380, 0.3529, 0.8631, 0.8304, 0.7458, 0.2703, - 0.8323, 0.2629, 0.9141, 0.3840, 0.6975, 0.0508, 0.9536, - 0.3777, 0.3904, 0.8044, 0.1828, 0.1908, 0.1226, 0.5426, - 0.6712, 0.1985, 0.0131, 0.9193, 0.7070, 0.7883, 0.2011, - 0.5491, 0.4025, 0.3145, 0.0253, 0.2556, 0.4276, 0.9673, - 0.4027, 0.7313, 0.3000, 0.8244, 0.6521, 0.0329, 0.9128, - 0.1532, 0.7920, 0.6837, 0.1740, 0.6020, 0.4749, 0.1878, - 0.6918, 0.3430, 0.8863, 0.0954, 0.2221, 0.0196, 0.9170, - 0.2529, 0.8027, 0.9234, 0.7784, 0.4154, 0.7635, 0.3669, - 0.0588, 0.6705, 0.6605, 0.4654, 0.5060, 0.1257, 0.2869, - 0.1042, 0.7987, 0.7289, 0.1239, 0.7381, 0.3130, 0.2067, - 0.2775, 0.8959, 0.6030, 0.6860, 0.7855, 0.9485, 0.1748, - 0.7731, 0.6068, 0.3676, 0.3474, 0.3768, 0.8882, 0.3443, - 0.2956, 0.9448, 0.4045, 0.2381, 0.2525, 0.2118, 0.1224, - 0.6778, 0.2811, 0.1046, 0.4058, 0.6707, 0.0635, 0.5605, - 0.3192, 0.2209, 0.6866, 0.2181, 0.4822, 0.4068, 0.9604, - 0.5810, 0.0649, 0.5097, 0.1731, 0.2222, 0.3090, 0.8573, - 0.2342, 0.2229, 0.3303, 0.5365, 0.1241, 0.0313, 0.7432, - 0.3762, 0.5850, 0.5199, 0.0453, 0.7767, 0.0550, 0.5852, - 0.6872, 0.3713, 0.6670, 0.1341, 0.8513, 0.2874, 0.5203, - 0.7337, 0.0145, 0.5286, 0.0318, 0.0106, 0.8079, 0.5923, - 0.1886, 0.8415, 0.1474, 0.3036, 0.7165, 0.9739, 0.2050, - 0.0209, 0.3028, 0.8424, 0.4722, 0.9670, 0.8856, 0.7059, - 0.8881, 0.4477, 0.3433, 0.8833, 0.6854, 0.9979, 0.4134, - 0.6004, 0.9236, 0.5985, 0.8387, 0.3869, 0.9226, 0.8476, - 0.2815, 0.5644, 0.8983, 0.8519, 0.4415, 0.5941, 0.4933, - 0.3941, 0.0630, 0.8087, 0.5422, 0.7823, 0.0794, 0.7566, - 0.8197, 0.3825, 0.5699, 0.9195, 0.6894, 0.8052, 0.9060, - 0.8181, 0.6439, 0.3874, 0.8570, 0.7838, 0.5526, 0.0770, - 0.5884, 0.8769, 0.0709, 0.5994, 0.2015, 0.1734, 0.9822, - 0.8660, 0.0374, 0.8196, 0.5480, 0.3620, 0.6184, 0.3047, - 0.1654, 0.6492, 0.4554, 0.0477, 0.9808, 0.2615, 0.3836, - 0.1987, 0.6319, 0.9715, 0.7191, 0.5039, 0.7992, 0.8664, - 0.0815, 0.4937, 0.8132, 0.9455, 0.3586, 0.8318, 0.8930, - 0.1422, 0.4286, 0.9237, 0.7140, 0.1449, 0.5233, 0.9544, - 0.6412, 0.0940, 0.5388, 0.3539, 0.9951, 0.5860, 0.5837, - 0.0540, 0.2181, 0.6021, 0.1546, 0.1818, 0.6251, 0.6991, - 0.5441, 0.4257, 0.3883, 0.4987, 0.7504, 0.8178, 0.0667, - 0.3423, 0.4631, 0.0636, 0.6995, 0.6845, 0.1335, 0.5769, - 0.3366, 0.8741, 0.7448, 0.5158, 0.4520, 0.7964, 0.5429, - 0.3853, 0.3489, 0.7282, 0.3002, 0.4892, 0.4084, 0.7785, - 0.6224, 0.6482, 0.1360, 0.3743, 0.3204, 0.3293, 0.5975, - 0.8635, 0.1468, 0.1420, 0.1950, 0.5502, 0.3099, 0.4665, - 0.2473, 0.9781, 0.4882, 0.8615, 0.4379, 0.3304, 0.9224, - 0.6375, 0.4835, 0.7192, 0.6721, 0.7342, 0.5743, 0.9239, - 0.9773, 0.2213, 0.3478, 0.7032, 0.0574, 0.8041, 0.3853, - 0.1147, 0.0390, 0.9320, 0.5858, 0.0975, 0.5982, 0.1467, - 0.9118, 0.4835, 0.9183, 0.3489, 0.0389, 0.2553, 0.5860, - 0.2665, 0.6450, 0.3179, 0.5337, 0.7581, 0.4409, 0.1177, - 0.0512, 0.8850, 0.2142, 0.1547, 0.5876, 0.8678, 0.5430, - 0.4686, 0.4656, 0.5329, 0.4015, 0.3146, 0.2257, 0.1820, - 0.9287, 0.0585, 0.6678, 0.0868, 0.7648, 0.2970, 0.6893, - 0.7312, 0.6106, 0.1958, 0.8679, 0.9976, 0.5849, 0.7869, 
- 0.3363, 0.5231, 0.9619, 0.1567, 0.1143, 0.9307, 0.2825, - 0.3303, 0.5892, 0.7606, 0.7858, 0.0785, 0.3935, 0.0941, - 0.7542, 0.7552, 0.7909, 0.6337, 0.4503, 0.8151, 0.1544, - 0.0385, 0.1762, 0.7871, 0.9429, 0.7065, 0.2556, 0.7752, - 0.3810, 0.5819, 0.5096, 0.6816, 0.5826, 0.0960, 0.1244, - 0.3464, 0.1206, 0.8110, 0.0102, 0.2242, 0.3161]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), + col_indices=tensor([2638, 4549, 5551, 2117, 4942, 9438, 9207, 6102, 6113, + 5097, 4681, 4774, 1006, 5714, 544, 8676, 4376, 3736, + 8577, 2814, 4988, 6404, 8803, 1894, 1225, 2662, 6498, + 474, 7502, 3741, 1251, 8621, 3601, 2924, 4778, 6508, + 8933, 6759, 4360, 1296, 9451, 5379, 2580, 1056, 6875, + 4437, 1085, 2249, 7387, 3599, 6972, 7834, 9715, 7328, + 9763, 5133, 813, 9023, 2249, 27, 3432, 3721, 9617, + 1590, 7696, 56, 5980, 9100, 4781, 77, 5788, 3266, + 5149, 2548, 3482, 8983, 3044, 7387, 7570, 4435, 738, + 926, 5784, 1972, 982, 8367, 9033, 5243, 9717, 9376, + 7878, 1215, 6076, 5447, 8437, 7705, 917, 2939, 9446, + 9814, 4295, 9541, 589, 1385, 8969, 2091, 9768, 3150, + 2282, 7241, 6389, 594, 5984, 3290, 5621, 3106, 7389, + 1549, 9798, 483, 161, 2192, 9062, 7275, 3614, 3964, + 9168, 9147, 8052, 1337, 423, 4173, 259, 6709, 4077, + 346, 597, 2400, 9499, 1392, 3670, 7737, 4953, 2762, + 3925, 5710, 8086, 6513, 3032, 504, 3777, 5879, 6044, + 9260, 3763, 3127, 963, 1869, 9765, 1796, 435, 5600, + 5627, 9102, 3333, 8833, 301, 7374, 7928, 7149, 6664, + 9548, 354, 7834, 2447, 5726, 4946, 4759, 5625, 2130, + 8779, 9304, 7900, 7798, 965, 31, 5611, 84, 7073, + 6207, 1689, 4705, 5870, 9222, 1708, 6221, 1677, 1776, + 2545, 5442, 3654, 7124, 7411, 7264, 8352, 250, 8624, + 3373, 4378, 2496, 9847, 1473, 3096, 7950, 1600, 8490, + 4224, 1529, 3981, 7942, 6717, 5606, 1194, 9089, 2551, + 1975, 9279, 8460, 3058, 9478, 4262, 5240, 3169, 275, + 8839, 1527, 2001, 8946, 7691, 7384, 5098, 5785, 7893, + 8114, 8440, 5428, 2718, 1104, 6538, 2397, 6013, 2109, + 9439, 5088, 92, 5038, 5221, 729, 7397, 1491, 8841, + 411, 7003, 3812, 3353, 8669, 4897, 6943, 3833, 7041, + 7756, 6642, 2488, 660, 996, 2981, 7454, 7786, 980, + 1221, 4578, 2033, 9314, 2613, 4376, 2586, 5987, 7835, + 8216, 1913, 7481, 2665, 6049, 1239, 7388, 3281, 7709, + 1591, 3156, 1803, 3851, 2376, 6086, 5052, 4524, 2336, + 8715, 8344, 5852, 7207, 3167, 7908, 2225, 3206, 5863, + 5099, 1361, 3099, 8942, 4099, 6186, 5775, 6474, 7452, + 4562, 465, 7762, 6510, 536, 9242, 8125, 3782, 3904, + 5530, 2008, 8821, 8028, 8192, 7694, 3356, 1431, 2479, + 5529, 5205, 4156, 2387, 2402, 2740, 4691, 1092, 8112, + 179, 4166, 7791, 8923, 6912, 993, 4400, 7872, 1058, + 5287, 5705, 2995, 5492, 9979, 5923, 2908, 2631, 6491, + 7799, 3357, 8539, 6937, 6776, 8019, 6710, 5292, 4317, + 5324, 6939, 8323, 8297, 1302, 6950, 9139, 4923, 2869, + 4762, 3011, 3683, 5086, 3546, 5468, 7039, 5094, 685, + 1912, 8181, 7088, 1069, 5927, 3264, 9562, 7548, 5987, + 6296, 8000, 5529, 8254, 6063, 232, 632, 4123, 5683, + 4331, 3062, 4320, 4702, 422, 808, 9851, 5159, 4742, + 9696, 4849, 4939, 5560, 6715, 8514, 7411, 6753, 1479, + 6447, 9423, 2299, 746, 3126, 8944, 6474, 9238, 491, + 3663, 7658, 9643, 9359, 8097, 7891, 3910, 2281, 2498, + 8362, 9549, 8992, 8173, 2719, 6566, 3163, 3205, 6566, + 4769, 461, 7427, 9910, 1123, 8896, 596, 4564, 6952, + 6648, 8124, 5243, 7751, 7557, 7865, 955, 2635, 8267, + 931, 6662, 4121, 14, 6956, 2418, 5213, 1632, 9138, + 5125, 7816, 5857, 663, 9933, 7314, 8839, 2670, 5825, + 8248, 1854, 378, 5585, 2403, 179, 8217, 1377, 1515, + 5503, 703, 2836, 1663, 9359, 8473, 542, 7157, 2322, 
+ 2413, 2427, 69, 5467, 2965, 8435, 2568, 1445, 9874, + 1244, 3903, 9337, 5230, 2636, 8117, 1241, 5622, 6156, + 8074, 3395, 7868, 7595, 5963, 2485, 3144, 7917, 5995, + 166, 1965, 3368, 3505, 9895, 5435, 7614, 9297, 6947, + 2080, 7891, 6125, 7336, 6858, 5385, 4016, 4445, 5136, + 5376, 1116, 1115, 6672, 4261, 3458, 804, 4289, 5383, + 838, 6307, 4905, 9028, 4232, 5849, 9450, 3130, 6114, + 2811, 7246, 3562, 9672, 3081, 5993, 8682, 355, 1008, + 800, 3529, 1850, 5285, 8636, 9050, 6252, 2864, 8240, + 6668, 1243, 4693, 4257, 2053, 1439, 260, 4787, 554, + 3091, 1743, 3251, 1845, 7534, 9151, 8619, 5438, 7117, + 347, 3021, 9772, 1610, 1975, 3795, 6469, 2068, 5080, + 1912, 3486, 3533, 4972, 6311, 3709, 7121, 457, 1117, + 111, 106, 8099, 6628, 9452, 6539, 5386, 6312, 7232, + 3654, 1148, 6079, 1714, 2309, 4143, 3624, 3813, 8187, + 7477, 7185, 3345, 7237, 6387, 6466, 4935, 3446, 7809, + 533, 5015, 4608, 9916, 9577, 9588, 241, 7114, 7563, + 1553, 395, 7125, 3051, 9142, 5107, 5502, 5137, 3599, + 852, 4115, 2564, 4851, 5123, 2048, 1236, 9937, 7410, + 8132, 1152, 1528, 7683, 7131, 6592, 6457, 928, 5814, + 3559, 3379, 2378, 1111, 5109, 894, 6251, 9030, 7124, + 2881, 6077, 5197, 8530, 9054, 7424, 1949, 220, 2172, + 2545, 5621, 4772, 8756, 760, 5415, 6366, 598, 6818, + 4198, 6352, 4806, 9914, 4096, 4871, 5855, 2949, 6370, + 8519, 2947, 2383, 6716, 1419, 620, 9696, 8189, 1734, + 1617, 9194, 2156, 4648, 2570, 2697, 4094, 4909, 7651, + 8049, 5255, 8752, 5010, 2102, 9639, 3049, 7862, 3829, + 6831, 1725, 5211, 4265, 1268, 5022, 6549, 3396, 4993, + 823, 7710, 5486, 8054, 2421, 4386, 5787, 6383, 2870, + 3977, 650, 5270, 5527, 6328, 2739, 6786, 3390, 8530, + 7113, 787, 1039, 9842, 9695, 2938, 3630, 754, 9415, + 8080, 1030, 1497, 6446, 6175, 4422, 7964, 4649, 3655, + 3471, 8985, 3288, 8037, 9604, 5683, 3362, 3632, 375, + 6360, 7045, 5162, 4231, 7591, 5760, 23, 6358, 9455, + 5328, 6988, 4279, 5895, 7355, 6237, 9635, 6321, 4753, + 7272, 2201, 4494, 2414, 6780, 5222, 4451, 6904, 1207, + 7807, 5438, 3375, 6327, 5615, 9394, 5988, 9301, 2925, + 667, 3093, 7177, 5634, 3154, 4048, 6975, 1088, 4111, + 1601, 5108, 2231, 1630, 3176, 7591, 7169, 9468, 9206, + 2238, 9386, 1558, 5452, 7827, 2881, 1563, 3103, 5205, + 1681, 1927, 7558, 690, 5697, 8917, 9114, 5288, 1097, + 5950, 2646, 6335, 4119, 8894, 7803, 6086, 279, 271, + 5870, 3974, 9831, 2830, 2865, 61, 8320, 8560, 7767, + 9220, 5213, 7576, 6987, 8627, 5376, 2233, 9175, 4429, + 7405, 3574, 4102, 7903, 8731, 3230, 8418, 4637, 1920, + 9702, 3725, 5427, 9976, 7818, 2118, 8475, 2958, 35, + 1080, 327, 9877, 4772, 5399, 3747, 2143, 1775, 4356, + 6442, 2415, 647, 3460, 9349, 9724, 7767, 1941, 4389, + 9102, 5108, 5065, 6550, 1907, 5152, 733, 6179, 2077, + 4737, 6361, 8596, 8587, 844, 2988, 3389, 8702, 8796, + 471, 9163, 952, 9619, 788, 7764, 9638, 5773, 925, + 9310, 1086, 664, 2124, 5101, 7887, 2417, 4568, 1699, + 8947, 5281, 4747, 4713, 2372, 336, 9298, 6123, 3691, + 3035, 9241, 7383, 8218, 9451, 8726, 5429, 8881, 3953, + 8147]), + values=tensor([3.5525e-01, 6.2390e-02, 3.1706e-01, 4.0180e-01, + 7.0767e-01, 6.0253e-01, 3.3377e-02, 9.4604e-01, + 7.0063e-01, 3.8853e-01, 9.2083e-01, 6.5516e-01, + 7.7974e-01, 6.5413e-01, 8.2971e-01, 9.8177e-01, + 4.2740e-01, 5.7567e-01, 5.1914e-01, 8.9527e-01, + 9.9707e-01, 3.8811e-01, 2.7807e-01, 7.3366e-01, + 8.6617e-01, 6.5039e-01, 5.7168e-01, 4.1089e-01, + 6.9600e-01, 4.6175e-01, 6.3599e-01, 6.6030e-01, + 6.3570e-01, 4.4529e-01, 1.5356e-01, 3.7506e-01, + 3.0920e-01, 5.5182e-02, 2.7623e-02, 9.4146e-01, + 1.4998e-01, 9.2834e-01, 4.6332e-01, 1.4701e-01, + 
7.3849e-01, 3.4665e-01, 1.2101e-01, 3.8189e-01, + 5.3427e-01, 9.6812e-01, 4.5313e-01, 5.1369e-01, + 2.9337e-01, 3.4808e-01, 6.9042e-01, 2.8985e-01, + 3.0274e-01, 7.1270e-01, 4.4209e-01, 2.5354e-01, + 4.3125e-01, 4.4832e-01, 8.4908e-01, 4.5862e-01, + 7.1769e-01, 2.8420e-01, 5.8850e-03, 1.1255e-01, + 5.9985e-01, 1.7373e-01, 8.0426e-01, 9.7592e-01, + 7.1575e-01, 6.3632e-01, 7.9312e-01, 2.0031e-01, + 7.6063e-01, 6.6472e-01, 2.2589e-01, 9.2208e-01, + 6.5396e-01, 4.2109e-02, 3.7447e-01, 7.6540e-01, + 7.0063e-01, 8.3329e-01, 8.2316e-01, 1.7633e-01, + 8.6842e-01, 5.3704e-01, 1.3609e-02, 5.6863e-02, + 2.1631e-01, 5.9704e-01, 9.5535e-01, 3.0130e-01, + 5.8427e-01, 3.1746e-01, 8.7151e-02, 3.9761e-01, + 5.7926e-01, 3.0026e-02, 1.7793e-01, 1.9793e-01, + 7.4937e-01, 5.0263e-01, 8.2545e-01, 1.1973e-01, + 1.6562e-01, 7.7594e-01, 1.8837e-01, 8.6153e-01, + 7.8911e-01, 1.3421e-01, 5.7230e-02, 3.1506e-01, + 2.1554e-01, 3.6549e-01, 2.4812e-01, 5.2062e-01, + 4.7621e-01, 7.0552e-04, 3.1203e-01, 5.0315e-01, + 5.5286e-01, 1.5342e-01, 1.0512e-01, 7.6972e-02, + 6.9395e-01, 4.3240e-01, 7.5458e-02, 5.8978e-01, + 9.2966e-01, 2.2644e-01, 6.4399e-01, 9.0790e-01, + 7.7642e-01, 3.1571e-01, 8.5384e-01, 1.8489e-01, + 6.1722e-01, 1.5912e-01, 4.4592e-01, 8.4061e-01, + 5.2576e-01, 7.0110e-01, 7.7925e-01, 3.9959e-01, + 4.5082e-01, 5.5213e-01, 9.8018e-01, 7.7799e-01, + 1.3578e-01, 3.4526e-01, 4.7119e-01, 8.5088e-01, + 6.7157e-01, 9.3544e-01, 1.5214e-01, 6.2251e-01, + 7.6667e-01, 1.7567e-01, 4.9737e-01, 5.1175e-01, + 7.1612e-01, 7.2408e-01, 6.9980e-01, 3.0419e-01, + 6.0054e-01, 5.7174e-01, 6.8748e-01, 9.4845e-01, + 7.1462e-01, 1.5120e-01, 8.6912e-01, 9.1935e-01, + 3.6111e-01, 8.1820e-02, 6.1956e-01, 3.4202e-03, + 1.2152e-01, 6.8458e-01, 3.0768e-01, 9.0789e-01, + 7.6720e-01, 2.7189e-01, 6.9353e-01, 7.9099e-01, + 2.3552e-01, 1.1454e-01, 9.8231e-01, 4.9996e-01, + 1.3841e-01, 3.5163e-02, 7.2178e-01, 6.3485e-01, + 8.3967e-01, 4.4897e-01, 4.2056e-01, 6.1844e-01, + 5.2028e-01, 8.0720e-01, 2.4333e-01, 4.6516e-01, + 6.6278e-02, 9.9295e-01, 4.4345e-01, 6.7626e-01, + 1.4199e-01, 1.9578e-01, 3.2340e-01, 1.2497e-01, + 5.4437e-01, 3.1542e-01, 7.3764e-02, 3.5269e-01, + 5.8521e-01, 1.5353e-01, 9.4268e-01, 5.1486e-01, + 3.0407e-01, 6.0438e-01, 7.0658e-01, 2.9060e-01, + 8.0022e-01, 6.5866e-01, 6.8525e-01, 8.8905e-01, + 6.6721e-01, 3.4904e-01, 7.5454e-01, 9.4064e-01, + 9.2511e-01, 1.8937e-01, 6.8604e-01, 9.1651e-01, + 8.0611e-01, 2.8921e-01, 2.2560e-01, 4.5702e-01, + 4.0794e-02, 8.9309e-02, 9.0514e-01, 6.4010e-02, + 6.6079e-01, 8.3236e-01, 2.3584e-01, 9.3562e-01, + 5.1196e-01, 5.5585e-01, 7.4674e-01, 5.3913e-01, + 2.9549e-01, 3.8633e-01, 3.6471e-01, 8.8689e-01, + 5.7123e-01, 7.8263e-01, 7.0740e-01, 2.3999e-01, + 5.2772e-01, 4.3146e-01, 9.3444e-01, 5.8832e-01, + 3.3716e-01, 6.2253e-01, 2.3752e-01, 1.5749e-01, + 6.0000e-01, 3.7743e-01, 2.2864e-01, 3.6329e-01, + 4.5749e-01, 1.3859e-01, 6.7955e-01, 5.2897e-01, + 1.2541e-01, 4.8079e-01, 2.3266e-02, 4.7930e-01, + 7.5775e-01, 9.2777e-01, 2.6071e-01, 2.2327e-02, + 4.6225e-01, 5.5258e-01, 5.2123e-01, 9.9821e-01, + 3.2558e-01, 4.7963e-01, 3.7063e-01, 1.5380e-01, + 6.6777e-01, 4.3100e-01, 5.5667e-01, 2.5923e-01, + 7.6729e-01, 9.5233e-02, 4.9658e-01, 9.4489e-02, + 5.8064e-01, 5.2373e-01, 7.1528e-01, 5.5713e-01, + 6.6993e-01, 7.5111e-01, 8.2473e-01, 6.7453e-01, + 4.3187e-01, 8.8628e-01, 6.6343e-01, 8.8953e-01, + 4.1874e-01, 9.5320e-01, 2.3026e-01, 3.3196e-01, + 7.5014e-01, 5.3870e-01, 6.6378e-01, 9.6080e-01, + 9.4847e-01, 8.7059e-01, 7.3370e-01, 4.4158e-01, + 7.1513e-02, 2.5418e-01, 6.0814e-01, 2.6452e-01, + 
2.8110e-01, 4.4729e-01, 9.4631e-01, 5.4748e-01, + 1.6969e-01, 8.8721e-01, 9.9136e-01, 1.2834e-01, + 8.8176e-03, 5.5180e-01, 3.6915e-01, 7.1940e-01, + 1.3341e-01, 8.9815e-01, 8.8533e-01, 6.5943e-01, + 1.3171e-01, 2.9590e-01, 8.2510e-01, 8.0570e-01, + 4.4295e-01, 4.3316e-01, 1.1916e-01, 5.3998e-01, + 3.2253e-01, 3.5158e-01, 2.3870e-02, 6.6068e-01, + 7.2138e-01, 4.1483e-02, 3.8432e-01, 5.5785e-01, + 7.7686e-01, 4.8200e-01, 7.1684e-01, 5.0659e-01, + 2.7888e-01, 2.3748e-01, 8.4027e-01, 8.3539e-01, + 4.7410e-01, 3.2015e-01, 5.7136e-01, 3.0491e-01, + 7.8725e-01, 1.0225e-01, 3.0280e-01, 7.0065e-01, + 3.6449e-02, 8.7209e-01, 8.0256e-02, 1.5990e-02, + 6.8739e-01, 8.2098e-02, 4.7788e-01, 7.1443e-01, + 5.4242e-01, 5.8476e-01, 4.0597e-01, 4.7993e-01, + 7.1985e-01, 5.6322e-01, 3.1445e-02, 2.0865e-01, + 1.5054e-01, 4.8874e-01, 7.9048e-01, 4.8559e-02, + 6.8106e-01, 5.5711e-02, 9.8975e-01, 2.7205e-01, + 2.5193e-01, 2.8619e-02, 1.6935e-01, 6.1505e-01, + 8.5667e-01, 5.6590e-01, 8.2641e-01, 7.4738e-01, + 4.7740e-01, 6.8425e-01, 1.6035e-01, 4.9346e-01, + 2.3684e-01, 5.1331e-01, 6.4406e-01, 6.9698e-01, + 7.8976e-02, 3.0208e-01, 7.3196e-01, 2.3264e-01, + 4.2798e-02, 2.2799e-01, 7.8877e-01, 8.7436e-01, + 3.7345e-01, 9.8637e-01, 9.6538e-01, 7.8811e-02, + 2.9433e-01, 3.7116e-01, 1.6586e-01, 8.1262e-01, + 7.1237e-01, 3.9317e-01, 4.0607e-01, 4.9944e-01, + 5.6036e-01, 5.3630e-01, 3.8482e-02, 7.0579e-02, + 1.5052e-02, 3.9458e-01, 4.9153e-01, 7.7399e-01, + 9.4132e-01, 9.8169e-01, 7.1758e-01, 2.2970e-01, + 4.1691e-01, 4.2223e-01, 8.8312e-01, 9.9968e-01, + 7.2897e-01, 8.1109e-01, 6.4614e-01, 1.7118e-01, + 7.3946e-01, 2.4816e-01, 8.9376e-01, 3.7336e-01, + 4.8095e-01, 9.0200e-01, 8.3086e-02, 7.0358e-01, + 2.6066e-01, 7.4314e-01, 8.4932e-01, 2.0470e-01, + 5.6401e-01, 5.8671e-01, 6.8786e-01, 7.7478e-01, + 1.4807e-01, 9.8777e-02, 2.9188e-01, 9.4080e-01, + 2.4621e-01, 2.0040e-01, 4.4002e-01, 9.4446e-02, + 4.7541e-02, 9.0731e-01, 6.8464e-01, 6.0628e-01, + 4.0497e-01, 4.3627e-01, 9.2313e-01, 6.5432e-01, + 7.8830e-01, 9.3517e-01, 9.1528e-01, 4.3097e-01, + 1.7597e-01, 3.2670e-01, 7.7403e-01, 2.9494e-03, + 6.9962e-01, 2.7023e-01, 2.0295e-01, 9.7020e-01, + 9.1071e-01, 2.9348e-01, 7.4686e-01, 7.1413e-01, + 7.9889e-01, 1.6951e-01, 4.1044e-01, 5.1511e-01, + 7.6664e-01, 7.5993e-01, 9.8307e-01, 1.1932e-01, + 4.2850e-01, 1.4130e-01, 5.9822e-03, 3.8538e-01, + 7.0704e-01, 1.8775e-01, 5.8466e-01, 3.2036e-01, + 1.7749e-01, 6.5440e-02, 6.4009e-01, 9.6568e-01, + 4.5433e-01, 4.8468e-01, 2.1953e-01, 5.9475e-01, + 5.9432e-01, 4.5611e-01, 9.2185e-01, 4.1013e-01, + 5.8628e-01, 7.7766e-01, 8.1271e-02, 8.7169e-01, + 1.9883e-01, 8.4098e-01, 1.2441e-01, 1.6244e-01, + 9.7958e-01, 6.3408e-02, 9.1554e-01, 7.6837e-01, + 4.2736e-01, 7.2365e-01, 5.4395e-01, 3.0341e-01, + 4.3202e-01, 1.9040e-01, 4.9899e-01, 3.7195e-01, + 8.1995e-01, 3.5838e-01, 8.5230e-01, 2.5760e-01, + 1.6760e-01, 8.5692e-01, 2.2390e-01, 8.1457e-01, + 9.8090e-01, 3.7941e-02, 5.3237e-01, 5.9576e-01, + 8.8111e-01, 6.0732e-02, 7.2630e-01, 6.8858e-01, + 2.9466e-01, 5.8679e-02, 1.3595e-01, 3.7713e-01, + 5.7706e-02, 6.0019e-01, 6.9180e-02, 1.4434e-01, + 2.3448e-01, 8.4736e-01, 8.6409e-02, 8.3623e-01, + 1.5255e-01, 7.0814e-02, 6.3446e-01, 1.3937e-01, + 7.7004e-01, 2.9534e-01, 1.9728e-01, 2.0145e-02, + 8.9785e-01, 2.1766e-01, 8.3063e-01, 2.8683e-01, + 1.5078e-01, 2.2279e-01, 5.9308e-01, 7.5600e-01, + 8.3585e-01, 3.6225e-01, 2.7372e-01, 3.0123e-01, + 1.1595e-01, 3.8505e-01, 4.0316e-01, 1.8412e-01, + 8.0834e-01, 6.1984e-03, 3.0006e-01, 4.9017e-01, + 3.6079e-01, 4.0362e-01, 7.0827e-01, 2.2091e-01, + 
7.5211e-01, 4.4784e-01, 4.4226e-01, 8.2791e-01, + 1.3555e-02, 5.3265e-01, 9.1538e-01, 6.2884e-01, + 6.6689e-01, 9.7188e-01, 8.0720e-01, 9.4091e-01, + 5.9135e-01, 4.9020e-01, 7.3827e-01, 2.0256e-01, + 1.1750e-02, 2.0886e-01, 1.9299e-01, 8.9001e-01, + 9.3115e-01, 7.9706e-01, 8.7781e-01, 9.7369e-01, + 8.4326e-01, 6.5339e-01, 7.2301e-03, 4.4875e-01, + 7.4221e-01, 5.7669e-01, 7.9307e-02, 5.3834e-01, + 7.2401e-01, 7.3897e-01, 3.3198e-01, 4.1645e-01, + 2.5751e-01, 7.7243e-01, 1.6508e-01, 3.3601e-01, + 1.2216e-01, 9.6966e-01, 6.4482e-02, 2.6164e-01, + 2.6966e-01, 8.1784e-01, 8.3198e-02, 4.5606e-01, + 8.5068e-01, 2.7896e-01, 3.4568e-01, 2.9177e-03, + 2.6710e-01, 4.1286e-01, 9.9692e-02, 4.4005e-02, + 5.3149e-01, 8.0285e-01, 9.2733e-01, 9.8415e-01, + 4.0517e-01, 6.9211e-01, 6.8433e-01, 7.4389e-01, + 7.7251e-02, 7.8116e-01, 4.3742e-01, 4.2201e-02, + 6.3515e-02, 5.3299e-01, 7.3031e-01, 1.4772e-01, + 7.8104e-01, 9.2317e-01, 5.2068e-01, 2.7853e-02, + 4.4905e-01, 3.7854e-01, 9.0340e-01, 7.3587e-01, + 8.0233e-01, 6.8180e-01, 7.2164e-01, 1.5280e-01, + 2.5554e-01, 2.0733e-01, 1.4318e-01, 6.3584e-01, + 7.8482e-01, 6.8190e-01, 7.3859e-01, 4.9329e-01, + 4.1293e-01, 5.5564e-01, 3.3628e-02, 5.6929e-01, + 1.4332e-01, 3.5152e-02, 4.6137e-01, 4.6820e-02, + 1.8616e-01, 4.8859e-01, 6.3675e-01, 1.9397e-02, + 5.9888e-01, 8.2305e-01, 2.5241e-01, 9.0465e-02, + 8.8729e-01, 1.9497e-01, 2.9390e-02, 9.4063e-01, + 6.9774e-01, 5.9022e-01, 7.9131e-01, 5.3939e-01, + 3.4305e-01, 2.4816e-01, 8.6604e-01, 9.8164e-01, + 2.2706e-01, 4.6569e-01, 6.9859e-01, 7.2450e-01, + 5.7793e-01, 6.1170e-01, 3.4399e-01, 4.2448e-01, + 1.7335e-01, 5.9485e-01, 1.6204e-01, 1.4951e-01, + 4.4594e-01, 8.0794e-01, 8.0718e-02, 5.4362e-01, + 2.3661e-01, 2.4661e-01, 5.2734e-01, 3.2346e-01, + 7.4938e-01, 9.5916e-01, 1.6617e-01, 6.3971e-01, + 7.7201e-01, 1.4215e-01, 8.2792e-01, 9.9614e-01, + 1.0073e-01, 6.4922e-01, 9.7990e-01, 5.5729e-01, + 6.4425e-01, 9.9597e-01, 8.0593e-01, 4.1681e-01, + 5.1384e-01, 1.9041e-01, 9.2539e-02, 1.4975e-01, + 5.8771e-01, 3.0319e-02, 4.8759e-01, 3.5443e-01, + 8.2084e-01, 6.9219e-01, 8.9037e-03, 8.8679e-02, + 6.1502e-01, 6.5215e-01, 7.1123e-01, 2.4235e-01, + 8.2987e-01, 6.8720e-01, 5.2843e-01, 8.2907e-01, + 3.8220e-01, 8.3923e-01, 4.6456e-01, 1.7218e-01, + 7.6149e-01, 7.1157e-01, 8.5231e-03, 6.7050e-01, + 1.3597e-01, 2.0987e-01, 1.9253e-02, 3.5143e-01, + 2.2750e-01, 1.4359e-01, 9.5187e-01, 9.2095e-01, + 7.7361e-01, 6.7116e-03, 4.6572e-01, 5.8405e-01, + 6.9670e-02, 8.2697e-01, 7.8156e-01, 8.1744e-01, + 3.3562e-01, 9.2200e-01, 6.3820e-01, 1.0771e-01, + 2.5406e-01, 6.1061e-01, 8.2491e-01, 9.4288e-01, + 3.0216e-01, 6.2460e-01, 2.6166e-01, 4.8909e-01, + 7.7266e-01, 7.0343e-01, 8.5985e-01, 1.1788e-01, + 5.9938e-01, 9.0807e-01, 8.9671e-01, 2.0172e-01, + 4.1501e-01, 3.8874e-01, 3.3951e-01, 9.9315e-01, + 3.9520e-01, 4.0721e-01, 7.9266e-01, 3.7646e-01, + 2.0510e-01, 9.8256e-01, 4.1987e-01, 6.5800e-01, + 4.1487e-01, 5.4416e-01, 1.5974e-01, 7.0997e-01, + 9.3577e-02, 7.8041e-01, 5.7365e-01, 6.4202e-01, + 2.4799e-01, 2.0890e-01, 4.0900e-01, 9.6599e-01, + 1.4471e-01, 1.4378e-01, 4.4160e-01, 6.5426e-01, + 7.6465e-01, 5.0391e-01, 1.7862e-01, 9.5819e-01, + 5.9414e-01, 7.8674e-01, 5.1636e-01, 3.7388e-01, + 8.9213e-01, 2.5634e-01, 8.3257e-02, 7.6556e-01, + 3.9435e-01, 3.2539e-01, 4.2520e-01, 2.6640e-01, + 2.9244e-01, 8.6129e-01, 4.6294e-01, 5.7341e-01, + 6.7467e-01, 2.6493e-01, 7.8233e-01, 3.8148e-01, + 1.9676e-01, 5.9091e-01, 6.7302e-01, 7.9015e-01, + 9.3704e-01, 8.3071e-01, 1.9322e-01, 1.3391e-01, + 7.9375e-01, 8.9058e-01, 3.2576e-01, 7.6412e-01, + 
2.9206e-01, 9.4303e-02, 2.6508e-01, 2.6732e-02, + 8.6474e-01, 6.5165e-01, 6.6929e-01, 3.2381e-01, + 7.3144e-01, 5.0760e-01, 8.3973e-01, 5.6446e-01, + 5.0774e-01, 4.5819e-01, 5.0472e-01, 8.2957e-01, + 6.9893e-01, 5.4532e-01, 2.1384e-01, 2.7863e-01, + 9.7407e-01, 4.9597e-01, 5.9598e-01, 1.0999e-01, + 6.0630e-01, 4.4774e-01, 1.2226e-01, 7.8700e-01, + 2.9926e-01, 9.9008e-02, 6.1613e-01, 5.9640e-01, + 6.9008e-01, 2.2797e-01, 4.3699e-01, 7.4226e-01, + 2.5932e-01, 9.4341e-01, 3.6656e-01, 8.9090e-01, + 1.1825e-01, 1.8231e-01, 9.1802e-01, 5.6671e-01, + 2.4127e-01, 8.4582e-02, 4.4272e-02, 3.4668e-01, + 5.0760e-01, 8.9506e-01, 4.8637e-01, 1.2431e-01, + 8.0726e-01, 9.6584e-01, 7.1123e-01, 8.4781e-01, + 1.6727e-01, 6.6336e-01, 5.4632e-01, 3.5048e-02, + 5.4051e-01, 7.8963e-01, 5.6210e-01, 7.3553e-01, + 4.1628e-01, 4.2604e-01, 2.2175e-01, 9.6073e-01, + 5.1360e-01, 4.9418e-01, 7.9926e-01, 3.7731e-01, + 9.8090e-01, 3.0631e-01, 1.4232e-02, 2.8290e-01, + 1.9600e-01, 2.2115e-01, 6.7154e-01, 4.3191e-01, + 4.9638e-01, 2.2288e-01, 6.3807e-01, 7.0286e-01, + 5.3409e-01, 4.7524e-01, 6.8467e-02, 9.4389e-02, + 3.3462e-01, 3.4218e-01, 6.9728e-01, 5.3604e-01, + 4.3637e-01, 1.6252e-01, 7.1201e-01, 1.5898e-01, + 1.5749e-01, 6.4281e-01, 2.9275e-02, 7.4966e-01, + 3.4188e-01, 2.2876e-01, 1.6752e-01, 7.7030e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.0219, 0.9207, 0.3669, ..., 0.7955, 0.2670, 0.3543]) +tensor([0.7308, 0.4342, 0.3494, ..., 0.0020, 0.7901, 0.4274]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -919,268 +1133,375 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.798607587814331 seconds +Time: 10.631132364273071 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([3726, 7095, 4684, 2020, 836, 1255, 7107, 2910, 7765, - 3618, 8004, 8576, 4555, 2122, 3631, 3042, 6509, 854, - 5117, 7246, 7468, 4038, 6190, 4788, 6872, 1487, 1270, - 5894, 3327, 4557, 2868, 4373, 8003, 1239, 2993, 4213, - 4878, 8600, 9710, 4771, 8192, 5937, 3800, 5340, 2757, - 7379, 410, 167, 7636, 1048, 2438, 5415, 3035, 8972, - 8984, 1069, 8135, 9320, 5730, 3547, 7645, 6319, 8708, - 4794, 9059, 642, 937, 2015, 2851, 401, 5086, 3408, - 3671, 8044, 1220, 336, 7855, 7629, 930, 2750, 1987, - 9765, 3121, 9993, 829, 9850, 1435, 2979, 5489, 3796, - 4432, 9879, 9991, 2308, 6643, 3959, 9751, 638, 1357, - 4879, 6697, 7016, 2005, 3140, 2355, 1476, 7190, 9157, - 586, 8925, 8359, 7415, 6315, 5275, 7818, 5569, 2433, - 2192, 6764, 8455, 1408, 5314, 6201, 4055, 2442, 1110, - 8696, 5368, 8279, 443, 7835, 3502, 7362, 1330, 775, - 9823, 1517, 4273, 5726, 452, 5679, 1189, 861, 4804, - 3949, 9279, 5272, 3517, 1449, 1605, 1487, 9465, 4077, - 1506, 7136, 8000, 3536, 5423, 6093, 3644, 4106, 578, - 4413, 8078, 3736, 1232, 8907, 611, 6360, 4917, 5521, - 8890, 8861, 4942, 8337, 4548, 4613, 1027, 8700, 6909, - 958, 3790, 2989, 8565, 911, 5630, 6072, 1354, 9245, - 1994, 8552, 1111, 4146, 7268, 4683, 8105, 4378, 9360, - 6653, 8808, 5006, 4328, 5559, 3943, 4417, 7458, 2312, - 116, 5416, 6906, 1303, 5319, 7254, 8596, 3357, 6527, - 8398, 8704, 8210, 823, 4023, 1928, 9697, 2295, 5245, - 8970, 1573, 3796, 4232, 3566, 6537, 4098, 575, 7372, - 366, 6301, 8492, 2459, 7103, 5364, 5367, 4901, 9480, - 278, 5400, 7252, 9075, 5325, 1810, 7648, 324, 2329, - 8, 3227, 8008, 8213, 741, 1909, 4083, 9465, 4927, - 4913, 2356, 2450, 8390, 5767, 5848, 2748, 9467, 3514, - 4253, 2231, 1205, 4867, 6627, 4336, 5974, 7755, 3790, - 221, 6230, 4840, 1706, 8664, 7040, 1728, 4302, 8186, - 7438, 2847, 378, 6914, 377, 2565, 6264, 2119, 4808, - 8605, 8197, 5489, 4175, 5779, 6350, 3342, 6552, 3810, - 3380, 2149, 6531, 4018, 9838, 7109, 6420, 2314, 8276, - 8549, 3270, 8204, 1510, 99, 9666, 881, 9634, 6348, - 5206, 5717, 6256, 9140, 2894, 55, 7420, 6909, 8907, - 9934, 7455, 256, 6416, 3264, 139, 9653, 9104, 120, - 4401, 5358, 2182, 3677, 5160, 5409, 8367, 6563, 4808, - 6260, 2681, 8624, 9491, 9352, 8644, 896, 2705, 7108, - 2245, 6339, 6089, 2552, 2272, 1181, 6435, 563, 8137, - 1459, 7439, 2939, 7035, 867, 6590, 8094, 5026, 4423, - 4770, 7804, 7887, 761, 2953, 9927, 2073, 9826, 6475, - 9197, 874, 7559, 607, 1174, 7682, 3265, 6020, 6386, - 5306, 8737, 3204, 8378, 33, 4395, 8617, 1260, 266, - 2349, 7503, 7582, 3161, 3035, 9262, 8314, 2986, 5564, - 2126, 6464, 7360, 5226, 3325, 1629, 8069, 7692, 7794, - 4182, 2653, 7141, 4784, 5328, 9808, 7261, 1363, 8658, - 9123, 3810, 2620, 9643, 1362, 3134, 7384, 7296, 5209, - 6036, 7751, 7232, 736, 6172, 4771, 6452, 9215, 8373, - 5885, 7654, 3268, 8433, 3826, 1191, 4084, 6339, 3322, - 2278, 9573, 752, 1314, 9267, 6354, 6143, 35, 7253, - 1096, 5121, 7153, 7760, 1966, 6253, 3182, 9955, 1848, - 5514, 2387, 9249, 933, 7029, 186, 2702, 6241, 8466, - 1489, 3322, 5487, 5027, 3772, 1949, 4952, 2162, 9526, - 1901, 4241, 3525, 396, 3698, 5708, 9938, 9685, 4339, - 2948, 209, 6838, 1145, 5442, 7328, 5423, 6485, 1415, - 7421, 1724, 9251, 933, 6123, 4946, 5577, 7971, 9846, - 5274, 2640, 4773, 3064, 9692, 9463, 1802, 2781, 6650, - 4562, 991, 4382, 6448, 7016, 6327, 1852, 5951, 1025, - 7839, 4680, 3410, 4015, 1478, 3864, 5981, 6067, 988, - 2131, 7857, 2239, 375, 7733, 1730, 8801, 1787, 6496, - 1921, 
8121, 6115, 2130, 7431, 2124, 1900, 3400, 3093, - 2034, 8191, 8469, 3659, 2287, 1005, 9444, 6813, 6960, - 3470, 8573, 1717, 3039, 2558, 4596, 4880, 4472, 7262, - 5686, 6336, 2458, 2937, 9212, 6313, 6778, 8810, 9277, - 928, 1704, 6201, 8834, 7081, 7414, 5902, 2525, 6986, - 4360, 144, 652, 1234, 5719, 484, 2482, 5766, 4021, - 4006, 6231, 1726, 4378, 3881, 6930, 8631, 3608, 4246, - 75, 8644, 7232, 8461, 8874, 3157, 9402, 5888, 3476, - 3276, 9018, 5589, 8991, 2990, 7947, 2115, 8859, 6426, - 1953, 3665, 3323, 3802, 5811, 8129, 4154, 4205, 5918, - 2014, 5679, 7180, 635, 5363, 4653, 1897, 3249, 6982, - 678, 2068, 9858, 2498, 9298, 382, 644, 7370, 2418, - 8329, 5775, 8507, 7061, 9961, 4182, 287, 2359, 5919, - 8303, 3664, 2342, 4924, 2424, 8480, 1042, 4823, 3665, - 6553, 4427, 9234, 7736, 4966, 935, 6525, 8224, 4229, - 311, 9329, 7173, 6315, 1432, 9741, 5248, 4687, 1042, - 3524, 3804, 6875, 7125, 7161, 3553, 195, 2516, 9006, - 3709, 8580, 57, 2078, 1413, 9823, 6439, 5603, 3945, - 4348, 2406, 4610, 6846, 5704, 6321, 9398, 4952, 1295, - 738, 1206, 9447, 1290, 7633, 6389, 2334, 2897, 916, - 3131, 3092, 1954, 7671, 2004, 8145, 1483, 4726, 1441, - 9782, 9165, 5535, 6096, 1612, 3414, 7082, 3936, 7625, - 116, 6121, 9652, 4539, 5938, 5687, 6276, 1101, 1633, - 6510, 9508, 5331, 5376, 9358, 2109, 2723, 7391, 4555, - 2365, 6412, 3706, 6937, 5899, 2966, 8411, 6231, 9569, - 2849, 6339, 4931, 6281, 2950, 4951, 973, 1306, 6662, - 8109, 5095, 9232, 9872, 443, 8647, 7506, 8104, 8756, - 9936, 686, 4823, 2682, 3740, 3137, 923, 9193, 8324, - 4020, 1627, 6261, 9840, 304, 4142, 1317, 5620, 8274, - 3594, 572, 272, 1611, 5193, 347, 416, 442, 6874, - 8019, 1732, 1301, 6279, 8538, 2543, 4932, 6155, 3059, - 3059, 4499, 9216, 1298, 4595, 9507, 1178, 62, 1121, - 2558, 3614, 1585, 9579, 2799, 9710, 2223, 2927, 6219, - 1930, 1468, 3790, 7212, 2706, 1924, 107, 5637, 3603, - 8882, 6287, 8286, 8516, 3947, 4130, 4658, 4839, 7052, - 5285, 9433, 2234, 8542, 478, 1484, 8586, 9538, 899, - 6365, 3422, 1846, 4728, 2876, 7973, 1396, 5267, 8176, - 201, 8298, 5151, 9727, 4317, 6828, 3372, 2733, 3465, - 1632, 1856, 2211, 4987, 6790, 3737, 9878, 8936, 3754, - 4531, 7583, 4001, 4220, 5379, 7676, 9403, 5140, 5873, - 7088, 8903, 2296, 2171, 9394, 1645, 9743, 4507, 9272, - 2477, 8320, 7982, 3456, 5868, 9002, 5854, 753, 6254, - 29, 7699, 1129, 8991, 4514, 3222, 4361, 7378, 3047, - 9627, 6076, 4663, 3561, 5068, 9330, 6752, 8500, 1957, - 9072, 5138, 8731, 1067, 4146, 1100, 4646, 1538, 8687, - 8992, 6143, 9364, 7127, 9769, 2434, 7246, 2305, 7691, - 4140, 198, 608, 1773, 6904, 7875, 2110, 7719, 282, - 4734, 4064, 2354, 1595, 7483, 992, 7146, 9086, 2639, - 12, 4356, 2592, 9295, 444, 3493, 9301, 5028, 8522, - 1719]), - values=tensor([0.8600, 0.5616, 0.7072, 0.3175, 0.5762, 0.8898, 0.7613, - 0.6444, 0.1843, 0.4299, 0.9701, 0.9160, 0.9531, 0.4047, - 0.1402, 0.4728, 0.0686, 0.5538, 0.1157, 0.4298, 0.8825, - 0.1354, 0.8143, 0.5913, 0.0480, 0.3359, 0.5316, 0.6111, - 0.8620, 0.8841, 0.0457, 0.2228, 0.0040, 0.0383, 0.6512, - 0.9426, 0.9603, 0.6779, 0.3036, 0.2358, 0.8460, 0.4662, - 0.3348, 0.0817, 0.8411, 0.0542, 0.1380, 0.3042, 0.1998, - 0.2999, 0.4133, 0.8056, 0.7595, 0.1627, 0.9131, 0.7276, - 0.6625, 0.1656, 0.8899, 0.5354, 0.8460, 0.1291, 0.5991, - 0.0805, 0.4357, 0.4936, 0.4524, 0.3191, 0.7007, 0.0375, - 0.6902, 0.4198, 0.3498, 0.7962, 0.5312, 0.3669, 0.5804, - 0.4017, 0.9393, 0.3637, 0.5425, 0.4731, 0.0877, 0.9094, - 0.7214, 0.9414, 0.9950, 0.2474, 0.5511, 0.8001, 0.0442, - 0.9555, 0.3790, 0.0597, 0.0816, 0.0893, 0.0727, 0.0855, - 0.8418, 0.0900, 0.4889, 
0.1537, 0.1078, 0.3076, 0.1132, - 0.3499, 0.2464, 0.3119, 0.7397, 0.5203, 0.7296, 0.3076, - 0.0908, 0.5180, 0.8820, 0.2955, 0.7721, 0.2443, 0.4412, - 0.0643, 0.1279, 0.8697, 0.6469, 0.3773, 0.3600, 0.5487, - 0.3518, 0.6432, 0.2385, 0.7556, 0.7224, 0.2014, 0.8943, - 0.1852, 0.9558, 0.6299, 0.3671, 0.9896, 0.6391, 0.1768, - 0.0298, 0.7972, 0.7124, 0.7051, 0.9680, 0.5269, 0.0532, - 0.0232, 0.6522, 0.9276, 0.9199, 0.4637, 0.4411, 0.9074, - 0.0961, 0.7283, 0.8054, 0.0583, 0.2110, 0.5960, 0.2328, - 0.9165, 0.5817, 0.5985, 0.0950, 0.4392, 0.8056, 0.8382, - 0.3675, 0.5339, 0.7866, 0.5051, 0.9951, 0.7743, 0.5281, - 0.2536, 0.4500, 0.8310, 0.7085, 0.2637, 0.3306, 0.5037, - 0.3989, 0.2457, 0.9774, 0.4426, 0.8629, 0.9169, 0.4122, - 0.5182, 0.4459, 0.2392, 0.3915, 0.8545, 0.1335, 0.2459, - 0.0435, 0.8607, 0.2092, 0.1152, 0.2264, 0.8530, 0.0610, - 0.5909, 0.8510, 0.9060, 0.1898, 0.9730, 0.5210, 0.5104, - 0.4607, 0.9295, 0.4482, 0.7724, 0.0968, 0.9352, 0.3537, - 0.8985, 0.5128, 0.0067, 0.2582, 0.7387, 0.7979, 0.7165, - 0.3443, 0.7560, 0.5700, 0.8397, 0.1771, 0.8402, 0.3091, - 0.9997, 0.1542, 0.0685, 0.8646, 0.3574, 0.0226, 0.7519, - 0.6524, 0.0072, 0.1165, 0.5114, 0.0917, 0.6314, 0.9212, - 0.5712, 0.7090, 0.0393, 0.1201, 0.8493, 0.2898, 0.8259, - 0.4117, 0.6401, 0.8024, 0.9462, 0.0945, 0.6655, 0.6495, - 0.4999, 0.2014, 0.1871, 0.0303, 0.7432, 0.6428, 0.9133, - 0.2792, 0.0556, 0.1388, 0.0015, 0.6331, 0.2758, 0.8367, - 0.8769, 0.0407, 0.6240, 0.7762, 0.1332, 0.5250, 0.1198, - 0.5519, 0.3094, 0.1346, 0.0647, 0.4742, 0.5731, 0.8318, - 0.2796, 0.5726, 0.3119, 0.9865, 0.9941, 0.5736, 0.5420, - 0.2171, 0.5354, 0.9426, 0.6173, 0.1128, 0.0283, 0.7768, - 0.9444, 0.6839, 0.6100, 0.7668, 0.0394, 0.8433, 0.0499, - 0.7761, 0.2587, 0.7427, 0.8822, 0.5529, 0.7552, 0.8520, - 0.8179, 0.9600, 0.0466, 0.5899, 0.0884, 0.9116, 0.1822, - 0.4571, 0.9885, 0.1750, 0.1838, 0.7352, 0.3448, 0.7858, - 0.2306, 0.7624, 0.8788, 0.3283, 0.6481, 0.6658, 0.3363, - 0.3090, 0.0330, 0.1595, 0.9700, 0.3235, 0.4224, 0.0628, - 0.0681, 0.2514, 0.1198, 0.5855, 0.6518, 0.4456, 0.5212, - 0.5722, 0.1297, 0.2828, 0.7370, 0.0325, 0.1233, 0.8781, - 0.1529, 0.3239, 0.4868, 0.4461, 0.0181, 0.9715, 0.6327, - 0.0701, 0.9788, 0.7771, 0.3935, 0.2992, 0.4526, 0.2252, - 0.9195, 0.1472, 0.6317, 0.3246, 0.5631, 0.7653, 0.3490, - 0.2479, 0.8149, 0.9333, 0.5535, 0.7152, 0.7420, 0.8502, - 0.5933, 0.4264, 0.3490, 0.4412, 0.9800, 0.1953, 0.5818, - 0.4513, 0.1650, 0.3902, 0.4948, 0.7564, 0.9652, 0.1048, - 0.4943, 0.8495, 0.3903, 0.7487, 0.4664, 0.0427, 0.1690, - 0.7279, 0.3082, 0.6150, 0.2912, 0.2094, 0.9971, 0.3723, - 0.7357, 0.3658, 0.4009, 0.9999, 0.6043, 0.9117, 0.7334, - 0.5824, 0.0506, 0.7330, 0.0230, 0.4406, 0.0807, 0.6942, - 0.2245, 0.2485, 0.5748, 0.8330, 0.7840, 0.7060, 0.2348, - 0.0035, 0.5364, 0.0187, 0.6599, 0.9337, 0.6053, 0.1831, - 0.4356, 0.0679, 0.0908, 0.3894, 0.8661, 0.9286, 0.5274, - 0.2198, 0.1337, 0.5968, 0.2616, 0.9505, 0.9894, 0.6963, - 0.4705, 0.2413, 0.0355, 0.0514, 0.8694, 0.1783, 0.6758, - 0.2101, 0.3192, 0.4910, 0.5302, 0.4899, 0.5359, 0.1925, - 0.2343, 0.0188, 0.3525, 0.6944, 0.1704, 0.1291, 0.7572, - 0.5869, 0.4575, 0.5402, 0.3083, 0.7036, 0.7430, 0.3675, - 0.5465, 0.0686, 0.3712, 0.7453, 0.8458, 0.5033, 0.3961, - 0.8154, 0.6781, 0.6892, 0.5106, 0.8864, 0.2284, 0.3130, - 0.1335, 0.9835, 0.3056, 0.3963, 0.3066, 0.8333, 0.1173, - 0.1717, 0.1906, 0.5577, 0.8909, 0.5702, 0.9202, 0.0098, - 0.9034, 0.1638, 0.7871, 0.0602, 0.1046, 0.7108, 0.1792, - 0.5907, 0.9144, 0.7471, 0.9590, 0.3509, 0.9697, 0.2362, - 0.7800, 0.2561, 0.6928, 0.6592, 0.6715, 
0.3954, 0.9109, - 0.8543, 0.2309, 0.9931, 0.3675, 0.9653, 0.9348, 0.8702, - 0.8780, 0.0545, 0.0095, 0.1323, 0.2713, 0.1104, 0.0360, - 0.7644, 0.3254, 0.2794, 0.7010, 0.6247, 0.7531, 0.2093, - 0.4077, 0.7399, 0.9364, 0.9070, 0.3612, 0.1375, 0.2106, - 0.6888, 0.0838, 0.3848, 0.1242, 0.2979, 0.7983, 0.3405, - 0.6686, 0.9463, 0.7854, 0.8071, 0.4226, 0.2682, 0.4134, - 0.9403, 0.2631, 0.8116, 0.3477, 0.7616, 0.0356, 0.3800, - 0.4796, 0.9664, 0.9730, 0.3146, 0.3811, 0.3444, 0.8364, - 0.4593, 0.2380, 0.3529, 0.8631, 0.8304, 0.7458, 0.2703, - 0.8323, 0.2629, 0.9141, 0.3840, 0.6975, 0.0508, 0.9536, - 0.3777, 0.3904, 0.8044, 0.1828, 0.1908, 0.1226, 0.5426, - 0.6712, 0.1985, 0.0131, 0.9193, 0.7070, 0.7883, 0.2011, - 0.5491, 0.4025, 0.3145, 0.0253, 0.2556, 0.4276, 0.9673, - 0.4027, 0.7313, 0.3000, 0.8244, 0.6521, 0.0329, 0.9128, - 0.1532, 0.7920, 0.6837, 0.1740, 0.6020, 0.4749, 0.1878, - 0.6918, 0.3430, 0.8863, 0.0954, 0.2221, 0.0196, 0.9170, - 0.2529, 0.8027, 0.9234, 0.7784, 0.4154, 0.7635, 0.3669, - 0.0588, 0.6705, 0.6605, 0.4654, 0.5060, 0.1257, 0.2869, - 0.1042, 0.7987, 0.7289, 0.1239, 0.7381, 0.3130, 0.2067, - 0.2775, 0.8959, 0.6030, 0.6860, 0.7855, 0.9485, 0.1748, - 0.7731, 0.6068, 0.3676, 0.3474, 0.3768, 0.8882, 0.3443, - 0.2956, 0.9448, 0.4045, 0.2381, 0.2525, 0.2118, 0.1224, - 0.6778, 0.2811, 0.1046, 0.4058, 0.6707, 0.0635, 0.5605, - 0.3192, 0.2209, 0.6866, 0.2181, 0.4822, 0.4068, 0.9604, - 0.5810, 0.0649, 0.5097, 0.1731, 0.2222, 0.3090, 0.8573, - 0.2342, 0.2229, 0.3303, 0.5365, 0.1241, 0.0313, 0.7432, - 0.3762, 0.5850, 0.5199, 0.0453, 0.7767, 0.0550, 0.5852, - 0.6872, 0.3713, 0.6670, 0.1341, 0.8513, 0.2874, 0.5203, - 0.7337, 0.0145, 0.5286, 0.0318, 0.0106, 0.8079, 0.5923, - 0.1886, 0.8415, 0.1474, 0.3036, 0.7165, 0.9739, 0.2050, - 0.0209, 0.3028, 0.8424, 0.4722, 0.9670, 0.8856, 0.7059, - 0.8881, 0.4477, 0.3433, 0.8833, 0.6854, 0.9979, 0.4134, - 0.6004, 0.9236, 0.5985, 0.8387, 0.3869, 0.9226, 0.8476, - 0.2815, 0.5644, 0.8983, 0.8519, 0.4415, 0.5941, 0.4933, - 0.3941, 0.0630, 0.8087, 0.5422, 0.7823, 0.0794, 0.7566, - 0.8197, 0.3825, 0.5699, 0.9195, 0.6894, 0.8052, 0.9060, - 0.8181, 0.6439, 0.3874, 0.8570, 0.7838, 0.5526, 0.0770, - 0.5884, 0.8769, 0.0709, 0.5994, 0.2015, 0.1734, 0.9822, - 0.8660, 0.0374, 0.8196, 0.5480, 0.3620, 0.6184, 0.3047, - 0.1654, 0.6492, 0.4554, 0.0477, 0.9808, 0.2615, 0.3836, - 0.1987, 0.6319, 0.9715, 0.7191, 0.5039, 0.7992, 0.8664, - 0.0815, 0.4937, 0.8132, 0.9455, 0.3586, 0.8318, 0.8930, - 0.1422, 0.4286, 0.9237, 0.7140, 0.1449, 0.5233, 0.9544, - 0.6412, 0.0940, 0.5388, 0.3539, 0.9951, 0.5860, 0.5837, - 0.0540, 0.2181, 0.6021, 0.1546, 0.1818, 0.6251, 0.6991, - 0.5441, 0.4257, 0.3883, 0.4987, 0.7504, 0.8178, 0.0667, - 0.3423, 0.4631, 0.0636, 0.6995, 0.6845, 0.1335, 0.5769, - 0.3366, 0.8741, 0.7448, 0.5158, 0.4520, 0.7964, 0.5429, - 0.3853, 0.3489, 0.7282, 0.3002, 0.4892, 0.4084, 0.7785, - 0.6224, 0.6482, 0.1360, 0.3743, 0.3204, 0.3293, 0.5975, - 0.8635, 0.1468, 0.1420, 0.1950, 0.5502, 0.3099, 0.4665, - 0.2473, 0.9781, 0.4882, 0.8615, 0.4379, 0.3304, 0.9224, - 0.6375, 0.4835, 0.7192, 0.6721, 0.7342, 0.5743, 0.9239, - 0.9773, 0.2213, 0.3478, 0.7032, 0.0574, 0.8041, 0.3853, - 0.1147, 0.0390, 0.9320, 0.5858, 0.0975, 0.5982, 0.1467, - 0.9118, 0.4835, 0.9183, 0.3489, 0.0389, 0.2553, 0.5860, - 0.2665, 0.6450, 0.3179, 0.5337, 0.7581, 0.4409, 0.1177, - 0.0512, 0.8850, 0.2142, 0.1547, 0.5876, 0.8678, 0.5430, - 0.4686, 0.4656, 0.5329, 0.4015, 0.3146, 0.2257, 0.1820, - 0.9287, 0.0585, 0.6678, 0.0868, 0.7648, 0.2970, 0.6893, - 0.7312, 0.6106, 0.1958, 0.8679, 0.9976, 0.5849, 0.7869, 
- 0.3363, 0.5231, 0.9619, 0.1567, 0.1143, 0.9307, 0.2825, - 0.3303, 0.5892, 0.7606, 0.7858, 0.0785, 0.3935, 0.0941, - 0.7542, 0.7552, 0.7909, 0.6337, 0.4503, 0.8151, 0.1544, - 0.0385, 0.1762, 0.7871, 0.9429, 0.7065, 0.2556, 0.7752, - 0.3810, 0.5819, 0.5096, 0.6816, 0.5826, 0.0960, 0.1244, - 0.3464, 0.1206, 0.8110, 0.0102, 0.2242, 0.3161]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 999, 1000]), + col_indices=tensor([2638, 4549, 5551, 2117, 4942, 9438, 9207, 6102, 6113, + 5097, 4681, 4774, 1006, 5714, 544, 8676, 4376, 3736, + 8577, 2814, 4988, 6404, 8803, 1894, 1225, 2662, 6498, + 474, 7502, 3741, 1251, 8621, 3601, 2924, 4778, 6508, + 8933, 6759, 4360, 1296, 9451, 5379, 2580, 1056, 6875, + 4437, 1085, 2249, 7387, 3599, 6972, 7834, 9715, 7328, + 9763, 5133, 813, 9023, 2249, 27, 3432, 3721, 9617, + 1590, 7696, 56, 5980, 9100, 4781, 77, 5788, 3266, + 5149, 2548, 3482, 8983, 3044, 7387, 7570, 4435, 738, + 926, 5784, 1972, 982, 8367, 9033, 5243, 9717, 9376, + 7878, 1215, 6076, 5447, 8437, 7705, 917, 2939, 9446, + 9814, 4295, 9541, 589, 1385, 8969, 2091, 9768, 3150, + 2282, 7241, 6389, 594, 5984, 3290, 5621, 3106, 7389, + 1549, 9798, 483, 161, 2192, 9062, 7275, 3614, 3964, + 9168, 9147, 8052, 1337, 423, 4173, 259, 6709, 4077, + 346, 597, 2400, 9499, 1392, 3670, 7737, 4953, 2762, + 3925, 5710, 8086, 6513, 3032, 504, 3777, 5879, 6044, + 9260, 3763, 3127, 963, 1869, 9765, 1796, 435, 5600, + 5627, 9102, 3333, 8833, 301, 7374, 7928, 7149, 6664, + 9548, 354, 7834, 2447, 5726, 4946, 4759, 5625, 2130, + 8779, 9304, 7900, 7798, 965, 31, 5611, 84, 7073, + 6207, 1689, 4705, 5870, 9222, 1708, 6221, 1677, 1776, + 2545, 5442, 3654, 7124, 7411, 7264, 8352, 250, 8624, + 3373, 4378, 2496, 9847, 1473, 3096, 7950, 1600, 8490, + 4224, 1529, 3981, 7942, 6717, 5606, 1194, 9089, 2551, + 1975, 9279, 8460, 3058, 9478, 4262, 5240, 3169, 275, + 8839, 1527, 2001, 8946, 7691, 7384, 5098, 5785, 7893, + 8114, 8440, 5428, 2718, 1104, 6538, 2397, 6013, 2109, + 9439, 5088, 92, 5038, 5221, 729, 7397, 1491, 8841, + 411, 7003, 3812, 3353, 8669, 4897, 6943, 3833, 7041, + 7756, 6642, 2488, 660, 996, 2981, 7454, 7786, 980, + 1221, 4578, 2033, 9314, 2613, 4376, 2586, 5987, 7835, + 8216, 1913, 7481, 2665, 6049, 1239, 7388, 3281, 7709, + 1591, 3156, 1803, 3851, 2376, 6086, 5052, 4524, 2336, + 8715, 8344, 5852, 7207, 3167, 7908, 2225, 3206, 5863, + 5099, 1361, 3099, 8942, 4099, 6186, 5775, 6474, 7452, + 4562, 465, 7762, 6510, 536, 9242, 8125, 3782, 3904, + 5530, 2008, 8821, 8028, 8192, 7694, 3356, 1431, 2479, + 5529, 5205, 4156, 2387, 2402, 2740, 4691, 1092, 8112, + 179, 4166, 7791, 8923, 6912, 993, 4400, 7872, 1058, + 5287, 5705, 2995, 5492, 9979, 5923, 2908, 2631, 6491, + 7799, 3357, 8539, 6937, 6776, 8019, 6710, 5292, 4317, + 5324, 6939, 8323, 8297, 1302, 6950, 9139, 4923, 2869, + 4762, 3011, 3683, 5086, 3546, 5468, 7039, 5094, 685, + 1912, 8181, 7088, 1069, 5927, 3264, 9562, 7548, 5987, + 6296, 8000, 5529, 8254, 6063, 232, 632, 4123, 5683, + 4331, 3062, 4320, 4702, 422, 808, 9851, 5159, 4742, + 9696, 4849, 4939, 5560, 6715, 8514, 7411, 6753, 1479, + 6447, 9423, 2299, 746, 3126, 8944, 6474, 9238, 491, + 3663, 7658, 9643, 9359, 8097, 7891, 3910, 2281, 2498, + 8362, 9549, 8992, 8173, 2719, 6566, 3163, 3205, 6566, + 4769, 461, 7427, 9910, 1123, 8896, 596, 4564, 6952, + 6648, 8124, 5243, 7751, 7557, 7865, 955, 2635, 8267, + 931, 6662, 4121, 14, 6956, 2418, 5213, 1632, 9138, + 5125, 7816, 5857, 663, 9933, 7314, 8839, 2670, 5825, + 8248, 1854, 378, 5585, 2403, 179, 8217, 1377, 1515, + 5503, 703, 2836, 1663, 9359, 8473, 542, 7157, 2322, 
+ 2413, 2427, 69, 5467, 2965, 8435, 2568, 1445, 9874, + 1244, 3903, 9337, 5230, 2636, 8117, 1241, 5622, 6156, + 8074, 3395, 7868, 7595, 5963, 2485, 3144, 7917, 5995, + 166, 1965, 3368, 3505, 9895, 5435, 7614, 9297, 6947, + 2080, 7891, 6125, 7336, 6858, 5385, 4016, 4445, 5136, + 5376, 1116, 1115, 6672, 4261, 3458, 804, 4289, 5383, + 838, 6307, 4905, 9028, 4232, 5849, 9450, 3130, 6114, + 2811, 7246, 3562, 9672, 3081, 5993, 8682, 355, 1008, + 800, 3529, 1850, 5285, 8636, 9050, 6252, 2864, 8240, + 6668, 1243, 4693, 4257, 2053, 1439, 260, 4787, 554, + 3091, 1743, 3251, 1845, 7534, 9151, 8619, 5438, 7117, + 347, 3021, 9772, 1610, 1975, 3795, 6469, 2068, 5080, + 1912, 3486, 3533, 4972, 6311, 3709, 7121, 457, 1117, + 111, 106, 8099, 6628, 9452, 6539, 5386, 6312, 7232, + 3654, 1148, 6079, 1714, 2309, 4143, 3624, 3813, 8187, + 7477, 7185, 3345, 7237, 6387, 6466, 4935, 3446, 7809, + 533, 5015, 4608, 9916, 9577, 9588, 241, 7114, 7563, + 1553, 395, 7125, 3051, 9142, 5107, 5502, 5137, 3599, + 852, 4115, 2564, 4851, 5123, 2048, 1236, 9937, 7410, + 8132, 1152, 1528, 7683, 7131, 6592, 6457, 928, 5814, + 3559, 3379, 2378, 1111, 5109, 894, 6251, 9030, 7124, + 2881, 6077, 5197, 8530, 9054, 7424, 1949, 220, 2172, + 2545, 5621, 4772, 8756, 760, 5415, 6366, 598, 6818, + 4198, 6352, 4806, 9914, 4096, 4871, 5855, 2949, 6370, + 8519, 2947, 2383, 6716, 1419, 620, 9696, 8189, 1734, + 1617, 9194, 2156, 4648, 2570, 2697, 4094, 4909, 7651, + 8049, 5255, 8752, 5010, 2102, 9639, 3049, 7862, 3829, + 6831, 1725, 5211, 4265, 1268, 5022, 6549, 3396, 4993, + 823, 7710, 5486, 8054, 2421, 4386, 5787, 6383, 2870, + 3977, 650, 5270, 5527, 6328, 2739, 6786, 3390, 8530, + 7113, 787, 1039, 9842, 9695, 2938, 3630, 754, 9415, + 8080, 1030, 1497, 6446, 6175, 4422, 7964, 4649, 3655, + 3471, 8985, 3288, 8037, 9604, 5683, 3362, 3632, 375, + 6360, 7045, 5162, 4231, 7591, 5760, 23, 6358, 9455, + 5328, 6988, 4279, 5895, 7355, 6237, 9635, 6321, 4753, + 7272, 2201, 4494, 2414, 6780, 5222, 4451, 6904, 1207, + 7807, 5438, 3375, 6327, 5615, 9394, 5988, 9301, 2925, + 667, 3093, 7177, 5634, 3154, 4048, 6975, 1088, 4111, + 1601, 5108, 2231, 1630, 3176, 7591, 7169, 9468, 9206, + 2238, 9386, 1558, 5452, 7827, 2881, 1563, 3103, 5205, + 1681, 1927, 7558, 690, 5697, 8917, 9114, 5288, 1097, + 5950, 2646, 6335, 4119, 8894, 7803, 6086, 279, 271, + 5870, 3974, 9831, 2830, 2865, 61, 8320, 8560, 7767, + 9220, 5213, 7576, 6987, 8627, 5376, 2233, 9175, 4429, + 7405, 3574, 4102, 7903, 8731, 3230, 8418, 4637, 1920, + 9702, 3725, 5427, 9976, 7818, 2118, 8475, 2958, 35, + 1080, 327, 9877, 4772, 5399, 3747, 2143, 1775, 4356, + 6442, 2415, 647, 3460, 9349, 9724, 7767, 1941, 4389, + 9102, 5108, 5065, 6550, 1907, 5152, 733, 6179, 2077, + 4737, 6361, 8596, 8587, 844, 2988, 3389, 8702, 8796, + 471, 9163, 952, 9619, 788, 7764, 9638, 5773, 925, + 9310, 1086, 664, 2124, 5101, 7887, 2417, 4568, 1699, + 8947, 5281, 4747, 4713, 2372, 336, 9298, 6123, 3691, + 3035, 9241, 7383, 8218, 9451, 8726, 5429, 8881, 3953, + 8147]), + values=tensor([3.5525e-01, 6.2390e-02, 3.1706e-01, 4.0180e-01, + 7.0767e-01, 6.0253e-01, 3.3377e-02, 9.4604e-01, + 7.0063e-01, 3.8853e-01, 9.2083e-01, 6.5516e-01, + 7.7974e-01, 6.5413e-01, 8.2971e-01, 9.8177e-01, + 4.2740e-01, 5.7567e-01, 5.1914e-01, 8.9527e-01, + 9.9707e-01, 3.8811e-01, 2.7807e-01, 7.3366e-01, + 8.6617e-01, 6.5039e-01, 5.7168e-01, 4.1089e-01, + 6.9600e-01, 4.6175e-01, 6.3599e-01, 6.6030e-01, + 6.3570e-01, 4.4529e-01, 1.5356e-01, 3.7506e-01, + 3.0920e-01, 5.5182e-02, 2.7623e-02, 9.4146e-01, + 1.4998e-01, 9.2834e-01, 4.6332e-01, 1.4701e-01, + 
7.3849e-01, 3.4665e-01, 1.2101e-01, 3.8189e-01, + 5.3427e-01, 9.6812e-01, 4.5313e-01, 5.1369e-01, + 2.9337e-01, 3.4808e-01, 6.9042e-01, 2.8985e-01, + 3.0274e-01, 7.1270e-01, 4.4209e-01, 2.5354e-01, + 4.3125e-01, 4.4832e-01, 8.4908e-01, 4.5862e-01, + 7.1769e-01, 2.8420e-01, 5.8850e-03, 1.1255e-01, + 5.9985e-01, 1.7373e-01, 8.0426e-01, 9.7592e-01, + 7.1575e-01, 6.3632e-01, 7.9312e-01, 2.0031e-01, + 7.6063e-01, 6.6472e-01, 2.2589e-01, 9.2208e-01, + 6.5396e-01, 4.2109e-02, 3.7447e-01, 7.6540e-01, + 7.0063e-01, 8.3329e-01, 8.2316e-01, 1.7633e-01, + 8.6842e-01, 5.3704e-01, 1.3609e-02, 5.6863e-02, + 2.1631e-01, 5.9704e-01, 9.5535e-01, 3.0130e-01, + 5.8427e-01, 3.1746e-01, 8.7151e-02, 3.9761e-01, + 5.7926e-01, 3.0026e-02, 1.7793e-01, 1.9793e-01, + 7.4937e-01, 5.0263e-01, 8.2545e-01, 1.1973e-01, + 1.6562e-01, 7.7594e-01, 1.8837e-01, 8.6153e-01, + 7.8911e-01, 1.3421e-01, 5.7230e-02, 3.1506e-01, + 2.1554e-01, 3.6549e-01, 2.4812e-01, 5.2062e-01, + 4.7621e-01, 7.0552e-04, 3.1203e-01, 5.0315e-01, + 5.5286e-01, 1.5342e-01, 1.0512e-01, 7.6972e-02, + 6.9395e-01, 4.3240e-01, 7.5458e-02, 5.8978e-01, + 9.2966e-01, 2.2644e-01, 6.4399e-01, 9.0790e-01, + 7.7642e-01, 3.1571e-01, 8.5384e-01, 1.8489e-01, + 6.1722e-01, 1.5912e-01, 4.4592e-01, 8.4061e-01, + 5.2576e-01, 7.0110e-01, 7.7925e-01, 3.9959e-01, + 4.5082e-01, 5.5213e-01, 9.8018e-01, 7.7799e-01, + 1.3578e-01, 3.4526e-01, 4.7119e-01, 8.5088e-01, + 6.7157e-01, 9.3544e-01, 1.5214e-01, 6.2251e-01, + 7.6667e-01, 1.7567e-01, 4.9737e-01, 5.1175e-01, + 7.1612e-01, 7.2408e-01, 6.9980e-01, 3.0419e-01, + 6.0054e-01, 5.7174e-01, 6.8748e-01, 9.4845e-01, + 7.1462e-01, 1.5120e-01, 8.6912e-01, 9.1935e-01, + 3.6111e-01, 8.1820e-02, 6.1956e-01, 3.4202e-03, + 1.2152e-01, 6.8458e-01, 3.0768e-01, 9.0789e-01, + 7.6720e-01, 2.7189e-01, 6.9353e-01, 7.9099e-01, + 2.3552e-01, 1.1454e-01, 9.8231e-01, 4.9996e-01, + 1.3841e-01, 3.5163e-02, 7.2178e-01, 6.3485e-01, + 8.3967e-01, 4.4897e-01, 4.2056e-01, 6.1844e-01, + 5.2028e-01, 8.0720e-01, 2.4333e-01, 4.6516e-01, + 6.6278e-02, 9.9295e-01, 4.4345e-01, 6.7626e-01, + 1.4199e-01, 1.9578e-01, 3.2340e-01, 1.2497e-01, + 5.4437e-01, 3.1542e-01, 7.3764e-02, 3.5269e-01, + 5.8521e-01, 1.5353e-01, 9.4268e-01, 5.1486e-01, + 3.0407e-01, 6.0438e-01, 7.0658e-01, 2.9060e-01, + 8.0022e-01, 6.5866e-01, 6.8525e-01, 8.8905e-01, + 6.6721e-01, 3.4904e-01, 7.5454e-01, 9.4064e-01, + 9.2511e-01, 1.8937e-01, 6.8604e-01, 9.1651e-01, + 8.0611e-01, 2.8921e-01, 2.2560e-01, 4.5702e-01, + 4.0794e-02, 8.9309e-02, 9.0514e-01, 6.4010e-02, + 6.6079e-01, 8.3236e-01, 2.3584e-01, 9.3562e-01, + 5.1196e-01, 5.5585e-01, 7.4674e-01, 5.3913e-01, + 2.9549e-01, 3.8633e-01, 3.6471e-01, 8.8689e-01, + 5.7123e-01, 7.8263e-01, 7.0740e-01, 2.3999e-01, + 5.2772e-01, 4.3146e-01, 9.3444e-01, 5.8832e-01, + 3.3716e-01, 6.2253e-01, 2.3752e-01, 1.5749e-01, + 6.0000e-01, 3.7743e-01, 2.2864e-01, 3.6329e-01, + 4.5749e-01, 1.3859e-01, 6.7955e-01, 5.2897e-01, + 1.2541e-01, 4.8079e-01, 2.3266e-02, 4.7930e-01, + 7.5775e-01, 9.2777e-01, 2.6071e-01, 2.2327e-02, + 4.6225e-01, 5.5258e-01, 5.2123e-01, 9.9821e-01, + 3.2558e-01, 4.7963e-01, 3.7063e-01, 1.5380e-01, + 6.6777e-01, 4.3100e-01, 5.5667e-01, 2.5923e-01, + 7.6729e-01, 9.5233e-02, 4.9658e-01, 9.4489e-02, + 5.8064e-01, 5.2373e-01, 7.1528e-01, 5.5713e-01, + 6.6993e-01, 7.5111e-01, 8.2473e-01, 6.7453e-01, + 4.3187e-01, 8.8628e-01, 6.6343e-01, 8.8953e-01, + 4.1874e-01, 9.5320e-01, 2.3026e-01, 3.3196e-01, + 7.5014e-01, 5.3870e-01, 6.6378e-01, 9.6080e-01, + 9.4847e-01, 8.7059e-01, 7.3370e-01, 4.4158e-01, + 7.1513e-02, 2.5418e-01, 6.0814e-01, 2.6452e-01, + 
2.8110e-01, 4.4729e-01, 9.4631e-01, 5.4748e-01, + 1.6969e-01, 8.8721e-01, 9.9136e-01, 1.2834e-01, + 8.8176e-03, 5.5180e-01, 3.6915e-01, 7.1940e-01, + 1.3341e-01, 8.9815e-01, 8.8533e-01, 6.5943e-01, + 1.3171e-01, 2.9590e-01, 8.2510e-01, 8.0570e-01, + 4.4295e-01, 4.3316e-01, 1.1916e-01, 5.3998e-01, + 3.2253e-01, 3.5158e-01, 2.3870e-02, 6.6068e-01, + 7.2138e-01, 4.1483e-02, 3.8432e-01, 5.5785e-01, + 7.7686e-01, 4.8200e-01, 7.1684e-01, 5.0659e-01, + 2.7888e-01, 2.3748e-01, 8.4027e-01, 8.3539e-01, + 4.7410e-01, 3.2015e-01, 5.7136e-01, 3.0491e-01, + 7.8725e-01, 1.0225e-01, 3.0280e-01, 7.0065e-01, + 3.6449e-02, 8.7209e-01, 8.0256e-02, 1.5990e-02, + 6.8739e-01, 8.2098e-02, 4.7788e-01, 7.1443e-01, + 5.4242e-01, 5.8476e-01, 4.0597e-01, 4.7993e-01, + 7.1985e-01, 5.6322e-01, 3.1445e-02, 2.0865e-01, + 1.5054e-01, 4.8874e-01, 7.9048e-01, 4.8559e-02, + 6.8106e-01, 5.5711e-02, 9.8975e-01, 2.7205e-01, + 2.5193e-01, 2.8619e-02, 1.6935e-01, 6.1505e-01, + 8.5667e-01, 5.6590e-01, 8.2641e-01, 7.4738e-01, + 4.7740e-01, 6.8425e-01, 1.6035e-01, 4.9346e-01, + 2.3684e-01, 5.1331e-01, 6.4406e-01, 6.9698e-01, + 7.8976e-02, 3.0208e-01, 7.3196e-01, 2.3264e-01, + 4.2798e-02, 2.2799e-01, 7.8877e-01, 8.7436e-01, + 3.7345e-01, 9.8637e-01, 9.6538e-01, 7.8811e-02, + 2.9433e-01, 3.7116e-01, 1.6586e-01, 8.1262e-01, + 7.1237e-01, 3.9317e-01, 4.0607e-01, 4.9944e-01, + 5.6036e-01, 5.3630e-01, 3.8482e-02, 7.0579e-02, + 1.5052e-02, 3.9458e-01, 4.9153e-01, 7.7399e-01, + 9.4132e-01, 9.8169e-01, 7.1758e-01, 2.2970e-01, + 4.1691e-01, 4.2223e-01, 8.8312e-01, 9.9968e-01, + 7.2897e-01, 8.1109e-01, 6.4614e-01, 1.7118e-01, + 7.3946e-01, 2.4816e-01, 8.9376e-01, 3.7336e-01, + 4.8095e-01, 9.0200e-01, 8.3086e-02, 7.0358e-01, + 2.6066e-01, 7.4314e-01, 8.4932e-01, 2.0470e-01, + 5.6401e-01, 5.8671e-01, 6.8786e-01, 7.7478e-01, + 1.4807e-01, 9.8777e-02, 2.9188e-01, 9.4080e-01, + 2.4621e-01, 2.0040e-01, 4.4002e-01, 9.4446e-02, + 4.7541e-02, 9.0731e-01, 6.8464e-01, 6.0628e-01, + 4.0497e-01, 4.3627e-01, 9.2313e-01, 6.5432e-01, + 7.8830e-01, 9.3517e-01, 9.1528e-01, 4.3097e-01, + 1.7597e-01, 3.2670e-01, 7.7403e-01, 2.9494e-03, + 6.9962e-01, 2.7023e-01, 2.0295e-01, 9.7020e-01, + 9.1071e-01, 2.9348e-01, 7.4686e-01, 7.1413e-01, + 7.9889e-01, 1.6951e-01, 4.1044e-01, 5.1511e-01, + 7.6664e-01, 7.5993e-01, 9.8307e-01, 1.1932e-01, + 4.2850e-01, 1.4130e-01, 5.9822e-03, 3.8538e-01, + 7.0704e-01, 1.8775e-01, 5.8466e-01, 3.2036e-01, + 1.7749e-01, 6.5440e-02, 6.4009e-01, 9.6568e-01, + 4.5433e-01, 4.8468e-01, 2.1953e-01, 5.9475e-01, + 5.9432e-01, 4.5611e-01, 9.2185e-01, 4.1013e-01, + 5.8628e-01, 7.7766e-01, 8.1271e-02, 8.7169e-01, + 1.9883e-01, 8.4098e-01, 1.2441e-01, 1.6244e-01, + 9.7958e-01, 6.3408e-02, 9.1554e-01, 7.6837e-01, + 4.2736e-01, 7.2365e-01, 5.4395e-01, 3.0341e-01, + 4.3202e-01, 1.9040e-01, 4.9899e-01, 3.7195e-01, + 8.1995e-01, 3.5838e-01, 8.5230e-01, 2.5760e-01, + 1.6760e-01, 8.5692e-01, 2.2390e-01, 8.1457e-01, + 9.8090e-01, 3.7941e-02, 5.3237e-01, 5.9576e-01, + 8.8111e-01, 6.0732e-02, 7.2630e-01, 6.8858e-01, + 2.9466e-01, 5.8679e-02, 1.3595e-01, 3.7713e-01, + 5.7706e-02, 6.0019e-01, 6.9180e-02, 1.4434e-01, + 2.3448e-01, 8.4736e-01, 8.6409e-02, 8.3623e-01, + 1.5255e-01, 7.0814e-02, 6.3446e-01, 1.3937e-01, + 7.7004e-01, 2.9534e-01, 1.9728e-01, 2.0145e-02, + 8.9785e-01, 2.1766e-01, 8.3063e-01, 2.8683e-01, + 1.5078e-01, 2.2279e-01, 5.9308e-01, 7.5600e-01, + 8.3585e-01, 3.6225e-01, 2.7372e-01, 3.0123e-01, + 1.1595e-01, 3.8505e-01, 4.0316e-01, 1.8412e-01, + 8.0834e-01, 6.1984e-03, 3.0006e-01, 4.9017e-01, + 3.6079e-01, 4.0362e-01, 7.0827e-01, 2.2091e-01, + 
7.5211e-01, 4.4784e-01, 4.4226e-01, 8.2791e-01, + 1.3555e-02, 5.3265e-01, 9.1538e-01, 6.2884e-01, + 6.6689e-01, 9.7188e-01, 8.0720e-01, 9.4091e-01, + 5.9135e-01, 4.9020e-01, 7.3827e-01, 2.0256e-01, + 1.1750e-02, 2.0886e-01, 1.9299e-01, 8.9001e-01, + 9.3115e-01, 7.9706e-01, 8.7781e-01, 9.7369e-01, + 8.4326e-01, 6.5339e-01, 7.2301e-03, 4.4875e-01, + 7.4221e-01, 5.7669e-01, 7.9307e-02, 5.3834e-01, + 7.2401e-01, 7.3897e-01, 3.3198e-01, 4.1645e-01, + 2.5751e-01, 7.7243e-01, 1.6508e-01, 3.3601e-01, + 1.2216e-01, 9.6966e-01, 6.4482e-02, 2.6164e-01, + 2.6966e-01, 8.1784e-01, 8.3198e-02, 4.5606e-01, + 8.5068e-01, 2.7896e-01, 3.4568e-01, 2.9177e-03, + 2.6710e-01, 4.1286e-01, 9.9692e-02, 4.4005e-02, + 5.3149e-01, 8.0285e-01, 9.2733e-01, 9.8415e-01, + 4.0517e-01, 6.9211e-01, 6.8433e-01, 7.4389e-01, + 7.7251e-02, 7.8116e-01, 4.3742e-01, 4.2201e-02, + 6.3515e-02, 5.3299e-01, 7.3031e-01, 1.4772e-01, + 7.8104e-01, 9.2317e-01, 5.2068e-01, 2.7853e-02, + 4.4905e-01, 3.7854e-01, 9.0340e-01, 7.3587e-01, + 8.0233e-01, 6.8180e-01, 7.2164e-01, 1.5280e-01, + 2.5554e-01, 2.0733e-01, 1.4318e-01, 6.3584e-01, + 7.8482e-01, 6.8190e-01, 7.3859e-01, 4.9329e-01, + 4.1293e-01, 5.5564e-01, 3.3628e-02, 5.6929e-01, + 1.4332e-01, 3.5152e-02, 4.6137e-01, 4.6820e-02, + 1.8616e-01, 4.8859e-01, 6.3675e-01, 1.9397e-02, + 5.9888e-01, 8.2305e-01, 2.5241e-01, 9.0465e-02, + 8.8729e-01, 1.9497e-01, 2.9390e-02, 9.4063e-01, + 6.9774e-01, 5.9022e-01, 7.9131e-01, 5.3939e-01, + 3.4305e-01, 2.4816e-01, 8.6604e-01, 9.8164e-01, + 2.2706e-01, 4.6569e-01, 6.9859e-01, 7.2450e-01, + 5.7793e-01, 6.1170e-01, 3.4399e-01, 4.2448e-01, + 1.7335e-01, 5.9485e-01, 1.6204e-01, 1.4951e-01, + 4.4594e-01, 8.0794e-01, 8.0718e-02, 5.4362e-01, + 2.3661e-01, 2.4661e-01, 5.2734e-01, 3.2346e-01, + 7.4938e-01, 9.5916e-01, 1.6617e-01, 6.3971e-01, + 7.7201e-01, 1.4215e-01, 8.2792e-01, 9.9614e-01, + 1.0073e-01, 6.4922e-01, 9.7990e-01, 5.5729e-01, + 6.4425e-01, 9.9597e-01, 8.0593e-01, 4.1681e-01, + 5.1384e-01, 1.9041e-01, 9.2539e-02, 1.4975e-01, + 5.8771e-01, 3.0319e-02, 4.8759e-01, 3.5443e-01, + 8.2084e-01, 6.9219e-01, 8.9037e-03, 8.8679e-02, + 6.1502e-01, 6.5215e-01, 7.1123e-01, 2.4235e-01, + 8.2987e-01, 6.8720e-01, 5.2843e-01, 8.2907e-01, + 3.8220e-01, 8.3923e-01, 4.6456e-01, 1.7218e-01, + 7.6149e-01, 7.1157e-01, 8.5231e-03, 6.7050e-01, + 1.3597e-01, 2.0987e-01, 1.9253e-02, 3.5143e-01, + 2.2750e-01, 1.4359e-01, 9.5187e-01, 9.2095e-01, + 7.7361e-01, 6.7116e-03, 4.6572e-01, 5.8405e-01, + 6.9670e-02, 8.2697e-01, 7.8156e-01, 8.1744e-01, + 3.3562e-01, 9.2200e-01, 6.3820e-01, 1.0771e-01, + 2.5406e-01, 6.1061e-01, 8.2491e-01, 9.4288e-01, + 3.0216e-01, 6.2460e-01, 2.6166e-01, 4.8909e-01, + 7.7266e-01, 7.0343e-01, 8.5985e-01, 1.1788e-01, + 5.9938e-01, 9.0807e-01, 8.9671e-01, 2.0172e-01, + 4.1501e-01, 3.8874e-01, 3.3951e-01, 9.9315e-01, + 3.9520e-01, 4.0721e-01, 7.9266e-01, 3.7646e-01, + 2.0510e-01, 9.8256e-01, 4.1987e-01, 6.5800e-01, + 4.1487e-01, 5.4416e-01, 1.5974e-01, 7.0997e-01, + 9.3577e-02, 7.8041e-01, 5.7365e-01, 6.4202e-01, + 2.4799e-01, 2.0890e-01, 4.0900e-01, 9.6599e-01, + 1.4471e-01, 1.4378e-01, 4.4160e-01, 6.5426e-01, + 7.6465e-01, 5.0391e-01, 1.7862e-01, 9.5819e-01, + 5.9414e-01, 7.8674e-01, 5.1636e-01, 3.7388e-01, + 8.9213e-01, 2.5634e-01, 8.3257e-02, 7.6556e-01, + 3.9435e-01, 3.2539e-01, 4.2520e-01, 2.6640e-01, + 2.9244e-01, 8.6129e-01, 4.6294e-01, 5.7341e-01, + 6.7467e-01, 2.6493e-01, 7.8233e-01, 3.8148e-01, + 1.9676e-01, 5.9091e-01, 6.7302e-01, 7.9015e-01, + 9.3704e-01, 8.3071e-01, 1.9322e-01, 1.3391e-01, + 7.9375e-01, 8.9058e-01, 3.2576e-01, 7.6412e-01, + 
2.9206e-01, 9.4303e-02, 2.6508e-01, 2.6732e-02, + 8.6474e-01, 6.5165e-01, 6.6929e-01, 3.2381e-01, + 7.3144e-01, 5.0760e-01, 8.3973e-01, 5.6446e-01, + 5.0774e-01, 4.5819e-01, 5.0472e-01, 8.2957e-01, + 6.9893e-01, 5.4532e-01, 2.1384e-01, 2.7863e-01, + 9.7407e-01, 4.9597e-01, 5.9598e-01, 1.0999e-01, + 6.0630e-01, 4.4774e-01, 1.2226e-01, 7.8700e-01, + 2.9926e-01, 9.9008e-02, 6.1613e-01, 5.9640e-01, + 6.9008e-01, 2.2797e-01, 4.3699e-01, 7.4226e-01, + 2.5932e-01, 9.4341e-01, 3.6656e-01, 8.9090e-01, + 1.1825e-01, 1.8231e-01, 9.1802e-01, 5.6671e-01, + 2.4127e-01, 8.4582e-02, 4.4272e-02, 3.4668e-01, + 5.0760e-01, 8.9506e-01, 4.8637e-01, 1.2431e-01, + 8.0726e-01, 9.6584e-01, 7.1123e-01, 8.4781e-01, + 1.6727e-01, 6.6336e-01, 5.4632e-01, 3.5048e-02, + 5.4051e-01, 7.8963e-01, 5.6210e-01, 7.3553e-01, + 4.1628e-01, 4.2604e-01, 2.2175e-01, 9.6073e-01, + 5.1360e-01, 4.9418e-01, 7.9926e-01, 3.7731e-01, + 9.8090e-01, 3.0631e-01, 1.4232e-02, 2.8290e-01, + 1.9600e-01, 2.2115e-01, 6.7154e-01, 4.3191e-01, + 4.9638e-01, 2.2288e-01, 6.3807e-01, 7.0286e-01, + 5.3409e-01, 4.7524e-01, 6.8467e-02, 9.4389e-02, + 3.3462e-01, 3.4218e-01, 6.9728e-01, 5.3604e-01, + 4.3637e-01, 1.6252e-01, 7.1201e-01, 1.5898e-01, + 1.5749e-01, 6.4281e-01, 2.9275e-02, 7.4966e-01, + 3.4188e-01, 2.2876e-01, 1.6752e-01, 7.7030e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.0219, 0.9207, 0.3669, ..., 0.7955, 0.2670, 0.3543]) +tensor([0.7308, 0.4342, 0.3494, ..., 0.0020, 0.7901, 0.4274]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1188,13 +1509,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.798607587814331 seconds +Time: 10.631132364273071 seconds -[16.32, 16.4, 16.24, 16.76, 16.72, 17.12, 17.44, 17.24, 17.36, 17.12] -[16.8, 16.56, 16.56, 19.68, 21.72, 23.52, 24.24, 24.8, 20.88, 20.0, 19.92, 19.92, 19.64, 19.64] -14.209118843078613 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 141552, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.798607587814331, 'TIME_S_1KI': 0.07628721309352274, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 269.76709407806396, 'W': 18.985490730093364} -[16.32, 16.4, 16.24, 16.76, 16.72, 17.12, 17.44, 17.24, 17.36, 17.12, 16.28, 16.24, 16.24, 16.36, 16.6, 16.4, 16.24, 16.4, 16.44, 16.28] -299.2 -14.959999999999999 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 141552, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.798607587814331, 'TIME_S_1KI': 0.07628721309352274, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 269.76709407806396, 'W': 18.985490730093364, 'J_1KI': 1.9057808725985077, 'W_1KI': 0.13412379005661076, 'W_D': 4.025490730093365, 'J_D': 57.198676185607916, 'W_D_1KI': 0.028438246934648505, 'J_D_1KI': 0.0002009031799949736} +[20.2, 20.24, 20.32, 20.32, 20.48, 20.56, 20.76, 20.64, 20.76, 20.56] +[20.48, 20.32, 21.2, 23.12, 23.88, 24.68, 24.68, 25.56, 25.0, 24.08, 23.76, 23.68, 23.4, 23.52] +14.241167068481445 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 140907, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.631132364273071, 'TIME_S_1KI': 0.07544786535994004, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
311.0322494506836, 'W': 21.8403623772563} +[20.2, 20.24, 20.32, 20.32, 20.48, 20.56, 20.76, 20.64, 20.76, 20.56, 20.52, 20.72, 20.96, 21.0, 21.0, 20.56, 20.56, 20.68, 20.48, 20.52] +370.94000000000005 +18.547000000000004 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 140907, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.631132364273071, 'TIME_S_1KI': 0.07544786535994004, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 311.0322494506836, 'W': 21.8403623772563, 'J_1KI': 2.2073583956133027, 'W_1KI': 0.15499842007321354, 'W_D': 3.2933623772562974, 'J_D': 46.90132383155815, 'W_D_1KI': 0.023372595948081342, 'J_D_1KI': 0.00016587249709440512} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_5e-05.json index ccc8254..04529c9 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 52454, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.6396803855896, "TIME_S_1KI": 0.20283830376309908, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 254.19103302001952, "W": 17.900584259593533, "J_1KI": 4.845979963778158, "W_1KI": 0.3412625206770415, "W_D": 2.811584259593534, "J_D": 39.92492630434038, "W_D_1KI": 0.05360095053939707, "J_D_1KI": 0.0010218658355777839} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 53204, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.560401201248169, "TIME_S_1KI": 0.19848885800406302, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 308.5575102233887, "W": 21.652677512658492, "J_1KI": 5.799517145767023, "W_1KI": 0.40697461680810637, "W_D": 3.1216775126584935, "J_D": 44.484892940521284, "W_D_1KI": 0.05867373717499612, "J_D_1KI": 0.001102806878712054} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_5e-05.output index 6f0e024..967cdd4 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.02812814712524414} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.027580976486206055} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 4998, 4999, 5000]), - col_indices=tensor([4541, 9541, 3383, ..., 9920, 3344, 2731]), - values=tensor([0.3320, 0.1825, 0.5042, ..., 0.6612, 0.1900, 0.5121]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 5000, 5000]), + col_indices=tensor([4636, 2207, 9397, ..., 7394, 6819, 4909]), + values=tensor([0.7602, 0.5956, 0.1963, ..., 0.6661, 0.3565, 0.0864]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.0144, 0.2553, 0.6494, ..., 0.0787, 0.9201, 0.6475]) +tensor([0.2838, 0.4836, 0.6308, ..., 0.2591, 0.6627, 0.6296]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.02812814712524414 seconds +Time: 0.027580976486206055 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 37329 -ss 10000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.472296953201294} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 38069 -ss 10000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.513024568557739} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 4999, 5000]), - col_indices=tensor([6333, 2190, 7526, ..., 2226, 3084, 9881]), - values=tensor([0.4590, 0.1089, 0.5094, ..., 0.8341, 0.9457, 0.0387]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 5000, 5000]), + col_indices=tensor([2398, 9128, 2154, ..., 1626, 1842, 5097]), + values=tensor([0.8254, 0.9519, 0.4888, ..., 0.7669, 0.5685, 0.1906]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.3900, 0.6084, 0.4843, ..., 0.7689, 0.5332, 0.9837]) +tensor([0.5556, 0.7922, 0.2509, ..., 0.4770, 0.1507, 0.6847]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 7.472296953201294 seconds +Time: 7.513024568557739 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 52454 -ss 10000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.6396803855896} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 53204 -ss 10000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.560401201248169} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 5000, 5000, 5000]), - col_indices=tensor([2165, 7276, 636, ..., 1970, 2680, 2527]), - values=tensor([0.0210, 0.6731, 0.7347, ..., 0.2518, 0.3264, 0.9787]), +tensor(crow_indices=tensor([ 0, 0, 3, ..., 4998, 4999, 5000]), + col_indices=tensor([ 182, 2198, 3609, ..., 9551, 4834, 5813]), + values=tensor([0.1200, 0.5485, 0.3269, ..., 0.1404, 0.5270, 0.0735]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.5715, 0.5141, 0.9224, ..., 0.0062, 0.8236, 0.9187]) +tensor([0.0967, 0.0255, 0.9669, ..., 0.7699, 0.3609, 0.2228]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.6396803855896 seconds +Time: 10.560401201248169 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 5000, 5000, 5000]), - col_indices=tensor([2165, 7276, 636, ..., 1970, 2680, 2527]), - values=tensor([0.0210, 0.6731, 0.7347, ..., 0.2518, 0.3264, 0.9787]), +tensor(crow_indices=tensor([ 0, 0, 3, ..., 4998, 4999, 5000]), + col_indices=tensor([ 182, 2198, 3609, ..., 9551, 4834, 5813]), + values=tensor([0.1200, 0.5485, 0.3269, ..., 0.1404, 0.5270, 0.0735]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.5715, 0.5141, 0.9224, ..., 0.0062, 0.8236, 0.9187]) +tensor([0.0967, 0.0255, 0.9669, ..., 0.7699, 0.3609, 0.2228]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.6396803855896 seconds +Time: 10.560401201248169 seconds -[16.64, 16.48, 16.64, 16.84, 17.0, 16.8, 16.72, 16.4, 16.4, 16.2] -[16.28, 16.44, 16.8, 17.88, 19.0, 19.88, 20.6, 20.92, 20.48, 20.0, 20.12, 20.12, 19.92, 19.84] -14.20015287399292 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 52454, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.6396803855896, 'TIME_S_1KI': 0.20283830376309908, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 254.19103302001952, 'W': 17.900584259593533} -[16.64, 16.48, 16.64, 16.84, 17.0, 16.8, 16.72, 16.4, 16.4, 16.2, 16.72, 16.92, 16.8, 16.88, 17.04, 17.16, 16.96, 16.8, 16.84, 16.64] -301.78 -15.088999999999999 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 52454, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.6396803855896, 'TIME_S_1KI': 0.20283830376309908, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 254.19103302001952, 'W': 17.900584259593533, 'J_1KI': 4.845979963778158, 'W_1KI': 0.3412625206770415, 'W_D': 2.811584259593534, 'J_D': 39.92492630434038, 'W_D_1KI': 0.05360095053939707, 'J_D_1KI': 0.0010218658355777839} +[20.28, 20.4, 20.36, 20.6, 20.6, 20.56, 20.8, 21.12, 20.96, 21.0] +[20.96, 20.72, 21.76, 21.76, 22.76, 23.56, 24.32, 24.84, 24.2, 24.16, 24.24, 24.04, 23.92, 23.72] +14.250316619873047 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 53204, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.560401201248169, 'TIME_S_1KI': 0.19848885800406302, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 308.5575102233887, 'W': 21.652677512658492} +[20.28, 20.4, 20.36, 20.6, 20.6, 20.56, 20.8, 21.12, 20.96, 21.0, 20.36, 20.32, 20.36, 20.36, 20.24, 20.4, 20.68, 20.72, 20.88, 20.88] +370.62 +18.531 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 53204, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.560401201248169, 'TIME_S_1KI': 0.19848885800406302, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 308.5575102233887, 'W': 21.652677512658492, 'J_1KI': 5.799517145767023, 'W_1KI': 0.40697461680810637, 'W_D': 3.1216775126584935, 'J_D': 44.484892940521284, 'W_D_1KI': 0.05867373717499612, 'J_D_1KI': 0.001102806878712054} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_0.0001.json 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_0.0001.json index 2f5c509..0be7b79 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.72194480895996, "TIME_S_1KI": 227.2194480895996, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2080.890781612396, "W": 61.453409935621025, "J_1KI": 20808.90781612396, "W_1KI": 614.5340993562103, "W_D": 45.112409935621024, "J_D": 1527.5636953213213, "W_D_1KI": 451.12409935621025, "J_D_1KI": 4511.240993562103} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 48, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.713890314102173, "TIME_S_1KI": 244.03938154379526, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1150.7120539855955, "W": 48.64237277538323, "J_1KI": 23973.167791366574, "W_1KI": 1013.3827661538173, "W_D": 29.75037277538323, "J_D": 703.791994713783, "W_D_1KI": 619.799432820484, "J_D_1KI": 12912.488183760082} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_0.0001.output index 4a89b40..6a80d34 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,15 +1,15 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.72194480895996} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 500000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.7228524684906006} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 56, 116, ..., 24999900, - 24999953, 25000000]), - col_indices=tensor([ 647, 4700, 33413, ..., 445020, 463377, - 482076]), - values=tensor([0.2494, 0.9199, 0.9974, ..., 0.2647, 0.3316, 0.8056]), +tensor(crow_indices=tensor([ 0, 47, 96, ..., 24999908, + 24999948, 25000000]), + col_indices=tensor([ 1989, 3552, 6705, ..., 466244, 486493, + 489174]), + values=tensor([0.6522, 0.2346, 0.2842, ..., 0.3095, 0.0896, 0.9153]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.1920, 0.0118, 0.3050, ..., 0.0609, 0.1776, 0.7503]) +tensor([0.6400, 0.1984, 0.0727, ..., 0.5920, 0.9764, 0.2461]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 22.72194480895996 seconds +Time: 2.7228524684906006 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 38 -ss 500000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.282682418823242} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 56, 116, ..., 24999900, - 24999953, 25000000]), - col_indices=tensor([ 647, 4700, 33413, ..., 445020, 463377, - 482076]), - values=tensor([0.2494, 0.9199, 0.9974, ..., 0.2647, 0.3316, 0.8056]), +tensor(crow_indices=tensor([ 0, 50, 88, ..., 24999898, + 24999958, 25000000]), + col_indices=tensor([ 963, 38747, 47222, ..., 450900, 451431, + 483744]), + values=tensor([0.0669, 0.7481, 0.2594, ..., 0.6735, 0.7504, 0.2787]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.1920, 0.0118, 0.3050, ..., 0.0609, 0.1776, 0.7503]) +tensor([0.9383, 0.0659, 0.9642, ..., 0.2790, 0.6849, 0.3467]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +38,52 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 22.72194480895996 seconds +Time: 8.282682418823242 seconds -[17.96, 18.12, 17.96, 17.96, 17.88, 17.96, 17.88, 17.92, 17.76, 17.8] -[17.8, 17.6, 18.48, 20.36, 21.4, 22.72, 22.72, 24.08, 25.12, 34.04, 34.44, 33.88, 34.28, 32.56, 45.4, 59.36, 77.8, 92.4, 98.2, 98.2, 95.48, 95.96, 96.12, 95.0, 93.72, 96.32, 96.24, 93.28, 93.88, 93.4, 91.28, 91.4, 93.2] -33.8612744808197 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.72194480895996, 'TIME_S_1KI': 227.2194480895996, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2080.890781612396, 'W': 61.453409935621025} -[17.96, 18.12, 17.96, 17.96, 17.88, 17.96, 17.88, 17.92, 17.76, 17.8, 17.72, 18.0, 18.44, 18.6, 18.6, 18.64, 18.76, 18.48, 18.16, 17.92] -326.82000000000005 -16.341 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 
'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.72194480895996, 'TIME_S_1KI': 227.2194480895996, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2080.890781612396, 'W': 61.453409935621025, 'J_1KI': 20808.90781612396, 'W_1KI': 614.5340993562103, 'W_D': 45.112409935621024, 'J_D': 1527.5636953213213, 'W_D_1KI': 451.12409935621025, 'J_D_1KI': 4511.240993562103} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 48 -ss 500000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.713890314102173} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 101, ..., 24999900, + 24999950, 25000000]), + col_indices=tensor([ 1386, 3010, 3959, ..., 488913, 496080, + 496424]), + values=tensor([0.0244, 0.0960, 0.4481, ..., 0.2771, 0.3942, 0.7484]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3602, 0.3084, 0.6398, ..., 0.1971, 0.4339, 0.7294]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 11.713890314102173 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 53, 101, ..., 24999900, + 24999950, 25000000]), + col_indices=tensor([ 1386, 3010, 3959, ..., 488913, 496080, + 496424]), + values=tensor([0.0244, 0.0960, 0.4481, ..., 0.2771, 0.3942, 0.7484]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3602, 0.3084, 0.6398, ..., 0.1971, 0.4339, 0.7294]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 11.713890314102173 seconds + +[21.04, 20.88, 21.04, 20.92, 21.0, 20.84, 20.96, 20.8, 20.84, 20.92] +[21.24, 21.24, 21.32, 25.36, 26.92, 28.04, 30.04, 30.72, 32.12, 39.2, 39.72, 38.88, 39.52, 34.64, 49.48, 59.92, 59.92, 74.88, 89.32, 91.52, 94.96, 99.12, 97.92] +23.656577348709106 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.713890314102173, 'TIME_S_1KI': 244.03938154379526, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1150.7120539855955, 'W': 48.64237277538323} +[21.04, 20.88, 21.04, 20.92, 21.0, 20.84, 20.96, 20.8, 20.84, 20.92, 21.32, 21.28, 21.36, 21.16, 21.08, 20.92, 20.84, 20.96, 20.88, 20.88] +377.84 +18.892 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 48, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.713890314102173, 'TIME_S_1KI': 244.03938154379526, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1150.7120539855955, 'W': 48.64237277538323, 'J_1KI': 23973.167791366574, 'W_1KI': 1013.3827661538173, 'W_D': 29.75037277538323, 'J_D': 703.791994713783, 'W_D_1KI': 619.799432820484, 'J_D_1KI': 12912.488183760082} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.json index 208d33f..c21bdce 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 279, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.429174900054932, "TIME_S_1KI": 44.549013978691505, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 925.7997315406801, "W": 55.99377793769148, "J_1KI": 3318.2786076726884, "W_1KI": 200.69454457953935, "W_D": 40.002777937691484, "J_D": 661.4049353270533, "W_D_1KI": 143.37913239315944, "J_D_1KI": 513.9037003339048} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 364, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.988611936569214, "TIME_S_1KI": 35.6829998257396, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 885.5386364746093, "W": 57.44468165762061, "J_1KI": 2432.798451853322, "W_1KI": 157.8150594989577, "W_D": 38.28968165762061, "J_D": 590.2546851634979, "W_D_1KI": 105.19143312533136, "J_D_1KI": 288.9874536410202} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.output index 2fab080..303fcff 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.175914764404297} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.28778886795043945} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 9, ..., 2499991, +tensor(crow_indices=tensor([ 0, 3, 12, ..., 2499992, 2499995, 2500000]), - col_indices=tensor([236491, 268930, 282894, ..., 290854, 362096, - 428990]), - values=tensor([0.4942, 0.2006, 0.6461, ..., 0.6339, 0.7923, 0.0061]), + col_indices=tensor([281211, 330355, 387890, ..., 320961, 441469, + 468706]), + values=tensor([0.2795, 0.2341, 0.6091, ..., 0.8168, 0.5124, 0.1850]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6272, 0.4782, 0.6613, ..., 0.5722, 0.7323, 0.6099]) +tensor([0.9736, 0.9446, 0.8562, ..., 0.4755, 0.1400, 0.2649]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 4.175914764404297 seconds +Time: 0.28778886795043945 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 251 -ss 500000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.986790895462036} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 364 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.988611936569214} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 2499995, - 2499998, 2500000]), - col_indices=tensor([168226, 184311, 332682, ..., 175948, 40749, - 152556]), - values=tensor([0.8367, 0.0584, 0.1423, ..., 0.1509, 0.1566, 0.6036]), +tensor(crow_indices=tensor([ 0, 4, 12, ..., 2499990, + 2499996, 2500000]), + col_indices=tensor([294772, 391884, 399546, ..., 177105, 373352, + 469734]), + values=tensor([0.9560, 0.6476, 0.4890, ..., 0.2401, 0.0683, 0.2223]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8910, 0.6965, 0.0939, ..., 0.1566, 0.5700, 0.8005]) +tensor([0.4409, 0.5878, 0.2813, ..., 0.9916, 0.6033, 0.0135]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,20 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 9.986790895462036 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 263 -ss 500000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.86619520187378} +Time: 12.988611936569214 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 2499987, - 2499997, 2500000]), - col_indices=tensor([120869, 339930, 358219, ..., 35981, 71933, - 400518]), - values=tensor([0.0243, 0.7300, 0.4495, ..., 0.8433, 0.9453, 0.9296]), +tensor(crow_indices=tensor([ 0, 4, 12, ..., 2499990, + 2499996, 2500000]), + col_indices=tensor([294772, 391884, 399546, ..., 177105, 373352, + 469734]), + values=tensor([0.9560, 0.6476, 0.4890, ..., 0.2401, 0.0683, 0.2223]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0560, 0.8217, 0.0541, ..., 0.5269, 0.7792, 0.2112]) +tensor([0.4409, 0.5878, 0.2813, ..., 0.9916, 0.6033, 0.0135]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -59,52 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 9.86619520187378 seconds +Time: 12.988611936569214 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 279 -ss 500000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.429174900054932} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 4, ..., 2499986, - 2499991, 2500000]), - col_indices=tensor([ 4708, 62252, 239037, ..., 346193, 443276, - 467019]), - values=tensor([0.7542, 0.0207, 0.5398, ..., 0.0649, 0.4673, 0.8331]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8471, 0.4897, 0.4001, ..., 0.3407, 0.6143, 0.4869]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 12.429174900054932 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 4, ..., 2499986, - 2499991, 2500000]), - col_indices=tensor([ 4708, 62252, 239037, ..., 346193, 443276, - 467019]), - values=tensor([0.7542, 0.0207, 0.5398, ..., 0.0649, 0.4673, 0.8331]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8471, 0.4897, 0.4001, ..., 0.3407, 0.6143, 0.4869]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 12.429174900054932 seconds - -[17.72, 17.72, 17.84, 17.8, 17.8, 18.04, 17.92, 17.8, 17.72, 17.32] -[17.36, 17.64, 18.04, 20.2, 21.44, 35.64, 48.92, 66.32, 77.96, 77.96, 91.04, 90.76, 89.6, 87.56, 86.4, 84.76] -16.53397512435913 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 279, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.429174900054932, 'TIME_S_1KI': 44.549013978691505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 925.7997315406801, 'W': 55.99377793769148} -[17.72, 17.72, 17.84, 17.8, 17.8, 18.04, 17.92, 17.8, 17.72, 17.32, 17.84, 17.72, 17.8, 17.92, 17.8, 17.72, 17.6, 17.72, 17.72, 17.48] -319.82 -15.991 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 279, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.429174900054932, 'TIME_S_1KI': 44.549013978691505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 925.7997315406801, 'W': 55.99377793769148, 'J_1KI': 3318.2786076726884, 'W_1KI': 200.69454457953935, 'W_D': 40.002777937691484, 'J_D': 661.4049353270533, 'W_D_1KI': 143.37913239315944, 'J_D_1KI': 513.9037003339048} +[20.96, 20.96, 21.12, 21.32, 21.32, 21.32, 21.44, 20.96, 20.88, 20.92] +[21.0, 20.88, 21.44, 25.64, 27.72, 39.36, 56.0, 68.8, 68.8, 84.4, 92.08, 95.0, 94.44, 95.52, 92.8] +15.415502548217773 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 364, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.988611936569214, 'TIME_S_1KI': 35.6829998257396, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 885.5386364746093, 'W': 57.44468165762061} +[20.96, 20.96, 21.12, 21.32, 21.32, 21.32, 21.44, 20.96, 20.88, 20.92, 20.56, 20.8, 20.88, 21.08, 21.44, 21.96, 
21.92, 21.8, 21.8, 21.76] +383.1 +19.155 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 364, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.988611936569214, 'TIME_S_1KI': 35.6829998257396, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 885.5386364746093, 'W': 57.44468165762061, 'J_1KI': 2432.798451853322, 'W_1KI': 157.8150594989577, 'W_D': 38.28968165762061, 'J_D': 590.2546851634979, 'W_D_1KI': 105.19143312533136, 'J_D_1KI': 288.9874536410202} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_5e-05.json index fd07ea0..aaa275d 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 16.55699610710144, "TIME_S_1KI": 165.5699610710144, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1413.1975269508362, "W": 57.485721407839506, "J_1KI": 14131.97526950836, "W_1KI": 574.857214078395, "W_D": 41.59072140783951, "J_D": 1022.44354246974, "W_D_1KI": 415.9072140783951, "J_D_1KI": 4159.072140783951} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 61, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.86849045753479, "TIME_S_1KI": 178.1719747136851, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 749.9812630844115, "W": 45.45168786127534, "J_1KI": 12294.774804662482, "W_1KI": 745.1096370700875, "W_D": 26.504687861275343, "J_D": 437.3439186797141, "W_D_1KI": 434.50307969303844, "J_D_1KI": 7123.001306443253} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_5e-05.output index 33e7f40..48f5d7a 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,15 +1,15 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 16.55699610710144} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 500000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.7125613689422607} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 27, 44, ..., 12499945, - 12499971, 12500000]), - col_indices=tensor([ 7086, 20899, 31000, ..., 441979, 480995, - 482795]), - values=tensor([0.3494, 0.1791, 0.5321, ..., 0.0256, 0.8127, 0.6614]), +tensor(crow_indices=tensor([ 0, 24, 51, ..., 12499946, + 12499976, 12500000]), + col_indices=tensor([ 5171, 17970, 38847, ..., 438477, 489000, + 492656]), + values=tensor([0.9313, 0.2908, 0.9600, ..., 0.3041, 0.7302, 0.1974]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9181, 0.2128, 0.6316, ..., 0.6360, 0.7946, 0.3835]) +tensor([0.9523, 0.8640, 0.6914, ..., 0.9451, 0.6561, 0.2747]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,17 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 16.55699610710144 seconds +Time: 1.7125613689422607 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 61 -ss 500000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.86849045753479} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 27, 44, ..., 12499945, - 12499971, 12500000]), - col_indices=tensor([ 7086, 20899, 31000, ..., 441979, 480995, - 482795]), - values=tensor([0.3494, 0.1791, 0.5321, ..., 0.0256, 0.8127, 0.6614]), +tensor(crow_indices=tensor([ 0, 25, 52, ..., 12499949, + 12499973, 12500000]), + col_indices=tensor([ 1262, 25976, 29054, ..., 430668, 440930, + 487529]), + values=tensor([0.7736, 0.2160, 0.8850, ..., 0.0805, 0.0894, 0.3255]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9181, 0.2128, 0.6316, ..., 0.6360, 0.7946, 0.3835]) +tensor([0.0040, 0.9165, 0.7576, ..., 0.7760, 0.7936, 0.1567]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -35,13 +38,31 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 16.55699610710144 seconds +Time: 10.86849045753479 seconds -[17.52, 17.52, 17.88, 17.84, 17.84, 17.64, 17.48, 17.72, 17.44, 17.28] -[17.4, 17.16, 20.64, 21.36, 23.44, 25.32, 33.76, 33.76, 31.56, 35.12, 47.32, 60.6, 67.04, 80.92, 86.0, 85.72, 87.24, 88.0, 90.8, 92.64, 93.72, 92.0, 92.0, 90.8] -24.583452939987183 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 16.55699610710144, 'TIME_S_1KI': 165.5699610710144, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1413.1975269508362, 'W': 57.485721407839506} -[17.52, 17.52, 17.88, 17.84, 17.84, 17.64, 17.48, 17.72, 17.44, 17.28, 18.0, 17.76, 17.6, 17.64, 17.44, 17.4, 17.8, 17.88, 17.76, 17.72] -317.9 -15.895 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 
'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 16.55699610710144, 'TIME_S_1KI': 165.5699610710144, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1413.1975269508362, 'W': 57.485721407839506, 'J_1KI': 14131.97526950836, 'W_1KI': 574.857214078395, 'W_D': 41.59072140783951, 'J_D': 1022.44354246974, 'W_D_1KI': 415.9072140783951, 'J_D_1KI': 4159.072140783951} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 25, 52, ..., 12499949, + 12499973, 12500000]), + col_indices=tensor([ 1262, 25976, 29054, ..., 430668, 440930, + 487529]), + values=tensor([0.7736, 0.2160, 0.8850, ..., 0.0805, 0.0894, 0.3255]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0040, 0.9165, 0.7576, ..., 0.7760, 0.7936, 0.1567]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.86849045753479 seconds + +[21.36, 21.48, 21.52, 21.4, 21.32, 21.12, 21.04, 21.04, 21.08, 21.16] +[21.12, 21.04, 21.2, 22.68, 23.64, 25.64, 31.92, 35.32, 36.08, 46.32, 59.48, 69.52, 69.52, 82.92, 96.08, 98.0] +16.500625133514404 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 61, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.86849045753479, 'TIME_S_1KI': 178.1719747136851, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.9812630844115, 'W': 45.45168786127534} +[21.36, 21.48, 21.52, 21.4, 21.32, 21.12, 21.04, 21.04, 21.08, 21.16, 20.92, 21.0, 20.96, 20.84, 21.0, 21.04, 20.92, 20.64, 20.56, 20.52] +378.93999999999994 +18.946999999999996 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 61, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.86849045753479, 'TIME_S_1KI': 178.1719747136851, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.9812630844115, 'W': 45.45168786127534, 'J_1KI': 12294.774804662482, 'W_1KI': 745.1096370700875, 'W_D': 26.504687861275343, 'J_D': 437.3439186797141, 'W_D_1KI': 434.50307969303844, 'J_D_1KI': 7123.001306443253} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.json index 9346872..100c092 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1658, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 14.466236591339111, "TIME_S_1KI": 8.725112540011526, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 
10, "J": 1691.1909151077273, "W": 65.36355506616682, "J_1KI": 1020.0186460239609, "W_1KI": 39.423133333031856, "W_D": 49.293555066166824, "J_D": 1275.40205573082, "W_D_1KI": 29.730732850522816, "J_D_1KI": 17.93168446955538} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1354, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 13.57641315460205, "TIME_S_1KI": 10.026893024078325, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 978.2523400211333, "W": 59.41936290336582, "J_1KI": 722.4906499417527, "W_1KI": 43.884315290521286, "W_D": 40.69436290336582, "J_D": 669.9727797591684, "W_D_1KI": 30.05492090351981, "J_D_1KI": 22.197135083840333} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.output index 686a0dc..0d4fdd4 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.8603906631469727} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.12745881080627441} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 249990, 249996, +tensor(crow_indices=tensor([ 0, 5, 7, ..., 249993, 249996, 250000]), - col_indices=tensor([ 116, 2220, 2597, ..., 31423, 34504, 36695]), - values=tensor([0.0356, 0.7526, 0.0114, ..., 0.2051, 0.2717, 0.4326]), + col_indices=tensor([32413, 35033, 38444, ..., 17723, 37171, 38352]), + values=tensor([0.4501, 0.5081, 0.6641, ..., 0.4983, 0.1681, 0.9317]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.6822, 0.2973, 0.4245, ..., 0.4266, 0.4462, 0.3842]) +tensor([0.7699, 0.9328, 0.9800, ..., 0.6628, 0.4452, 0.0772]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.8603906631469727 seconds +Time: 0.12745881080627441 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1220 -ss 50000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.725424766540527} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 823 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.379318475723267} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 12, ..., 249990, 249995, +tensor(crow_indices=tensor([ 0, 2, 7, ..., 249994, 249999, 250000]), - col_indices=tensor([ 1339, 17035, 23748, ..., 19329, 30492, 33219]), - values=tensor([0.1487, 0.4152, 0.3651, ..., 0.6580, 0.7478, 0.7026]), + col_indices=tensor([19687, 37471, 1460, ..., 44613, 48031, 39271]), + values=tensor([0.5848, 0.6955, 0.1599, ..., 0.4498, 0.0484, 0.3845]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4336, 0.1383, 0.7901, ..., 0.8031, 0.7630, 0.0295]) +tensor([0.7337, 0.3585, 0.9639, ..., 0.1882, 0.5518, 0.0852]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 7.725424766540527 seconds +Time: 6.379318475723267 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1658 -ss 50000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 14.466236591339111} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1354 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 13.57641315460205} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 249989, 249992, +tensor(crow_indices=tensor([ 0, 6, 9, ..., 249989, 249994, 250000]), - col_indices=tensor([14878, 23641, 26434, ..., 39221, 43609, 44125]), - values=tensor([0.9348, 0.1734, 0.7472, ..., 0.0129, 0.0523, 0.4218]), + col_indices=tensor([ 7528, 9953, 13162, ..., 34281, 39345, 47690]), + values=tensor([0.1278, 0.6541, 0.4504, ..., 0.2745, 0.8378, 0.9762]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4659, 0.1302, 0.1589, ..., 0.1214, 0.1279, 0.7413]) +tensor([0.9309, 0.5895, 0.4899, ..., 0.9489, 0.4389, 0.2461]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 14.466236591339111 seconds +Time: 13.57641315460205 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 7, ..., 249989, 249992, +tensor(crow_indices=tensor([ 0, 6, 9, ..., 249989, 249994, 250000]), - col_indices=tensor([14878, 23641, 26434, ..., 39221, 43609, 44125]), - values=tensor([0.9348, 0.1734, 0.7472, ..., 0.0129, 0.0523, 0.4218]), + col_indices=tensor([ 7528, 9953, 13162, ..., 34281, 39345, 47690]), + values=tensor([0.1278, 0.6541, 0.4504, ..., 0.2745, 0.8378, 0.9762]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4659, 0.1302, 0.1589, ..., 0.1214, 0.1279, 0.7413]) +tensor([0.9309, 0.5895, 0.4899, ..., 0.9489, 0.4389, 0.2461]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 14.466236591339111 seconds +Time: 13.57641315460205 seconds -[18.12, 18.08, 18.12, 18.12, 18.08, 18.0, 17.76, 17.64, 17.48, 17.48] -[17.28, 17.4, 17.28, 19.64, 20.44, 35.08, 49.0, 64.92, 77.28, 86.8, 86.8, 86.52, 85.52, 84.64, 84.32, 84.0, 84.16, 84.16, 84.24, 84.24, 84.04, 84.28, 82.92, 82.4, 81.88] -25.87360668182373 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1658, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 14.466236591339111, 'TIME_S_1KI': 8.725112540011526, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1691.1909151077273, 'W': 65.36355506616682} -[18.12, 18.08, 18.12, 18.12, 18.08, 18.0, 17.76, 17.64, 17.48, 17.48, 18.04, 17.84, 17.8, 17.64, 17.76, 17.92, 17.8, 17.8, 17.72, 18.04] -321.4 -16.07 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1658, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 14.466236591339111, 'TIME_S_1KI': 8.725112540011526, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1691.1909151077273, 'W': 65.36355506616682, 'J_1KI': 1020.0186460239609, 'W_1KI': 39.423133333031856, 'W_D': 49.293555066166824, 'J_D': 1275.40205573082, 'W_D_1KI': 29.730732850522816, 'J_D_1KI': 17.93168446955538} +[20.84, 20.88, 20.92, 20.8, 20.88, 20.96, 20.96, 20.72, 20.84, 20.88] +[20.8, 20.96, 21.12, 23.52, 25.24, 42.44, 56.84, 72.36, 86.48, 91.96, 91.12, 89.24, 89.24, 87.4, 85.72, 87.4] +16.46352791786194 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1354, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 13.57641315460205, 'TIME_S_1KI': 10.026893024078325, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 978.2523400211333, 'W': 59.41936290336582} +[20.84, 20.88, 20.92, 20.8, 20.88, 20.96, 20.96, 20.72, 20.84, 20.88, 20.48, 20.64, 20.76, 20.96, 21.0, 21.08, 20.84, 20.56, 20.4, 20.4] +374.5 +18.725 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1354, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 13.57641315460205, 'TIME_S_1KI': 10.026893024078325, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 978.2523400211333, 'W': 59.41936290336582, 'J_1KI': 722.4906499417527, 'W_1KI': 43.884315290521286, 'W_D': 40.69436290336582, 'J_D': 669.9727797591684, 'W_D_1KI': 30.05492090351981, 'J_D_1KI': 22.197135083840333} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.json index c314337..da68769 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 135, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.963478088378906, "TIME_S_1KI": 81.21094880280671, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 582.8568821525573, "W": 43.83509814272925, "J_1KI": 4317.458386315239, "W_1KI": 324.7044306868833, "W_D": 28.06209814272925, "J_D": 373.12992837095254, "W_D_1KI": 207.8673936498463, "J_D_1KI": 1539.758471480343} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 239, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 12.746366024017334, "TIME_S_1KI": 53.33207541429847, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 629.8619846343995, "W": 47.3255940324987, "J_1KI": 2635.4057934493703, "W_1KI": 198.0150377928816, "W_D": 28.3985940324987, "J_D": 377.96028055882465, "W_D_1KI": 118.82256917363472, "J_D_1KI": 497.16556139596116} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.output index 514e8a4..6db1157 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.26869797706604} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.6939270496368408} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 44, 102, ..., 2499900, - 2499955, 2500000]), - col_indices=tensor([ 878, 3105, 3271, ..., 44510, 45389, 45985]), - values=tensor([0.2421, 0.0051, 0.2486, ..., 0.1294, 0.9249, 0.4412]), +tensor(crow_indices=tensor([ 0, 66, 106, ..., 2499911, + 2499950, 2500000]), + col_indices=tensor([ 134, 759, 866, ..., 44498, 44758, 48976]), + values=tensor([0.2990, 0.7798, 0.5925, ..., 0.3339, 0.3471, 0.5198]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3218, 0.5110, 0.5510, ..., 0.0407, 0.3623, 0.3415]) +tensor([0.2796, 0.3993, 0.3429, ..., 0.1835, 0.2482, 0.6761]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 8.26869797706604 seconds +Time: 0.6939270496368408 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 126 -ss 50000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.729291677474976} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 151 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.231975555419922} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 44, 101, ..., 2499906, - 2499957, 2500000]), - col_indices=tensor([ 430, 2871, 2934, ..., 46471, 47392, 47877]), - values=tensor([0.4189, 0.2667, 0.2640, ..., 0.7329, 0.4126, 0.0437]), +tensor(crow_indices=tensor([ 0, 52, 98, ..., 2499895, + 2499944, 2500000]), + col_indices=tensor([ 493, 755, 1559, ..., 48761, 49039, 49905]), + values=tensor([0.9495, 0.4314, 0.0398, ..., 0.4761, 0.8641, 0.4230]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6895, 0.8779, 0.8508, ..., 0.5330, 0.3990, 0.7739]) +tensor([0.3749, 0.5339, 0.1389, ..., 0.8895, 0.3881, 0.8722]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 9.729291677474976 seconds +Time: 7.231975555419922 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 135 -ss 50000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.963478088378906} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 219 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.581777334213257} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 51, 106, ..., 2499884, - 2499945, 2500000]), - col_indices=tensor([ 1316, 2608, 2921, ..., 47281, 49169, 49691]), - values=tensor([0.1237, 0.8262, 0.6046, ..., 0.7531, 0.6389, 0.8086]), +tensor(crow_indices=tensor([ 0, 35, 91, ..., 2499916, + 2499963, 2500000]), + col_indices=tensor([ 597, 922, 947, ..., 43933, 45150, 49587]), + values=tensor([0.2433, 0.0550, 0.3468, ..., 0.1153, 0.4739, 0.8564]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.9516, 0.0943, 0.1293, ..., 0.9488, 0.5626, 0.5458]) +tensor([0.1386, 0.9905, 0.1855, ..., 0.6873, 0.7628, 0.6710]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.963478088378906 seconds +Time: 9.581777334213257 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 239 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 12.746366024017334} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 51, 106, ..., 2499884, - 2499945, 2500000]), - col_indices=tensor([ 1316, 2608, 2921, ..., 47281, 49169, 49691]), - values=tensor([0.1237, 0.8262, 0.6046, ..., 0.7531, 0.6389, 0.8086]), +tensor(crow_indices=tensor([ 0, 55, 109, ..., 2499908, + 2499954, 2500000]), + col_indices=tensor([ 1697, 2355, 3243, ..., 48565, 49427, 49466]), + values=tensor([0.6723, 0.4018, 0.1468, ..., 0.2877, 0.1034, 0.7125]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.9516, 0.0943, 0.1293, ..., 0.9488, 0.5626, 0.5458]) +tensor([0.9538, 0.9945, 0.0722, ..., 0.6194, 0.2993, 0.0465]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +76,30 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.963478088378906 seconds +Time: 12.746366024017334 seconds -[17.96, 17.88, 17.56, 17.44, 17.36, 17.24, 17.44, 17.68, 17.68, 17.68] -[17.88, 17.88, 17.88, 22.04, 23.04, 29.76, 42.2, 54.16, 64.96, 76.44, 81.04, 80.44, 80.44] -13.296579837799072 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 135, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.963478088378906, 'TIME_S_1KI': 81.21094880280671, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 582.8568821525573, 'W': 43.83509814272925} -[17.96, 17.88, 17.56, 17.44, 17.36, 17.24, 17.44, 17.68, 17.68, 17.68, 17.84, 17.6, 17.48, 17.48, 17.52, 17.32, 17.4, 17.36, 17.44, 17.68] -315.46 -15.773 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 135, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.963478088378906, 'TIME_S_1KI': 81.21094880280671, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 582.8568821525573, 'W': 43.83509814272925, 'J_1KI': 4317.458386315239, 'W_1KI': 324.7044306868833, 'W_D': 28.06209814272925, 'J_D': 373.12992837095254, 'W_D_1KI': 207.8673936498463, 'J_D_1KI': 1539.758471480343} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 55, 109, ..., 2499908, + 2499954, 2500000]), + col_indices=tensor([ 1697, 2355, 3243, ..., 48565, 49427, 49466]), + values=tensor([0.6723, 0.4018, 0.1468, ..., 0.2877, 0.1034, 0.7125]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.9538, 0.9945, 0.0722, ..., 0.6194, 0.2993, 0.0465]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 12.746366024017334 seconds + +[21.0, 21.08, 20.84, 20.84, 20.96, 21.08, 21.08, 20.8, 21.0, 21.32] +[21.6, 21.64, 24.52, 26.32, 30.72, 42.16, 42.16, 57.76, 66.2, 78.64, 80.6, 81.8, 82.24] +13.309119462966919 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 239, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 12.746366024017334, 'TIME_S_1KI': 53.33207541429847, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 629.8619846343995, 'W': 47.3255940324987} +[21.0, 21.08, 20.84, 20.84, 20.96, 21.08, 21.08, 20.8, 21.0, 21.32, 21.08, 21.2, 21.44, 21.36, 20.92, 20.96, 21.0, 20.84, 20.84, 21.2] +378.54 +18.927 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 239, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 12.746366024017334, 'TIME_S_1KI': 53.33207541429847, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 629.8619846343995, 'W': 47.3255940324987, 'J_1KI': 2635.4057934493703, 'W_1KI': 198.0150377928816, 'W_D': 28.3985940324987, 'J_D': 377.96028055882465, 'W_D_1KI': 118.82256917363472, 'J_D_1KI': 497.16556139596116} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.01.json index de9236a..1861861 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.01.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 30.498249053955078, "TIME_S_1KI": 304.9824905395508, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2007.0959778976435, "W": 47.944381683758614, "J_1KI": 20070.959778976434, "W_1KI": 479.44381683758616, "W_D": 32.291381683758615, "J_D": 1351.8143319842811, "W_D_1KI": 322.9138168375861, "J_D_1KI": 3229.138168375861} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 30, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.391724348068237, "TIME_S_1KI": 346.3908116022746, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 833.8964260101318, "W": 36.84918723219108, "J_1KI": 27796.54753367106, "W_1KI": 1228.3062410730358, "W_D": 17.98218723219108, "J_D": 406.9365647139549, "W_D_1KI": 599.406241073036, "J_D_1KI": 19980.208035767868} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.01.output 
index 9e427ef..adbb5c5 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_0.01.output @@ -1,14 +1,14 @@ -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 30.498249053955078} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10 -ss 50000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.4301743507385254} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 519, 1009, ..., 24999042, - 24999488, 25000000]), - col_indices=tensor([ 129, 342, 437, ..., 49566, 49630, 49865]), - values=tensor([0.8700, 0.4704, 0.0527, ..., 0.4978, 0.7115, 0.5319]), +tensor(crow_indices=tensor([ 0, 499, 1026, ..., 24999002, + 24999506, 25000000]), + col_indices=tensor([ 76, 152, 185, ..., 49731, 49807, 49819]), + values=tensor([0.9174, 0.2553, 0.8878, ..., 0.4714, 0.3776, 0.8139]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3352, 0.1102, 0.4216, ..., 0.1668, 0.6074, 0.9924]) +tensor([0.8947, 0.0925, 0.0743, ..., 0.5161, 0.0685, 0.1122]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,16 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 30.498249053955078 seconds +Time: 3.4301743507385254 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 30 -ss 50000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.391724348068237} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 519, 1009, ..., 24999042, - 24999488, 25000000]), - col_indices=tensor([ 129, 342, 437, ..., 49566, 49630, 49865]), - values=tensor([0.8700, 0.4704, 0.0527, ..., 0.4978, 0.7115, 0.5319]), +tensor(crow_indices=tensor([ 0, 555, 1044, ..., 24999009, + 24999484, 25000000]), + col_indices=tensor([ 323, 369, 402, ..., 49562, 49819, 49853]), + values=tensor([0.9309, 0.0394, 0.6812, ..., 0.9223, 0.0551, 0.5539]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3352, 0.1102, 0.4216, ..., 0.1668, 0.6074, 0.9924]) +tensor([0.2747, 0.6581, 0.2131, ..., 0.8836, 0.0051, 0.2302]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -33,13 +36,30 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 30.498249053955078 seconds +Time: 10.391724348068237 seconds -[17.68, 17.6, 17.56, 17.16, 17.16, 17.0, 17.0, 17.08, 17.36, 17.4] -[17.52, 17.72, 18.04, 18.88, 20.68, 21.64, 23.68, 24.04, 31.04, 33.36, 33.36, 32.8, 32.72, 32.52, 28.16, 34.64, 41.2, 49.36, 57.8, 58.4, 58.92, 62.72, 62.72, 61.04, 61.04, 59.8, 60.88, 61.56, 64.04, 67.12, 70.76, 68.32, 68.92, 65.8, 66.72, 65.88, 65.84, 68.88, 68.88, 67.08, 67.04] -41.863006830215454 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 30.498249053955078, 'TIME_S_1KI': 304.9824905395508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2007.0959778976435, 'W': 47.944381683758614} -[17.68, 17.6, 17.56, 17.16, 17.16, 17.0, 17.0, 17.08, 17.36, 17.4, 17.56, 17.4, 17.44, 17.44, 17.4, 17.6, 17.56, 17.52, 17.6, 17.72] -313.05999999999995 -15.652999999999997 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 30.498249053955078, 'TIME_S_1KI': 304.9824905395508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2007.0959778976435, 'W': 47.944381683758614, 'J_1KI': 20070.959778976434, 'W_1KI': 479.44381683758616, 'W_D': 32.291381683758615, 'J_D': 1351.8143319842811, 'W_D_1KI': 322.9138168375861, 'J_D_1KI': 3229.138168375861} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 555, 1044, ..., 24999009, + 24999484, 25000000]), + col_indices=tensor([ 323, 369, 402, ..., 49562, 49819, 49853]), + values=tensor([0.9309, 0.0394, 0.6812, ..., 0.9223, 0.0551, 0.5539]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2747, 0.6581, 0.2131, ..., 0.8836, 0.0051, 0.2302]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.391724348068237 seconds + +[20.92, 20.72, 21.0, 21.32, 21.32, 21.24, 21.08, 20.76, 20.76, 20.76] +[20.88, 20.88, 21.2, 23.44, 24.12, 25.32, 27.52, 28.24, 30.8, 39.0, 39.28, 38.24, 38.24, 38.36, 32.76, 39.0, 45.44, 53.84, 59.64, 63.36, 63.0, 63.52] +22.629981517791748 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 30, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.391724348068237, 'TIME_S_1KI': 346.3908116022746, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.8964260101318, 'W': 36.84918723219108} +[20.92, 20.72, 21.0, 21.32, 21.32, 21.24, 21.08, 20.76, 20.76, 20.76, 21.0, 20.76, 20.76, 20.68, 20.6, 20.8, 21.16, 21.16, 21.28, 21.2] +377.34 +18.866999999999997 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 30, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.391724348068237, 'TIME_S_1KI': 346.3908116022746, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 833.8964260101318, 'W': 36.84918723219108, 'J_1KI': 27796.54753367106, 'W_1KI': 1228.3062410730358, 'W_D': 17.98218723219108, 'J_D': 406.9365647139549, 'W_D_1KI': 599.406241073036, 'J_D_1KI': 19980.208035767868} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.json index 5f4cac5..f973965 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 8811, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 14.461963891983032, "TIME_S_1KI": 1.6413532961052129, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1601.2496216583254, "W": 63.889106877708535, "J_1KI": 181.7330180068466, "W_1KI": 7.251061954115144, "W_D": 47.822106877708535, "J_D": 1198.5631712055208, "W_D_1KI": 5.4275458946440285, "J_D_1KI": 0.6159965832078116} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 7814, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.055179357528687, "TIME_S_1KI": 1.5427667465483346, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1079.0575048923492, "W": 61.106530536994505, "J_1KI": 138.0928467996352, "W_1KI": 7.820134442922256, "W_D": 42.1665305369945, "J_D": 744.6030863046644, "W_D_1KI": 5.396279823009278, "J_D_1KI": 0.6905912238302122} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.output 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.output index 6d049d2..537f362 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.2421858310699463} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.19840216636657715} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 24999, 25000, 25000]), - col_indices=tensor([11801, 48673, 42443, ..., 35599, 34008, 22453]), - values=tensor([0.3951, 0.6998, 0.6224, ..., 0.4352, 0.5927, 0.1013]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([40039, 46168, 43905, ..., 5354, 30188, 25611]), + values=tensor([0.7128, 0.9467, 0.3393, ..., 0.8693, 0.4957, 0.7713]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6391, 0.6971, 0.7049, ..., 0.7658, 0.2053, 0.9702]) +tensor([0.6738, 0.8011, 0.8829, ..., 0.3394, 0.1656, 0.5423]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,19 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.2421858310699463 seconds +Time: 0.19840216636657715 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4335 -ss 50000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.2435102462768555} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 5292 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 8.100382566452026} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 25000, 25000, 25000]), - col_indices=tensor([48470, 32812, 24371, ..., 21811, 5693, 27792]), - values=tensor([7.9337e-01, 8.6969e-01, 2.1228e-02, ..., - 3.1628e-01, 5.2154e-01, 8.3659e-04]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 24999, 24999, 25000]), + col_indices=tensor([23900, 46764, 22189, ..., 17050, 32623, 22770]), + values=tensor([0.3210, 0.3821, 0.9500, ..., 0.5752, 0.4593, 0.0859]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5123, 0.3871, 0.5639, ..., 0.2434, 0.7885, 0.9337]) +tensor([0.4131, 0.4291, 0.1695, ..., 0.7277, 0.7700, 0.1738]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -35,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 6.2435102462768555 seconds +Time: 8.100382566452026 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 7290 -ss 50000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.1812264919281} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 6859 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.215805768966675} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), - col_indices=tensor([ 1561, 34915, 11685, ..., 9985, 24943, 27218]), - values=tensor([0.3207, 0.5476, 0.7721, ..., 0.5221, 0.2072, 0.7139]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([ 4953, 12651, 31779, ..., 28640, 12333, 41102]), + values=tensor([0.6176, 0.0952, 0.2603, ..., 0.5516, 0.6412, 0.0213]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7737, 0.6011, 0.2764, ..., 0.4575, 0.9058, 0.1946]) +tensor([0.4613, 0.6467, 0.1961, ..., 0.3830, 0.8289, 0.7018]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -54,18 +53,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 9.1812264919281 seconds +Time: 9.215805768966675 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 8337 -ss 50000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.935004234313965} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 7814 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.055179357528687} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 24999, 25000, 25000]), - col_indices=tensor([ 4660, 37796, 36819, ..., 14791, 855, 165]), - values=tensor([0.6560, 0.1119, 0.0106, ..., 0.5425, 0.3178, 0.8843]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 25000, 25000]), + col_indices=tensor([ 5385, 33159, 44473, ..., 49968, 12582, 30371]), + values=tensor([0.5619, 0.7037, 0.4090, ..., 0.1971, 0.5585, 0.9974]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7672, 0.9593, 0.0238, ..., 0.4054, 0.5730, 0.7422]) +tensor([0.0784, 0.9306, 0.8133, ..., 0.4010, 0.5072, 0.2577]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,18 +72,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 9.935004234313965 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 8811 -ss 50000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 14.461963891983032} +Time: 12.055179357528687 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24997, 24998, 25000]), - col_indices=tensor([ 8691, 13268, 31788, ..., 20611, 3075, 9688]), - values=tensor([0.8778, 0.6640, 0.4350, ..., 0.9614, 0.5782, 0.7592]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24998, 25000, 25000]), + col_indices=tensor([ 5385, 33159, 44473, ..., 49968, 12582, 30371]), + values=tensor([0.5619, 0.7037, 0.4090, ..., 0.1971, 0.5585, 0.9974]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9817, 0.3893, 0.5308, ..., 0.4402, 0.5461, 0.2337]) +tensor([0.0784, 0.9306, 0.8133, ..., 0.4010, 0.5072, 0.2577]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -92,29 +88,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 14.461963891983032 seconds +Time: 12.055179357528687 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 24997, 24998, 25000]), - col_indices=tensor([ 8691, 13268, 31788, ..., 20611, 3075, 9688]), - values=tensor([0.8778, 0.6640, 0.4350, ..., 0.9614, 0.5782, 0.7592]), - size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9817, 0.3893, 0.5308, ..., 0.4402, 0.5461, 0.2337]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 14.461963891983032 seconds - -[18.04, 17.96, 17.84, 17.68, 17.72, 17.8, 17.84, 17.92, 17.8, 17.8] -[17.8, 17.64, 17.64, 18.72, 19.96, 31.88, 49.36, 63.92, 79.84, 91.32, 91.4, 91.0, 90.8, 90.72, 90.68, 90.12, 89.48, 88.76, 88.76, 87.32, 86.72, 85.68, 84.4, 82.12] -25.062952041625977 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 8811, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 14.461963891983032, 'TIME_S_1KI': 1.6413532961052129, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1601.2496216583254, 'W': 63.889106877708535} -[18.04, 17.96, 17.84, 17.68, 17.72, 17.8, 17.84, 17.92, 17.8, 17.8, 18.0, 17.84, 17.72, 17.92, 17.96, 17.88, 18.12, 17.92, 17.68, 17.64] -321.34000000000003 -16.067 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 8811, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 14.461963891983032, 'TIME_S_1KI': 1.6413532961052129, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1601.2496216583254, 'W': 63.889106877708535, 'J_1KI': 181.7330180068466, 'W_1KI': 7.251061954115144, 'W_D': 47.822106877708535, 'J_D': 1198.5631712055208, 'W_D_1KI': 5.4275458946440285, 'J_D_1KI': 0.6159965832078116} +[21.0, 20.88, 21.08, 21.12, 21.12, 21.36, 21.48, 21.28, 21.28, 21.08] +[20.88, 20.84, 20.84, 21.64, 22.72, 35.92, 51.24, 69.04, 82.6, 94.52, 92.8, 91.8, 90.52, 89.84, 90.16, 90.0, 89.88] +17.658628225326538 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 7814, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 
2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.055179357528687, 'TIME_S_1KI': 1.5427667465483346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1079.0575048923492, 'W': 61.106530536994505} +[21.0, 20.88, 21.08, 21.12, 21.12, 21.36, 21.48, 21.28, 21.28, 21.08, 21.16, 20.88, 20.88, 20.84, 20.84, 20.92, 21.0, 20.92, 20.84, 20.92] +378.8 +18.94 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 7814, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.055179357528687, 'TIME_S_1KI': 1.5427667465483346, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1079.0575048923492, 'W': 61.106530536994505, 'J_1KI': 138.0928467996352, 'W_1KI': 7.820134442922256, 'W_D': 42.1665305369945, 'J_D': 744.6030863046644, 'W_D_1KI': 5.396279823009278, 'J_D_1KI': 0.6905912238302122} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_5e-05.json index a856634..6f5991f 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 3016, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 12.238471984863281, "TIME_S_1KI": 4.057848801347242, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1021.0734484481814, "W": 58.16446532539802, "J_1KI": 338.5522043926331, "W_1KI": 19.285300174203588, "W_D": 42.31446532539802, "J_D": 742.827717702389, "W_D_1KI": 14.02999513441579, "J_D_1KI": 4.651855150668366} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 2491, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 13.007378578186035, "TIME_S_1KI": 5.2217497303035065, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 968.181078557968, "W": 57.69933801128247, "J_1KI": 388.67164936088636, "W_1KI": 23.163122445316127, "W_D": 38.79533801128247, "J_D": 650.9764841918944, "W_D_1KI": 15.574202332911469, "J_D_1KI": 6.252188812890995} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_5e-05.output index 0ab6a55..f8cff14 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.37006235122680664} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.4517648220062256} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 5, ..., 124994, 124997, +tensor(crow_indices=tensor([ 0, 3, 4, ..., 124993, 124997, 125000]), - col_indices=tensor([ 3192, 33329, 36206, ..., 17521, 36763, 39198]), - values=tensor([0.7954, 0.1728, 0.6419, ..., 0.6370, 0.0715, 0.4891]), + col_indices=tensor([ 6450, 12457, 13896, ..., 11554, 15088, 16580]), + values=tensor([0.0565, 0.5167, 0.4504, ..., 0.4471, 0.0501, 0.7306]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.4965, 0.4773, 0.1313, ..., 0.0503, 0.1495, 0.6552]) +tensor([0.2612, 0.0403, 0.2390, ..., 0.0627, 0.5163, 0.3230]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.37006235122680664 seconds +Time: 0.4517648220062256 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2837 -ss 50000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.87511658668518} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2324 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.793811798095703} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 124994, 124997, +tensor(crow_indices=tensor([ 0, 3, 9, ..., 124992, 124997, 125000]), - col_indices=tensor([18930, 21812, 35293, ..., 949, 2935, 28377]), - values=tensor([0.1354, 0.7141, 0.1182, ..., 0.8833, 0.5348, 0.3796]), + col_indices=tensor([ 3716, 5281, 36891, ..., 21656, 39958, 48014]), + values=tensor([0.2453, 0.4520, 0.9669, ..., 0.2730, 0.3289, 0.7482]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.0699, 0.1291, 0.1295, ..., 0.2384, 0.2084, 0.3934]) +tensor([0.3004, 0.4648, 0.1335, ..., 0.3195, 0.9169, 0.1653]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 9.87511658668518 seconds +Time: 9.793811798095703 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3016 -ss 50000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 12.238471984863281} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2491 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 13.007378578186035} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 8, ..., 124995, 124998, +tensor(crow_indices=tensor([ 0, 2, 4, ..., 124995, 124998, 125000]), - col_indices=tensor([ 1947, 25944, 43942, ..., 29833, 871, 28509]), - values=tensor([0.4807, 0.5814, 0.8403, ..., 0.1650, 0.5150, 0.8001]), + col_indices=tensor([45362, 46706, 9382, ..., 48043, 47493, 49777]), + values=tensor([0.5681, 0.3536, 0.5768, ..., 0.4220, 0.9986, 0.2845]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.7842, 0.6693, 0.1792, ..., 0.7791, 0.2472, 0.4723]) +tensor([0.4744, 0.3838, 0.9712, ..., 0.0971, 0.2301, 0.3333]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 12.238471984863281 seconds +Time: 13.007378578186035 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 8, ..., 124995, 124998, +tensor(crow_indices=tensor([ 0, 2, 4, ..., 124995, 124998, 125000]), - col_indices=tensor([ 1947, 25944, 43942, ..., 29833, 871, 28509]), - values=tensor([0.4807, 0.5814, 0.8403, ..., 0.1650, 0.5150, 0.8001]), + col_indices=tensor([45362, 46706, 9382, ..., 48043, 47493, 49777]), + values=tensor([0.5681, 0.3536, 0.5768, ..., 0.4220, 0.9986, 0.2845]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.7842, 0.6693, 0.1792, ..., 0.7791, 0.2472, 0.4723]) +tensor([0.4744, 0.3838, 0.9712, ..., 0.0971, 0.2301, 0.3333]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 12.238471984863281 seconds +Time: 13.007378578186035 seconds -[17.56, 17.48, 17.4, 17.56, 17.72, 17.76, 17.48, 17.56, 17.56, 17.52] -[17.52, 17.68, 17.76, 20.16, 20.96, 34.48, 49.64, 65.8, 77.8, 88.2, 87.72, 86.68, 86.76, 87.64, 87.64, 89.04, 89.44] -17.554935693740845 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 3016, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 12.238471984863281, 'TIME_S_1KI': 4.057848801347242, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1021.0734484481814, 'W': 58.16446532539802} -[17.56, 17.48, 17.4, 17.56, 17.72, 17.76, 17.48, 17.56, 17.56, 17.52, 17.72, 17.84, 17.8, 17.68, 17.68, 17.52, 17.56, 17.6, 17.48, 17.84] -317.0 -15.85 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 3016, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 12.238471984863281, 'TIME_S_1KI': 4.057848801347242, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1021.0734484481814, 'W': 58.16446532539802, 'J_1KI': 338.5522043926331, 'W_1KI': 19.285300174203588, 'W_D': 42.31446532539802, 'J_D': 742.827717702389, 'W_D_1KI': 14.02999513441579, 'J_D_1KI': 4.651855150668366} +[20.96, 21.0, 21.08, 20.88, 20.96, 21.2, 21.32, 21.36, 21.32, 21.32] +[20.96, 20.88, 21.6, 23.64, 30.64, 30.64, 47.84, 61.28, 75.88, 89.28, 90.92, 90.88, 90.16, 87.96, 86.4, 85.72] +16.779760599136353 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 2491, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 13.007378578186035, 'TIME_S_1KI': 5.2217497303035065, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 968.181078557968, 'W': 57.69933801128247} +[20.96, 21.0, 21.08, 20.88, 20.96, 21.2, 21.32, 21.36, 21.32, 21.32, 21.04, 21.12, 20.76, 20.84, 20.64, 20.76, 20.8, 20.96, 20.96, 20.92] +378.08000000000004 +18.904000000000003 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 2491, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 13.007378578186035, 'TIME_S_1KI': 5.2217497303035065, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 968.181078557968, 'W': 57.69933801128247, 'J_1KI': 388.67164936088636, 'W_1KI': 23.163122445316127, 'W_D': 38.79533801128247, 'J_D': 650.9764841918944, 'W_D_1KI': 15.574202332911469, 'J_D_1KI': 6.252188812890995} diff --git 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.json index fa80299..5155f94 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 93664, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.030004501342773, "TIME_S_1KI": 0.107084947272621, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 253.85587940216067, "W": 17.876929919933424, "J_1KI": 2.7102822792338643, "W_1KI": 0.19086233686297213, "W_D": 2.859929919933423, "J_D": 40.61156071567538, "W_D_1KI": 0.03053392893676784, "J_D_1KI": 0.00032599428741851555} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 97563, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.701276540756226, "TIME_S_1KI": 0.10968580856222365, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 307.9483871459961, "W": 21.602081513270264, "J_1KI": 3.156405472832899, "W_1KI": 0.2214167411136421, "W_D": 3.030081513270261, "J_D": 43.19531496810911, "W_D_1KI": 0.0310576910639306, "J_D_1KI": 0.00031833472795968345} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.output index c2bcc94..d679ce7 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.018899202346801758} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.01863241195678711} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2500, 2500]), - col_indices=tensor([1031, 4368, 4092, ..., 190, 4399, 1962]), - values=tensor([0.5248, 0.7492, 0.4749, ..., 0.9820, 0.4892, 0.6710]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), + col_indices=tensor([2466, 3111, 709, ..., 1729, 3787, 1090]), + values=tensor([0.6942, 0.7590, 0.7512, ..., 0.6465, 0.5765, 0.6510]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.7298, 0.5753, 0.5464, ..., 0.2530, 0.5594, 0.0214]) +tensor([0.7305, 0.7014, 0.7646, ..., 0.2723, 0.1317, 0.4517]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,37 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.018899202346801758 seconds +Time: 0.01863241195678711 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 55557 -ss 5000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.228086948394775} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 2499, 2500, 2500]), - col_indices=tensor([ 532, 4399, 2173, ..., 2637, 3554, 2146]), - values=tensor([0.0459, 0.9731, 0.3457, ..., 0.8215, 0.1549, 0.6550]), - size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.2796, 0.3928, 0.8109, ..., 0.2089, 0.4148, 0.7694]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 2500 -Density: 0.0001 -Time: 6.228086948394775 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 93664 -ss 5000 -sd 0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.030004501342773} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 56353 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.0648581981658936} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 1, ..., 2498, 2498, 2500]), - col_indices=tensor([1824, 1160, 4169, ..., 4733, 1262, 4559]), - values=tensor([0.9986, 0.6087, 0.2000, ..., 0.7208, 0.5140, 0.1151]), + col_indices=tensor([3295, 3837, 4863, ..., 1765, 634, 3388]), + values=tensor([0.5505, 0.3608, 0.8762, ..., 0.7549, 0.6265, 0.4131]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.6500, 0.1776, 0.6063, ..., 0.2915, 0.3213, 0.4804]) +tensor([0.3793, 0.4403, 0.5456, ..., 0.1478, 0.4802, 0.0889]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.030004501342773 seconds +Time: 6.0648581981658936 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 97563 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.701276540756226} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 2498, 2498, 2500]), - col_indices=tensor([1824, 1160, 4169, ..., 4733, 1262, 4559]), - values=tensor([0.9986, 0.6087, 0.2000, ..., 0.7208, 0.5140, 0.1151]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([2503, 3491, 1356, ..., 4170, 132, 3874]), + values=tensor([0.5964, 0.9649, 0.4378, ..., 0.7609, 0.5045, 0.4703]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.6500, 0.1776, 0.6063, ..., 0.2915, 0.3213, 0.4804]) +tensor([0.5573, 0.9521, 0.0475, ..., 0.2099, 0.5843, 0.2090]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +53,29 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.030004501342773 seconds +Time: 10.701276540756226 seconds -[16.8, 16.8, 17.16, 17.24, 17.04, 16.96, 16.96, 17.0, 16.6, 17.0] -[16.8, 16.84, 17.08, 18.24, 19.2, 19.72, 20.36, 20.2, 20.2, 20.04, 19.8, 19.8, 20.0, 19.96] -14.200194358825684 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 93664, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.030004501342773, 'TIME_S_1KI': 0.107084947272621, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 253.85587940216067, 'W': 17.876929919933424} -[16.8, 16.8, 17.16, 17.24, 17.04, 16.96, 16.96, 17.0, 16.6, 17.0, 16.72, 16.48, 16.44, 16.48, 16.32, 16.48, 16.4, 16.28, 16.28, 16.32] -300.34000000000003 -15.017000000000001 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 93664, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.030004501342773, 'TIME_S_1KI': 0.107084947272621, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 253.85587940216067, 'W': 17.876929919933424, 'J_1KI': 2.7102822792338643, 'W_1KI': 0.19086233686297213, 'W_D': 
2.859929919933423, 'J_D': 40.61156071567538, 'W_D_1KI': 0.03053392893676784, 'J_D_1KI': 0.00032599428741851555} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([2503, 3491, 1356, ..., 4170, 132, 3874]), + values=tensor([0.5964, 0.9649, 0.4378, ..., 0.7609, 0.5045, 0.4703]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.5573, 0.9521, 0.0475, ..., 0.2099, 0.5843, 0.2090]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.701276540756226 seconds + +[20.48, 20.48, 20.52, 20.44, 20.52, 20.52, 20.48, 20.8, 20.92, 20.96] +[20.92, 21.0, 21.24, 22.08, 23.24, 23.84, 24.64, 24.44, 24.24, 24.24, 23.76, 23.48, 23.64, 23.36] +14.25549602508545 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 97563, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.701276540756226, 'TIME_S_1KI': 0.10968580856222365, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 307.9483871459961, 'W': 21.602081513270264} +[20.48, 20.48, 20.52, 20.44, 20.52, 20.52, 20.48, 20.8, 20.92, 20.96, 20.44, 20.32, 20.48, 20.4, 20.44, 20.68, 20.92, 21.08, 21.0, 21.0] +371.44000000000005 +18.572000000000003 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 97563, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.701276540756226, 'TIME_S_1KI': 0.10968580856222365, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 307.9483871459961, 'W': 21.602081513270264, 'J_1KI': 3.156405472832899, 'W_1KI': 0.2214167411136421, 'W_D': 3.030081513270261, 'J_D': 43.19531496810911, 'W_D_1KI': 0.0310576910639306, 'J_D_1KI': 0.00031833472795968345} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.json index 37907ba..ac118ca 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 17878, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.553908824920654, "TIME_S_1KI": 0.5903293894686572, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 275.88499256134037, "W": 19.43945882245899, "J_1KI": 15.431535549912763, "W_1KI": 1.0873396813099334, "W_D": 4.574458822458993, "J_D": 64.92076501369485, "W_D_1KI": 0.2558708369201808, "J_D_1KI": 0.014312050392671485} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 17864, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.506640911102295, 
"TIME_S_1KI": 0.5881460429412391, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 342.50576765060424, "W": 24.054170329528834, "J_1KI": 19.172960571574354, "W_1KI": 1.346516476126782, "W_D": 5.491170329528835, "J_D": 78.18841736173633, "W_D_1KI": 0.30738750165298, "J_D_1KI": 0.017207092569020376} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.output index e1fffa0..4ba74a9 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06726479530334473} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.10043621063232422} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 16, ..., 24990, 24996, 25000]), - col_indices=tensor([ 219, 546, 972, ..., 2610, 3216, 3318]), - values=tensor([0.2561, 0.1283, 0.3219, ..., 0.1859, 0.9829, 0.7598]), +tensor(crow_indices=tensor([ 0, 4, 8, ..., 24992, 24997, 25000]), + col_indices=tensor([ 381, 2226, 3241, ..., 802, 3168, 3287]), + values=tensor([0.1743, 0.3626, 0.9122, ..., 0.6539, 0.6235, 0.9126]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7133, 0.2668, 0.2369, ..., 0.2811, 0.0980, 0.8981]) +tensor([0.5842, 0.8963, 0.5083, ..., 0.9307, 0.9302, 0.9679]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.06726479530334473 seconds +Time: 0.10043621063232422 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 15609 -ss 5000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.167168855667114} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 10454 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 6.144280433654785} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 17, ..., 24994, 24998, 25000]), - col_indices=tensor([ 299, 1244, 1941, ..., 4267, 280, 4025]), - values=tensor([0.2861, 0.6940, 0.5528, ..., 0.3063, 0.0705, 0.3058]), +tensor(crow_indices=tensor([ 0, 3, 6, ..., 24993, 24996, 25000]), + col_indices=tensor([1192, 2785, 3904, ..., 226, 840, 1091]), + values=tensor([0.3326, 0.6209, 0.1598, ..., 0.1266, 0.1940, 0.6378]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.7288, 0.9023, 0.1050, ..., 0.1276, 0.6415, 0.2460]) +tensor([0.7146, 0.7823, 0.7677, ..., 0.0176, 0.3523, 0.1129]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 9.167168855667114 seconds +Time: 6.144280433654785 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17878 -ss 5000 -sd 0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.553908824920654} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17864 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.506640911102295} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 13, ..., 24995, 24998, 25000]), - col_indices=tensor([1009, 1198, 2341, ..., 3808, 999, 4327]), - values=tensor([0.4127, 0.3151, 0.8058, ..., 0.6562, 0.4614, 0.0831]), +tensor(crow_indices=tensor([ 0, 5, 10, ..., 24987, 24994, 25000]), + col_indices=tensor([ 191, 921, 1763, ..., 2085, 2421, 3701]), + values=tensor([0.3338, 0.9400, 0.8684, ..., 0.9864, 0.7382, 0.7373]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8967, 0.5216, 0.2765, ..., 0.9189, 0.4761, 0.3303]) +tensor([0.6472, 0.3324, 0.5921, ..., 0.3050, 0.5964, 0.5620]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.553908824920654 seconds +Time: 10.506640911102295 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 13, ..., 24995, 24998, 25000]), - col_indices=tensor([1009, 1198, 2341, ..., 3808, 999, 4327]), - values=tensor([0.4127, 0.3151, 0.8058, ..., 0.6562, 0.4614, 0.0831]), +tensor(crow_indices=tensor([ 0, 5, 10, ..., 24987, 24994, 25000]), + col_indices=tensor([ 191, 921, 1763, ..., 2085, 2421, 3701]), + values=tensor([0.3338, 0.9400, 0.8684, ..., 0.9864, 0.7382, 0.7373]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8967, 0.5216, 0.2765, ..., 0.9189, 0.4761, 0.3303]) +tensor([0.6472, 0.3324, 0.5921, ..., 0.3050, 0.5964, 0.5620]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.553908824920654 seconds +Time: 10.506640911102295 seconds -[16.32, 16.36, 16.36, 16.56, 16.68, 16.68, 16.64, 16.56, 16.68, 16.52] -[16.44, 16.88, 16.88, 17.92, 19.12, 25.32, 25.8, 25.92, 25.32, 22.16, 19.68, 19.6, 19.64, 19.4] -14.192009925842285 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17878, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.553908824920654, 'TIME_S_1KI': 0.5903293894686572, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 275.88499256134037, 'W': 19.43945882245899} -[16.32, 16.36, 16.36, 16.56, 16.68, 16.68, 16.64, 16.56, 16.68, 16.52, 16.52, 16.4, 16.4, 16.2, 16.28, 16.36, 16.32, 16.64, 17.04, 16.92] -297.29999999999995 -14.864999999999998 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17878, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.553908824920654, 'TIME_S_1KI': 0.5903293894686572, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 275.88499256134037, 'W': 19.43945882245899, 'J_1KI': 15.431535549912763, 'W_1KI': 1.0873396813099334, 'W_D': 4.574458822458993, 'J_D': 64.92076501369485, 'W_D_1KI': 0.2558708369201808, 'J_D_1KI': 0.014312050392671485} +[20.24, 20.16, 20.08, 20.16, 20.52, 20.6, 20.6, 20.76, 20.88, 20.88] +[21.0, 20.96, 20.88, 24.4, 25.24, 32.32, 33.08, 30.84, 30.64, 23.88, 23.96, 24.08, 24.08, 23.88] +14.238934993743896 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17864, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.506640911102295, 'TIME_S_1KI': 0.5881460429412391, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 342.50576765060424, 'W': 24.054170329528834} +[20.24, 20.16, 20.08, 20.16, 20.52, 20.6, 20.6, 20.76, 20.88, 20.88, 20.92, 20.88, 20.76, 20.36, 20.56, 20.6, 21.0, 20.96, 20.96, 20.8] +371.26 +18.563 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17864, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.506640911102295, 'TIME_S_1KI': 0.5881460429412391, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 342.50576765060424, 'W': 24.054170329528834, 'J_1KI': 19.172960571574354, 'W_1KI': 1.346516476126782, 'W_D': 5.491170329528835, 'J_D': 78.18841736173633, 'W_D_1KI': 0.30738750165298, 'J_D_1KI': 0.017207092569020376} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.json 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.json index b2ee0da..261bbac 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1934, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.419332265853882, "TIME_S_1KI": 5.38745205059663, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 296.42124423980715, "W": 20.84298720811279, "J_1KI": 153.26848202678755, "W_1KI": 10.777139197576416, "W_D": 5.978987208112789, "J_D": 85.03094157409672, "W_D_1KI": 3.0915135512475644, "J_D_1KI": 1.598507523912908} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1964, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.76218032836914, "TIME_S_1KI": 5.47972521811056, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 334.6203072166443, "W": 23.467862072508055, "J_1KI": 170.3769385013464, "W_1KI": 11.949013275207768, "W_D": 4.762862072508057, "J_D": 67.9120392394066, "W_D_1KI": 2.425082521643613, "J_D_1KI": 1.234767068046646} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.output index 048fad0..21143e1 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.6361579895019531} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.602431058883667} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 88, ..., 249912, 249956, +tensor(crow_indices=tensor([ 0, 49, 106, ..., 249903, 249951, 250000]), - col_indices=tensor([ 150, 155, 160, ..., 4906, 4918, 4974]), - values=tensor([0.5565, 0.2611, 0.6011, ..., 0.5545, 0.3341, 0.9118]), + col_indices=tensor([ 47, 127, 262, ..., 4669, 4927, 4936]), + values=tensor([0.1328, 0.6049, 0.1617, ..., 0.5221, 0.5231, 0.9043]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.3996, 0.8652, 0.4997, ..., 0.5638, 0.5133, 0.5074]) +tensor([0.1699, 0.4621, 0.4296, ..., 0.6998, 0.3934, 0.8399]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.6361579895019531 seconds +Time: 0.602431058883667 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1650 -ss 5000 -sd 0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.95723843574524} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1742 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.31101369857788} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 48, 105, ..., 249902, 249944, +tensor(crow_indices=tensor([ 0, 56, 108, ..., 249894, 249946, 250000]), - col_indices=tensor([ 142, 192, 269, ..., 4444, 4647, 4854]), - values=tensor([0.2391, 0.6427, 0.3721, ..., 0.7376, 0.6381, 0.7309]), + col_indices=tensor([ 29, 153, 200, ..., 4735, 4743, 4750]), + values=tensor([0.5893, 0.8988, 0.3934, ..., 0.8056, 0.0308, 0.8684]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0904, 0.1755, 0.9906, ..., 0.2784, 0.3906, 0.1798]) +tensor([0.6292, 0.1193, 0.5842, ..., 0.0946, 0.4658, 0.4768]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 8.95723843574524 seconds +Time: 9.31101369857788 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1934 -ss 5000 -sd 0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.419332265853882} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1964 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.76218032836914} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 67, 123, ..., 249901, 249950, +tensor(crow_indices=tensor([ 0, 51, 101, ..., 249884, 249941, 250000]), - col_indices=tensor([ 77, 297, 304, ..., 4744, 4962, 4980]), - values=tensor([0.1458, 0.7428, 0.5307, ..., 0.5713, 0.0836, 0.9823]), + col_indices=tensor([ 337, 422, 423, ..., 4774, 4825, 4844]), + values=tensor([0.7788, 0.2591, 0.4387, ..., 0.3372, 0.5277, 0.6109]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.8374, 0.5393, 0.4548, ..., 0.1972, 0.5711, 0.3877]) +tensor([0.9559, 0.3555, 0.1831, ..., 0.4118, 0.5326, 0.1704]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.419332265853882 seconds +Time: 10.76218032836914 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 67, 123, ..., 249901, 249950, +tensor(crow_indices=tensor([ 0, 51, 101, ..., 249884, 249941, 250000]), - col_indices=tensor([ 77, 297, 304, ..., 4744, 4962, 4980]), - values=tensor([0.1458, 0.7428, 0.5307, ..., 0.5713, 0.0836, 0.9823]), + col_indices=tensor([ 337, 422, 423, ..., 4774, 4825, 4844]), + values=tensor([0.7788, 0.2591, 0.4387, ..., 0.3372, 0.5277, 0.6109]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.8374, 0.5393, 0.4548, ..., 0.1972, 0.5711, 0.3877]) +tensor([0.9559, 0.3555, 0.1831, ..., 0.4118, 0.5326, 0.1704]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.419332265853882 seconds +Time: 10.76218032836914 seconds -[16.44, 16.56, 16.48, 16.6, 16.72, 16.56, 16.72, 16.6, 16.72, 16.64] -[16.6, 16.56, 16.56, 20.2, 20.72, 28.04, 28.92, 29.0, 26.44, 26.36, 20.12, 20.28, 20.2, 20.4] -14.221629619598389 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1934, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.419332265853882, 'TIME_S_1KI': 5.38745205059663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.42124423980715, 'W': 20.84298720811279} -[16.44, 16.56, 16.48, 16.6, 16.72, 16.56, 16.72, 16.6, 16.72, 16.64, 16.68, 16.44, 16.44, 16.2, 16.24, 16.32, 16.56, 16.56, 16.52, 16.32] -297.28 -14.863999999999999 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1934, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.419332265853882, 'TIME_S_1KI': 5.38745205059663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.42124423980715, 'W': 20.84298720811279, 'J_1KI': 153.26848202678755, 'W_1KI': 10.777139197576416, 'W_D': 5.978987208112789, 'J_D': 85.03094157409672, 'W_D_1KI': 3.0915135512475644, 'J_D_1KI': 1.598507523912908} +[20.8, 20.8, 20.48, 20.4, 20.8, 21.2, 21.24, 21.52, 21.16, 21.16] +[20.72, 20.72, 21.88, 22.96, 26.72, 26.72, 27.88, 28.92, 28.12, 27.84, 24.96, 24.72, 24.4, 24.28] +14.25866174697876 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1964, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.76218032836914, 'TIME_S_1KI': 5.47972521811056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 334.6203072166443, 'W': 23.467862072508055} +[20.8, 20.8, 20.48, 20.4, 20.8, 21.2, 21.24, 21.52, 21.16, 21.16, 20.6, 20.68, 20.6, 20.56, 20.52, 20.52, 20.68, 20.64, 20.64, 20.76] +374.09999999999997 +18.705 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1964, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.76218032836914, 'TIME_S_1KI': 5.47972521811056, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 334.6203072166443, 'W': 23.467862072508055, 'J_1KI': 170.3769385013464, 'W_1KI': 11.949013275207768, 'W_D': 4.762862072508057, 'J_D': 67.9120392394066, 'W_D_1KI': 2.425082521643613, 'J_D_1KI': 1.234767068046646} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.json 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.json index e125259..adda367 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 394, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.532436847686768, "TIME_S_1KI": 26.732073217479105, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 305.06099658966065, "W": 21.37670924510136, "J_1KI": 774.2664888062453, "W_1KI": 54.25560722106944, "W_D": 6.54270924510136, "J_D": 93.36916079187395, "W_D_1KI": 16.60586102817604, "J_D_1KI": 42.146855401462034} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 388, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.474353551864624, "TIME_S_1KI": 26.995756576970678, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 358.88038782119753, "W": 25.07640303399043, "J_1KI": 924.9494531474163, "W_1KI": 64.62990472677946, "W_D": 6.515403033990431, "J_D": 93.24504652762414, "W_D_1KI": 16.79227586080008, "J_D_1KI": 43.27906149690742} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.output index 49ea4fe..b142ac2 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.876847505569458} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.7017886638641357} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 270, 507, ..., 1249492, - 1249752, 1250000]), - col_indices=tensor([ 22, 47, 49, ..., 4884, 4921, 4983]), - values=tensor([0.5298, 0.6030, 0.6480, ..., 0.1911, 0.5303, 0.8187]), +tensor(crow_indices=tensor([ 0, 241, 494, ..., 1249537, + 1249753, 1250000]), + col_indices=tensor([ 2, 8, 17, ..., 4972, 4993, 4996]), + values=tensor([0.4645, 0.2180, 0.1087, ..., 0.3537, 0.6799, 0.0264]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.2222, 0.3805, 0.6044, ..., 0.3111, 0.2543, 0.4407]) +tensor([0.7534, 0.3194, 0.4895, ..., 0.0345, 0.1094, 0.2075]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 2.876847505569458 seconds +Time: 2.7017886638641357 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 364 -ss 5000 -sd 0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.686374425888062} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 388 -ss 5000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.474353551864624} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 236, 499, ..., 1249504, - 1249745, 1250000]), - col_indices=tensor([ 18, 50, 76, ..., 4926, 4932, 4975]), - values=tensor([0.1676, 0.6835, 0.4526, ..., 0.3904, 0.9402, 0.3969]), +tensor(crow_indices=tensor([ 0, 241, 503, ..., 1249508, + 1249751, 1250000]), + col_indices=tensor([ 38, 44, 134, ..., 4933, 4935, 4955]), + values=tensor([0.2322, 0.9230, 0.4915, ..., 0.3294, 0.3139, 0.8303]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.5247, 0.4082, 0.5074, ..., 0.5246, 0.7808, 0.6822]) +tensor([0.3153, 0.8250, 0.4991, ..., 0.6990, 0.6960, 0.9986]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 9.686374425888062 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 394 -ss 5000 -sd 0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.532436847686768} +Time: 10.474353551864624 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 285, 546, ..., 1249527, - 1249741, 1250000]), - col_indices=tensor([ 14, 79, 87, ..., 4936, 4956, 4998]), - values=tensor([0.7519, 0.7358, 0.0306, ..., 0.8162, 0.7664, 0.2246]), +tensor(crow_indices=tensor([ 0, 241, 503, ..., 1249508, + 1249751, 1250000]), + col_indices=tensor([ 38, 44, 134, ..., 4933, 4935, 4955]), + values=tensor([0.2322, 0.9230, 0.4915, ..., 0.3294, 0.3139, 0.8303]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.9515, 0.5215, 0.2042, ..., 0.7715, 0.5250, 0.3133]) +tensor([0.3153, 0.8250, 0.4991, ..., 0.6990, 0.6960, 0.9986]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,30 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.532436847686768 seconds +Time: 10.474353551864624 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 285, 546, ..., 1249527, - 1249741, 1250000]), - col_indices=tensor([ 14, 79, 87, ..., 4936, 4956, 4998]), - values=tensor([0.7519, 0.7358, 0.0306, ..., 0.8162, 0.7664, 0.2246]), - size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.9515, 0.5215, 0.2042, ..., 0.7715, 0.5250, 0.3133]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250000 -Density: 0.05 -Time: 10.532436847686768 seconds - -[16.32, 16.28, 16.04, 16.2, 16.32, 16.36, 16.36, 16.36, 16.64, 16.6] -[16.72, 16.84, 17.04, 21.76, 24.68, 30.6, 31.68, 29.24, 27.56, 20.44, 20.44, 20.32, 20.44, 20.32] -14.270718336105347 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 394, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.532436847686768, 'TIME_S_1KI': 26.732073217479105, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 305.06099658966065, 'W': 21.37670924510136} -[16.32, 16.28, 16.04, 16.2, 16.32, 16.36, 16.36, 16.36, 16.64, 16.6, 16.32, 16.36, 16.44, 16.6, 16.76, 16.8, 16.64, 16.76, 16.76, 16.76] -296.68 -14.834 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 394, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.532436847686768, 'TIME_S_1KI': 26.732073217479105, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 305.06099658966065, 'W': 21.37670924510136, 'J_1KI': 774.2664888062453, 'W_1KI': 54.25560722106944, 'W_D': 6.54270924510136, 'J_D': 93.36916079187395, 'W_D_1KI': 16.60586102817604, 'J_D_1KI': 42.146855401462034} +[20.84, 20.92, 20.8, 20.8, 20.6, 20.48, 20.48, 20.4, 20.56, 20.64] +[20.76, 20.68, 20.96, 24.88, 26.08, 34.24, 34.88, 32.48, 32.36, 26.92, 26.92, 24.32, 24.24, 24.08] +14.311477899551392 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 388, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.474353551864624, 'TIME_S_1KI': 
26.995756576970678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 358.88038782119753, 'W': 25.07640303399043} +[20.84, 20.92, 20.8, 20.8, 20.6, 20.48, 20.48, 20.4, 20.56, 20.64, 20.72, 20.76, 20.6, 20.52, 20.56, 20.6, 20.64, 20.64, 20.48, 20.56] +371.22 +18.561 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 388, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.474353551864624, 'TIME_S_1KI': 26.995756576970678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 358.88038782119753, 'W': 25.07640303399043, 'J_1KI': 924.9494531474163, 'W_1KI': 64.62990472677946, 'W_D': 6.515403033990431, 'J_D': 93.24504652762414, 'W_D_1KI': 16.79227586080008, 'J_D_1KI': 43.27906149690742} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.json index 5879ae9..b7a7ec4 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 197, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.453055620193481, "TIME_S_1KI": 53.06119604159128, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 328.8498130035401, "W": 21.582740338257725, "J_1KI": 1669.2883908809142, "W_1KI": 109.55705755460775, "W_D": 6.5647403382577245, "J_D": 100.02500140476232, "W_D_1KI": 33.32355501653667, "J_D_1KI": 169.15510160678514} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 198, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.476684808731079, "TIME_S_1KI": 52.912549539045855, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 398.7726536369324, "W": 26.110499041665946, "J_1KI": 2014.0033011966284, "W_1KI": 131.87120728114112, "W_D": 5.921499041665946, "J_D": 90.43610704588892, "W_D_1KI": 29.906560816494675, "J_D_1KI": 151.0432364469428} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.output index 45da6ca..ec6b73e 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.1.output @@ -1,34 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.324424982070923} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.296512126922607} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 475, 961, ..., 2499006, - 2499520, 2500000]), - col_indices=tensor([ 15, 18, 19, ..., 4987, 4990, 4996]), - values=tensor([0.8221, 0.3138, 0.3999, ..., 0.4846, 0.5872, 0.2809]), - size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6625, 0.6086, 0.7821, ..., 0.8108, 0.2752, 0.8534]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 2500000 -Density: 0.1 -Time: 5.324424982070923 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 197 -ss 5000 -sd 0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.453055620193481} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 504, 990, ..., 2498981, +tensor(crow_indices=tensor([ 0, 476, 953, ..., 2499033, 2499511, 2500000]), - col_indices=tensor([ 3, 15, 19, ..., 4959, 4975, 4978]), - values=tensor([0.1275, 0.4769, 0.3626, ..., 0.0765, 0.7881, 0.0735]), + col_indices=tensor([ 7, 24, 50, ..., 4990, 4995, 4999]), + values=tensor([0.5541, 0.2923, 0.9039, ..., 0.3203, 0.2222, 0.1676]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.2182, 0.1034, 0.8832, ..., 0.0679, 0.0105, 0.3546]) +tensor([0.7824, 0.4557, 0.0424, ..., 0.4238, 0.5244, 0.3285]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.453055620193481 seconds +Time: 5.296512126922607 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 198 -ss 5000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.476684808731079} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 504, 990, ..., 2498981, - 2499511, 2500000]), - col_indices=tensor([ 3, 15, 19, ..., 4959, 4975, 4978]), - values=tensor([0.1275, 0.4769, 0.3626, ..., 0.0765, 0.7881, 0.0735]), +tensor(crow_indices=tensor([ 0, 499, 1004, ..., 2499034, + 2499522, 2500000]), + col_indices=tensor([ 1, 12, 20, ..., 4980, 4986, 4992]), + values=tensor([0.7404, 0.5790, 0.7957, ..., 0.1738, 0.3236, 0.5858]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.2182, 0.1034, 0.8832, ..., 0.0679, 0.0105, 0.3546]) +tensor([0.0161, 0.9413, 0.4866, ..., 0.2496, 0.7035, 0.1589]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +36,30 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.453055620193481 seconds +Time: 10.476684808731079 seconds -[16.48, 16.56, 16.52, 16.6, 16.92, 17.2, 17.24, 17.32, 17.32, 17.2] -[17.2, 17.08, 16.8, 20.08, 22.0, 28.44, 31.8, 32.0, 28.2, 27.28, 20.16, 20.32, 20.56, 20.56, 20.32] -15.236703395843506 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.453055620193481, 'TIME_S_1KI': 53.06119604159128, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 328.8498130035401, 'W': 21.582740338257725} -[16.48, 16.56, 16.52, 16.6, 16.92, 17.2, 17.24, 17.32, 17.32, 17.2, 16.8, 16.52, 16.44, 16.56, 16.44, 16.36, 16.4, 16.48, 16.24, 16.0] -300.36 -15.018 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.453055620193481, 'TIME_S_1KI': 53.06119604159128, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 328.8498130035401, 'W': 21.582740338257725, 'J_1KI': 1669.2883908809142, 'W_1KI': 109.55705755460775, 'W_D': 6.5647403382577245, 'J_D': 100.02500140476232, 'W_D_1KI': 33.32355501653667, 'J_D_1KI': 169.15510160678514} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 499, 1004, ..., 2499034, + 2499522, 2500000]), + col_indices=tensor([ 1, 12, 20, ..., 4980, 4986, 4992]), + values=tensor([0.7404, 0.5790, 0.7957, ..., 0.1738, 0.3236, 0.5858]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0161, 0.9413, 0.4866, ..., 0.2496, 0.7035, 0.1589]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.476684808731079 seconds + +[23.88, 23.88, 24.04, 24.36, 24.28, 24.28, 24.6, 24.56, 24.4, 24.48] +[24.56, 23.8, 24.16, 24.52, 28.56, 28.56, 34.0, 34.76, 34.2, 33.8, 27.4, 24.6, 24.56, 24.6, 24.56] +15.272502183914185 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 198, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.476684808731079, 'TIME_S_1KI': 52.912549539045855, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 398.7726536369324, 'W': 26.110499041665946} +[23.88, 23.88, 24.04, 24.36, 24.28, 24.28, 24.6, 24.56, 24.4, 24.48, 20.52, 20.32, 20.52, 20.52, 20.52, 20.72, 20.84, 20.8, 20.56, 20.28] +403.78 +20.189 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 198, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.476684808731079, 'TIME_S_1KI': 52.912549539045855, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 398.7726536369324, 'W': 26.110499041665946, 'J_1KI': 2014.0033011966284, 'W_1KI': 131.87120728114112, 'W_D': 5.921499041665946, 'J_D': 90.43610704588892, 'W_D_1KI': 29.906560816494675, 'J_D_1KI': 151.0432364469428} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.2.json index 22c3547..5a4d013 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.2.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.520986080169678, "TIME_S_1KI": 105.20986080169678, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 343.0604390716553, "W": 22.467293864380782, "J_1KI": 3430.604390716553, "W_1KI": 224.67293864380784, "W_D": 7.879293864380783, "J_D": 120.3115083198548, "W_D_1KI": 78.79293864380782, "J_D_1KI": 787.9293864380782} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.666005373001099, "TIME_S_1KI": 106.66005373001099, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 426.37482541084296, "W": 26.06617543346282, "J_1KI": 4263.74825410843, "W_1KI": 260.6617543346282, "W_D": 7.4991754334628205, "J_D": 122.66700284934048, "W_D_1KI": 74.99175433462821, "J_D_1KI": 749.9175433462822} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.2.output index 46cc854..21d400e 100644 --- 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.520986080169678} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.666005373001099} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1010, 1992, ..., 4998032, - 4999035, 5000000]), - col_indices=tensor([ 7, 20, 27, ..., 4987, 4995, 4999]), - values=tensor([0.6859, 0.1805, 0.1498, ..., 0.8538, 0.5250, 0.8232]), +tensor(crow_indices=tensor([ 0, 1006, 1980, ..., 4997928, + 4998954, 5000000]), + col_indices=tensor([ 2, 4, 12, ..., 4984, 4990, 4999]), + values=tensor([0.2907, 0.3111, 0.1138, ..., 0.3742, 0.6542, 0.5964]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6117, 0.2257, 0.9695, ..., 0.2383, 0.1591, 0.7207]) +tensor([0.0716, 0.6336, 0.1460, ..., 0.3435, 0.3580, 0.3753]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.520986080169678 seconds +Time: 10.666005373001099 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1010, 1992, ..., 4998032, - 4999035, 5000000]), - col_indices=tensor([ 7, 20, 27, ..., 4987, 4995, 4999]), - values=tensor([0.6859, 0.1805, 0.1498, ..., 0.8538, 0.5250, 0.8232]), +tensor(crow_indices=tensor([ 0, 1006, 1980, ..., 4997928, + 4998954, 5000000]), + col_indices=tensor([ 2, 4, 12, ..., 4984, 4990, 4999]), + values=tensor([0.2907, 0.3111, 0.1138, ..., 0.3742, 0.6542, 0.5964]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6117, 0.2257, 0.9695, ..., 0.2383, 0.1591, 0.7207]) +tensor([0.0716, 0.6336, 0.1460, ..., 0.3435, 0.3580, 0.3753]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.520986080169678 seconds +Time: 10.666005373001099 seconds -[16.36, 16.04, 16.16, 15.96, 15.88, 15.88, 16.04, 16.04, 16.28, 16.32] -[16.4, 16.48, 16.8, 20.24, 21.88, 29.72, 32.92, 30.52, 30.52, 30.68, 27.2, 20.4, 20.72, 20.84, 20.72] -15.269326210021973 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.520986080169678, 'TIME_S_1KI': 105.20986080169678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 343.0604390716553, 'W': 22.467293864380782} -[16.36, 16.04, 16.16, 15.96, 15.88, 15.88, 16.04, 16.04, 16.28, 16.32, 16.84, 17.08, 16.68, 16.36, 16.32, 16.0, 16.08, 16.08, 16.04, 16.16] -291.76 -14.588 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.520986080169678, 'TIME_S_1KI': 105.20986080169678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 343.0604390716553, 'W': 22.467293864380782, 'J_1KI': 3430.604390716553, 'W_1KI': 224.67293864380784, 'W_D': 7.879293864380783, 'J_D': 120.3115083198548, 'W_D_1KI': 78.79293864380782, 'J_D_1KI': 787.9293864380782} +[20.6, 20.48, 20.52, 20.36, 20.4, 20.6, 20.68, 20.72, 21.0, 20.84] +[20.6, 20.6, 20.68, 24.08, 25.44, 32.44, 38.52, 38.64, 35.92, 34.48, 26.72, 24.36, 24.4, 24.4, 24.44, 24.44] +16.357398748397827 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.666005373001099, 'TIME_S_1KI': 106.66005373001099, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 426.37482541084296, 'W': 26.06617543346282} +[20.6, 20.48, 20.52, 20.36, 20.4, 20.6, 20.68, 20.72, 21.0, 20.84, 20.68, 20.68, 20.68, 21.04, 21.0, 20.64, 20.64, 20.4, 20.24, 20.4] +371.34000000000003 +18.567 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.666005373001099, 'TIME_S_1KI': 106.66005373001099, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 426.37482541084296, 'W': 26.06617543346282, 'J_1KI': 4263.74825410843, 'W_1KI': 260.6617543346282, 'W_D': 7.4991754334628205, 'J_D': 122.66700284934048, 'W_D_1KI': 74.99175433462821, 'J_D_1KI': 749.9175433462822} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.3.json 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.3.json index 1e920c1..c6bb734 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.3.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 16.019237279891968, "TIME_S_1KI": 160.19237279891968, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 445.7399830436707, "W": 20.90802632853554, "J_1KI": 4457.399830436707, "W_1KI": 209.08026328535541, "W_D": 5.958026328535542, "J_D": 127.0196676111222, "W_D_1KI": 59.58026328535542, "J_D_1KI": 595.8026328535542} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 17.13407015800476, "TIME_S_1KI": 171.3407015800476, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 562.5266760253905, "W": 25.07427127954065, "J_1KI": 5625.266760253905, "W_1KI": 250.7427127954065, "W_D": 6.58627127954065, "J_D": 147.75916113281244, "W_D_1KI": 65.86271279540651, "J_D_1KI": 658.627127954065} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.3.output index 039e7ba..7a83544 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 16.019237279891968} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 17.13407015800476} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1526, 3031, ..., 7497025, - 7498559, 7500000]), - col_indices=tensor([ 0, 3, 7, ..., 4995, 4996, 4999]), - values=tensor([0.4727, 0.1556, 0.1081, ..., 0.9285, 0.0937, 0.3872]), +tensor(crow_indices=tensor([ 0, 1530, 3021, ..., 7496893, + 7498420, 7500000]), + col_indices=tensor([ 1, 2, 3, ..., 4995, 4996, 4998]), + values=tensor([0.0061, 0.9558, 0.6537, ..., 0.2384, 0.9968, 0.4609]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.9962, 0.8284, 0.0086, ..., 0.9887, 0.5066, 0.4274]) +tensor([0.2735, 0.5951, 0.9100, ..., 0.6989, 0.4928, 0.5249]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 16.019237279891968 seconds +Time: 17.13407015800476 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1526, 3031, ..., 7497025, - 7498559, 7500000]), - col_indices=tensor([ 0, 3, 7, ..., 4995, 4996, 4999]), - values=tensor([0.4727, 0.1556, 0.1081, ..., 0.9285, 0.0937, 0.3872]), +tensor(crow_indices=tensor([ 0, 1530, 3021, ..., 7496893, + 7498420, 7500000]), + col_indices=tensor([ 1, 2, 3, ..., 4995, 4996, 4998]), + values=tensor([0.0061, 0.9558, 0.6537, ..., 0.2384, 0.9968, 0.4609]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.9962, 0.8284, 0.0086, ..., 0.9887, 0.5066, 0.4274]) +tensor([0.2735, 0.5951, 0.9100, ..., 0.6989, 0.4928, 0.5249]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 16.019237279891968 seconds +Time: 17.13407015800476 seconds -[16.44, 16.68, 16.72, 16.56, 16.6, 16.76, 16.84, 16.84, 16.68, 16.52] -[16.44, 16.48, 16.8, 17.84, 18.92, 25.84, 27.76, 29.92, 29.96, 29.48, 23.32, 23.32, 20.24, 20.12, 20.24, 20.04, 20.04, 20.24, 20.28, 20.2, 20.12] -21.319084644317627 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 16.019237279891968, 'TIME_S_1KI': 160.19237279891968, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 445.7399830436707, 'W': 20.90802632853554} -[16.44, 16.68, 16.72, 16.56, 16.6, 16.76, 16.84, 16.84, 16.68, 16.52, 16.44, 16.44, 16.56, 16.56, 16.72, 16.64, 16.56, 16.52, 16.4, 16.44] -299.0 -14.95 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 16.019237279891968, 'TIME_S_1KI': 160.19237279891968, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 445.7399830436707, 'W': 20.90802632853554, 'J_1KI': 4457.399830436707, 'W_1KI': 209.08026328535541, 'W_D': 5.958026328535542, 'J_D': 127.0196676111222, 'W_D_1KI': 59.58026328535542, 'J_D_1KI': 595.8026328535542} +[20.72, 20.56, 20.8, 20.72, 20.48, 20.52, 20.52, 20.36, 20.32, 20.36] +[20.44, 20.56, 22.4, 22.4, 23.44, 25.04, 32.64, 
36.04, 34.88, 34.84, 29.96, 27.04, 24.6, 24.44, 24.4, 24.36, 24.36, 24.52, 24.4, 24.52, 24.48, 24.6] +22.434417724609375 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 17.13407015800476, 'TIME_S_1KI': 171.3407015800476, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 562.5266760253905, 'W': 25.07427127954065} +[20.72, 20.56, 20.8, 20.72, 20.48, 20.52, 20.52, 20.36, 20.32, 20.36, 20.4, 20.28, 20.4, 20.32, 20.52, 20.72, 20.72, 20.76, 20.68, 20.68] +369.76 +18.488 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 17.13407015800476, 'TIME_S_1KI': 171.3407015800476, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 562.5266760253905, 'W': 25.07427127954065, 'J_1KI': 5625.266760253905, 'W_1KI': 250.7427127954065, 'W_D': 6.58627127954065, 'J_D': 147.75916113281244, 'W_D_1KI': 65.86271279540651, 'J_D_1KI': 658.627127954065} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.4.json index d6dabaf..d742da0 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.4.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.045433044433594, "TIME_S_1KI": 210.45433044433594, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 590.3603710937499, "W": 21.581829265499348, "J_1KI": 5903.603710937499, "W_1KI": 215.81829265499348, "W_D": 6.281829265499347, "J_D": 171.83636339187612, "W_D_1KI": 62.81829265499347, "J_D_1KI": 628.1829265499347} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 22.884809732437134, "TIME_S_1KI": 228.84809732437134, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 727.7558030414582, "W": 25.608977605538268, "J_1KI": 7277.558030414582, "W_1KI": 256.0897760553827, "W_D": 6.881977605538271, "J_D": 195.57200666022317, "W_D_1KI": 68.81977605538272, "J_D_1KI": 688.1977605538272} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.4.output index bcbbf0c..ffdb132 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.4.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.4'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.045433044433594} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 
5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 22.884809732437134} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2076, 4151, ..., 9996002, - 9998013, 10000000]), - col_indices=tensor([ 1, 14, 15, ..., 4994, 4998, 4999]), - values=tensor([0.5831, 0.7619, 0.5912, ..., 0.7349, 0.2932, 0.8119]), +tensor(crow_indices=tensor([ 0, 1964, 3970, ..., 9996031, + 9998010, 10000000]), + col_indices=tensor([ 3, 4, 5, ..., 4988, 4991, 4995]), + values=tensor([0.4690, 0.4427, 0.6781, ..., 0.0015, 0.0307, 0.1366]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6981, 0.1886, 0.6279, ..., 0.1836, 0.5536, 0.9370]) +tensor([0.6025, 0.9562, 0.9790, ..., 0.5619, 0.4470, 0.1844]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 21.045433044433594 seconds +Time: 22.884809732437134 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2076, 4151, ..., 9996002, - 9998013, 10000000]), - col_indices=tensor([ 1, 14, 15, ..., 4994, 4998, 4999]), - values=tensor([0.5831, 0.7619, 0.5912, ..., 0.7349, 0.2932, 0.8119]), +tensor(crow_indices=tensor([ 0, 1964, 3970, ..., 9996031, + 9998010, 10000000]), + col_indices=tensor([ 3, 4, 5, ..., 4988, 4991, 4995]), + values=tensor([0.4690, 0.4427, 0.6781, ..., 0.0015, 0.0307, 0.1366]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6981, 0.1886, 0.6279, ..., 0.1836, 0.5536, 0.9370]) +tensor([0.6025, 0.9562, 0.9790, ..., 0.5619, 0.4470, 0.1844]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 21.045433044433594 seconds +Time: 22.884809732437134 seconds -[16.72, 16.72, 16.28, 16.36, 16.28, 17.72, 18.32, 19.12, 19.12, 19.28] -[18.6, 17.76, 17.4, 20.72, 22.2, 26.08, 32.2, 30.12, 32.12, 31.16, 23.52, 23.52, 22.96, 20.28, 20.12, 20.24, 20.28, 20.28, 20.32, 20.32, 20.2, 20.12, 20.28, 20.28, 20.48, 20.6, 20.48] -27.35451030731201 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.045433044433594, 'TIME_S_1KI': 210.45433044433594, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 590.3603710937499, 'W': 21.581829265499348} -[16.72, 16.72, 16.28, 16.36, 16.28, 17.72, 18.32, 19.12, 19.12, 19.28, 16.44, 16.48, 16.36, 16.32, 16.32, 16.52, 16.72, 16.56, 16.4, 16.36] -306.0 -15.3 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 
'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.045433044433594, 'TIME_S_1KI': 210.45433044433594, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 590.3603710937499, 'W': 21.581829265499348, 'J_1KI': 5903.603710937499, 'W_1KI': 215.81829265499348, 'W_D': 6.281829265499347, 'J_D': 171.83636339187612, 'W_D_1KI': 62.81829265499347, 'J_D_1KI': 628.1829265499347} +[20.44, 20.68, 20.68, 20.8, 20.88, 20.8, 20.8, 20.44, 20.48, 20.48] +[20.56, 20.8, 21.04, 24.48, 26.44, 29.32, 37.88, 39.16, 38.08, 37.2, 28.32, 27.64, 24.52, 24.52, 24.6, 24.48, 24.4, 24.44, 24.12, 23.96, 23.96, 23.92, 24.2, 24.28, 24.44, 24.44, 24.4, 24.44] +28.41799521446228 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 22.884809732437134, 'TIME_S_1KI': 228.84809732437134, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 727.7558030414582, 'W': 25.608977605538268} +[20.44, 20.68, 20.68, 20.8, 20.88, 20.8, 20.8, 20.44, 20.48, 20.48, 20.76, 20.64, 20.8, 21.0, 21.0, 21.08, 21.28, 20.96, 21.0, 20.76] +374.53999999999996 +18.726999999999997 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 22.884809732437134, 'TIME_S_1KI': 228.84809732437134, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 727.7558030414582, 'W': 25.608977605538268, 'J_1KI': 7277.558030414582, 'W_1KI': 256.0897760553827, 'W_D': 6.881977605538271, 'J_D': 195.57200666022317, 'W_D_1KI': 68.81977605538272, 'J_D_1KI': 688.1977605538272} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.5.json index edd5f52..562706e 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.5.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.53720736503601, "TIME_S_1KI": 265.3720736503601, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 691.3309565734863, "W": 20.656596648884427, "J_1KI": 6913.309565734863, "W_1KI": 206.56596648884428, "W_D": 5.692596648884429, "J_D": 190.5187167835236, "W_D_1KI": 56.92596648884429, "J_D_1KI": 569.2596648884429} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 28.252235651016235, "TIME_S_1KI": 282.52235651016235, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 899.1067306137085, "W": 25.324976515518202, "J_1KI": 8991.067306137085, "W_1KI": 253.249765155182, "W_D": 6.684976515518201, "J_D": 237.33516101837156, "W_D_1KI": 66.84976515518201, "J_D_1KI": 668.4976515518201} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.5.output index 7bfb817..7502a06 100644 --- 
a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.5.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.5'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.53720736503601} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 28.252235651016235} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2531, 5099, ..., 12494952, - 12497469, 12500000]), - col_indices=tensor([ 0, 7, 9, ..., 4997, 4998, 4999]), - values=tensor([0.6564, 0.0127, 0.9586, ..., 0.9277, 0.7224, 0.6295]), +tensor(crow_indices=tensor([ 0, 2468, 4932, ..., 12494945, + 12497414, 12500000]), + col_indices=tensor([ 3, 4, 5, ..., 4992, 4993, 4996]), + values=tensor([0.0730, 0.7493, 0.4780, ..., 0.8548, 0.0678, 0.5608]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9909, 0.2884, 0.1156, ..., 0.4898, 0.4767, 0.4308]) +tensor([0.6126, 0.9438, 0.3718, ..., 0.5337, 0.9270, 0.9516]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,16 +16,16 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 26.53720736503601 seconds +Time: 28.252235651016235 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2531, 5099, ..., 12494952, - 12497469, 12500000]), - col_indices=tensor([ 0, 7, 9, ..., 4997, 4998, 4999]), - values=tensor([0.6564, 0.0127, 0.9586, ..., 0.9277, 0.7224, 0.6295]), +tensor(crow_indices=tensor([ 0, 2468, 4932, ..., 12494945, + 12497414, 12500000]), + col_indices=tensor([ 3, 4, 5, ..., 4992, 4993, 4996]), + values=tensor([0.0730, 0.7493, 0.4780, ..., 0.8548, 0.0678, 0.5608]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9909, 0.2884, 0.1156, ..., 0.4898, 0.4767, 0.4308]) +tensor([0.6126, 0.9438, 0.3718, ..., 0.5337, 0.9270, 0.9516]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -33,13 +33,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 26.53720736503601 seconds +Time: 28.252235651016235 seconds -[16.56, 16.48, 16.36, 16.32, 16.28, 16.28, 16.52, 16.48, 16.8, 16.84] -[16.84, 16.88, 16.84, 18.36, 18.96, 19.92, 26.16, 27.2, 30.16, 30.2, 27.92, 24.32, 23.48, 20.0, 20.0, 20.32, 20.36, 20.32, 20.48, 20.28, 20.2, 20.28, 20.2, 20.12, 20.08, 19.92, 19.92, 19.92, 20.04, 20.04, 20.2, 20.32, 20.16] -33.4678053855896 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.53720736503601, 'TIME_S_1KI': 265.3720736503601, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 691.3309565734863, 'W': 20.656596648884427} -[16.56, 16.48, 16.36, 16.32, 16.28, 16.28, 16.52, 16.48, 16.8, 16.84, 16.6, 16.96, 16.88, 17.08, 17.04, 16.88, 16.6, 16.6, 16.52, 16.4] -299.28 -14.963999999999999 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.53720736503601, 'TIME_S_1KI': 265.3720736503601, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 691.3309565734863, 'W': 20.656596648884427, 'J_1KI': 6913.309565734863, 'W_1KI': 206.56596648884428, 'W_D': 5.692596648884429, 'J_D': 190.5187167835236, 'W_D_1KI': 56.92596648884429, 'J_D_1KI': 569.2596648884429} +[20.8, 20.56, 20.72, 20.76, 20.48, 20.48, 20.48, 20.56, 20.4, 20.52] +[20.64, 20.52, 20.96, 22.72, 23.8, 25.68, 34.24, 34.88, 36.92, 36.92, 36.92, 28.84, 28.0, 26.44, 24.28, 24.36, 24.36, 24.56, 24.52, 24.48, 24.52, 24.52, 24.52, 24.52, 24.44, 24.48, 24.52, 24.76, 24.64, 24.24, 24.16, 23.92, 23.84, 24.12, 24.12] +35.502766609191895 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 28.252235651016235, 'TIME_S_1KI': 282.52235651016235, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 899.1067306137085, 'W': 25.324976515518202} +[20.8, 20.56, 20.72, 20.76, 20.48, 20.48, 20.48, 20.56, 20.4, 20.52, 20.64, 20.64, 20.84, 21.0, 20.96, 21.0, 20.88, 20.8, 20.84, 20.84] +372.8 +18.64 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 28.252235651016235, 'TIME_S_1KI': 282.52235651016235, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 899.1067306137085, 'W': 25.324976515518202, 
'J_1KI': 8991.067306137085, 'W_1KI': 253.249765155182, 'W_D': 6.684976515518201, 'J_D': 237.33516101837156, 'W_D_1KI': 66.84976515518201, 'J_D_1KI': 668.4976515518201} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.json index 50a61b1..f2babdd 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 277466, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.306395292282104, "TIME_S_1KI": 0.037144714279522914, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 274.4401516246796, "W": 19.314621204373413, "J_1KI": 0.9890947057465764, "W_1KI": 0.06961076746114267, "W_D": 4.503621204373411, "J_D": 63.99165032076835, "W_D_1KI": 0.016231254295565625, "J_D_1KI": 5.849817381432545e-05} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 286286, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.521464109420776, "TIME_S_1KI": 0.03675158446246332, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 308.88434463500977, "W": 21.719583434179967, "J_1KI": 1.0789362547767258, "W_1KI": 0.07586673268752216, "W_D": 2.933583434179969, "J_D": 41.71986075353625, "W_D_1KI": 0.010247037697197798, "J_D_1KI": 3.579301012692831e-05} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.output index 19817c6..18cd113 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,156 +1,75 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.012943029403686523} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 249, 250]), - col_indices=tensor([1969, 3660, 2508, 3210, 4469, 2580, 1634, 4852, 2880, - 4171, 145, 1923, 1759, 292, 976, 2609, 4097, 2004, - 2150, 921, 866, 680, 1106, 3111, 2786, 1102, 3351, - 842, 4401, 1712, 443, 1843, 4964, 3710, 2157, 2831, - 4456, 4953, 180, 4915, 1729, 1065, 1121, 4037, 4202, - 790, 3058, 490, 2402, 821, 2529, 697, 3606, 4981, - 1841, 2976, 2610, 4155, 4902, 3543, 1747, 1961, 3342, - 3548, 4564, 2128, 4095, 930, 3054, 2352, 4784, 3523, - 1341, 4422, 1218, 1510, 1390, 865, 1724, 166, 3901, - 1272, 2149, 1488, 1813, 2931, 985, 697, 3723, 958, - 3733, 65, 4612, 4936, 1010, 4190, 432, 2031, 1484, - 3237, 225, 3100, 3677, 30, 1750, 2122, 2814, 1486, - 4879, 4720, 3646, 3648, 1716, 344, 4485, 818, 3342, - 1481, 4585, 2091, 1043, 1961, 466, 2604, 1267, 2219, - 379, 4743, 1233, 4480, 2900, 1400, 1058, 3165, 3216, - 1136, 4573, 4401, 106, 3885, 340, 2591, 62, 4321, - 4518, 4971, 529, 1453, 2522, 1821, 3359, 1768, 3615, - 560, 3497, 2950, 4004, 3758, 1175, 749, 2979, 552, - 596, 1500, 318, 4113, 4216, 349, 3504, 1704, 2177, - 110, 1213, 2730, 623, 979, 4943, 5, 3100, 4274, - 1196, 1212, 1292, 4837, 2749, 2657, 2151, 116, 3452, - 1048, 4726, 3050, 2235, 3572, 3103, 2468, 336, 256, - 3689, 3259, 4778, 4872, 4322, 4966, 992, 1906, 4507, - 3887, 3908, 4661, 4040, 2063, 4632, 2722, 1787, 3381, - 2148, 4178, 2261, 3851, 1401, 2459, 2100, 1771, 2949, - 4688, 3574, 4745, 4027, 705, 4556, 616, 3800, 1985, - 763, 4632, 1608, 1959, 3621, 896, 2004, 4509, 4069, - 2954, 1079, 553, 3501, 741, 537, 251]), - values=tensor([0.4598, 0.9685, 0.1022, 0.6570, 0.2626, 0.0213, 0.4956, - 0.5380, 0.8437, 0.5470, 0.0848, 0.9620, 0.0264, 0.2693, - 0.6692, 0.6401, 0.9283, 0.8706, 0.6275, 0.5943, 0.3989, - 0.0766, 0.9946, 0.3522, 0.5912, 0.5773, 0.4771, 0.7005, - 0.3650, 0.1154, 0.5630, 0.9184, 0.7700, 0.6786, 0.7164, - 0.8177, 0.4007, 0.8722, 0.7773, 0.2355, 0.0635, 0.3183, - 0.2674, 0.1684, 0.4826, 0.2298, 0.6975, 0.5790, 0.7160, - 0.7090, 0.3192, 0.5709, 0.6972, 0.7933, 0.5030, 0.7005, - 0.6711, 0.9473, 0.3130, 0.7328, 0.0053, 0.6263, 0.3400, - 0.8317, 0.2127, 0.6912, 0.5279, 0.3351, 0.9454, 0.0502, - 0.5418, 0.3942, 0.6647, 0.1585, 0.1151, 0.4637, 0.2766, - 0.6645, 0.2561, 0.3586, 0.9007, 0.5641, 0.9024, 0.0924, - 0.7442, 0.4202, 0.9456, 0.3672, 0.6816, 0.6385, 0.3813, - 0.9352, 0.7740, 0.6736, 0.3753, 0.4691, 0.5509, 0.9780, - 0.7039, 0.8897, 0.5298, 0.1267, 0.2404, 0.1878, 0.8542, - 0.5178, 0.8945, 0.4190, 0.8436, 0.0710, 0.7443, 0.1508, - 0.8032, 0.7493, 0.7469, 0.6805, 0.5330, 0.9512, 0.6693, - 0.2875, 0.6060, 0.0101, 0.6329, 0.2104, 0.2244, 0.8216, - 0.9850, 0.4320, 0.6288, 0.3139, 0.5255, 0.8128, 0.7760, - 0.1620, 0.7643, 0.1907, 0.2993, 0.8513, 0.3012, 0.8852, - 0.2619, 0.0229, 0.3957, 0.9602, 0.2258, 0.9232, 0.3917, - 0.9188, 0.1363, 0.2426, 0.1136, 0.6949, 0.1461, 0.1346, - 0.9068, 0.3140, 0.4405, 0.5547, 0.7829, 0.5939, 0.7712, - 0.2027, 0.1672, 0.0139, 0.3950, 0.5428, 0.8675, 0.7414, - 0.0223, 0.4081, 0.0576, 0.0795, 0.5466, 0.8051, 0.7481, - 0.9772, 0.6723, 0.3420, 0.3703, 0.9258, 0.7375, 0.0725, - 0.3308, 0.3117, 0.7279, 0.7861, 0.0210, 0.0730, 0.7490, - 0.2970, 0.3351, 0.7560, 0.6932, 0.1496, 0.7733, 0.0425, - 0.6190, 0.6584, 0.3857, 0.2533, 0.8724, 0.8049, 0.1764, - 0.5947, 0.7425, 0.4336, 0.4669, 0.5320, 0.4576, 0.7077, - 0.9652, 0.4590, 0.0046, 0.8577, 0.7045, 0.2574, 0.6998, - 0.6151, 0.3438, 0.0111, 0.9822, 0.5390, 0.2832, 0.0368, - 0.2525, 0.0950, 0.9720, 0.8354, 0.5121, 
0.4167, 0.7493, - 0.0233, 0.0628, 0.4681, 0.4028, 0.7881, 0.6370, 0.0526, - 0.9211, 0.1381, 0.0723, 0.8459, 0.6295, 0.9210, 0.7735, - 0.6887, 0.7409, 0.5606, 0.9654, 0.1055]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.8735, 0.3833, 0.2694, ..., 0.9142, 0.8243, 0.1368]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 250 -Density: 1e-05 -Time: 0.012943029403686523 seconds - -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 81124 -ss 5000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.069929599761963} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.011764049530029297} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 250, 250]), - col_indices=tensor([4942, 4831, 3020, 4398, 15, 273, 1128, 112, 3723, - 3585, 170, 3865, 4631, 2789, 948, 1973, 2152, 1168, - 2755, 4895, 2445, 616, 942, 1070, 15, 1656, 1361, - 381, 4008, 517, 700, 1825, 1420, 3779, 3711, 3163, - 2300, 1341, 3184, 1297, 1550, 2711, 522, 3966, 1184, - 705, 4226, 2826, 2546, 3410, 4394, 1120, 1279, 3317, - 623, 3259, 183, 314, 3816, 2320, 512, 2393, 3294, - 3842, 3425, 455, 1652, 4397, 4874, 478, 513, 654, - 1631, 4536, 2192, 2601, 483, 4710, 64, 1715, 545, - 2242, 2044, 2755, 1543, 1597, 1787, 451, 1992, 2258, - 3437, 2378, 4190, 1069, 538, 3841, 4601, 4983, 2955, - 805, 2644, 3782, 688, 4775, 1057, 1887, 2840, 250, - 1640, 4588, 2355, 3842, 4628, 1450, 1276, 2776, 1876, - 703, 2888, 1093, 3936, 2831, 3151, 3865, 1871, 2237, - 1038, 3246, 1430, 323, 1681, 4674, 4158, 2884, 296, - 1248, 3609, 4395, 2571, 3253, 2353, 1909, 2894, 3695, - 4937, 191, 309, 1354, 2926, 2944, 4425, 3127, 3554, - 4775, 2727, 2655, 3557, 3449, 1163, 2318, 4832, 772, - 2251, 4810, 985, 1751, 1979, 3306, 2880, 4074, 740, - 443, 4107, 3879, 2043, 2031, 1254, 2409, 1790, 4884, - 4795, 4046, 3848, 914, 1554, 1268, 549, 3310, 1243, - 1703, 3704, 1174, 859, 2408, 4434, 1686, 3699, 3911, - 241, 4764, 2817, 4123, 1459, 2878, 3106, 16, 1449, - 1804, 3917, 2039, 916, 3993, 4637, 4103, 646, 344, - 4563, 2694, 3833, 3678, 2981, 1194, 2210, 1306, 1590, - 4934, 1620, 3680, 1815, 2507, 2898, 4255, 91, 4315, - 1006, 2747, 1763, 4043, 3117, 1987, 1941, 903, 4871, - 2123, 2041, 2574, 168, 2922, 2931, 3435]), - values=tensor([0.2833, 0.5136, 0.0459, 0.1040, 0.7712, 0.7813, 0.1004, - 0.0062, 0.4357, 0.2247, 0.8578, 0.6295, 0.0947, 0.0842, - 0.8159, 0.8756, 0.7754, 0.3890, 0.9475, 0.7902, 0.1690, - 0.2878, 0.8893, 0.3483, 0.2502, 0.4294, 0.6224, 0.2795, - 0.6981, 0.6980, 0.8634, 0.5843, 0.9074, 0.4490, 0.0617, - 0.7705, 0.8034, 0.4257, 0.7807, 0.9320, 0.2211, 0.0095, - 0.0758, 0.1987, 0.1954, 0.2907, 0.0185, 0.5826, 0.8663, - 0.7792, 0.8892, 0.0709, 0.3218, 0.2257, 0.4393, 0.5281, - 0.5105, 0.4052, 0.7907, 0.7483, 0.7892, 0.7607, 
0.5190, - 0.1815, 0.1591, 0.5976, 0.6474, 0.6874, 0.8319, 0.0260, - 0.0388, 0.4193, 0.6536, 0.3715, 0.0266, 0.9481, 0.0870, - 0.8892, 0.7519, 0.8104, 0.4144, 0.7611, 0.0463, 0.1582, - 0.3558, 0.3084, 0.5278, 0.9900, 0.2019, 0.3355, 0.5727, - 0.0312, 0.7009, 0.2438, 0.6987, 0.0688, 0.4630, 0.8762, - 0.0429, 0.9174, 0.3364, 0.0108, 0.6176, 0.8302, 0.3550, - 0.6954, 0.9373, 0.8688, 0.2691, 0.2429, 0.5154, 0.3210, - 0.8363, 0.5592, 0.6375, 0.9608, 0.3593, 0.4214, 0.9371, - 0.5875, 0.2839, 0.6313, 0.8389, 0.0214, 0.7557, 0.6381, - 0.6212, 0.9792, 0.4905, 0.7606, 0.5632, 0.9431, 0.6739, - 0.1004, 0.5870, 0.3454, 0.2936, 0.8579, 0.0211, 0.1297, - 0.0434, 0.1458, 0.3630, 0.6936, 0.4422, 0.1285, 0.0197, - 0.5356, 0.2039, 0.0330, 0.8242, 0.3233, 0.8126, 0.8089, - 0.1323, 0.4931, 0.0051, 0.9759, 0.3736, 0.9694, 0.3810, - 0.6330, 0.9848, 0.5658, 0.7909, 0.5722, 0.8562, 0.9056, - 0.3408, 0.6105, 0.9888, 0.2522, 0.9582, 0.6931, 0.8565, - 0.8791, 0.4252, 0.0752, 0.4302, 0.2072, 0.9998, 0.0920, - 0.9465, 0.9645, 0.2478, 0.6900, 0.8499, 0.9862, 0.6104, - 0.7144, 0.8192, 0.7493, 0.2478, 0.5926, 0.6255, 0.9983, - 0.1475, 0.4227, 0.7128, 0.3703, 0.4025, 0.7491, 0.2392, - 0.8266, 0.0100, 0.6364, 0.4916, 0.8482, 0.7480, 0.7567, - 0.6271, 0.0847, 0.4248, 0.2642, 0.0890, 0.5453, 0.8654, - 0.6751, 0.0013, 0.9619, 0.9277, 0.1302, 0.1956, 0.7206, - 0.7741, 0.7104, 0.3550, 0.2532, 0.0939, 0.7434, 0.5649, - 0.0455, 0.1135, 0.6381, 0.8138, 0.5254, 0.5858, 0.1065, - 0.1493, 0.3104, 0.8119, 0.6904, 0.9596, 0.5459, 0.5380, - 0.4871, 0.4126, 0.3848, 0.8347, 0.6321]), + col_indices=tensor([2413, 3613, 1235, 3516, 1027, 3592, 1136, 1072, 2035, + 4272, 4618, 3976, 4149, 3298, 350, 4414, 3786, 1146, + 399, 3574, 4155, 4830, 128, 794, 3518, 433, 2560, + 4873, 2752, 3510, 4084, 2870, 4291, 4281, 1988, 3080, + 4866, 4309, 4282, 2892, 1310, 4747, 4082, 828, 3059, + 3593, 4744, 2427, 3406, 4156, 3292, 976, 4562, 4833, + 4532, 4372, 2935, 1020, 1599, 4418, 1845, 3449, 1510, + 3622, 465, 468, 763, 1529, 2768, 762, 3067, 2016, + 2410, 2110, 2032, 2358, 2073, 3231, 3012, 2623, 1907, + 2165, 678, 3710, 3489, 734, 2517, 3079, 1475, 1596, + 1971, 3893, 2127, 1885, 3746, 4682, 3749, 522, 1261, + 105, 343, 982, 3604, 3099, 4073, 3465, 3456, 2902, + 1206, 959, 2498, 812, 811, 2977, 4941, 1905, 4580, + 3490, 2619, 184, 194, 4583, 3039, 2302, 840, 724, + 2525, 2949, 3085, 3915, 530, 3440, 686, 2759, 3186, + 4768, 712, 2343, 1309, 597, 4582, 742, 3611, 4699, + 4978, 4717, 189, 3013, 3011, 1207, 1385, 1239, 1693, + 2073, 1474, 4653, 3232, 3625, 4060, 4894, 1431, 3470, + 3424, 1752, 889, 2383, 4320, 2778, 1947, 2903, 2893, + 3857, 1797, 480, 1052, 3070, 4984, 3127, 4334, 549, + 2621, 1463, 3110, 411, 3720, 4292, 1798, 3332, 3030, + 4797, 1788, 55, 1874, 4248, 1762, 3018, 2297, 4274, + 4645, 473, 890, 3162, 4450, 2036, 1163, 524, 4495, + 319, 1515, 3954, 1175, 3655, 3922, 4924, 449, 4555, + 4027, 1821, 4539, 853, 67, 3735, 2166, 4620, 2028, + 1305, 2477, 2964, 3566, 600, 973, 4362, 4571, 492, + 3777, 1855, 3798, 399, 1516, 2935, 4373, 4186, 1906, + 3621, 559, 4175, 3992, 4851, 3965, 1667]), + values=tensor([0.8430, 0.6646, 0.9984, 0.9113, 0.9303, 0.9243, 0.7352, + 0.0917, 0.4573, 0.9278, 0.0453, 0.8668, 0.3710, 0.1055, + 0.7000, 0.1257, 0.7807, 0.5659, 0.8613, 0.3219, 0.3080, + 0.5003, 0.2770, 0.3050, 0.5705, 0.6954, 0.5467, 0.3047, + 0.6085, 0.3734, 0.9882, 0.3908, 0.9369, 0.4468, 0.4984, + 0.4857, 0.0606, 0.7805, 0.9192, 0.4185, 0.3230, 0.4503, + 0.8113, 0.2879, 0.6288, 0.1008, 0.7971, 0.1025, 0.1359, + 0.0289, 0.4817, 0.6196, 0.7036, 0.5238, 
0.4631, 0.4355, + 0.2301, 0.2225, 0.0513, 0.0755, 0.4215, 0.1328, 0.8430, + 0.3171, 0.7886, 0.4053, 0.0069, 0.1746, 0.7990, 0.3152, + 0.9939, 0.9095, 0.8893, 0.3319, 0.9964, 0.2683, 0.8079, + 0.4800, 0.5214, 0.1655, 0.8731, 0.5239, 0.7444, 0.8821, + 0.9232, 0.7838, 0.7194, 0.8196, 0.0665, 0.4543, 0.6520, + 0.9499, 0.2724, 0.0933, 0.9104, 0.2704, 0.6561, 0.1182, + 0.7878, 0.2648, 0.0651, 0.9617, 0.1709, 0.4927, 0.2008, + 0.1731, 0.9236, 0.3737, 0.2318, 0.1857, 0.1518, 0.8527, + 0.2928, 0.3813, 0.6483, 0.4666, 0.1176, 0.7826, 0.1945, + 0.1327, 0.1488, 0.2745, 0.5971, 0.2582, 0.1814, 0.3538, + 0.3370, 0.2050, 0.8859, 0.9904, 0.9295, 0.4339, 0.8897, + 0.1251, 0.3995, 0.7030, 0.9340, 0.3235, 0.2567, 0.8083, + 0.2711, 0.6190, 0.8430, 0.6309, 0.9551, 0.3993, 0.7269, + 0.3111, 0.2424, 0.5945, 0.5503, 0.9832, 0.0249, 0.7489, + 0.2464, 0.9982, 0.9216, 0.6782, 0.2489, 0.4260, 0.3402, + 0.2915, 0.2474, 0.4530, 0.2474, 0.7350, 0.4044, 0.8279, + 0.0947, 0.6957, 0.9056, 0.2845, 0.9641, 0.8705, 0.9716, + 0.3060, 0.4576, 0.3006, 0.8868, 0.7069, 0.7616, 0.0979, + 0.8234, 0.9186, 0.0510, 0.1872, 0.7646, 0.4331, 0.2076, + 0.1570, 0.1594, 0.0215, 0.2444, 0.6214, 0.2494, 0.0750, + 0.6528, 0.2097, 0.4207, 0.7286, 0.6893, 0.0606, 0.7034, + 0.8229, 0.4644, 0.2350, 0.4603, 0.8086, 0.4955, 0.2817, + 0.8152, 0.3632, 0.1090, 0.5631, 0.4363, 0.8698, 0.2556, + 0.8535, 0.4250, 0.5882, 0.7238, 0.2690, 0.5930, 0.0197, + 0.4762, 0.2296, 0.0722, 0.5008, 0.0242, 0.0750, 0.5110, + 0.0739, 0.8176, 0.1281, 0.6133, 0.9998, 0.6720, 0.8508, + 0.5475, 0.5650, 0.5101, 0.0369, 0.7798, 0.2002, 0.5948, + 0.1198, 0.9417, 0.4657, 0.9378, 0.9744]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.3638, 0.9432, 0.7166, ..., 0.3961, 0.0448, 0.0792]) +tensor([0.1955, 0.8676, 0.0943, ..., 0.3973, 0.2391, 0.7802]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -158,80 +77,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 3.069929599761963 seconds +Time: 0.011764049530029297 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 277466 -ss 5000 -sd 1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.306395292282104} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 89254 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.2735228538513184} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([2530, 1671, 1710, 529, 4147, 2671, 774, 3521, 3142, - 4079, 2231, 3137, 198, 1214, 3263, 994, 2667, 2294, - 182, 3631, 1055, 2979, 71, 3078, 4821, 3439, 3949, - 2018, 1636, 1734, 4146, 1819, 670, 2737, 4839, 929, - 652, 4064, 1709, 1446, 637, 485, 3208, 2342, 4556, - 3470, 1760, 3837, 1164, 826, 2500, 338, 4213, 1539, - 2699, 7, 3593, 628, 2634, 1851, 2277, 2906, 1873, - 3675, 109, 933, 39, 1888, 3153, 1802, 2749, 4653, - 10, 1407, 3436, 4501, 1652, 4732, 4648, 3990, 3869, - 1528, 3105, 3115, 2926, 448, 1508, 3766, 414, 0, - 99, 1356, 732, 2391, 4307, 374, 3096, 3847, 1168, - 2149, 2270, 3071, 2538, 4038, 1887, 3751, 3671, 1345, - 271, 1144, 828, 1558, 3741, 642, 1130, 26, 2512, - 1351, 4437, 62, 3040, 3132, 4639, 4608, 3358, 1316, - 2346, 4344, 2385, 4204, 358, 3346, 4011, 297, 728, - 1635, 4143, 3858, 4661, 2365, 4156, 4923, 3921, 4212, - 419, 1025, 1912, 1997, 3589, 965, 4863, 581, 2400, - 2128, 2335, 3936, 4843, 1018, 2088, 3052, 1843, 3652, - 3264, 2342, 212, 2423, 2603, 526, 4357, 2538, 2326, - 4304, 4490, 19, 2158, 4734, 2481, 4574, 1764, 4922, - 1924, 1668, 665, 3628, 4416, 779, 3359, 3281, 668, - 2259, 684, 1693, 262, 2335, 2116, 2444, 285, 472, - 1695, 1989, 2831, 2933, 4834, 3892, 2679, 43, 338, - 1143, 3133, 3290, 2874, 3505, 1654, 2420, 3323, 4487, - 4528, 2876, 3002, 3959, 635, 1503, 2493, 4974, 3994, - 3304, 3215, 2609, 4509, 2631, 2777, 683, 3623, 3596, - 2685, 115, 2166, 1456, 3440, 4502, 1541, 136, 4160, - 2313, 2928, 4917, 3863, 3827, 2109, 4794]), - values=tensor([0.1255, 0.3198, 0.8133, 0.3742, 0.0163, 0.1439, 0.7607, - 0.6784, 0.8830, 0.0545, 0.8528, 0.7242, 0.8352, 0.4737, - 0.9256, 0.7090, 0.7451, 0.5297, 0.6794, 0.7283, 0.9067, - 0.0313, 0.2449, 0.1565, 0.6919, 0.4035, 0.9905, 0.2192, - 0.0562, 0.4841, 0.8665, 0.8712, 0.9887, 0.8805, 0.4264, - 0.9291, 0.7188, 0.3153, 0.5767, 0.0112, 0.8354, 0.4919, - 0.1313, 0.2676, 0.8495, 0.9700, 0.8615, 0.6450, 0.0071, - 0.4545, 0.8713, 0.2228, 0.4878, 0.1926, 0.0886, 0.8092, - 0.4330, 0.1067, 0.1112, 0.2683, 0.4340, 0.7229, 0.1649, - 0.0932, 0.0193, 0.5783, 0.2193, 0.3091, 0.4364, 0.5673, - 0.8010, 0.5772, 0.0521, 0.5829, 0.4101, 0.3786, 0.0283, - 0.4786, 0.3304, 0.3446, 0.7315, 0.6206, 0.8294, 0.4404, - 0.4676, 0.0871, 0.3497, 0.0069, 0.9043, 0.8947, 0.1952, - 0.6809, 0.4255, 0.1696, 0.7442, 0.9124, 0.8603, 0.9907, - 0.1133, 0.2677, 0.6551, 0.8223, 0.8137, 0.2411, 0.5924, - 0.7002, 0.4248, 0.2041, 0.8601, 0.8179, 0.6180, 0.7986, - 0.0067, 0.6255, 0.0265, 0.4455, 0.0788, 0.2798, 0.3073, - 0.9253, 0.6087, 0.7948, 0.9058, 0.2527, 0.3922, 0.9638, - 0.1626, 0.4231, 0.5916, 0.0663, 0.3747, 0.8133, 0.1672, - 0.4958, 0.1234, 0.2670, 0.0752, 0.3763, 0.6411, 0.3294, - 0.4132, 0.2682, 0.3319, 0.1004, 0.6692, 0.2485, 0.0663, - 0.1318, 0.4180, 0.2011, 0.4748, 0.2487, 0.0200, 0.3002, - 0.6475, 0.2552, 0.7456, 0.9304, 0.8959, 0.8069, 0.8309, - 0.8055, 0.5114, 0.9547, 0.4277, 0.3391, 0.6653, 0.7441, - 0.9317, 0.2522, 0.9794, 0.9450, 0.7609, 0.7552, 0.3464, - 0.2683, 0.6131, 0.7507, 0.3858, 0.2947, 0.5291, 0.7914, - 0.4452, 0.6309, 0.6569, 0.3974, 0.5452, 0.9065, 0.8000, - 0.7314, 0.8661, 0.0826, 0.0659, 0.8406, 0.3397, 0.6235, - 0.6886, 0.4334, 0.9899, 0.6808, 0.0386, 0.9324, 0.6160, - 0.2724, 0.3632, 0.4386, 0.4733, 0.7494, 0.2806, 0.7238, - 0.0116, 0.8061, 0.3580, 0.8134, 0.7511, 0.4690, 0.9418, - 0.0495, 0.8282, 0.9024, 0.7411, 0.2424, 0.5263, 0.6983, - 0.9412, 0.6025, 0.1977, 0.9907, 0.4170, 0.2685, 0.9711, 
- 0.6755, 0.6817, 0.5130, 0.8481, 0.9901, 0.9980, 0.3527, - 0.5949, 0.5533, 0.2777, 0.4754, 0.0948, 0.6148, 0.7233, - 0.0545, 0.7637, 0.1155, 0.4005, 0.7155]), + col_indices=tensor([1899, 4347, 2560, 2708, 1203, 2011, 406, 1548, 309, + 2513, 2574, 1949, 3119, 2552, 4972, 3833, 2054, 367, + 1913, 4167, 3506, 1559, 2754, 553, 2977, 3957, 3775, + 580, 4476, 1186, 4677, 1427, 3656, 547, 424, 1840, + 2813, 1385, 4307, 984, 1290, 1163, 1454, 4857, 3754, + 4108, 3934, 3291, 4191, 1636, 4477, 3889, 2747, 4523, + 1713, 1206, 3697, 4989, 1277, 820, 1994, 4648, 533, + 826, 939, 2692, 2903, 1941, 936, 4284, 2113, 2390, + 3617, 1703, 1292, 1519, 4896, 3735, 2679, 2652, 4545, + 2843, 3363, 937, 1266, 1882, 284, 4580, 2734, 1978, + 4722, 4113, 3164, 3083, 4910, 4294, 3174, 4802, 1489, + 2287, 3667, 2082, 4276, 2264, 3840, 3775, 2759, 4574, + 1675, 4699, 1698, 2584, 40, 3801, 3260, 2401, 1497, + 3836, 1621, 907, 270, 4322, 4474, 2130, 2300, 3418, + 944, 4186, 1294, 4755, 3776, 4927, 2031, 2299, 2955, + 1049, 429, 3223, 1042, 380, 4436, 2180, 269, 1313, + 4877, 2586, 946, 3788, 1881, 4604, 4042, 3219, 4452, + 3270, 4009, 337, 2076, 3061, 1289, 285, 367, 1970, + 3275, 2109, 4753, 1339, 1826, 3373, 1348, 2493, 4415, + 4583, 4385, 2527, 2495, 1673, 3939, 2528, 972, 4956, + 4072, 400, 3412, 1014, 722, 2464, 3090, 4380, 2755, + 2919, 1607, 2791, 4979, 2050, 4565, 4181, 2366, 2566, + 2376, 3341, 1205, 4180, 2750, 394, 1115, 4322, 1926, + 2039, 2671, 2121, 2063, 452, 3793, 4554, 4267, 119, + 3604, 4194, 4736, 398, 517, 1176, 3554, 205, 1327, + 2104, 1817, 1109, 1097, 246, 1791, 2296, 4015, 1252, + 2923, 3263, 95, 4398, 2876, 2278, 3914, 750, 1293, + 4553, 4210, 1659, 934, 1080, 2186, 4584]), + values=tensor([0.1865, 0.1374, 0.0978, 0.1493, 0.3922, 0.7655, 0.5676, + 0.5576, 0.5879, 0.7309, 0.4380, 0.9592, 0.7921, 0.6054, + 0.6671, 0.7068, 0.6746, 0.7275, 0.7996, 0.0664, 0.9108, + 0.4217, 0.5832, 0.9078, 0.8341, 0.8182, 0.3565, 0.7422, + 0.1930, 0.3703, 0.3383, 0.4463, 0.8914, 0.4265, 0.7173, + 0.9323, 0.9673, 0.3804, 0.0970, 0.5201, 0.1157, 0.4706, + 0.6001, 0.2251, 0.4719, 0.3701, 0.1247, 0.0047, 0.0018, + 0.7865, 0.6184, 0.1650, 0.7484, 0.3745, 0.6337, 0.4813, + 0.4730, 0.6087, 0.6178, 0.1507, 0.7410, 0.5783, 0.7580, + 0.9742, 0.3080, 0.9101, 0.6291, 0.5771, 0.6703, 0.7603, + 0.9532, 0.9779, 0.0261, 0.8322, 0.0781, 0.8111, 0.9271, + 0.9872, 0.3846, 0.0377, 0.1336, 0.9504, 0.4757, 0.0693, + 0.0441, 0.7605, 0.9790, 0.5753, 0.6324, 0.8525, 0.1321, + 0.5099, 0.2515, 0.3514, 0.2530, 0.8484, 0.6953, 0.6547, + 0.0290, 0.9446, 0.7670, 0.6269, 0.1098, 0.2963, 0.3997, + 0.6218, 0.5487, 0.6424, 0.7242, 0.3514, 0.4330, 0.9321, + 0.5493, 0.2092, 0.6805, 0.0223, 0.8372, 0.7990, 0.8355, + 0.3299, 0.5240, 0.3746, 0.2471, 0.5855, 0.7016, 0.2778, + 0.3824, 0.7200, 0.8069, 0.5934, 0.1201, 0.7338, 0.3007, + 0.9418, 0.3773, 0.1376, 0.0689, 0.7530, 0.4128, 0.5091, + 0.5847, 0.1090, 0.2695, 0.4476, 0.4301, 0.5910, 0.3656, + 0.3714, 0.2267, 0.7236, 0.8823, 0.2535, 0.0201, 0.4958, + 0.3723, 0.5983, 0.3442, 0.4569, 0.7348, 0.3963, 0.7134, + 0.0142, 0.9242, 0.4313, 0.8794, 0.4877, 0.6841, 0.3329, + 0.9461, 0.6773, 0.3988, 0.4810, 0.2056, 0.0986, 0.3610, + 0.9177, 0.6354, 0.7485, 0.8115, 0.6489, 0.8662, 0.4418, + 0.6059, 0.6498, 0.9628, 0.6330, 0.7760, 0.1712, 0.6472, + 0.7011, 0.2023, 0.3746, 0.7831, 0.6587, 0.0149, 0.1065, + 0.0522, 0.1271, 0.4297, 0.6888, 0.1782, 0.8592, 0.8326, + 0.5458, 0.8215, 0.0908, 0.0732, 0.7733, 0.7251, 0.5513, + 0.8372, 0.9332, 0.4444, 0.9377, 0.7578, 0.9000, 0.3339, + 0.2421, 0.8321, 0.9477, 0.2579, 0.9973, 
0.5690, 0.0837, + 0.6222, 0.9107, 0.3005, 0.7960, 0.3291, 0.4164, 0.1900, + 0.5758, 0.1753, 0.7747, 0.3270, 0.8689, 0.5147, 0.6637, + 0.1394, 0.1087, 0.2048, 0.2814, 0.5592, 0.9814, 0.4859, + 0.8549, 0.7406, 0.4951, 0.6767, 0.0162]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.1431, 0.4969, 0.0611, ..., 0.8896, 0.3924, 0.7446]) +tensor([0.0316, 0.3219, 0.8965, ..., 0.7553, 0.3032, 0.8741]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -239,77 +158,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.306395292282104 seconds +Time: 3.2735228538513184 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 286286 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.521464109420776} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([2530, 1671, 1710, 529, 4147, 2671, 774, 3521, 3142, - 4079, 2231, 3137, 198, 1214, 3263, 994, 2667, 2294, - 182, 3631, 1055, 2979, 71, 3078, 4821, 3439, 3949, - 2018, 1636, 1734, 4146, 1819, 670, 2737, 4839, 929, - 652, 4064, 1709, 1446, 637, 485, 3208, 2342, 4556, - 3470, 1760, 3837, 1164, 826, 2500, 338, 4213, 1539, - 2699, 7, 3593, 628, 2634, 1851, 2277, 2906, 1873, - 3675, 109, 933, 39, 1888, 3153, 1802, 2749, 4653, - 10, 1407, 3436, 4501, 1652, 4732, 4648, 3990, 3869, - 1528, 3105, 3115, 2926, 448, 1508, 3766, 414, 0, - 99, 1356, 732, 2391, 4307, 374, 3096, 3847, 1168, - 2149, 2270, 3071, 2538, 4038, 1887, 3751, 3671, 1345, - 271, 1144, 828, 1558, 3741, 642, 1130, 26, 2512, - 1351, 4437, 62, 3040, 3132, 4639, 4608, 3358, 1316, - 2346, 4344, 2385, 4204, 358, 3346, 4011, 297, 728, - 1635, 4143, 3858, 4661, 2365, 4156, 4923, 3921, 4212, - 419, 1025, 1912, 1997, 3589, 965, 4863, 581, 2400, - 2128, 2335, 3936, 4843, 1018, 2088, 3052, 1843, 3652, - 3264, 2342, 212, 2423, 2603, 526, 4357, 2538, 2326, - 4304, 4490, 19, 2158, 4734, 2481, 4574, 1764, 4922, - 1924, 1668, 665, 3628, 4416, 779, 3359, 3281, 668, - 2259, 684, 1693, 262, 2335, 2116, 2444, 285, 472, - 1695, 1989, 2831, 2933, 4834, 3892, 2679, 43, 338, - 1143, 3133, 3290, 2874, 3505, 1654, 2420, 3323, 4487, - 4528, 2876, 3002, 3959, 635, 1503, 2493, 4974, 3994, - 3304, 3215, 2609, 4509, 2631, 2777, 683, 3623, 3596, - 2685, 115, 2166, 1456, 3440, 4502, 1541, 136, 4160, - 2313, 2928, 4917, 3863, 3827, 2109, 4794]), - values=tensor([0.1255, 0.3198, 0.8133, 0.3742, 0.0163, 0.1439, 0.7607, - 0.6784, 0.8830, 0.0545, 0.8528, 0.7242, 0.8352, 0.4737, - 0.9256, 0.7090, 0.7451, 0.5297, 0.6794, 0.7283, 0.9067, - 0.0313, 0.2449, 0.1565, 0.6919, 0.4035, 0.9905, 0.2192, - 0.0562, 0.4841, 0.8665, 0.8712, 0.9887, 0.8805, 0.4264, - 0.9291, 0.7188, 0.3153, 0.5767, 0.0112, 0.8354, 0.4919, - 0.1313, 0.2676, 0.8495, 0.9700, 0.8615, 0.6450, 0.0071, - 0.4545, 0.8713, 0.2228, 0.4878, 0.1926, 0.0886, 0.8092, - 0.4330, 0.1067, 0.1112, 0.2683, 0.4340, 0.7229, 0.1649, - 0.0932, 0.0193, 0.5783, 0.2193, 0.3091, 
0.4364, 0.5673, - 0.8010, 0.5772, 0.0521, 0.5829, 0.4101, 0.3786, 0.0283, - 0.4786, 0.3304, 0.3446, 0.7315, 0.6206, 0.8294, 0.4404, - 0.4676, 0.0871, 0.3497, 0.0069, 0.9043, 0.8947, 0.1952, - 0.6809, 0.4255, 0.1696, 0.7442, 0.9124, 0.8603, 0.9907, - 0.1133, 0.2677, 0.6551, 0.8223, 0.8137, 0.2411, 0.5924, - 0.7002, 0.4248, 0.2041, 0.8601, 0.8179, 0.6180, 0.7986, - 0.0067, 0.6255, 0.0265, 0.4455, 0.0788, 0.2798, 0.3073, - 0.9253, 0.6087, 0.7948, 0.9058, 0.2527, 0.3922, 0.9638, - 0.1626, 0.4231, 0.5916, 0.0663, 0.3747, 0.8133, 0.1672, - 0.4958, 0.1234, 0.2670, 0.0752, 0.3763, 0.6411, 0.3294, - 0.4132, 0.2682, 0.3319, 0.1004, 0.6692, 0.2485, 0.0663, - 0.1318, 0.4180, 0.2011, 0.4748, 0.2487, 0.0200, 0.3002, - 0.6475, 0.2552, 0.7456, 0.9304, 0.8959, 0.8069, 0.8309, - 0.8055, 0.5114, 0.9547, 0.4277, 0.3391, 0.6653, 0.7441, - 0.9317, 0.2522, 0.9794, 0.9450, 0.7609, 0.7552, 0.3464, - 0.2683, 0.6131, 0.7507, 0.3858, 0.2947, 0.5291, 0.7914, - 0.4452, 0.6309, 0.6569, 0.3974, 0.5452, 0.9065, 0.8000, - 0.7314, 0.8661, 0.0826, 0.0659, 0.8406, 0.3397, 0.6235, - 0.6886, 0.4334, 0.9899, 0.6808, 0.0386, 0.9324, 0.6160, - 0.2724, 0.3632, 0.4386, 0.4733, 0.7494, 0.2806, 0.7238, - 0.0116, 0.8061, 0.3580, 0.8134, 0.7511, 0.4690, 0.9418, - 0.0495, 0.8282, 0.9024, 0.7411, 0.2424, 0.5263, 0.6983, - 0.9412, 0.6025, 0.1977, 0.9907, 0.4170, 0.2685, 0.9711, - 0.6755, 0.6817, 0.5130, 0.8481, 0.9901, 0.9980, 0.3527, - 0.5949, 0.5533, 0.2777, 0.4754, 0.0948, 0.6148, 0.7233, - 0.0545, 0.7637, 0.1155, 0.4005, 0.7155]), + col_indices=tensor([ 480, 3870, 2034, 369, 3216, 3129, 1794, 4895, 3584, + 4220, 4163, 3480, 3145, 107, 1218, 2520, 4570, 3112, + 3208, 4572, 2316, 1989, 1370, 1245, 4779, 3089, 3540, + 2678, 4569, 4755, 859, 258, 2063, 4366, 1785, 749, + 3484, 1142, 3077, 1876, 2779, 3741, 1780, 4209, 903, + 4145, 4869, 3543, 555, 287, 4710, 1427, 2682, 4436, + 3617, 1035, 1414, 2588, 2709, 1384, 3260, 3179, 536, + 15, 3896, 1872, 4780, 2123, 1983, 2477, 1909, 4840, + 3095, 1054, 2351, 3706, 954, 3928, 480, 2991, 4476, + 2535, 477, 1274, 3243, 3326, 2861, 2962, 453, 3260, + 1392, 1651, 960, 3543, 3504, 2718, 253, 2211, 3840, + 2330, 152, 3581, 2501, 2846, 790, 3520, 918, 553, + 1271, 3381, 4105, 2861, 826, 2247, 3092, 3375, 1552, + 1605, 4712, 2614, 4915, 3555, 602, 4560, 1263, 4728, + 1252, 2651, 67, 2584, 2491, 2155, 4397, 4541, 1621, + 2025, 2217, 1801, 4944, 3649, 3685, 4261, 3348, 3593, + 30, 2557, 2851, 3992, 1896, 798, 1733, 1864, 481, + 3442, 898, 456, 2713, 3510, 2216, 2406, 3808, 3537, + 1111, 1588, 4505, 4130, 1525, 3898, 222, 830, 3914, + 3206, 1631, 1797, 3092, 1007, 1768, 4349, 2031, 1813, + 2243, 4219, 4840, 1072, 2818, 258, 3175, 1718, 4668, + 41, 3058, 4453, 4554, 2991, 3590, 1823, 2320, 143, + 4314, 2194, 450, 3196, 2031, 143, 3675, 2233, 2449, + 1113, 4197, 1626, 4436, 4210, 122, 4276, 2632, 3704, + 2110, 1916, 271, 2723, 3305, 2128, 3485, 1246, 974, + 3858, 4337, 707, 2983, 3640, 2751, 882, 4437, 3960, + 3005, 264, 3133, 3255, 587, 1077, 201, 2112, 2997, + 2326, 983, 3504, 1863, 4060, 2823, 3127]), + values=tensor([0.3121, 0.0043, 0.7946, 0.0923, 0.3670, 0.9349, 0.1161, + 0.1608, 0.7023, 0.7598, 0.7741, 0.6627, 0.1201, 0.6615, + 0.3268, 0.8452, 0.0925, 0.7134, 0.5197, 0.6435, 0.6396, + 0.6088, 0.3969, 0.4192, 0.2501, 0.6894, 0.2897, 0.9520, + 0.8264, 0.1624, 0.9753, 0.8596, 0.9817, 0.9575, 0.1043, + 0.7839, 0.5785, 0.1642, 0.0789, 0.7062, 0.3125, 0.9158, + 0.8766, 0.3530, 0.3168, 0.3900, 0.3047, 0.1421, 0.5430, + 0.3554, 0.7084, 0.8560, 0.8663, 0.9369, 0.2539, 0.5770, + 0.1520, 0.6019, 0.5215, 0.7355, 
0.2025, 0.8182, 0.3900, + 0.0267, 0.1237, 0.8960, 0.4175, 0.2950, 0.9583, 0.1027, + 0.4492, 0.7678, 0.2303, 0.0424, 0.4082, 0.7925, 0.9663, + 0.0787, 0.8553, 0.9050, 0.4507, 0.5493, 0.5714, 0.6098, + 0.7273, 0.3019, 0.9842, 0.8281, 0.5926, 0.4952, 0.2582, + 0.6038, 0.0276, 0.2246, 0.2941, 0.2287, 0.2264, 0.9905, + 0.8927, 0.5476, 0.4812, 0.3524, 0.2207, 0.0681, 0.0174, + 0.1924, 0.6012, 0.6840, 0.5947, 0.7402, 0.1348, 0.2467, + 0.1018, 0.8994, 0.3732, 0.1658, 0.0191, 0.3792, 0.7095, + 0.8089, 0.8762, 0.7268, 0.0611, 0.7059, 0.0864, 0.6562, + 0.5520, 0.1621, 0.4042, 0.5695, 0.7688, 0.5407, 0.0872, + 0.4184, 0.1074, 0.3488, 0.2190, 0.1130, 0.0411, 0.0654, + 0.2423, 0.7003, 0.2734, 0.9698, 0.7339, 0.1093, 0.1258, + 0.8524, 0.6958, 0.4360, 0.8331, 0.2091, 0.0345, 0.6824, + 0.1548, 0.5799, 0.3430, 0.2655, 0.7925, 0.1523, 0.5878, + 0.1459, 0.9012, 0.5523, 0.4627, 0.1559, 0.8525, 0.1919, + 0.8298, 0.9748, 0.1770, 0.5065, 0.6725, 0.3035, 0.7169, + 0.9011, 0.5673, 0.1765, 0.2238, 0.7180, 0.7244, 0.3321, + 0.8196, 0.8541, 0.0808, 0.5003, 0.5951, 0.6927, 0.7943, + 0.3562, 0.3748, 0.2034, 0.8188, 0.1786, 0.5449, 0.7555, + 0.4598, 0.6698, 0.8264, 0.3421, 0.6903, 0.7656, 0.1821, + 0.0159, 0.4418, 0.7846, 0.9920, 0.4231, 0.7305, 0.3985, + 0.4712, 0.9639, 0.9006, 0.5264, 0.9895, 0.6470, 0.2125, + 0.0665, 0.6922, 0.8492, 0.6881, 0.5412, 0.7532, 0.7415, + 0.6027, 0.0028, 0.9098, 0.8735, 0.0774, 0.9563, 0.9648, + 0.6625, 0.9251, 0.5024, 0.5896, 0.0673, 0.5722, 0.9050, + 0.9294, 0.0690, 0.0890, 0.6600, 0.8796, 0.8624, 0.1592, + 0.4507, 0.7277, 0.8208, 0.7799, 0.5597]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.1431, 0.4969, 0.0611, ..., 0.8896, 0.3924, 0.7446]) +tensor([0.9952, 0.6015, 0.3584, ..., 0.2924, 0.6333, 0.1169]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -317,13 +239,91 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.306395292282104 seconds +Time: 10.521464109420776 seconds -[16.56, 16.4, 16.72, 16.72, 16.84, 16.92, 17.04, 16.64, 16.64, 16.64] -[16.68, 16.8, 19.52, 20.28, 22.4, 23.28, 23.28, 24.04, 21.56, 21.2, 20.08, 19.92, 19.76, 19.72] -14.208932638168335 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 277466, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.306395292282104, 'TIME_S_1KI': 0.037144714279522914, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 274.4401516246796, 'W': 19.314621204373413} -[16.56, 16.4, 16.72, 16.72, 16.84, 16.92, 17.04, 16.64, 16.64, 16.64, 16.24, 16.2, 16.2, 16.48, 16.12, 16.2, 16.2, 16.16, 16.0, 16.04] -296.22 -14.811000000000002 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 277466, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.306395292282104, 'TIME_S_1KI': 0.037144714279522914, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 274.4401516246796, 'W': 19.314621204373413, 'J_1KI': 0.9890947057465764, 'W_1KI': 0.06961076746114267, 'W_D': 4.503621204373411, 'J_D': 63.99165032076835, 'W_D_1KI': 0.016231254295565625, 'J_D_1KI': 5.849817381432545e-05} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([ 480, 3870, 2034, 369, 3216, 3129, 1794, 4895, 3584, + 4220, 4163, 3480, 3145, 107, 1218, 2520, 4570, 3112, + 3208, 4572, 2316, 1989, 1370, 1245, 4779, 3089, 3540, + 2678, 4569, 4755, 859, 258, 2063, 4366, 1785, 749, + 3484, 1142, 3077, 1876, 2779, 3741, 1780, 4209, 903, + 4145, 4869, 3543, 555, 287, 4710, 1427, 2682, 4436, + 3617, 1035, 1414, 2588, 2709, 1384, 3260, 3179, 536, + 15, 3896, 1872, 4780, 2123, 1983, 2477, 1909, 4840, + 3095, 1054, 2351, 3706, 954, 3928, 480, 2991, 4476, + 2535, 477, 1274, 3243, 3326, 2861, 2962, 453, 3260, + 1392, 1651, 960, 3543, 3504, 2718, 253, 2211, 3840, + 2330, 152, 3581, 2501, 2846, 790, 3520, 918, 553, + 1271, 3381, 4105, 2861, 826, 2247, 3092, 3375, 1552, + 1605, 4712, 2614, 4915, 3555, 602, 4560, 1263, 4728, + 1252, 2651, 67, 2584, 2491, 2155, 4397, 4541, 1621, + 2025, 2217, 1801, 4944, 3649, 3685, 4261, 3348, 3593, + 30, 2557, 2851, 3992, 1896, 798, 1733, 1864, 481, + 3442, 898, 456, 2713, 3510, 2216, 2406, 3808, 3537, + 1111, 1588, 4505, 4130, 1525, 3898, 222, 830, 3914, + 3206, 1631, 1797, 3092, 1007, 1768, 4349, 2031, 1813, + 2243, 4219, 4840, 1072, 2818, 258, 3175, 1718, 4668, + 41, 3058, 4453, 4554, 2991, 3590, 1823, 2320, 143, + 4314, 2194, 450, 3196, 2031, 143, 3675, 2233, 2449, + 1113, 4197, 1626, 4436, 4210, 122, 4276, 2632, 3704, + 2110, 1916, 271, 2723, 3305, 2128, 3485, 1246, 974, + 3858, 4337, 707, 2983, 3640, 2751, 882, 4437, 3960, + 3005, 264, 3133, 3255, 587, 1077, 201, 2112, 2997, + 2326, 983, 3504, 1863, 4060, 2823, 3127]), + values=tensor([0.3121, 0.0043, 0.7946, 0.0923, 0.3670, 0.9349, 0.1161, + 0.1608, 0.7023, 0.7598, 0.7741, 0.6627, 0.1201, 0.6615, + 0.3268, 0.8452, 0.0925, 0.7134, 0.5197, 0.6435, 0.6396, + 0.6088, 0.3969, 0.4192, 0.2501, 0.6894, 0.2897, 0.9520, + 0.8264, 0.1624, 0.9753, 0.8596, 0.9817, 0.9575, 0.1043, + 0.7839, 0.5785, 0.1642, 0.0789, 0.7062, 0.3125, 0.9158, + 0.8766, 0.3530, 0.3168, 0.3900, 0.3047, 0.1421, 0.5430, + 0.3554, 0.7084, 0.8560, 0.8663, 0.9369, 0.2539, 0.5770, + 0.1520, 0.6019, 0.5215, 0.7355, 0.2025, 0.8182, 0.3900, + 0.0267, 0.1237, 0.8960, 0.4175, 0.2950, 0.9583, 0.1027, + 0.4492, 0.7678, 0.2303, 0.0424, 0.4082, 0.7925, 0.9663, + 0.0787, 0.8553, 0.9050, 0.4507, 0.5493, 0.5714, 0.6098, + 0.7273, 0.3019, 0.9842, 0.8281, 0.5926, 0.4952, 0.2582, + 0.6038, 0.0276, 0.2246, 0.2941, 0.2287, 0.2264, 0.9905, + 0.8927, 0.5476, 0.4812, 0.3524, 0.2207, 0.0681, 0.0174, + 0.1924, 0.6012, 0.6840, 0.5947, 0.7402, 0.1348, 0.2467, + 0.1018, 0.8994, 0.3732, 0.1658, 0.0191, 0.3792, 0.7095, + 0.8089, 0.8762, 0.7268, 0.0611, 0.7059, 0.0864, 0.6562, + 0.5520, 0.1621, 0.4042, 0.5695, 0.7688, 0.5407, 0.0872, + 0.4184, 0.1074, 0.3488, 0.2190, 0.1130, 0.0411, 0.0654, + 0.2423, 0.7003, 0.2734, 0.9698, 0.7339, 0.1093, 0.1258, + 0.8524, 0.6958, 0.4360, 0.8331, 0.2091, 0.0345, 0.6824, + 0.1548, 0.5799, 0.3430, 0.2655, 0.7925, 0.1523, 0.5878, + 0.1459, 0.9012, 0.5523, 0.4627, 0.1559, 0.8525, 0.1919, + 0.8298, 0.9748, 0.1770, 0.5065, 0.6725, 0.3035, 0.7169, + 0.9011, 0.5673, 0.1765, 0.2238, 0.7180, 0.7244, 0.3321, + 0.8196, 0.8541, 0.0808, 0.5003, 0.5951, 0.6927, 0.7943, + 0.3562, 0.3748, 0.2034, 0.8188, 0.1786, 0.5449, 0.7555, + 0.4598, 0.6698, 0.8264, 0.3421, 0.6903, 0.7656, 0.1821, + 0.0159, 0.4418, 0.7846, 0.9920, 0.4231, 0.7305, 0.3985, + 0.4712, 0.9639, 0.9006, 
0.5264, 0.9895, 0.6470, 0.2125, + 0.0665, 0.6922, 0.8492, 0.6881, 0.5412, 0.7532, 0.7415, + 0.6027, 0.0028, 0.9098, 0.8735, 0.0774, 0.9563, 0.9648, + 0.6625, 0.9251, 0.5024, 0.5896, 0.0673, 0.5722, 0.9050, + 0.9294, 0.0690, 0.0890, 0.6600, 0.8796, 0.8624, 0.1592, + 0.4507, 0.7277, 0.8208, 0.7799, 0.5597]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.9952, 0.6015, 0.3584, ..., 0.2924, 0.6333, 0.1169]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.521464109420776 seconds + +[21.0, 20.8, 20.8, 20.8, 21.08, 21.0, 21.24, 21.2, 21.24, 21.16] +[21.04, 20.8, 21.56, 22.56, 23.36, 23.36, 24.24, 25.0, 24.44, 24.12, 23.96, 24.04, 23.84, 23.68] +14.22146725654602 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 286286, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.521464109420776, 'TIME_S_1KI': 0.03675158446246332, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 308.88434463500977, 'W': 21.719583434179967} +[21.0, 20.8, 20.8, 20.8, 21.08, 21.0, 21.24, 21.2, 21.24, 21.16, 21.04, 20.96, 20.72, 20.72, 20.68, 20.68, 20.48, 20.6, 20.8, 20.64] +375.71999999999997 +18.785999999999998 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 286286, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.521464109420776, 'TIME_S_1KI': 0.03675158446246332, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 308.88434463500977, 'W': 21.719583434179967, 'J_1KI': 1.0789362547767258, 'W_1KI': 0.07586673268752216, 'W_D': 2.933583434179969, 'J_D': 41.71986075353625, 'W_D_1KI': 0.010247037697197798, 'J_D_1KI': 3.579301012692831e-05} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_5e-05.json index ed92a63..a46871e 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Altra", "CORES": 80, "ITERATIONS": 147819, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.450809717178345, "TIME_S_1KI": 0.07070004341240534, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 270.7087575721741, "W": 19.037186911173507, "J_1KI": 1.831352921966554, "W_1KI": 0.12878714448868891, "W_D": 4.033186911173505, "J_D": 57.35190933799741, "W_D_1KI": 0.027284631279967428, "J_D_1KI": 0.00018458135476472868} +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 149833, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.25239634513855, "TIME_S_1KI": 0.06842548934572858, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 312.4897583007812, "W": 21.925255553224016, "J_1KI": 2.085587008875089, "W_1KI": 0.14633128585307653, "W_D": 3.370255553224016, "J_D": 48.03457550048826, "W_D_1KI": 0.022493413021323846, "J_D_1KI": 0.0001501232239982103} diff --git a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_5e-05.output 
b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_5e-05.output index 3732e5f..fb0df82 100644 --- a/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/altra_max_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.015123844146728516} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.014627218246459961} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([3446, 3211, 1459, ..., 3404, 1400, 4328]), - values=tensor([0.9299, 0.4025, 0.7514, ..., 0.4501, 0.7034, 0.4301]), + col_indices=tensor([2526, 1521, 4610, ..., 1277, 4482, 183]), + values=tensor([0.9942, 0.8413, 0.7861, ..., 0.9789, 0.3987, 0.5714]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.5652, 0.8868, 0.6802, ..., 0.3723, 0.2839, 0.7363]) +tensor([0.9661, 0.6980, 0.2318, ..., 0.3746, 0.4676, 0.9632]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 0.015123844146728516 seconds +Time: 0.014627218246459961 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 69426 -ss 5000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.931497573852539} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 71783 -ss 5000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 5.030396461486816} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 1250, 1250, 1250]), - col_indices=tensor([2025, 3924, 898, ..., 1281, 3893, 4108]), - values=tensor([0.0304, 0.7639, 0.3864, ..., 0.2285, 0.7727, 0.3264]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([2921, 2514, 1087, ..., 1030, 1452, 2007]), + values=tensor([0.2212, 0.1400, 0.6188, ..., 0.0721, 0.9961, 0.8481]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.1740, 0.1105, 0.4664, ..., 0.6286, 0.2183, 0.7582]) +tensor([0.5066, 0.6469, 0.5834, ..., 0.4760, 0.1348, 0.4338]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 4.931497573852539 seconds +Time: 5.030396461486816 seconds -['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 147819 -ss 5000 -sd 5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.450809717178345} +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 149833 -ss 5000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.25239634513855} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 1250, 1250, 1250]), - col_indices=tensor([4384, 3481, 3445, ..., 2544, 3259, 4103]), - values=tensor([0.0351, 0.6486, 0.3996, ..., 0.8336, 0.2691, 0.7388]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([1919, 663, 244, ..., 2667, 4054, 269]), + values=tensor([0.4507, 0.6975, 0.8378, ..., 0.5863, 0.5474, 0.5550]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.9301, 0.0450, 0.3581, ..., 0.7152, 0.3980, 0.2103]) +tensor([0.2739, 0.2250, 0.3283, ..., 0.6236, 0.6925, 0.9095]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.450809717178345 seconds +Time: 10.25239634513855 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 1250, 1250, 1250]), - col_indices=tensor([4384, 3481, 3445, ..., 2544, 3259, 4103]), - values=tensor([0.0351, 0.6486, 0.3996, ..., 0.8336, 0.2691, 0.7388]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([1919, 663, 244, ..., 2667, 4054, 269]), + values=tensor([0.4507, 0.6975, 0.8378, ..., 0.5863, 0.5474, 0.5550]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.9301, 0.0450, 0.3581, ..., 0.7152, 0.3980, 0.2103]) +tensor([0.2739, 0.2250, 0.3283, ..., 0.6236, 0.6925, 0.9095]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.450809717178345 seconds +Time: 10.25239634513855 seconds -[16.36, 16.36, 16.6, 16.8, 16.76, 16.88, 16.88, 16.64, 16.44, 16.48] -[16.72, 16.76, 17.08, 20.92, 22.6, 23.2, 24.36, 21.56, 20.88, 20.88, 19.84, 19.92, 19.92, 20.04] -14.219997882843018 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 147819, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.450809717178345, 'TIME_S_1KI': 0.07070004341240534, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 270.7087575721741, 'W': 19.037186911173507} -[16.36, 16.36, 16.6, 16.8, 16.76, 16.88, 16.88, 16.64, 16.44, 16.48, 16.52, 16.72, 16.68, 16.76, 16.76, 16.76, 16.6, 16.68, 16.72, 16.72] -300.08000000000004 -15.004000000000001 -{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 147819, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.450809717178345, 'TIME_S_1KI': 0.07070004341240534, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 270.7087575721741, 'W': 19.037186911173507, 'J_1KI': 1.831352921966554, 'W_1KI': 0.12878714448868891, 'W_D': 4.033186911173505, 'J_D': 57.35190933799741, 'W_D_1KI': 0.027284631279967428, 'J_D_1KI': 0.00018458135476472868} +[20.48, 20.56, 20.64, 20.68, 20.56, 20.48, 20.32, 20.36, 20.32, 20.36] +[20.48, 20.44, 21.72, 21.72, 22.84, 25.12, 25.88, 26.28, 25.44, 23.88, 23.52, 23.64, 23.8, 23.88] +14.25250244140625 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 149833, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.25239634513855, 'TIME_S_1KI': 0.06842548934572858, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.4897583007812, 'W': 21.925255553224016} +[20.48, 20.56, 20.64, 20.68, 20.56, 20.48, 20.32, 20.36, 20.32, 20.36, 20.72, 20.6, 20.84, 20.84, 21.0, 20.92, 20.8, 20.68, 20.52, 20.4] +371.1 +18.555 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 149833, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.25239634513855, 'TIME_S_1KI': 0.06842548934572858, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 312.4897583007812, 'W': 21.925255553224016, 'J_1KI': 2.085587008875089, 'W_1KI': 0.14633128585307653, 'W_D': 3.370255553224016, 'J_D': 48.03457550048826, 'W_D_1KI': 0.022493413021323846, 'J_D_1KI': 0.0001501232239982103} diff --git 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..0697d74 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 360, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.517809629440308, "TIME_S_1KI": 29.216137859556408, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 941.1867869710921, "W": 72.21, "J_1KI": 2614.4077415863667, "W_1KI": 200.58333333333331, "W_D": 36.7285, "J_D": 478.7201067063808, "W_D_1KI": 102.02361111111111, "J_D_1KI": 283.3989197530864} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..f122fb4 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.914273500442505} + +tensor(indices=tensor([[99986, 25315, 77165, ..., 59555, 95902, 82819], + [82069, 99384, 29077, ..., 28641, 53008, 79649]]), + values=tensor([0.9899, 0.1745, 0.3787, ..., 0.4640, 0.3157, 0.3199]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.4391, 0.3152, 0.5007, ..., 0.1342, 0.9653, 0.6029]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 2.914273500442505 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '360', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.517809629440308} + +tensor(indices=tensor([[84032, 59205, 82340, ..., 81970, 48827, 6917], + [ 902, 93126, 18274, ..., 88456, 248, 84785]]), + values=tensor([0.7699, 0.0829, 0.2157, ..., 0.3964, 0.5877, 0.5723]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2390, 0.1111, 0.2274, ..., 0.9221, 0.4885, 0.1395]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.517809629440308 seconds + +tensor(indices=tensor([[84032, 59205, 82340, ..., 81970, 48827, 6917], + [ 902, 93126, 18274, ..., 88456, 248, 84785]]), + values=tensor([0.7699, 0.0829, 0.2157, ..., 0.3964, 0.5877, 0.5723]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.2390, 0.1111, 0.2274, ..., 0.9221, 0.4885, 0.1395]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.517809629440308 seconds + +[40.18, 40.65, 39.12, 39.26, 39.32, 39.05, 39.38, 39.25, 38.99, 39.45] 
+[72.21] +13.034022808074951 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 360, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.517809629440308, 'TIME_S_1KI': 29.216137859556408, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 941.1867869710921, 'W': 72.21} +[40.18, 40.65, 39.12, 39.26, 39.32, 39.05, 39.38, 39.25, 38.99, 39.45, 40.14, 39.41, 40.95, 39.59, 38.99, 38.88, 39.39, 38.98, 39.13, 38.81] +709.63 +35.4815 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 360, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.517809629440308, 'TIME_S_1KI': 29.216137859556408, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 941.1867869710921, 'W': 72.21, 'J_1KI': 2614.4077415863667, 'W_1KI': 200.58333333333331, 'W_D': 36.7285, 'J_D': 478.7201067063808, 'W_D_1KI': 102.02361111111111, 'J_D_1KI': 283.3989197530864} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..b68fd96 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 28.680140733718872, "TIME_S_1KI": 286.8014073371887, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2283.6544932079314, "W": 68.42, "J_1KI": 22836.544932079312, "W_1KI": 684.2, "W_D": 31.95375, "J_D": 1066.5203853017092, "W_D_1KI": 319.53749999999997, "J_D_1KI": 3195.375} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..913c21e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 28.680140733718872} + +tensor(indices=tensor([[93915, 87688, 8534, ..., 2983, 81475, 63066], + [16229, 36880, 60405, ..., 37367, 99929, 26583]]), + values=tensor([0.8523, 0.3379, 0.9853, ..., 0.5481, 0.5227, 0.7365]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.2840, 0.5546, 0.2522, ..., 0.8642, 0.8552, 0.2799]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 28.680140733718872 seconds + +tensor(indices=tensor([[93915, 87688, 8534, ..., 2983, 81475, 63066], + [16229, 36880, 60405, ..., 37367, 99929, 26583]]), + values=tensor([0.8523, 0.3379, 0.9853, ..., 0.5481, 0.5227, 0.7365]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.2840, 0.5546, 0.2522, ..., 0.8642, 0.8552, 
0.2799]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 28.680140733718872 seconds + +[40.17, 47.63, 48.07, 39.17, 39.2, 38.8, 38.95, 39.16, 40.37, 39.48] +[68.42] +33.377002239227295 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 28.680140733718872, 'TIME_S_1KI': 286.8014073371887, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2283.6544932079314, 'W': 68.42} +[40.17, 47.63, 48.07, 39.17, 39.2, 38.8, 38.95, 39.16, 40.37, 39.48, 39.53, 39.29, 39.41, 39.17, 38.94, 39.32, 39.05, 38.85, 44.55, 39.61] +729.325 +36.46625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 28.680140733718872, 'TIME_S_1KI': 286.8014073371887, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2283.6544932079314, 'W': 68.42, 'J_1KI': 22836.544932079312, 'W_1KI': 684.2, 'W_D': 31.95375, 'J_D': 1066.5203853017092, 'W_D_1KI': 319.53749999999997, 'J_D_1KI': 3195.375} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..7551d81 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 288.4082520008087, "TIME_S_1KI": 2884.082520008087, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 20827.604114942547, "W": 66.49, "J_1KI": 208276.0411494255, "W_1KI": 664.9, "W_D": 30.438499999999998, "J_D": 9534.68232595396, "W_D_1KI": 304.38499999999993, "J_D_1KI": 3043.8499999999995} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..dfa1bc5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 288.4082520008087} + +tensor(indices=tensor([[70767, 2667, 51862, ..., 29574, 67144, 25041], + [66356, 81071, 28834, ..., 85677, 41293, 36220]]), + values=tensor([0.8323, 0.7948, 0.8190, ..., 0.5243, 0.0288, 0.1363]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.6335, 0.9345, 0.6879, ..., 0.5366, 0.0822, 0.7656]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 288.4082520008087 seconds + +tensor(indices=tensor([[70767, 2667, 51862, ..., 29574, 67144, 
25041], + [66356, 81071, 28834, ..., 85677, 41293, 36220]]), + values=tensor([0.8323, 0.7948, 0.8190, ..., 0.5243, 0.0288, 0.1363]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.6335, 0.9345, 0.6879, ..., 0.5366, 0.0822, 0.7656]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 288.4082520008087 seconds + +[39.86, 39.22, 39.33, 44.83, 39.21, 39.16, 40.48, 39.13, 39.75, 39.38] +[66.49] +313.244158744812 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 288.4082520008087, 'TIME_S_1KI': 2884.082520008087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 20827.604114942547, 'W': 66.49} +[39.86, 39.22, 39.33, 44.83, 39.21, 39.16, 40.48, 39.13, 39.75, 39.38, 45.74, 39.63, 40.13, 39.5, 39.49, 39.37, 40.6, 39.73, 39.22, 39.52] +721.03 +36.0515 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 288.4082520008087, 'TIME_S_1KI': 2884.082520008087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 20827.604114942547, 'W': 66.49, 'J_1KI': 208276.0411494255, 'W_1KI': 664.9, 'W_D': 30.438499999999998, 'J_D': 9534.68232595396, 'W_D_1KI': 304.38499999999993, 'J_D_1KI': 3043.8499999999995} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..b03916f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3024, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.192553520202637, "TIME_S_1KI": 3.370553412765422, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1070.3068846702574, "W": 83.32, "J_1KI": 353.93746186185757, "W_1KI": 27.55291005291005, "W_D": 47.720749999999995, "J_D": 613.0082485193013, "W_D_1KI": 15.780671296296296, "J_D_1KI": 5.218475957769939} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..4bfc826 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.34720563888549805} + +tensor(indices=tensor([[83305, 71225, 8664, ..., 86105, 34722, 34081], + [93324, 38993, 97484, ..., 80402, 84934, 96179]]), + values=tensor([0.2612, 0.5542, 0.9361, ..., 0.4237, 0.0519, 0.5468]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.2548, 0.2976, 
0.3843, ..., 0.3315, 0.2841, 0.5649]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.34720563888549805 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '3024', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.192553520202637} + +tensor(indices=tensor([[13479, 22288, 16265, ..., 38710, 64694, 19366], + [95177, 40937, 9975, ..., 76450, 59331, 21305]]), + values=tensor([0.3388, 0.5176, 0.5251, ..., 0.6061, 0.5522, 0.8666]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.8629, 0.3650, 0.6806, ..., 0.4031, 0.4597, 0.9310]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.192553520202637 seconds + +tensor(indices=tensor([[13479, 22288, 16265, ..., 38710, 64694, 19366], + [95177, 40937, 9975, ..., 76450, 59331, 21305]]), + values=tensor([0.3388, 0.5176, 0.5251, ..., 0.6061, 0.5522, 0.8666]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.8629, 0.3650, 0.6806, ..., 0.4031, 0.4597, 0.9310]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.192553520202637 seconds + +[39.89, 39.15, 39.42, 39.44, 39.21, 39.41, 41.02, 39.2, 39.65, 39.5] +[83.32] +12.845737934112549 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3024, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.192553520202637, 'TIME_S_1KI': 3.370553412765422, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1070.3068846702574, 'W': 83.32} +[39.89, 39.15, 39.42, 39.44, 39.21, 39.41, 41.02, 39.2, 39.65, 39.5, 39.94, 39.5, 39.4, 39.8, 39.77, 39.75, 39.88, 39.08, 39.11, 39.06] +711.985 +35.59925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3024, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.192553520202637, 'TIME_S_1KI': 3.370553412765422, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1070.3068846702574, 'W': 83.32, 'J_1KI': 353.93746186185757, 'W_1KI': 27.55291005291005, 'W_D': 47.720749999999995, 'J_D': 613.0082485193013, 'W_D_1KI': 15.780671296296296, 'J_D_1KI': 5.218475957769939} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..7140651 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 703, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.50536847114563, "TIME_S_1KI": 14.943625136764764, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 973.2560013914108, "W": 74.82, "J_1KI": 1384.4324344116797, "W_1KI": 
106.42958748221905, "W_D": 39.13399999999999, "J_D": 509.0537337403297, "W_D_1KI": 55.66714082503555, "J_D_1KI": 79.1851220839766} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..8892bec --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.4924423694610596} + +tensor(indices=tensor([[32828, 84473, 24083, ..., 22770, 63296, 7281], + [99908, 8257, 71897, ..., 39601, 90684, 95401]]), + values=tensor([0.9989, 0.4371, 0.7005, ..., 0.3923, 0.9300, 0.4616]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.7644, 0.4722, 0.6608, ..., 0.6379, 0.8086, 0.6851]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 1.4924423694610596 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '703', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.50536847114563} + +tensor(indices=tensor([[60555, 71552, 36427, ..., 60096, 79433, 46188], + [29715, 2622, 16259, ..., 41484, 84389, 82444]]), + values=tensor([0.2576, 0.9807, 0.1478, ..., 0.5088, 0.9117, 0.2731]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.1232, 0.4155, 0.0399, ..., 0.2924, 0.6124, 0.9829]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.50536847114563 seconds + +tensor(indices=tensor([[60555, 71552, 36427, ..., 60096, 79433, 46188], + [29715, 2622, 16259, ..., 41484, 84389, 82444]]), + values=tensor([0.2576, 0.9807, 0.1478, ..., 0.5088, 0.9117, 0.2731]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.1232, 0.4155, 0.0399, ..., 0.2924, 0.6124, 0.9829]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.50536847114563 seconds + +[40.09, 39.48, 39.44, 39.47, 39.01, 39.22, 39.42, 39.09, 39.42, 39.01] +[74.82] +13.007965803146362 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 703, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.50536847114563, 'TIME_S_1KI': 14.943625136764764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 973.2560013914108, 'W': 74.82} +[40.09, 39.48, 39.44, 39.47, 39.01, 39.22, 39.42, 39.09, 39.42, 39.01, 40.22, 39.33, 39.38, 39.32, 39.47, 38.9, 38.94, 45.13, 39.14, 39.8] +713.72 +35.686 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 703, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 
'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.50536847114563, 'TIME_S_1KI': 14.943625136764764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 973.2560013914108, 'W': 74.82, 'J_1KI': 1384.4324344116797, 'W_1KI': 106.42958748221905, 'W_D': 39.13399999999999, 'J_D': 509.0537337403297, 'W_D_1KI': 55.66714082503555, 'J_D_1KI': 79.1851220839766} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..6ce7055 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 36096, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.467745304107666, "TIME_S_1KI": 0.2899973765544012, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 853.6095631027223, "W": 65.76, "J_1KI": 23.64831458063836, "W_1KI": 1.821808510638298, "W_D": 29.531000000000006, "J_D": 383.33248187327393, "W_D_1KI": 0.8181238918439717, "J_D_1KI": 0.022665223067485918} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..268d6a0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03787350654602051} + +tensor(indices=tensor([[6812, 3784, 8396, ..., 9763, 7527, 723], + [9344, 9561, 6166, ..., 9536, 4366, 5344]]), + values=tensor([0.4197, 0.3534, 0.4445, ..., 0.1028, 0.0171, 0.0635]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.6483, 0.3393, 0.2426, ..., 0.2721, 0.6658, 0.6594]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.03787350654602051 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '27723', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.064356327056885} + +tensor(indices=tensor([[3307, 2447, 6469, ..., 9952, 7123, 7608], + [7832, 1317, 6363, ..., 5513, 9450, 7174]]), + values=tensor([0.9680, 0.8945, 0.5520, ..., 0.4226, 0.2977, 0.9072]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.5743, 0.5549, 0.4867, ..., 0.9054, 0.4127, 0.1269]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 8.064356327056885 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '36096', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, 
"MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.467745304107666} + +tensor(indices=tensor([[9142, 8965, 1713, ..., 7951, 8805, 3321], + [4817, 9196, 1756, ..., 3169, 6573, 8815]]), + values=tensor([0.6079, 0.8867, 0.1645, ..., 0.3917, 0.7893, 0.1889]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7226, 0.8808, 0.9866, ..., 0.1039, 0.1530, 0.0349]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.467745304107666 seconds + +tensor(indices=tensor([[9142, 8965, 1713, ..., 7951, 8805, 3321], + [4817, 9196, 1756, ..., 3169, 6573, 8815]]), + values=tensor([0.6079, 0.8867, 0.1645, ..., 0.3917, 0.7893, 0.1889]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7226, 0.8808, 0.9866, ..., 0.1039, 0.1530, 0.0349]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.467745304107666 seconds + +[44.93, 39.55, 40.33, 38.86, 50.54, 43.12, 39.06, 39.11, 38.88, 38.75] +[65.76] +12.980680704116821 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 36096, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.467745304107666, 'TIME_S_1KI': 0.2899973765544012, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 853.6095631027223, 'W': 65.76} +[44.93, 39.55, 40.33, 38.86, 50.54, 43.12, 39.06, 39.11, 38.88, 38.75, 39.56, 39.17, 39.17, 40.75, 39.8, 38.77, 38.84, 38.81, 38.76, 38.88] +724.58 +36.229 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 36096, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.467745304107666, 'TIME_S_1KI': 0.2899973765544012, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 853.6095631027223, 'W': 65.76, 'J_1KI': 23.64831458063836, 'W_1KI': 1.821808510638298, 'W_D': 29.531000000000006, 'J_D': 383.33248187327393, 'W_D_1KI': 0.8181238918439717, 'J_D_1KI': 0.022665223067485918} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..d31d411 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3601, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.263127088546753, "TIME_S_1KI": 2.850076947666413, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 840.4867179584502, "W": 65.96, "J_1KI": 233.40369840556795, "W_1KI": 18.317134129408498, "W_D": 30.692999999999998, "J_D": 391.10155904030796, "W_D_1KI": 8.523465703971118, "J_D_1KI": 2.3669718700280806} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..dea63e3 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 
'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2915339469909668} + +tensor(indices=tensor([[7718, 2306, 1187, ..., 4663, 2202, 4833], + [2184, 6347, 741, ..., 8049, 1496, 348]]), + values=tensor([0.0822, 0.6961, 0.3759, ..., 0.0124, 0.8993, 0.2094]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.4794, 0.4393, 0.9571, ..., 0.5689, 0.6722, 0.5340]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.2915339469909668 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '3601', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.263127088546753} + +tensor(indices=tensor([[6864, 1166, 9666, ..., 414, 2159, 4881], + [ 706, 5541, 4696, ..., 8891, 2276, 955]]), + values=tensor([0.6195, 0.9445, 0.8679, ..., 0.4461, 0.9672, 0.1031]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9468, 0.8448, 0.8871, ..., 0.4107, 0.6251, 0.4346]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.263127088546753 seconds + +tensor(indices=tensor([[6864, 1166, 9666, ..., 414, 2159, 4881], + [ 706, 5541, 4696, ..., 8891, 2276, 955]]), + values=tensor([0.6195, 0.9445, 0.8679, ..., 0.4461, 0.9672, 0.1031]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.9468, 0.8448, 0.8871, ..., 0.4107, 0.6251, 0.4346]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.263127088546753 seconds + +[39.4, 38.81, 39.3, 39.18, 39.33, 39.15, 39.09, 39.85, 38.96, 38.71] +[65.96] +12.742369890213013 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3601, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.263127088546753, 'TIME_S_1KI': 2.850076947666413, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 840.4867179584502, 'W': 65.96} +[39.4, 38.81, 39.3, 39.18, 39.33, 39.15, 39.09, 39.85, 38.96, 38.71, 39.46, 39.23, 38.8, 38.8, 40.22, 39.23, 38.81, 39.1, 39.2, 38.99] +705.3399999999999 +35.266999999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3601, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.263127088546753, 'TIME_S_1KI': 2.850076947666413, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 840.4867179584502, 'W': 65.96, 'J_1KI': 233.40369840556795, 'W_1KI': 18.317134129408498, 'W_D': 30.692999999999998, 'J_D': 391.10155904030796, 'W_D_1KI': 8.523465703971118, 'J_D_1KI': 2.3669718700280806} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..aee0012 --- 
/dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 371, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.5595543384552, "TIME_S_1KI": 28.46241061578221, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 860.9970922946931, "W": 66.2, "J_1KI": 2320.74687950052, "W_1KI": 178.4366576819407, "W_D": 30.673750000000013, "J_D": 398.9427425947787, "W_D_1KI": 82.67857142857146, "J_D_1KI": 222.85329226030044} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..69c322c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.8251051902770996} + +tensor(indices=tensor([[9401, 6279, 4880, ..., 3195, 7662, 8742], + [8852, 4994, 658, ..., 7803, 1233, 1492]]), + values=tensor([0.8719, 0.9921, 0.3410, ..., 0.1309, 0.0703, 0.7999]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.5935, 0.2405, 0.5529, ..., 0.9315, 0.0718, 0.5301]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 2.8251051902770996 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '371', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.5595543384552} + +tensor(indices=tensor([[2845, 2286, 8416, ..., 497, 1957, 4038], + [9315, 3513, 5348, ..., 5086, 7286, 8688]]), + values=tensor([0.0357, 0.2351, 0.8968, ..., 0.5332, 0.8075, 0.4576]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.1442, 0.4843, 0.1683, ..., 0.9892, 0.4392, 0.4399]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.5595543384552 seconds + +tensor(indices=tensor([[2845, 2286, 8416, ..., 497, 1957, 4038], + [9315, 3513, 5348, ..., 5086, 7286, 8688]]), + values=tensor([0.0357, 0.2351, 0.8968, ..., 0.5332, 0.8075, 0.4576]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.1442, 0.4843, 0.1683, ..., 0.9892, 0.4392, 0.4399]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.5595543384552 seconds + +[39.98, 39.6, 39.48, 39.02, 38.87, 39.36, 38.98, 38.95, 38.86, 38.96] +[66.2] +13.005998373031616 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 371, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.5595543384552, 'TIME_S_1KI': 28.46241061578221, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 860.9970922946931, 'W': 66.2} +[39.98, 39.6, 39.48, 39.02, 38.87, 39.36, 38.98, 38.95, 38.86, 38.96, 39.5, 38.8, 39.31, 38.98, 39.1, 39.25, 41.01, 42.36, 39.33, 40.09] +710.5249999999999 +35.52624999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 371, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.5595543384552, 'TIME_S_1KI': 28.46241061578221, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 860.9970922946931, 'W': 66.2, 'J_1KI': 2320.74687950052, 'W_1KI': 178.4366576819407, 'W_D': 30.673750000000013, 'J_D': 398.9427425947787, 'W_D_1KI': 82.67857142857146, 'J_D_1KI': 222.85329226030044} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..c23ad92 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 14.203349590301514, "TIME_S_1KI": 142.03349590301514, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1170.895357787609, "W": 66.47, "J_1KI": 11708.953577876091, "W_1KI": 664.6999999999999, "W_D": 31.122499999999995, "J_D": 548.2351552993058, "W_D_1KI": 311.22499999999997, "J_D_1KI": 3112.2499999999995} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..f741113 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 14.203349590301514} + +tensor(indices=tensor([[9445, 7058, 7165, ..., 9125, 3240, 9957], + [3472, 7549, 3127, ..., 4406, 8014, 5021]]), + values=tensor([0.2748, 0.8463, 0.0179, ..., 0.9159, 0.7955, 0.6695]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9687, 0.1212, 0.0059, ..., 0.5409, 0.0615, 0.3602]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 14.203349590301514 seconds + +tensor(indices=tensor([[9445, 7058, 7165, ..., 9125, 3240, 9957], + [3472, 7549, 3127, ..., 4406, 8014, 5021]]), + values=tensor([0.2748, 0.8463, 0.0179, ..., 0.9159, 0.7955, 0.6695]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9687, 0.1212, 0.0059, ..., 0.5409, 0.0615, 0.3602]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 14.203349590301514 seconds + +[40.18, 38.97, 39.31, 39.22, 39.63, 38.93, 40.54, 38.91, 38.94, 38.77] +[66.47] +17.61539578437805 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 
'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 14.203349590301514, 'TIME_S_1KI': 142.03349590301514, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1170.895357787609, 'W': 66.47} +[40.18, 38.97, 39.31, 39.22, 39.63, 38.93, 40.54, 38.91, 38.94, 38.77, 39.91, 38.83, 38.96, 38.87, 39.35, 38.79, 39.7, 39.94, 38.83, 39.6] +706.95 +35.347500000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 14.203349590301514, 'TIME_S_1KI': 142.03349590301514, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1170.895357787609, 'W': 66.47, 'J_1KI': 11708.953577876091, 'W_1KI': 664.6999999999999, 'W_D': 31.122499999999995, 'J_D': 548.2351552993058, 'W_D_1KI': 311.22499999999997, 'J_D_1KI': 3112.2499999999995} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..5e7eb24 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 28.75423765182495, "TIME_S_1KI": 287.5423765182495, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2147.241101813316, "W": 65.3, "J_1KI": 21472.411018133163, "W_1KI": 653.0, "W_D": 29.722499999999997, "J_D": 977.3564111584424, "W_D_1KI": 297.22499999999997, "J_D_1KI": 2972.25} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..642d388 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 28.75423765182495} + +tensor(indices=tensor([[4741, 8104, 5970, ..., 1890, 5522, 7855], + [9260, 4284, 2553, ..., 1806, 4926, 7189]]), + values=tensor([0.6127, 0.9555, 0.2495, ..., 0.3115, 0.0514, 0.0470]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8474, 0.1392, 0.9313, ..., 0.9943, 0.5733, 0.3153]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 28.75423765182495 seconds + +tensor(indices=tensor([[4741, 8104, 5970, ..., 1890, 5522, 7855], + [9260, 4284, 2553, ..., 1806, 4926, 7189]]), + values=tensor([0.6127, 0.9555, 0.2495, ..., 0.3115, 0.0514, 0.0470]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.8474, 0.1392, 0.9313, ..., 0.9943, 0.5733, 0.3153]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 
28.75423765182495 seconds + +[41.45, 39.04, 39.26, 39.5, 39.29, 39.73, 39.09, 39.14, 38.89, 38.84] +[65.3] +32.8827121257782 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 28.75423765182495, 'TIME_S_1KI': 287.5423765182495, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2147.241101813316, 'W': 65.3} +[41.45, 39.04, 39.26, 39.5, 39.29, 39.73, 39.09, 39.14, 38.89, 38.84, 39.66, 38.84, 38.84, 38.82, 38.95, 44.1, 39.4, 39.89, 39.39, 38.81] +711.55 +35.5775 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 28.75423765182495, 'TIME_S_1KI': 287.5423765182495, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2147.241101813316, 'W': 65.3, 'J_1KI': 21472.411018133163, 'W_1KI': 653.0, 'W_D': 29.722499999999997, 'J_D': 977.3564111584424, 'W_D_1KI': 297.22499999999997, 'J_D_1KI': 2972.25} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..04a9ce8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 56.33694052696228, "TIME_S_1KI": 563.3694052696228, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4109.53435587883, "W": 65.5, "J_1KI": 41095.3435587883, "W_1KI": 655.0, "W_D": 30.258750000000006, "J_D": 1898.4637052053217, "W_D_1KI": 302.5875000000001, "J_D_1KI": 3025.875000000001} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..5c22281 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 56.33694052696228} + +tensor(indices=tensor([[5301, 7360, 2255, ..., 5447, 2309, 7569], + [5610, 7566, 5009, ..., 2507, 8685, 8044]]), + values=tensor([0.3294, 0.2149, 0.1462, ..., 0.2796, 0.0942, 0.0086]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.5988, 0.2327, 0.0917, ..., 0.3251, 0.5355, 0.7920]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 56.33694052696228 seconds + +tensor(indices=tensor([[5301, 7360, 2255, ..., 5447, 2309, 7569], + [5610, 7566, 5009, ..., 2507, 8685, 8044]]), + values=tensor([0.3294, 0.2149, 0.1462, ..., 0.2796, 0.0942, 0.0086]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.5988, 0.2327, 0.0917, ..., 0.3251, 
0.5355, 0.7920]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 56.33694052696228 seconds + +[39.56, 38.85, 39.04, 38.9, 38.93, 39.04, 38.98, 39.26, 39.47, 39.08] +[65.5] +62.74098253250122 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 56.33694052696228, 'TIME_S_1KI': 563.3694052696228, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4109.53435587883, 'W': 65.5} +[39.56, 38.85, 39.04, 38.9, 38.93, 39.04, 38.98, 39.26, 39.47, 39.08, 39.69, 39.06, 39.29, 39.16, 39.03, 39.63, 39.03, 39.15, 39.13, 39.42] +704.8249999999999 +35.241249999999994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 56.33694052696228, 'TIME_S_1KI': 563.3694052696228, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4109.53435587883, 'W': 65.5, 'J_1KI': 41095.3435587883, 'W_1KI': 655.0, 'W_D': 30.258750000000006, 'J_D': 1898.4637052053217, 'W_D_1KI': 302.5875000000001, 'J_D_1KI': 3025.875000000001} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..875cf7f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 85.27317547798157, "TIME_S_1KI": 852.7317547798157, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6065.219387569427, "W": 65.52, "J_1KI": 60652.193875694276, "W_1KI": 655.2, "W_D": 29.881499999999996, "J_D": 2766.1454995368713, "W_D_1KI": 298.81499999999994, "J_D_1KI": 2988.149999999999} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..55f6737 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 85.27317547798157} + +tensor(indices=tensor([[7517, 643, 4736, ..., 4882, 5111, 8362], + [8240, 4030, 1475, ..., 1704, 525, 7807]]), + values=tensor([0.6772, 0.6271, 0.5111, ..., 0.8320, 0.2589, 0.8403]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.6345, 0.0086, 0.5938, ..., 0.7719, 0.5005, 0.6066]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 85.27317547798157 seconds + +tensor(indices=tensor([[7517, 643, 4736, ..., 4882, 5111, 8362], + [8240, 4030, 1475, ..., 1704, 525, 
7807]]), + values=tensor([0.6772, 0.6271, 0.5111, ..., 0.8320, 0.2589, 0.8403]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.6345, 0.0086, 0.5938, ..., 0.7719, 0.5005, 0.6066]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 85.27317547798157 seconds + +[40.02, 39.66, 39.56, 40.07, 39.51, 39.01, 38.93, 39.09, 39.0, 39.31] +[65.52] +92.57050347328186 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 85.27317547798157, 'TIME_S_1KI': 852.7317547798157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6065.219387569427, 'W': 65.52} +[40.02, 39.66, 39.56, 40.07, 39.51, 39.01, 38.93, 39.09, 39.0, 39.31, 40.41, 39.04, 39.03, 43.72, 39.62, 39.41, 39.71, 38.96, 38.96, 39.24] +712.77 +35.6385 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 85.27317547798157, 'TIME_S_1KI': 852.7317547798157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6065.219387569427, 'W': 65.52, 'J_1KI': 60652.193875694276, 'W_1KI': 655.2, 'W_D': 29.881499999999996, 'J_D': 2766.1454995368713, 'W_D_1KI': 298.81499999999994, 'J_D_1KI': 2988.149999999999} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..e24742f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 113.69686484336853, "TIME_S_1KI": 1136.9686484336853, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 8124.020613417625, "W": 65.66, "J_1KI": 81240.20613417625, "W_1KI": 656.5999999999999, "W_D": 29.878, "J_D": 3696.7634463553427, "W_D_1KI": 298.78, "J_D_1KI": 2987.7999999999997} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..cc667e8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 113.69686484336853} + +tensor(indices=tensor([[1855, 2019, 4074, ..., 5861, 3349, 3567], + [3365, 8705, 2715, ..., 963, 6883, 6514]]), + values=tensor([0.1173, 0.7739, 0.2172, ..., 0.4492, 0.5296, 0.1803]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.6399, 0.8842, 0.3630, ..., 0.0726, 0.5579, 0.6134]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 
+NNZ: 40000000 +Density: 0.4 +Time: 113.69686484336853 seconds + +tensor(indices=tensor([[1855, 2019, 4074, ..., 5861, 3349, 3567], + [3365, 8705, 2715, ..., 963, 6883, 6514]]), + values=tensor([0.1173, 0.7739, 0.2172, ..., 0.4492, 0.5296, 0.1803]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.6399, 0.8842, 0.3630, ..., 0.0726, 0.5579, 0.6134]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 113.69686484336853 seconds + +[40.24, 39.27, 39.79, 39.08, 39.73, 39.74, 39.67, 39.11, 39.19, 42.46] +[65.66] +123.72861123085022 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 113.69686484336853, 'TIME_S_1KI': 1136.9686484336853, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8124.020613417625, 'W': 65.66} +[40.24, 39.27, 39.79, 39.08, 39.73, 39.74, 39.67, 39.11, 39.19, 42.46, 40.69, 40.41, 39.1, 39.72, 39.44, 39.49, 41.06, 40.19, 39.18, 39.55] +715.6399999999999 +35.782 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 113.69686484336853, 'TIME_S_1KI': 1136.9686484336853, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8124.020613417625, 'W': 65.66, 'J_1KI': 81240.20613417625, 'W_1KI': 656.5999999999999, 'W_D': 29.878, 'J_D': 3696.7634463553427, 'W_D_1KI': 298.78, 'J_D_1KI': 2987.7999999999997} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..9cf7c26 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 141.6028139591217, "TIME_S_1KI": 1416.028139591217, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 10349.819285047055, "W": 67.45, "J_1KI": 103498.19285047054, "W_1KI": 674.5, "W_D": 25.81225, "J_D": 3960.7431110519765, "W_D_1KI": 258.1225, "J_D_1KI": 2581.225} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..175a5e1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 141.6028139591217} + +tensor(indices=tensor([[8121, 5450, 2511, ..., 8644, 7632, 779], + [8299, 6800, 7151, ..., 8242, 633, 3199]]), + values=tensor([0.5383, 0.1260, 0.9269, ..., 0.1173, 0.5989, 0.2331]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.4434, 
0.7180, 0.5989, ..., 0.6987, 0.3126, 0.4713]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 141.6028139591217 seconds + +tensor(indices=tensor([[8121, 5450, 2511, ..., 8644, 7632, 779], + [8299, 6800, 7151, ..., 8242, 633, 3199]]), + values=tensor([0.5383, 0.1260, 0.9269, ..., 0.1173, 0.5989, 0.2331]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.4434, 0.7180, 0.5989, ..., 0.6987, 0.3126, 0.4713]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 141.6028139591217 seconds + +[40.39, 39.01, 39.18, 39.4, 44.6, 66.07, 64.9, 66.57, 66.9, 58.73] +[67.45] +153.44431853294373 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 141.6028139591217, 'TIME_S_1KI': 1416.028139591217, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10349.819285047055, 'W': 67.45} +[40.39, 39.01, 39.18, 39.4, 44.6, 66.07, 64.9, 66.57, 66.9, 58.73, 40.36, 40.31, 39.71, 38.98, 39.58, 39.59, 40.06, 39.23, 39.46, 38.93] +832.7550000000001 +41.637750000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 141.6028139591217, 'TIME_S_1KI': 1416.028139591217, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10349.819285047055, 'W': 67.45, 'J_1KI': 103498.19285047054, 'W_1KI': 674.5, 'W_D': 25.81225, 'J_D': 3960.7431110519765, 'W_D_1KI': 258.1225, 'J_D_1KI': 2581.225} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..b91d8a3 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 312340, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.412656784057617, "TIME_S_1KI": 0.033337570545103466, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 845.5558525276183, "W": 65.24, "J_1KI": 2.707164796464168, "W_1KI": 0.2088749439713133, "W_D": 29.389250000000004, "J_D": 380.90515540921695, "W_D_1KI": 0.09409377601331884, "J_D_1KI": 0.0003012543254572544} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..2e43e1e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,748 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.011310100555419922} + +tensor(indices=tensor([[8604, 3517, 6207, ..., 5384, 5220, 
636], + [6473, 457, 3761, ..., 2852, 7546, 8832]]), + values=tensor([0.3453, 0.5433, 0.8875, 0.3362, 0.4521, 0.8455, 0.1691, + 0.4803, 0.3346, 0.8535, 0.6081, 0.5838, 0.8605, 0.2022, + 0.5044, 0.8088, 0.8934, 0.0649, 0.2839, 0.1488, 0.5396, + 0.5684, 0.1333, 0.8022, 0.7728, 0.5013, 0.4517, 0.9120, + 0.8700, 0.8511, 0.9070, 0.4203, 0.2086, 0.0724, 0.4250, + 0.2937, 0.6311, 0.8321, 0.4344, 0.5561, 0.0806, 0.5755, + 0.3570, 0.8975, 0.0194, 0.4971, 0.5757, 0.1463, 0.0531, + 0.3547, 0.0837, 0.7309, 0.3464, 0.2201, 0.9479, 0.2088, + 0.2187, 0.8904, 0.8801, 0.4909, 0.7642, 0.3923, 0.7785, + 0.0826, 0.1032, 0.4591, 0.1642, 0.2167, 0.0991, 0.5096, + 0.3182, 0.1949, 0.5085, 0.0896, 0.9455, 0.5246, 0.1264, + 0.8185, 0.0579, 0.4346, 0.9720, 0.0176, 0.0256, 0.4342, + 0.1694, 0.3375, 0.1303, 0.3592, 0.7592, 0.8137, 0.6222, + 0.7288, 0.9336, 0.7593, 0.1328, 0.0308, 0.8611, 0.7721, + 0.5262, 0.6517, 0.8811, 0.9260, 0.2639, 0.9281, 0.7498, + 0.9771, 0.3443, 0.2322, 0.6568, 0.6370, 0.6074, 0.0775, + 0.6047, 0.7192, 0.9406, 0.9444, 0.4940, 0.7215, 0.3291, + 0.0083, 0.1486, 0.4308, 0.4395, 0.6369, 0.7150, 0.4602, + 0.5784, 0.6377, 0.8311, 0.1443, 0.3992, 0.2845, 0.0851, + 0.3162, 0.7783, 0.8704, 0.9291, 0.3138, 0.1761, 0.1655, + 0.5646, 0.0817, 0.6978, 0.5843, 0.3349, 0.2569, 0.6966, + 0.3759, 0.1424, 0.5344, 0.7151, 0.3621, 0.5511, 0.7591, + 0.0102, 0.6204, 0.1535, 0.3978, 0.6734, 0.3496, 0.3543, + 0.3446, 0.6937, 0.7377, 0.6517, 0.3741, 0.1855, 0.2106, + 0.2696, 0.4239, 0.9802, 0.0821, 0.6009, 0.4443, 0.8780, + 0.2288, 0.4942, 0.3639, 0.7709, 0.3697, 0.0124, 0.2729, + 0.5942, 0.7856, 0.4455, 0.7671, 0.2490, 0.8298, 0.0536, + 0.3334, 0.3784, 0.8582, 0.8084, 0.5764, 0.5802, 0.8290, + 0.3246, 0.7147, 0.2374, 0.1774, 0.1177, 0.8640, 0.1442, + 0.5506, 0.4836, 0.7747, 0.4840, 0.7935, 0.7838, 0.8960, + 0.8337, 0.5768, 0.3654, 0.0248, 0.2891, 0.8186, 0.2747, + 0.9670, 0.1752, 0.9222, 0.2624, 0.8882, 0.0658, 0.3924, + 0.0616, 0.0405, 0.9139, 0.7862, 0.9565, 0.1972, 0.3587, + 0.4248, 0.0870, 0.5927, 0.8050, 0.8059, 0.2423, 0.9430, + 0.0793, 0.5265, 0.1530, 0.0127, 0.4165, 0.7562, 0.9460, + 0.4526, 0.6163, 0.8822, 0.7984, 0.3472, 0.8714, 0.2119, + 0.1894, 0.5070, 0.9595, 0.9252, 0.4939, 0.7059, 0.8535, + 0.4902, 0.6368, 0.4824, 0.4663, 0.8639, 0.2507, 0.7287, + 0.6413, 0.9464, 0.6465, 0.4241, 0.6464, 0.2324, 0.5970, + 0.5594, 0.8875, 0.0122, 0.1410, 0.8080, 0.7879, 0.3973, + 0.9486, 0.8670, 0.6041, 0.1390, 0.1874, 0.3770, 0.3230, + 0.5591, 0.8811, 0.4651, 0.8435, 0.8820, 0.1557, 0.6011, + 0.0462, 0.5003, 0.0357, 0.8147, 0.1540, 0.5426, 0.1209, + 0.6141, 0.7731, 0.5935, 0.5599, 0.5094, 0.4904, 0.4040, + 0.7256, 0.5576, 0.5381, 0.4441, 0.1570, 0.5069, 0.5835, + 0.4094, 0.7271, 0.7583, 0.6839, 0.6095, 0.3350, 0.5064, + 0.1743, 0.8424, 0.2807, 0.3696, 0.4705, 0.0448, 0.1786, + 0.6612, 0.6718, 0.8943, 0.0117, 0.4122, 0.8684, 0.2449, + 0.6117, 0.3759, 0.6177, 0.2394, 0.3958, 0.4799, 0.4861, + 0.8897, 0.2062, 0.3350, 0.6445, 0.1128, 0.7808, 0.0756, + 0.8531, 0.5264, 0.7454, 0.1138, 0.8715, 0.6426, 0.8076, + 0.2634, 0.2572, 0.8139, 0.2325, 0.7509, 0.4608, 0.2781, + 0.1591, 0.9253, 0.5609, 0.4590, 0.1479, 0.0902, 0.0699, + 0.4577, 0.9655, 0.0186, 0.1757, 0.9666, 0.8437, 0.8571, + 0.5215, 0.8790, 0.1213, 0.3296, 0.5698, 0.0513, 0.4381, + 0.4817, 0.6132, 0.5002, 0.8983, 0.6881, 0.2876, 0.7323, + 0.0646, 0.4019, 0.9124, 0.8132, 0.0946, 0.8244, 0.5143, + 0.6581, 0.1267, 0.6488, 0.0083, 0.2494, 0.8998, 0.2865, + 0.7122, 0.0603, 0.5258, 0.2039, 0.8815, 0.1330, 0.3412, + 0.9359, 0.6290, 0.4542, 0.9191, 0.2670, 0.8294, 0.9257, + 
0.3966, 0.4658, 0.7130, 0.2572, 0.2211, 0.0336, 0.6768, + 0.8594, 0.4309, 0.0269, 0.7457, 0.5156, 0.5816, 0.9190, + 0.8536, 0.6769, 0.7513, 0.7709, 0.5831, 0.8191, 0.0608, + 0.5017, 0.1249, 0.0771, 0.5136, 0.4317, 0.4021, 0.1594, + 0.8395, 0.0681, 0.0897, 0.8306, 0.9729, 0.0542, 0.8889, + 0.0195, 0.6094, 0.4095, 0.4700, 0.6746, 0.9094, 0.9579, + 0.6137, 0.0039, 0.7501, 0.2764, 0.8865, 0.9946, 0.6744, + 0.5342, 0.7490, 0.7389, 0.4656, 0.9174, 0.7383, 0.0242, + 0.4917, 0.4794, 0.5397, 0.4342, 0.1213, 0.4794, 0.1316, + 0.2688, 0.1475, 0.2956, 0.6316, 0.1107, 0.1497, 0.3997, + 0.4297, 0.9414, 0.5795, 0.7420, 0.6749, 0.1517, 0.6217, + 0.4266, 0.0680, 0.5159, 0.9019, 0.7692, 0.3825, 0.2354, + 0.6782, 0.0081, 0.3876, 0.9383, 0.9296, 0.5634, 0.1067, + 0.0281, 0.6895, 0.7175, 0.9627, 0.9601, 0.8092, 0.2906, + 0.6888, 0.4894, 0.2386, 0.0952, 0.0680, 0.0455, 0.1170, + 0.2872, 0.0082, 0.6620, 0.0490, 0.1139, 0.5217, 0.1600, + 0.9827, 0.6051, 0.7952, 0.7397, 0.3107, 0.4497, 0.8925, + 0.0506, 0.9935, 0.9301, 0.0035, 0.9475, 0.7715, 0.4626, + 0.9349, 0.1687, 0.9273, 0.3465, 0.0919, 0.1829, 0.3695, + 0.2325, 0.5725, 0.2258, 0.5457, 0.3965, 0.5368, 0.8174, + 0.0377, 0.7647, 0.7040, 0.6573, 0.1532, 0.5182, 0.7097, + 0.8081, 0.5450, 0.8066, 0.4023, 0.1516, 0.1226, 0.2177, + 0.5419, 0.0592, 0.6377, 0.0809, 0.5309, 0.8724, 0.1174, + 0.2380, 0.5491, 0.9933, 0.6859, 0.7163, 0.0327, 0.5647, + 0.1229, 0.9803, 0.5517, 0.5781, 0.5508, 0.1386, 0.3395, + 0.9119, 0.1501, 0.8697, 0.7580, 0.4668, 0.2587, 0.4256, + 0.4887, 0.6739, 0.8156, 0.6442, 0.0730, 0.0013, 0.2372, + 0.1228, 0.1214, 0.3318, 0.8742, 0.7341, 0.4174, 0.7934, + 0.2975, 0.7983, 0.7410, 0.6631, 0.8851, 0.8607, 0.3068, + 0.7149, 0.8392, 0.6514, 0.6639, 0.0665, 0.7260, 0.6435, + 0.6513, 0.6539, 0.4131, 0.6599, 0.6657, 0.9984, 0.1959, + 0.2999, 0.2601, 0.7434, 0.8090, 0.5245, 0.9120, 0.3211, + 0.9372, 0.0622, 0.5920, 0.7901, 0.5951, 0.6572, 0.5310, + 0.0408, 0.8725, 0.6087, 0.7934, 0.7209, 0.3030, 0.1864, + 0.3497, 0.3441, 0.8099, 0.9709, 0.0937, 0.5254, 0.5779, + 0.0979, 0.0716, 0.6393, 0.6622, 0.9564, 0.9888, 0.6260, + 0.7419, 0.5369, 0.6988, 0.5987, 0.1628, 0.4426, 0.5935, + 0.1147, 0.3034, 0.3548, 0.4513, 0.2846, 0.2501, 0.5448, + 0.0676, 0.5970, 0.2150, 0.5529, 0.2056, 0.4022, 0.7884, + 0.3342, 0.2970, 0.7508, 0.3846, 0.8873, 0.4663, 0.9773, + 0.5524, 0.2794, 0.8643, 0.4931, 0.1886, 0.7801, 0.6603, + 0.3208, 0.6869, 0.4022, 0.9638, 0.5796, 0.9223, 0.4549, + 0.6944, 0.2187, 0.1199, 0.2660, 0.5004, 0.1276, 0.7905, + 0.3998, 0.2038, 0.2555, 0.4457, 0.9027, 0.2863, 0.5705, + 0.5279, 0.2651, 0.1922, 0.6139, 0.7964, 0.7100, 0.6454, + 0.6221, 0.5821, 0.9842, 0.0776, 0.2110, 0.5469, 0.1788, + 0.8974, 0.3401, 0.6450, 0.0608, 0.8368, 0.6839, 0.5735, + 0.0234, 0.6021, 0.5103, 0.6078, 0.4351, 0.3415, 0.6379, + 0.9699, 0.3064, 0.7711, 0.5247, 0.2535, 0.5308, 0.1171, + 0.0304, 0.1007, 0.5942, 0.0026, 0.6074, 0.0781, 0.9879, + 0.9683, 0.0977, 0.4078, 0.3431, 0.2948, 0.8164, 0.0130, + 0.0825, 0.9740, 0.7938, 0.3820, 0.9508, 0.6581, 0.1668, + 0.7625, 0.7523, 0.1956, 0.4230, 0.7125, 0.4004, 0.6432, + 0.4643, 0.5422, 0.7618, 0.2751, 0.4913, 0.3960, 0.0705, + 0.4738, 0.7510, 0.4977, 0.5007, 0.4293, 0.2628, 0.4103, + 0.1925, 0.1285, 0.9550, 0.7136, 0.1585, 0.5701, 0.3324, + 0.7707, 0.0608, 0.1764, 0.4147, 0.5635, 0.1237, 0.1964, + 0.2341, 0.7288, 0.2977, 0.1238, 0.2519, 0.1732, 0.4711, + 0.9209, 0.5985, 0.6377, 0.7861, 0.3799, 0.8477, 0.4285, + 0.6218, 0.4320, 0.5940, 0.8142, 0.3085, 0.7594, 0.2293, + 0.1176, 0.6311, 0.1333, 0.2907, 0.9838, 0.0246, 0.1266, + 0.4974, 0.2025, 
0.6498, 0.2587, 0.0420, 0.5328, 0.8689, + 0.5149, 0.1338, 0.9426, 0.7742, 0.1646, 0.2441, 0.3070, + 0.8073, 0.7613, 0.0577, 0.6325, 0.2117, 0.0493, 0.0740, + 0.5609, 0.1595, 0.3954, 0.0710, 0.0930, 0.2366, 0.9868, + 0.6964, 0.0942, 0.1460, 0.8552, 0.7737, 0.9044, 0.7882, + 0.9565, 0.4499, 0.2497, 0.2606, 0.3496, 0.3569, 0.7723, + 0.5579, 0.9432, 0.9497, 0.1325, 0.9368, 0.9783, 0.2378, + 0.4976, 0.7947, 0.1674, 0.6862, 0.5116, 0.5078, 0.4389, + 0.8172, 0.0404, 0.7954, 0.6900, 0.4874, 0.3387, 0.3826, + 0.4382, 0.0134, 0.8364, 0.3997, 0.9989, 0.3177, 0.1866, + 0.0050, 0.9602, 0.4488, 0.0123, 0.6945, 0.8992, 0.0641, + 0.2305, 0.4016, 0.1838, 0.9745, 0.4918, 0.3451, 0.2573, + 0.0736, 0.7725, 0.1558, 0.1828, 0.7366, 0.1291, 0.5672, + 0.1761, 0.4129, 0.9344, 0.1994, 0.0332, 0.8069, 0.2008, + 0.2943, 0.9231, 0.5527, 0.3341, 0.0621, 0.0740, 0.2993, + 0.0738, 0.8594, 0.9734, 0.4164, 0.0116, 0.7763, 0.6117, + 0.6871, 0.1043, 0.6630, 0.2287, 0.8811, 0.3757, 0.3596, + 0.8159, 0.7639, 0.9209, 0.4215, 0.2471, 0.3391, 0.4868, + 0.9460, 0.7646, 0.7414, 0.4011, 0.9771, 0.8878, 0.0294, + 0.4516, 0.0097, 0.3077, 0.5483, 0.0620, 0.7824, 0.3637, + 0.5494, 0.0993, 0.3062, 0.7239, 0.7986, 0.0227, 0.9140, + 0.4812, 0.6774, 0.8198, 0.6243, 0.8143, 0.5455]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.6512, 0.8612, 0.0426, ..., 0.1255, 0.6637, 0.9114]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.011310100555419922 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '92837', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.1209168434143066} + +tensor(indices=tensor([[3374, 257, 3209, ..., 2254, 388, 2796], + [7654, 9745, 2773, ..., 2334, 3069, 1975]]), + values=tensor([3.8862e-01, 2.0477e-01, 6.0884e-01, 2.1364e-01, + 7.0563e-01, 8.2038e-01, 2.0654e-01, 2.4067e-01, + 4.6216e-01, 9.2760e-01, 8.5357e-01, 3.7064e-01, + 4.0803e-01, 1.9516e-01, 3.0384e-01, 6.1408e-01, + 6.2911e-01, 4.5370e-01, 6.9393e-01, 5.7104e-01, + 1.6797e-02, 8.7768e-01, 1.0016e-01, 6.8387e-01, + 9.2406e-02, 7.7409e-01, 7.5487e-01, 5.5376e-01, + 3.3561e-01, 1.3491e-02, 2.6845e-01, 2.8319e-02, + 9.2753e-01, 7.7676e-01, 5.8346e-01, 5.1710e-01, + 8.3059e-01, 8.6619e-01, 1.2639e-01, 3.0156e-03, + 7.1352e-01, 3.8851e-01, 6.4567e-01, 8.0620e-01, + 6.4706e-01, 8.9346e-01, 2.7310e-01, 8.5743e-01, + 3.1394e-01, 4.0298e-01, 6.1835e-01, 4.3115e-02, + 2.0419e-01, 1.4101e-01, 9.9633e-01, 3.7739e-01, + 8.8258e-02, 8.0207e-01, 9.5292e-01, 9.7652e-01, + 6.4978e-01, 5.9416e-01, 8.5710e-03, 6.1254e-01, + 7.0306e-01, 2.0773e-01, 3.1026e-01, 5.8140e-01, + 9.3782e-01, 5.0966e-01, 8.1511e-01, 4.2446e-01, + 6.9412e-01, 3.6457e-01, 6.8241e-01, 4.4954e-01, + 8.7869e-01, 9.0486e-01, 2.2508e-01, 3.1065e-01, + 7.2976e-01, 6.2152e-01, 6.1725e-01, 8.0556e-01, + 4.6604e-01, 6.6821e-01, 7.6491e-01, 6.3786e-01, + 6.0704e-01, 9.0813e-01, 6.2533e-01, 8.7377e-02, + 5.8018e-01, 9.2452e-01, 9.5990e-01, 6.3971e-01, + 8.4033e-01, 4.6643e-01, 5.2911e-01, 6.6575e-01, + 3.9780e-01, 6.1192e-01, 4.1660e-01, 7.0471e-01, + 4.6376e-02, 2.3348e-01, 7.9686e-01, 8.5968e-01, + 2.3875e-01, 5.1686e-01, 4.5839e-01, 1.3593e-01, + 4.8841e-01, 1.4145e-01, 9.8751e-02, 7.4528e-01, + 4.6055e-01, 2.5248e-01, 4.7196e-01, 1.6975e-01, + 5.3728e-01, 2.3477e-01, 9.1718e-01, 
6.5891e-01, + 4.7188e-01, 6.5338e-02, 1.2709e-01, 4.0619e-01, + 3.6298e-01, 3.9362e-01, 4.3761e-01, 2.6993e-01, + 1.0593e-01, 7.9824e-02, 7.9490e-01, 8.1965e-01, + 8.8536e-01, 9.5651e-01, 7.3275e-02, 5.2455e-01, + 9.7701e-02, 6.3446e-01, 4.6589e-01, 5.3825e-01, + 2.9585e-01, 4.3201e-02, 2.1565e-01, 3.3678e-01, + 8.7776e-01, 5.0756e-01, 5.9467e-02, 8.2850e-01, + 8.0737e-01, 5.3912e-02, 3.9137e-01, 6.6545e-02, + 7.7095e-01, 7.8183e-01, 4.0321e-01, 1.1978e-02, + 5.1250e-01, 2.2990e-01, 4.1727e-02, 8.8796e-01, + 8.9000e-01, 6.3709e-01, 5.1258e-01, 1.1870e-02, + 4.6165e-01, 3.8766e-01, 4.1887e-01, 1.2037e-01, + 5.8985e-01, 5.3730e-01, 3.8309e-01, 3.9125e-01, + 8.3269e-01, 9.8499e-01, 4.7266e-01, 6.6807e-02, + 9.1626e-01, 5.7313e-01, 7.1565e-02, 9.2683e-01, + 6.8388e-01, 5.1608e-01, 3.2436e-01, 6.6874e-01, + 3.5699e-01, 2.9195e-01, 6.4328e-01, 2.6528e-01, + 8.9600e-01, 7.7883e-01, 5.8397e-01, 9.3306e-01, + 1.7328e-01, 3.8128e-01, 4.5161e-01, 6.8623e-01, + 1.9342e-01, 7.1882e-01, 6.9781e-01, 6.6846e-01, + 2.6349e-01, 7.6815e-02, 7.2855e-01, 5.6813e-02, + 2.2455e-02, 6.8852e-01, 3.9944e-01, 7.1709e-01, + 1.9177e-01, 6.4064e-01, 6.0957e-01, 8.2043e-01, + 3.3700e-01, 7.1978e-01, 9.2732e-01, 6.4106e-01, + 6.6353e-01, 8.3095e-01, 2.1395e-01, 7.2065e-02, + 2.3761e-01, 5.3820e-01, 1.5350e-01, 5.7154e-01, + 7.5692e-01, 2.4342e-01, 2.0654e-01, 9.9257e-01, + 4.7087e-01, 8.8408e-01, 2.1190e-01, 5.8444e-01, + 3.5818e-02, 1.5916e-01, 5.8265e-01, 7.7121e-01, + 7.5991e-01, 8.1627e-01, 9.0416e-01, 6.9701e-02, + 9.6998e-01, 1.4979e-01, 7.5553e-01, 9.1048e-01, + 9.3529e-01, 1.7832e-01, 2.4402e-01, 5.8898e-01, + 7.3732e-01, 7.0817e-01, 4.1834e-01, 5.6941e-01, + 4.8054e-01, 6.1974e-01, 7.5594e-01, 1.5794e-02, + 8.3438e-01, 9.7874e-04, 2.7820e-01, 1.1321e-01, + 7.7006e-01, 8.6087e-01, 5.0458e-01, 9.8672e-03, + 3.2743e-01, 6.7953e-01, 6.5002e-01, 3.1178e-01, + 8.0403e-01, 2.0000e-01, 8.2140e-01, 8.0474e-01, + 1.8891e-02, 9.2470e-01, 8.8882e-02, 4.2583e-01, + 4.2067e-01, 9.6145e-01, 7.2755e-01, 6.7627e-02, + 7.2771e-01, 8.8791e-01, 3.6332e-01, 4.2388e-01, + 6.3585e-01, 7.8477e-02, 5.1434e-01, 3.1116e-01, + 9.8886e-01, 6.1104e-01, 9.6631e-02, 3.7680e-01, + 8.0799e-01, 5.2600e-01, 2.3914e-01, 5.2400e-01, + 6.4662e-02, 2.7439e-02, 5.4503e-01, 2.2071e-01, + 8.3308e-01, 7.6334e-01, 7.2394e-01, 7.3909e-01, + 6.1850e-01, 7.8257e-01, 9.3174e-01, 2.1671e-01, + 9.6043e-02, 2.6851e-01, 5.9695e-01, 9.3792e-01, + 5.6862e-01, 9.2003e-01, 4.2276e-01, 9.7869e-01, + 2.0136e-01, 1.3504e-01, 2.3719e-01, 8.6631e-02, + 7.0922e-01, 8.4288e-01, 5.2666e-01, 6.0664e-01, + 1.4625e-01, 2.9605e-01, 9.0752e-01, 9.9786e-01, + 5.7255e-01, 9.3345e-01, 4.7611e-01, 8.6052e-01, + 7.3527e-01, 3.7737e-01, 3.1438e-01, 4.7894e-01, + 8.5930e-01, 2.8234e-01, 2.5907e-01, 6.7383e-01, + 6.3167e-01, 2.2379e-01, 7.9452e-01, 8.9400e-01, + 6.4642e-01, 9.7356e-01, 1.6405e-01, 4.9004e-01, + 5.2628e-01, 2.3262e-01, 8.2087e-01, 7.4980e-01, + 7.8795e-01, 7.3402e-01, 3.8724e-01, 2.3809e-01, + 5.4652e-01, 8.8632e-02, 6.9386e-01, 9.1306e-01, + 8.4586e-01, 3.3233e-01, 2.8684e-01, 2.9311e-01, + 5.8012e-01, 5.2959e-01, 8.5201e-01, 5.0425e-01, + 3.6256e-01, 3.9973e-01, 2.9210e-02, 2.3296e-01, + 2.8649e-01, 9.5772e-01, 7.2278e-01, 5.6816e-01, + 4.5834e-01, 8.1184e-01, 5.4713e-01, 7.1883e-01, + 9.4743e-01, 3.3785e-01, 5.9563e-01, 8.9396e-01, + 2.6608e-01, 7.1919e-01, 6.2917e-01, 4.0657e-01, + 4.5583e-01, 5.0147e-01, 5.7133e-02, 2.2204e-01, + 6.8523e-01, 5.8098e-01, 6.6898e-01, 7.5229e-01, + 4.1118e-01, 9.6511e-01, 3.3630e-01, 8.8157e-02, + 7.7854e-01, 8.9245e-03, 5.2074e-02, 
1.1367e-01, + 4.3717e-02, 3.0401e-01, 8.0245e-01, 9.3200e-01, + 5.2010e-01, 7.0607e-01, 6.6358e-01, 5.6534e-01, + 2.9449e-01, 4.2393e-01, 6.6555e-01, 5.2435e-01, + 5.1782e-01, 9.1709e-01, 6.5386e-01, 5.2853e-01, + 3.8596e-01, 1.0121e-01, 1.9989e-01, 5.6469e-01, + 3.1273e-01, 4.0266e-01, 4.6520e-01, 3.1715e-01, + 8.9972e-01, 5.0555e-01, 4.8516e-01, 6.0363e-01, + 3.4521e-01, 3.3139e-01, 9.1215e-01, 9.7622e-01, + 3.3891e-01, 8.1558e-01, 7.8187e-01, 2.0560e-01, + 2.4511e-01, 9.8261e-01, 2.0128e-01, 3.2702e-02, + 9.4882e-01, 1.7090e-01, 7.7055e-01, 5.3220e-01, + 2.4957e-01, 3.8311e-01, 8.4088e-01, 7.1890e-01, + 2.5112e-01, 8.8467e-01, 8.3607e-01, 2.5457e-01, + 1.9402e-01, 7.1837e-02, 7.5739e-01, 2.8396e-01, + 9.8643e-01, 4.1126e-01, 9.7316e-01, 6.0697e-01, + 3.6010e-01, 9.0945e-01, 2.3882e-01, 8.8880e-01, + 8.3213e-02, 4.5429e-02, 6.7233e-01, 3.7881e-01, + 3.4517e-01, 4.7978e-01, 3.5726e-01, 4.0814e-01, + 8.5284e-01, 2.5740e-01, 3.9382e-01, 5.3613e-01, + 1.9302e-01, 3.4016e-01, 9.2628e-01, 3.4977e-01, + 6.4323e-01, 1.2861e-01, 5.7655e-01, 3.9471e-01, + 9.3420e-01, 6.0524e-01, 3.0956e-01, 6.7782e-02, + 7.6172e-01, 9.0619e-01, 7.1740e-01, 4.5115e-01, + 7.3783e-01, 6.4135e-01, 8.7726e-01, 7.1576e-01, + 4.5735e-02, 8.7277e-01, 2.4097e-01, 8.2662e-01, + 3.1658e-01, 9.4355e-02, 4.3340e-01, 9.2168e-01, + 4.3243e-01, 3.9577e-01, 9.4703e-01, 1.0742e-01, + 3.0039e-01, 3.2399e-01, 9.8252e-01, 9.8475e-01, + 6.3718e-01, 6.0499e-02, 4.0091e-01, 1.2827e-02, + 6.4918e-01, 9.2528e-01, 5.5424e-01, 9.9079e-01, + 1.7604e-01, 8.9365e-01, 9.5435e-01, 9.8014e-01, + 9.3980e-01, 2.9482e-01, 9.2945e-01, 3.7062e-01, + 6.4982e-01, 3.8779e-01, 5.2359e-01, 7.5971e-01, + 1.0843e-01, 6.2776e-01, 1.3109e-01, 1.9403e-01, + 1.6832e-01, 6.5939e-01, 5.1227e-01, 2.4165e-01, + 4.2740e-01, 9.9776e-01, 1.8903e-01, 6.3996e-01, + 7.3644e-01, 6.9116e-01, 6.9621e-01, 8.8223e-01, + 6.3624e-01, 4.4212e-01, 2.8569e-01, 6.5832e-01, + 3.6150e-01, 5.9132e-01, 9.4733e-01, 4.7765e-01, + 1.3777e-01, 6.3601e-01, 4.8059e-01, 1.1066e-01, + 7.4679e-01, 4.1393e-01, 1.9740e-01, 3.6672e-01, + 9.7762e-01, 9.6576e-03, 6.7654e-02, 1.4688e-01, + 7.5310e-01, 2.5457e-01, 2.1610e-01, 8.2793e-01, + 6.4808e-01, 6.7289e-01, 3.0524e-01, 3.3837e-02, + 7.7165e-01, 8.8430e-01, 1.2025e-01, 8.5772e-01, + 7.5001e-01, 6.8274e-01, 4.8416e-01, 1.7744e-01, + 5.4461e-01, 9.7208e-01, 7.0914e-01, 9.6006e-01, + 8.6891e-01, 8.5433e-01, 4.8114e-01, 3.8694e-01, + 7.3284e-01, 5.8749e-02, 6.7228e-01, 1.9264e-01, + 7.6544e-01, 9.6580e-01, 2.0232e-01, 7.7369e-01, + 5.9159e-01, 3.2253e-01, 3.6787e-01, 3.6706e-01, + 5.3644e-01, 1.8584e-01, 5.6640e-01, 1.5932e-01, + 4.8396e-01, 6.6546e-01, 1.4953e-02, 3.8957e-01, + 7.1870e-01, 6.9860e-01, 1.2477e-02, 1.1884e-01, + 1.5944e-01, 2.8350e-01, 4.5556e-01, 9.8306e-01, + 7.7056e-01, 1.9696e-01, 2.8837e-01, 2.8225e-01, + 8.4552e-01, 7.8907e-01, 5.8619e-01, 1.3333e-01, + 7.3400e-01, 8.3669e-01, 5.2261e-01, 8.9270e-01, + 8.3146e-01, 6.5769e-01, 7.7850e-01, 5.4016e-02, + 9.8005e-01, 8.6198e-01, 9.3723e-01, 7.2314e-01, + 1.7618e-01, 1.0344e-01, 2.7176e-01, 2.2583e-01, + 6.7423e-01, 6.2909e-01, 1.9559e-01, 7.9185e-01, + 6.9060e-01, 9.0425e-01, 9.8863e-01, 7.0216e-01, + 3.3925e-01, 1.2435e-01, 4.1672e-01, 8.1660e-01, + 4.9332e-01, 2.2413e-01, 4.3592e-01, 1.8150e-02, + 4.5838e-01, 4.4798e-01, 3.8803e-01, 2.5420e-01, + 4.7052e-01, 4.8918e-01, 8.7823e-01, 6.3829e-01, + 7.6448e-01, 3.2882e-01, 9.2710e-01, 7.3649e-01, + 7.1718e-01, 6.5023e-02, 5.0637e-01, 3.7615e-01, + 5.8531e-01, 9.4102e-01, 8.7298e-01, 3.5276e-01, + 7.8707e-01, 1.3103e-01, 7.4972e-01, 
5.9018e-01, + 2.5902e-01, 7.5990e-01, 2.5845e-01, 2.9146e-01, + 8.0859e-01, 9.2187e-01, 9.8392e-01, 4.1428e-01, + 6.0507e-01, 7.3093e-01, 7.7209e-01, 5.9189e-01, + 8.9155e-02, 8.4358e-02, 1.8220e-01, 7.9619e-01, + 9.9778e-01, 3.6808e-01, 4.3958e-01, 8.7865e-01, + 8.0919e-01, 4.3391e-01, 7.7023e-01, 7.1388e-01, + 6.3545e-01, 2.0524e-01, 2.1740e-01, 5.5601e-01, + 8.1526e-01, 4.3062e-01, 1.7260e-01, 7.5233e-01, + 5.3550e-01, 6.2498e-01, 1.9647e-01, 3.6763e-01, + 5.6695e-01, 4.8247e-01, 6.6686e-02, 7.8014e-01, + 7.9998e-01, 8.8898e-01, 6.1950e-01, 2.1201e-01, + 8.6782e-01, 2.0816e-01, 7.0522e-01, 6.0373e-01, + 1.6051e-01, 2.0541e-01, 3.9825e-01, 1.2469e-01, + 4.5761e-01, 9.9483e-02, 1.7914e-01, 8.5836e-01, + 3.1367e-01, 9.3922e-03, 7.5320e-01, 2.9730e-01, + 8.1826e-01, 8.3536e-01, 2.9549e-01, 9.2550e-01, + 5.4527e-01, 1.1053e-01, 3.1837e-02, 9.9017e-01, + 2.1675e-01, 4.8404e-01, 3.0914e-01, 7.5567e-01, + 4.8679e-01, 3.5531e-01, 1.1522e-01, 1.4308e-01, + 5.4234e-01, 1.9658e-01, 6.3670e-01, 2.9158e-01, + 5.7315e-01, 3.0285e-01, 8.4406e-01, 4.4966e-01, + 5.2779e-01, 6.3025e-01, 8.2590e-01, 5.7943e-01, + 6.6301e-01, 2.6809e-01, 6.6825e-01, 5.3344e-01, + 9.4982e-01, 6.7563e-01, 2.5459e-01, 1.5420e-01, + 7.0645e-01, 6.1118e-01, 8.8223e-01, 6.3336e-02, + 4.5681e-02, 2.2886e-01, 7.2026e-02, 2.9133e-01, + 7.5751e-01, 5.3600e-01, 1.2075e-01, 6.4121e-01, + 7.9224e-01, 2.4853e-01, 5.7897e-01, 9.4747e-01, + 6.3503e-01, 3.2290e-01, 8.4654e-02, 1.5116e-01, + 9.5147e-01, 6.6872e-01, 7.4016e-01, 3.5168e-02, + 3.6986e-01, 5.0355e-01, 3.3380e-01, 7.3395e-01, + 9.2021e-01, 2.9327e-01, 2.8037e-01, 9.6799e-01, + 5.1342e-02, 4.6745e-01, 8.3115e-01, 2.8397e-01, + 9.6664e-02, 9.1992e-01, 2.0782e-01, 9.4421e-01, + 8.6048e-01, 8.9501e-01, 7.3008e-01, 6.8139e-01, + 3.5080e-01, 1.1750e-01, 1.3919e-01, 8.4794e-01, + 1.7457e-01, 3.6869e-01, 9.0754e-01, 4.4013e-01, + 5.3119e-01, 7.7519e-01, 2.8814e-01, 7.0205e-01, + 5.0302e-01, 4.0991e-01, 8.4129e-02, 1.9699e-01, + 4.7995e-01, 7.9701e-01, 6.1171e-01, 1.3859e-01, + 3.7037e-01, 6.6859e-01, 2.0693e-01, 9.6775e-01, + 9.4474e-01, 3.0895e-01, 2.9984e-01, 3.7168e-01, + 8.9396e-01, 8.1394e-01, 3.0012e-01, 3.9800e-01, + 9.1070e-01, 6.4617e-01, 5.9697e-01, 1.5884e-01, + 6.5739e-01, 3.5832e-01, 8.0335e-01, 1.2741e-01, + 2.7844e-01, 6.0172e-01, 9.4079e-01, 3.1320e-03, + 7.1355e-02, 5.3820e-01, 1.2102e-01, 8.5949e-01, + 1.0346e-01, 1.0605e-01, 4.8436e-01, 8.7899e-01, + 9.7108e-01, 9.1295e-01, 8.2074e-01, 1.1317e-01, + 7.5211e-01, 5.5915e-01, 8.4878e-01, 3.8882e-01, + 5.8209e-01, 6.5711e-01, 7.5596e-01, 3.1464e-01, + 6.4604e-01, 3.8265e-01, 7.6275e-01, 4.8880e-01, + 8.0862e-01, 5.1691e-01, 4.3715e-01, 1.3482e-01, + 4.9620e-01, 2.9957e-01, 5.6910e-01, 8.0603e-01, + 1.9580e-01, 3.1511e-01, 3.8297e-01, 1.5720e-01, + 9.2110e-01, 8.2721e-01, 5.9087e-01, 1.2430e-01, + 9.4962e-01, 3.5553e-01, 8.5066e-01, 8.3453e-01, + 9.2996e-01, 5.5614e-01, 1.6560e-02, 7.4899e-01, + 1.3600e-02, 5.4153e-01, 1.7714e-01, 5.3092e-01, + 6.5456e-01, 6.8597e-01, 6.7081e-03, 4.9925e-01, + 4.2483e-01, 2.5072e-01, 1.7189e-01, 5.2243e-01, + 5.1180e-01, 8.5323e-01, 5.2639e-01, 6.3936e-01, + 7.8345e-02, 6.8843e-01, 7.6782e-01, 1.6334e-01, + 9.6858e-01, 3.1030e-01, 3.6321e-01, 8.4541e-01, + 1.0025e-01, 2.6601e-01, 9.3782e-01, 1.4548e-01, + 1.9757e-01, 4.0964e-01, 4.1950e-01, 9.0678e-01, + 8.9695e-02, 4.0019e-01, 7.6019e-02, 7.6119e-01, + 2.4726e-01, 8.7848e-01, 3.2709e-01, 9.8345e-01, + 6.9282e-01, 3.2918e-01, 2.6562e-01, 9.5290e-01, + 2.4363e-01, 1.5816e-01, 3.7878e-01, 9.3889e-01, + 9.8521e-01, 2.0501e-01, 7.5204e-01, 
5.4633e-01, + 9.1027e-01, 4.0339e-02, 8.5769e-02, 7.9485e-01, + 7.5516e-01, 9.4249e-01, 1.2754e-02, 3.9033e-01, + 1.0070e-01, 4.7429e-01, 9.9323e-01, 6.2152e-01, + 4.7807e-01, 1.2558e-01, 4.4377e-01, 5.0130e-01, + 5.5419e-01, 5.8310e-01, 1.3335e-01, 8.5220e-01, + 2.5358e-01, 8.8959e-01, 2.8688e-01, 8.1352e-02]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.9419, 0.4539, 0.0352, ..., 0.0373, 0.9649, 0.0405]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 3.1209168434143066 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '312340', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.412656784057617} + +tensor(indices=tensor([[8486, 4726, 8446, ..., 782, 6318, 1510], + [3275, 2436, 2312, ..., 3373, 5510, 7845]]), + values=tensor([0.8773, 0.6189, 0.3397, 0.2792, 0.5532, 0.1909, 0.4547, + 0.4886, 0.3651, 0.7324, 0.6380, 0.6891, 0.1507, 0.2763, + 0.1803, 0.3857, 0.1425, 0.5078, 0.7281, 0.9418, 0.7707, + 0.4629, 0.9309, 0.4756, 0.4772, 0.2111, 0.9395, 0.0639, + 0.9425, 0.6053, 0.9513, 0.9870, 0.4490, 0.6726, 0.1921, + 0.2839, 0.7617, 0.5996, 0.4018, 0.5794, 0.2518, 0.9277, + 0.5227, 0.9310, 0.3060, 0.0685, 0.8964, 0.4736, 0.4626, + 0.8602, 0.7751, 0.1957, 0.1497, 0.8046, 0.3223, 0.0455, + 0.2189, 0.5340, 0.6977, 0.5697, 0.9779, 0.7694, 0.7964, + 0.8491, 0.3587, 0.8029, 0.9030, 0.1921, 0.6244, 0.1003, + 0.2027, 0.8699, 0.7584, 0.0175, 0.5920, 0.7662, 0.0722, + 0.1810, 0.0287, 0.5420, 0.4168, 0.7065, 0.5647, 0.6839, + 0.3744, 0.4591, 0.7684, 0.3113, 0.8572, 0.8241, 0.1600, + 0.5630, 0.2794, 0.3454, 0.8615, 0.1234, 0.1812, 0.8443, + 0.2918, 0.6765, 0.4330, 0.3292, 0.0461, 0.1405, 0.9988, + 0.0261, 0.5144, 0.1359, 0.4823, 0.6763, 0.1856, 0.9746, + 0.8681, 0.7309, 0.4762, 0.4415, 0.5514, 0.6691, 0.4900, + 0.5891, 0.2569, 0.2009, 0.7134, 0.2282, 0.9253, 0.9269, + 0.5180, 0.1618, 0.0480, 0.5343, 0.5294, 0.7604, 0.1538, + 0.7869, 0.0647, 0.4825, 0.9804, 0.4967, 0.7149, 0.8185, + 0.0363, 0.8913, 0.4509, 0.1554, 0.2675, 0.6518, 0.8266, + 0.5566, 0.8177, 0.1529, 0.7239, 0.0944, 0.9104, 0.0573, + 0.1467, 0.3105, 0.9428, 0.4181, 0.4754, 0.4494, 0.7882, + 0.0034, 0.6282, 0.3561, 0.2879, 0.1464, 0.0150, 0.4355, + 0.1848, 0.2382, 0.8515, 0.2731, 0.6979, 0.0556, 0.6121, + 0.8433, 0.6670, 0.3883, 0.4796, 0.1371, 0.6999, 0.6478, + 0.8813, 0.9457, 0.7407, 0.8677, 0.5636, 0.8766, 0.7506, + 0.0525, 0.5892, 0.7325, 0.7447, 0.7405, 0.7351, 0.0851, + 0.1451, 0.1559, 0.7630, 0.0833, 0.1724, 0.7486, 0.7191, + 0.1054, 0.4615, 0.5268, 0.6621, 0.2868, 0.5435, 0.8964, + 0.0704, 0.1276, 0.7064, 0.5343, 0.8903, 0.4756, 0.9300, + 0.9542, 0.7941, 0.4972, 0.9957, 0.8595, 0.9721, 0.3406, + 0.7851, 0.3649, 0.4583, 0.0060, 0.0626, 0.3084, 0.4744, + 0.1228, 0.5638, 0.3904, 0.1386, 0.4908, 0.2794, 0.2346, + 0.6443, 0.8278, 0.6417, 0.6409, 0.1545, 0.1528, 0.7363, + 0.1309, 0.7329, 0.3350, 0.6707, 0.8794, 0.5434, 0.5644, + 0.7635, 0.8069, 0.7891, 0.6734, 0.0448, 0.2866, 0.7509, + 0.1597, 0.4210, 0.4565, 0.8551, 0.9396, 0.7435, 0.4165, + 0.7512, 0.8760, 0.4018, 0.4502, 0.6303, 0.1775, 0.0443, + 0.1220, 0.7400, 0.9430, 0.1478, 0.5522, 0.2702, 0.8373, + 0.5730, 0.0484, 0.1999, 0.9551, 0.1628, 0.6704, 0.9164, + 0.4966, 0.8054, 0.0467, 0.0346, 0.1147, 0.7503, 0.0266, + 0.8257, 0.1038, 0.9248, 0.7558, 
0.8580, 0.1683, 0.7204, + 0.9922, 0.9789, 0.9933, 0.0403, 0.3990, 0.4670, 0.6462, + 0.8378, 0.2082, 0.7360, 0.0939, 0.2701, 0.3565, 0.9999, + 0.5566, 0.9290, 0.6209, 0.3059, 0.6144, 0.6708, 0.6992, + 0.2906, 0.8183, 0.9660, 0.4505, 0.0279, 0.7574, 0.5460, + 0.7627, 0.0517, 0.2257, 0.2491, 0.4919, 0.6120, 0.7816, + 0.9608, 0.8525, 0.9186, 0.4701, 0.0983, 0.1468, 0.3151, + 0.3717, 0.2113, 0.2058, 0.1118, 0.2706, 0.5644, 0.4075, + 0.7731, 0.7587, 0.3904, 0.2821, 0.9303, 0.4962, 0.6467, + 0.6844, 0.5449, 0.9140, 0.8972, 0.7251, 0.6238, 0.1303, + 0.7296, 0.4703, 0.9732, 0.5830, 0.0749, 0.5931, 0.2048, + 0.5155, 0.7131, 0.8709, 0.9993, 0.3476, 0.7649, 0.7265, + 0.1677, 0.7855, 0.2186, 0.1685, 0.5676, 0.2681, 0.3612, + 0.9039, 0.6929, 0.3072, 0.9476, 0.2889, 0.9150, 0.7729, + 0.9689, 0.6243, 0.5231, 0.0465, 0.2593, 0.9590, 0.7230, + 0.9169, 0.0847, 0.7148, 0.3234, 0.9075, 0.6295, 0.4591, + 0.2163, 0.1934, 0.3725, 0.9089, 0.2389, 0.6573, 0.3009, + 0.9653, 0.4990, 0.7123, 0.0023, 0.5403, 0.2828, 0.3275, + 0.2427, 0.3500, 0.4947, 0.6853, 0.3748, 0.2239, 0.4188, + 0.0750, 0.2666, 0.1376, 0.5770, 0.1376, 0.9675, 0.3235, + 0.5982, 0.0740, 0.1106, 0.2427, 0.2672, 0.7878, 0.4699, + 0.3982, 0.7169, 0.6082, 0.6106, 0.1808, 0.5430, 0.8106, + 0.4696, 0.7851, 0.1698, 0.6620, 0.6998, 0.6234, 0.6580, + 0.2444, 0.6227, 0.0228, 0.0864, 0.4330, 0.5036, 0.4692, + 0.1017, 0.8774, 0.7464, 0.5384, 0.0069, 0.1846, 0.1023, + 0.5358, 0.2996, 0.4989, 0.0486, 0.7129, 0.8741, 0.2190, + 0.9761, 0.5302, 0.2600, 0.7410, 0.9521, 0.1788, 0.9726, + 0.7707, 0.5558, 0.5248, 0.8730, 0.5472, 0.3472, 0.8481, + 0.6353, 0.0715, 0.6838, 0.5586, 0.9197, 0.5710, 0.1479, + 0.5670, 0.8804, 0.7407, 0.8765, 0.0049, 0.8353, 0.6717, + 0.4190, 0.0189, 0.2585, 0.4219, 0.9389, 0.1362, 0.8612, + 0.2380, 0.4342, 0.8129, 0.5854, 0.9943, 0.8793, 0.0146, + 0.1345, 0.4525, 0.7301, 0.0316, 0.2006, 0.2813, 0.4329, + 0.9613, 0.9612, 0.0010, 0.6831, 0.3559, 0.8258, 0.7998, + 0.3651, 0.1151, 0.7059, 0.2458, 0.6193, 0.5662, 0.9025, + 0.0347, 0.0273, 0.0199, 0.8630, 0.8943, 0.3912, 0.6761, + 0.1870, 0.1732, 0.6589, 0.3692, 0.3795, 0.2801, 0.4264, + 0.8876, 0.1762, 0.4668, 0.2744, 0.6106, 0.2516, 0.1695, + 0.1678, 0.0828, 0.0328, 0.2295, 0.5388, 0.3543, 0.3069, + 0.4769, 0.1639, 0.6480, 0.9969, 0.3866, 0.6850, 0.3621, + 0.1267, 0.5623, 0.5340, 0.5030, 0.8951, 0.8624, 0.2769, + 0.6143, 0.9194, 0.1853, 0.6715, 0.1981, 0.7061, 0.5316, + 0.9857, 0.9448, 0.3404, 0.0248, 0.3866, 0.3586, 0.2437, + 0.3285, 0.2791, 0.0235, 0.0833, 0.4568, 0.5069, 0.8050, + 0.2631, 0.7437, 0.6172, 0.9843, 0.9771, 0.5252, 0.3440, + 0.2227, 0.5376, 0.9911, 0.7795, 0.8997, 0.2111, 0.1383, + 0.0387, 0.6453, 0.6592, 0.0981, 0.5538, 0.5834, 0.0581, + 0.2213, 0.8697, 0.7679, 0.2972, 0.5857, 0.7069, 0.0120, + 0.0324, 0.3153, 0.5344, 0.2731, 0.1140, 0.0913, 0.4129, + 0.4382, 0.3355, 0.6986, 0.3519, 0.4530, 0.7610, 0.2125, + 0.8336, 0.4044, 0.0504, 0.3092, 0.1457, 0.1685, 0.3821, + 0.2839, 0.6180, 0.1963, 0.6740, 0.9125, 0.9278, 0.6727, + 0.6895, 0.9403, 0.3758, 0.5280, 0.0145, 0.1637, 0.8841, + 0.5551, 0.8505, 0.9492, 0.4099, 0.0962, 0.9935, 0.2521, + 0.2985, 0.2145, 0.9639, 0.1153, 0.0474, 0.1547, 0.1130, + 0.2837, 0.7623, 0.4293, 0.1800, 0.8865, 0.2586, 0.5658, + 0.0603, 0.5294, 0.1141, 0.0174, 0.1955, 0.2435, 0.1351, + 0.4222, 0.9386, 0.8158, 0.8009, 0.7604, 0.1158, 0.5536, + 0.9867, 0.7697, 0.5081, 0.6857, 0.5791, 0.0825, 0.8822, + 0.8240, 0.0334, 0.2147, 0.7827, 0.0827, 0.8504, 0.5036, + 0.3312, 0.9712, 0.5072, 0.1410, 0.0215, 0.0253, 0.9812, + 0.5366, 0.0607, 0.8368, 0.3722, 0.9136, 0.0050, 
0.8974, + 0.5559, 0.0713, 0.8424, 0.2626, 0.4054, 0.4728, 0.6901, + 0.9838, 0.1978, 0.0939, 0.8336, 0.7799, 0.2500, 0.4368, + 0.8492, 0.5164, 0.0318, 0.6811, 0.5102, 0.2491, 0.0664, + 0.9313, 0.9365, 0.3728, 0.7164, 0.4811, 0.5466, 0.3570, + 0.5894, 0.6908, 0.3662, 0.7998, 0.0119, 0.4316, 0.0144, + 0.7066, 0.0462, 0.0477, 0.8795, 0.8667, 0.6024, 0.8640, + 0.4672, 0.0806, 0.7001, 0.6942, 0.0404, 0.9978, 0.0739, + 0.6024, 0.5361, 0.8549, 0.6862, 0.7250, 0.5144, 0.8139, + 0.1436, 0.2229, 0.6189, 0.0984, 0.8986, 0.0945, 0.1121, + 0.5113, 0.4692, 0.3152, 0.3764, 0.1980, 0.4425, 0.5645, + 0.3490, 0.3886, 0.9671, 0.7720, 0.3276, 0.3684, 0.5336, + 0.0645, 0.8314, 0.6636, 0.6696, 0.9162, 0.7947, 0.5378, + 0.7865, 0.7367, 0.9883, 0.3154, 0.6236, 0.0672, 0.2434, + 0.1717, 0.3264, 0.6774, 0.7196, 0.8659, 0.2077, 0.2447, + 0.8917, 0.9331, 0.3707, 0.6164, 0.6967, 0.3166, 0.4921, + 0.0895, 0.7320, 0.8118, 0.2667, 0.3472, 0.2665, 0.1704, + 0.0015, 0.5065, 0.0292, 0.4929, 0.0069, 0.6129, 0.5494, + 0.3585, 0.8091, 0.9569, 0.5142, 0.4279, 0.3040, 0.8434, + 0.0213, 0.9038, 0.9944, 0.5452, 0.2369, 0.6308, 0.9276, + 0.1183, 0.9022, 0.5016, 0.2051, 0.2183, 0.2776, 0.1731, + 0.5592, 0.8204, 0.0177, 0.4581, 0.9870, 0.6908, 0.6901, + 0.4867, 0.1852, 0.3792, 0.9527, 0.5721, 0.6103, 0.3584, + 0.6472, 0.1543, 0.1283, 0.9055, 0.9418, 0.9870, 0.1653, + 0.4023, 0.7068, 0.8858, 0.2456, 0.1185, 0.9107, 0.8551, + 0.4898, 0.0397, 0.1194, 0.2290, 0.9314, 0.5515, 0.6129, + 0.5798, 0.3663, 0.2497, 0.7485, 0.2282, 0.3971, 0.9177, + 0.0481, 0.8698, 0.9932, 0.6806, 0.7915, 0.2937, 0.4010, + 0.2781, 0.2782, 0.0500, 0.8877, 0.6301, 0.5364, 0.6984, + 0.7693, 0.4280, 0.0177, 0.4879, 0.4710, 0.2885, 0.3494, + 0.9491, 0.8752, 0.6582, 0.7385, 0.3978, 0.4569, 0.7423, + 0.1089, 0.5390, 0.0122, 0.1677, 0.3374, 0.1936, 0.5366, + 0.2533, 0.8137, 0.2902, 0.7336, 0.3328, 0.4832, 0.7008, + 0.9193, 0.4004, 0.9321, 0.8566, 0.3168, 0.5445, 0.9344, + 0.6950, 0.2151, 0.0867, 0.6660, 0.2257, 0.9129, 0.6085, + 0.0369, 0.4689, 0.6008, 0.3752, 0.1448, 0.3553, 0.3370, + 0.9473, 0.5311, 0.1604, 0.5572, 0.1066, 0.0870, 0.0032, + 0.0379, 0.9761, 0.7234, 0.5948, 0.2374, 0.4637, 0.4932, + 0.5038, 0.7351, 0.1852, 0.8321, 0.8827, 0.0651, 0.8587, + 0.0553, 0.8515, 0.5418, 0.0039, 0.7695, 0.9671]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8667, 0.0784, 0.8868, ..., 0.7715, 0.2926, 0.7773]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.412656784057617 seconds + +tensor(indices=tensor([[8486, 4726, 8446, ..., 782, 6318, 1510], + [3275, 2436, 2312, ..., 3373, 5510, 7845]]), + values=tensor([0.8773, 0.6189, 0.3397, 0.2792, 0.5532, 0.1909, 0.4547, + 0.4886, 0.3651, 0.7324, 0.6380, 0.6891, 0.1507, 0.2763, + 0.1803, 0.3857, 0.1425, 0.5078, 0.7281, 0.9418, 0.7707, + 0.4629, 0.9309, 0.4756, 0.4772, 0.2111, 0.9395, 0.0639, + 0.9425, 0.6053, 0.9513, 0.9870, 0.4490, 0.6726, 0.1921, + 0.2839, 0.7617, 0.5996, 0.4018, 0.5794, 0.2518, 0.9277, + 0.5227, 0.9310, 0.3060, 0.0685, 0.8964, 0.4736, 0.4626, + 0.8602, 0.7751, 0.1957, 0.1497, 0.8046, 0.3223, 0.0455, + 0.2189, 0.5340, 0.6977, 0.5697, 0.9779, 0.7694, 0.7964, + 0.8491, 0.3587, 0.8029, 0.9030, 0.1921, 0.6244, 0.1003, + 0.2027, 0.8699, 0.7584, 0.0175, 0.5920, 0.7662, 0.0722, + 0.1810, 0.0287, 0.5420, 0.4168, 0.7065, 0.5647, 0.6839, + 0.3744, 0.4591, 0.7684, 0.3113, 0.8572, 0.8241, 0.1600, + 0.5630, 0.2794, 0.3454, 0.8615, 0.1234, 0.1812, 0.8443, + 0.2918, 0.6765, 0.4330, 0.3292, 0.0461, 0.1405, 0.9988, 
+ 0.0261, 0.5144, 0.1359, 0.4823, 0.6763, 0.1856, 0.9746, + 0.8681, 0.7309, 0.4762, 0.4415, 0.5514, 0.6691, 0.4900, + 0.5891, 0.2569, 0.2009, 0.7134, 0.2282, 0.9253, 0.9269, + 0.5180, 0.1618, 0.0480, 0.5343, 0.5294, 0.7604, 0.1538, + 0.7869, 0.0647, 0.4825, 0.9804, 0.4967, 0.7149, 0.8185, + 0.0363, 0.8913, 0.4509, 0.1554, 0.2675, 0.6518, 0.8266, + 0.5566, 0.8177, 0.1529, 0.7239, 0.0944, 0.9104, 0.0573, + 0.1467, 0.3105, 0.9428, 0.4181, 0.4754, 0.4494, 0.7882, + 0.0034, 0.6282, 0.3561, 0.2879, 0.1464, 0.0150, 0.4355, + 0.1848, 0.2382, 0.8515, 0.2731, 0.6979, 0.0556, 0.6121, + 0.8433, 0.6670, 0.3883, 0.4796, 0.1371, 0.6999, 0.6478, + 0.8813, 0.9457, 0.7407, 0.8677, 0.5636, 0.8766, 0.7506, + 0.0525, 0.5892, 0.7325, 0.7447, 0.7405, 0.7351, 0.0851, + 0.1451, 0.1559, 0.7630, 0.0833, 0.1724, 0.7486, 0.7191, + 0.1054, 0.4615, 0.5268, 0.6621, 0.2868, 0.5435, 0.8964, + 0.0704, 0.1276, 0.7064, 0.5343, 0.8903, 0.4756, 0.9300, + 0.9542, 0.7941, 0.4972, 0.9957, 0.8595, 0.9721, 0.3406, + 0.7851, 0.3649, 0.4583, 0.0060, 0.0626, 0.3084, 0.4744, + 0.1228, 0.5638, 0.3904, 0.1386, 0.4908, 0.2794, 0.2346, + 0.6443, 0.8278, 0.6417, 0.6409, 0.1545, 0.1528, 0.7363, + 0.1309, 0.7329, 0.3350, 0.6707, 0.8794, 0.5434, 0.5644, + 0.7635, 0.8069, 0.7891, 0.6734, 0.0448, 0.2866, 0.7509, + 0.1597, 0.4210, 0.4565, 0.8551, 0.9396, 0.7435, 0.4165, + 0.7512, 0.8760, 0.4018, 0.4502, 0.6303, 0.1775, 0.0443, + 0.1220, 0.7400, 0.9430, 0.1478, 0.5522, 0.2702, 0.8373, + 0.5730, 0.0484, 0.1999, 0.9551, 0.1628, 0.6704, 0.9164, + 0.4966, 0.8054, 0.0467, 0.0346, 0.1147, 0.7503, 0.0266, + 0.8257, 0.1038, 0.9248, 0.7558, 0.8580, 0.1683, 0.7204, + 0.9922, 0.9789, 0.9933, 0.0403, 0.3990, 0.4670, 0.6462, + 0.8378, 0.2082, 0.7360, 0.0939, 0.2701, 0.3565, 0.9999, + 0.5566, 0.9290, 0.6209, 0.3059, 0.6144, 0.6708, 0.6992, + 0.2906, 0.8183, 0.9660, 0.4505, 0.0279, 0.7574, 0.5460, + 0.7627, 0.0517, 0.2257, 0.2491, 0.4919, 0.6120, 0.7816, + 0.9608, 0.8525, 0.9186, 0.4701, 0.0983, 0.1468, 0.3151, + 0.3717, 0.2113, 0.2058, 0.1118, 0.2706, 0.5644, 0.4075, + 0.7731, 0.7587, 0.3904, 0.2821, 0.9303, 0.4962, 0.6467, + 0.6844, 0.5449, 0.9140, 0.8972, 0.7251, 0.6238, 0.1303, + 0.7296, 0.4703, 0.9732, 0.5830, 0.0749, 0.5931, 0.2048, + 0.5155, 0.7131, 0.8709, 0.9993, 0.3476, 0.7649, 0.7265, + 0.1677, 0.7855, 0.2186, 0.1685, 0.5676, 0.2681, 0.3612, + 0.9039, 0.6929, 0.3072, 0.9476, 0.2889, 0.9150, 0.7729, + 0.9689, 0.6243, 0.5231, 0.0465, 0.2593, 0.9590, 0.7230, + 0.9169, 0.0847, 0.7148, 0.3234, 0.9075, 0.6295, 0.4591, + 0.2163, 0.1934, 0.3725, 0.9089, 0.2389, 0.6573, 0.3009, + 0.9653, 0.4990, 0.7123, 0.0023, 0.5403, 0.2828, 0.3275, + 0.2427, 0.3500, 0.4947, 0.6853, 0.3748, 0.2239, 0.4188, + 0.0750, 0.2666, 0.1376, 0.5770, 0.1376, 0.9675, 0.3235, + 0.5982, 0.0740, 0.1106, 0.2427, 0.2672, 0.7878, 0.4699, + 0.3982, 0.7169, 0.6082, 0.6106, 0.1808, 0.5430, 0.8106, + 0.4696, 0.7851, 0.1698, 0.6620, 0.6998, 0.6234, 0.6580, + 0.2444, 0.6227, 0.0228, 0.0864, 0.4330, 0.5036, 0.4692, + 0.1017, 0.8774, 0.7464, 0.5384, 0.0069, 0.1846, 0.1023, + 0.5358, 0.2996, 0.4989, 0.0486, 0.7129, 0.8741, 0.2190, + 0.9761, 0.5302, 0.2600, 0.7410, 0.9521, 0.1788, 0.9726, + 0.7707, 0.5558, 0.5248, 0.8730, 0.5472, 0.3472, 0.8481, + 0.6353, 0.0715, 0.6838, 0.5586, 0.9197, 0.5710, 0.1479, + 0.5670, 0.8804, 0.7407, 0.8765, 0.0049, 0.8353, 0.6717, + 0.4190, 0.0189, 0.2585, 0.4219, 0.9389, 0.1362, 0.8612, + 0.2380, 0.4342, 0.8129, 0.5854, 0.9943, 0.8793, 0.0146, + 0.1345, 0.4525, 0.7301, 0.0316, 0.2006, 0.2813, 0.4329, + 0.9613, 0.9612, 0.0010, 0.6831, 0.3559, 0.8258, 0.7998, + 0.3651, 
0.1151, 0.7059, 0.2458, 0.6193, 0.5662, 0.9025, + 0.0347, 0.0273, 0.0199, 0.8630, 0.8943, 0.3912, 0.6761, + 0.1870, 0.1732, 0.6589, 0.3692, 0.3795, 0.2801, 0.4264, + 0.8876, 0.1762, 0.4668, 0.2744, 0.6106, 0.2516, 0.1695, + 0.1678, 0.0828, 0.0328, 0.2295, 0.5388, 0.3543, 0.3069, + 0.4769, 0.1639, 0.6480, 0.9969, 0.3866, 0.6850, 0.3621, + 0.1267, 0.5623, 0.5340, 0.5030, 0.8951, 0.8624, 0.2769, + 0.6143, 0.9194, 0.1853, 0.6715, 0.1981, 0.7061, 0.5316, + 0.9857, 0.9448, 0.3404, 0.0248, 0.3866, 0.3586, 0.2437, + 0.3285, 0.2791, 0.0235, 0.0833, 0.4568, 0.5069, 0.8050, + 0.2631, 0.7437, 0.6172, 0.9843, 0.9771, 0.5252, 0.3440, + 0.2227, 0.5376, 0.9911, 0.7795, 0.8997, 0.2111, 0.1383, + 0.0387, 0.6453, 0.6592, 0.0981, 0.5538, 0.5834, 0.0581, + 0.2213, 0.8697, 0.7679, 0.2972, 0.5857, 0.7069, 0.0120, + 0.0324, 0.3153, 0.5344, 0.2731, 0.1140, 0.0913, 0.4129, + 0.4382, 0.3355, 0.6986, 0.3519, 0.4530, 0.7610, 0.2125, + 0.8336, 0.4044, 0.0504, 0.3092, 0.1457, 0.1685, 0.3821, + 0.2839, 0.6180, 0.1963, 0.6740, 0.9125, 0.9278, 0.6727, + 0.6895, 0.9403, 0.3758, 0.5280, 0.0145, 0.1637, 0.8841, + 0.5551, 0.8505, 0.9492, 0.4099, 0.0962, 0.9935, 0.2521, + 0.2985, 0.2145, 0.9639, 0.1153, 0.0474, 0.1547, 0.1130, + 0.2837, 0.7623, 0.4293, 0.1800, 0.8865, 0.2586, 0.5658, + 0.0603, 0.5294, 0.1141, 0.0174, 0.1955, 0.2435, 0.1351, + 0.4222, 0.9386, 0.8158, 0.8009, 0.7604, 0.1158, 0.5536, + 0.9867, 0.7697, 0.5081, 0.6857, 0.5791, 0.0825, 0.8822, + 0.8240, 0.0334, 0.2147, 0.7827, 0.0827, 0.8504, 0.5036, + 0.3312, 0.9712, 0.5072, 0.1410, 0.0215, 0.0253, 0.9812, + 0.5366, 0.0607, 0.8368, 0.3722, 0.9136, 0.0050, 0.8974, + 0.5559, 0.0713, 0.8424, 0.2626, 0.4054, 0.4728, 0.6901, + 0.9838, 0.1978, 0.0939, 0.8336, 0.7799, 0.2500, 0.4368, + 0.8492, 0.5164, 0.0318, 0.6811, 0.5102, 0.2491, 0.0664, + 0.9313, 0.9365, 0.3728, 0.7164, 0.4811, 0.5466, 0.3570, + 0.5894, 0.6908, 0.3662, 0.7998, 0.0119, 0.4316, 0.0144, + 0.7066, 0.0462, 0.0477, 0.8795, 0.8667, 0.6024, 0.8640, + 0.4672, 0.0806, 0.7001, 0.6942, 0.0404, 0.9978, 0.0739, + 0.6024, 0.5361, 0.8549, 0.6862, 0.7250, 0.5144, 0.8139, + 0.1436, 0.2229, 0.6189, 0.0984, 0.8986, 0.0945, 0.1121, + 0.5113, 0.4692, 0.3152, 0.3764, 0.1980, 0.4425, 0.5645, + 0.3490, 0.3886, 0.9671, 0.7720, 0.3276, 0.3684, 0.5336, + 0.0645, 0.8314, 0.6636, 0.6696, 0.9162, 0.7947, 0.5378, + 0.7865, 0.7367, 0.9883, 0.3154, 0.6236, 0.0672, 0.2434, + 0.1717, 0.3264, 0.6774, 0.7196, 0.8659, 0.2077, 0.2447, + 0.8917, 0.9331, 0.3707, 0.6164, 0.6967, 0.3166, 0.4921, + 0.0895, 0.7320, 0.8118, 0.2667, 0.3472, 0.2665, 0.1704, + 0.0015, 0.5065, 0.0292, 0.4929, 0.0069, 0.6129, 0.5494, + 0.3585, 0.8091, 0.9569, 0.5142, 0.4279, 0.3040, 0.8434, + 0.0213, 0.9038, 0.9944, 0.5452, 0.2369, 0.6308, 0.9276, + 0.1183, 0.9022, 0.5016, 0.2051, 0.2183, 0.2776, 0.1731, + 0.5592, 0.8204, 0.0177, 0.4581, 0.9870, 0.6908, 0.6901, + 0.4867, 0.1852, 0.3792, 0.9527, 0.5721, 0.6103, 0.3584, + 0.6472, 0.1543, 0.1283, 0.9055, 0.9418, 0.9870, 0.1653, + 0.4023, 0.7068, 0.8858, 0.2456, 0.1185, 0.9107, 0.8551, + 0.4898, 0.0397, 0.1194, 0.2290, 0.9314, 0.5515, 0.6129, + 0.5798, 0.3663, 0.2497, 0.7485, 0.2282, 0.3971, 0.9177, + 0.0481, 0.8698, 0.9932, 0.6806, 0.7915, 0.2937, 0.4010, + 0.2781, 0.2782, 0.0500, 0.8877, 0.6301, 0.5364, 0.6984, + 0.7693, 0.4280, 0.0177, 0.4879, 0.4710, 0.2885, 0.3494, + 0.9491, 0.8752, 0.6582, 0.7385, 0.3978, 0.4569, 0.7423, + 0.1089, 0.5390, 0.0122, 0.1677, 0.3374, 0.1936, 0.5366, + 0.2533, 0.8137, 0.2902, 0.7336, 0.3328, 0.4832, 0.7008, + 0.9193, 0.4004, 0.9321, 0.8566, 0.3168, 0.5445, 0.9344, + 0.6950, 0.2151, 0.0867, 
0.6660, 0.2257, 0.9129, 0.6085, + 0.0369, 0.4689, 0.6008, 0.3752, 0.1448, 0.3553, 0.3370, + 0.9473, 0.5311, 0.1604, 0.5572, 0.1066, 0.0870, 0.0032, + 0.0379, 0.9761, 0.7234, 0.5948, 0.2374, 0.4637, 0.4932, + 0.5038, 0.7351, 0.1852, 0.8321, 0.8827, 0.0651, 0.8587, + 0.0553, 0.8515, 0.5418, 0.0039, 0.7695, 0.9671]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.8667, 0.0784, 0.8868, ..., 0.7715, 0.2926, 0.7773]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.412656784057617 seconds + +[39.7, 39.08, 38.97, 39.01, 39.06, 39.35, 44.13, 38.93, 39.19, 39.01] +[65.24] +12.960696697235107 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 312340, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.412656784057617, 'TIME_S_1KI': 0.033337570545103466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 845.5558525276183, 'W': 65.24} +[39.7, 39.08, 38.97, 39.01, 39.06, 39.35, 44.13, 38.93, 39.19, 39.01, 39.55, 38.91, 38.98, 44.43, 39.69, 39.36, 40.23, 39.33, 39.56, 39.35] +717.0149999999999 +35.85074999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 312340, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.412656784057617, 'TIME_S_1KI': 0.033337570545103466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 845.5558525276183, 'W': 65.24, 'J_1KI': 2.707164796464168, 'W_1KI': 0.2088749439713133, 'W_D': 29.389250000000004, 'J_D': 380.90515540921695, 'W_D_1KI': 0.09409377601331884, 'J_D_1KI': 0.0003012543254572544} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..b05f320 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 71900, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.501707077026367, "TIME_S_1KI": 0.14605990371385769, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 863.9445636963844, "W": 65.61, "J_1KI": 12.01591882748796, "W_1KI": 0.9125173852573019, "W_D": 30.356749999999998, "J_D": 399.73402124661203, "W_D_1KI": 0.4222079276773296, "J_D_1KI": 0.0058721547660268375} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..c7d537a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.023619651794433594} + +tensor(indices=tensor([[5417, 3845, 5670, ..., 2866, 679, 790], + [5862, 6781, 6278, ..., 7209, 
7880, 7698]]), + values=tensor([0.9670, 0.6160, 0.3963, ..., 0.4186, 0.4604, 0.5048]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.1797, 0.0311, 0.3796, ..., 0.0322, 0.6483, 0.7594]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.023619651794433594 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '44454', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.491841793060303} + +tensor(indices=tensor([[2055, 9902, 7559, ..., 9995, 7741, 6782], + [8790, 7625, 1643, ..., 7631, 7179, 8248]]), + values=tensor([0.9279, 0.9675, 0.0532, ..., 0.4084, 0.0586, 0.8507]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.6339, 0.9803, 0.9804, ..., 0.5531, 0.4506, 0.0201]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 6.491841793060303 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '71900', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.501707077026367} + +tensor(indices=tensor([[4352, 336, 4975, ..., 1389, 7589, 4947], + [ 849, 2766, 8890, ..., 6275, 7570, 3005]]), + values=tensor([0.2566, 0.4345, 0.9026, ..., 0.8113, 0.7052, 0.1122]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.9605, 0.4727, 0.3499, ..., 0.3831, 0.1033, 0.8012]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.501707077026367 seconds + +tensor(indices=tensor([[4352, 336, 4975, ..., 1389, 7589, 4947], + [ 849, 2766, 8890, ..., 6275, 7570, 3005]]), + values=tensor([0.2566, 0.4345, 0.9026, ..., 0.8113, 0.7052, 0.1122]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.9605, 0.4727, 0.3499, ..., 0.3831, 0.1033, 0.8012]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.501707077026367 seconds + +[40.37, 39.12, 38.99, 38.96, 39.7, 38.9, 39.09, 39.33, 39.4, 38.93] +[65.61] +13.167879343032837 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 71900, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.501707077026367, 'TIME_S_1KI': 0.14605990371385769, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 863.9445636963844, 'W': 65.61} +[40.37, 39.12, 38.99, 38.96, 39.7, 38.9, 39.09, 39.33, 39.4, 38.93, 39.54, 40.46, 38.96, 38.79, 38.85, 38.79, 38.9, 38.76, 38.93, 39.43] +705.065 +35.25325 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 71900, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.501707077026367, 'TIME_S_1KI': 0.14605990371385769, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 863.9445636963844, 'W': 65.61, 'J_1KI': 12.01591882748796, 'W_1KI': 
0.9125173852573019, 'W_D': 30.356749999999998, 'J_D': 399.73402124661203, 'W_D_1KI': 0.4222079276773296, 'J_D_1KI': 0.0058721547660268375} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..04a5360 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 73.01553392410278, "TIME_S_1KI": 730.1553392410278, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5544.837553710938, "W": 67.78, "J_1KI": 55448.37553710938, "W_1KI": 677.8, "W_D": 32.3575, "J_D": 2647.050474243164, "W_D_1KI": 323.575, "J_D_1KI": 3235.75} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..858c65d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 73.01553392410278} + +tensor(indices=tensor([[184352, 27273, 371245, ..., 84150, 5886, 167054], + [336823, 295745, 103391, ..., 23515, 406024, 203872]]), + values=tensor([0.8913, 0.5766, 0.9084, ..., 0.5889, 0.6165, 0.2021]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.2949, 0.5872, 0.2995, ..., 0.8158, 0.7944, 0.4000]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 73.01553392410278 seconds + +tensor(indices=tensor([[184352, 27273, 371245, ..., 84150, 5886, 167054], + [336823, 295745, 103391, ..., 23515, 406024, 203872]]), + values=tensor([0.8913, 0.5766, 0.9084, ..., 0.5889, 0.6165, 0.2021]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.2949, 0.5872, 0.2995, ..., 0.8158, 0.7944, 0.4000]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 73.01553392410278 seconds + +[39.51, 39.26, 39.54, 38.82, 40.89, 39.31, 39.68, 39.22, 39.29, 40.28] +[67.78] +81.806396484375 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 73.01553392410278, 'TIME_S_1KI': 730.1553392410278, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5544.837553710938, 'W': 67.78} +[39.51, 39.26, 39.54, 38.82, 40.89, 39.31, 39.68, 39.22, 39.29, 40.28, 40.16, 38.95, 39.06, 38.95, 39.11, 39.69, 39.0, 39.08, 39.12, 39.01] +708.45 +35.4225 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 
'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 73.01553392410278, 'TIME_S_1KI': 730.1553392410278, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5544.837553710938, 'W': 67.78, 'J_1KI': 55448.37553710938, 'W_1KI': 677.8, 'W_D': 32.3575, 'J_D': 2647.050474243164, 'W_D_1KI': 323.575, 'J_D_1KI': 3235.75} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..0181305 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 140, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.507567644119263, "TIME_S_1KI": 75.05405460085187, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1009.9299406337738, "W": 75.32, "J_1KI": 7213.7852902412415, "W_1KI": 537.9999999999999, "W_D": 39.69199999999999, "J_D": 532.2110887365341, "W_D_1KI": 283.5142857142857, "J_D_1KI": 2025.1020408163263} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..b218011 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 7.471493721008301} + +tensor(indices=tensor([[246966, 194081, 3306, ..., 354600, 293111, 328907], + [281454, 240901, 396011, ..., 394777, 390163, 64284]]), + values=tensor([0.4514, 0.9237, 0.3210, ..., 0.7148, 0.2524, 0.6679]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8363, 0.7892, 0.5191, ..., 0.4258, 0.8581, 0.4388]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 7.471493721008301 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '140', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.507567644119263} + +tensor(indices=tensor([[ 69916, 341711, 197894, ..., 444595, 492995, 61729], + [365651, 218811, 140466, ..., 164137, 157888, 101700]]), + values=tensor([0.3653, 0.4289, 0.5269, ..., 0.9138, 0.8819, 0.8743]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8509, 0.8716, 0.0223, ..., 0.6319, 0.2958, 0.4678]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.507567644119263 seconds + +tensor(indices=tensor([[ 69916, 341711, 197894, ..., 444595, 492995, 61729], + [365651, 218811, 140466, ..., 164137, 157888, 101700]]), + values=tensor([0.3653, 
0.4289, 0.5269, ..., 0.9138, 0.8819, 0.8743]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8509, 0.8716, 0.0223, ..., 0.6319, 0.2958, 0.4678]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.507567644119263 seconds + +[39.78, 39.87, 39.6, 39.92, 39.17, 39.85, 39.14, 39.23, 39.3, 39.38] +[75.32] +13.408522844314575 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 140, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.507567644119263, 'TIME_S_1KI': 75.05405460085187, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1009.9299406337738, 'W': 75.32} +[39.78, 39.87, 39.6, 39.92, 39.17, 39.85, 39.14, 39.23, 39.3, 39.38, 40.24, 40.22, 39.11, 39.49, 39.72, 39.61, 39.53, 39.58, 39.77, 39.5] +712.56 +35.628 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 140, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.507567644119263, 'TIME_S_1KI': 75.05405460085187, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1009.9299406337738, 'W': 75.32, 'J_1KI': 7213.7852902412415, 'W_1KI': 537.9999999999999, 'W_D': 39.69199999999999, 'J_D': 532.2110887365341, 'W_D_1KI': 283.5142857142857, 'J_D_1KI': 2025.1020408163263} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..a0f2bb9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 36.87877178192139, "TIME_S_1KI": 368.78771781921387, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2942.692539224625, "W": 69.42, "J_1KI": 29426.92539224625, "W_1KI": 694.2, "W_D": 33.81825, "J_D": 1433.5452602223158, "W_D_1KI": 338.1825, "J_D_1KI": 3381.8250000000003} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..fa2c60b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 36.87877178192139} + +tensor(indices=tensor([[ 99422, 404806, 1886, ..., 262219, 320515, 387594], + [ 51935, 241442, 308580, ..., 442401, 409914, 24314]]), + values=tensor([0.5161, 0.6889, 0.4462, ..., 0.2206, 0.5337, 0.7569]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3999, 0.9505, 0.1648, ..., 0.8326, 0.8063, 0.3115]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 36.87877178192139 seconds + +tensor(indices=tensor([[ 99422, 404806, 1886, ..., 262219, 320515, 387594], + [ 51935, 241442, 308580, ..., 442401, 409914, 24314]]), + values=tensor([0.5161, 0.6889, 0.4462, ..., 0.2206, 0.5337, 0.7569]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3999, 0.9505, 0.1648, ..., 0.8326, 0.8063, 0.3115]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 36.87877178192139 seconds + +[39.69, 39.19, 39.29, 39.63, 39.42, 39.62, 38.97, 38.95, 38.96, 38.94] +[69.42] +42.38969373703003 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 36.87877178192139, 'TIME_S_1KI': 368.78771781921387, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2942.692539224625, 'W': 69.42} +[39.69, 39.19, 39.29, 39.63, 39.42, 39.62, 38.97, 38.95, 38.96, 38.94, 40.63, 39.02, 39.19, 39.7, 39.26, 38.87, 39.12, 38.87, 44.94, 38.81] +712.0350000000001 +35.60175 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 36.87877178192139, 'TIME_S_1KI': 368.78771781921387, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2942.692539224625, 'W': 69.42, 'J_1KI': 29426.92539224625, 'W_1KI': 694.2, 'W_D': 33.81825, 'J_D': 1433.5452602223158, 'W_D_1KI': 338.1825, 'J_D_1KI': 3381.8250000000003} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..86e66b2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1378, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.675403356552124, "TIME_S_1KI": 7.7470271092540814, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1044.3064742207528, "W": 79.93, "J_1KI": 757.8421438467001, "W_1KI": 58.00435413642961, "W_D": 44.167750000000005, "J_D": 577.0632713219524, "W_D_1KI": 32.052068214804066, "J_D_1KI": 23.259846309727187} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..1e1071a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.7617764472961426} + +tensor(indices=tensor([[28513, 22407, 9308, ..., 48323, 15946, 993], + [42344, 35125, 30882, 
..., 17931, 44821, 41900]]), + values=tensor([0.9713, 0.5862, 0.0310, ..., 0.1906, 0.8883, 0.5226]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.1524, 0.0276, 0.2215, ..., 0.6976, 0.9198, 0.1561]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.7617764472961426 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '1378', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.675403356552124} + +tensor(indices=tensor([[28179, 240, 12343, ..., 13400, 45000, 27957], + [26528, 38367, 7654, ..., 28074, 29384, 6378]]), + values=tensor([0.0289, 0.7065, 0.0752, ..., 0.1874, 0.5513, 0.0987]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.7164, 0.2002, 0.6608, ..., 0.9571, 0.8635, 0.7390]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.675403356552124 seconds + +tensor(indices=tensor([[28179, 240, 12343, ..., 13400, 45000, 27957], + [26528, 38367, 7654, ..., 28074, 29384, 6378]]), + values=tensor([0.0289, 0.7065, 0.0752, ..., 0.1874, 0.5513, 0.0987]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.7164, 0.2002, 0.6608, ..., 0.9571, 0.8635, 0.7390]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.675403356552124 seconds + +[40.77, 39.04, 39.42, 39.29, 39.39, 39.63, 40.19, 39.47, 39.16, 39.16] +[79.93] +13.065263032913208 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1378, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.675403356552124, 'TIME_S_1KI': 7.7470271092540814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1044.3064742207528, 'W': 79.93} +[40.77, 39.04, 39.42, 39.29, 39.39, 39.63, 40.19, 39.47, 39.16, 39.16, 39.81, 39.0, 39.26, 39.59, 39.31, 39.04, 39.11, 44.52, 39.51, 40.89] +715.245 +35.76225 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1378, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.675403356552124, 'TIME_S_1KI': 7.7470271092540814, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1044.3064742207528, 'W': 79.93, 'J_1KI': 757.8421438467001, 'W_1KI': 58.00435413642961, 'W_D': 44.167750000000005, 'J_D': 577.0632713219524, 'W_D_1KI': 32.052068214804066, 'J_D_1KI': 23.259846309727187} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..a959e5a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 146, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 
10.559770584106445, "TIME_S_1KI": 72.327195781551, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 926.0878416824341, "W": 68.79, "J_1KI": 6343.067408783796, "W_1KI": 471.1643835616439, "W_D": 33.29075, "J_D": 448.1779156198502, "W_D_1KI": 228.01883561643837, "J_D_1KI": 1561.772846687934} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..06367fa --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.146227121353149} + +tensor(indices=tensor([[29064, 39386, 25792, ..., 5788, 2892, 31643], + [18569, 25381, 25329, ..., 15749, 11272, 25293]]), + values=tensor([0.4725, 0.0288, 0.7373, ..., 0.5640, 0.1337, 0.9726]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8453, 0.9050, 0.9227, ..., 0.5835, 0.0393, 0.6785]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 7.146227121353149 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '146', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.559770584106445} + +tensor(indices=tensor([[26807, 35182, 12934, ..., 46916, 36953, 40831], + [19094, 7947, 16914, ..., 41943, 9379, 46978]]), + values=tensor([0.9841, 0.5757, 0.0754, ..., 0.1802, 0.9499, 0.9976]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7242, 0.6400, 0.7106, ..., 0.9563, 0.9315, 0.3958]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.559770584106445 seconds + +tensor(indices=tensor([[26807, 35182, 12934, ..., 46916, 36953, 40831], + [19094, 7947, 16914, ..., 41943, 9379, 46978]]), + values=tensor([0.9841, 0.5757, 0.0754, ..., 0.1802, 0.9499, 0.9976]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7242, 0.6400, 0.7106, ..., 0.9563, 0.9315, 0.3958]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.559770584106445 seconds + +[41.69, 39.77, 39.14, 39.19, 39.05, 39.06, 39.53, 39.31, 39.21, 40.29] +[68.79] +13.462535858154297 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 146, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.559770584106445, 'TIME_S_1KI': 72.327195781551, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 926.0878416824341, 'W': 68.79} +[41.69, 39.77, 39.14, 39.19, 39.05, 39.06, 39.53, 39.31, 39.21, 40.29, 40.92, 39.58, 39.66, 39.41, 39.02, 39.58, 38.99, 38.95, 39.5, 39.17] +709.985 +35.49925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 146, 'MATRIX_TYPE': 
'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.559770584106445, 'TIME_S_1KI': 72.327195781551, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 926.0878416824341, 'W': 68.79, 'J_1KI': 6343.067408783796, 'W_1KI': 471.1643835616439, 'W_D': 33.29075, 'J_D': 448.1779156198502, 'W_D_1KI': 228.01883561643837, 'J_D_1KI': 1561.772846687934} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..3150087 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 71.66315722465515, "TIME_S_1KI": 716.6315722465515, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5322.044746816158, "W": 66.85, "J_1KI": 53220.44746816158, "W_1KI": 668.5, "W_D": 31.25099999999999, "J_D": 2487.946452995538, "W_D_1KI": 312.5099999999999, "J_D_1KI": 3125.099999999999} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..b1dfd05 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 71.66315722465515} + +tensor(indices=tensor([[36134, 36414, 37734, ..., 15090, 14990, 31094], + [48856, 38174, 15384, ..., 30461, 28813, 7396]]), + values=tensor([0.7583, 0.4685, 0.1084, ..., 0.0798, 0.5390, 0.4794]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6193, 0.1280, 0.2137, ..., 0.7520, 0.6469, 0.4593]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 71.66315722465515 seconds + +tensor(indices=tensor([[36134, 36414, 37734, ..., 15090, 14990, 31094], + [48856, 38174, 15384, ..., 30461, 28813, 7396]]), + values=tensor([0.7583, 0.4685, 0.1084, ..., 0.0798, 0.5390, 0.4794]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.6193, 0.1280, 0.2137, ..., 0.7520, 0.6469, 0.4593]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 71.66315722465515 seconds + +[40.05, 39.33, 39.76, 39.06, 39.21, 38.98, 39.46, 39.1, 39.09, 44.52] +[66.85] +79.61173892021179 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 71.66315722465515, 'TIME_S_1KI': 716.6315722465515, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5322.044746816158, 'W': 66.85} +[40.05, 
39.33, 39.76, 39.06, 39.21, 38.98, 39.46, 39.1, 39.09, 44.52, 42.04, 39.03, 39.31, 39.08, 39.01, 39.09, 40.88, 39.42, 39.38, 38.97] +711.98 +35.599000000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 71.66315722465515, 'TIME_S_1KI': 716.6315722465515, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5322.044746816158, 'W': 66.85, 'J_1KI': 53220.44746816158, 'W_1KI': 668.5, 'W_D': 31.25099999999999, 'J_D': 2487.946452995538, 'W_D_1KI': 312.5099999999999, 'J_D_1KI': 3125.099999999999} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..2422969 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 355.13236808776855, "TIME_S_1KI": 3551.3236808776855, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 25656.213647043704, "W": 66.07, "J_1KI": 256562.13647043705, "W_1KI": 660.6999999999999, "W_D": 30.431499999999993, "J_D": 11817.119200847743, "W_D_1KI": 304.31499999999994, "J_D_1KI": 3043.149999999999} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..249675e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 355.13236808776855} + +tensor(indices=tensor([[42584, 40613, 39964, ..., 9126, 6044, 39631], + [37298, 34810, 24782, ..., 15464, 6352, 6904]]), + values=tensor([0.1842, 0.4253, 0.6593, ..., 0.8566, 0.9106, 0.6422]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.3023, 0.1232, 0.7820, ..., 0.9185, 0.9340, 0.8353]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 355.13236808776855 seconds + +tensor(indices=tensor([[42584, 40613, 39964, ..., 9126, 6044, 39631], + [37298, 34810, 24782, ..., 15464, 6352, 6904]]), + values=tensor([0.1842, 0.4253, 0.6593, ..., 0.8566, 0.9106, 0.6422]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.3023, 0.1232, 0.7820, ..., 0.9185, 0.9340, 0.8353]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 355.13236808776855 seconds + +[41.0, 39.64, 39.28, 39.86, 39.27, 39.13, 39.27, 39.44, 39.22, 39.53] +[66.07] +388.31865668296814 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 
'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 355.13236808776855, 'TIME_S_1KI': 3551.3236808776855, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25656.213647043704, 'W': 66.07} +[41.0, 39.64, 39.28, 39.86, 39.27, 39.13, 39.27, 39.44, 39.22, 39.53, 40.94, 39.64, 39.25, 40.05, 40.51, 39.19, 39.2, 39.74, 39.5, 39.69] +712.77 +35.6385 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 355.13236808776855, 'TIME_S_1KI': 3551.3236808776855, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25656.213647043704, 'W': 66.07, 'J_1KI': 256562.13647043705, 'W_1KI': 660.6999999999999, 'W_D': 30.431499999999993, 'J_D': 11817.119200847743, 'W_D_1KI': 304.31499999999994, 'J_D_1KI': 3043.149999999999} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..8124190 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 11948, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.746899366378784, "TIME_S_1KI": 0.8994726620671898, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1077.3100168919564, "W": 83.43, "J_1KI": 90.16655648576803, "W_1KI": 6.982758620689656, "W_D": 47.62225000000001, "J_D": 614.9338002149464, "W_D_1KI": 3.9857926012721796, "J_D_1KI": 0.33359496160630897} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..3b61f12 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0945589542388916} + +tensor(indices=tensor([[44083, 19663, 15505, ..., 31758, 19619, 9839], + [ 2392, 17751, 22906, ..., 39619, 45504, 39798]]), + values=tensor([0.1026, 0.2340, 0.1786, ..., 0.3464, 0.2068, 0.0660]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.7212, 0.3655, 0.2398, ..., 0.3437, 0.2907, 0.4484]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.0945589542388916 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '11104', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.75810718536377} + +tensor(indices=tensor([[36906, 31899, 26970, ..., 24989, 43321, 32773], + [40447, 46524, 13351, ..., 17589, 30931, 31055]]), + 
values=tensor([0.0154, 0.4002, 0.2951, ..., 0.0270, 0.2887, 0.7602]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.1290, 0.2354, 0.2709, ..., 0.5877, 0.8476, 0.0393]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.75810718536377 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '11948', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.746899366378784} + +tensor(indices=tensor([[18628, 23578, 10520, ..., 35719, 25844, 25696], + [20520, 11853, 28918, ..., 22777, 17498, 33547]]), + values=tensor([0.3178, 0.1450, 0.5759, ..., 0.5288, 0.3040, 0.7406]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6567, 0.4329, 0.7495, ..., 0.3210, 0.5277, 0.3178]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.746899366378784 seconds + +tensor(indices=tensor([[18628, 23578, 10520, ..., 35719, 25844, 25696], + [20520, 11853, 28918, ..., 22777, 17498, 33547]]), + values=tensor([0.3178, 0.1450, 0.5759, ..., 0.5288, 0.3040, 0.7406]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6567, 0.4329, 0.7495, ..., 0.3210, 0.5277, 0.3178]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.746899366378784 seconds + +[40.6, 39.55, 40.49, 39.26, 39.26, 39.17, 39.49, 39.29, 39.19, 39.17] +[83.43] +12.912741422653198 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 11948, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.746899366378784, 'TIME_S_1KI': 0.8994726620671898, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1077.3100168919564, 'W': 83.43} +[40.6, 39.55, 40.49, 39.26, 39.26, 39.17, 39.49, 39.29, 39.19, 39.17, 39.94, 39.22, 39.4, 40.02, 39.39, 39.35, 39.43, 39.28, 44.84, 39.34] +716.155 +35.80775 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 11948, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.746899366378784, 'TIME_S_1KI': 0.8994726620671898, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1077.3100168919564, 'W': 83.43, 'J_1KI': 90.16655648576803, 'W_1KI': 6.982758620689656, 'W_D': 47.62225000000001, 'J_D': 614.9338002149464, 'W_D_1KI': 3.9857926012721796, 'J_D_1KI': 0.33359496160630897} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..aad8fe3 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2814, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.459291934967041, "TIME_S_1KI": 3.716877020244151, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1091.9855526709557, "W": 83.19, "J_1KI": 388.05456740261394, "W_1KI": 29.562899786780385, "W_D": 47.68025, "J_D": 625.8702265625597, "W_D_1KI": 16.943941009239516, "J_D_1KI": 6.021300998308286} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..d28b141 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.3919186592102051} + +tensor(indices=tensor([[16406, 44380, 36201, ..., 23739, 2201, 47411], + [27618, 39626, 26525, ..., 32900, 32567, 46792]]), + values=tensor([0.1028, 0.3074, 0.9393, ..., 0.6132, 0.0765, 0.7505]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.0142, 0.4843, 0.3933, ..., 0.2047, 0.5100, 0.2180]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.3919186592102051 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '2679', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.99514102935791} + +tensor(indices=tensor([[28504, 34691, 14024, ..., 10159, 9923, 5772], + [20073, 49294, 28362, ..., 16823, 33412, 35335]]), + values=tensor([0.1481, 0.7934, 0.7377, ..., 0.7808, 0.9485, 0.0316]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.6297, 0.6875, 0.0456, ..., 0.6261, 0.7729, 0.0920]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 9.99514102935791 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '2814', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.459291934967041} + +tensor(indices=tensor([[14181, 42260, 35503, ..., 9025, 42273, 3073], + [38068, 10757, 12703, ..., 33985, 31322, 24898]]), + values=tensor([0.7037, 0.1721, 0.3715, ..., 0.2420, 0.2243, 0.4783]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.2184, 0.2581, 0.7407, ..., 0.3743, 0.1951, 0.4311]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.459291934967041 seconds + +tensor(indices=tensor([[14181, 42260, 35503, ..., 9025, 42273, 3073], + [38068, 10757, 12703, ..., 33985, 31322, 24898]]), + values=tensor([0.7037, 0.1721, 0.3715, ..., 0.2420, 0.2243, 0.4783]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.2184, 0.2581, 0.7407, ..., 0.3743, 0.1951, 0.4311]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 
+NNZ: 125000 +Density: 5e-05 +Time: 10.459291934967041 seconds + +[39.71, 39.14, 39.72, 39.08, 39.57, 40.7, 39.05, 38.99, 39.47, 39.07] +[83.19] +13.126404047012329 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2814, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.459291934967041, 'TIME_S_1KI': 3.716877020244151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1091.9855526709557, 'W': 83.19} +[39.71, 39.14, 39.72, 39.08, 39.57, 40.7, 39.05, 38.99, 39.47, 39.07, 40.48, 39.4, 40.04, 39.15, 39.32, 39.07, 39.54, 39.62, 39.12, 39.17] +710.1949999999999 +35.50975 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2814, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.459291934967041, 'TIME_S_1KI': 3.716877020244151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1091.9855526709557, 'W': 83.19, 'J_1KI': 388.05456740261394, 'W_1KI': 29.562899786780385, 'W_D': 47.68025, 'J_D': 625.8702265625597, 'W_D_1KI': 16.943941009239516, 'J_D_1KI': 6.021300998308286} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..4bda082 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 139874, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.543035745620728, "TIME_S_1KI": 0.07537523589531098, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.6892381954193, "W": 65.64, "J_1KI": 6.117571801731696, "W_1KI": 0.46927949440210476, "W_D": 30.264499999999998, "J_D": 394.5308797892332, "W_D_1KI": 0.2163697327594835, "J_D_1KI": 0.00154689029240233} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..65a6260 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0167543888092041} + +tensor(indices=tensor([[1446, 336, 2529, ..., 3095, 841, 3719], + [ 300, 712, 4850, ..., 1235, 1173, 241]]), + values=tensor([0.1031, 0.7739, 0.2857, ..., 0.5084, 0.0676, 0.2962]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.1208, 0.8396, 0.2157, ..., 0.6915, 0.8692, 0.6810]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.0167543888092041 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '62670', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 
5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.704458236694336} + +tensor(indices=tensor([[ 625, 85, 1700, ..., 56, 2579, 4588], + [4280, 680, 3277, ..., 3162, 3484, 2484]]), + values=tensor([0.9415, 0.1452, 0.7666, ..., 0.6160, 0.4008, 0.5396]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.3094, 0.0633, 0.7693, ..., 0.0601, 0.7208, 0.0514]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 4.704458236694336 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '139874', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.543035745620728} + +tensor(indices=tensor([[ 383, 4459, 4404, ..., 2299, 2210, 4989], + [1505, 88, 4681, ..., 2363, 2811, 2507]]), + values=tensor([0.7270, 0.5661, 0.7745, ..., 0.4986, 0.6510, 0.9410]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8571, 0.3363, 0.7834, ..., 0.2844, 0.5966, 0.1946]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.543035745620728 seconds + +tensor(indices=tensor([[ 383, 4459, 4404, ..., 2299, 2210, 4989], + [1505, 88, 4681, ..., 2363, 2811, 2507]]), + values=tensor([0.7270, 0.5661, 0.7745, ..., 0.4986, 0.6510, 0.9410]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8571, 0.3363, 0.7834, ..., 0.2844, 0.5966, 0.1946]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.543035745620728 seconds + +[39.6, 38.9, 38.99, 39.22, 38.93, 39.36, 39.5, 39.44, 39.65, 39.2] +[65.64] +13.036094427108765 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 139874, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.543035745620728, 'TIME_S_1KI': 0.07537523589531098, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.6892381954193, 'W': 65.64} +[39.6, 38.9, 38.99, 39.22, 38.93, 39.36, 39.5, 39.44, 39.65, 39.2, 41.04, 38.82, 39.28, 40.35, 39.03, 39.41, 38.92, 39.2, 39.18, 38.82] +707.51 +35.3755 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 139874, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.543035745620728, 'TIME_S_1KI': 0.07537523589531098, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.6892381954193, 'W': 65.64, 'J_1KI': 6.117571801731696, 'W_1KI': 0.46927949440210476, 'W_D': 30.264499999999998, 'J_D': 394.5308797892332, 'W_D_1KI': 0.2163697327594835, 'J_D_1KI': 0.00154689029240233} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..fb07638 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 14657, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 
5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.53448748588562, "TIME_S_1KI": 0.7187342215927965, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 844.0133416080474, "W": 65.47, "J_1KI": 57.584317500719614, "W_1KI": 4.466807668690728, "W_D": 29.9, "J_D": 385.45897226333614, "W_D_1KI": 2.0399808964999657, "J_D_1KI": 0.13918133973527771} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..9952092 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.0806429386138916} + +tensor(indices=tensor([[4019, 2928, 4826, ..., 4630, 613, 1745], + [4301, 217, 1228, ..., 2000, 2175, 1496]]), + values=tensor([0.2626, 0.1178, 0.9634, ..., 0.6924, 0.8167, 0.1102]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5144, 0.8651, 0.1400, ..., 0.0248, 0.7520, 0.2559]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.0806429386138916 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '13020', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.326949834823608} + +tensor(indices=tensor([[3549, 2111, 1171, ..., 491, 2594, 3925], + [4998, 1055, 1247, ..., 1724, 1520, 4493]]), + values=tensor([0.0980, 0.6141, 0.4990, ..., 0.4100, 0.8779, 0.2226]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.6811, 0.8182, 0.9534, ..., 0.0529, 0.9885, 0.9893]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.326949834823608 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '14657', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.53448748588562} + +tensor(indices=tensor([[2540, 1887, 856, ..., 1690, 2411, 230], + [4411, 2048, 1595, ..., 974, 2827, 4327]]), + values=tensor([0.9347, 0.8443, 0.4315, ..., 0.2045, 0.5018, 0.6629]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.8431, 0.0756, 0.0074, ..., 0.4793, 0.0902, 0.6189]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.53448748588562 seconds + +tensor(indices=tensor([[2540, 1887, 856, ..., 1690, 2411, 230], + [4411, 2048, 1595, ..., 974, 2827, 4327]]), + values=tensor([0.9347, 0.8443, 0.4315, ..., 0.2045, 0.5018, 0.6629]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.8431, 0.0756, 0.0074, ..., 0.4793, 0.0902, 0.6189]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.53448748588562 seconds + +[44.96, 38.91, 40.66, 39.15, 39.03, 39.44, 38.88, 39.55, 39.51, 39.03] +[65.47] +12.89160442352295 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 14657, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.53448748588562, 'TIME_S_1KI': 0.7187342215927965, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 844.0133416080474, 'W': 65.47} +[44.96, 38.91, 40.66, 39.15, 39.03, 39.44, 38.88, 39.55, 39.51, 39.03, 40.42, 39.23, 39.53, 38.93, 41.13, 38.97, 38.98, 38.86, 38.94, 38.99] +711.4 +35.57 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 14657, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.53448748588562, 'TIME_S_1KI': 0.7187342215927965, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 844.0133416080474, 'W': 65.47, 'J_1KI': 57.584317500719614, 'W_1KI': 4.466807668690728, 'W_D': 29.9, 'J_D': 385.45897226333614, 'W_D_1KI': 2.0399808964999657, 'J_D_1KI': 0.13918133973527771} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..5df8c9f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1470, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.361714363098145, "TIME_S_1KI": 7.048785281019145, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.7562479400634, "W": 66.08, "J_1KI": 582.14710744222, "W_1KI": 44.95238095238095, "W_D": 30.643749999999997, "J_D": 396.8459522217512, "W_D_1KI": 20.84608843537415, "J_D_1KI": 14.18101254107085} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..9700510 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.713935136795044} + +tensor(indices=tensor([[1691, 483, 1761, ..., 1827, 3310, 60], + [2575, 2582, 106, ..., 1476, 3658, 90]]), + values=tensor([0.1400, 0.5567, 0.1430, ..., 0.5134, 0.4001, 0.0611]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.3034, 0.4308, 0.4118, ..., 0.6201, 0.7940, 0.1251]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.713935136795044 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '1470', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], 
"MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.361714363098145} + +tensor(indices=tensor([[1388, 308, 380, ..., 2498, 1363, 1543], + [2272, 3594, 758, ..., 2606, 4082, 2052]]), + values=tensor([0.9402, 0.9581, 0.0717, ..., 0.5254, 0.4871, 0.9034]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.2765, 0.3265, 0.7214, ..., 0.7116, 0.9888, 0.9239]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.361714363098145 seconds + +tensor(indices=tensor([[1388, 308, 380, ..., 2498, 1363, 1543], + [2272, 3594, 758, ..., 2606, 4082, 2052]]), + values=tensor([0.9402, 0.9581, 0.0717, ..., 0.5254, 0.4871, 0.9034]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.2765, 0.3265, 0.7214, ..., 0.7116, 0.9888, 0.9239]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.361714363098145 seconds + +[40.27, 39.35, 39.31, 39.09, 38.86, 39.49, 39.71, 38.84, 39.33, 39.23] +[66.08] +12.950306415557861 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1470, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.361714363098145, 'TIME_S_1KI': 7.048785281019145, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.7562479400634, 'W': 66.08} +[40.27, 39.35, 39.31, 39.09, 38.86, 39.49, 39.71, 38.84, 39.33, 39.23, 40.66, 39.5, 39.24, 39.82, 39.8, 38.87, 40.16, 38.79, 38.9, 39.17] +708.725 +35.43625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1470, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.361714363098145, 'TIME_S_1KI': 7.048785281019145, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.7562479400634, 'W': 66.08, 'J_1KI': 582.14710744222, 'W_1KI': 44.95238095238095, 'W_D': 30.643749999999997, 'J_D': 396.8459522217512, 'W_D_1KI': 20.84608843537415, 'J_D_1KI': 14.18101254107085} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..47fdc5c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 295, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.507515907287598, "TIME_S_1KI": 35.618697990805416, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 856.6737474942207, "W": 65.77, "J_1KI": 2903.978805065155, "W_1KI": 222.94915254237287, "W_D": 29.91149999999999, "J_D": 389.6061547540425, "W_D_1KI": 101.39491525423726, "J_D_1KI": 343.71157713300767} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..169ce51 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 
'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 3.5592312812805176} + +tensor(indices=tensor([[2672, 4919, 3934, ..., 2971, 1240, 827], + [1050, 663, 3695, ..., 849, 396, 4400]]), + values=tensor([0.0645, 0.4164, 0.3742, ..., 0.3809, 0.9538, 0.1445]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.6349, 0.0986, 0.9429, ..., 0.2413, 0.4297, 0.2189]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 3.5592312812805176 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '295', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.507515907287598} + +tensor(indices=tensor([[ 880, 4456, 4085, ..., 4653, 278, 1316], + [3814, 153, 2565, ..., 4431, 2535, 4555]]), + values=tensor([0.5894, 0.3194, 0.4005, ..., 0.3183, 0.9287, 0.0316]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.1877, 0.6295, 0.4286, ..., 0.6650, 0.8896, 0.7370]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.507515907287598 seconds + +tensor(indices=tensor([[ 880, 4456, 4085, ..., 4653, 278, 1316], + [3814, 153, 2565, ..., 4431, 2535, 4555]]), + values=tensor([0.5894, 0.3194, 0.4005, ..., 0.3183, 0.9287, 0.0316]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.1877, 0.6295, 0.4286, ..., 0.6650, 0.8896, 0.7370]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.507515907287598 seconds + +[40.64, 39.08, 38.93, 38.91, 38.9, 38.88, 38.9, 38.77, 44.86, 38.97] +[65.77] +13.025296449661255 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 295, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.507515907287598, 'TIME_S_1KI': 35.618697990805416, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.6737474942207, 'W': 65.77} +[40.64, 39.08, 38.93, 38.91, 38.9, 38.88, 38.9, 38.77, 44.86, 38.97, 40.85, 39.25, 39.03, 38.89, 39.11, 44.09, 39.37, 40.9, 39.68, 38.78] +717.1700000000001 +35.85850000000001 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 295, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.507515907287598, 'TIME_S_1KI': 35.618697990805416, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.6737474942207, 'W': 65.77, 'J_1KI': 2903.978805065155, 'W_1KI': 222.94915254237287, 'W_D': 29.91149999999999, 'J_D': 389.6061547540425, 'W_D_1KI': 101.39491525423726, 'J_D_1KI': 343.71157713300767} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..9cb92be --- /dev/null +++ 
b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 148, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.459958553314209, "TIME_S_1KI": 70.67539563050141, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 892.777959036827, "W": 66.3, "J_1KI": 6032.283507005588, "W_1KI": 447.97297297297297, "W_D": 30.933999999999997, "J_D": 416.54891983175276, "W_D_1KI": 209.0135135135135, "J_D_1KI": 1412.253469685902} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..b15a1ba --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 7.0644145011901855} + +tensor(indices=tensor([[ 268, 1491, 2866, ..., 873, 3989, 3717], + [ 893, 4059, 142, ..., 4391, 1294, 925]]), + values=tensor([0.6272, 0.8766, 0.3100, ..., 0.6736, 0.3160, 0.2463]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.6334, 0.6522, 0.8887, ..., 0.0300, 0.5655, 0.9062]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 7.0644145011901855 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '148', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.459958553314209} + +tensor(indices=tensor([[3694, 1974, 2141, ..., 587, 944, 3649], + [4134, 4356, 3640, ..., 4487, 4701, 1491]]), + values=tensor([0.3600, 0.7079, 0.6292, ..., 0.1252, 0.8073, 0.0459]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7022, 0.3170, 0.9828, ..., 0.6866, 0.2729, 0.9453]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.459958553314209 seconds + +tensor(indices=tensor([[3694, 1974, 2141, ..., 587, 944, 3649], + [4134, 4356, 3640, ..., 4487, 4701, 1491]]), + values=tensor([0.3600, 0.7079, 0.6292, ..., 0.1252, 0.8073, 0.0459]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7022, 0.3170, 0.9828, ..., 0.6866, 0.2729, 0.9453]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.459958553314209 seconds + +[39.74, 39.46, 39.47, 39.5, 39.08, 39.36, 38.89, 40.18, 39.04, 39.11] +[66.3] +13.465730905532837 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 148, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.459958553314209, 'TIME_S_1KI': 70.67539563050141, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 892.777959036827, 
'W': 66.3} +[39.74, 39.46, 39.47, 39.5, 39.08, 39.36, 38.89, 40.18, 39.04, 39.11, 39.98, 39.41, 38.95, 39.41, 39.81, 39.02, 38.88, 38.88, 39.0, 39.13] +707.32 +35.366 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 148, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.459958553314209, 'TIME_S_1KI': 70.67539563050141, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 892.777959036827, 'W': 66.3, 'J_1KI': 6032.283507005588, 'W_1KI': 447.97297297297297, 'W_D': 30.933999999999997, 'J_D': 416.54891983175276, 'W_D_1KI': 209.0135135135135, 'J_D_1KI': 1412.253469685902} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..4b4104f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 14.073740243911743, "TIME_S_1KI": 140.73740243911743, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1156.7227613759042, "W": 66.29, "J_1KI": 11567.227613759042, "W_1KI": 662.9000000000001, "W_D": 30.898500000000006, "J_D": 539.1612346111536, "W_D_1KI": 308.98500000000007, "J_D_1KI": 3089.850000000001} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..1a861e5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 14.073740243911743} + +tensor(indices=tensor([[ 284, 4235, 4780, ..., 2444, 1199, 171], + [4136, 2203, 2823, ..., 1842, 3886, 667]]), + values=tensor([0.2132, 0.7686, 0.4029, ..., 0.5659, 0.2948, 0.4967]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9542, 0.3576, 0.8822, ..., 0.2904, 0.8109, 0.1258]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 14.073740243911743 seconds + +tensor(indices=tensor([[ 284, 4235, 4780, ..., 2444, 1199, 171], + [4136, 2203, 2823, ..., 1842, 3886, 667]]), + values=tensor([0.2132, 0.7686, 0.4029, ..., 0.5659, 0.2948, 0.4967]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9542, 0.3576, 0.8822, ..., 0.2904, 0.8109, 0.1258]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 14.073740243911743 seconds + +[39.47, 39.29, 38.99, 39.2, 38.93, 38.78, 39.68, 38.97, 38.84, 39.09] +[66.29] +17.44943070411682 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 
'MATRIX_DENSITY': 0.2, 'TIME_S': 14.073740243911743, 'TIME_S_1KI': 140.73740243911743, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1156.7227613759042, 'W': 66.29} +[39.47, 39.29, 38.99, 39.2, 38.93, 38.78, 39.68, 38.97, 38.84, 39.09, 39.87, 38.76, 38.77, 44.23, 39.39, 38.95, 38.87, 38.72, 38.89, 38.71] +707.83 +35.3915 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 14.073740243911743, 'TIME_S_1KI': 140.73740243911743, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1156.7227613759042, 'W': 66.29, 'J_1KI': 11567.227613759042, 'W_1KI': 662.9000000000001, 'W_D': 30.898500000000006, 'J_D': 539.1612346111536, 'W_D_1KI': 308.98500000000007, 'J_D_1KI': 3089.850000000001} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..9d14780 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 21.17181086540222, "TIME_S_1KI": 211.71810865402222, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1663.21891358614, "W": 65.79, "J_1KI": 16632.189135861398, "W_1KI": 657.9000000000001, "W_D": 30.19550000000001, "J_D": 763.3641390057804, "W_D_1KI": 301.9550000000001, "J_D_1KI": 3019.550000000001} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..e08568e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 21.17181086540222} + +tensor(indices=tensor([[3439, 2956, 3539, ..., 1872, 4021, 1645], + [3326, 4605, 3433, ..., 2434, 1491, 4572]]), + values=tensor([0.0470, 0.1627, 0.0525, ..., 0.2916, 0.6395, 0.6868]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.0367, 0.4831, 0.0964, ..., 0.6700, 0.6537, 0.8356]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 21.17181086540222 seconds + +tensor(indices=tensor([[3439, 2956, 3539, ..., 1872, 4021, 1645], + [3326, 4605, 3433, ..., 2434, 1491, 4572]]), + values=tensor([0.0470, 0.1627, 0.0525, ..., 0.2916, 0.6395, 0.6868]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.0367, 0.4831, 0.0964, ..., 0.6700, 0.6537, 0.8356]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 21.17181086540222 seconds + +[39.68, 39.75, 38.83, 39.0, 39.73, 38.95, 38.78, 38.82, 38.97, 38.73] +[65.79] +25.280725240707397 +{'CPU': 'Epyc 7313P', 'CORES': 16, 
'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 21.17181086540222, 'TIME_S_1KI': 211.71810865402222, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1663.21891358614, 'W': 65.79} +[39.68, 39.75, 38.83, 39.0, 39.73, 38.95, 38.78, 38.82, 38.97, 38.73, 40.23, 39.46, 39.32, 45.1, 39.23, 40.18, 39.03, 38.9, 38.97, 39.1] +711.89 +35.5945 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 21.17181086540222, 'TIME_S_1KI': 211.71810865402222, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1663.21891358614, 'W': 65.79, 'J_1KI': 16632.189135861398, 'W_1KI': 657.9000000000001, 'W_D': 30.19550000000001, 'J_D': 763.3641390057804, 'W_D_1KI': 301.9550000000001, 'J_D_1KI': 3019.550000000001} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..d9eb99d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 28.186740159988403, "TIME_S_1KI": 281.86740159988403, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2142.953163208961, "W": 66.07, "J_1KI": 21429.53163208961, "W_1KI": 660.6999999999999, "W_D": 30.907999999999994, "J_D": 1002.4882150516509, "W_D_1KI": 309.08, "J_D_1KI": 3090.7999999999997} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..57dd3f2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 28.186740159988403} + +tensor(indices=tensor([[ 634, 3633, 4145, ..., 3076, 3720, 2647], + [ 785, 2304, 3033, ..., 3530, 1632, 680]]), + values=tensor([0.6164, 0.0229, 0.5841, ..., 0.8039, 0.5429, 0.4047]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.0640, 0.0060, 0.5177, ..., 0.9071, 0.2872, 0.2326]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 28.186740159988403 seconds + +tensor(indices=tensor([[ 634, 3633, 4145, ..., 3076, 3720, 2647], + [ 785, 2304, 3033, ..., 3530, 1632, 680]]), + values=tensor([0.6164, 0.0229, 0.5841, ..., 0.8039, 0.5429, 0.4047]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.0640, 0.0060, 0.5177, ..., 0.9071, 0.2872, 0.2326]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 
28.186740159988403 seconds + +[39.54, 39.05, 38.95, 39.06, 38.9, 39.01, 39.39, 38.88, 38.92, 38.93] +[66.07] +32.43458700180054 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 28.186740159988403, 'TIME_S_1KI': 281.86740159988403, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2142.953163208961, 'W': 66.07} +[39.54, 39.05, 38.95, 39.06, 38.9, 39.01, 39.39, 38.88, 38.92, 38.93, 39.72, 39.11, 39.13, 39.64, 39.09, 39.05, 38.9, 38.75, 38.87, 38.89] +703.24 +35.162 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 28.186740159988403, 'TIME_S_1KI': 281.86740159988403, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2142.953163208961, 'W': 66.07, 'J_1KI': 21429.53163208961, 'W_1KI': 660.6999999999999, 'W_D': 30.907999999999994, 'J_D': 1002.4882150516509, 'W_D_1KI': 309.08, 'J_D_1KI': 3090.7999999999997} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..dec2fd9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 35.183215856552124, "TIME_S_1KI": 351.83215856552124, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2652.7753567814825, "W": 65.85, "J_1KI": 26527.753567814827, "W_1KI": 658.5, "W_D": 30.555749999999996, "J_D": 1230.942150462806, "W_D_1KI": 305.5575, "J_D_1KI": 3055.5750000000003} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..0976adc --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 35.183215856552124} + +tensor(indices=tensor([[ 751, 4408, 3127, ..., 4152, 3834, 3001], + [ 478, 2081, 567, ..., 3148, 1935, 2978]]), + values=tensor([0.8140, 0.5278, 0.2593, ..., 0.8892, 0.4390, 0.9114]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.9021, 0.8660, 0.6641, ..., 0.3317, 0.9796, 0.4533]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 35.183215856552124 seconds + +tensor(indices=tensor([[ 751, 4408, 3127, ..., 4152, 3834, 3001], + [ 478, 2081, 567, ..., 3148, 1935, 2978]]), + values=tensor([0.8140, 0.5278, 0.2593, ..., 0.8892, 0.4390, 0.9114]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.9021, 0.8660, 0.6641, ..., 0.3317, 0.9796, 0.4533]) 
+Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 35.183215856552124 seconds + +[39.65, 39.3, 39.02, 39.6, 38.85, 38.88, 38.86, 38.77, 38.89, 38.87] +[65.85] +40.285123109817505 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 35.183215856552124, 'TIME_S_1KI': 351.83215856552124, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2652.7753567814825, 'W': 65.85} +[39.65, 39.3, 39.02, 39.6, 38.85, 38.88, 38.86, 38.77, 38.89, 38.87, 39.85, 39.43, 39.21, 40.45, 39.58, 38.96, 38.98, 38.92, 39.31, 39.38] +705.885 +35.29425 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 35.183215856552124, 'TIME_S_1KI': 351.83215856552124, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2652.7753567814825, 'W': 65.85, 'J_1KI': 26527.753567814827, 'W_1KI': 658.5, 'W_D': 30.555749999999996, 'J_D': 1230.942150462806, 'W_D_1KI': 305.5575, 'J_D_1KI': 3055.5750000000003} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..a4695e6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 914292, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.542762517929077, "TIME_S_1KI": 0.011531067227897736, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 848.832134795189, "W": 65.62, "J_1KI": 0.9284037646563559, "W_1KI": 0.0717713815717517, "W_D": 30.134749999999997, "J_D": 389.8101824751496, "W_D_1KI": 0.03295965621486352, "J_D_1KI": 3.604937614554598e-05} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..8d321bc --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,429 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.009185791015625} + +tensor(indices=tensor([[3437, 67, 2520, 2998, 1590, 1679, 720, 1477, 1076, + 1584, 1814, 2934, 3107, 2541, 1020, 630, 538, 4039, + 1532, 1123, 1619, 545, 3326, 633, 1349, 2935, 2957, + 916, 2261, 4895, 2521, 1802, 2988, 82, 3798, 4464, + 2208, 3895, 977, 4940, 729, 4546, 4937, 4309, 3388, + 4404, 2107, 76, 1460, 1789, 4415, 1150, 4604, 4561, + 2653, 3226, 3350, 3116, 1267, 2188, 2420, 3682, 4974, + 2259, 195, 3092, 39, 1401, 114, 2797, 405, 3071, + 3904, 519, 1927, 2180, 4316, 3215, 974, 3050, 3887, + 2248, 779, 1995, 4307, 2639, 4635, 323, 843, 4087, + 3259, 1376, 23, 4133, 734, 4940, 162, 1656, 947, + 
3375, 3687, 4432, 1131, 831, 3923, 3033, 3999, 3444, + 2332, 949, 1392, 2186, 4424, 46, 4260, 119, 3031, + 4037, 1936, 2082, 2028, 2282, 2853, 2954, 1032, 4880, + 81, 4323, 2318, 2572, 3011, 2453, 2336, 2692, 2118, + 890, 2101, 3951, 346, 2731, 4647, 3129, 4526, 1856, + 1216, 3945, 1554, 4538, 1134, 1694, 4130, 2650, 3759, + 3769, 4728, 4880, 3790, 2066, 4179, 4395, 2927, 3340, + 1528, 2262, 896, 4280, 281, 3669, 654, 4981, 3498, + 2778, 94, 4912, 3913, 1978, 3722, 2181, 2875, 4637, + 3014, 2088, 1099, 3189, 607, 614, 592, 370, 1445, + 74, 2849, 1856, 1710, 118, 3681, 3254, 178, 1648, + 1436, 1576, 4614, 3240, 1544, 4472, 609, 3580, 3606, + 4947, 926, 903, 855, 671, 621, 4520, 1936, 1496, + 4842, 666, 3845, 4729, 4308, 1227, 1782, 2211, 3976, + 3634, 3351, 1053, 3292, 4471, 1765, 57, 1891, 3483, + 399, 4424, 3694, 467, 4583, 2012, 3234, 116, 1649, + 1329, 4051, 3064, 3205, 999, 1898, 4232], + [ 626, 3536, 4145, 3578, 1856, 4116, 3141, 3759, 4834, + 300, 591, 4693, 4552, 1653, 4106, 4651, 75, 4137, + 1031, 342, 4351, 520, 4533, 1924, 3690, 836, 2042, + 467, 3178, 3495, 1709, 234, 1680, 11, 897, 4678, + 4491, 1409, 1825, 1305, 3133, 368, 792, 874, 1099, + 603, 3466, 1572, 1552, 1979, 281, 4627, 2996, 4906, + 2065, 4096, 4915, 4761, 2735, 3220, 4909, 1605, 880, + 4557, 1527, 3623, 4974, 3290, 2548, 2991, 3127, 4906, + 2856, 2706, 750, 2879, 3519, 2239, 4357, 2566, 97, + 1697, 810, 4311, 3123, 157, 2166, 278, 4194, 2621, + 3202, 3283, 4227, 1600, 3827, 2532, 2929, 3159, 4944, + 3412, 1506, 3578, 3453, 3336, 4231, 100, 2559, 3349, + 1303, 4350, 433, 4217, 4192, 3335, 1558, 4175, 4873, + 306, 4761, 4234, 2853, 3752, 4527, 872, 4010, 3224, + 4003, 3268, 248, 1244, 2057, 4274, 2008, 3624, 129, + 1756, 1730, 2228, 4154, 4934, 1590, 4934, 4541, 3674, + 4549, 62, 2418, 548, 1593, 769, 476, 3649, 2918, + 421, 2308, 4806, 3837, 2031, 562, 2683, 3560, 4268, + 1834, 253, 4191, 3006, 4006, 4014, 3804, 3297, 714, + 2126, 1816, 2400, 3819, 3593, 4102, 968, 3340, 4958, + 3255, 3686, 2483, 377, 4484, 4025, 1205, 1912, 2227, + 3961, 3969, 814, 4816, 3682, 173, 2352, 2293, 1614, + 4996, 1725, 4839, 3586, 1052, 1012, 3911, 221, 4179, + 3302, 2937, 2137, 2179, 728, 253, 1950, 3136, 2640, + 1713, 4039, 215, 1530, 3990, 2339, 2409, 3917, 2938, + 1928, 1502, 2778, 1999, 191, 2237, 763, 3795, 1064, + 895, 4922, 1062, 4601, 4263, 3149, 3090, 4549, 2537, + 774, 2012, 4987, 4000, 4936, 4433, 3356]]), + values=tensor([0.4790, 0.5594, 0.8624, 0.7817, 0.9280, 0.9119, 0.8258, + 0.1121, 0.4519, 0.3824, 0.9548, 0.3008, 0.3626, 0.7845, + 0.0232, 0.8736, 0.8330, 0.5979, 0.7295, 0.8590, 0.2617, + 0.6932, 0.7587, 0.3040, 0.6265, 0.3639, 0.2884, 0.1448, + 0.7988, 0.8969, 0.2431, 0.0030, 0.9668, 0.6510, 0.3668, + 0.0169, 0.9163, 0.8117, 0.7982, 0.6497, 0.7809, 0.1506, + 0.7650, 0.5185, 0.5162, 0.6893, 0.9209, 0.3850, 0.6624, + 0.8496, 0.4202, 0.9480, 0.5121, 0.7044, 0.1776, 0.9420, + 0.1261, 0.6348, 0.5557, 0.2954, 0.2355, 0.6152, 0.5042, + 0.0439, 0.7777, 0.6916, 0.4291, 0.7181, 0.8759, 0.7161, + 0.0413, 0.9033, 0.7995, 0.0616, 0.7459, 0.3076, 0.6247, + 0.9109, 0.6372, 0.9044, 0.4993, 0.8342, 0.1309, 0.2963, + 0.7562, 0.6853, 0.8542, 0.1498, 0.0309, 0.7298, 0.7052, + 0.7384, 0.4484, 0.8864, 0.3081, 0.2477, 0.6522, 0.1601, + 0.2753, 0.1065, 0.8532, 0.7688, 0.8170, 0.5612, 0.7453, + 0.9080, 0.2651, 0.1739, 0.4176, 0.6589, 0.0077, 0.2448, + 0.0478, 0.6153, 0.1247, 0.5504, 0.4505, 0.8293, 0.3921, + 0.1073, 0.9424, 0.8800, 0.2285, 0.4855, 0.1070, 0.8109, + 0.5396, 0.5594, 0.6482, 0.3579, 0.1624, 0.0287, 0.6771, + 0.4460, 0.2854, 
0.1009, 0.4933, 0.1936, 0.2157, 0.9925, + 0.0597, 0.6440, 0.9615, 0.7006, 0.6220, 0.5556, 0.8114, + 0.0486, 0.6124, 0.9462, 0.0804, 0.7132, 0.5539, 0.2129, + 0.6645, 0.4129, 0.4763, 0.7049, 0.4705, 0.3509, 0.9212, + 0.8346, 0.9177, 0.0573, 0.6091, 0.1336, 0.8434, 0.7360, + 0.8480, 0.7023, 0.5048, 0.8230, 0.2187, 0.0692, 0.4695, + 0.8685, 0.9518, 0.7383, 0.5503, 0.5588, 0.9202, 0.1703, + 0.4474, 0.4430, 0.1864, 0.7538, 0.2506, 0.6083, 0.3930, + 0.4066, 0.6124, 0.3294, 0.5319, 0.9026, 0.1904, 0.1274, + 0.3332, 0.2326, 0.6253, 0.5757, 0.6877, 0.8861, 0.2366, + 0.0111, 0.6808, 0.3935, 0.4335, 0.5535, 0.9245, 0.4129, + 0.3722, 0.3991, 0.4494, 0.3078, 0.9552, 0.0160, 0.7350, + 0.8739, 0.3969, 0.5562, 0.7971, 0.1370, 0.0842, 0.8502, + 0.4089, 0.6161, 0.8557, 0.0168, 0.0304, 0.7853, 0.5745, + 0.0807, 0.7872, 0.0583, 0.1144, 0.4095, 0.8149, 0.3911, + 0.1477, 0.7150, 0.0831, 0.3108, 0.4178, 0.1897, 0.7335, + 0.4986, 0.4545, 0.9927, 0.5508, 0.2496]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.0506, 0.3122, 0.7838, ..., 0.9680, 0.2671, 0.0532]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.009185791015625 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '114306', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.3127236366271973} + +tensor(indices=tensor([[4326, 2417, 2672, 502, 3383, 2433, 289, 563, 3391, + 3319, 937, 400, 3576, 2237, 4364, 4823, 1052, 624, + 3569, 3421, 3185, 1890, 1102, 4130, 2582, 2459, 782, + 234, 2099, 438, 4641, 1092, 4461, 4056, 835, 988, + 3193, 4205, 4263, 126, 2663, 1914, 1209, 3452, 1475, + 4108, 2324, 1426, 2721, 2893, 686, 1600, 3263, 4977, + 4162, 1404, 4674, 531, 4627, 2927, 2987, 3595, 1132, + 444, 3572, 786, 4483, 4042, 3823, 3464, 4385, 4137, + 2938, 1854, 3275, 2006, 2852, 792, 2090, 3115, 2597, + 1757, 432, 4047, 4400, 284, 4426, 64, 4775, 4661, + 3698, 1923, 938, 4532, 2215, 2101, 2258, 1187, 785, + 3801, 3947, 1278, 2591, 3011, 2398, 3066, 2899, 1891, + 1966, 4557, 3689, 1491, 2517, 1118, 1384, 1815, 2055, + 4554, 3015, 3357, 4980, 2346, 1109, 1706, 4591, 4604, + 1827, 1687, 192, 3054, 4450, 2618, 3432, 865, 474, + 1959, 1507, 2572, 2898, 2770, 2722, 3681, 2647, 3945, + 3751, 4692, 509, 4370, 591, 901, 1510, 3302, 4814, + 443, 203, 2029, 2561, 3348, 2420, 3721, 4690, 1736, + 1542, 1971, 2986, 2472, 1010, 525, 1693, 790, 4168, + 1513, 3510, 1033, 2512, 3513, 611, 195, 1619, 3948, + 1758, 2077, 2438, 40, 2176, 4010, 1542, 1748, 4036, + 222, 4923, 1047, 434, 1760, 611, 455, 2803, 4429, + 2452, 253, 1390, 3192, 2387, 3482, 3879, 1778, 2415, + 2421, 1912, 3691, 3465, 2135, 3733, 3434, 1586, 3760, + 4843, 857, 1924, 4385, 2913, 1760, 3843, 501, 4859, + 1198, 4713, 532, 4063, 2575, 4397, 4626, 4668, 742, + 3238, 917, 4712, 1848, 3172, 1005, 1748, 4065, 4872, + 2679, 1023, 439, 3125, 3182, 4110, 3494], + [1719, 3967, 1431, 4611, 673, 1803, 2979, 3766, 3274, + 3816, 1533, 4104, 1117, 3919, 2194, 1971, 4467, 1600, + 4555, 1038, 3458, 296, 1220, 4674, 1371, 922, 3724, + 2995, 770, 3709, 2077, 1453, 4154, 1394, 606, 2520, + 4831, 1789, 2477, 3556, 3011, 3714, 1345, 4821, 4181, + 628, 4359, 301, 758, 796, 1412, 1940, 3720, 3664, + 3746, 4584, 1565, 2955, 3507, 4846, 4029, 1241, 798, + 3717, 3602, 4701, 3317, 4655, 4919, 4250, 2641, 3207, + 2064, 
1804, 476, 3922, 3562, 4015, 1098, 1148, 4678, + 1725, 4905, 4710, 2595, 2365, 2902, 3837, 1361, 1139, + 2821, 4765, 2904, 3403, 2777, 3451, 3044, 4841, 3648, + 1382, 570, 4264, 816, 3143, 2244, 4716, 4188, 3409, + 2134, 824, 4810, 2652, 3403, 4352, 1985, 4485, 1370, + 4616, 1254, 357, 1060, 512, 632, 1513, 192, 3888, + 4036, 3812, 4843, 3916, 2487, 2785, 2358, 2774, 2044, + 4684, 3549, 951, 2040, 2947, 3947, 668, 3312, 2110, + 3822, 1403, 2295, 4962, 1341, 3200, 3262, 4310, 3513, + 1820, 1494, 4368, 4906, 3644, 3756, 675, 1814, 4954, + 3007, 4294, 4949, 583, 441, 1951, 3512, 4213, 742, + 3729, 4180, 3276, 2336, 213, 1029, 3603, 2175, 1794, + 658, 1545, 3959, 1513, 4692, 840, 4891, 590, 2193, + 785, 4304, 2729, 2121, 2345, 2899, 588, 1610, 4633, + 636, 4801, 2999, 3553, 3119, 564, 1479, 3369, 546, + 1851, 2942, 1203, 3393, 4998, 1492, 2909, 4182, 2410, + 1917, 780, 3617, 3662, 4064, 2309, 298, 2763, 2446, + 3689, 1108, 1785, 1703, 3983, 2851, 220, 1082, 876, + 207, 3081, 1139, 94, 103, 4057, 3961, 2148, 312, + 4315, 1516, 3885, 3016, 3339, 2473, 2883]]), + values=tensor([0.8245, 0.6209, 0.6568, 0.2698, 0.7799, 0.5805, 0.7373, + 0.9934, 0.4098, 0.0633, 0.9197, 0.0607, 0.5828, 0.2517, + 0.0114, 0.4488, 0.1456, 0.8193, 0.4497, 0.1041, 0.0290, + 0.7164, 0.5905, 0.6910, 0.4248, 0.4371, 0.4699, 0.6124, + 0.1282, 0.0990, 0.0992, 0.3489, 0.5793, 0.4568, 0.7491, + 0.0167, 0.7550, 0.1416, 0.2202, 0.1180, 0.7966, 0.4574, + 0.5368, 0.3374, 0.4827, 0.6276, 0.9372, 0.1626, 0.2831, + 0.8860, 0.9484, 0.5694, 0.3351, 0.2877, 0.6202, 0.2328, + 0.9835, 0.8610, 0.6766, 0.0737, 0.9924, 0.5879, 0.4485, + 0.6267, 0.6144, 0.7329, 0.9045, 0.4680, 0.6755, 0.7423, + 0.3442, 0.0321, 0.8500, 0.9648, 0.4490, 0.7379, 0.4372, + 0.0023, 0.7806, 0.1012, 0.7585, 0.8932, 0.0404, 0.8165, + 0.4044, 0.5367, 0.9692, 0.4289, 0.6329, 0.5276, 0.2316, + 0.8150, 0.6601, 0.2823, 0.8252, 0.0827, 0.1716, 0.4655, + 0.3467, 0.8028, 0.5044, 0.2772, 0.2334, 0.0693, 0.6870, + 0.6328, 0.8483, 0.6663, 0.7303, 0.8878, 0.7052, 0.1902, + 0.4350, 0.0769, 0.2485, 0.9997, 0.2916, 0.7329, 0.3908, + 0.9711, 0.6311, 0.3058, 0.3897, 0.2498, 0.1473, 0.0901, + 0.3697, 0.8447, 0.0842, 0.7988, 0.1293, 0.2092, 0.7102, + 0.6025, 0.8799, 0.9802, 0.6528, 0.2960, 0.9156, 0.6264, + 0.8945, 0.6036, 0.7519, 0.3658, 0.9781, 0.6968, 0.1732, + 0.0836, 0.4978, 0.7772, 0.7275, 0.5194, 0.7401, 0.5726, + 0.8334, 0.4656, 0.7749, 0.6347, 0.1973, 0.5253, 0.5492, + 0.8237, 0.6949, 0.8620, 0.6366, 0.0170, 0.0404, 0.5789, + 0.7108, 0.9180, 0.9871, 0.8577, 0.5032, 0.0608, 0.5446, + 0.7873, 0.5818, 0.9519, 0.0234, 0.0483, 0.8109, 0.0204, + 0.5828, 0.4490, 0.5239, 0.4941, 0.4828, 0.5681, 0.6520, + 0.9346, 0.6347, 0.0625, 0.5264, 0.4258, 0.9040, 0.9506, + 0.5980, 0.4566, 0.9708, 0.3092, 0.0646, 0.2100, 0.4294, + 0.1322, 0.9258, 0.2269, 0.5477, 0.6100, 0.3885, 0.2404, + 0.0381, 0.2442, 0.6830, 0.0151, 0.1563, 0.4004, 0.6029, + 0.6976, 0.6941, 0.3889, 0.8411, 0.5514, 0.9536, 0.4125, + 0.5492, 0.5471, 0.1717, 0.6791, 0.0889, 0.8804, 0.3330, + 0.8051, 0.2565, 0.6620, 0.6278, 0.6609, 0.1369, 0.1963, + 0.9804, 0.9521, 0.0047, 0.9511, 0.8115, 0.6807, 0.4130, + 0.8889, 0.7367, 0.2773, 0.2228, 0.6664]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.9079, 0.9412, 0.7601, ..., 0.5240, 0.7311, 0.7800]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.3127236366271973 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '914292', '-ss', 
'5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.542762517929077} + +tensor(indices=tensor([[ 304, 3197, 3179, 4100, 3454, 4981, 24, 1276, 1491, + 3988, 2238, 4437, 4100, 2328, 4483, 2163, 377, 4484, + 1769, 223, 652, 407, 2480, 4866, 56, 1993, 372, + 1571, 726, 3983, 2844, 4571, 4127, 472, 1145, 3954, + 684, 476, 1304, 3573, 3351, 1614, 4531, 4266, 484, + 4296, 809, 3667, 2675, 2550, 1476, 3155, 198, 1043, + 627, 4917, 2451, 2586, 2765, 1523, 31, 424, 4660, + 645, 4392, 3829, 481, 4277, 3151, 3144, 627, 4612, + 4396, 4302, 2303, 1835, 3073, 4994, 58, 2377, 2456, + 62, 1089, 1354, 993, 4013, 4934, 4179, 4358, 1659, + 3921, 579, 4181, 3469, 4451, 3316, 3982, 4049, 612, + 1925, 2571, 2902, 2808, 812, 222, 3515, 1861, 4948, + 3964, 3017, 1169, 4075, 1113, 2276, 2402, 3499, 4855, + 3844, 3003, 4354, 3543, 2427, 1057, 1570, 4797, 567, + 2498, 2624, 2000, 561, 3711, 895, 1215, 3299, 2988, + 4589, 4386, 998, 4383, 2607, 4780, 344, 4861, 1257, + 628, 3463, 484, 3434, 4625, 225, 4755, 2293, 771, + 1652, 1906, 2983, 895, 3993, 410, 30, 4232, 4300, + 4144, 4768, 4336, 1630, 1469, 1950, 4163, 3021, 1956, + 624, 4983, 4456, 669, 1318, 468, 2924, 1380, 2206, + 1789, 4397, 1371, 2668, 3731, 2628, 4357, 4856, 2675, + 977, 2501, 3125, 2836, 4199, 1468, 4107, 1372, 1912, + 1619, 4675, 2384, 3423, 1541, 4891, 4491, 1507, 3059, + 2909, 1605, 563, 2613, 2143, 3617, 886, 823, 3629, + 3746, 2799, 4315, 1022, 2270, 2577, 4650, 3192, 1858, + 4840, 205, 1846, 2174, 1990, 80, 3669, 2600, 1926, + 2790, 1479, 2413, 4614, 4578, 2544, 3785, 36, 2810, + 4190, 2037, 309, 4764, 165, 3504, 494], + [4298, 1775, 822, 1364, 4201, 1949, 3693, 3832, 144, + 2016, 3453, 1630, 4881, 1967, 3811, 1107, 3724, 4393, + 622, 2606, 4513, 876, 196, 2488, 4566, 2795, 3935, + 2092, 2100, 487, 1890, 4467, 2977, 89, 3741, 1619, + 3913, 4339, 427, 3248, 1282, 2219, 2427, 3131, 4224, + 2912, 4210, 496, 1932, 3042, 169, 810, 187, 3810, + 3986, 4987, 1566, 1144, 418, 3510, 239, 1235, 726, + 1978, 1118, 788, 4306, 1431, 4740, 2648, 1888, 1473, + 1123, 3122, 2924, 3437, 647, 4903, 3514, 3131, 59, + 2181, 4867, 3981, 3823, 4462, 1766, 208, 1254, 4119, + 1332, 2694, 4845, 2157, 2624, 3556, 1965, 4636, 96, + 4933, 4114, 4471, 3713, 2327, 3987, 2613, 2167, 568, + 1827, 4591, 4413, 813, 739, 772, 1324, 680, 1518, + 3099, 1083, 3546, 3227, 3904, 1477, 2056, 3651, 2905, + 3924, 4951, 2672, 2440, 3228, 1791, 3644, 926, 2624, + 377, 1084, 964, 3965, 1187, 2299, 1028, 1050, 3566, + 2174, 1547, 3056, 1046, 1660, 3934, 4646, 4433, 1308, + 2158, 1794, 183, 3193, 4154, 3634, 1815, 1122, 829, + 2602, 4215, 2242, 1859, 2198, 2560, 1283, 4461, 611, + 357, 2071, 1313, 2170, 4499, 4421, 1833, 721, 2002, + 4883, 1658, 3539, 2344, 911, 1692, 4472, 1107, 2825, + 1391, 1261, 752, 2404, 2427, 1104, 2454, 3963, 2081, + 560, 1130, 3484, 3780, 2501, 3715, 2189, 860, 3840, + 1676, 269, 869, 3886, 4163, 806, 1603, 1059, 4518, + 2253, 4574, 2962, 1087, 2050, 4761, 916, 3982, 3173, + 2431, 4434, 2243, 367, 2730, 1548, 711, 2272, 314, + 4882, 4510, 3321, 917, 4362, 779, 1743, 4131, 3583, + 1591, 4983, 1241, 4646, 1876, 3730, 536]]), + values=tensor([0.3827, 0.4242, 0.8568, 0.9981, 0.5819, 0.6548, 0.0314, + 0.0160, 0.4620, 0.6355, 0.6452, 0.3075, 0.4252, 0.0036, + 0.3757, 0.4882, 0.0647, 0.9551, 0.7166, 0.6506, 0.5268, + 0.4261, 0.4735, 0.8005, 0.4346, 0.0778, 0.0154, 0.0449, + 0.9261, 0.5085, 0.3235, 0.8062, 0.1748, 
0.6072, 0.6774, + 0.6107, 0.0719, 0.4441, 0.1328, 0.9401, 0.1273, 0.5883, + 0.2372, 0.7228, 0.2807, 0.8217, 0.6319, 0.4495, 0.4936, + 0.2126, 0.4732, 0.1397, 0.0536, 0.9010, 0.9821, 0.4774, + 0.4470, 0.9401, 0.1876, 0.3970, 0.0668, 0.0728, 0.0741, + 0.6692, 0.6960, 0.7619, 0.4150, 0.0021, 0.1404, 0.7459, + 0.8221, 0.1130, 0.5932, 0.2906, 0.8610, 0.4444, 0.4650, + 0.9747, 0.2263, 0.5655, 0.2210, 0.4650, 0.6492, 0.9685, + 0.8482, 0.6124, 0.3437, 0.9826, 0.3733, 0.1004, 0.1431, + 0.9106, 0.7647, 0.6702, 0.7061, 0.0280, 0.9007, 0.7068, + 0.2146, 0.4596, 0.6978, 0.9841, 0.1318, 0.0521, 0.8950, + 0.7235, 0.9661, 0.5049, 0.3289, 0.1299, 0.3399, 0.1662, + 0.6628, 0.2565, 0.2053, 0.6121, 0.7049, 0.8834, 0.4649, + 0.7975, 0.9680, 0.3498, 0.6770, 0.8536, 0.3424, 0.8471, + 0.4448, 0.9292, 0.3907, 0.3420, 0.2408, 0.9103, 0.0812, + 0.8991, 0.9159, 0.2402, 0.4376, 0.8546, 0.4399, 0.1995, + 0.8428, 0.3298, 0.9501, 0.9695, 0.5800, 0.7492, 0.9832, + 0.3455, 0.4450, 0.4882, 0.0625, 0.2708, 0.4298, 0.4720, + 0.1172, 0.9569, 0.6381, 0.6640, 0.8851, 0.3822, 0.0927, + 0.2751, 0.9292, 0.5072, 0.1313, 0.4027, 0.2866, 0.6389, + 0.2001, 0.8797, 0.6546, 0.9551, 0.1146, 0.3523, 0.4406, + 0.1321, 0.8390, 0.9939, 0.8634, 0.8987, 0.4519, 0.9970, + 0.1440, 0.4406, 0.2710, 0.9460, 0.2690, 0.2847, 0.5087, + 0.2623, 0.3660, 0.8167, 0.3129, 0.0823, 0.4367, 0.8069, + 0.0815, 0.8214, 0.6923, 0.2095, 0.9002, 0.0246, 0.8342, + 0.2982, 0.9897, 0.0027, 0.3121, 0.0792, 0.5872, 0.1893, + 0.4513, 0.3743, 0.8199, 0.7452, 0.5175, 0.0566, 0.4424, + 0.1018, 0.1358, 0.5246, 0.9545, 0.0042, 0.6155, 0.5744, + 0.3248, 0.7488, 0.2222, 0.2778, 0.2395, 0.5061, 0.6396, + 0.5269, 0.0316, 0.6688, 0.2380, 0.3821, 0.7252, 0.0965, + 0.6365, 0.1471, 0.8564, 0.3164, 0.0916, 0.8556, 0.0410, + 0.2380, 0.8056, 0.7406, 0.3374, 0.1337]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.3521, 0.7730, 0.9706, ..., 0.8563, 0.2389, 0.6312]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.542762517929077 seconds + +tensor(indices=tensor([[ 304, 3197, 3179, 4100, 3454, 4981, 24, 1276, 1491, + 3988, 2238, 4437, 4100, 2328, 4483, 2163, 377, 4484, + 1769, 223, 652, 407, 2480, 4866, 56, 1993, 372, + 1571, 726, 3983, 2844, 4571, 4127, 472, 1145, 3954, + 684, 476, 1304, 3573, 3351, 1614, 4531, 4266, 484, + 4296, 809, 3667, 2675, 2550, 1476, 3155, 198, 1043, + 627, 4917, 2451, 2586, 2765, 1523, 31, 424, 4660, + 645, 4392, 3829, 481, 4277, 3151, 3144, 627, 4612, + 4396, 4302, 2303, 1835, 3073, 4994, 58, 2377, 2456, + 62, 1089, 1354, 993, 4013, 4934, 4179, 4358, 1659, + 3921, 579, 4181, 3469, 4451, 3316, 3982, 4049, 612, + 1925, 2571, 2902, 2808, 812, 222, 3515, 1861, 4948, + 3964, 3017, 1169, 4075, 1113, 2276, 2402, 3499, 4855, + 3844, 3003, 4354, 3543, 2427, 1057, 1570, 4797, 567, + 2498, 2624, 2000, 561, 3711, 895, 1215, 3299, 2988, + 4589, 4386, 998, 4383, 2607, 4780, 344, 4861, 1257, + 628, 3463, 484, 3434, 4625, 225, 4755, 2293, 771, + 1652, 1906, 2983, 895, 3993, 410, 30, 4232, 4300, + 4144, 4768, 4336, 1630, 1469, 1950, 4163, 3021, 1956, + 624, 4983, 4456, 669, 1318, 468, 2924, 1380, 2206, + 1789, 4397, 1371, 2668, 3731, 2628, 4357, 4856, 2675, + 977, 2501, 3125, 2836, 4199, 1468, 4107, 1372, 1912, + 1619, 4675, 2384, 3423, 1541, 4891, 4491, 1507, 3059, + 2909, 1605, 563, 2613, 2143, 3617, 886, 823, 3629, + 3746, 2799, 4315, 1022, 2270, 2577, 4650, 3192, 1858, + 4840, 205, 1846, 2174, 1990, 80, 3669, 2600, 1926, + 2790, 1479, 2413, 4614, 4578, 2544, 3785, 
36, 2810, + 4190, 2037, 309, 4764, 165, 3504, 494], + [4298, 1775, 822, 1364, 4201, 1949, 3693, 3832, 144, + 2016, 3453, 1630, 4881, 1967, 3811, 1107, 3724, 4393, + 622, 2606, 4513, 876, 196, 2488, 4566, 2795, 3935, + 2092, 2100, 487, 1890, 4467, 2977, 89, 3741, 1619, + 3913, 4339, 427, 3248, 1282, 2219, 2427, 3131, 4224, + 2912, 4210, 496, 1932, 3042, 169, 810, 187, 3810, + 3986, 4987, 1566, 1144, 418, 3510, 239, 1235, 726, + 1978, 1118, 788, 4306, 1431, 4740, 2648, 1888, 1473, + 1123, 3122, 2924, 3437, 647, 4903, 3514, 3131, 59, + 2181, 4867, 3981, 3823, 4462, 1766, 208, 1254, 4119, + 1332, 2694, 4845, 2157, 2624, 3556, 1965, 4636, 96, + 4933, 4114, 4471, 3713, 2327, 3987, 2613, 2167, 568, + 1827, 4591, 4413, 813, 739, 772, 1324, 680, 1518, + 3099, 1083, 3546, 3227, 3904, 1477, 2056, 3651, 2905, + 3924, 4951, 2672, 2440, 3228, 1791, 3644, 926, 2624, + 377, 1084, 964, 3965, 1187, 2299, 1028, 1050, 3566, + 2174, 1547, 3056, 1046, 1660, 3934, 4646, 4433, 1308, + 2158, 1794, 183, 3193, 4154, 3634, 1815, 1122, 829, + 2602, 4215, 2242, 1859, 2198, 2560, 1283, 4461, 611, + 357, 2071, 1313, 2170, 4499, 4421, 1833, 721, 2002, + 4883, 1658, 3539, 2344, 911, 1692, 4472, 1107, 2825, + 1391, 1261, 752, 2404, 2427, 1104, 2454, 3963, 2081, + 560, 1130, 3484, 3780, 2501, 3715, 2189, 860, 3840, + 1676, 269, 869, 3886, 4163, 806, 1603, 1059, 4518, + 2253, 4574, 2962, 1087, 2050, 4761, 916, 3982, 3173, + 2431, 4434, 2243, 367, 2730, 1548, 711, 2272, 314, + 4882, 4510, 3321, 917, 4362, 779, 1743, 4131, 3583, + 1591, 4983, 1241, 4646, 1876, 3730, 536]]), + values=tensor([0.3827, 0.4242, 0.8568, 0.9981, 0.5819, 0.6548, 0.0314, + 0.0160, 0.4620, 0.6355, 0.6452, 0.3075, 0.4252, 0.0036, + 0.3757, 0.4882, 0.0647, 0.9551, 0.7166, 0.6506, 0.5268, + 0.4261, 0.4735, 0.8005, 0.4346, 0.0778, 0.0154, 0.0449, + 0.9261, 0.5085, 0.3235, 0.8062, 0.1748, 0.6072, 0.6774, + 0.6107, 0.0719, 0.4441, 0.1328, 0.9401, 0.1273, 0.5883, + 0.2372, 0.7228, 0.2807, 0.8217, 0.6319, 0.4495, 0.4936, + 0.2126, 0.4732, 0.1397, 0.0536, 0.9010, 0.9821, 0.4774, + 0.4470, 0.9401, 0.1876, 0.3970, 0.0668, 0.0728, 0.0741, + 0.6692, 0.6960, 0.7619, 0.4150, 0.0021, 0.1404, 0.7459, + 0.8221, 0.1130, 0.5932, 0.2906, 0.8610, 0.4444, 0.4650, + 0.9747, 0.2263, 0.5655, 0.2210, 0.4650, 0.6492, 0.9685, + 0.8482, 0.6124, 0.3437, 0.9826, 0.3733, 0.1004, 0.1431, + 0.9106, 0.7647, 0.6702, 0.7061, 0.0280, 0.9007, 0.7068, + 0.2146, 0.4596, 0.6978, 0.9841, 0.1318, 0.0521, 0.8950, + 0.7235, 0.9661, 0.5049, 0.3289, 0.1299, 0.3399, 0.1662, + 0.6628, 0.2565, 0.2053, 0.6121, 0.7049, 0.8834, 0.4649, + 0.7975, 0.9680, 0.3498, 0.6770, 0.8536, 0.3424, 0.8471, + 0.4448, 0.9292, 0.3907, 0.3420, 0.2408, 0.9103, 0.0812, + 0.8991, 0.9159, 0.2402, 0.4376, 0.8546, 0.4399, 0.1995, + 0.8428, 0.3298, 0.9501, 0.9695, 0.5800, 0.7492, 0.9832, + 0.3455, 0.4450, 0.4882, 0.0625, 0.2708, 0.4298, 0.4720, + 0.1172, 0.9569, 0.6381, 0.6640, 0.8851, 0.3822, 0.0927, + 0.2751, 0.9292, 0.5072, 0.1313, 0.4027, 0.2866, 0.6389, + 0.2001, 0.8797, 0.6546, 0.9551, 0.1146, 0.3523, 0.4406, + 0.1321, 0.8390, 0.9939, 0.8634, 0.8987, 0.4519, 0.9970, + 0.1440, 0.4406, 0.2710, 0.9460, 0.2690, 0.2847, 0.5087, + 0.2623, 0.3660, 0.8167, 0.3129, 0.0823, 0.4367, 0.8069, + 0.0815, 0.8214, 0.6923, 0.2095, 0.9002, 0.0246, 0.8342, + 0.2982, 0.9897, 0.0027, 0.3121, 0.0792, 0.5872, 0.1893, + 0.4513, 0.3743, 0.8199, 0.7452, 0.5175, 0.0566, 0.4424, + 0.1018, 0.1358, 0.5246, 0.9545, 0.0042, 0.6155, 0.5744, + 0.3248, 0.7488, 0.2222, 0.2778, 0.2395, 0.5061, 0.6396, + 0.5269, 0.0316, 0.6688, 0.2380, 0.3821, 0.7252, 0.0965, + 
0.6365, 0.1471, 0.8564, 0.3164, 0.0916, 0.8556, 0.0410, + 0.2380, 0.8056, 0.7406, 0.3374, 0.1337]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.3521, 0.7730, 0.9706, ..., 0.8563, 0.2389, 0.6312]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.542762517929077 seconds + +[40.18, 39.23, 39.08, 39.62, 39.49, 39.4, 38.94, 39.42, 39.88, 38.9] +[65.62] +12.935570478439331 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 914292, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.542762517929077, 'TIME_S_1KI': 0.011531067227897736, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 848.832134795189, 'W': 65.62} +[40.18, 39.23, 39.08, 39.62, 39.49, 39.4, 38.94, 39.42, 39.88, 38.9, 39.55, 39.39, 39.13, 38.98, 39.31, 40.14, 39.28, 39.41, 40.05, 39.28] +709.7050000000002 +35.48525000000001 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 914292, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.542762517929077, 'TIME_S_1KI': 0.011531067227897736, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 848.832134795189, 'W': 65.62, 'J_1KI': 0.9284037646563559, 'W_1KI': 0.0717713815717517, 'W_D': 30.134749999999997, 'J_D': 389.8101824751496, 'W_D_1KI': 0.03295965621486352, 'J_D_1KI': 3.604937614554598e-05} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..5feedd0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 264773, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 12.908402919769287, "TIME_S_1KI": 0.04875271617487163, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 856.4205559635163, "W": 66.23, "J_1KI": 3.2345464075397277, "W_1KI": 0.25013879814029377, "W_D": 30.250000000000007, "J_D": 391.1629445552827, "W_D_1KI": 0.11424880935744962, "J_D_1KI": 0.00043149720461470623} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..5c2b9aa --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01278996467590332} + +tensor(indices=tensor([[1910, 2449, 350, ..., 2036, 729, 2451], + [ 742, 1036, 4684, ..., 413, 1944, 2520]]), + values=tensor([0.8034, 0.1517, 0.3873, ..., 0.4764, 0.2065, 0.1020]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.5684, 0.7732, 0.5466, ..., 0.5826, 0.0795, 0.1639]) +Matrix Type: synthetic +Matrix Format: coo 
+Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.01278996467590332 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '82095', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.2556071281433105} + +tensor(indices=tensor([[4274, 1817, 2675, ..., 3996, 3418, 416], + [2374, 458, 595, ..., 1284, 853, 4526]]), + values=tensor([0.9593, 0.9999, 0.0625, ..., 0.9089, 0.9906, 0.4203]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.5480, 0.4054, 0.8753, ..., 0.8136, 0.5370, 0.8569]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 3.2556071281433105 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'coo', '264773', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 12.908402919769287} + +tensor(indices=tensor([[ 28, 111, 112, ..., 3928, 3959, 1587], + [3034, 237, 3606, ..., 4239, 259, 4218]]), + values=tensor([0.9704, 0.9398, 0.0070, ..., 0.3428, 0.4681, 0.3499]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.6124, 0.9572, 0.4064, ..., 0.0972, 0.7122, 0.2442]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 12.908402919769287 seconds + +tensor(indices=tensor([[ 28, 111, 112, ..., 3928, 3959, 1587], + [3034, 237, 3606, ..., 4239, 259, 4218]]), + values=tensor([0.9704, 0.9398, 0.0070, ..., 0.3428, 0.4681, 0.3499]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.6124, 0.9572, 0.4064, ..., 0.0972, 0.7122, 0.2442]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 12.908402919769287 seconds + +[39.99, 39.38, 39.0, 39.02, 38.95, 38.99, 44.47, 39.26, 39.49, 38.95] +[66.23] +12.93100643157959 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 264773, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 12.908402919769287, 'TIME_S_1KI': 0.04875271617487163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.4205559635163, 'W': 66.23} +[39.99, 39.38, 39.0, 39.02, 38.95, 38.99, 44.47, 39.26, 39.49, 38.95, 40.75, 39.61, 39.44, 44.89, 39.56, 40.82, 39.01, 38.87, 39.51, 38.97] +719.5999999999999 +35.98 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 264773, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 12.908402919769287, 'TIME_S_1KI': 0.04875271617487163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 856.4205559635163, 'W': 66.23, 'J_1KI': 3.2345464075397277, 'W_1KI': 0.25013879814029377, 'W_D': 30.250000000000007, 'J_D': 391.1629445552827, 'W_D_1KI': 0.11424880935744962, 'J_D_1KI': 0.00043149720461470623} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json 
b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json index 4ca7cf3..4ad22b3 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 67064, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.610118389129639, "TIME_S_1KI": 0.15820885108448107, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1755.3175721168518, "W": 134.25, "J_1KI": 26.173767924920252, "W_1KI": 2.0018191578193965, "W_D": 98.104, "J_D": 1282.7089392547607, "W_D_1KI": 1.4628414648693786, "J_D_1KI": 0.021812618765200086} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 70295, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.822261571884155, "TIME_S_1KI": 0.15395492669299604, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2022.7645127677915, "W": 148.89, "J_1KI": 28.775368273245487, "W_1KI": 2.1180738317092254, "W_D": 112.92999999999999, "J_D": 1534.2252429771422, "W_D_1KI": 1.6065153993882921, "J_D_1KI": 0.022853907097066535} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output index 6ec2345..54ef64c 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.05684256553649902} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.05624699592590332} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 13, 23, ..., 999979, - 999991, 1000000]), - col_indices=tensor([ 4525, 11074, 13753, ..., 80507, 85385, 86427]), - values=tensor([0.4106, 0.9983, 0.2404, ..., 0.2427, 0.2624, 0.7034]), +tensor(crow_indices=tensor([ 0, 11, 23, ..., 999975, + 999988, 1000000]), + col_indices=tensor([ 2723, 3213, 5616, ..., 63383, 68375, 91486]), + values=tensor([0.8396, 0.9858, 0.3390, ..., 0.5841, 0.8115, 0.3849]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.4963, 0.1898, 0.5953, ..., 0.4144, 0.1558, 0.0288]) +tensor([0.5352, 0.8213, 0.5999, ..., 0.4683, 0.8101, 0.1591]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 0.05684256553649902 seconds +Time: 0.05624699592590332 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18472', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.8920857906341553} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18667', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.788297176361084} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 17, ..., 999981, +tensor(crow_indices=tensor([ 0, 12, 29, ..., 999982, 999989, 1000000]), - col_indices=tensor([ 8653, 22699, 39303, ..., 86578, 89246, 90775]), - values=tensor([0.9948, 0.4799, 0.6025, ..., 0.7759, 0.3812, 0.6990]), + col_indices=tensor([ 5929, 8280, 11320, ..., 91562, 91691, 92303]), + values=tensor([0.5085, 0.3313, 0.7584, ..., 0.0068, 0.8631, 0.9800]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.4256, 0.6072, 0.2987, ..., 0.6512, 0.1573, 0.7068]) +tensor([0.4155, 0.0797, 0.8597, ..., 0.6063, 0.7260, 0.5851]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 2.8920857906341553 seconds +Time: 2.788297176361084 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '67064', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.610118389129639} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '70295', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.822261571884155} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 22, ..., 999983, - 999993, 1000000]), - col_indices=tensor([27755, 29395, 33124, ..., 74386, 97777, 99456]), - values=tensor([0.7148, 0.9361, 0.5875, ..., 0.1256, 0.4168, 0.2712]), +tensor(crow_indices=tensor([ 0, 5, 12, ..., 999979, + 999990, 1000000]), + col_indices=tensor([27644, 31899, 46284, ..., 52964, 73578, 99989]), + values=tensor([0.6537, 0.4454, 0.4070, ..., 0.8265, 0.7302, 0.8086]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0632, 0.1399, 0.4829, ..., 0.0512, 0.0510, 0.5050]) +tensor([0.4658, 0.0086, 0.8927, ..., 0.0810, 0.5824, 0.7438]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.610118389129639 seconds +Time: 10.822261571884155 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 9, 22, ..., 999983, - 999993, 1000000]), - col_indices=tensor([27755, 29395, 33124, ..., 74386, 97777, 99456]), - values=tensor([0.7148, 0.9361, 0.5875, ..., 0.1256, 0.4168, 0.2712]), +tensor(crow_indices=tensor([ 0, 5, 12, ..., 999979, + 999990, 1000000]), + col_indices=tensor([27644, 31899, 46284, ..., 52964, 73578, 99989]), + values=tensor([0.6537, 0.4454, 0.4070, ..., 0.8265, 0.7302, 0.8086]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0632, 0.1399, 0.4829, ..., 0.0512, 0.0510, 0.5050]) +tensor([0.4658, 0.0086, 0.8927, ..., 0.0810, 0.5824, 0.7438]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.610118389129639 seconds +Time: 10.822261571884155 seconds -[42.15, 40.1, 39.72, 39.74, 39.84, 40.12, 39.64, 39.58, 39.89, 39.55] -[134.25] -13.074991226196289 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 67064, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.610118389129639, 'TIME_S_1KI': 0.15820885108448107, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1755.3175721168518, 'W': 134.25} -[42.15, 40.1, 39.72, 39.74, 39.84, 40.12, 39.64, 39.58, 39.89, 39.55, 42.7, 40.22, 40.9, 40.07, 39.72, 39.89, 39.69, 39.67, 39.56, 44.74] -722.9200000000001 -36.146 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 67064, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.610118389129639, 'TIME_S_1KI': 0.15820885108448107, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1755.3175721168518, 'W': 134.25, 'J_1KI': 26.173767924920252, 'W_1KI': 2.0018191578193965, 'W_D': 98.104, 'J_D': 1282.7089392547607, 'W_D_1KI': 1.4628414648693786, 'J_D_1KI': 0.021812618765200086} +[40.4, 39.87, 39.81, 39.7, 40.14, 40.04, 39.63, 39.79, 40.11, 39.75] +[148.89] +13.585630416870117 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 70295, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.822261571884155, 'TIME_S_1KI': 0.15395492669299604, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2022.7645127677915, 'W': 148.89} +[40.4, 39.87, 39.81, 39.7, 40.14, 40.04, 39.63, 39.79, 40.11, 39.75, 41.06, 39.66, 39.86, 39.65, 39.62, 40.13, 39.77, 39.67, 41.05, 40.19] +719.1999999999999 +35.959999999999994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 70295, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.822261571884155, 'TIME_S_1KI': 0.15395492669299604, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2022.7645127677915, 'W': 148.89, 'J_1KI': 28.775368273245487, 'W_1KI': 2.1180738317092254, 'W_D': 112.92999999999999, 'J_D': 1534.2252429771422, 'W_D_1KI': 1.6065153993882921, 'J_D_1KI': 0.022853907097066535} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json 
index 97a1aaa..724eedf 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3865, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.6199471950531, "TIME_S_1KI": 2.7477224308028725, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1736.0991844940186, "W": 125.54, "J_1KI": 449.1847825340281, "W_1KI": 32.48124191461837, "W_D": 89.51725000000002, "J_D": 1237.938702589989, "W_D_1KI": 23.160996119016822, "J_D_1KI": 5.992495761711985} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3822, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.796124458312988, "TIME_S_1KI": 2.8247316740745654, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1752.425606701374, "W": 126.90999999999998, "J_1KI": 458.5101011777535, "W_1KI": 33.2051282051282, "W_D": 90.64599999999999, "J_D": 1251.677342566013, "W_D_1KI": 23.716902145473572, "J_D_1KI": 6.205364245283509} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output index ab63507..3e9bc76 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.29607152938842773} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.27468323707580566} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 108, 205, ..., 9999797, - 9999886, 10000000]), - col_indices=tensor([ 1353, 2200, 3779, ..., 96854, 97028, 97339]), - values=tensor([0.4346, 0.2367, 0.4770, ..., 0.1479, 0.4649, 0.9103]), +tensor(crow_indices=tensor([ 0, 99, 197, ..., 9999798, + 9999907, 10000000]), + col_indices=tensor([ 239, 1592, 1731, ..., 97883, 98029, 99565]), + values=tensor([0.8693, 0.2201, 0.3788, ..., 0.6951, 0.6639, 0.6763]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.4412, 0.7177, 0.2059, ..., 0.3280, 0.0589, 0.7180]) +tensor([0.7538, 0.4490, 0.6434, ..., 0.3147, 0.7801, 0.8891]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,39 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 0.29607152938842773 seconds +Time: 0.27468323707580566 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3546', '-ss', '100000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.63291883468628} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3822', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.796124458312988} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 92, 201, ..., 9999814, - 9999913, 10000000]), - col_indices=tensor([ 1097, 1389, 2328, ..., 96293, 96542, 99036]), - values=tensor([0.4476, 0.1977, 0.6820, ..., 0.8020, 0.1490, 0.2819]), - size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6506, 0.9195, 0.4022, ..., 0.6497, 0.8706, 0.8621]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 10000000 -Density: 0.001 -Time: 9.63291883468628 seconds - -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3865', '-ss', '100000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.6199471950531} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 98, 197, ..., 9999799, +tensor(crow_indices=tensor([ 0, 103, 206, ..., 9999818, 9999902, 10000000]), - col_indices=tensor([ 2919, 3313, 3728, ..., 97238, 97697, 98577]), - values=tensor([0.4198, 0.7828, 0.7567, ..., 0.6995, 0.0988, 0.1528]), + col_indices=tensor([ 218, 1372, 3051, ..., 94547, 95546, 98643]), + values=tensor([0.4032, 0.6777, 0.1457, ..., 0.3618, 0.7288, 0.5858]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.5695, 0.4144, 0.5638, ..., 0.3431, 0.2067, 0.0841]) +tensor([0.1082, 0.5340, 0.5447, ..., 0.1866, 0.5966, 0.6176]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.6199471950531 seconds +Time: 10.796124458312988 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 98, 197, ..., 9999799, +tensor(crow_indices=tensor([ 0, 103, 206, ..., 9999818, 9999902, 10000000]), - col_indices=tensor([ 2919, 3313, 3728, ..., 97238, 97697, 98577]), - values=tensor([0.4198, 0.7828, 0.7567, ..., 0.6995, 0.0988, 0.1528]), + col_indices=tensor([ 218, 1372, 3051, ..., 94547, 95546, 98643]), + values=tensor([0.4032, 0.6777, 0.1457, ..., 0.3618, 0.7288, 0.5858]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.5695, 0.4144, 0.5638, ..., 0.3431, 0.2067, 0.0841]) +tensor([0.1082, 0.5340, 0.5447, ..., 0.1866, 0.5966, 0.6176]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.6199471950531 seconds +Time: 10.796124458312988 seconds -[41.63, 40.06, 40.05, 39.72, 39.88, 39.71, 40.32, 40.32, 40.26, 39.8] -[125.54] -13.829051971435547 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3865, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.6199471950531, 'TIME_S_1KI': 2.7477224308028725, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1736.0991844940186, 'W': 125.54} -[41.63, 40.06, 40.05, 39.72, 39.88, 39.71, 40.32, 40.32, 40.26, 39.8, 40.39, 39.7, 39.76, 39.75, 39.78, 40.5, 39.95, 39.84, 40.11, 39.67] -720.4549999999999 -36.022749999999995 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3865, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.6199471950531, 'TIME_S_1KI': 2.7477224308028725, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1736.0991844940186, 'W': 125.54, 'J_1KI': 449.1847825340281, 'W_1KI': 32.48124191461837, 'W_D': 89.51725000000002, 'J_D': 1237.938702589989, 'W_D_1KI': 23.160996119016822, 'J_D_1KI': 5.992495761711985} +[40.82, 40.14, 39.68, 40.44, 39.72, 39.8, 39.86, 39.75, 39.61, 39.73] +[126.91] +13.808412313461304 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3822, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 
'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.796124458312988, 'TIME_S_1KI': 2.8247316740745654, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1752.425606701374, 'W': 126.90999999999998} +[40.82, 40.14, 39.68, 40.44, 39.72, 39.8, 39.86, 39.75, 39.61, 39.73, 41.68, 40.53, 39.66, 40.13, 39.66, 39.82, 39.76, 40.78, 45.04, 39.57] +725.2800000000001 +36.264 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3822, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.796124458312988, 'TIME_S_1KI': 2.8247316740745654, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1752.425606701374, 'W': 126.90999999999998, 'J_1KI': 458.5101011777535, 'W_1KI': 33.2051282051282, 'W_D': 90.64599999999999, 'J_D': 1251.677342566013, 'W_D_1KI': 23.716902145473572, 'J_D_1KI': 6.205364245283509} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..2f44a1d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 492, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.48446536064148, "TIME_S_1KI": 21.30988894439325, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3692.9655099868774, "W": 98.84, "J_1KI": 7506.0274593229215, "W_1KI": 200.89430894308944, "W_D": 62.13425, "J_D": 2321.526125444174, "W_D_1KI": 126.28912601626016, "J_D_1KI": 256.68521548020357} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..935b6eb --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.1332833766937256} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 992, 1996, ..., + 99998038, 99998977, 100000000]), + col_indices=tensor([ 426, 448, 664, ..., 99690, 99718, 99834]), + values=tensor([0.7644, 0.0512, 0.5135, ..., 0.5410, 0.8615, 0.1187]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.1556, 0.5592, 0.0469, ..., 0.1387, 0.8405, 0.8832]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 2.1332833766937256 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '492', '-ss', '100000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.48446536064148} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 996, 2006, ..., + 99998009, 99999008, 100000000]), + col_indices=tensor([ 32, 117, 446, ..., 99626, 99652, 99845]), + values=tensor([0.3215, 0.2082, 0.6698, ..., 0.0359, 0.4063, 0.2578]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.6233, 0.7317, 0.9818, ..., 0.0719, 0.7892, 0.9884]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.48446536064148 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 996, 2006, ..., + 99998009, 99999008, 100000000]), + col_indices=tensor([ 32, 117, 446, ..., 99626, 99652, 99845]), + values=tensor([0.3215, 0.2082, 0.6698, ..., 0.0359, 0.4063, 0.2578]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.6233, 0.7317, 0.9818, ..., 0.0719, 0.7892, 0.9884]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.48446536064148 seconds + +[40.62, 39.95, 39.79, 39.89, 39.69, 40.11, 39.73, 39.74, 40.0, 39.47] +[98.84] +37.36306667327881 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 492, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.48446536064148, 'TIME_S_1KI': 21.30988894439325, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3692.9655099868774, 'W': 98.84} +[40.62, 39.95, 39.79, 39.89, 39.69, 40.11, 39.73, 39.74, 40.0, 39.47, 40.79, 40.68, 40.03, 39.92, 43.59, 51.85, 39.71, 39.54, 39.71, 39.49] +734.115 +36.70575 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 492, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.48446536064148, 'TIME_S_1KI': 21.30988894439325, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3692.9655099868774, 'W': 98.84, 'J_1KI': 7506.0274593229215, 'W_1KI': 200.89430894308944, 'W_D': 62.13425, 'J_D': 2321.526125444174, 'W_D_1KI': 126.28912601626016, 'J_D_1KI': 256.68521548020357} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json index 9be5a17..02781c1 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 102064, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.524274349212646, "TIME_S_1KI": 0.10311446101674092, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1601.1415306377412, "W": 113.64000000000001, "J_1KI": 15.687622772355983, "W_1KI": 1.1134190311961123, "W_D": 77.54950000000002, "J_D": 1092.6410166331532, "W_D_1KI": 0.7598124706066784, "J_D_1KI": 0.007444470828173288} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 104631, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.92940616607666, "TIME_S_1KI": 0.11401407007556709, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1682.5194715809823, "W": 114.23, "J_1KI": 16.08050646157432, "W_1KI": 1.0917414532977798, "W_D": 78.15350000000001, "J_D": 1151.1405543395283, "W_D_1KI": 0.7469440223260794, "J_D_1KI": 0.007138840518833609} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output index 
a8ac003..a082364 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.051177263259887695} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.040479183197021484} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99999, 99999, 100000]), - col_indices=tensor([ 5338, 33433, 17911, ..., 60039, 74427, 45774]), - values=tensor([0.9933, 0.3915, 0.2951, ..., 0.3503, 0.7922, 0.2614]), + col_indices=tensor([ 647, 35192, 49713, ..., 72297, 77626, 4033]), + values=tensor([0.5259, 0.7735, 0.8729, ..., 0.9147, 0.2198, 0.2516]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6274, 0.9288, 0.1155, ..., 0.7548, 0.5951, 0.2372]) +tensor([0.2705, 0.4738, 0.5542, ..., 0.7481, 0.0692, 0.4617]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.051177263259887695 seconds +Time: 0.040479183197021484 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20516', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.11061429977417} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25939', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.6030373573303223} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 2, ..., 99999, 100000, +tensor(crow_indices=tensor([ 0, 0, 0, ..., 99995, 99999, 100000]), - col_indices=tensor([35938, 84023, 26382, ..., 80961, 25218, 78065]), - values=tensor([0.1771, 0.7263, 0.1955, ..., 0.1569, 0.5183, 0.0872]), + col_indices=tensor([63737, 56720, 10559, ..., 75305, 77118, 60915]), + values=tensor([0.5479, 0.5972, 0.3396, ..., 0.3516, 0.8713, 0.6246]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.4261, 0.9316, 0.9486, ..., 0.4583, 0.3074, 0.5243]) +tensor([0.0931, 0.9309, 0.9267, ..., 0.4971, 0.7820, 0.8195]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 2.11061429977417 seconds +Time: 2.6030373573303223 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '102064', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.524274349212646} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '104631', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.92940616607666} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 1, 2, ..., 99998, 99999, 100000]), - col_indices=tensor([ 7923, 87583, 82060, ..., 33729, 87076, 106]), - values=tensor([0.1731, 0.5965, 0.6757, ..., 0.8844, 0.0621, 0.1000]), + col_indices=tensor([91220, 70747, 49345, ..., 32064, 6505, 95139]), + values=tensor([0.0238, 0.2992, 0.8012, ..., 0.6072, 0.6775, 0.2288]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.3641, 0.2409, 0.5686, ..., 0.5557, 0.7015, 0.9398]) +tensor([0.1871, 0.1293, 0.9330, ..., 0.6487, 0.5534, 0.6200]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.524274349212646 seconds +Time: 11.92940616607666 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 3, ..., 100000, 100000, +tensor(crow_indices=tensor([ 0, 1, 2, ..., 99998, 99999, 100000]), - col_indices=tensor([ 7923, 87583, 82060, ..., 33729, 87076, 106]), - values=tensor([0.1731, 0.5965, 0.6757, ..., 0.8844, 0.0621, 0.1000]), + col_indices=tensor([91220, 70747, 49345, ..., 32064, 6505, 95139]), + values=tensor([0.0238, 0.2992, 0.8012, ..., 0.6072, 0.6775, 0.2288]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.3641, 0.2409, 0.5686, ..., 0.5557, 0.7015, 0.9398]) +tensor([0.1871, 0.1293, 0.9330, ..., 0.6487, 0.5534, 0.6200]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.524274349212646 seconds +Time: 11.92940616607666 seconds -[40.73, 39.59, 39.74, 39.73, 39.63, 39.83, 40.08, 39.98, 40.09, 39.48] -[113.64] -14.089594602584839 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102064, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.524274349212646, 'TIME_S_1KI': 0.10311446101674092, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1601.1415306377412, 'W': 113.64000000000001} -[40.73, 39.59, 39.74, 39.73, 39.63, 39.83, 40.08, 39.98, 40.09, 39.48, 40.32, 39.82, 39.63, 39.81, 39.64, 39.48, 39.85, 39.99, 44.93, 39.45] -721.81 -36.0905 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102064, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.524274349212646, 'TIME_S_1KI': 0.10311446101674092, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1601.1415306377412, 'W': 113.64000000000001, 'J_1KI': 15.687622772355983, 'W_1KI': 1.1134190311961123, 'W_D': 77.54950000000002, 'J_D': 1092.6410166331532, 'W_D_1KI': 0.7598124706066784, 'J_D_1KI': 0.007444470828173288} +[41.02, 39.64, 40.4, 39.6, 41.57, 39.92, 39.7, 39.85, 39.57, 40.18] +[114.23] +14.729225873947144 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 104631, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.92940616607666, 'TIME_S_1KI': 0.11401407007556709, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1682.5194715809823, 'W': 114.23} +[41.02, 39.64, 40.4, 39.6, 41.57, 39.92, 39.7, 39.85, 39.57, 40.18, 40.26, 39.84, 40.07, 40.06, 40.35, 39.74, 39.79, 40.2, 40.74, 39.52] +721.53 +36.076499999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 104631, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.92940616607666, 'TIME_S_1KI': 0.11401407007556709, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1682.5194715809823, 'W': 114.23, 'J_1KI': 16.08050646157432, 'W_1KI': 1.0917414532977798, 'W_D': 78.15350000000001, 'J_D': 1151.1405543395283, 'W_D_1KI': 0.7469440223260794, 'J_D_1KI': 0.007138840518833609} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.json index 24bf6da..f79591c 
100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 83443, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.331598043441772, "TIME_S_1KI": 0.12381623435688761, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1753.1380189323427, "W": 133.96, "J_1KI": 21.010007057899916, "W_1KI": 1.6054072840142373, "W_D": 97.99700000000001, "J_D": 1282.489298606396, "W_D_1KI": 1.1744184652996656, "J_D_1KI": 0.014074499542198454} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 84017, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.68351411819458, "TIME_S_1KI": 0.1271589573323801, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1756.137734184265, "W": 134.64, "J_1KI": 20.902171396077758, "W_1KI": 1.6025328207386598, "W_D": 98.332, "J_D": 1282.5648817424774, "W_D_1KI": 1.170382184557887, "J_D_1KI": 0.013930302016947607} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.output index 566b51a..94d6712 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.04202604293823242} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.048097848892211914} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 10, ..., 499989, 499993, +tensor(crow_indices=tensor([ 0, 2, 4, ..., 499987, 499994, 500000]), - col_indices=tensor([44479, 4048, 15938, ..., 81904, 89204, 96058]), - values=tensor([0.8024, 0.2371, 0.1804, ..., 0.7304, 0.5867, 0.0881]), + col_indices=tensor([45986, 87496, 16093, ..., 81459, 91507, 94938]), + values=tensor([0.5541, 0.7240, 0.1434, ..., 0.0835, 0.6754, 0.2974]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.7637, 0.3596, 0.9771, ..., 0.6123, 0.8042, 0.6339]) +tensor([0.9503, 0.0035, 0.0750, ..., 0.9219, 0.8718, 0.0631]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 0.04202604293823242 seconds +Time: 0.048097848892211914 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '24984', '-ss', '100000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.1438426971435547} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21830', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.7281758785247803} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 12, ..., 499992, 499996, +tensor(crow_indices=tensor([ 0, 2, 6, ..., 499994, 499997, 500000]), - col_indices=tensor([25849, 26475, 42516, ..., 54532, 74351, 87242]), - values=tensor([0.9779, 0.7287, 0.9943, ..., 0.8976, 0.9175, 0.6342]), + col_indices=tensor([83268, 88397, 15509, ..., 59756, 77530, 84602]), + values=tensor([0.3296, 0.6421, 0.2508, ..., 0.6584, 0.8479, 0.6996]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.3426, 0.5652, 0.2473, ..., 0.4439, 0.3784, 0.0403]) +tensor([0.4117, 0.2597, 0.3803, ..., 0.1721, 0.1242, 0.1947]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 3.1438426971435547 seconds +Time: 2.7281758785247803 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '83443', '-ss', '100000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.331598043441772} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '84017', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.68351411819458} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 9, ..., 499992, 499996, +tensor(crow_indices=tensor([ 0, 7, 11, ..., 499993, 499997, 500000]), - col_indices=tensor([44464, 48782, 50602, ..., 44812, 48851, 96308]), - values=tensor([0.0768, 0.4231, 0.3229, ..., 0.7263, 0.8571, 0.9151]), + col_indices=tensor([45345, 56173, 61145, ..., 41289, 72782, 75243]), + values=tensor([0.0688, 0.1783, 0.1543, ..., 0.1655, 0.2729, 0.4150]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.5899, 0.0490, 0.9717, ..., 0.2037, 0.9811, 0.9760]) +tensor([0.3874, 0.1633, 0.6273, ..., 0.4912, 0.7210, 0.5103]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.331598043441772 seconds +Time: 10.68351411819458 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 9, ..., 499992, 499996, +tensor(crow_indices=tensor([ 0, 7, 11, ..., 499993, 499997, 500000]), - col_indices=tensor([44464, 48782, 50602, ..., 44812, 48851, 96308]), - values=tensor([0.0768, 0.4231, 0.3229, ..., 0.7263, 0.8571, 0.9151]), + col_indices=tensor([45345, 56173, 61145, ..., 41289, 72782, 75243]), + values=tensor([0.0688, 0.1783, 0.1543, ..., 0.1655, 0.2729, 0.4150]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.5899, 0.0490, 0.9717, ..., 0.2037, 0.9811, 0.9760]) +tensor([0.3874, 0.1633, 0.6273, ..., 0.4912, 0.7210, 0.5103]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.331598043441772 seconds +Time: 10.68351411819458 seconds -[41.17, 39.77, 40.36, 40.06, 41.66, 39.61, 39.74, 39.58, 39.55, 39.78] -[133.96] -13.087026119232178 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 83443, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.331598043441772, 'TIME_S_1KI': 0.12381623435688761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1753.1380189323427, 'W': 133.96} -[41.17, 39.77, 40.36, 40.06, 41.66, 39.61, 39.74, 39.58, 39.55, 39.78, 41.11, 39.76, 39.73, 39.7, 39.67, 40.13, 39.73, 39.66, 39.62, 39.8] -719.26 -35.963 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 83443, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.331598043441772, 'TIME_S_1KI': 0.12381623435688761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1753.1380189323427, 'W': 133.96, 'J_1KI': 21.010007057899916, 'W_1KI': 1.6054072840142373, 'W_D': 97.99700000000001, 'J_D': 1282.489298606396, 'W_D_1KI': 1.1744184652996656, 'J_D_1KI': 0.014074499542198454} +[47.53, 39.57, 39.66, 39.67, 39.71, 39.49, 40.16, 39.73, 39.99, 40.66] +[134.64] +13.043209552764893 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 84017, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.68351411819458, 'TIME_S_1KI': 0.1271589573323801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1756.137734184265, 'W': 134.64} +[47.53, 39.57, 39.66, 39.67, 39.71, 39.49, 40.16, 39.73, 39.99, 40.66, 41.15, 39.61, 39.86, 39.81, 39.78, 40.03, 39.61, 39.49, 45.38, 39.88] +726.16 +36.308 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 84017, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.68351411819458, 'TIME_S_1KI': 0.1271589573323801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1756.137734184265, 'W': 134.64, 'J_1KI': 20.902171396077758, 'W_1KI': 1.6025328207386598, 'W_D': 98.332, 'J_D': 1282.5648817424774, 'W_D_1KI': 1.170382184557887, 'J_D_1KI': 0.013930302016947607} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json index 846867c..b907b96 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 288187, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.643323421478271, "TIME_S_1KI": 0.03693200394701451, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1243.6877346038818, "W": 98.76, "J_1KI": 4.315558073764194, "W_1KI": 0.34269415344897586, "W_D": 63.04600000000001, "J_D": 793.9402279853822, "W_D_1KI": 0.21876767515536788, "J_D_1KI": 0.0007591170842382477} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 289973, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.402179718017578, "TIME_S_1KI": 0.035872925127572494, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1257.0382731056213, "W": 99.11, "J_1KI": 4.3350183400027635, "W_1KI": 0.3417904425584451, "W_D": 62.7475, "J_D": 795.8430939531327, "W_D_1KI": 0.21639083638821546, "J_D_1KI": 0.0007462447758522879} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output index c5c4dec..3a689e9 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02071547508239746} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.01844310760498047} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9999, 10000]), - col_indices=tensor([ 662, 2214, 9373, ..., 9890, 1994, 4209]), - values=tensor([0.5116, 0.1051, 0.5373, ..., 0.4151, 0.7725, 0.9175]), +tensor(crow_indices=tensor([ 0, 2, 3, ..., 9999, 9999, 10000]), + col_indices=tensor([5664, 7936, 1380, ..., 8294, 3730, 2763]), + values=tensor([0.9034, 0.8968, 0.1086, ..., 0.0542, 0.5046, 0.8853]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.1401, 0.0502, 0.3458, ..., 0.2506, 0.9913, 0.9973]) +tensor([0.8522, 0.7110, 0.6403, ..., 0.2419, 0.5994, 0.8952]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.02071547508239746 seconds +Time: 0.01844310760498047 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '50686', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.8467259407043457} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '56931', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.0614843368530273} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 5, ..., 9998, 10000, 10000]), - col_indices=tensor([2329, 7525, 8810, ..., 9177, 1519, 2359]), - values=tensor([0.1835, 0.9536, 0.7906, ..., 0.4035, 0.0564, 0.3832]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 9999, 10000, 10000]), + col_indices=tensor([3038, 3101, 58, ..., 2158, 2755, 7454]), + values=tensor([0.2769, 0.6754, 0.9765, ..., 0.0751, 0.2423, 0.1071]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.5918, 0.9817, 0.1058, ..., 0.3816, 0.0120, 0.7112]) +tensor([0.8739, 0.5769, 0.3937, ..., 0.8916, 0.1494, 0.8792]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 1.8467259407043457 seconds +Time: 2.0614843368530273 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '288187', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.643323421478271} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '289973', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.402179718017578} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9998, 10000]), - col_indices=tensor([5118, 9103, 6912, ..., 6081, 2494, 7728]), - values=tensor([0.1845, 0.4117, 0.0579, ..., 0.8363, 0.9429, 0.5429]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9996, 9999, 10000]), + col_indices=tensor([9316, 1893, 9256, ..., 5002, 7518, 9571]), + values=tensor([0.4065, 0.7850, 0.3297, ..., 0.8532, 0.1303, 0.8202]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.5505, 0.3516, 0.4265, ..., 0.6375, 0.7561, 0.2541]) +tensor([0.1843, 0.6409, 0.5987, ..., 0.8529, 0.6169, 0.0022]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.643323421478271 seconds +Time: 10.402179718017578 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9998, 10000]), - col_indices=tensor([5118, 9103, 6912, ..., 6081, 2494, 7728]), - values=tensor([0.1845, 0.4117, 0.0579, ..., 0.8363, 0.9429, 0.5429]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9996, 9999, 10000]), + col_indices=tensor([9316, 1893, 9256, ..., 5002, 7518, 9571]), + values=tensor([0.4065, 0.7850, 0.3297, ..., 0.8532, 0.1303, 0.8202]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.5505, 0.3516, 0.4265, ..., 0.6375, 0.7561, 0.2541]) +tensor([0.1843, 0.6409, 0.5987, ..., 0.8529, 0.6169, 0.0022]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.643323421478271 seconds +Time: 10.402179718017578 seconds -[40.51, 40.09, 39.52, 39.33, 39.47, 39.48, 39.43, 39.77, 39.53, 39.82] -[98.76] -12.59303092956543 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 288187, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.643323421478271, 'TIME_S_1KI': 0.03693200394701451, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1243.6877346038818, 'W': 98.76} -[40.51, 40.09, 39.52, 39.33, 39.47, 39.48, 39.43, 39.77, 39.53, 39.82, 41.15, 40.14, 40.36, 39.64, 39.34, 39.28, 39.84, 39.18, 39.56, 39.16] -714.28 -35.714 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 288187, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.643323421478271, 'TIME_S_1KI': 0.03693200394701451, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1243.6877346038818, 'W': 98.76, 'J_1KI': 4.315558073764194, 'W_1KI': 0.34269415344897586, 'W_D': 63.04600000000001, 'J_D': 793.9402279853822, 'W_D_1KI': 0.21876767515536788, 'J_D_1KI': 0.0007591170842382477} +[40.09, 39.43, 40.32, 39.57, 45.24, 39.33, 39.99, 39.89, 39.71, 39.43] +[99.11] +12.683263778686523 +{'CPU': 
'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 289973, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.402179718017578, 'TIME_S_1KI': 0.035872925127572494, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1257.0382731056213, 'W': 99.11} +[40.09, 39.43, 40.32, 39.57, 45.24, 39.33, 39.99, 39.89, 39.71, 39.43, 40.98, 45.17, 40.3, 39.23, 39.55, 39.48, 39.54, 39.24, 41.38, 39.26] +727.25 +36.3625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 289973, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.402179718017578, 'TIME_S_1KI': 0.035872925127572494, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1257.0382731056213, 'W': 99.11, 'J_1KI': 4.3350183400027635, 'W_1KI': 0.3417904425584451, 'W_D': 62.7475, 'J_D': 795.8430939531327, 'W_D_1KI': 0.21639083638821546, 'J_D_1KI': 0.0007462447758522879} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json index d5b0fd0..965ed1f 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 192082, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.38179612159729, "TIME_S_1KI": 0.054048771470503694, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1536.0875409460068, "W": 107.47, "J_1KI": 7.997040539696623, "W_1KI": 0.5595006299392968, "W_D": 71.52775, "J_D": 1022.3586638773679, "W_D_1KI": 0.3723813267250445, "J_D_1KI": 0.0019386581081259277} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 195124, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.008607864379883, "TIME_S_1KI": 0.056418522910456334, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1502.995405883789, "W": 107.68, "J_1KI": 7.7027705760633705, "W_1KI": 0.551854205530842, "W_D": 71.108, "J_D": 992.5241207427979, "W_D_1KI": 0.36442467354092783, "J_D_1KI": 0.0018676568415004193} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output index 41b527b..f366c5f 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02809286117553711} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, 
"MATRIX_DENSITY": 0.001, "TIME_S": 0.024670839309692383} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 18, ..., 99983, 99993, +tensor(crow_indices=tensor([ 0, 9, 26, ..., 99973, 99990, 100000]), - col_indices=tensor([2653, 3722, 5304, ..., 7707, 8674, 8869]), - values=tensor([0.5856, 0.9425, 0.9349, ..., 0.4089, 0.4268, 0.7151]), + col_indices=tensor([ 472, 3265, 3392, ..., 3557, 6998, 8496]), + values=tensor([0.9970, 0.8966, 0.8111, ..., 0.5588, 0.4205, 0.6657]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5412, 0.5320, 0.4895, ..., 0.9332, 0.4774, 0.7844]) +tensor([0.8047, 0.1107, 0.7030, ..., 0.4801, 0.5633, 0.4095]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.02809286117553711 seconds +Time: 0.024670839309692383 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '37376', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.043123722076416} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '42560', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.2902352809906006} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 18, 27, ..., 99987, 99996, +tensor(crow_indices=tensor([ 0, 13, 29, ..., 99985, 99992, 100000]), - col_indices=tensor([ 496, 1705, 2513, ..., 6230, 8377, 9882]), - values=tensor([0.7106, 0.5928, 0.5041, ..., 0.9691, 0.8218, 0.7424]), + col_indices=tensor([ 661, 1907, 2234, ..., 7098, 7592, 9213]), + values=tensor([0.9203, 0.6464, 0.5146, ..., 0.9440, 0.8063, 0.5593]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6869, 0.6587, 0.3624, ..., 0.7168, 0.6886, 0.1198]) +tensor([0.7724, 0.0256, 0.1655, ..., 0.1624, 0.5094, 0.8407]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 2.043123722076416 seconds +Time: 2.2902352809906006 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '192082', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.38179612159729} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '195124', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 11.008607864379883} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 28, ..., 99972, 99987, +tensor(crow_indices=tensor([ 0, 6, 18, ..., 99982, 99991, 100000]), - col_indices=tensor([ 27, 2567, 2642, ..., 7209, 7267, 7735]), - values=tensor([0.8851, 0.6027, 0.9664, ..., 0.7310, 0.7426, 0.3698]), + col_indices=tensor([ 16, 1284, 1794, ..., 7682, 8352, 8990]), + values=tensor([0.6866, 0.8260, 0.6116, ..., 0.7861, 0.8522, 0.8329]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8025, 0.3022, 0.3457, ..., 0.3811, 0.1140, 0.9144]) +tensor([0.3102, 0.5794, 0.7585, ..., 0.2279, 0.2498, 0.6683]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.38179612159729 seconds +Time: 11.008607864379883 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 14, 28, ..., 99972, 99987, +tensor(crow_indices=tensor([ 0, 6, 18, ..., 99982, 99991, 100000]), - col_indices=tensor([ 27, 2567, 2642, ..., 7209, 7267, 7735]), - values=tensor([0.8851, 0.6027, 0.9664, ..., 0.7310, 0.7426, 0.3698]), + col_indices=tensor([ 16, 1284, 1794, ..., 7682, 8352, 8990]), + values=tensor([0.6866, 0.8260, 0.6116, ..., 0.7861, 0.8522, 0.8329]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.8025, 0.3022, 0.3457, ..., 0.3811, 0.1140, 0.9144]) +tensor([0.3102, 0.5794, 0.7585, ..., 0.2279, 0.2498, 0.6683]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.38179612159729 seconds +Time: 11.008607864379883 seconds -[40.11, 39.55, 39.33, 44.82, 39.9, 39.64, 39.98, 39.29, 39.99, 39.21] -[107.47] -14.293175220489502 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 192082, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.38179612159729, 'TIME_S_1KI': 0.054048771470503694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.0875409460068, 'W': 107.47} -[40.11, 39.55, 39.33, 44.82, 39.9, 39.64, 39.98, 39.29, 39.99, 39.21, 40.63, 39.47, 39.85, 39.28, 39.66, 39.35, 39.46, 39.87, 39.78, 39.3] -718.845 -35.94225 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 192082, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.38179612159729, 'TIME_S_1KI': 0.054048771470503694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.0875409460068, 'W': 107.47, 'J_1KI': 7.997040539696623, 'W_1KI': 0.5595006299392968, 'W_D': 71.52775, 'J_D': 1022.3586638773679, 'W_D_1KI': 0.3723813267250445, 'J_D_1KI': 0.0019386581081259277} +[40.48, 39.36, 39.67, 39.94, 39.35, 39.97, 40.11, 39.3, 39.28, 39.21] +[107.68] +13.95798110961914 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 195124, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.008607864379883, 'TIME_S_1KI': 0.056418522910456334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.995405883789, 'W': 107.68} +[40.48, 39.36, 39.67, 39.94, 39.35, 39.97, 40.11, 39.3, 39.28, 39.21, 40.79, 39.88, 40.47, 39.31, 39.8, 39.64, 39.75, 55.65, 39.37, 40.7] +731.44 +36.572 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 195124, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 11.008607864379883, 'TIME_S_1KI': 0.056418522910456334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1502.995405883789, 'W': 107.68, 'J_1KI': 7.7027705760633705, 'W_1KI': 0.551854205530842, 'W_D': 71.108, 'J_D': 992.5241207427979, 'W_D_1KI': 0.36442467354092783, 'J_D_1KI': 0.0018676568415004193} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json index c0a8393..b099625 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 102052, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.32213807106018, "TIME_S_1KI": 0.10114586750931075, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1720.3125169992445, "W": 134.17, "J_1KI": 16.857215115815904, "W_1KI": 1.3147219064790499, "W_D": 98.29849999999999, "J_D": 1260.3722139990327, "W_D_1KI": 0.9632197311174694, "J_D_1KI": 0.0094385189032794} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 106323, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.322136163711548, "TIME_S_1KI": 0.09708281523011528, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1775.71419588089, "W": 134.46, "J_1KI": 16.701129538113953, "W_1KI": 1.2646370023419204, "W_D": 98.46650000000001, "J_D": 1300.3745490756037, "W_D_1KI": 0.9261072392614957, "J_D_1KI": 0.00871031892686903} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output index 3b630d1..c1541bb 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.04350447654724121} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.07525515556335449} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 98, 200, ..., 999802, - 999895, 1000000]), - col_indices=tensor([ 47, 93, 107, ..., 9931, 9947, 9964]), - values=tensor([0.2387, 0.2735, 0.7135, ..., 0.1692, 0.6802, 0.4186]), +tensor(crow_indices=tensor([ 0, 96, 190, ..., 999807, + 999911, 1000000]), + col_indices=tensor([ 14, 77, 173, ..., 9899, 9902, 9995]), + values=tensor([0.5533, 0.3727, 0.7903, ..., 0.3886, 0.8573, 0.3671]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2660, 0.3479, 0.7430, ..., 0.3350, 0.7379, 0.6869]) +tensor([0.6975, 0.9655, 0.7366, ..., 0.8197, 0.4964, 0.3947]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 0.04350447654724121 seconds +Time: 0.07525515556335449 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '24135', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.483203411102295} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '13952', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.451746940612793} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 121, 211, ..., 999813, - 999902, 1000000]), - col_indices=tensor([ 152, 193, 233, ..., 9824, 9889, 9990]), - values=tensor([0.9787, 0.2142, 0.0572, ..., 0.5889, 0.8836, 0.8390]), +tensor(crow_indices=tensor([ 0, 72, 172, ..., 999809, + 999904, 1000000]), + col_indices=tensor([ 276, 344, 376, ..., 9517, 9819, 9830]), + values=tensor([0.1929, 0.5167, 0.0076, ..., 0.1328, 0.6457, 0.0820]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.2484, 0.0072, 0.5266, ..., 0.8378, 0.3257, 0.7895]) +tensor([0.0756, 0.3906, 0.9571, ..., 0.7269, 0.5864, 0.5417]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 2.483203411102295 seconds +Time: 1.451746940612793 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '102052', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.32213807106018} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100910', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.965423345565796} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 109, 197, ..., 999808, - 999909, 1000000]), - col_indices=tensor([ 12, 158, 312, ..., 9915, 9965, 9970]), - values=tensor([0.1097, 0.7996, 0.8802, ..., 0.2965, 0.2793, 0.1775]), +tensor(crow_indices=tensor([ 0, 105, 193, ..., 999820, + 999910, 1000000]), + col_indices=tensor([ 57, 196, 279, ..., 9777, 9791, 9801]), + values=tensor([0.4514, 0.6194, 0.2391, ..., 0.7664, 0.6835, 0.8970]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.4138, 0.6115, 0.5428, ..., 0.5829, 0.0748, 0.9104]) +tensor([0.6512, 0.1898, 0.7873, ..., 0.0404, 0.8525, 0.9858]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.32213807106018 seconds +Time: 9.965423345565796 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '106323', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.322136163711548} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 109, 197, ..., 999808, - 999909, 1000000]), - col_indices=tensor([ 12, 158, 312, ..., 9915, 9965, 9970]), - values=tensor([0.1097, 0.7996, 0.8802, ..., 0.2965, 0.2793, 0.1775]), +tensor(crow_indices=tensor([ 0, 106, 209, ..., 999783, + 999891, 1000000]), + col_indices=tensor([ 14, 49, 70, ..., 9716, 9833, 9895]), + values=tensor([0.0681, 0.7835, 0.1986, ..., 0.8581, 0.4299, 0.1908]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.4138, 0.6115, 0.5428, ..., 0.5829, 0.0748, 0.9104]) +tensor([0.6587, 0.6689, 0.7198, ..., 0.8761, 0.4647, 0.5939]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +76,30 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.32213807106018 seconds +Time: 10.322136163711548 seconds -[40.44, 39.51, 39.85, 40.81, 39.86, 41.16, 39.43, 39.73, 39.64, 39.53] -[134.17] -12.821886539459229 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102052, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.32213807106018, 'TIME_S_1KI': 0.10114586750931075, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1720.3125169992445, 'W': 134.17} -[40.44, 39.51, 39.85, 40.81, 39.86, 41.16, 39.43, 39.73, 39.64, 39.53, 40.32, 40.34, 39.64, 39.51, 39.49, 39.33, 39.71, 39.95, 39.67, 39.31] -717.43 -35.8715 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102052, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.32213807106018, 'TIME_S_1KI': 
0.10114586750931075, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1720.3125169992445, 'W': 134.17, 'J_1KI': 16.857215115815904, 'W_1KI': 1.3147219064790499, 'W_D': 98.29849999999999, 'J_D': 1260.3722139990327, 'W_D_1KI': 0.9632197311174694, 'J_D_1KI': 0.0094385189032794} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 106, 209, ..., 999783, + 999891, 1000000]), + col_indices=tensor([ 14, 49, 70, ..., 9716, 9833, 9895]), + values=tensor([0.0681, 0.7835, 0.1986, ..., 0.8581, 0.4299, 0.1908]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.6587, 0.6689, 0.7198, ..., 0.8761, 0.4647, 0.5939]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.322136163711548 seconds + +[40.63, 41.42, 40.27, 39.92, 39.54, 39.44, 39.67, 39.68, 39.88, 39.61] +[134.46] +13.206263542175293 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 106323, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.322136163711548, 'TIME_S_1KI': 0.09708281523011528, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1775.71419588089, 'W': 134.46} +[40.63, 41.42, 40.27, 39.92, 39.54, 39.44, 39.67, 39.68, 39.88, 39.61, 40.19, 39.89, 39.99, 40.22, 39.94, 39.79, 40.86, 39.77, 39.58, 39.59] +719.87 +35.9935 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 106323, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.322136163711548, 'TIME_S_1KI': 0.09708281523011528, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1775.71419588089, 'W': 134.46, 'J_1KI': 16.701129538113953, 'W_1KI': 1.2646370023419204, 'W_D': 98.46650000000001, 'J_D': 1300.3745490756037, 'W_D_1KI': 0.9261072392614957, 'J_D_1KI': 0.00871031892686903} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json index 0c8793e..c4d1519 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 27901, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.459260940551758, "TIME_S_1KI": 0.3748704684617669, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2101.511001520157, "W": 151.74, "J_1KI": 75.32027531343526, "W_1KI": 5.43851474857532, "W_D": 115.6735, "J_D": 1602.0108925421239, "W_D_1KI": 4.145854987276442, "J_D_1KI": 0.14859162708420637} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 27006, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 
0.05, "TIME_S": 10.041618347167969, "TIME_S_1KI": 0.3718291619331989, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2050.3459711432456, "W": 151.98999999999998, "J_1KI": 75.92186814571745, "W_1KI": 5.628008590683551, "W_D": 115.43924999999999, "J_D": 1557.2761441496014, "W_D_1KI": 4.274577871584092, "J_D_1KI": 0.1582825250531027} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output index d46dec8..be67e6c 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.07326960563659668} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.07368612289428711} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 503, 989, ..., 4998955, - 4999450, 5000000]), - col_indices=tensor([ 38, 72, 81, ..., 9956, 9978, 9983]), - values=tensor([0.2927, 0.3163, 0.4567, ..., 0.1935, 0.9639, 0.3715]), +tensor(crow_indices=tensor([ 0, 504, 1002, ..., 4999005, + 4999503, 5000000]), + col_indices=tensor([ 24, 56, 60, ..., 9903, 9934, 9970]), + values=tensor([0.3302, 0.5478, 0.2112, ..., 0.1196, 0.3028, 0.5060]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.4283, 0.0472, 0.5653, ..., 0.2916, 0.5894, 0.9993]) +tensor([0.9097, 0.8385, 0.6941, ..., 0.2367, 0.9571, 0.7158]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 0.07326960563659668 seconds +Time: 0.07368612289428711 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14330', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 5.392660140991211} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14249', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 5.53993821144104} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 524, 1025, ..., 4998972, - 4999489, 5000000]), - col_indices=tensor([ 6, 15, 16, ..., 9973, 9985, 9996]), - values=tensor([0.1466, 0.4320, 0.8734, ..., 0.2839, 0.7163, 0.2149]), +tensor(crow_indices=tensor([ 0, 503, 985, ..., 4999025, + 4999514, 5000000]), + col_indices=tensor([ 21, 39, 75, ..., 9920, 9938, 9983]), + values=tensor([0.8595, 0.9918, 0.9127, ..., 0.5649, 0.2080, 0.0100]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6711, 0.1737, 0.7087, ..., 0.1819, 0.7746, 0.6924]) +tensor([0.2539, 0.1414, 0.6201, ..., 0.7647, 0.9191, 0.5645]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 5.392660140991211 seconds +Time: 5.53993821144104 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27901', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.459260940551758} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27006', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.041618347167969} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 478, 963, ..., 4998976, - 4999469, 5000000]), - col_indices=tensor([ 4, 6, 8, ..., 9977, 9981, 9998]), - values=tensor([0.4938, 0.7817, 0.2868, ..., 0.2355, 0.4075, 0.9137]), +tensor(crow_indices=tensor([ 0, 520, 1016, ..., 4998997, + 4999496, 5000000]), + col_indices=tensor([ 56, 89, 123, ..., 9981, 9985, 9989]), + values=tensor([0.4302, 0.2299, 0.3814, ..., 0.0183, 0.5254, 0.1358]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7828, 0.6669, 0.8649, ..., 0.0217, 0.0077, 0.7398]) +tensor([0.6789, 0.6658, 0.3096, ..., 0.0338, 0.4560, 0.9370]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.459260940551758 seconds +Time: 10.041618347167969 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 478, 963, ..., 4998976, - 4999469, 5000000]), - col_indices=tensor([ 4, 6, 8, ..., 9977, 9981, 9998]), - values=tensor([0.4938, 0.7817, 0.2868, ..., 0.2355, 0.4075, 0.9137]), +tensor(crow_indices=tensor([ 0, 520, 1016, ..., 4998997, + 4999496, 5000000]), + col_indices=tensor([ 56, 89, 123, ..., 9981, 9985, 9989]), + values=tensor([0.4302, 0.2299, 0.3814, ..., 0.0183, 0.5254, 0.1358]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7828, 0.6669, 0.8649, ..., 0.0217, 0.0077, 0.7398]) +tensor([0.6789, 0.6658, 0.3096, ..., 0.0338, 0.4560, 0.9370]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.459260940551758 seconds +Time: 10.041618347167969 seconds -[40.28, 39.74, 40.09, 39.52, 39.81, 39.49, 40.09, 39.86, 39.96, 39.9] -[151.74] -13.849420070648193 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27901, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.459260940551758, 'TIME_S_1KI': 0.3748704684617669, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2101.511001520157, 'W': 151.74} -[40.28, 39.74, 40.09, 39.52, 39.81, 39.49, 40.09, 39.86, 39.96, 39.9, 41.6, 42.09, 41.06, 39.67, 39.75, 39.6, 39.57, 40.11, 40.26, 39.54] -721.33 -36.066500000000005 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27901, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.459260940551758, 'TIME_S_1KI': 0.3748704684617669, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2101.511001520157, 'W': 151.74, 'J_1KI': 75.32027531343526, 'W_1KI': 5.43851474857532, 'W_D': 115.6735, 'J_D': 1602.0108925421239, 'W_D_1KI': 4.145854987276442, 'J_D_1KI': 0.14859162708420637} +[41.81, 39.96, 40.78, 39.7, 39.78, 45.53, 39.97, 39.66, 39.66, 39.55] +[151.99] +13.490005731582642 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27006, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.041618347167969, 'TIME_S_1KI': 0.3718291619331989, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2050.3459711432456, 'W': 151.98999999999998} +[41.81, 39.96, 40.78, 39.7, 39.78, 45.53, 39.97, 39.66, 39.66, 39.55, 40.64, 44.96, 39.69, 39.49, 39.83, 39.47, 40.23, 39.93, 41.25, 40.25] +731.015 +36.55075 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27006, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.041618347167969, 'TIME_S_1KI': 0.3718291619331989, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2050.3459711432456, 'W': 151.98999999999998, 'J_1KI': 75.92186814571745, 'W_1KI': 5.628008590683551, 'W_D': 115.43924999999999, 'J_D': 1557.2761441496014, 'W_D_1KI': 4.274577871584092, 'J_D_1KI': 0.1582825250531027} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json index 2bdc411..56c87c7 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4895, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.465762615203857, "TIME_S_1KI": 2.3423416987137604, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1961.564714949131, "W": 125.13, "J_1KI": 400.72823594466416, "W_1KI": 25.562819203268642, "W_D": 88.61599999999999, "J_D": 1389.1634202823636, "W_D_1KI": 18.10337078651685, "J_D_1KI": 3.698339282230205} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 5118, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 12.526283264160156, "TIME_S_1KI": 2.4474957530598194, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1992.3463458251954, "W": 126.08, "J_1KI": 389.28220903188657, "W_1KI": 24.634622899570147, "W_D": 89.46600000000001, "J_D": 1413.7631517734528, "W_D_1KI": 17.480656506447833, "J_D_1KI": 3.415524913334864} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output index c73c74f..e71985c 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.26529860496520996} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.26447367668151855} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 948, 1895, ..., 9998021, - 9999002, 10000000]), - col_indices=tensor([ 3, 4, 24, ..., 9958, 9984, 9986]), - values=tensor([0.2249, 0.5337, 0.8362, ..., 0.6636, 0.7975, 0.6242]), +tensor(crow_indices=tensor([ 0, 982, 1983, ..., 9997965, + 9999026, 10000000]), + col_indices=tensor([ 1, 6, 14, ..., 9961, 9972, 9977]), + values=tensor([0.8398, 0.8694, 0.5639, ..., 0.7105, 0.5707, 0.6304]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1264, 0.7394, 0.5519, ..., 0.0745, 0.0081, 0.2644]) +tensor([0.9072, 0.8563, 0.2325, ..., 0.0740, 0.0762, 0.0501]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 0.26529860496520996 seconds +Time: 0.26447367668151855 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3957', '-ss', '10000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.487387418746948} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3970', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.143938302993774} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 978, 1933, ..., 9998004, - 9999014, 10000000]), - col_indices=tensor([ 3, 5, 6, ..., 9972, 9982, 9998]), - values=tensor([0.7080, 0.9187, 0.9413, ..., 0.1315, 0.2244, 0.9797]), +tensor(crow_indices=tensor([ 0, 962, 1945, ..., 9998016, + 9999016, 10000000]), + col_indices=tensor([ 14, 21, 22, ..., 9984, 9990, 9991]), + values=tensor([0.7193, 0.5646, 0.3307, ..., 0.4459, 0.1754, 0.3663]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.5306, 0.8726, 0.4027, ..., 0.7037, 0.0033, 0.8016]) +tensor([0.1647, 0.5409, 0.1205, ..., 0.9066, 0.8390, 0.9270]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 8.487387418746948 seconds +Time: 8.143938302993774 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4895', '-ss', '10000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.465762615203857} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5118', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 12.526283264160156} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 986, 1965, ..., 9997996, - 9999022, 10000000]), - col_indices=tensor([ 5, 25, 37, ..., 9984, 9993, 9998]), - values=tensor([0.8800, 0.4752, 0.0446, ..., 0.6391, 0.5084, 0.8692]), +tensor(crow_indices=tensor([ 0, 996, 2041, ..., 9997995, + 9998945, 10000000]), + col_indices=tensor([ 1, 3, 9, ..., 9952, 9968, 9996]), + values=tensor([0.0713, 0.2648, 0.4521, ..., 0.1066, 0.1213, 0.7606]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6122, 0.5951, 0.3953, ..., 0.4999, 0.2315, 0.6538]) +tensor([0.7059, 0.0653, 0.1181, ..., 0.5566, 0.2736, 0.8378]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 11.465762615203857 seconds +Time: 12.526283264160156 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 986, 1965, ..., 9997996, - 9999022, 10000000]), - col_indices=tensor([ 5, 25, 37, ..., 9984, 9993, 9998]), - values=tensor([0.8800, 0.4752, 0.0446, ..., 0.6391, 0.5084, 0.8692]), +tensor(crow_indices=tensor([ 0, 996, 2041, ..., 9997995, + 9998945, 10000000]), + col_indices=tensor([ 1, 3, 9, ..., 9952, 9968, 9996]), + values=tensor([0.0713, 0.2648, 0.4521, ..., 0.1066, 0.1213, 0.7606]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6122, 0.5951, 0.3953, ..., 0.4999, 0.2315, 0.6538]) +tensor([0.7059, 0.0653, 0.1181, ..., 0.5566, 0.2736, 0.8378]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 11.465762615203857 seconds +Time: 12.526283264160156 seconds -[40.6, 45.25, 40.43, 39.72, 40.94, 40.35, 40.49, 39.74, 42.24, 39.59] -[125.13] -15.676214456558228 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4895, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.465762615203857, 'TIME_S_1KI': 2.3423416987137604, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1961.564714949131, 'W': 125.13} -[40.6, 45.25, 40.43, 39.72, 40.94, 40.35, 40.49, 39.74, 42.24, 39.59, 41.13, 40.62, 40.17, 40.53, 40.27, 40.01, 39.62, 39.57, 39.92, 39.5] -730.2800000000001 -36.514 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4895, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.465762615203857, 'TIME_S_1KI': 2.3423416987137604, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1961.564714949131, 'W': 125.13, 'J_1KI': 400.72823594466416, 'W_1KI': 25.562819203268642, 'W_D': 88.61599999999999, 'J_D': 1389.1634202823636, 'W_D_1KI': 18.10337078651685, 'J_D_1KI': 3.698339282230205} +[41.69, 40.21, 40.3, 40.12, 39.85, 39.74, 39.78, 45.25, 40.47, 39.68] 
+[126.08] +15.802239418029785 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 5118, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 12.526283264160156, 'TIME_S_1KI': 2.4474957530598194, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1992.3463458251954, 'W': 126.08} +[41.69, 40.21, 40.3, 40.12, 39.85, 39.74, 39.78, 45.25, 40.47, 39.68, 40.53, 45.71, 40.42, 40.02, 40.48, 39.61, 40.13, 39.7, 39.72, 39.64] +732.28 +36.614 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 5118, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 12.526283264160156, 'TIME_S_1KI': 2.4474957530598194, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1992.3463458251954, 'W': 126.08, 'J_1KI': 389.28220903188657, 'W_1KI': 24.634622899570147, 'W_D': 89.46600000000001, 'J_D': 1413.7631517734528, 'W_D_1KI': 17.480656506447833, 'J_D_1KI': 3.415524913334864} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.json index 0c05e29..ae8bf07 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2082, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.557694435119629, "TIME_S_1KI": 5.070938729644394, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1859.5419220256806, "W": 120.04, "J_1KI": 893.1517396857256, "W_1KI": 57.656099903938525, "W_D": 83.7395, "J_D": 1297.2101864334345, "W_D_1KI": 40.22070124879924, "J_D_1KI": 19.31830031162307} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2093, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.486551761627197, "TIME_S_1KI": 5.010297067189296, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1924.8615674543382, "W": 120.01, "J_1KI": 919.6663007426365, "W_1KI": 57.33874820831343, "W_D": 72.8605, "J_D": 1168.62241676116, "W_D_1KI": 34.81151457238414, "J_D_1KI": 16.632352877393284} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.output index 68bbca5..616267b 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.5041141510009766} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, 
"MATRIX_DENSITY": 0.2, "TIME_S": 0.5015337467193604} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2037, 4062, ..., 19995956, - 19997954, 20000000]), - col_indices=tensor([ 0, 3, 5, ..., 9996, 9997, 9998]), - values=tensor([0.3088, 0.0777, 0.1762, ..., 0.6057, 0.6562, 0.8467]), +tensor(crow_indices=tensor([ 0, 2065, 4088, ..., 19995990, + 19997962, 20000000]), + col_indices=tensor([ 0, 2, 8, ..., 9965, 9993, 9996]), + values=tensor([0.1160, 0.3460, 0.9869, ..., 0.8626, 0.3191, 0.7812]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.5661, 0.3739, 0.3594, ..., 0.8068, 0.7143, 0.9609]) +tensor([0.3538, 0.1952, 0.7791, ..., 0.6959, 0.6589, 0.4364]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 0.5041141510009766 seconds +Time: 0.5015337467193604 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2082', '-ss', '10000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.557694435119629} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2093', '-ss', '10000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.486551761627197} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1900, 3860, ..., 19996021, - 19998015, 20000000]), - col_indices=tensor([ 0, 3, 11, ..., 9989, 9992, 9996]), - values=tensor([0.1071, 0.4523, 0.1080, ..., 0.2881, 0.4034, 0.8495]), +tensor(crow_indices=tensor([ 0, 2091, 4151, ..., 19995949, + 19997994, 20000000]), + col_indices=tensor([ 0, 6, 11, ..., 9981, 9985, 9986]), + values=tensor([0.3936, 0.6857, 0.7214, ..., 0.8889, 0.2946, 0.7271]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.3748, 0.6654, 0.9133, ..., 0.7126, 0.6760, 0.9288]) +tensor([0.6947, 0.8976, 0.6132, ..., 0.3430, 0.6838, 0.4582]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.557694435119629 seconds +Time: 10.486551761627197 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1900, 3860, ..., 19996021, - 19998015, 20000000]), - col_indices=tensor([ 0, 3, 11, ..., 9989, 9992, 9996]), - values=tensor([0.1071, 0.4523, 0.1080, ..., 0.2881, 0.4034, 0.8495]), +tensor(crow_indices=tensor([ 0, 2091, 4151, ..., 19995949, + 19997994, 20000000]), + col_indices=tensor([ 0, 6, 11, ..., 9981, 9985, 9986]), + values=tensor([0.3936, 0.6857, 0.7214, ..., 0.8889, 0.2946, 0.7271]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.3748, 0.6654, 0.9133, ..., 0.7126, 0.6760, 0.9288]) +tensor([0.6947, 0.8976, 0.6132, ..., 0.3430, 0.6838, 0.4582]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.557694435119629 seconds +Time: 10.486551761627197 seconds -[40.83, 40.15, 39.72, 39.76, 39.84, 39.84, 40.49, 39.96, 40.13, 39.73] -[120.04] -15.491019010543823 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2082, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.557694435119629, 'TIME_S_1KI': 5.070938729644394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1859.5419220256806, 'W': 120.04} -[40.83, 40.15, 39.72, 39.76, 39.84, 39.84, 40.49, 39.96, 40.13, 39.73, 41.31, 40.05, 40.38, 40.21, 39.71, 39.65, 45.13, 39.78, 40.38, 39.79] -726.01 -36.3005 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2082, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.557694435119629, 'TIME_S_1KI': 5.070938729644394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1859.5419220256806, 'W': 120.04, 'J_1KI': 893.1517396857256, 'W_1KI': 57.656099903938525, 'W_D': 83.7395, 'J_D': 1297.2101864334345, 'W_D_1KI': 40.22070124879924, 'J_D_1KI': 19.31830031162307} +[40.85, 39.59, 39.61, 40.83, 40.02, 40.07, 39.94, 39.43, 39.49, 39.51] +[120.01] +16.03917646408081 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2093, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.486551761627197, 'TIME_S_1KI': 5.010297067189296, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1924.8615674543382, 'W': 120.01} +[40.85, 39.59, 39.61, 40.83, 40.02, 40.07, 39.94, 39.43, 39.49, 39.51, 72.32, 77.64, 65.79, 65.84, 68.15, 63.72, 61.71, 61.72, 62.84, 40.52] +942.9899999999999 +47.149499999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2093, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.486551761627197, 'TIME_S_1KI': 5.010297067189296, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1924.8615674543382, 'W': 120.01, 'J_1KI': 919.6663007426365, 'W_1KI': 57.33874820831343, 'W_D': 72.8605, 'J_D': 1168.62241676116, 'W_D_1KI': 34.81151457238414, 'J_D_1KI': 16.632352877393284} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.json index b94bf6c..7250125 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1470, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.845632314682007, "TIME_S_1KI": 7.377981166450344, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2161.157882115841, "W": 117.03, "J_1KI": 1470.1754300107762, "W_1KI": 79.61224489795917, "W_D": 80.6345, "J_D": 1489.0531081386805, "W_D_1KI": 54.853401360544225, "J_D_1KI": 37.315239020778385} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1381, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.009110450744629, "TIME_S_1KI": 7.247726611690536, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2082.98861492157, "W": 115.78, "J_1KI": 1508.3190549757928, "W_1KI": 83.83779869659668, "W_D": 79.56150000000001, "J_D": 1431.3845110216143, "W_D_1KI": 57.6115133960898, "J_D_1KI": 41.71724358876886} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.output index 4e7bca2..6cad927 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.7518825531005859} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 2.4347083568573} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2974, 5950, ..., 29994087, - 29997108, 30000000]), - col_indices=tensor([ 6, 8, 10, ..., 9985, 9992, 9996]), - values=tensor([0.7151, 0.6737, 0.4043, ..., 0.5812, 0.5679, 0.6733]), +tensor(crow_indices=tensor([ 0, 2970, 6030, ..., 29993988, + 29996992, 30000000]), + col_indices=tensor([ 5, 8, 12, ..., 9995, 9996, 9999]), + values=tensor([0.4708, 0.4086, 0.5052, ..., 0.9718, 0.1326, 0.4183]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.6135, 0.6008, 0.5882, ..., 0.6628, 0.8539, 0.9204]) +tensor([0.4875, 0.8736, 0.6610, ..., 0.0907, 0.9780, 0.5227]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 0.7518825531005859 seconds +Time: 2.4347083568573 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1396', '-ss', '10000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.968559503555298} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '431', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 8.85914397239685} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3001, 6027, ..., 29994039, - 29997052, 30000000]), - col_indices=tensor([ 7, 11, 16, ..., 9989, 9996, 9999]), - values=tensor([0.2908, 0.3192, 0.9662, ..., 0.5726, 0.8523, 0.1200]), +tensor(crow_indices=tensor([ 0, 2975, 5968, ..., 29993983, + 29997045, 30000000]), + col_indices=tensor([ 1, 6, 11, ..., 9991, 9994, 9995]), + values=tensor([0.5155, 0.2760, 0.9456, ..., 0.6815, 0.0937, 0.4855]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.6009, 0.9845, 0.3791, ..., 0.1987, 0.1714, 0.4278]) +tensor([0.7624, 0.6706, 0.0728, ..., 0.7684, 0.7487, 0.4885]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 9.968559503555298 seconds +Time: 8.85914397239685 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1470', '-ss', '10000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.845632314682007} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '510', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 3.8749990463256836} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2929, 5851, ..., 29993945, - 29997029, 30000000]), - col_indices=tensor([ 6, 7, 11, ..., 9986, 9997, 9998]), - values=tensor([0.6210, 0.5427, 0.8130, ..., 0.4194, 0.0441, 0.7442]), +tensor(crow_indices=tensor([ 0, 3028, 6043, ..., 29994061, + 29997045, 30000000]), + col_indices=tensor([ 2, 7, 10, ..., 9994, 9997, 9998]), + values=tensor([0.0847, 0.6146, 0.4143, ..., 0.8875, 0.3135, 0.7316]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.0141, 0.0033, 0.5199, ..., 0.4699, 0.7276, 0.5761]) +tensor([0.4991, 0.4556, 0.0586, ..., 0.4688, 0.0146, 0.1894]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 10.845632314682007 seconds +Time: 3.8749990463256836 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1381', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.009110450744629} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2929, 5851, ..., 29993945, - 29997029, 30000000]), - col_indices=tensor([ 6, 7, 11, ..., 9986, 9997, 9998]), - values=tensor([0.6210, 0.5427, 0.8130, ..., 0.4194, 0.0441, 0.7442]), +tensor(crow_indices=tensor([ 0, 2980, 6008, ..., 29994009, + 29997042, 30000000]), + col_indices=tensor([ 6, 10, 12, ..., 9993, 9994, 9997]), + values=tensor([0.7122, 0.9908, 0.6087, ..., 0.3080, 0.7079, 0.9704]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.0141, 0.0033, 0.5199, ..., 0.4699, 0.7276, 0.5761]) +tensor([0.8824, 0.6919, 0.2244, ..., 0.2825, 0.9269, 0.1789]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +76,30 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 10.845632314682007 seconds +Time: 10.009110450744629 seconds -[46.28, 39.97, 40.2, 40.69, 40.27, 40.47, 40.33, 41.03, 39.84, 40.37] -[117.03] -18.466699838638306 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1470, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.845632314682007, 'TIME_S_1KI': 7.377981166450344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2161.157882115841, 'W': 117.03} -[46.28, 39.97, 40.2, 40.69, 40.27, 40.47, 40.33, 41.03, 39.84, 40.37, 41.81, 40.41, 40.59, 39.82, 40.04, 39.71, 39.78, 40.7, 39.78, 40.1] -727.9100000000001 -36.395500000000006 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1470, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 
10.845632314682007, 'TIME_S_1KI': 7.377981166450344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2161.157882115841, 'W': 117.03, 'J_1KI': 1470.1754300107762, 'W_1KI': 79.61224489795917, 'W_D': 80.6345, 'J_D': 1489.0531081386805, 'W_D_1KI': 54.853401360544225, 'J_D_1KI': 37.315239020778385} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2980, 6008, ..., 29994009, + 29997042, 30000000]), + col_indices=tensor([ 6, 10, 12, ..., 9993, 9994, 9997]), + values=tensor([0.7122, 0.9908, 0.6087, ..., 0.3080, 0.7079, 0.9704]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.8824, 0.6919, 0.2244, ..., 0.2825, 0.9269, 0.1789]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.009110450744629 seconds + +[43.41, 40.29, 39.95, 39.78, 39.85, 40.23, 40.43, 39.73, 41.31, 41.06] +[115.78] +17.99091911315918 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1381, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.009110450744629, 'TIME_S_1KI': 7.247726611690536, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2082.98861492157, 'W': 115.78} +[43.41, 40.29, 39.95, 39.78, 39.85, 40.23, 40.43, 39.73, 41.31, 41.06, 41.55, 39.88, 39.85, 39.74, 39.98, 39.72, 40.61, 40.3, 39.85, 39.72] +724.3699999999999 +36.21849999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1381, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.009110450744629, 'TIME_S_1KI': 7.247726611690536, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2082.98861492157, 'W': 115.78, 'J_1KI': 1508.3190549757928, 'W_1KI': 83.83779869659668, 'W_D': 79.56150000000001, 'J_D': 1431.3845110216143, 'W_D_1KI': 57.6115133960898, 'J_D_1KI': 41.71724358876886} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..46ccf1c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1065, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.481358289718628, "TIME_S_1KI": 9.841650976261622, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2347.078887729645, "W": 111.31000000000002, "J_1KI": 2203.829941530183, "W_1KI": 104.51643192488265, "W_D": 75.20475000000002, "J_D": 1585.7648098282818, "W_D_1KI": 70.61478873239437, "J_D_1KI": 66.30496594591021} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..e35280e --- 
/dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.9852290153503418} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3986, 7986, ..., 39992018, + 39996031, 40000000]), + col_indices=tensor([ 5, 6, 9, ..., 9992, 9994, 9996]), + values=tensor([0.6541, 0.7210, 0.2819, ..., 0.6753, 0.1107, 0.4939]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.0543, 0.0386, 0.2201, ..., 0.7560, 0.6831, 0.4533]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 0.9852290153503418 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1065', '-ss', '10000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.481358289718628} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3991, 7976, ..., 39991944, + 39995981, 40000000]), + col_indices=tensor([ 3, 5, 7, ..., 9995, 9997, 9998]), + values=tensor([0.0964, 0.7406, 0.8990, ..., 0.2597, 0.1012, 0.1175]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.6206, 0.0983, 0.0503, ..., 0.3920, 0.3679, 0.5560]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.481358289718628 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3991, 7976, ..., 39991944, + 39995981, 40000000]), + col_indices=tensor([ 3, 5, 7, ..., 9995, 9997, 9998]), + values=tensor([0.0964, 0.7406, 0.8990, ..., 0.2597, 0.1012, 0.1175]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.6206, 0.0983, 0.0503, ..., 0.3920, 0.3679, 0.5560]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.481358289718628 seconds + +[40.72, 39.99, 40.29, 39.74, 40.59, 40.22, 39.97, 39.63, 41.44, 39.71] +[111.31] +21.085966110229492 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1065, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.481358289718628, 'TIME_S_1KI': 9.841650976261622, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2347.078887729645, 'W': 111.31000000000002} +[40.72, 39.99, 40.29, 39.74, 40.59, 40.22, 39.97, 39.63, 41.44, 39.71, 40.5, 39.72, 40.2, 39.73, 39.88, 39.9, 40.11, 40.46, 39.97, 39.6] +722.105 +36.10525 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1065, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.481358289718628, 'TIME_S_1KI': 9.841650976261622, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2347.078887729645, 'W': 111.31000000000002, 'J_1KI': 2203.829941530183, 'W_1KI': 104.51643192488265, 'W_D': 75.20475000000002, 'J_D': 1585.7648098282818, 'W_D_1KI': 70.61478873239437, 'J_D_1KI': 66.30496594591021} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..44f00fc --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 959, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.287067413330078, "TIME_S_1KI": 10.72686904413981, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2562.4177262830735, "W": 110.66000000000001, "J_1KI": 2671.9684319948633, "W_1KI": 115.3910323253389, "W_D": 74.4205, "J_D": 1723.2641279491188, "W_D_1KI": 77.6021897810219, "J_D_1KI": 80.91990592390188} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..3d7d465 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 1.0947494506835938} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4881, 9921, ..., 49990115, + 49995061, 50000000]), + col_indices=tensor([ 4, 7, 9, ..., 9993, 9997, 9999]), + values=tensor([0.4030, 0.2804, 0.0595, ..., 0.7877, 0.0532, 0.8389]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.3752, 0.3058, 0.8535, ..., 0.2048, 0.2544, 0.0959]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 1.0947494506835938 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '959', '-ss', '10000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.287067413330078} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4969, 9907, ..., 49989771, + 49994882, 50000000]), + col_indices=tensor([ 0, 1, 2, ..., 9995, 9997, 9999]), + values=tensor([0.7090, 0.6777, 0.6702, ..., 0.9888, 0.6596, 0.6666]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.5679, 0.7359, 0.9695, ..., 0.6090, 0.0343, 0.3884]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.287067413330078 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4969, 9907, ..., 49989771, + 49994882, 50000000]), + col_indices=tensor([ 0, 1, 2, ..., 9995, 9997, 9999]), + values=tensor([0.7090, 0.6777, 0.6702, ..., 0.9888, 0.6596, 0.6666]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.5679, 0.7359, 0.9695, ..., 0.6090, 0.0343, 0.3884]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.287067413330078 seconds + +[40.95, 40.36, 40.36, 39.82, 40.17, 39.71, 39.88, 39.61, 39.9, 40.59] +[110.66] +23.1557719707489 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 959, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.287067413330078, 'TIME_S_1KI': 10.72686904413981, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2562.4177262830735, 'W': 110.66000000000001} +[40.95, 40.36, 40.36, 39.82, 40.17, 39.71, 39.88, 39.61, 39.9, 40.59, 40.84, 40.11, 40.32, 45.02, 39.83, 39.6, 39.79, 39.52, 39.81, 39.58] +724.7900000000001 +36.23950000000001 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 959, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.287067413330078, 'TIME_S_1KI': 10.72686904413981, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2562.4177262830735, 'W': 110.66000000000001, 'J_1KI': 2671.9684319948633, 'W_1KI': 115.3910323253389, 'W_D': 74.4205, 'J_D': 1723.2641279491188, 'W_D_1KI': 77.6021897810219, 'J_D_1KI': 80.91990592390188} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json index b4516b2..11ac4bd 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 366482, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.093939065933228, "TIME_S_1KI": 0.0302714432521467, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1264.2006668257713, "W": 98.79, "J_1KI": 3.4495573229402026, "W_1KI": 0.26956303447372587, "W_D": 62.66125, "J_D": 801.866525297463, "W_D_1KI": 0.17098043014390885, "J_D_1KI": 0.0004665452331735497} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 349528, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3000328540802, "TIME_S_1KI": 0.029468405547138424, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1246.5216499328615, "W": 97.68, "J_1KI": 3.566299838447453, "W_1KI": 0.27946258954933517, "W_D": 61.658750000000005, "J_D": 786.844459283352, "W_D_1KI": 0.17640575290105515, "J_D_1KI": 0.0005046970568911651} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output index b43f8ba..270e7ce 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,373 +1,266 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019162416458129883} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.020120859146118164} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([2508, 9046, 543, 4312, 1967, 3077, 2314, 3402, 8716, - 3244, 7898, 5856, 5314, 5976, 2342, 2492, 8104, 2147, - 1979, 1751, 5860, 9783, 5705, 5222, 9824, 95, 1929, - 8803, 8548, 677, 1693, 5788, 5523, 9109, 5789, 1810, - 1126, 4820, 9808, 8419, 2599, 1229, 2963, 1212, 2144, - 3318, 3062, 2566, 7505, 6545, 1590, 1285, 717, 2005, - 5957, 3726, 9706, 857, 1431, 3655, 7849, 8981, 2848, - 7191, 9483, 8286, 6981, 629, 346, 7910, 8938, 1876, - 6475, 566, 6786, 4287, 8002, 5447, 9857, 7286, 3753, - 6095, 8322, 7342, 5732, 7297, 8559, 2193, 1470, 9896, - 4441, 3719, 1687, 3912, 4005, 7511, 4689, 9996, 5681, - 3721, 5011, 1299, 193, 7161, 1139, 8246, 6024, 9167, - 5912, 3347, 8644, 5448, 9677, 2358, 2275, 4573, 5804, - 6640, 677, 4332, 7242, 5152, 3155, 2691, 5318, 3888, - 7771, 2370, 9442, 9350, 2217, 3939, 1845, 8407, 8949, - 3238, 6161, 7398, 563, 6153, 7042, 5316, 6185, 8259, - 7444, 4523, 6325, 9519, 7963, 8735, 3571, 4162, 6945, - 7239, 9555, 8362, 6689, 825, 2005, 7392, 6469, 6550, - 1091, 7670, 3483, 3477, 6190, 5573, 9375, 3852, 897, - 8739, 4969, 3239, 4179, 9098, 2727, 7551, 1657, 3410, - 9763, 2836, 8969, 3741, 1805, 806, 7323, 1341, 9366, - 4238, 7055, 1855, 6557, 2199, 3076, 961, 56, 4684, - 8459, 6449, 916, 1241, 3555, 6490, 6076, 4659, 6608, - 1972, 7464, 3684, 7276, 202, 4934, 2937, 6629, 8676, - 2854, 9198, 3221, 1881, 9527, 3491, 900, 551, 3477, - 7557, 5144, 2172, 5733, 2720, 3420, 7237, 7166, 9810, - 146, 2108, 2851, 228, 520, 2516, 469, 991, 7850, - 1010, 9739, 3913, 4444, 2689, 1467, 901, 7088, 235, - 7464, 8041, 9413, 9292, 9837, 5114, 68, 289, 3415, - 4247, 7541, 9998, 3514, 9674, 2670, 3572, 2167, 8523, - 1517, 6861, 179, 4531, 7528, 4118, 3477, 1329, 9307, - 6276, 937, 7241, 4841, 2423, 4555, 3917, 1683, 3585, - 9720, 9352, 5341, 6902, 9150, 1840, 8548, 9758, 2673, - 665, 8946, 4675, 2273, 3021, 7214, 5707, 6328, 3736, - 1078, 7132, 2785, 1506, 6694, 7725, 9559, 241, 6974, - 365, 3095, 3437, 5609, 2248, 2372, 9786, 6037, 6108, - 7705, 1458, 6266, 400, 8324, 6775, 3201, 1369, 6843, - 7707, 9241, 1615, 9217, 1863, 2989, 2014, 2316, 4769, - 5768, 5885, 5440, 3265, 6889, 6583, 7730, 647, 9106, - 4087, 6087, 4403, 9274, 4471, 7401, 2762, 3931, 6965, - 3164, 938, 527, 5512, 7910, 4668, 5448, 6709, 6557, - 7876, 2822, 1072, 7234, 7990, 680, 6351, 365, 8734, - 3355, 2213, 5195, 7229, 9067, 2511, 
3762, 8881, 3507, - 8462, 498, 1315, 6134, 3211, 3461, 4219, 3233, 6852, - 4989, 108, 8007, 9484, 5952, 2542, 8868, 678, 6135, - 2042, 6083, 8113, 8376, 7391, 510, 8516, 672, 6513, - 3857, 4529, 1142, 2241, 2953, 8752, 7337, 485, 9766, - 7738, 1630, 4671, 1054, 3709, 3629, 1425, 6145, 3112, - 1557, 5971, 4707, 2420, 7428, 3073, 7043, 4518, 3258, - 8324, 4576, 9820, 1738, 4288, 4568, 5120, 2665, 5670, - 1160, 3431, 2936, 6123, 3465, 6501, 3805, 5189, 2086, - 6398, 7063, 7290, 1186, 1870, 6160, 4041, 9566, 9903, - 8671, 3136, 161, 9574, 903, 9086, 6744, 9121, 576, - 5200, 5674, 3097, 5935, 1757, 3232, 3580, 6609, 4322, - 3945, 8444, 3852, 1569, 5504, 6603, 1547, 878, 7861, - 8802, 3561, 6575, 4591, 301, 5462, 5293, 1550, 7795, - 7430, 4734, 185, 8890, 5875, 1566, 7507, 6627, 9171, - 1332, 7105, 4247, 5478, 9026, 7544, 154, 3320, 6187, - 9024, 5315, 5066, 2631, 6631, 4897, 3673, 328, 228, - 3166, 2519, 3745, 6596, 6669, 3088, 9711, 8165, 899, - 9589, 36, 3957, 496, 115, 6166, 3688, 1766, 2307, - 361, 8690, 3683, 5585, 8112, 1232, 4580, 3944, 1072, - 3051, 3242, 8587, 3766, 5601, 892, 958, 4286, 1119, - 8408, 8953, 1339, 9703, 8899, 4918, 8984, 90, 9279, - 2372, 4341, 1706, 6214, 6370, 5149, 5092, 1777, 9441, - 9154, 96, 8248, 9257, 324, 4153, 3647, 3530, 8044, - 6156, 5365, 6116, 6837, 5051, 5730, 9232, 9397, 1594, - 8693, 8301, 9952, 6466, 8737, 1882, 7563, 1802, 8052, - 5900, 5405, 9630, 5275, 397, 43, 1118, 3111, 7822, - 1550, 6255, 9674, 8999, 4005, 803, 8997, 3496, 9830, - 8865, 2267, 8584, 1911, 3802, 5884, 3569, 5747, 6116, - 2421, 6562, 9781, 1610, 9382, 4873, 3318, 7129, 1019, - 4920, 9241, 9316, 5277, 42, 7494, 521, 5458, 1264, - 2824, 4185, 5776, 9545, 1226, 5004, 3090, 5089, 5606, - 2635, 730, 1318, 7015, 5910, 1589, 2576, 5832, 8341, - 510, 1635, 9237, 9840, 9042, 2447, 2076, 1317, 6375, - 2349, 753, 5583, 461, 8308, 3246, 9965, 6828, 2813, - 5244, 8142, 3664, 9159, 2387, 8232, 3360, 3745, 9932, - 8242, 9581, 583, 7106, 1310, 8310, 9223, 9897, 891, - 3817, 669, 1332, 8246, 7894, 4436, 9573, 8485, 883, - 6017, 9583, 1812, 1526, 9934, 6848, 545, 1911, 9192, - 9042, 1264, 2751, 4412, 3662, 7224, 2779, 4696, 305, - 5292, 260, 4859, 3014, 8257, 6456, 630, 7298, 2761, - 6571, 1472, 237, 547, 1959, 9753, 241, 8232, 658, - 5617, 1683, 4236, 4952, 233, 3496, 5512, 1296, 2703, - 824, 6138, 8526, 4191, 8549, 1882, 3569, 880, 1859, - 3403, 3955, 3780, 9117, 9017, 4182, 8874, 6980, 6107, - 6696, 2541, 5309, 129, 1202, 6760, 6885, 2142, 3488, - 5352, 1138, 5951, 6874, 3225, 7748, 1683, 7358, 5735, - 6474, 9585, 2307, 1145, 6405, 344, 6696, 7602, 6515, - 6579, 4160, 476, 4879, 2338, 3519, 6083, 7770, 8803, - 874, 61, 5281, 3911, 7063, 496, 6813, 3096, 9178, - 9609, 9207, 4316, 298, 2152, 4674, 1655, 2709, 1060, - 1886, 4678, 5984, 8258, 3381, 1591, 2352, 5598, 2384, - 6485, 3129, 2612, 5258, 50, 7930, 121, 1164, 2557, - 7206, 332, 277, 8421, 8500, 3337, 425, 29, 4235, - 1351, 6801, 7552, 2348, 1388, 7634, 6330, 462, 92, - 7088, 6060, 755, 8641, 9588, 7108, 2787, 4729, 7562, - 1723, 9127, 2961, 5309, 3629, 9214, 609, 7748, 5877, - 8710, 813, 774, 4114, 8416, 9164, 6941, 8479, 9249, - 2209, 1935, 7138, 6867, 4080, 6512, 6310, 1493, 4434, - 2113, 1980, 842, 7628, 543, 6712, 7464, 8278, 9046, - 252, 3576, 5501, 7511, 8121, 9681, 2157, 2047, 8548, - 1713, 9404, 312, 6465, 1352, 745, 2058, 4123, 364, - 3279, 206, 6153, 6439, 2407, 2611, 8191, 5859, 4752, - 2250, 2606, 5278, 8499, 5121, 6146, 1399, 707, 9543, - 9512, 8626, 4838, 1640, 8865, 3626, 1378, 1243, 7236, - 2803, 749, 5378, 8175, 205, 2154, 
4279, 8991, 3947, - 1049, 8403, 5710, 6926, 9939, 7498, 1591, 3403, 5973, - 3803, 6876, 4161, 9117, 6935, 2368, 4884, 1751, 2199, - 7506, 7285, 9043, 5992, 4277, 9161, 865, 8138, 3793, - 2124]), - values=tensor([9.5707e-01, 7.5111e-02, 5.2267e-01, 7.5134e-01, - 4.3950e-01, 1.0462e-01, 5.2766e-01, 8.0984e-01, - 3.6860e-01, 6.2700e-01, 2.7206e-01, 2.2086e-01, - 7.5800e-02, 5.2988e-01, 9.7607e-01, 6.6976e-01, - 1.0378e-01, 2.4656e-01, 7.2340e-02, 8.2021e-01, - 5.8758e-01, 3.6488e-01, 4.8457e-01, 9.5443e-01, - 8.5500e-01, 7.0430e-02, 4.0168e-01, 9.6994e-01, - 7.6007e-01, 5.2134e-01, 6.0309e-01, 2.0268e-02, - 6.4190e-01, 5.1957e-01, 1.7394e-02, 5.2587e-01, - 8.3287e-01, 5.4658e-01, 2.9352e-01, 5.8028e-01, - 2.3292e-01, 5.6525e-01, 3.0481e-01, 5.9132e-01, - 5.4885e-01, 4.3601e-01, 1.8604e-01, 7.6863e-01, - 4.0955e-01, 1.9418e-01, 8.1744e-01, 2.2905e-01, - 6.2145e-01, 4.2411e-01, 9.6454e-01, 6.9999e-01, - 2.5102e-01, 7.2112e-01, 5.4737e-03, 2.1960e-01, - 2.7281e-01, 9.8861e-01, 2.3502e-01, 1.8402e-02, - 7.3555e-01, 1.7015e-01, 8.0917e-01, 3.6306e-01, - 9.5127e-01, 9.5612e-01, 6.5620e-01, 9.4079e-01, - 8.4929e-02, 1.9682e-01, 2.9972e-01, 7.5136e-01, - 9.5920e-01, 8.9681e-01, 9.3731e-01, 7.0940e-01, - 3.4216e-01, 5.2653e-01, 4.4805e-01, 3.5030e-01, - 4.8871e-01, 6.7711e-01, 2.1250e-01, 6.4504e-01, - 8.9757e-01, 2.0048e-01, 7.0915e-01, 3.4982e-01, - 3.7719e-01, 5.1148e-01, 5.1377e-01, 4.0306e-01, - 9.0690e-01, 9.4781e-01, 4.2681e-01, 8.2944e-01, - 3.6292e-02, 1.1525e-01, 2.1119e-01, 4.8411e-01, - 5.4486e-01, 6.4081e-01, 7.2189e-01, 7.4429e-01, - 5.2306e-01, 2.1619e-01, 9.8409e-01, 1.4917e-01, - 7.9852e-01, 7.8111e-01, 1.8563e-01, 1.2931e-01, - 6.3740e-01, 4.9304e-01, 1.5550e-01, 5.8621e-01, - 6.4899e-01, 7.7558e-01, 3.3227e-01, 2.4952e-01, - 8.7603e-01, 8.7279e-01, 2.4448e-01, 5.2727e-01, - 6.1779e-01, 6.0647e-01, 4.6651e-01, 1.5071e-01, - 2.6441e-01, 7.8739e-01, 7.4962e-01, 1.1566e-01, - 2.0523e-01, 6.1684e-02, 1.3500e-01, 3.5001e-01, - 4.2839e-01, 1.0545e-01, 3.1092e-01, 1.9727e-01, - 2.2556e-01, 8.2536e-01, 5.3360e-01, 5.6445e-02, - 2.8582e-01, 9.0706e-01, 3.2536e-01, 5.8741e-01, - 6.7535e-01, 2.1045e-01, 2.8517e-01, 8.8253e-01, - 4.3607e-01, 1.1168e-01, 1.0718e-01, 9.9590e-01, - 2.8048e-01, 7.4511e-01, 5.0053e-01, 6.2593e-01, - 5.9607e-01, 4.4446e-02, 9.8355e-01, 5.2331e-01, - 5.2200e-01, 8.5502e-01, 6.4442e-01, 9.0020e-01, - 8.6652e-01, 3.0542e-01, 5.0427e-01, 7.5490e-01, - 7.0590e-01, 4.5354e-01, 9.5344e-01, 7.8571e-01, - 8.1908e-01, 4.9790e-01, 5.0901e-01, 6.2748e-01, - 8.1486e-01, 2.4903e-01, 4.0531e-01, 5.0243e-01, - 7.1026e-01, 6.1145e-01, 9.9286e-01, 9.2958e-02, - 9.5266e-01, 9.8567e-02, 6.6328e-01, 7.4995e-01, - 1.6322e-01, 4.7215e-01, 2.8642e-01, 4.0280e-01, - 4.1682e-01, 8.7567e-01, 3.3333e-01, 6.2409e-01, - 5.1732e-01, 5.0235e-01, 5.9038e-01, 9.6075e-01, - 6.4711e-01, 1.9445e-01, 6.6739e-01, 1.7538e-01, - 4.7512e-01, 7.0938e-01, 7.6463e-01, 9.7284e-01, - 6.7293e-01, 7.1057e-01, 1.6317e-01, 5.3806e-01, - 2.3246e-01, 3.3440e-01, 6.4813e-01, 1.4413e-01, - 6.7799e-01, 6.9199e-01, 2.7005e-01, 8.5967e-01, - 9.5025e-01, 5.7371e-01, 5.3952e-01, 8.1204e-01, - 8.6498e-01, 1.0209e-01, 1.7641e-01, 9.0414e-01, - 9.8256e-01, 1.4095e-01, 1.1876e-02, 6.2645e-01, - 6.8777e-01, 3.0740e-01, 2.6030e-01, 8.3542e-01, - 5.9327e-01, 4.4948e-01, 3.7350e-01, 1.0986e-04, - 2.3860e-01, 9.1896e-02, 5.3594e-01, 9.6019e-01, - 1.6538e-01, 1.1255e-01, 3.6157e-01, 8.2704e-01, - 4.6706e-01, 9.2369e-01, 5.7754e-02, 8.8497e-01, - 4.0691e-01, 1.4109e-01, 8.9898e-01, 4.6571e-01, - 6.6921e-01, 1.3252e-01, 1.8881e-01, 
9.8200e-01, - 1.0344e-01, 8.4811e-01, 1.5001e-01, 2.3035e-01, - 5.0531e-01, 5.4044e-01, 1.5758e-01, 2.7234e-01, - 2.9591e-02, 5.5223e-01, 5.2184e-01, 1.3019e-01, - 5.4173e-01, 3.8830e-02, 1.7101e-01, 4.2637e-01, - 1.9357e-01, 8.3596e-01, 2.2751e-01, 3.1702e-02, - 5.1308e-01, 8.9139e-01, 7.0073e-01, 1.9805e-02, - 7.1449e-01, 8.6626e-01, 6.7742e-01, 9.7350e-02, - 9.6125e-02, 1.0821e-02, 2.0755e-01, 8.5136e-01, - 9.6269e-01, 2.9214e-01, 7.5108e-01, 9.3964e-01, - 3.7916e-01, 4.1542e-01, 5.8698e-02, 6.6382e-02, - 6.7280e-01, 5.8834e-01, 6.9868e-01, 2.9465e-01, - 8.0107e-01, 9.9069e-01, 9.1914e-01, 8.1978e-01, - 2.3299e-01, 8.9070e-01, 4.4949e-01, 3.9548e-01, - 6.8903e-02, 5.1778e-01, 5.0884e-01, 9.6649e-01, - 7.8248e-01, 7.9469e-01, 8.4596e-01, 4.1456e-01, - 5.6304e-01, 2.4162e-01, 7.7596e-01, 3.7262e-01, - 1.9690e-01, 8.7898e-01, 5.7855e-01, 8.8490e-01, - 7.9846e-01, 9.8537e-01, 2.5265e-01, 4.1278e-01, - 2.8090e-01, 7.7330e-01, 3.6348e-01, 1.2010e-01, - 4.8836e-02, 3.9874e-01, 3.6603e-01, 9.7665e-01, - 6.6776e-01, 8.6239e-01, 5.8508e-01, 6.2472e-01, - 4.6666e-01, 7.1718e-01, 5.8015e-01, 1.7355e-01, - 6.4883e-01, 6.9474e-01, 5.5885e-01, 8.0919e-01, - 2.1098e-02, 5.1721e-01, 3.2513e-01, 4.7559e-01, - 6.9479e-01, 5.7204e-02, 8.2071e-01, 7.9470e-01, - 1.5832e-01, 9.6615e-01, 8.8939e-01, 3.4476e-01, - 4.2795e-01, 2.4169e-01, 1.1676e-01, 8.6470e-01, - 8.2056e-01, 6.5573e-01, 9.8935e-01, 5.7272e-01, - 7.3601e-01, 7.9089e-02, 3.3822e-01, 4.0108e-01, - 9.6395e-01, 4.8080e-01, 5.9846e-01, 3.8560e-01, - 4.7398e-01, 5.5866e-01, 1.9426e-01, 6.3002e-01, - 3.6931e-01, 1.9150e-01, 9.5172e-01, 6.8593e-01, - 9.0949e-01, 7.7968e-01, 8.8743e-01, 7.2524e-01, - 5.8547e-01, 4.0587e-02, 7.9930e-01, 4.9416e-01, - 2.2101e-01, 3.5603e-01, 3.6054e-01, 8.1873e-01, - 4.9506e-01, 4.8170e-01, 1.9852e-02, 8.4708e-01, - 5.5999e-02, 9.3643e-01, 8.9081e-01, 5.2429e-01, - 3.4433e-01, 9.9034e-01, 7.5270e-01, 9.8192e-02, - 8.3430e-01, 4.0506e-01, 5.7351e-01, 5.8198e-01, - 7.8535e-01, 3.1474e-01, 3.7672e-01, 9.5581e-01, - 5.0140e-01, 9.4484e-01, 8.7129e-01, 7.1288e-01, - 5.5956e-01, 4.2823e-01, 5.7999e-01, 2.1299e-01, - 4.2089e-01, 2.2957e-01, 7.9354e-01, 7.6309e-02, - 3.4527e-01, 7.3564e-01, 3.1080e-02, 5.3529e-01, - 6.7180e-01, 5.7907e-01, 3.1932e-01, 7.5232e-01, - 9.5640e-01, 2.7941e-01, 3.0238e-01, 2.3890e-01, - 7.8927e-01, 3.9851e-01, 9.3470e-01, 5.2208e-01, - 2.6565e-01, 9.7156e-01, 5.3434e-02, 1.7433e-01, - 6.3462e-01, 4.0678e-01, 7.6080e-01, 7.5007e-01, - 8.8465e-01, 5.6181e-01, 1.8341e-01, 7.3839e-01, - 1.2657e-01, 4.0856e-02, 1.9742e-01, 5.7758e-01, - 9.1472e-01, 7.0792e-01, 2.2052e-01, 8.3225e-01, - 3.2311e-01, 3.3219e-01, 7.1413e-01, 9.1565e-01, - 3.5997e-01, 7.4105e-03, 6.0376e-01, 7.2324e-01, - 1.4783e-01, 6.7320e-01, 3.7075e-01, 2.5924e-02, - 9.4362e-01, 9.0266e-01, 1.6865e-01, 1.1705e-01, - 5.1386e-01, 8.8878e-01, 7.2176e-02, 4.6214e-01, - 5.6413e-01, 8.8627e-01, 1.4536e-01, 9.9284e-01, - 2.1084e-01, 2.8486e-01, 4.9650e-01, 2.9385e-02, - 6.2964e-01, 4.5897e-01, 1.2506e-01, 3.2973e-01, - 3.5741e-01, 1.1022e-01, 4.5569e-01, 6.9577e-01, - 5.5279e-01, 2.7969e-01, 8.5986e-01, 2.6368e-01, - 6.4340e-01, 9.2902e-01, 8.9041e-01, 3.8571e-01, - 5.3481e-01, 6.3246e-01, 2.9579e-01, 8.7994e-01, - 7.3501e-01, 1.8278e-01, 8.7146e-01, 6.8984e-01, - 9.7701e-01, 2.6879e-01, 4.7228e-01, 4.6858e-01, - 6.5356e-01, 3.4051e-01, 1.3083e-01, 5.5078e-01, - 9.9590e-01, 6.7336e-01, 5.2390e-01, 9.9667e-01, - 3.6021e-01, 8.2597e-01, 9.1583e-02, 8.8522e-01, - 6.2531e-01, 7.7626e-01, 2.4642e-01, 7.1187e-01, - 8.5251e-01, 3.2244e-01, 8.3217e-01, 
5.6333e-01, - 9.0624e-01, 1.0129e-01, 1.7179e-01, 2.2377e-01, - 1.3904e-01, 1.0345e-01, 3.5301e-01, 9.2239e-01, - 1.7381e-01, 8.5612e-01, 4.7815e-02, 6.3335e-01, - 8.3647e-01, 9.1361e-01, 4.4021e-01, 9.0607e-01, - 9.6051e-01, 3.7103e-01, 7.5892e-01, 4.9115e-02, - 4.7167e-01, 2.2361e-01, 5.0125e-02, 5.7553e-01, - 3.8853e-01, 1.8018e-03, 2.1029e-01, 6.3036e-01, - 5.1485e-01, 2.6272e-01, 3.7676e-01, 4.0031e-01, - 5.5369e-01, 8.5186e-01, 9.5478e-01, 9.7874e-01, - 6.0960e-01, 5.7114e-01, 1.6926e-01, 9.7937e-01, - 2.8179e-01, 6.0239e-01, 1.5084e-01, 4.4762e-01, - 1.8913e-02, 7.4072e-02, 1.9277e-01, 9.7400e-01, - 6.4101e-01, 9.2459e-01, 9.2259e-01, 2.5328e-01, - 4.3386e-01, 7.5744e-01, 2.6380e-01, 1.8999e-01, - 2.9650e-01, 1.2204e-01, 6.3492e-01, 9.8427e-01, - 6.6209e-01, 2.3144e-01, 1.9369e-01, 6.9676e-01, - 7.7157e-01, 7.1536e-01, 1.9457e-01, 9.1369e-01, - 3.7470e-01, 4.8812e-01, 2.0180e-01, 8.6489e-01, - 8.3156e-01, 4.2700e-01, 5.5408e-01, 1.2690e-01, - 6.1767e-01, 8.0178e-01, 6.5887e-01, 3.7992e-01, - 3.4861e-02, 7.4799e-01, 5.4430e-01, 2.7734e-01, - 7.8324e-01, 7.5679e-01, 3.7977e-01, 9.8697e-01, - 2.1210e-01, 5.3826e-01, 9.8583e-01, 5.6862e-01, - 1.2409e-01, 4.7137e-01, 3.8364e-02, 7.9660e-02, - 1.0900e-01, 5.8409e-01, 3.5038e-01, 4.7469e-01, - 3.2646e-01, 9.7963e-01, 4.0955e-02, 7.7282e-01, - 2.3795e-01, 9.1451e-01, 6.2383e-02, 8.7752e-01, - 4.7785e-01, 6.3873e-01, 8.5135e-01, 6.3066e-01, - 9.2024e-01, 9.7095e-01, 6.0603e-01, 1.6173e-01, - 1.3692e-01, 9.7430e-02, 4.6913e-01, 1.3027e-01, - 6.4550e-01, 3.7097e-01, 1.0917e-01, 2.2626e-01, - 2.8272e-01, 9.3467e-01, 4.9372e-01, 1.1127e-02, - 2.9856e-01, 3.5094e-01, 6.7278e-01, 8.7179e-01, - 6.1750e-01, 9.3689e-01, 4.8396e-01, 7.8289e-01, - 6.7812e-01, 5.5047e-01, 6.6028e-01, 3.8193e-01, - 4.4025e-01, 4.7649e-02, 7.8513e-02, 8.4372e-01, - 1.5700e-01, 6.6635e-01, 6.0490e-02, 1.8217e-02, - 4.2363e-01, 7.2664e-01, 8.3042e-01, 2.0481e-01, - 7.7507e-01, 9.7178e-01, 2.8840e-01, 2.2320e-02, - 2.4571e-01, 3.1488e-01, 5.5842e-01, 4.4275e-01, - 6.1188e-01, 9.9512e-01, 2.2123e-01, 7.4091e-01, - 3.2291e-01, 6.8756e-01, 9.3283e-01, 5.8119e-01, - 3.3596e-01, 7.2542e-01, 4.1198e-01, 6.8787e-02, - 8.8370e-01, 9.5542e-01, 2.2005e-01, 1.3328e-01, - 8.7725e-02, 1.6866e-01, 2.6482e-01, 2.0425e-01, - 9.5978e-01, 8.7192e-01, 8.3889e-01, 8.5559e-01, - 1.8737e-01, 6.5055e-01, 3.7960e-01, 9.6932e-01, - 8.8268e-02, 4.3796e-01, 2.4756e-01, 5.3141e-01, - 5.8335e-01, 1.5163e-01, 4.6407e-01, 2.2600e-01, - 7.3892e-02, 1.8333e-01, 3.7744e-01, 6.2963e-01, - 3.5785e-01, 6.6981e-01, 3.8519e-01, 9.8844e-02, - 8.4155e-01, 9.8768e-01, 2.6869e-01, 6.6979e-01, - 3.9017e-01, 7.5747e-01, 8.9913e-01, 9.6730e-01, - 8.6778e-01, 8.9518e-01, 1.3607e-02, 7.1872e-01, - 4.9862e-01, 9.7831e-01, 3.5029e-01, 6.3459e-01, - 7.2985e-01, 3.1857e-01, 3.9858e-01, 8.7370e-01, - 7.2291e-01, 6.5606e-01, 4.2614e-01, 9.7369e-01, - 4.8132e-01, 3.9773e-01, 4.9498e-01, 9.3882e-01, - 6.7264e-01, 1.4831e-01, 5.6335e-01, 1.3523e-01, - 8.0317e-01, 4.7686e-01, 7.9812e-01, 1.6696e-01, - 1.1332e-01, 9.1364e-02, 5.8808e-01, 9.8429e-01, - 1.5637e-01, 2.4472e-01, 8.7386e-01, 6.3707e-01, - 5.1617e-01, 4.9782e-02, 8.8919e-01, 8.8265e-01, - 1.0222e-02, 2.0402e-01, 9.7026e-01, 8.6356e-01, - 1.9645e-01, 5.4133e-01, 7.4024e-01, 3.8777e-01, - 7.1005e-02, 2.4800e-01, 8.5222e-02, 6.5517e-01, - 4.2196e-01, 9.1175e-02, 1.1579e-01, 3.4600e-01, - 2.4814e-01, 4.3545e-01, 4.5340e-02, 1.3988e-01, - 9.2241e-01, 8.9701e-01, 4.8420e-01, 1.0904e-01, - 5.2898e-02, 5.7708e-01, 2.3747e-01, 4.6528e-01, - 2.2433e-01, 9.1101e-01, 8.3910e-01, 
2.2365e-01, - 4.4439e-02, 4.6479e-01, 2.9108e-01, 6.7486e-01, - 2.7214e-01, 3.8601e-01, 1.6734e-01, 8.3921e-01, - 1.9230e-01, 9.9649e-01, 9.2040e-01, 5.7493e-01, - 2.5505e-02, 4.8876e-01, 2.6393e-01, 2.7271e-01, - 5.4310e-01, 8.6927e-01, 5.3533e-02, 1.9992e-01, - 5.7080e-01, 4.0119e-01, 2.5005e-01, 2.9507e-01, - 7.8395e-01, 3.1151e-01, 8.1157e-01, 6.1728e-01, - 8.6581e-01, 8.8523e-01, 8.1788e-01, 7.3004e-01, - 1.6512e-01, 3.2868e-02, 3.0387e-02, 2.1487e-01, - 6.6144e-01, 2.1608e-01, 8.9192e-01, 2.3551e-01, - 8.1202e-01, 2.6274e-01, 9.1459e-01, 7.5205e-01, - 7.3392e-01, 5.9391e-01, 2.2391e-01, 9.4922e-02, - 3.3381e-03, 9.6530e-01, 7.1320e-01, 5.3214e-01, - 5.2087e-01, 2.4255e-01, 9.0107e-01, 9.7729e-01, - 3.0352e-01, 9.8847e-01, 1.6505e-01, 5.9415e-01, - 7.8564e-01, 3.1263e-01, 1.5197e-01, 2.2146e-01, - 8.6721e-02, 5.5948e-01, 3.9253e-01, 3.7220e-01, - 5.3485e-01, 8.5741e-01, 8.0291e-01, 1.4914e-01, - 8.5698e-02, 4.0980e-01, 7.9854e-02, 3.0832e-01, - 3.0797e-01, 8.7825e-01, 6.1055e-01, 5.3491e-02, - 3.0791e-02, 3.7147e-01, 7.7878e-01, 3.4236e-01, - 4.5054e-01, 1.5377e-02, 5.9456e-02, 5.9733e-01, - 4.9498e-01, 1.1290e-01, 8.5009e-01, 8.0132e-01, - 6.7297e-01, 9.7824e-02, 9.2051e-01, 1.9335e-01, - 2.4769e-01, 1.6690e-01, 5.0092e-01, 5.4391e-01, - 7.2974e-01, 5.6050e-01, 3.7368e-01, 8.9801e-01, - 3.7280e-01, 1.0774e-01, 2.2172e-02, 3.1493e-01, - 5.8527e-01, 7.1807e-01, 7.0470e-01, 1.1035e-01, - 2.5102e-01, 7.6188e-01, 7.9622e-01, 9.4422e-01, - 1.5518e-01, 3.1240e-02, 3.5932e-01, 1.9391e-01, - 1.0883e-01, 8.0519e-01, 6.5322e-01, 2.7200e-01, - 3.6103e-01, 4.5282e-01, 9.1939e-01, 4.6131e-01, - 3.8575e-01, 8.8690e-01, 4.9604e-01, 9.2190e-01, - 8.9283e-01, 5.2838e-01, 1.4113e-01, 4.4667e-01, - 2.2835e-01, 8.8676e-01, 4.8873e-01, 9.5014e-01, - 5.1040e-01, 8.4321e-01, 5.9227e-01, 4.5634e-01, - 8.5767e-01, 8.2056e-01, 3.4810e-01, 1.9600e-01, - 2.8430e-01, 7.0722e-01, 6.5270e-01, 7.1032e-01, - 5.2650e-01, 9.3358e-01, 4.5651e-01, 1.3223e-01, - 6.9249e-01, 3.3404e-01, 3.4727e-01, 2.1805e-01, - 2.2592e-02, 4.9539e-01, 7.1062e-02, 1.8299e-01]), + col_indices=tensor([5720, 4969, 1239, 644, 1146, 7636, 6750, 1203, 1601, + 3688, 7038, 3406, 2405, 7258, 8054, 5491, 9798, 7088, + 8379, 7699, 9021, 4907, 4516, 7984, 5354, 4154, 160, + 6883, 9683, 5123, 1881, 1387, 7887, 7718, 7033, 7248, + 1400, 9747, 5682, 5945, 6502, 7049, 5809, 5310, 2550, + 783, 8093, 2333, 6401, 6307, 2533, 5175, 727, 5991, + 4731, 7947, 7692, 8081, 5769, 1069, 5759, 807, 2415, + 8593, 9018, 7433, 9799, 5620, 1912, 1254, 1717, 2243, + 1601, 4408, 2963, 2716, 9079, 8160, 9075, 570, 2956, + 265, 9610, 9684, 5937, 4138, 5205, 7707, 8954, 6036, + 7966, 1175, 138, 1293, 8665, 6290, 9568, 2743, 2975, + 41, 1434, 3968, 2660, 7275, 3668, 9736, 1481, 7127, + 9069, 6236, 4897, 2857, 9146, 4606, 6232, 67, 6259, + 2438, 1251, 7120, 7564, 7763, 8648, 3302, 1912, 3582, + 2620, 9118, 9519, 3977, 325, 6981, 1246, 4300, 1003, + 7533, 5160, 3577, 206, 7743, 5685, 9266, 9276, 4124, + 9671, 9591, 3004, 1735, 4315, 8233, 4767, 2126, 3276, + 3161, 2138, 1031, 9630, 6503, 8851, 2142, 9733, 8459, + 1485, 9228, 1076, 1314, 5008, 1543, 5736, 1741, 284, + 5267, 6416, 4336, 8369, 1071, 7775, 3675, 6776, 3132, + 5086, 2392, 9203, 5136, 7781, 9728, 6200, 1005, 2202, + 28, 9964, 2610, 4497, 9409, 6798, 7509, 8371, 3496, + 5420, 976, 3461, 4009, 8236, 9760, 4180, 1551, 6470, + 9198, 3453, 9268, 5515, 3121, 6165, 3277, 642, 5201, + 8100, 93, 5953, 7750, 8924, 6234, 7829, 6602, 848, + 1045, 340, 6518, 8033, 2450, 7963, 5139, 5821, 8646, + 8568, 2825, 9727, 7650, 6041, 6102, 
5933, 1510, 9254, + 4712, 1332, 2735, 9165, 2982, 1175, 4501, 5521, 8766, + 7801, 9010, 8912, 9480, 5535, 4867, 884, 9627, 1994, + 6968, 3275, 8076, 80, 5494, 580, 5880, 1608, 6966, + 1288, 4062, 7995, 8607, 4848, 4772, 1241, 3314, 7776, + 3485, 716, 1768, 1161, 7978, 8351, 3934, 9804, 6273, + 6727, 8723, 1057, 3273, 5842, 9500, 6665, 852, 4925, + 8490, 8763, 291, 7253, 3003, 395, 6180, 6232, 8116, + 5250, 1562, 3713, 3489, 399, 5582, 3496, 9459, 2833, + 7110, 4743, 4497, 820, 9145, 2044, 6875, 8236, 6494, + 9170, 2730, 3508, 5828, 6696, 3966, 8660, 5641, 5403, + 224, 4575, 7915, 4396, 6815, 2265, 9894, 6671, 8858, + 889, 9483, 4752, 4782, 7707, 642, 4218, 8374, 7779, + 5588, 724, 2775, 6467, 9272, 6150, 567, 8677, 9456, + 2024, 4306, 6854, 3993, 9310, 6805, 5335, 8379, 4077, + 647, 2520, 7107, 7431, 8620, 8772, 3468, 2063, 7382, + 4610, 6950, 2688, 8767, 9160, 2867, 5351, 4630, 5424, + 7396, 1670, 9603, 9759, 6440, 8866, 7178, 6616, 1947, + 3505, 3139, 559, 370, 9579, 4106, 1430, 8023, 3927, + 4603, 6432, 7243, 5398, 5133, 4671, 7696, 2848, 845, + 6559, 4284, 5479, 7294, 548, 96, 9590, 2397, 46, + 3576, 3913, 2614, 4923, 8109, 5185, 4027, 7897, 4883, + 6086, 1344, 7268, 1882, 5358, 2030, 7012, 8428, 8818, + 9960, 1658, 785, 3792, 1398, 3097, 4565, 4447, 4130, + 6965, 714, 4442, 44, 7450, 1476, 5288, 5379, 7553, + 4688, 4758, 5438, 9635, 9761, 101, 9600, 1866, 163, + 7172, 2728, 3826, 6454, 56, 2512, 2040, 8595, 6663, + 7387, 7414, 9215, 5648, 2187, 9294, 8636, 3646, 9996, + 1474, 823, 1391, 3217, 3785, 8681, 6163, 4753, 7375, + 9506, 1582, 1484, 1533, 9197, 907, 2641, 3453, 6120, + 8201, 4079, 2385, 680, 2921, 2876, 9629, 2825, 731, + 9704, 9344, 9425, 2054, 5900, 2370, 1148, 3769, 6628, + 3742, 9257, 6020, 9538, 9147, 1211, 3115, 9306, 261, + 1327, 4686, 4631, 2711, 6699, 4245, 9195, 7546, 7133, + 8555, 463, 623, 9412, 9263, 3673, 7568, 4202, 8140, + 4068, 1950, 4932, 1522, 8804, 6659, 2518, 4758, 9312, + 9921, 6207, 3432, 4665, 8328, 3129, 3517, 4532, 5981, + 5021, 7492, 4807, 3572, 8936, 2277, 8389, 4948, 8134, + 9662, 1902, 324, 4274, 2698, 8466, 6268, 1312, 9280, + 9166, 2314, 8788, 8232, 5869, 9680, 9595, 1299, 2015, + 8817, 2473, 1261, 3006, 6679, 3124, 1348, 6229, 2053, + 5218, 6291, 3269, 3783, 4570, 746, 1783, 2924, 8011, + 3806, 8802, 3799, 2539, 6765, 1697, 9445, 3830, 9735, + 1720, 9777, 530, 2450, 228, 2500, 2034, 7393, 5455, + 9166, 5717, 7159, 105, 4976, 4374, 4692, 7126, 4690, + 5673, 9725, 5440, 7174, 2935, 3439, 5142, 4327, 7328, + 9023, 9711, 9672, 6607, 611, 6778, 8597, 3887, 5611, + 5055, 3729, 6248, 2976, 9931, 3827, 3036, 3201, 4775, + 1910, 6208, 3259, 7762, 3798, 8984, 4923, 5102, 3977, + 4732, 6984, 5224, 2592, 7389, 2559, 5400, 1497, 4514, + 5371, 700, 5966, 1814, 1965, 9898, 6571, 1509, 7471, + 4948, 5506, 1378, 7780, 1237, 5090, 9025, 9512, 833, + 3156, 4253, 1092, 710, 6827, 1792, 2833, 2885, 8796, + 4156, 3420, 6965, 7092, 7674, 4704, 2560, 5885, 6514, + 7379, 4741, 6081, 3156, 4894, 104, 8105, 6203, 999, + 6357, 6914, 1206, 6254, 7635, 8078, 1628, 3309, 839, + 5279, 6304, 7299, 6583, 2090, 9707, 5249, 9266, 9450, + 7863, 2421, 8478, 9674, 2515, 9696, 3920, 4648, 6962, + 1804, 1643, 5951, 7979, 6916, 5130, 4266, 1177, 2153, + 934, 6875, 6438, 746, 6384, 225, 703, 3155, 9053, + 3689, 3411, 9687, 3411, 9724, 4572, 9937, 875, 1014, + 9237, 8636, 1517, 9566, 7250, 1394, 8055, 5673, 5048, + 2120, 8361, 4563, 451, 8397, 5035, 8281, 7864, 529, + 3920, 6246, 9228, 803, 5546, 4491, 5061, 5129, 26, + 2343, 8243, 3381, 4561, 9040, 83, 8826, 7389, 1363, + 7692, 4273, 
8720, 3487, 6533, 5233, 5215, 7317, 3920, + 7199, 5303, 100, 9033, 2998, 8011, 8256, 1657, 6414, + 1247, 1172, 9139, 3600, 5499, 5802, 2925, 9718, 1915, + 5367, 2577, 7882, 78, 5629, 7403, 7917, 227, 437, + 1408, 8642, 3833, 852, 1787, 9393, 8314, 8856, 7679, + 20, 738, 9876, 2758, 8718, 2801, 9840, 2000, 9115, + 2586, 2381, 2882, 2462, 5029, 3559, 2671, 9654, 9304, + 8910, 8550, 594, 840, 7069, 1630, 3346, 3851, 1957, + 7869, 9628, 1652, 3010, 382, 8135, 3311, 7256, 3143, + 5745, 930, 8368, 3723, 4237, 7714, 9063, 9466, 3326, + 9028, 8909, 4825, 7898, 8015, 7166, 966, 3555, 9249, + 6198, 5255, 4831, 891, 976, 408, 3246, 2926, 4838, + 2822, 6032, 9296, 7984, 3285, 6136, 648, 1833, 1693, + 835, 3786, 8992, 9830, 3903, 3731, 4719, 3571, 9516, + 1895, 3643, 7516, 5117, 5917, 9057, 6546, 4799, 4169, + 3213, 7101, 432, 8512, 9679, 7228, 8955, 1649, 2192, + 5351, 9768, 3431, 715, 294, 7359, 3158, 4364, 3073, + 5835, 96, 8859, 8420, 381, 9621, 7761, 6809, 8767, + 785, 4452, 7618, 3117, 1442, 3083, 8656, 1856, 7707, + 3725, 5921, 6775, 5936, 444, 2036, 3428, 3755, 4459, + 8333]), + values=tensor([0.2027, 0.7490, 0.8133, 0.0562, 0.9474, 0.5509, 0.6186, + 0.2207, 0.6858, 0.5263, 0.3437, 0.8838, 0.1333, 0.8183, + 0.1591, 0.1806, 0.4954, 0.0497, 0.0038, 0.1926, 0.0844, + 0.5077, 0.8051, 0.0359, 0.8950, 0.0028, 0.0406, 0.1010, + 0.1506, 0.6297, 0.9513, 0.6190, 0.6713, 0.6895, 0.9984, + 0.4075, 0.8084, 0.4149, 0.5555, 0.2438, 0.0051, 0.9033, + 0.6444, 0.8540, 0.4969, 0.1288, 0.2544, 0.9003, 0.8226, + 0.4465, 0.3188, 0.2134, 0.8768, 0.0045, 0.4018, 0.3953, + 0.1091, 0.4188, 0.2310, 0.5324, 0.4837, 0.4584, 0.6558, + 0.6561, 0.9607, 0.1670, 0.7036, 0.9224, 0.1913, 0.7472, + 0.1885, 0.5846, 0.5959, 0.8837, 0.3775, 0.0359, 0.4937, + 0.4749, 0.3884, 0.3617, 0.1723, 0.2173, 0.7349, 0.4180, + 0.4864, 0.8281, 0.9879, 0.7866, 0.1097, 0.9318, 0.2083, + 0.4485, 0.0031, 0.8065, 0.7471, 0.6055, 0.7397, 0.3432, + 0.1865, 0.1552, 0.9155, 0.9039, 0.8318, 0.4281, 0.8358, + 0.0564, 0.7821, 0.2387, 0.3523, 0.5199, 0.8792, 0.7089, + 0.5095, 0.6685, 0.8629, 0.1724, 0.4953, 0.5103, 0.6859, + 0.7329, 0.3065, 0.2224, 0.5655, 0.3526, 0.0054, 0.0649, + 0.4211, 0.7946, 0.3574, 0.4338, 0.3580, 0.2838, 0.7737, + 0.6310, 0.9406, 0.6218, 0.8551, 0.9122, 0.1653, 0.6440, + 0.8169, 0.2986, 0.0504, 0.0677, 0.9500, 0.6755, 0.9628, + 0.5575, 0.8563, 0.3336, 0.3346, 0.5942, 0.9289, 0.6870, + 0.1304, 0.9928, 0.9620, 0.5369, 0.4322, 0.1341, 0.6234, + 0.0502, 0.1350, 0.0203, 0.4990, 0.2683, 0.8266, 0.8742, + 0.8618, 0.7374, 0.1594, 0.2708, 0.6547, 0.2996, 0.3303, + 0.5316, 0.5614, 0.6945, 0.9221, 0.6253, 0.8953, 0.5739, + 0.5788, 0.3440, 0.8285, 0.6692, 0.8210, 0.5561, 0.0721, + 0.5738, 0.3122, 0.3698, 0.6400, 0.1349, 0.0523, 0.2463, + 0.0066, 0.7765, 0.0395, 0.5867, 0.2673, 0.2777, 0.2477, + 0.9287, 0.9059, 0.7776, 0.2220, 0.0830, 0.4983, 0.6182, + 0.9073, 0.5764, 0.3346, 0.0213, 0.8692, 0.8480, 0.3217, + 0.5366, 0.3811, 0.1491, 0.6436, 0.5656, 0.7463, 0.7748, + 0.8297, 0.9828, 0.9057, 0.6271, 0.0284, 0.0373, 0.0975, + 0.0253, 0.0644, 0.3382, 0.2408, 0.0203, 0.3882, 0.0601, + 0.4420, 0.8609, 0.5684, 0.1984, 0.8337, 0.7001, 0.4675, + 0.9122, 0.3984, 0.6223, 0.2758, 0.9687, 0.0301, 0.5419, + 0.6510, 0.7812, 0.9548, 0.1760, 0.4832, 0.2746, 0.2275, + 0.7385, 0.2670, 0.9870, 0.9869, 0.4627, 0.4748, 0.9410, + 0.1229, 0.4026, 0.4986, 0.0603, 0.8677, 0.7768, 0.2920, + 0.1329, 0.1188, 0.2120, 0.3616, 0.2767, 0.2633, 0.2598, + 0.2051, 0.4633, 0.9324, 0.4214, 0.9023, 0.5357, 0.0380, + 0.1152, 0.2734, 0.0776, 0.9362, 0.5468, 0.1224, 0.1539, + 0.8797, 
0.9211, 0.1278, 0.6116, 0.0674, 0.5841, 0.7041, + 0.7768, 0.6632, 0.9330, 0.5210, 0.7316, 0.2318, 0.2634, + 0.4238, 0.3720, 0.3042, 0.1194, 0.4120, 0.3272, 0.5386, + 0.2656, 0.5931, 0.9549, 0.2529, 0.9428, 0.7355, 0.4479, + 0.2533, 0.1880, 0.1336, 0.4843, 0.1179, 0.8381, 0.3506, + 0.8735, 0.9041, 0.9813, 0.5063, 0.2445, 0.7635, 0.0407, + 0.4174, 0.7651, 0.4583, 0.9594, 0.1219, 0.3283, 0.0952, + 0.1795, 0.4833, 0.5147, 0.7542, 0.9908, 0.0718, 0.1142, + 0.6131, 0.7019, 0.9247, 0.8709, 0.5526, 0.0320, 0.0548, + 0.8929, 0.9621, 0.8976, 0.0710, 0.4733, 0.8148, 0.3623, + 0.7602, 0.7560, 0.2628, 0.9010, 0.1615, 0.7423, 0.8450, + 0.6056, 0.2053, 0.4265, 0.1903, 0.6588, 0.4823, 0.5141, + 0.3240, 0.7904, 0.8027, 0.2211, 0.5964, 0.3145, 0.3696, + 0.9083, 0.1766, 0.4944, 0.1736, 0.5735, 0.6368, 0.9596, + 0.7263, 0.9262, 0.5922, 0.2463, 0.1714, 0.0033, 0.7932, + 0.2126, 0.4060, 0.6893, 0.1975, 0.5678, 0.0841, 0.1465, + 0.5867, 0.5627, 0.8539, 0.7445, 0.1341, 0.1755, 0.0596, + 0.8862, 0.4165, 0.3661, 0.3899, 0.5965, 0.4511, 0.0711, + 0.0548, 0.6176, 0.0889, 0.7128, 0.5513, 0.3387, 0.9328, + 0.8630, 0.7255, 0.5233, 0.6431, 0.9668, 0.5138, 0.0090, + 0.0643, 0.6471, 0.6498, 0.1651, 0.8260, 0.9196, 0.4553, + 0.1401, 0.7956, 0.8677, 0.0436, 0.1115, 0.7501, 0.1805, + 0.1177, 0.4145, 0.5921, 0.0963, 0.9300, 0.7227, 0.1348, + 0.7173, 0.6322, 0.5137, 0.9867, 0.0647, 0.9792, 0.3266, + 0.0247, 0.7377, 0.7637, 0.3209, 0.1086, 0.0583, 0.5799, + 0.1981, 0.5919, 0.2916, 0.2966, 0.0780, 0.4505, 0.7557, + 0.4159, 0.1668, 0.4742, 0.1576, 0.6722, 0.3878, 0.8099, + 0.8942, 0.6965, 0.8877, 0.8482, 0.9470, 0.4203, 0.8759, + 0.3645, 0.3534, 0.1832, 0.8458, 0.5798, 0.8726, 0.0328, + 0.9019, 0.8421, 0.3615, 0.7394, 0.5238, 0.3930, 0.5586, + 0.7305, 0.1786, 0.3410, 0.0880, 0.7679, 0.5636, 0.7731, + 0.4946, 0.8036, 0.9399, 0.1421, 0.4802, 0.2376, 0.5522, + 0.4459, 0.3322, 0.9362, 0.0032, 0.2882, 0.9709, 0.9560, + 0.9359, 0.0754, 0.4339, 0.2682, 0.6701, 0.9940, 0.2238, + 0.5478, 0.2545, 0.9800, 0.0888, 0.5932, 0.9357, 0.9443, + 0.6873, 0.7301, 0.7549, 0.8646, 0.9766, 0.1864, 0.0985, + 0.6139, 0.0267, 0.3197, 0.2919, 0.6967, 0.3082, 0.2883, + 0.9297, 0.7269, 0.8809, 0.3092, 0.0610, 0.9951, 0.5479, + 0.0249, 0.0989, 0.4399, 0.4075, 0.8983, 0.5558, 0.8569, + 0.7300, 0.2362, 0.1604, 0.2829, 0.5397, 0.9476, 0.7292, + 0.6492, 0.8562, 0.8877, 0.8278, 0.2771, 0.2910, 0.0853, + 0.0277, 0.5928, 0.0960, 0.1237, 0.3651, 0.2032, 0.2734, + 0.5899, 0.9376, 0.2309, 0.7289, 0.7641, 0.6251, 0.6909, + 0.8172, 0.7796, 0.8912, 0.8862, 0.5555, 0.9198, 0.0056, + 0.0163, 0.4567, 0.5162, 0.6382, 0.8266, 0.2194, 0.2514, + 0.9542, 0.6722, 0.9304, 0.3675, 0.0357, 0.2872, 0.5895, + 0.6892, 0.7067, 0.0477, 0.0060, 0.9660, 0.0275, 0.6367, + 0.3282, 0.3322, 0.9717, 0.3052, 0.4175, 0.6780, 0.8759, + 0.2678, 0.8372, 0.8339, 0.0426, 0.4554, 0.8976, 0.1442, + 0.2386, 0.0041, 0.9797, 0.9283, 0.8008, 0.4484, 0.2574, + 0.1613, 0.6014, 0.5136, 0.4805, 0.6825, 0.9934, 0.3328, + 0.7734, 0.8253, 0.7679, 0.3065, 0.7301, 0.9482, 0.7462, + 0.1569, 0.2125, 0.6821, 0.2220, 0.6717, 0.0136, 0.9483, + 0.4856, 0.0845, 0.9100, 0.4147, 0.3812, 0.8805, 0.8519, + 0.3004, 0.8503, 0.4162, 0.6043, 0.4136, 0.9529, 0.2599, + 0.0815, 0.7157, 0.2965, 0.3964, 0.7460, 0.2067, 0.2764, + 0.1755, 0.8688, 0.4199, 0.6142, 0.1047, 0.2985, 0.8871, + 0.0720, 0.5235, 0.5386, 0.1308, 0.9861, 0.0866, 0.1638, + 0.6890, 0.4619, 0.9327, 0.1311, 0.6828, 0.8499, 0.9249, + 0.7575, 0.0451, 0.7830, 0.4792, 0.2998, 0.9139, 0.3481, + 0.3857, 0.1693, 0.3475, 0.1795, 0.8895, 0.0032, 0.3565, + 0.3059, 0.3174, 0.9466, 
0.2653, 0.0545, 0.9901, 0.7807, + 0.4240, 0.1534, 0.8160, 0.4585, 0.1379, 0.1152, 0.5090, + 0.7583, 0.3142, 0.3149, 0.4921, 0.2561, 0.7515, 0.7654, + 0.0649, 0.1845, 0.5929, 0.0609, 0.4455, 0.7905, 0.9877, + 0.6531, 0.4109, 0.2854, 0.0457, 0.9078, 0.9672, 0.4688, + 0.7108, 0.3158, 0.7767, 0.5234, 0.5970, 0.4202, 0.0403, + 0.8205, 0.9698, 0.2907, 0.6883, 0.3807, 0.3374, 0.2869, + 0.5752, 0.7683, 0.8315, 0.2782, 0.9414, 0.9418, 0.8182, + 0.9433, 0.0096, 0.2916, 0.7635, 0.0303, 0.9226, 0.6470, + 0.4414, 0.6115, 0.5065, 0.4629, 0.3811, 0.0957, 0.8057, + 0.5195, 0.6078, 0.5027, 0.7032, 0.9695, 0.5188, 0.3642, + 0.3508, 0.7419, 0.6800, 0.4045, 0.9365, 0.2822, 0.7778, + 0.0614, 0.6728, 0.7842, 0.5414, 0.3789, 0.9884, 0.5920, + 0.9099, 0.1604, 0.7490, 0.5753, 0.0585, 0.7049, 0.5581, + 0.5176, 0.6875, 0.3656, 0.0087, 0.7959, 0.2962, 0.7801, + 0.1125, 0.0452, 0.7285, 0.0217, 0.7922, 0.9920, 0.9259, + 0.6845, 0.9624, 0.9663, 0.3054, 0.9569, 0.8543, 0.5173, + 0.2849, 0.6607, 0.9863, 0.8342, 0.8845, 0.7987, 0.3752, + 0.4894, 0.1432, 0.7905, 0.2055, 0.5707, 0.6925, 0.7001, + 0.8584, 0.1737, 0.6575, 0.8089, 0.2901, 0.4984, 0.0652, + 0.4835, 0.9132, 0.2529, 0.0078, 0.4172, 0.3195, 0.8749, + 0.1647, 0.7075, 0.8781, 0.8740, 0.2782, 0.7006, 0.4985, + 0.8867, 0.6613, 0.0792, 0.8806, 0.2490, 0.5613, 0.7993, + 0.0865, 0.5244, 0.0047, 0.0988, 0.9358, 0.1302, 0.4057, + 0.0976, 0.1346, 0.3566, 0.0626, 0.8754, 0.9295, 0.2328, + 0.3769, 0.1588, 0.9292, 0.4953, 0.1373, 0.2802, 0.3488, + 0.7633, 0.2961, 0.1349, 0.2026, 0.7383, 0.6437, 0.1659, + 0.6598, 0.4589, 0.6136, 0.5957, 0.5537, 0.0746, 0.6399, + 0.1265, 0.2960, 0.3090, 0.0291, 0.6074, 0.8325, 0.7495, + 0.0128, 0.3680, 0.0523, 0.9777, 0.7945, 0.6058, 0.6933, + 0.8803, 0.5024, 0.6410, 0.0012, 0.4817, 0.1404, 0.9206, + 0.6278, 0.5152, 0.7429, 0.9504, 0.5112, 0.8716, 0.2935, + 0.1368, 0.9447, 0.6216, 0.0358, 0.7554, 0.0534, 0.5254, + 0.9518, 0.5931, 0.8775, 0.4097, 0.1981, 0.4269, 0.1336, + 0.7316, 0.4084, 0.3779, 0.4594, 0.3602, 0.2949, 0.6555, + 0.6949, 0.2494, 0.6488, 0.7850, 0.8291, 0.8944, 0.1369, + 0.6184, 0.3261, 0.0127, 0.1038, 0.8709, 0.7868, 0.8779, + 0.9236, 0.1612, 0.5488, 0.9434, 0.7733, 0.9305, 0.4343, + 0.0363, 0.5938, 0.7590, 0.6970, 0.1389, 0.5668, 0.4351, + 0.6738, 0.9483, 0.6525, 0.2794, 0.5059, 0.4023]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4331, 0.6450, 0.9910, ..., 0.2595, 0.5081, 0.5389]) +tensor([0.6468, 0.9076, 0.6761, ..., 0.7586, 0.6739, 0.5784]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -375,378 +268,271 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 0.019162416458129883 seconds +Time: 0.020120859146118164 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '54794', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.6940038204193115} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52184', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.5676329135894775} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([7911, 3161, 1262, 5192, 2357, 7680, 3052, 5022, 1293, - 7499, 7977, 6646, 8728, 6750, 3375, 7755, 8716, 989, - 2588, 5755, 4685, 6627, 6922, 2935, 5765, 5675, 6658, - 7409, 1352, 5956, 5147, 9211, 2687, 6131, 3712, 585, - 972, 5746, 1667, 2824, 532, 1593, 3811, 2678, 9253, - 6720, 7376, 2847, 3241, 3587, 6951, 8223, 340, 5643, - 5214, 8395, 1798, 7092, 3059, 6235, 7618, 486, 1778, - 3237, 6697, 2502, 70, 2828, 606, 6952, 9286, 5888, - 3027, 7384, 4383, 6428, 4570, 1783, 1294, 7026, 2076, - 4918, 1488, 770, 957, 9836, 1056, 2315, 474, 4971, - 3554, 7405, 3832, 8094, 428, 4395, 7438, 9704, 1633, - 6658, 2294, 942, 9262, 5660, 6854, 8366, 1078, 2854, - 2434, 3985, 3190, 6248, 4349, 7344, 8178, 4674, 4996, - 6996, 4763, 2253, 1593, 2769, 2167, 4085, 6424, 9420, - 1242, 4354, 6300, 5604, 1686, 343, 7940, 6599, 3332, - 608, 9023, 9655, 8459, 2336, 1618, 2580, 2819, 4817, - 7559, 8620, 7978, 3358, 9453, 7322, 6766, 7975, 3814, - 2608, 1766, 6541, 541, 4726, 3955, 8201, 8104, 2555, - 2228, 8697, 1378, 8871, 834, 5468, 3992, 9300, 86, - 2146, 9398, 545, 591, 5437, 3320, 3820, 8590, 4135, - 3007, 2522, 3922, 759, 5236, 2202, 5387, 7098, 8105, - 3515, 2202, 6617, 5113, 2562, 4701, 3458, 7765, 7192, - 5627, 6408, 9799, 6567, 37, 4616, 271, 7974, 5307, - 9377, 7332, 242, 7565, 8542, 4407, 5590, 7693, 2731, - 2564, 8507, 8892, 8486, 4127, 1297, 9765, 3824, 1756, - 2347, 1093, 902, 2490, 5112, 7578, 5964, 7096, 3492, - 4916, 3077, 6301, 2536, 5861, 1994, 5412, 4184, 9482, - 5468, 6176, 4074, 3736, 5024, 9965, 8881, 7464, 2623, - 7850, 5867, 9092, 7361, 2749, 846, 3270, 8221, 7657, - 5496, 1168, 6665, 6181, 6205, 2270, 71, 245, 7836, - 4101, 8588, 9378, 9346, 9253, 744, 9634, 1404, 1681, - 2033, 4795, 7187, 7704, 9971, 4828, 565, 2094, 6252, - 5611, 2984, 5542, 4429, 355, 2391, 7037, 3331, 7438, - 5605, 5312, 7733, 9542, 2583, 2261, 2389, 2273, 3645, - 3262, 6647, 1886, 8483, 8073, 2214, 4868, 6777, 8269, - 220, 9717, 5193, 6143, 4955, 3714, 2595, 1190, 5189, - 9733, 4412, 3274, 76, 3147, 2339, 1088, 8301, 86, - 524, 2350, 3099, 415, 3796, 2977, 9907, 1395, 6139, - 3508, 4520, 9604, 3340, 6041, 595, 5957, 587, 1195, - 3410, 8756, 8453, 6996, 1505, 8596, 2662, 1437, 6740, - 7061, 9248, 4638, 2125, 1555, 5142, 5829, 750, 446, - 1115, 4258, 132, 8639, 8024, 4417, 7514, 9888, 5171, - 3804, 9336, 7399, 7498, 3446, 517, 7466, 8729, 1786, - 5529, 3386, 5123, 5833, 8522, 7193, 4596, 3871, 9148, - 2138, 2608, 1676, 5641, 518, 8701, 9620, 1125, 9274, - 49, 13, 2714, 3823, 7010, 8129, 8631, 2865, 1750, - 395, 7820, 5688, 9017, 6767, 3565, 4202, 6678, 2244, - 4721, 9333, 8878, 5819, 3634, 8317, 592, 1447, 3012, - 6300, 8346, 1608, 1213, 80, 7267, 4298, 6409, 2292, - 1670, 6487, 5529, 8657, 2479, 6350, 6733, 7577, 2776, - 1458, 945, 8267, 2346, 8008, 3294, 2469, 7020, 2715, - 4869, 2481, 110, 4234, 4845, 890, 5463, 618, 8967, - 4348, 4756, 6306, 3882, 9183, 1947, 8107, 5369, 2472, - 2945, 5358, 9391, 1200, 4322, 8117, 5100, 4644, 6706, - 1082, 1513, 5226, 9231, 7175, 4584, 8366, 633, 1495, - 9566, 7257, 4456, 3124, 5334, 3920, 5125, 2467, 1786, - 6231, 6395, 2461, 1514, 7642, 3727, 7424, 7613, 140, - 1890, 4821, 1469, 8259, 8978, 1802, 5378, 6805, 3869, - 3783, 3061, 8658, 5541, 
472, 3859, 5961, 4577, 2988, - 7578, 2133, 7348, 1104, 8479, 3907, 7659, 5190, 50, - 2069, 5632, 5929, 7650, 8492, 5399, 5679, 9805, 3063, - 5599, 7144, 9305, 7057, 6281, 1286, 2319, 3291, 9287, - 6547, 67, 7805, 2707, 2756, 5914, 771, 1219, 7868, - 5501, 6882, 9791, 7785, 1336, 5478, 8053, 5806, 7905, - 3258, 9112, 5104, 8343, 355, 2229, 5367, 3909, 5464, - 6309, 3948, 8995, 9885, 961, 7409, 259, 5234, 1566, - 6742, 4902, 8034, 5441, 8932, 8602, 8641, 2237, 3033, - 9579, 7336, 5320, 4915, 4711, 249, 8378, 2695, 4635, - 92, 567, 6131, 6716, 4080, 593, 9208, 2994, 3780, - 2398, 5252, 9333, 6118, 8719, 1357, 5108, 235, 4942, - 1134, 1638, 6292, 3484, 3730, 5250, 2588, 5449, 8152, - 3481, 8309, 8874, 8518, 9128, 7674, 540, 7089, 6715, - 9035, 4981, 6442, 2568, 3698, 4804, 8943, 9401, 5794, - 9163, 6978, 9060, 5634, 2027, 2034, 1419, 2698, 183, - 7135, 9013, 6760, 5936, 2836, 9968, 4826, 125, 558, - 4114, 8419, 405, 4366, 2095, 5583, 4237, 5302, 1643, - 9648, 2160, 4098, 7817, 4906, 2647, 1861, 3658, 3239, - 7570, 9370, 1637, 5703, 5705, 4124, 7739, 3422, 7854, - 374, 4555, 5446, 8087, 1258, 7641, 3331, 206, 3865, - 5881, 2468, 7763, 6628, 5348, 3313, 1312, 6220, 7644, - 2313, 2844, 4936, 8483, 7091, 8320, 4877, 8564, 3187, - 7061, 8879, 9360, 4897, 4797, 5039, 1909, 3193, 7560, - 7693, 3282, 6661, 6942, 7250, 971, 5884, 8450, 9680, - 3181, 9619, 3815, 7862, 3788, 2853, 1779, 620, 2071, - 7700, 9810, 9051, 7192, 1172, 513, 1386, 4644, 9842, - 8082, 2156, 1625, 6587, 1021, 7260, 7201, 4689, 6301, - 3054, 353, 719, 3436, 2864, 3509, 2571, 8255, 3289, - 2857, 9690, 7954, 7304, 3426, 8079, 286, 5262, 2657, - 9448, 9159, 1770, 1621, 7884, 4261, 6666, 4278, 4311, - 7112, 8402, 1282, 8940, 4636, 3650, 8680, 7177, 7540, - 4412, 8615, 2593, 7393, 8896, 8921, 925, 9474, 1231, - 9773, 527, 5255, 560, 8846, 659, 7410, 8294, 942, - 381, 8693, 7884, 7087, 7532, 7167, 4092, 5794, 5787, - 1151, 2797, 6916, 9663, 9835, 638, 5822, 4133, 7949, - 2884, 3556, 5839, 4844, 9052, 2314, 7830, 3146, 2062, - 4109, 7795, 1041, 8997, 551, 976, 8822, 2074, 4611, - 9870, 9228, 3272, 764, 4377, 9210, 5300, 6377, 8413, - 8545, 7738, 2478, 981, 8075, 2541, 730, 3435, 6845, - 5767, 9726, 1931, 5221, 4111, 3176, 1229, 2817, 2841, - 4797, 7580, 1623, 3843, 1681, 230, 6130, 6583, 5659, - 1878, 8027, 707, 3549, 2451, 852, 509, 669, 881, - 8913, 9704, 6400, 4696, 2366, 2605, 200, 1715, 7757, - 2614, 8597, 2968, 9280, 1679, 6206, 4716, 535, 4893, - 9648, 755, 7542, 2709, 8355, 4740, 8184, 1893, 4039, - 8992, 4769, 9806, 5916, 3331, 2004, 6705, 5834, 6838, - 3763, 2856, 3248, 7450, 851, 7429, 3879, 5390, 1163, - 6404, 5606, 1150, 1948, 251, 8914, 5922, 4549, 8687, - 2810, 7589, 9366, 9847, 3758, 1716, 1962, 2678, 4578, - 3134, 7173, 219, 8841, 5097, 3735, 2863, 5504, 1245, - 4000, 1797, 2381, 29, 3878, 2914, 2744, 2104, 7360, - 1303, 8133, 533, 2444, 608, 8144, 3134, 7588, 4660, - 219]), - values=tensor([8.4162e-01, 2.9686e-01, 6.3714e-01, 1.6817e-01, - 1.6215e-01, 6.1437e-01, 1.6080e-01, 4.0610e-01, - 7.3645e-01, 9.7877e-01, 8.8529e-01, 8.5771e-01, - 2.8589e-02, 2.0612e-02, 9.9026e-01, 9.0065e-02, - 3.1218e-01, 2.7400e-01, 6.6157e-01, 3.2569e-01, - 1.7011e-01, 4.0876e-01, 6.9676e-01, 5.2962e-01, - 4.7625e-01, 5.2877e-02, 6.4254e-01, 8.4744e-01, - 5.7453e-01, 9.2040e-01, 9.7805e-01, 8.0590e-01, - 4.4096e-01, 1.8318e-01, 8.9755e-01, 2.8005e-01, - 2.3996e-01, 1.8286e-01, 1.7292e-01, 7.1175e-01, - 2.0714e-02, 4.9211e-01, 5.1628e-01, 5.1485e-01, - 8.2763e-01, 1.8358e-01, 8.4319e-01, 4.9073e-01, - 1.8916e-01, 5.6885e-01, 3.3605e-01, 
6.3594e-01, - 5.1482e-01, 4.9282e-01, 5.9958e-01, 7.0978e-01, - 1.7567e-01, 1.1230e-02, 2.8903e-01, 9.9449e-01, - 1.0897e-01, 3.9189e-01, 5.9951e-01, 6.7777e-01, - 8.9047e-01, 6.3510e-01, 8.1965e-01, 8.2343e-01, - 5.8432e-01, 2.8112e-01, 3.4195e-02, 8.1705e-01, - 2.9005e-01, 1.3897e-01, 6.9293e-01, 1.5391e-01, - 2.2869e-01, 8.3383e-01, 5.4870e-02, 5.6915e-01, - 3.8852e-01, 2.6519e-01, 4.5692e-01, 5.4502e-02, - 5.7453e-01, 7.9181e-01, 5.6057e-01, 7.9160e-01, - 4.4644e-01, 5.0880e-01, 7.4451e-01, 3.2819e-01, - 8.0628e-01, 8.6929e-01, 3.4416e-01, 7.6103e-01, - 3.6389e-01, 3.5701e-01, 8.2362e-01, 6.3586e-01, - 8.3509e-01, 8.9715e-01, 8.8398e-01, 8.3148e-01, - 8.4779e-01, 9.5284e-01, 8.3857e-01, 6.0527e-01, - 2.4645e-01, 8.9898e-01, 4.8666e-01, 8.8162e-01, - 6.1103e-01, 7.0425e-01, 6.8737e-01, 9.5292e-01, - 1.6806e-02, 4.9765e-01, 7.8396e-01, 8.0695e-01, - 7.4330e-01, 8.0275e-01, 4.5302e-01, 8.7655e-01, - 7.2798e-01, 7.4388e-01, 9.7066e-02, 1.6811e-01, - 1.8099e-01, 2.9537e-01, 2.8883e-01, 6.7815e-01, - 7.5316e-01, 1.1697e-01, 4.4732e-01, 7.4137e-01, - 6.2402e-01, 4.4193e-01, 6.8972e-01, 9.3106e-01, - 3.8489e-01, 6.1625e-01, 7.1358e-01, 1.2465e-01, - 8.8973e-01, 4.3619e-01, 3.8639e-01, 6.1804e-01, - 3.4197e-01, 5.8895e-02, 7.7694e-01, 9.2218e-03, - 4.0218e-01, 3.5958e-01, 5.1621e-01, 2.1586e-02, - 1.8157e-01, 7.4709e-01, 1.2399e-01, 5.7793e-02, - 8.7317e-01, 3.4726e-01, 5.8867e-01, 9.9642e-01, - 1.2702e-01, 3.3654e-01, 5.9711e-01, 9.5511e-01, - 7.8878e-02, 1.0158e-01, 8.9875e-01, 4.1339e-01, - 3.2155e-01, 9.3632e-01, 2.3121e-01, 5.1482e-01, - 3.4904e-01, 7.3977e-01, 5.6021e-01, 6.6210e-01, - 3.0267e-01, 5.9350e-03, 8.0584e-01, 1.7500e-01, - 4.5025e-01, 2.9967e-01, 6.1546e-01, 8.6159e-01, - 3.5830e-01, 6.4170e-01, 3.6363e-01, 7.0448e-01, - 8.1901e-01, 2.5682e-01, 1.0264e-01, 5.9217e-01, - 1.0416e-01, 3.9465e-01, 8.6154e-01, 2.6364e-01, - 9.8520e-01, 3.4529e-01, 6.1024e-01, 9.4500e-01, - 4.5989e-01, 1.2879e-02, 1.0827e-02, 6.9805e-01, - 9.4260e-01, 2.2079e-02, 1.8200e-01, 2.3832e-02, - 1.8420e-01, 3.2436e-01, 5.0197e-01, 5.8627e-01, - 6.7237e-01, 5.7570e-01, 5.1838e-01, 5.0143e-01, - 7.0347e-01, 5.7707e-01, 2.1966e-01, 5.2688e-01, - 9.8927e-01, 3.2737e-01, 3.0341e-01, 8.9357e-02, - 9.6648e-01, 6.1951e-01, 8.4411e-01, 5.1367e-02, - 9.2186e-01, 3.2165e-02, 5.2456e-02, 8.1840e-01, - 6.5262e-01, 1.4597e-01, 3.5673e-01, 6.4159e-01, - 3.6264e-01, 6.3084e-02, 8.3980e-01, 2.7736e-01, - 2.9819e-01, 8.1897e-01, 3.4132e-01, 4.9222e-01, - 3.9032e-01, 7.3209e-02, 6.3313e-01, 9.4217e-01, - 8.0046e-02, 8.8661e-01, 5.4139e-02, 9.1303e-01, - 6.2584e-01, 8.8930e-01, 6.4369e-01, 1.6622e-01, - 9.3592e-01, 7.0938e-01, 3.8339e-01, 6.4713e-01, - 2.2843e-01, 9.2113e-01, 6.4446e-03, 5.0513e-01, - 7.8366e-02, 4.3885e-01, 8.3500e-01, 2.3568e-01, - 2.3491e-01, 4.6610e-01, 4.7569e-01, 7.3570e-03, - 3.8697e-01, 3.0485e-02, 9.0027e-01, 3.9759e-01, - 3.0559e-01, 3.6608e-01, 8.3416e-01, 4.2041e-01, - 5.2523e-01, 4.0730e-02, 2.0952e-01, 2.7056e-01, - 7.0586e-01, 8.2052e-01, 2.9046e-01, 8.7390e-01, - 6.6215e-01, 9.9736e-01, 8.2920e-01, 9.5761e-01, - 4.7888e-01, 8.5421e-01, 1.9642e-01, 4.6196e-01, - 2.6211e-01, 4.3134e-01, 5.5166e-01, 3.0145e-02, - 2.4152e-01, 9.4430e-01, 8.6994e-01, 7.1148e-01, - 2.3961e-01, 3.3245e-01, 5.5968e-01, 1.9932e-01, - 3.6100e-02, 4.2814e-01, 9.4590e-01, 7.4276e-01, - 5.4453e-01, 1.6084e-01, 2.2574e-01, 3.1439e-01, - 3.7958e-02, 8.4663e-01, 2.3722e-01, 2.8379e-01, - 5.7939e-01, 6.0883e-01, 5.5850e-01, 3.0063e-01, - 4.9289e-01, 6.4727e-02, 3.2414e-01, 2.7767e-02, - 1.5239e-01, 4.5157e-01, 6.5935e-01, 
8.5589e-01, - 8.2281e-01, 1.3098e-01, 5.7403e-01, 8.8951e-01, - 2.3362e-01, 4.4099e-01, 2.9584e-01, 2.1959e-01, - 9.8822e-01, 7.6077e-01, 3.2764e-02, 2.2886e-01, - 1.5180e-01, 2.4079e-02, 2.9072e-01, 1.0037e-01, - 8.8988e-01, 7.2298e-01, 4.5100e-01, 7.2322e-01, - 5.2411e-02, 7.5577e-01, 6.5300e-01, 1.9748e-01, - 4.6204e-01, 7.1756e-02, 5.5358e-01, 6.3139e-01, - 1.1557e-01, 8.2697e-01, 8.2970e-01, 9.8441e-01, - 9.1614e-02, 3.1059e-03, 7.4810e-01, 9.8221e-01, - 1.2364e-01, 1.9347e-01, 1.9584e-02, 9.5603e-01, - 6.7176e-01, 6.8425e-01, 1.9972e-01, 5.4195e-01, - 6.3874e-01, 7.5708e-01, 7.9813e-01, 3.3289e-01, - 8.7933e-01, 7.3670e-01, 6.5968e-01, 2.5864e-01, - 6.8278e-01, 3.0164e-01, 9.2539e-01, 4.0284e-01, - 4.5093e-01, 8.7324e-01, 8.0143e-01, 4.8116e-01, - 3.4355e-01, 8.8919e-01, 1.5159e-01, 7.5020e-01, - 9.8429e-01, 9.9678e-01, 4.1278e-01, 7.5484e-01, - 4.7396e-01, 2.1042e-01, 7.2843e-02, 1.4297e-01, - 6.7845e-01, 8.5086e-02, 5.1551e-01, 4.9162e-01, - 9.7966e-01, 3.6049e-01, 4.7386e-01, 5.7859e-01, - 7.0943e-01, 9.1205e-02, 6.6770e-01, 5.3590e-01, - 3.9755e-01, 9.6597e-01, 1.8599e-01, 4.7860e-01, - 4.2112e-02, 1.8189e-01, 2.4269e-01, 7.7636e-01, - 9.7677e-01, 1.4310e-01, 6.1272e-02, 1.4161e-01, - 5.1371e-01, 6.6644e-01, 6.0793e-01, 5.3914e-01, - 2.2731e-01, 8.7603e-01, 2.9862e-01, 4.6113e-01, - 4.3524e-01, 9.1175e-01, 3.9624e-01, 1.0346e-01, - 6.1379e-01, 4.7208e-01, 2.6070e-01, 5.2191e-01, - 3.9442e-01, 1.4743e-01, 3.8716e-01, 3.7503e-01, - 1.3589e-01, 5.0473e-01, 6.7728e-01, 9.0319e-01, - 6.1190e-01, 1.9275e-01, 3.5936e-01, 6.4987e-01, - 3.1396e-01, 2.8159e-01, 8.3147e-02, 4.5551e-01, - 6.6356e-01, 7.0244e-02, 1.7106e-02, 6.4133e-02, - 6.5616e-02, 2.5462e-01, 7.3261e-01, 5.0907e-01, - 6.7076e-01, 4.0218e-01, 6.6139e-01, 1.8017e-01, - 2.0269e-01, 2.6760e-01, 6.2695e-01, 7.8797e-01, - 9.1126e-01, 8.1648e-01, 3.9421e-01, 4.6220e-01, - 6.8836e-01, 5.0359e-01, 6.4967e-01, 9.8028e-01, - 9.1023e-01, 1.1718e-01, 4.9634e-01, 5.6551e-01, - 6.8389e-01, 3.6489e-01, 3.9624e-01, 5.3191e-01, - 5.4360e-01, 8.7333e-01, 6.8269e-01, 7.5760e-01, - 1.1377e-01, 1.5015e-01, 4.3439e-02, 6.1503e-01, - 3.4363e-01, 4.3709e-01, 6.4079e-01, 7.8204e-01, - 1.1716e-01, 5.7515e-01, 6.4631e-02, 1.4533e-01, - 5.2120e-01, 8.7179e-01, 4.7879e-01, 9.3303e-02, - 2.1845e-01, 6.3517e-01, 6.8688e-01, 3.5430e-01, - 9.3739e-01, 7.0591e-01, 7.7055e-01, 7.5304e-01, - 3.9056e-02, 1.4006e-02, 2.2822e-01, 9.9645e-01, - 5.4400e-02, 8.0951e-01, 1.8179e-01, 1.9010e-01, - 3.0349e-01, 2.4530e-01, 3.4986e-01, 4.8871e-01, - 3.9206e-01, 5.5413e-01, 6.4310e-02, 9.0201e-01, - 2.0129e-01, 7.2874e-01, 7.4927e-01, 4.7834e-01, - 1.0777e-02, 2.5515e-01, 2.9574e-01, 6.6742e-01, - 5.9485e-01, 3.8711e-01, 1.8718e-01, 8.6283e-01, - 9.1934e-01, 8.9372e-01, 2.7499e-01, 3.4311e-02, - 5.7311e-01, 2.1089e-01, 4.2092e-01, 2.0315e-01, - 3.0962e-01, 1.3347e-01, 9.5185e-01, 4.1612e-01, - 4.3202e-01, 2.5416e-01, 7.7755e-01, 7.1075e-01, - 4.2297e-01, 2.0397e-01, 1.3687e-01, 9.6658e-01, - 3.0341e-01, 9.1344e-01, 5.3464e-01, 4.7718e-01, - 7.5356e-01, 8.8694e-01, 8.9264e-01, 8.1240e-02, - 7.6203e-02, 3.2592e-01, 5.3572e-02, 7.7634e-02, - 7.7625e-01, 1.5836e-02, 5.8242e-01, 2.1002e-01, - 8.9491e-02, 6.8441e-01, 6.4412e-01, 2.9177e-02, - 6.1618e-01, 1.2632e-01, 5.3563e-01, 5.0509e-01, - 5.5684e-02, 5.2367e-01, 2.2220e-01, 6.6590e-01, - 3.5274e-01, 2.7941e-01, 3.9525e-01, 1.3783e-01, - 9.9827e-01, 2.9496e-01, 3.3969e-02, 7.0416e-01, - 6.2724e-03, 3.5413e-01, 4.3579e-01, 2.8431e-01, - 6.6274e-01, 5.5869e-03, 4.6483e-01, 9.4265e-01, - 1.9390e-01, 2.6871e-01, 8.9957e-01, 
7.6910e-01, - 9.2851e-01, 8.5764e-01, 2.8679e-01, 5.6221e-02, - 6.3789e-01, 4.9980e-01, 2.9834e-01, 1.6547e-01, - 7.2863e-01, 7.4335e-01, 1.7202e-01, 6.0711e-01, - 6.2744e-01, 2.3141e-01, 7.7128e-01, 6.9575e-01, - 6.1477e-01, 3.6069e-01, 4.9022e-02, 9.4412e-01, - 3.5188e-02, 5.7942e-01, 2.9890e-02, 8.4625e-01, - 7.8233e-01, 8.3817e-01, 5.2931e-01, 7.8769e-01, - 5.6272e-01, 7.8863e-01, 1.4452e-03, 7.3161e-02, - 6.3288e-01, 7.0660e-01, 5.5303e-01, 6.9013e-02, - 9.2833e-01, 6.1609e-01, 2.7628e-01, 3.8301e-01, - 7.8596e-01, 8.8683e-01, 7.7771e-01, 2.8419e-01, - 6.9736e-02, 9.6704e-01, 8.8528e-01, 2.2352e-01, - 2.4524e-01, 8.5698e-01, 6.4514e-01, 6.2044e-01, - 8.3695e-01, 9.1677e-01, 1.1584e-01, 7.9561e-01, - 9.3166e-01, 2.7498e-01, 9.4050e-01, 3.5642e-01, - 3.1322e-01, 3.1113e-01, 9.8499e-01, 7.1392e-02, - 3.1345e-01, 7.0182e-01, 8.1882e-01, 4.9724e-01, - 5.4993e-01, 8.6382e-01, 6.3868e-01, 1.5000e-02, - 5.5265e-01, 2.4538e-01, 9.4592e-02, 7.1374e-01, - 7.2630e-01, 8.8515e-01, 8.8786e-01, 8.9234e-01, - 8.5142e-03, 2.3346e-01, 2.8292e-01, 2.9767e-01, - 8.2406e-01, 9.4692e-01, 2.3154e-01, 6.5589e-02, - 9.1864e-01, 6.0994e-01, 7.0920e-01, 9.5407e-01, - 6.2214e-01, 5.5778e-01, 6.0645e-01, 4.8067e-01, - 2.8270e-01, 1.7280e-01, 1.0979e-01, 9.5386e-01, - 6.6667e-01, 7.6987e-01, 9.1941e-01, 3.8360e-01, - 8.9910e-02, 8.7933e-01, 6.6930e-01, 4.3986e-01, - 5.4556e-01, 4.9679e-01, 4.7997e-01, 1.4883e-01, - 4.5139e-01, 1.8357e-01, 2.4832e-01, 3.7126e-01, - 9.7198e-01, 3.6657e-01, 2.2981e-01, 1.6650e-02, - 6.3243e-01, 1.5482e-01, 8.0779e-01, 1.6308e-01, - 4.8827e-01, 1.8842e-01, 9.6686e-01, 7.4347e-01, - 6.0157e-01, 4.7871e-01, 4.4855e-02, 8.6666e-01, - 9.0341e-01, 7.7832e-01, 1.2656e-01, 4.6982e-01, - 6.7927e-01, 2.0556e-01, 4.8391e-01, 1.0870e-01, - 6.0179e-01, 8.6866e-01, 5.9287e-02, 3.0958e-01, - 3.3432e-01, 5.5680e-01, 6.7654e-01, 5.0669e-01, - 3.4185e-01, 1.6502e-01, 2.0130e-01, 4.4286e-01, - 1.0996e-01, 7.5754e-01, 3.4793e-01, 2.8929e-01, - 7.9544e-01, 1.0781e-02, 9.4940e-01, 4.9267e-01, - 7.7169e-01, 5.1969e-01, 6.1145e-01, 8.8191e-01, - 3.8444e-01, 2.4200e-01, 1.7867e-01, 8.6492e-01, - 8.4970e-01, 5.0800e-01, 5.6133e-01, 6.4340e-01, - 5.8533e-01, 6.0481e-01, 2.3846e-01, 1.4102e-01, - 3.7119e-01, 5.6156e-01, 2.8298e-01, 7.1845e-01, - 1.4668e-01, 4.7663e-01, 4.2636e-01, 1.0275e-02, - 2.9743e-01, 1.1376e-02, 7.0160e-02, 4.7251e-01, - 9.5838e-02, 8.5482e-01, 7.9510e-01, 3.6924e-01, - 9.5391e-01, 4.9116e-01, 3.0690e-04, 8.6869e-01, - 9.4535e-01, 3.7650e-01, 9.4503e-01, 9.6097e-01, - 5.9794e-03, 4.1384e-01, 2.2876e-01, 5.0617e-01, - 2.3170e-02, 3.0538e-01, 4.0455e-01, 9.4603e-02, - 2.0357e-01, 2.1517e-01, 9.0257e-01, 3.8634e-01, - 2.4908e-01, 8.0986e-02, 3.9443e-01, 2.4807e-01, - 2.8439e-01, 4.2593e-01, 3.6809e-01, 1.9789e-01, - 9.8594e-01, 8.9386e-01, 6.2015e-01, 1.7708e-01, - 3.8974e-01, 7.4022e-01, 5.4677e-01, 4.3473e-01, - 3.6837e-01, 3.6400e-01, 9.9743e-01, 7.1220e-01, - 8.7479e-01, 2.2457e-01, 8.5053e-01, 4.2955e-01, - 7.5176e-01, 1.0062e-01, 7.3583e-01, 2.5060e-02, - 5.0283e-02, 8.5963e-01, 4.0599e-01, 9.0739e-01, - 2.8833e-01, 2.1531e-01, 1.5452e-02, 9.4690e-01, - 3.1250e-01, 5.6093e-01, 6.1900e-01, 7.3466e-01, - 7.2323e-01, 8.9401e-01, 7.7222e-01, 9.3134e-01, - 6.5260e-01, 8.7584e-01, 4.5350e-01, 7.4368e-01, - 5.8671e-02, 3.5188e-01, 8.3654e-02, 3.3309e-01, - 1.0706e-01, 4.1255e-01, 7.9140e-01, 2.8887e-01, - 7.8441e-02, 2.1380e-01, 9.3099e-01, 4.7350e-01, - 9.2422e-01, 1.4535e-01, 3.3640e-01, 9.2002e-01, - 5.7304e-01, 6.5916e-01, 8.6914e-01, 2.5138e-01, - 3.1603e-01, 3.5203e-01, 3.1131e-02, 
6.5641e-01, - 5.7494e-01, 4.7371e-01, 5.0827e-01, 1.5062e-01, - 5.8503e-01, 8.7183e-01, 7.0288e-01, 9.2990e-01, - 1.5908e-01, 4.9748e-01, 9.3223e-01, 9.9643e-01, - 3.8223e-01, 7.7110e-02, 3.4388e-02, 8.0052e-01, - 8.6565e-01, 2.9712e-01, 8.5471e-01, 8.6162e-01, - 4.4628e-01, 5.6741e-01, 4.0056e-02, 7.8839e-01, - 8.9395e-01, 6.8466e-01, 3.0041e-01, 2.2500e-02, - 1.8457e-01, 1.3861e-01, 1.0338e-01, 4.6879e-01, - 9.2662e-01, 7.4286e-01, 5.5725e-01, 5.3379e-01, - 6.8767e-01, 7.4719e-01, 9.0113e-01, 7.7449e-01, - 8.9973e-01, 7.3285e-01, 2.0062e-01, 1.0561e-01, - 4.3906e-01, 3.1029e-01, 6.7605e-01, 7.1638e-01, - 5.8611e-01, 9.9285e-01, 6.1233e-01, 8.2145e-01, - 3.1992e-01, 5.9686e-01, 6.7828e-01, 7.8322e-01, - 3.7277e-01, 3.7731e-01, 6.0362e-01, 1.4908e-01, - 5.2504e-01, 7.4365e-01, 2.2895e-01, 4.0076e-01, - 3.6715e-01, 8.1935e-01, 9.5313e-01, 5.8028e-01, - 4.6037e-01, 2.6326e-01, 5.8730e-01, 3.8793e-01, - 9.9606e-01, 9.6277e-01, 2.9002e-01, 4.2454e-01, - 2.8341e-01, 2.3389e-01, 3.9656e-01, 8.9761e-01, - 7.3352e-01, 1.8460e-01, 7.1116e-01, 2.3146e-01, - 5.8144e-01, 9.4575e-02, 8.3670e-01, 7.9219e-01, - 4.0643e-01, 1.8008e-01, 1.0599e-02, 4.8597e-01, - 2.9480e-01, 2.6967e-01, 8.8380e-03, 9.0770e-02]), + col_indices=tensor([4978, 883, 5457, 2186, 1867, 3637, 4039, 4178, 2714, + 3226, 9016, 9321, 3262, 4970, 6036, 9041, 4002, 6528, + 6091, 5105, 634, 916, 1128, 5864, 7032, 2559, 6242, + 133, 7301, 9657, 5037, 5418, 7993, 2026, 9554, 4901, + 5763, 5473, 4076, 2098, 6082, 2210, 4669, 2584, 8139, + 1500, 6900, 803, 4438, 5964, 502, 4631, 6706, 2916, + 4633, 7474, 7868, 9989, 8612, 7836, 6901, 6628, 1779, + 3887, 502, 1731, 7779, 9481, 4983, 5905, 638, 6476, + 8672, 7529, 1771, 1914, 2848, 6662, 5052, 8689, 5784, + 8686, 7689, 7879, 3319, 4537, 3211, 972, 1873, 5047, + 7044, 9150, 1444, 7688, 6744, 1444, 9012, 1171, 368, + 1373, 3776, 5147, 8525, 208, 7790, 8791, 7477, 1789, + 3603, 5820, 7746, 157, 2875, 2287, 374, 6895, 1093, + 38, 5522, 7585, 6497, 378, 1415, 5055, 3095, 1452, + 2287, 529, 8918, 7670, 6160, 8771, 2481, 7572, 3941, + 6505, 9304, 1303, 2666, 1930, 5958, 6481, 9, 6596, + 569, 5804, 8973, 9587, 4222, 4727, 7881, 5386, 3609, + 1, 8045, 5870, 4087, 7965, 8765, 3550, 9339, 7129, + 6323, 3951, 2805, 8553, 2643, 4432, 3005, 6829, 389, + 5708, 3629, 7700, 5712, 9898, 8551, 2635, 5717, 2414, + 4385, 8668, 8672, 6413, 4575, 8517, 8322, 9209, 6010, + 7633, 9352, 7009, 63, 9547, 4327, 5671, 9732, 7688, + 295, 390, 3392, 3551, 2435, 4318, 4378, 395, 7623, + 8433, 4010, 2640, 4769, 397, 3615, 3380, 1030, 283, + 9766, 5028, 2755, 9284, 6627, 7951, 4316, 2581, 7656, + 6794, 9730, 1542, 2363, 5557, 8145, 6730, 5553, 5842, + 5785, 4346, 7714, 4409, 4600, 3974, 9618, 6444, 1286, + 4491, 1982, 5629, 6226, 6858, 6605, 3378, 494, 830, + 1279, 8749, 832, 5933, 2256, 8625, 3317, 4476, 62, + 3137, 8173, 9848, 8907, 974, 6684, 842, 1698, 4611, + 8669, 3391, 2343, 2417, 9868, 1421, 5645, 884, 3600, + 8685, 669, 6381, 8257, 4630, 2747, 1346, 2968, 3547, + 1987, 1475, 1070, 9847, 1492, 9821, 4366, 1298, 6590, + 7751, 2540, 1312, 3129, 7969, 9461, 428, 5347, 9916, + 3108, 1447, 5112, 1804, 921, 8384, 632, 204, 7838, + 7440, 2957, 4517, 4914, 2770, 3814, 1046, 448, 9906, + 3887, 5601, 7279, 3756, 2145, 9799, 3561, 5463, 3974, + 4127, 5298, 2650, 5947, 4546, 8247, 2609, 8814, 7816, + 7832, 9395, 5882, 2148, 5198, 4705, 4842, 7236, 2324, + 4566, 9387, 3663, 8590, 9924, 5039, 6796, 8853, 6605, + 3145, 8963, 6096, 960, 8149, 9097, 8779, 3126, 8995, + 8859, 1346, 5646, 7177, 9227, 3350, 851, 8634, 5227, + 7305, 760, 
9834, 8836, 6314, 8322, 5964, 3226, 9218, + 7012, 7385, 1496, 6032, 2851, 3380, 9305, 6428, 6792, + 5495, 2692, 9651, 3847, 4512, 5070, 1362, 4404, 5606, + 2985, 7884, 6259, 8084, 32, 4120, 1675, 1423, 5198, + 6657, 8077, 7073, 4768, 7456, 9415, 9735, 4337, 9164, + 7483, 3031, 3971, 5570, 7482, 9702, 6246, 600, 5544, + 4364, 6754, 3546, 9587, 6525, 5498, 416, 3242, 3346, + 5643, 7779, 1950, 3057, 6147, 9375, 6191, 1037, 3354, + 5619, 4475, 11, 9284, 7595, 1187, 3693, 3095, 9051, + 8886, 4894, 872, 3646, 600, 3827, 4019, 707, 5049, + 9887, 6644, 7787, 4306, 1351, 9143, 957, 8234, 5269, + 3896, 1959, 1969, 1415, 2466, 3242, 4822, 5500, 6720, + 3402, 726, 6654, 3579, 3916, 8111, 6074, 7221, 4286, + 8562, 4518, 3359, 8240, 9930, 708, 8628, 2531, 4926, + 2387, 2866, 1609, 7931, 4518, 8955, 7478, 8164, 4665, + 6403, 6458, 2576, 8260, 9132, 9430, 120, 4032, 6950, + 669, 9219, 4738, 457, 4592, 2432, 8536, 4783, 8499, + 6118, 6058, 2126, 1826, 2693, 8014, 8192, 6305, 1103, + 812, 3039, 2907, 5615, 6135, 6138, 3686, 1917, 2978, + 7946, 3466, 300, 6303, 927, 6178, 4949, 7317, 6977, + 2579, 7607, 5139, 7803, 5610, 8569, 2967, 448, 4454, + 7654, 4918, 9866, 3044, 6049, 5890, 9481, 2702, 3341, + 3717, 8903, 4464, 4463, 9666, 6457, 1843, 2033, 8690, + 7664, 2239, 7627, 436, 8587, 4679, 6960, 8201, 4019, + 6453, 634, 7413, 8468, 359, 2014, 2982, 6210, 2682, + 8995, 9968, 9395, 7813, 4624, 1688, 4319, 4211, 8656, + 3060, 592, 1449, 9281, 6834, 6922, 9299, 1728, 706, + 474, 9868, 2038, 2122, 5869, 4814, 993, 4055, 144, + 7509, 9225, 3119, 7020, 8400, 879, 2721, 3360, 2902, + 1559, 5934, 5642, 72, 5505, 9627, 1716, 8430, 8718, + 6908, 480, 3886, 2021, 8613, 6379, 2878, 1554, 5987, + 4419, 7664, 8798, 9008, 6350, 2953, 6595, 3787, 6533, + 4223, 9607, 3434, 4651, 3166, 9535, 7179, 1363, 990, + 8656, 5888, 833, 7827, 1821, 5414, 5985, 8601, 5616, + 9626, 2443, 3214, 2209, 1126, 2883, 7835, 1643, 308, + 8531, 6380, 1089, 649, 1817, 7914, 1175, 2651, 9652, + 1351, 690, 1053, 4603, 2157, 1158, 6639, 7070, 9869, + 9019, 8820, 7744, 7952, 2183, 6278, 8872, 1671, 9806, + 9470, 4797, 9043, 1823, 7628, 12, 5468, 1330, 2338, + 9625, 264, 7700, 2726, 8642, 6091, 5096, 9176, 3471, + 6744, 8674, 961, 81, 5750, 9281, 6477, 9955, 7802, + 22, 9519, 9870, 3778, 3791, 7323, 1567, 4334, 7879, + 1651, 6518, 5203, 9555, 8021, 1231, 5243, 1159, 7655, + 697, 2526, 1049, 6176, 4658, 6919, 3861, 9291, 8263, + 2659, 2174, 8141, 8510, 3912, 9916, 5140, 5348, 6832, + 3477, 3542, 7359, 3112, 728, 6583, 5095, 7206, 9031, + 1907, 890, 3556, 7193, 9446, 3515, 7475, 4279, 5402, + 3218, 9513, 4054, 5879, 5696, 9774, 369, 514, 810, + 6831, 208, 1065, 2449, 4845, 550, 772, 5063, 4748, + 2676, 19, 1630, 7186, 747, 6275, 2462, 1457, 653, + 8021, 3223, 2135, 711, 9512, 6730, 4840, 7832, 4547, + 7392, 1568, 6702, 4811, 8882, 1089, 4495, 7613, 9627, + 9665, 9104, 1158, 2060, 1820, 9201, 2175, 2345, 4909, + 8237, 3907, 4806, 6591, 4874, 3504, 724, 4864, 2497, + 2119, 6556, 4239, 2737, 199, 2499, 1160, 5253, 1132, + 4045, 1668, 6187, 8255, 8245, 9972, 8155, 4724, 4211, + 6685, 8771, 8739, 3915, 2152, 4110, 1773, 3845, 9080, + 2137, 7065, 542, 3704, 2566, 178, 1994, 7344, 137, + 6186, 4474, 9773, 5009, 4170, 2845, 7890, 4051, 8536, + 7686, 5742, 7722, 666, 5722, 9630, 6007, 4362, 5560, + 10, 8218, 1842, 1275, 635, 7362, 5495, 2136, 4211, + 378, 6538, 3994, 3748, 6079, 2689, 4902, 5604, 3045, + 9010, 9650, 2312, 8424, 5176, 8826, 526, 720, 6994, + 3776, 1045, 2465, 1651, 4068, 8717, 4375, 7159, 5081, + 1513, 6060, 615, 672, 5432, 8051, 7494, 1053, 7917, 
+ 3432, 9087, 4378, 4109, 5191, 9896, 3846, 1386, 6955, + 2375, 9064, 1262, 9060, 9875, 2339, 9546, 6020, 5654, + 7246, 8199, 8195, 517, 3302, 8555, 9326, 4235, 3586, + 4868, 9215, 6598, 2971, 8528, 7135, 2145, 4198, 1685, + 2482]), + values=tensor([0.9567, 0.6401, 0.0289, 0.0604, 0.7845, 0.0981, 0.3696, + 0.8086, 0.9521, 0.2276, 0.4990, 0.1149, 0.8484, 0.0243, + 0.4418, 0.9103, 0.6333, 0.2343, 0.9044, 0.4588, 0.6857, + 0.8698, 0.4435, 0.2208, 0.4606, 0.5779, 0.2752, 0.5548, + 0.3503, 0.7292, 0.7647, 0.0809, 0.9477, 0.6992, 0.2035, + 0.0397, 0.8108, 0.7146, 0.2163, 0.9391, 0.5506, 0.9027, + 0.7047, 0.5322, 0.7954, 0.3300, 0.6580, 0.2193, 0.0330, + 0.5974, 0.3628, 0.7895, 0.9255, 0.0804, 0.0592, 0.7538, + 0.6160, 0.8920, 0.1523, 0.4520, 0.4458, 0.1749, 0.0672, + 0.1477, 0.8366, 0.5330, 0.7843, 0.9827, 0.9894, 0.6537, + 0.3404, 0.6512, 0.0130, 0.5886, 0.9584, 0.0068, 0.9694, + 0.6461, 0.0788, 0.5784, 0.2313, 0.0856, 0.2292, 0.0251, + 0.9344, 0.6738, 0.7046, 0.7218, 0.2665, 0.5560, 0.2874, + 0.9673, 0.3856, 0.9135, 0.8380, 0.9815, 0.4728, 0.7779, + 0.6119, 0.1177, 0.1791, 0.4172, 0.3399, 0.2765, 0.6813, + 0.7396, 0.3746, 0.7203, 0.5133, 0.4445, 0.5394, 0.1730, + 0.5784, 0.9914, 0.9708, 0.8980, 0.1039, 0.2266, 0.4808, + 0.2576, 0.2658, 0.3848, 0.1454, 0.0484, 0.0300, 0.6395, + 0.7019, 0.1981, 0.1327, 0.3768, 0.4752, 0.2236, 0.9314, + 0.1229, 0.3719, 0.2322, 0.0576, 0.6666, 0.5856, 0.1380, + 0.4911, 0.4859, 0.6221, 0.2007, 0.8157, 0.8864, 0.2731, + 0.6660, 0.2513, 0.3138, 0.9262, 0.0157, 0.0204, 0.0240, + 0.4477, 0.8490, 0.5021, 0.5214, 0.4655, 0.5087, 0.0711, + 0.1911, 0.1231, 0.6346, 0.8298, 0.0380, 0.8418, 0.9598, + 0.1522, 0.4778, 0.9573, 0.7726, 0.2498, 0.1694, 0.0584, + 0.1372, 0.3085, 0.8106, 0.0567, 0.2010, 0.4609, 0.9837, + 0.1839, 0.5309, 0.5495, 0.2292, 0.7255, 0.4160, 0.9888, + 0.7532, 0.7898, 0.4630, 0.8565, 0.7736, 0.1426, 0.8703, + 0.3054, 0.8385, 0.6707, 0.3232, 0.5836, 0.9899, 0.1806, + 0.9804, 0.7152, 0.8613, 0.3545, 0.2345, 0.9195, 0.5430, + 0.5479, 0.9362, 0.6357, 0.6452, 0.9286, 0.9967, 0.2200, + 0.7728, 0.6736, 0.2296, 0.6237, 0.7501, 0.1240, 0.7747, + 0.3555, 0.9198, 0.3495, 0.2542, 0.7209, 0.9156, 0.0515, + 0.2703, 0.0181, 0.2011, 0.1224, 0.5085, 0.2234, 0.0302, + 0.2947, 0.3593, 0.3460, 0.0315, 0.3124, 0.4514, 0.5363, + 0.8041, 0.3048, 0.2874, 0.6160, 0.9463, 0.9323, 0.8109, + 0.8041, 0.4408, 0.8870, 0.4087, 0.7975, 0.7877, 0.0894, + 0.4902, 0.8558, 0.7063, 0.4614, 0.6886, 0.2711, 0.6619, + 0.9498, 0.1406, 0.9804, 0.1931, 0.0143, 0.8031, 0.2488, + 0.9835, 0.8021, 0.4518, 0.4608, 0.3842, 0.5552, 0.7256, + 0.7855, 0.8269, 0.6814, 0.2846, 0.5769, 0.9943, 0.9737, + 0.0990, 0.4586, 0.5238, 0.5444, 0.5529, 0.1931, 0.5886, + 0.0165, 0.7196, 0.2588, 0.0775, 0.0380, 0.7476, 0.9161, + 0.9474, 0.2280, 0.9071, 0.1424, 0.7098, 0.3749, 0.4691, + 0.2728, 0.7593, 0.2245, 0.0480, 0.9714, 0.9056, 0.2009, + 0.5604, 0.0336, 0.4233, 0.7803, 0.3998, 0.0492, 0.5798, + 0.4101, 0.1265, 0.1687, 0.0372, 0.8495, 0.1562, 0.6581, + 0.9158, 0.9566, 0.9072, 0.9118, 0.4880, 0.7234, 0.4802, + 0.3338, 0.5988, 0.1381, 0.7881, 0.3234, 0.3128, 0.2988, + 0.4535, 0.6315, 0.2457, 0.7950, 0.8746, 0.9983, 0.1234, + 0.4515, 0.7630, 0.8594, 0.7278, 0.0895, 0.2611, 0.2934, + 0.3805, 0.5172, 0.4010, 0.4599, 0.5413, 0.3705, 0.4287, + 0.6704, 0.9607, 0.2103, 0.3563, 0.9316, 0.6049, 0.2751, + 0.1796, 0.5449, 0.7685, 0.3244, 0.9161, 0.4231, 0.4814, + 0.7756, 0.0506, 0.3261, 0.7359, 0.7528, 0.3921, 0.7551, + 0.8787, 0.3083, 0.3644, 0.4313, 0.9158, 0.2947, 0.2399, + 0.3056, 0.5223, 0.4295, 0.6272, 0.0716, 0.1027, 0.8041, 
+ 0.4237, 0.6882, 0.3429, 0.5571, 0.8035, 0.0825, 0.4492, + 0.9331, 0.4995, 0.0052, 0.0186, 0.8956, 0.1651, 0.1414, + 0.3010, 0.5465, 0.4704, 0.5324, 0.8113, 0.2056, 0.0344, + 0.5246, 0.5239, 0.8946, 0.7467, 0.4814, 0.6956, 0.5354, + 0.0869, 0.2266, 0.8007, 0.0423, 0.3283, 0.5281, 0.9578, + 0.5117, 0.8639, 0.2612, 0.1341, 0.7651, 0.5612, 0.6570, + 0.6290, 0.4063, 0.1039, 0.7085, 0.3910, 0.8628, 0.1813, + 0.0925, 0.8390, 0.4482, 0.8049, 0.2080, 0.6886, 0.1542, + 0.8832, 0.4478, 0.5826, 0.0593, 0.2286, 0.5223, 0.3564, + 0.5057, 0.9998, 0.2178, 0.0151, 0.3471, 0.5350, 0.3153, + 0.8728, 0.7287, 0.8205, 0.4108, 0.2514, 0.6802, 0.5027, + 0.1787, 0.9386, 0.7083, 0.8699, 0.5892, 0.8372, 0.9544, + 0.5598, 0.7319, 0.7038, 0.5052, 0.0286, 0.9130, 0.0087, + 0.2585, 0.4425, 0.8089, 0.8004, 0.8636, 0.2907, 0.2562, + 0.6202, 0.1408, 0.5656, 0.4921, 0.8984, 0.3638, 0.9105, + 0.7809, 0.9644, 0.4391, 0.6655, 0.6642, 0.8995, 0.5586, + 0.0225, 0.2291, 0.1077, 0.7528, 0.5140, 0.1698, 0.2918, + 0.1881, 0.2420, 0.5137, 0.9434, 0.9558, 0.0272, 0.7428, + 0.6561, 0.5748, 0.2049, 0.2590, 0.8848, 0.8819, 0.6741, + 0.7961, 0.6549, 0.9523, 0.8492, 0.7389, 0.5150, 0.4675, + 0.5546, 0.7170, 0.9205, 0.3813, 0.1122, 0.4388, 0.2827, + 0.2482, 0.0387, 0.1516, 0.1721, 0.0380, 0.7997, 0.0654, + 0.8005, 0.2431, 0.4891, 0.2186, 0.7462, 0.4610, 0.3419, + 0.1420, 0.8192, 0.7360, 0.3842, 0.6789, 0.0968, 0.9573, + 0.8960, 0.5924, 0.9726, 0.3153, 0.7800, 0.7290, 0.1314, + 0.6242, 0.9031, 0.8014, 0.0040, 0.7667, 0.7923, 0.5213, + 0.0883, 0.8476, 0.2599, 0.3289, 0.0237, 0.7838, 0.3106, + 0.8314, 0.2515, 0.0515, 0.2668, 0.7277, 0.5869, 0.2264, + 0.2577, 0.0674, 0.6363, 0.6935, 0.0671, 0.3733, 0.7463, + 0.0943, 0.8731, 0.5985, 0.2687, 0.6264, 0.8042, 0.2933, + 0.4110, 0.7937, 0.7599, 0.1805, 0.2344, 0.0751, 0.7187, + 0.6132, 0.7344, 0.9792, 0.8508, 0.9008, 0.2758, 0.9281, + 0.8799, 0.4992, 0.3995, 0.9865, 0.1091, 0.8309, 0.0324, + 0.5858, 0.0073, 0.3954, 0.4228, 0.6787, 0.4711, 0.1365, + 0.7743, 0.1756, 0.1027, 0.0025, 0.5483, 0.9832, 0.8402, + 0.5437, 0.2742, 0.0736, 0.0905, 0.7345, 0.9436, 0.0105, + 0.1660, 0.4601, 0.0854, 0.6705, 0.3520, 0.7315, 0.7631, + 0.0872, 0.6993, 0.7534, 0.4279, 0.2082, 0.8083, 0.4831, + 0.4741, 0.7258, 0.9036, 0.3705, 0.5423, 0.2741, 0.1442, + 0.5668, 0.7289, 0.6670, 0.1609, 0.7237, 0.7167, 0.7745, + 0.6507, 0.9123, 0.6367, 0.9916, 0.4398, 0.5125, 0.1011, + 0.7268, 0.6097, 0.9597, 0.1060, 0.6101, 0.6706, 0.1188, + 0.2742, 0.6052, 0.5156, 0.3301, 0.2090, 0.5596, 0.5434, + 0.3379, 0.7423, 0.9199, 0.8817, 0.5745, 0.2014, 0.9000, + 0.4581, 0.6879, 0.8285, 0.8029, 0.0642, 0.6265, 0.1454, + 0.2419, 0.5675, 0.7619, 0.6607, 0.7058, 0.0416, 0.6548, + 0.3716, 0.3958, 0.0373, 0.0703, 0.6534, 0.4767, 0.4211, + 0.1503, 0.3744, 0.8061, 0.5172, 0.9588, 0.9192, 0.0870, + 0.8266, 0.5539, 0.7920, 0.3120, 0.4218, 0.8430, 0.1412, + 0.7451, 0.6605, 0.7100, 0.5820, 0.6515, 0.4478, 0.4598, + 0.1531, 0.8580, 0.8975, 0.2543, 0.0441, 0.7231, 0.1275, + 0.8924, 0.8426, 0.3492, 0.0802, 0.2876, 0.3453, 0.1496, + 0.2122, 0.2471, 0.1476, 0.1720, 0.4896, 0.2255, 0.4540, + 0.0206, 0.2897, 0.9075, 0.5187, 0.2542, 0.3667, 0.5459, + 0.6994, 0.5194, 0.8405, 0.1449, 0.5723, 0.5739, 0.2964, + 0.5884, 0.1329, 0.5094, 0.7933, 0.5916, 0.2356, 0.8448, + 0.0314, 0.2918, 0.1118, 0.2161, 0.7100, 0.7407, 0.3672, + 0.9308, 0.0455, 0.3879, 0.6382, 0.3056, 0.2402, 0.7601, + 0.4543, 0.6599, 0.4151, 0.5199, 0.5403, 0.7615, 0.3703, + 0.5084, 0.4434, 0.3133, 0.4003, 0.2618, 0.0335, 0.8971, + 0.1212, 0.6718, 0.9963, 0.6519, 0.9673, 0.5898, 0.2818, + 0.4703, 
0.5832, 0.7686, 0.7253, 0.2328, 0.5988, 0.6346, + 0.8886, 0.6579, 0.1074, 0.9487, 0.1032, 0.5863, 0.1326, + 0.6517, 0.5785, 0.6778, 0.3888, 0.6359, 0.2422, 0.6571, + 0.8073, 0.6352, 0.8975, 0.1051, 0.3768, 0.9437, 0.8829, + 0.0349, 0.6240, 0.8285, 0.0936, 0.3749, 0.7126, 0.7686, + 0.9655, 0.0865, 0.3338, 0.0500, 0.9789, 0.4468, 0.1113, + 0.6103, 0.6503, 0.6008, 0.7135, 0.5067, 0.3864, 0.6554, + 0.4833, 0.0609, 0.2451, 0.0462, 0.0089, 0.6872, 0.7457, + 0.9636, 0.7757, 0.9588, 0.9765, 0.7906, 0.2393, 0.4971, + 0.9507, 0.6379, 0.5601, 0.8341, 0.1664, 0.5108, 0.8919, + 0.2532, 0.0955, 0.0325, 0.6477, 0.3783, 0.8696, 0.1850, + 0.2149, 0.2608, 0.3061, 0.2548, 0.5371, 0.7665, 0.7060, + 0.1345, 0.3906, 0.4500, 0.8124, 0.3931, 0.1239, 0.5592, + 0.3674, 0.7581, 0.0519, 0.1445, 0.8229, 0.2828, 0.8468, + 0.1683, 0.4946, 0.7498, 0.2054, 0.3175, 0.3311, 0.6441, + 0.4811, 0.6538, 0.9832, 0.5201, 0.6879, 0.0806, 0.6384, + 0.6090, 0.0030, 0.9149, 0.3028, 0.0662, 0.4092, 0.7217, + 0.7886, 0.1827, 0.2308, 0.8342, 0.6329, 0.5975, 0.0068, + 0.4830, 0.0719, 0.4648, 0.5918, 0.8623, 0.2627, 0.7577, + 0.9797, 0.8266, 0.3086, 0.2017, 0.6303, 0.8752, 0.7597, + 0.2228, 0.3385, 0.5201, 0.1909, 0.4456, 0.3074, 0.5579, + 0.6458, 0.4935, 0.8701, 0.1571, 0.2568, 0.9694, 0.3103, + 0.9109, 0.1790, 0.3725, 0.9182, 0.0234, 0.2294, 0.3142, + 0.7794, 0.8060, 0.6996, 0.6655, 0.9955, 0.7639, 0.7232, + 0.0896, 0.2239, 0.4701, 0.0235, 0.3167, 0.6004]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8612, 0.4581, 0.7630, ..., 0.0492, 0.2811, 0.3451]) +tensor([0.9688, 0.6616, 0.6383, ..., 0.6766, 0.6207, 0.5367]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -754,271 +540,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 1.6940038204193115 seconds +Time: 1.5676329135894775 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '339631', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.730679035186768} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '349528', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3000328540802} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 1000, 1000, 1000]), - col_indices=tensor([4972, 1405, 2738, 821, 7457, 7836, 7120, 5534, 695, - 6668, 7153, 1083, 1806, 3581, 349, 2928, 9019, 9318, - 9657, 5964, 9091, 9508, 6651, 348, 2668, 9801, 712, - 819, 3104, 2583, 7723, 85, 5857, 6849, 7363, 7282, - 890, 1906, 2514, 9264, 7871, 8221, 7845, 1927, 471, - 2360, 1083, 3678, 1270, 9654, 9373, 7266, 9755, 6534, - 9043, 6484, 9707, 3465, 997, 9966, 6876, 4035, 1407, - 8477, 4573, 4827, 5524, 7695, 5419, 327, 5516, 1549, - 7525, 9004, 8876, 6314, 2296, 6027, 9452, 72, 7459, - 3044, 8559, 628, 5871, 4084, 7963, 158, 9675, 7550, - 1796, 3225, 8892, 8104, 445, 2716, 8203, 4628, 6210, - 6301, 5576, 3974, 7025, 8928, 9625, 8171, 958, 6404, - 7807, 8154, 451, 3321, 6728, 5234, 6976, 1597, 9431, - 4125, 7315, 3074, 7163, 9117, 5604, 3258, 3491, 1302, - 780, 3021, 7140, 4452, 7130, 506, 742, 4904, 4779, - 9113, 8947, 7664, 7637, 7987, 302, 2532, 7421, 9325, - 6951, 8027, 5405, 1574, 4570, 9089, 7665, 528, 4791, - 1506, 5090, 7803, 281, 4795, 4297, 886, 3647, 8898, - 3120, 2601, 2574, 3655, 4214, 5707, 7550, 3370, 2324, - 3105, 7422, 3066, 3760, 2726, 2303, 8568, 327, 2589, - 4688, 3603, 3785, 9818, 9283, 469, 9286, 2273, 8934, - 9475, 2423, 1816, 6319, 9100, 1702, 4299, 4023, 4882, - 7771, 666, 7807, 7905, 397, 3796, 7251, 6833, 7018, - 1044, 9904, 4747, 3464, 5425, 3407, 175, 604, 2034, - 5882, 7224, 7439, 7369, 6262, 2750, 1254, 2862, 9223, - 4910, 6784, 6117, 9740, 1422, 8971, 7737, 1310, 9814, - 9075, 1744, 3303, 5610, 8739, 407, 7188, 9037, 2975, - 1568, 2983, 3434, 8327, 2274, 5215, 604, 2230, 2563, - 9134, 7881, 5093, 3453, 5598, 6471, 8742, 985, 2483, - 2627, 1373, 280, 8576, 3642, 7906, 7329, 9906, 6733, - 1745, 6623, 7734, 9790, 7276, 5528, 4362, 3188, 4638, - 4280, 2963, 1142, 372, 5049, 2909, 7165, 3193, 6148, - 2271, 1609, 1609, 3499, 5181, 9245, 9403, 2085, 5724, - 1181, 1771, 2960, 1899, 6350, 6779, 7732, 2210, 1463, - 9426, 5307, 9888, 664, 9851, 9122, 4726, 1985, 4843, - 7919, 8459, 1506, 1424, 6535, 3932, 1577, 8292, 9293, - 4141, 260, 3379, 4488, 4, 9670, 8903, 4311, 6111, - 7184, 4147, 9673, 2790, 579, 3619, 2848, 7168, 4487, - 6706, 3418, 9886, 3386, 9654, 7227, 6301, 8182, 2042, - 5889, 8620, 4715, 2314, 6102, 7812, 4452, 8306, 3245, - 7661, 4687, 9454, 372, 2859, 4089, 1590, 2543, 5916, - 5118, 5541, 2765, 9023, 1245, 8801, 911, 9153, 8331, - 2736, 8562, 6910, 738, 6503, 5375, 4609, 3990, 2407, - 9645, 8807, 6638, 1515, 7548, 7650, 8775, 7638, 2895, - 6146, 8851, 53, 3827, 901, 3926, 4196, 8539, 6422, - 4461, 7590, 8706, 9720, 3957, 975, 1113, 7177, 3493, - 2456, 3981, 2750, 1678, 6119, 1019, 1015, 1019, 2970, - 6212, 4435, 9544, 7136, 3621, 5132, 6730, 1344, 8173, - 9568, 9492, 3762, 8302, 9499, 2078, 7665, 8552, 646, - 4448, 2453, 4214, 4831, 3595, 6202, 6664, 5181, 1561, - 6495, 6356, 988, 6217, 6069, 4983, 5304, 5430, 6831, - 4249, 4868, 5418, 5675, 8390, 9467, 6739, 6570, 383, - 7997, 8462, 8831, 1856, 398, 1814, 9850, 683, 2765, - 7155, 6554, 5795, 7478, 9531, 9477, 5234, 8954, 6229, - 5571, 6549, 4977, 4582, 7552, 1900, 5253, 2076, 9585, - 1810, 4182, 1517, 8795, 4585, 8047, 1703, 8851, 7992, - 9840, 7872, 8668, 1604, 1226, 7542, 2036, 8321, 8280, - 6516, 7301, 5800, 6655, 556, 7240, 1925, 6371, 8117, - 4008, 1573, 297, 4617, 3500, 1096, 8348, 7795, 2166, - 9919, 7046, 907, 4373, 9942, 7959, 4612, 2191, 7471, - 1551, 1253, 9622, 5451, 6375, 2718, 9172, 1328, 2206, - 6610, 8512, 9620, 3166, 70, 7979, 8362, 4317, 9319, - 
7145, 9471, 1102, 6052, 3736, 7897, 3018, 9318, 4626, - 3035, 3845, 5367, 445, 1352, 6964, 3743, 6956, 6815, - 8871, 4156, 4269, 4098, 3309, 4397, 5465, 3229, 1823, - 6544, 3842, 8845, 3484, 822, 6074, 4146, 9045, 7757, - 2777, 66, 9385, 2615, 599, 3543, 2303, 1567, 9769, - 3202, 4399, 2891, 5597, 2945, 5620, 5806, 4689, 9005, - 4553, 137, 7443, 892, 7777, 7252, 8060, 4460, 732, - 8162, 7391, 2245, 7604, 9063, 1073, 7517, 4465, 394, - 9244, 8, 9498, 9116, 9485, 5995, 3554, 4940, 8129, - 1458, 4587, 2078, 1470, 3731, 5211, 8596, 5666, 6142, - 3359, 1472, 8209, 9872, 1611, 3338, 6557, 3513, 3864, - 4827, 2518, 9053, 530, 3073, 418, 1537, 6645, 7411, - 2104, 9688, 8234, 7188, 7937, 4386, 6521, 9854, 2965, - 8265, 9490, 2081, 612, 4080, 5175, 9752, 1944, 2111, - 791, 5228, 349, 166, 4411, 9072, 6256, 1725, 7501, - 2904, 3632, 5352, 2745, 3201, 3650, 4953, 3431, 9310, - 9213, 5480, 3794, 6746, 4307, 2422, 1271, 9553, 7121, - 8150, 7353, 1417, 870, 806, 3925, 4907, 2082, 5868, - 6597, 5497, 4547, 6014, 1707, 4907, 455, 5675, 355, - 6173, 5131, 1727, 6109, 8687, 7982, 736, 6797, 2166, - 1063, 4073, 7028, 1072, 3289, 5899, 2997, 9326, 5623, - 1788, 1089, 8209, 9870, 4781, 9452, 2450, 7131, 2788, - 2853, 5511, 9849, 2656, 7920, 674, 1300, 8932, 416, - 8093, 9874, 7324, 5225, 4621, 926, 980, 8985, 1462, - 7040, 9170, 9555, 8932, 9914, 219, 2964, 7753, 6648, - 2390, 6208, 5248, 5320, 4117, 7437, 430, 4484, 2611, - 5121, 8916, 1447, 5161, 1499, 9794, 1265, 4597, 8256, - 1496, 7641, 2433, 6962, 8984, 1170, 5695, 5461, 547, - 9880, 7152, 272, 80, 1601, 2649, 2962, 4267, 9755, - 1424, 3921, 9152, 9785, 972, 1812, 7994, 2298, 8551, - 708, 668, 3575, 9762, 8171, 2486, 2048, 3839, 1631, - 1897, 7990, 4894, 2240, 6177, 6171, 9539, 2465, 9722, - 9336, 2467, 6414, 2170, 3729, 1314, 3949, 8063, 6208, - 823, 5984, 6659, 6415, 2310, 7658, 6846, 2561, 8927, - 5336, 4676, 5025, 5266, 9954, 5759, 4786, 389, 1969, - 8561, 5438, 7925, 6769, 2925, 3548, 5821, 4979, 5495, - 4420, 2929, 4317, 8343, 6738, 1023, 1264, 816, 5153, - 6387, 9609, 7627, 9609, 8951, 8523, 9734, 7325, 692, - 9657, 595, 9347, 5128, 5970, 6833, 8478, 2254, 1877, - 386, 8236, 3505, 9182, 9403, 1371, 4925, 3513, 8183, - 6878, 3320, 8021, 3631, 1112, 3099, 3784, 132, 834, - 2343, 2612, 8494, 4918, 7861, 7283, 6749, 3142, 1632, - 4302, 8980, 7897, 515, 2749, 5212, 8365, 982, 7831, - 4787, 5300, 7623, 7109, 9010, 3076, 1284, 467, 7979, - 5644, 8907, 7522, 6202, 6272, 5531, 9124, 4043, 7300, - 250, 3433, 7921, 7720, 9874, 1712, 2699, 8196, 3473, - 3245, 9036, 8606, 942, 3651, 8212, 979, 3999, 790, - 7368, 4160, 7287, 156, 3177, 9487, 6150, 318, 6342, - 8617, 6278, 8278, 8892, 1288, 6740, 9226, 7056, 9217, - 1811]), - values=tensor([0.6196, 0.0114, 0.0780, 0.5906, 0.7505, 0.5993, 0.7945, - 0.6434, 0.3118, 0.3104, 0.2388, 0.2046, 0.7622, 0.0777, - 0.2242, 0.6515, 0.2665, 0.6905, 0.5118, 0.3093, 0.1972, - 0.8279, 0.7758, 0.5459, 0.1846, 0.6189, 0.2595, 0.9186, - 0.9405, 0.9879, 0.1042, 0.7520, 0.4405, 0.4499, 0.2446, - 0.5636, 0.5715, 0.4879, 0.0724, 0.4352, 0.0923, 0.1412, - 0.7009, 0.0672, 0.7384, 0.5888, 0.4764, 0.9840, 0.4351, - 0.7794, 0.0814, 0.2714, 0.4090, 0.8987, 0.3438, 0.2136, - 0.2617, 0.9509, 0.7670, 0.2193, 0.5040, 0.9778, 0.5290, - 0.4287, 0.4993, 0.6605, 0.4552, 0.9814, 0.6170, 0.0979, - 0.2177, 0.2630, 0.6411, 0.5989, 0.5365, 0.6080, 0.2088, - 0.6048, 0.4912, 0.3916, 0.1699, 0.3572, 0.0296, 0.1407, - 0.2305, 0.8274, 0.8609, 0.2424, 0.0171, 0.3855, 0.8338, - 0.0725, 0.1924, 0.3285, 0.2749, 0.2272, 0.8472, 0.4564, - 0.0152, 0.9213, 0.6620, 
0.1417, 0.5779, 0.7029, 0.8146, - 0.0682, 0.3470, 0.1203, 0.3985, 0.2526, 0.9231, 0.2354, - 0.8514, 0.4049, 0.6712, 0.6265, 0.6751, 0.7498, 0.8617, - 0.8223, 0.5316, 0.8207, 0.7825, 0.3233, 0.8320, 0.0205, - 0.8938, 0.9868, 0.4228, 0.0904, 0.1323, 0.2104, 0.5759, - 0.7486, 0.0044, 0.1971, 0.8234, 0.8820, 0.6865, 0.9321, - 0.1935, 0.5814, 0.3290, 0.8572, 0.7029, 0.1494, 0.4076, - 0.6910, 0.7343, 0.6929, 0.6184, 0.4509, 0.3354, 0.0941, - 0.8616, 0.1833, 0.4475, 0.8165, 0.7540, 0.0272, 0.8090, - 0.0075, 0.3321, 0.8506, 0.8823, 0.3041, 0.9698, 0.2208, - 0.0487, 0.0944, 0.5882, 0.4789, 0.2189, 0.7276, 0.4046, - 0.6510, 0.9386, 0.5425, 0.0569, 0.9029, 0.3170, 0.4560, - 0.5885, 0.3819, 0.2215, 0.3602, 0.5884, 0.5067, 0.6889, - 0.4126, 0.5286, 0.8708, 0.3178, 0.5978, 0.0567, 0.3482, - 0.7129, 0.2828, 0.9540, 0.1061, 0.9066, 0.0741, 0.4504, - 0.7678, 0.9215, 0.2588, 0.3298, 0.0216, 0.3903, 0.8528, - 0.9555, 0.5742, 0.2240, 0.8449, 0.7139, 0.5892, 0.2852, - 0.5518, 0.0411, 0.1811, 0.1596, 0.8581, 0.7313, 0.1657, - 0.9703, 0.4189, 0.2887, 0.6831, 0.2369, 0.7634, 0.0408, - 0.1000, 0.5756, 0.0151, 0.0558, 0.6882, 0.9572, 0.3370, - 0.9688, 0.4463, 0.3662, 0.7410, 0.8981, 0.8137, 0.7748, - 0.5107, 0.2607, 0.4339, 0.7448, 0.8445, 0.3417, 0.7647, - 0.9351, 0.4583, 0.4626, 0.8247, 0.0549, 0.7239, 0.1440, - 0.1775, 0.0044, 0.8569, 0.9508, 0.9056, 0.7959, 0.1560, - 0.6764, 0.7063, 0.5503, 0.9575, 0.2693, 0.8490, 0.4128, - 0.9714, 0.7818, 0.2400, 0.9716, 0.6862, 0.9725, 0.7942, - 0.6956, 0.0325, 0.8764, 0.6559, 0.7553, 0.5630, 0.4887, - 0.4200, 0.4990, 0.6213, 0.7425, 0.5404, 0.5604, 0.6518, - 0.0938, 0.0682, 0.2423, 0.9653, 0.1080, 0.3011, 0.1907, - 0.9731, 0.6522, 0.5311, 0.7260, 0.4884, 0.7901, 0.0902, - 0.8898, 0.7591, 0.1705, 0.1715, 0.3061, 0.0252, 0.3621, - 0.2712, 0.6191, 0.9504, 0.7431, 0.1438, 0.4641, 0.1864, - 0.8295, 0.2078, 0.4217, 0.6064, 0.8959, 0.2519, 0.3506, - 0.8294, 0.1176, 0.3434, 0.4118, 0.0391, 0.7786, 0.0788, - 0.3045, 0.2730, 0.0333, 0.9811, 0.5870, 0.5459, 0.4360, - 0.7475, 0.4862, 0.2886, 0.3649, 0.8278, 0.5869, 0.3199, - 0.6849, 0.0285, 0.4652, 0.5037, 0.2746, 0.7393, 0.6614, - 0.8444, 0.4003, 0.6111, 0.3264, 0.6698, 0.0865, 0.1122, - 0.8565, 0.1133, 0.1540, 0.9887, 0.7073, 0.1087, 0.5905, - 0.7220, 0.0133, 0.1397, 0.8300, 0.0324, 0.8468, 0.3611, - 0.2452, 0.3323, 0.2390, 0.8962, 0.8835, 0.5849, 0.8184, - 0.7935, 0.0660, 0.7531, 0.0935, 0.0932, 0.0297, 0.7930, - 0.4667, 0.6703, 0.2160, 0.8538, 0.6976, 0.7919, 0.6943, - 0.5213, 0.4328, 0.9720, 0.9873, 0.3954, 0.1633, 0.0324, - 0.1143, 0.9281, 0.5805, 0.4522, 0.1840, 0.3413, 0.7327, - 0.8227, 0.7055, 0.4474, 0.9122, 0.5135, 0.1786, 0.5499, - 0.6141, 0.0692, 0.4429, 0.4518, 0.1137, 0.3476, 0.6665, - 0.4712, 0.6495, 0.4523, 0.1555, 0.4635, 0.4607, 0.8030, - 0.8073, 0.9042, 0.2096, 0.7414, 0.3257, 0.5309, 0.6492, - 0.5166, 0.4222, 0.1800, 0.6811, 0.2543, 0.7807, 0.8292, - 0.0337, 0.3617, 0.3639, 0.6057, 0.9194, 0.7802, 0.0115, - 0.4737, 0.1007, 0.6828, 0.4037, 0.6724, 0.8920, 0.1067, - 0.2017, 0.4498, 0.1470, 0.6942, 0.3285, 0.6704, 0.5841, - 0.6335, 0.3846, 0.4546, 0.4434, 0.2040, 0.5921, 0.4102, - 0.9338, 0.0085, 0.3554, 0.7339, 0.9378, 0.6361, 0.4532, - 0.8577, 0.0919, 0.8382, 0.8771, 0.9330, 0.9355, 0.3061, - 0.1102, 0.2291, 0.2677, 0.0542, 0.0233, 0.9935, 0.2414, - 0.5357, 0.0318, 0.7366, 0.4385, 0.9397, 0.9292, 0.4194, - 0.0669, 0.0388, 0.1712, 0.0651, 0.0535, 0.4304, 0.4725, - 0.6864, 0.1406, 0.9443, 0.6677, 0.0116, 0.4935, 0.0069, - 0.5753, 0.7223, 0.1602, 0.1981, 0.8268, 0.1964, 0.1201, - 0.4187, 0.9315, 0.6910, 0.1607, 0.8515, 
0.6607, 0.1831, - 0.4073, 0.4556, 0.6316, 0.3967, 0.7742, 0.0299, 0.8444, - 0.4225, 0.4172, 0.0107, 0.7973, 0.1650, 0.5326, 0.3424, - 0.1339, 0.3026, 0.2960, 0.8852, 0.5235, 0.3847, 0.9486, - 0.9744, 0.7896, 0.0970, 0.8576, 0.9830, 0.8277, 0.6377, - 0.6529, 0.3114, 0.3164, 0.5753, 0.9730, 0.1173, 0.7698, - 0.0827, 0.2026, 0.6431, 0.6275, 0.9894, 0.9420, 0.4072, - 0.0354, 0.6950, 0.2008, 0.8919, 0.9516, 0.2605, 0.2712, - 0.3361, 0.8498, 0.9614, 0.7851, 0.9593, 0.8290, 0.5531, - 0.4367, 0.7236, 0.1173, 0.3492, 0.3041, 0.4874, 0.6287, - 0.4729, 0.9250, 0.4853, 0.7466, 0.5273, 0.7349, 0.8320, - 0.4933, 0.8099, 0.6428, 0.4210, 0.3452, 0.1825, 0.1989, - 0.0157, 0.5749, 0.0068, 0.8464, 0.1640, 0.6036, 0.8043, - 0.1153, 0.1076, 0.3058, 0.3746, 0.4799, 0.3056, 0.9610, - 0.9127, 0.1886, 0.6659, 0.9943, 0.2424, 0.3077, 0.7229, - 0.1714, 0.4319, 0.1211, 0.4333, 0.8060, 0.0129, 0.3734, - 0.2043, 0.8662, 0.0012, 0.7686, 0.9359, 0.7522, 0.5003, - 0.8250, 0.6805, 0.4069, 0.5015, 0.3243, 0.2291, 0.6806, - 0.3313, 0.5285, 0.2989, 0.5609, 0.2452, 0.4071, 0.5377, - 0.6963, 0.5657, 0.0299, 0.3653, 0.3895, 0.5235, 0.1440, - 0.6253, 0.9459, 0.1675, 0.0939, 0.6242, 0.6776, 0.4588, - 0.7446, 0.8041, 0.5422, 0.2562, 0.4694, 0.6440, 0.4271, - 0.2793, 0.9064, 0.9594, 0.0651, 0.7852, 0.2015, 0.1116, - 0.8944, 0.6879, 0.1163, 0.9220, 0.0269, 0.3468, 0.8496, - 0.0650, 0.6194, 0.8677, 0.0084, 0.7029, 0.1122, 0.9175, - 0.6419, 0.6504, 0.1891, 0.4529, 0.1905, 0.8371, 0.7381, - 0.0046, 0.0964, 0.3131, 0.0824, 0.0035, 0.8733, 0.0679, - 0.3019, 0.0364, 0.0796, 0.9762, 0.3044, 0.5891, 0.5660, - 0.3560, 0.7397, 0.3023, 0.7408, 0.9024, 0.2952, 0.3074, - 0.6006, 0.7129, 0.0891, 0.3833, 0.2026, 0.5861, 0.2465, - 0.9574, 0.5804, 0.7353, 0.6099, 0.7893, 0.5485, 0.6035, - 0.2349, 0.9017, 0.3856, 0.5056, 0.0559, 0.3784, 0.5241, - 0.2260, 0.3588, 0.0084, 0.9935, 0.5469, 0.0778, 0.5282, - 0.6435, 0.6276, 0.8416, 0.0584, 0.0156, 0.0848, 0.6834, - 0.9575, 0.2536, 0.5391, 0.2540, 0.8715, 0.2861, 0.5460, - 0.7418, 0.0582, 0.9596, 0.0844, 0.4428, 0.3021, 0.7742, - 0.6810, 0.4964, 0.7090, 0.4618, 0.1299, 0.8111, 0.3574, - 0.2849, 0.4554, 0.2968, 0.9678, 0.0744, 0.2541, 0.3812, - 0.5255, 0.2774, 0.7182, 0.6545, 0.3975, 0.7431, 0.2071, - 0.5491, 0.9683, 0.3905, 0.1637, 0.7412, 0.4003, 0.0465, - 0.7927, 0.1817, 0.9405, 0.6939, 0.2323, 0.2436, 0.5829, - 0.6947, 0.0693, 0.6028, 0.7684, 0.0952, 0.9277, 0.4091, - 0.9988, 0.6707, 0.1396, 0.1664, 0.2853, 0.2971, 0.9641, - 0.1686, 0.0860, 0.6361, 0.7716, 0.8737, 0.6248, 0.0015, - 0.3479, 0.1224, 0.1738, 0.6328, 0.2988, 0.9543, 0.5409, - 0.0047, 0.1434, 0.4069, 0.6226, 0.8539, 0.0827, 0.8015, - 0.2080, 0.8185, 0.6100, 0.2443, 0.4521, 0.2640, 0.8834, - 0.8696, 0.2577, 0.6036, 0.3935, 0.5480, 0.9590, 0.9883, - 0.7734, 0.5568, 0.6996, 0.0841, 0.9604, 0.4231, 0.9460, - 0.7411, 0.8125, 0.9801, 0.0252, 0.0568, 0.4796, 0.9708, - 0.3495, 0.6681, 0.3051, 0.6744, 0.6027, 0.3956, 0.2149, - 0.6839, 0.9513, 0.7049, 0.9582, 0.9253, 0.8298, 0.8000, - 0.7741, 0.1759, 0.5742, 0.7797, 0.5900, 0.6486, 0.5787, - 0.2751, 0.3526, 0.0315, 0.3035, 0.6247, 0.4841, 0.5887, - 0.1239, 0.9917, 0.3466, 0.9117, 0.5496, 0.3028, 0.9246, - 0.4866, 0.4419, 0.3541, 0.1189, 0.6739, 0.6026, 0.2252, - 0.8252, 0.4125, 0.4146, 0.0962, 0.2742, 0.6324, 0.9534, - 0.6412, 0.1798, 0.3442, 0.4388, 0.5642, 0.0475, 0.3987, - 0.9407, 0.2760, 0.9317, 0.9750, 0.2067, 0.0954, 0.3279, - 0.9463, 0.0269, 0.3547, 0.9989, 0.6845, 0.0502, 0.2075, - 0.9402, 0.9150, 0.2132, 0.4162, 0.9616, 0.9976, 0.1376, - 0.2488, 0.9598, 0.7374, 0.5987, 0.2084, 0.2543, 0.9909, 
- 0.9516, 0.5972, 0.2731, 0.2157, 0.3846, 0.1856, 0.3478, - 0.8278, 0.9150, 0.8429, 0.1244, 0.6218, 0.9288, 0.3402, - 0.1837, 0.8875, 0.3500, 0.7672, 0.3250, 0.7786, 0.8135, - 0.0048, 0.4153, 0.0159, 0.0130, 0.8717, 0.9126, 0.3962, - 0.4043, 0.1810, 0.9416, 0.6720, 0.1907, 0.4396, 0.7264, - 0.0043, 0.4775, 0.5628, 0.5494, 0.7407, 0.3406]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([7518, 9696, 6300, 262, 1821, 8285, 8778, 3322, 8898, + 7577, 7218, 6194, 7006, 9614, 4003, 4685, 5395, 3357, + 7430, 5632, 6273, 2540, 3870, 8002, 5621, 5158, 4894, + 8789, 8631, 475, 6021, 25, 1478, 2181, 9967, 8972, + 7148, 7237, 1324, 2589, 6704, 1661, 3441, 9044, 8782, + 3179, 2340, 6434, 8915, 9575, 1895, 668, 5800, 3712, + 3120, 4497, 3003, 8475, 1656, 5317, 534, 1209, 750, + 4867, 5146, 2934, 2534, 4093, 7033, 3431, 1147, 8260, + 6120, 2085, 4612, 4043, 7468, 6975, 1754, 8217, 4410, + 1740, 172, 9577, 8177, 556, 6215, 2860, 2656, 3624, + 7154, 9768, 4398, 1947, 2674, 6729, 4022, 4851, 453, + 8512, 6182, 9666, 2673, 1056, 2252, 4391, 7487, 5177, + 4350, 1020, 3998, 6787, 4433, 4098, 3043, 4595, 2036, + 200, 3964, 7514, 4037, 5869, 3218, 7690, 6624, 488, + 1881, 9085, 9814, 4334, 6660, 1075, 1040, 4720, 1813, + 9907, 1998, 1000, 2004, 5683, 1137, 3602, 5453, 9566, + 2417, 9925, 7869, 179, 8191, 7442, 2336, 1365, 7166, + 7354, 6152, 2462, 5973, 6124, 5756, 3298, 1414, 4077, + 7759, 305, 7151, 2537, 5054, 8993, 7229, 2942, 3483, + 4002, 2693, 2448, 5771, 826, 1254, 7055, 4953, 66, + 2508, 2351, 3206, 4067, 5669, 9712, 6741, 1066, 7637, + 278, 5450, 3287, 1711, 9900, 4655, 3158, 1712, 6656, + 6936, 9886, 6105, 1083, 2126, 1861, 6749, 4491, 5376, + 3481, 234, 4214, 4686, 7277, 8302, 2751, 3845, 468, + 4378, 3280, 8845, 7340, 4948, 2744, 5128, 5325, 2078, + 2907, 395, 3547, 3669, 8960, 8102, 8374, 6349, 3441, + 4948, 3355, 6904, 4725, 3674, 6960, 2790, 5677, 2877, + 5123, 2601, 7479, 643, 1952, 585, 168, 2413, 3560, + 9344, 9051, 864, 328, 9846, 3635, 7758, 7148, 9253, + 6009, 2555, 5884, 204, 4047, 201, 9134, 7649, 2615, + 9583, 9172, 9003, 780, 7486, 9587, 9477, 5154, 4457, + 1105, 3365, 6202, 8659, 5414, 2426, 1087, 1957, 6749, + 4929, 1049, 3270, 246, 8629, 2498, 8221, 4504, 9911, + 9966, 3014, 4245, 6556, 3305, 589, 4137, 318, 6145, + 8838, 3468, 1976, 6641, 9251, 2588, 2851, 8127, 8656, + 2062, 2975, 6380, 2214, 4190, 5031, 9304, 614, 6944, + 562, 4917, 969, 3937, 2534, 4263, 7288, 9607, 4730, + 444, 2886, 3273, 3567, 3830, 5410, 8598, 1862, 5598, + 4933, 8354, 2174, 8958, 5295, 846, 3272, 353, 2969, + 8177, 7603, 8213, 9944, 5897, 2136, 4476, 7073, 1115, + 8358, 8989, 9345, 6021, 6613, 901, 8445, 784, 8596, + 4500, 9360, 5470, 2745, 6764, 1266, 8614, 2516, 1972, + 8005, 3834, 2717, 7044, 7458, 1886, 9756, 5069, 5734, + 6821, 745, 6731, 1536, 9880, 9319, 8126, 6805, 7453, + 39, 3024, 3069, 9783, 2264, 1259, 738, 3734, 9004, + 2095, 3027, 5789, 1350, 4750, 5589, 5192, 9977, 2146, + 5549, 8117, 6364, 9272, 4409, 5362, 7387, 7399, 2899, + 7420, 5273, 7946, 7115, 9377, 5767, 8802, 1095, 9889, + 8008, 7921, 1671, 1137, 5636, 5204, 9943, 2483, 1303, + 9433, 2389, 9126, 7106, 1954, 8519, 236, 6693, 2794, + 7942, 5186, 904, 5057, 7613, 5066, 8772, 1477, 4880, + 4179, 2818, 5601, 8106, 1178, 3735, 7444, 9960, 1324, + 2727, 2041, 1199, 5652, 3114, 9046, 3713, 861, 4463, + 1424, 126, 4816, 8641, 7447, 5664, 8120, 6968, 9745, + 7132, 5777, 8388, 3282, 8980, 1088, 7293, 9885, 4266, + 6309, 5469, 6966, 60, 1227, 5501, 999, 7686, 448, + 8913, 1964, 9037, 316, 5677, 3113, 2190, 
347, 8164, + 7928, 9716, 4357, 5297, 5956, 2459, 7237, 656, 182, + 7452, 3500, 3896, 5526, 2240, 3745, 33, 1094, 4722, + 4207, 1962, 5999, 8357, 4119, 6009, 7333, 6925, 3159, + 7769, 1317, 8086, 4883, 3762, 7069, 6702, 4912, 5628, + 6146, 62, 2316, 433, 2097, 6311, 24, 347, 8288, + 2746, 1342, 2404, 6710, 5242, 178, 5849, 8133, 6624, + 1815, 5332, 533, 7044, 3421, 9752, 6107, 1812, 8227, + 6724, 5442, 2308, 798, 2453, 8060, 6419, 6234, 6264, + 4931, 435, 2405, 2270, 5349, 1220, 848, 2804, 8394, + 3337, 5183, 5812, 2367, 5067, 5907, 5088, 7972, 460, + 2064, 5097, 9032, 8929, 7371, 1494, 149, 4759, 7969, + 968, 5248, 9684, 4863, 9748, 9909, 8626, 6754, 6340, + 6417, 61, 6772, 600, 1936, 7091, 7755, 9124, 8882, + 8496, 3711, 9212, 9435, 8216, 7685, 8063, 5288, 9492, + 1160, 4958, 7970, 4718, 6769, 5717, 1553, 4760, 9541, + 6566, 6456, 7743, 3822, 6876, 5097, 2671, 3209, 3226, + 2793, 7122, 5735, 6937, 1731, 1634, 4869, 5674, 6361, + 2516, 6708, 9293, 3974, 5537, 1905, 6525, 2223, 7014, + 2692, 1876, 9845, 157, 3891, 113, 7490, 2632, 6626, + 6686, 7895, 5360, 8738, 4685, 3986, 1130, 2869, 3422, + 4812, 2624, 6916, 9474, 1705, 5886, 4866, 6374, 9366, + 9761, 9618, 1119, 6229, 6311, 8794, 8597, 4133, 6121, + 9540, 6417, 1594, 3884, 2402, 1780, 5984, 4536, 6567, + 3892, 5500, 7849, 6302, 1302, 4311, 6900, 5784, 7487, + 203, 191, 8618, 8956, 318, 7779, 3949, 5494, 8492, + 1675, 5732, 9756, 9204, 5091, 6134, 4301, 5904, 184, + 395, 1607, 3648, 1844, 643, 5663, 6820, 5174, 2484, + 2223, 2313, 396, 3592, 96, 467, 7251, 717, 5592, + 9116, 3470, 342, 8485, 6092, 1390, 7765, 2309, 8318, + 4123, 9225, 9856, 8144, 9651, 2603, 192, 7575, 6749, + 7640, 6468, 2711, 5163, 838, 4488, 9754, 2066, 6247, + 4650, 6711, 6711, 6741, 5236, 8684, 1992, 6812, 9945, + 5083, 7942, 5645, 8754, 2371, 7966, 1027, 670, 8926, + 6913, 5100, 8627, 9784, 7560, 5325, 5546, 2186, 4255, + 3502, 2906, 7383, 6744, 5666, 3739, 2679, 2518, 4617, + 8685, 5307, 9705, 9940, 2328, 8384, 8001, 6555, 5871, + 2468, 7100, 5998, 5692, 4802, 4015, 2628, 7408, 5224, + 1078, 6410, 2468, 2961, 9945, 6353, 5587, 3018, 8393, + 1265, 8258, 7873, 8344, 313, 5155, 2929, 3301, 2147, + 6945, 9130, 9351, 284, 2838, 7395, 3957, 4616, 2128, + 9332, 7315, 742, 5824, 4092, 9802, 8791, 1788, 7391, + 290, 4777, 1756, 6447, 2315, 2564, 4732, 4959, 23, + 2501, 5485, 6508, 6099, 4580, 6549, 2248, 9033, 5440, + 6995, 6959, 9024, 3235, 5667, 4553, 1072, 4400, 5450, + 7840, 965, 5626, 2280, 9236, 4299, 7041, 7596, 5077, + 9967, 9369, 2731, 3895, 1718, 5713, 2354, 8425, 3764, + 1992, 8248, 668, 8845, 5098, 7778, 1796, 8703, 6523, + 6952, 2673, 4419, 8055, 7289, 9732, 6094, 7227, 1426, + 4580, 6981, 2751, 1273, 3522, 3234, 1740, 7604, 3606, + 3783, 2377, 4681, 7158, 3865, 4704, 678, 1050, 5256, + 5611, 5719, 7630, 667, 5582, 2950, 6855, 9179, 8765, + 8370, 109, 2915, 6690, 8972, 8846, 8957, 2389, 7623, + 2847, 459, 8803, 9389, 6178, 5855, 792, 337, 4567, + 9620, 3552, 2412, 8717, 2207, 7057, 1364, 3667, 5420, + 3386]), + values=tensor([1.3041e-01, 8.3167e-01, 2.6285e-01, 6.6326e-01, + 8.6360e-01, 6.5715e-02, 9.5991e-01, 7.7502e-01, + 1.5176e-01, 3.7598e-01, 8.5691e-01, 8.8353e-01, + 4.2667e-01, 8.6303e-01, 7.8944e-01, 5.3616e-01, + 3.6177e-01, 2.8447e-01, 5.8604e-01, 3.6488e-01, + 3.9226e-01, 4.0463e-02, 6.4984e-01, 6.8213e-01, + 3.0299e-01, 1.5221e-01, 8.6401e-01, 5.6470e-01, + 6.8510e-01, 5.4418e-01, 6.6205e-01, 2.9520e-01, + 5.0068e-01, 6.0881e-01, 2.7799e-01, 8.1099e-01, + 8.1696e-01, 6.5516e-01, 9.3995e-01, 2.7441e-01, + 4.3716e-01, 8.8088e-01, 3.7994e-01, 
6.0887e-01, + 9.4212e-01, 1.9859e-01, 6.6642e-01, 2.9686e-01, + 7.7572e-02, 9.2566e-01, 6.0326e-01, 1.5910e-01, + 3.9549e-01, 1.2085e-01, 2.4669e-01, 6.6434e-01, + 5.4194e-01, 5.9233e-01, 4.7001e-01, 8.4553e-01, + 6.5650e-01, 6.0743e-01, 5.2093e-01, 2.1412e-01, + 7.3438e-01, 8.5971e-01, 8.0479e-01, 5.5348e-01, + 7.6964e-01, 9.0794e-01, 4.3225e-01, 1.9787e-02, + 4.8258e-01, 3.3029e-01, 4.5348e-01, 4.5391e-01, + 7.7268e-01, 8.0093e-01, 3.5913e-01, 3.2663e-01, + 7.2599e-01, 5.0492e-01, 6.2514e-01, 4.5307e-01, + 9.2036e-01, 9.6876e-01, 6.5593e-01, 1.7458e-02, + 9.0660e-01, 8.9408e-01, 4.4944e-01, 9.0032e-02, + 1.5076e-01, 3.1631e-01, 6.7634e-01, 1.6319e-01, + 7.3819e-01, 5.2802e-03, 3.2231e-01, 2.6514e-01, + 2.9081e-01, 1.6334e-01, 5.9931e-01, 8.6034e-01, + 7.8469e-01, 4.0042e-01, 3.3385e-01, 9.5543e-01, + 4.1403e-01, 7.1569e-01, 3.3312e-01, 2.8133e-01, + 1.6364e-01, 2.7316e-01, 4.9372e-01, 9.1315e-01, + 1.1311e-01, 6.3113e-01, 7.4994e-01, 5.8650e-01, + 9.1784e-01, 9.8427e-03, 1.1119e-01, 8.4274e-01, + 6.7618e-01, 6.5958e-01, 7.9249e-01, 6.0204e-01, + 2.0975e-01, 2.9920e-01, 4.3059e-01, 4.8873e-01, + 5.5887e-01, 2.0476e-01, 9.0267e-01, 7.5136e-01, + 8.4611e-01, 3.6738e-01, 5.2257e-01, 5.6348e-01, + 4.3848e-01, 3.8505e-01, 9.4682e-01, 6.4161e-01, + 9.5527e-01, 8.4654e-01, 7.1221e-01, 7.3888e-01, + 3.2743e-01, 5.4002e-01, 9.6379e-01, 5.9026e-01, + 3.7294e-01, 4.2773e-01, 8.8207e-01, 6.2644e-01, + 9.5097e-01, 1.3478e-01, 2.4806e-02, 2.2406e-01, + 6.2953e-01, 5.4355e-01, 3.3764e-01, 6.3472e-01, + 4.5224e-01, 8.4833e-02, 8.2615e-01, 2.1149e-01, + 9.4310e-01, 7.3199e-01, 1.5563e-01, 6.0103e-01, + 1.3393e-01, 2.9121e-01, 2.6980e-01, 2.9433e-01, + 6.6666e-01, 7.0104e-01, 4.9024e-01, 6.3149e-01, + 4.8279e-01, 1.4330e-01, 3.1238e-01, 6.0270e-01, + 7.4806e-01, 2.3777e-01, 4.9348e-01, 9.5404e-01, + 1.4182e-01, 6.2322e-01, 9.6427e-01, 3.8601e-01, + 8.9012e-02, 2.9387e-01, 4.8207e-01, 9.1009e-01, + 8.4314e-01, 5.8469e-01, 9.7026e-01, 3.8950e-01, + 5.0291e-01, 4.0643e-01, 9.1954e-01, 2.6164e-01, + 5.6263e-01, 2.1560e-01, 4.7691e-01, 5.7944e-01, + 8.1369e-03, 1.9692e-02, 9.9720e-01, 5.4116e-01, + 3.5419e-01, 1.6319e-02, 7.0477e-01, 2.0930e-01, + 2.7323e-01, 8.2105e-01, 2.6184e-01, 5.2832e-01, + 9.5236e-01, 8.5917e-01, 2.8397e-02, 4.3195e-01, + 8.5812e-01, 1.9410e-02, 7.8711e-01, 8.0554e-01, + 2.3720e-01, 9.8231e-01, 1.2190e-01, 9.7974e-01, + 2.1714e-01, 3.9420e-01, 3.7000e-01, 5.2780e-01, + 5.1473e-01, 6.5503e-01, 7.8142e-01, 8.3461e-01, + 2.8050e-01, 6.0711e-01, 7.6428e-01, 1.1920e-01, + 4.7358e-01, 3.4573e-01, 4.0436e-01, 7.9038e-01, + 6.8841e-01, 1.1705e-01, 7.6729e-01, 3.5260e-01, + 1.2207e-02, 3.2093e-01, 7.8719e-01, 5.3983e-01, + 5.8843e-01, 6.9710e-01, 5.4039e-01, 7.6871e-01, + 6.2762e-01, 5.4210e-01, 4.5772e-01, 8.3344e-01, + 8.3078e-02, 5.1283e-01, 7.1850e-01, 5.3715e-01, + 9.1040e-01, 9.4004e-01, 5.8271e-02, 5.1221e-01, + 6.7820e-01, 5.2404e-01, 4.7045e-01, 9.5798e-01, + 3.7354e-02, 8.5443e-01, 2.5596e-01, 8.9251e-02, + 4.2693e-02, 6.4525e-01, 3.5553e-01, 3.1609e-01, + 5.7268e-01, 4.7798e-01, 4.2599e-03, 6.5780e-01, + 6.6970e-01, 3.6429e-01, 9.9605e-01, 4.7338e-01, + 1.6846e-01, 1.9407e-01, 3.2190e-01, 3.8114e-01, + 1.0284e-01, 3.9789e-01, 1.9114e-01, 3.5616e-01, + 1.4658e-01, 2.9873e-01, 2.3371e-01, 5.1649e-01, + 7.8222e-01, 4.7979e-02, 6.0290e-01, 2.2453e-01, + 9.7654e-01, 8.3028e-01, 4.5758e-01, 7.7794e-01, + 3.7506e-01, 1.1357e-01, 5.2466e-01, 3.6868e-01, + 8.2834e-02, 8.7007e-01, 4.9073e-02, 7.9799e-01, + 8.6213e-02, 3.2764e-01, 7.6745e-01, 6.2858e-01, + 3.4245e-01, 1.4275e-01, 4.3392e-01, 
5.5763e-01, + 7.9863e-01, 2.6162e-01, 8.5317e-01, 6.4938e-01, + 9.3924e-01, 2.2927e-03, 4.8312e-01, 1.8976e-01, + 9.7836e-01, 5.1857e-01, 3.7434e-01, 4.9652e-01, + 7.6274e-01, 8.8894e-01, 4.9709e-01, 9.9719e-01, + 8.5105e-01, 7.2986e-01, 3.4902e-01, 2.5750e-01, + 1.0834e-01, 4.8259e-01, 2.2992e-01, 2.9985e-01, + 5.3388e-01, 5.9629e-01, 9.1799e-01, 6.3888e-01, + 9.0394e-01, 2.1252e-01, 2.0328e-01, 8.5727e-01, + 8.1034e-01, 2.4269e-01, 9.1198e-02, 6.6476e-01, + 4.5387e-02, 6.4642e-02, 3.0902e-01, 4.9403e-01, + 5.4217e-01, 6.0907e-01, 3.2529e-01, 5.7728e-01, + 7.4848e-01, 8.5174e-01, 2.9542e-01, 2.1850e-02, + 8.8083e-01, 8.6589e-01, 5.3352e-01, 8.7861e-01, + 9.0461e-01, 1.0456e-01, 1.4724e-01, 4.3868e-01, + 7.9412e-01, 5.3568e-01, 9.0452e-01, 5.5981e-01, + 2.2401e-01, 9.8722e-01, 2.4030e-01, 5.1078e-01, + 4.0485e-01, 8.0013e-02, 4.1112e-01, 3.5888e-01, + 4.7125e-01, 8.3656e-02, 4.8604e-01, 8.1332e-01, + 4.7391e-03, 7.0845e-01, 6.5425e-01, 6.0492e-01, + 7.5155e-01, 6.5385e-01, 2.2705e-01, 7.3624e-01, + 6.2400e-01, 2.4495e-01, 7.5005e-01, 2.6759e-01, + 4.3052e-01, 8.5914e-01, 8.3231e-01, 1.2598e-01, + 8.4112e-02, 1.9060e-01, 2.3896e-01, 2.1126e-01, + 3.5790e-01, 1.9189e-01, 4.1223e-01, 9.8631e-01, + 9.8919e-01, 3.4538e-01, 5.3891e-01, 6.4956e-02, + 4.8657e-01, 4.4859e-01, 5.5118e-01, 6.3965e-01, + 7.0850e-01, 5.9231e-02, 1.2184e-01, 4.5675e-01, + 4.7291e-01, 1.9259e-01, 3.1329e-01, 8.6873e-01, + 1.6203e-01, 9.2237e-01, 3.9722e-01, 3.5045e-01, + 1.4911e-01, 7.9886e-01, 6.8584e-01, 9.4234e-01, + 3.0581e-01, 6.0700e-01, 7.9474e-01, 5.1465e-01, + 9.6574e-01, 9.7807e-01, 1.5252e-01, 4.3097e-01, + 9.2783e-01, 7.1882e-01, 7.2712e-01, 1.6256e-01, + 2.4866e-01, 3.7688e-01, 3.3680e-01, 8.5072e-01, + 2.3886e-01, 6.2812e-01, 7.7314e-01, 7.6736e-01, + 5.6288e-01, 3.5074e-01, 6.4221e-01, 5.2145e-01, + 6.6666e-01, 8.4864e-01, 6.1669e-01, 4.6744e-01, + 8.0764e-01, 4.8521e-01, 1.1360e-01, 8.7306e-01, + 8.8180e-01, 8.1429e-01, 9.7086e-01, 7.6475e-02, + 8.1469e-01, 1.6678e-01, 1.1948e-01, 6.2224e-01, + 9.1905e-01, 7.9098e-01, 2.4631e-01, 5.4597e-01, + 5.5153e-01, 1.0818e-01, 1.4021e-01, 3.3312e-01, + 2.2719e-01, 5.6543e-02, 7.0716e-01, 1.9460e-01, + 6.4614e-01, 5.2791e-01, 8.9249e-01, 6.3819e-01, + 5.9491e-01, 4.6935e-01, 6.1055e-01, 4.0601e-02, + 1.3569e-01, 9.6450e-01, 1.0903e-01, 9.7617e-01, + 7.2183e-01, 8.3720e-01, 2.8106e-01, 7.8349e-01, + 8.4787e-01, 7.5451e-01, 5.6493e-01, 1.6383e-01, + 1.2315e-01, 6.6605e-01, 8.4475e-01, 7.3330e-01, + 2.9853e-01, 7.9348e-01, 7.2687e-01, 6.0237e-01, + 5.8598e-01, 8.8539e-01, 3.5050e-01, 3.6175e-01, + 6.9359e-01, 6.2558e-01, 9.2810e-01, 1.3114e-01, + 3.1580e-02, 7.0586e-01, 7.9015e-01, 5.8570e-01, + 1.3430e-01, 2.3354e-01, 6.3306e-01, 8.4906e-01, + 5.5013e-01, 8.5825e-02, 9.2716e-01, 5.4252e-01, + 6.2825e-01, 4.3084e-01, 5.6270e-02, 1.1638e-01, + 1.3731e-01, 7.7385e-01, 3.9576e-01, 8.6116e-02, + 6.2543e-01, 7.3140e-01, 3.6416e-01, 7.3622e-01, + 7.5471e-01, 6.4980e-01, 1.0022e-01, 3.3981e-01, + 2.2794e-01, 4.8078e-01, 4.2247e-01, 8.1942e-01, + 1.8379e-01, 5.6845e-01, 5.0755e-01, 3.0208e-01, + 8.2405e-01, 2.3302e-01, 1.9578e-01, 9.7528e-01, + 5.9984e-01, 1.6057e-01, 1.3373e-01, 6.8478e-01, + 5.0837e-03, 5.8461e-01, 5.0733e-01, 5.1389e-01, + 1.9036e-01, 4.2977e-01, 7.9566e-01, 7.1653e-01, + 9.7823e-02, 8.1174e-01, 2.2992e-02, 9.0406e-01, + 5.0683e-01, 4.4278e-01, 4.1399e-01, 6.2416e-02, + 1.9389e-01, 2.4011e-01, 7.1807e-01, 3.3743e-01, + 8.4328e-01, 7.2589e-01, 2.1221e-01, 5.5074e-01, + 1.8528e-01, 4.6362e-01, 1.9508e-01, 3.3855e-01, + 1.5190e-01, 8.4130e-01, 4.7200e-01, 
6.8265e-01, + 5.4146e-01, 2.6877e-01, 9.7685e-01, 9.8942e-01, + 5.4228e-01, 1.0248e-02, 6.3921e-01, 3.8272e-01, + 7.4936e-01, 1.1421e-01, 5.9560e-01, 8.5940e-01, + 5.3870e-01, 4.4622e-01, 4.4721e-01, 8.3619e-01, + 6.2216e-02, 5.7225e-01, 4.4771e-01, 5.8433e-01, + 7.9385e-01, 9.4364e-01, 8.4174e-02, 6.5572e-01, + 7.1904e-01, 7.9004e-01, 2.1308e-01, 6.8903e-01, + 9.3781e-01, 1.3756e-01, 8.5481e-01, 9.6659e-01, + 3.6367e-01, 4.8802e-01, 3.1375e-01, 1.1494e-01, + 5.5924e-01, 2.8019e-02, 1.5756e-01, 5.2825e-01, + 4.3190e-01, 4.3413e-01, 7.6832e-02, 6.1781e-01, + 3.0693e-01, 4.4782e-01, 1.8348e-01, 6.5436e-01, + 9.5736e-01, 9.4978e-01, 2.2203e-01, 7.1895e-01, + 7.7491e-01, 1.7078e-01, 2.7903e-01, 4.1889e-01, + 1.4853e-01, 3.4897e-03, 2.1731e-01, 2.4215e-01, + 3.8969e-01, 4.1215e-01, 1.3361e-01, 4.6432e-02, + 4.5127e-01, 2.9005e-01, 6.0938e-01, 5.9054e-01, + 4.1539e-01, 3.6537e-01, 2.0181e-01, 2.9838e-02, + 1.8519e-01, 2.4171e-02, 1.3415e-01, 9.1166e-01, + 5.8295e-01, 9.1723e-01, 1.7492e-01, 9.0159e-01, + 3.7645e-01, 3.4025e-01, 2.9614e-01, 2.1353e-01, + 4.3318e-01, 5.6879e-01, 2.5781e-01, 8.3416e-01, + 7.4130e-01, 1.0773e-01, 3.3222e-01, 8.9479e-01, + 9.2591e-01, 6.4297e-01, 3.4223e-01, 6.5716e-01, + 8.5955e-01, 6.9486e-01, 9.0945e-01, 2.2563e-01, + 4.8421e-01, 8.6055e-02, 2.4828e-01, 4.5846e-01, + 5.5823e-01, 3.8470e-02, 8.0408e-02, 8.7295e-01, + 1.1442e-01, 6.7883e-01, 8.1315e-01, 3.7696e-01, + 1.6574e-02, 1.6713e-02, 2.4466e-01, 6.7002e-01, + 6.2239e-02, 1.4361e-01, 3.4630e-01, 6.6300e-01, + 7.0281e-01, 2.7165e-01, 5.0032e-01, 7.2116e-01, + 1.5788e-01, 2.8297e-01, 2.3188e-01, 8.9092e-01, + 9.3911e-01, 7.4703e-01, 7.4243e-01, 8.6912e-01, + 6.4955e-01, 2.5734e-01, 6.7459e-01, 6.1048e-01, + 9.0751e-01, 6.9611e-01, 8.5546e-01, 2.4057e-01, + 1.3409e-01, 7.1376e-01, 7.0472e-01, 4.9040e-01, + 3.5536e-01, 6.6420e-01, 8.1200e-01, 4.5435e-01, + 2.1666e-01, 4.1887e-01, 6.4981e-01, 2.3226e-01, + 6.9725e-02, 7.3732e-01, 2.6577e-01, 2.3758e-01, + 9.4229e-01, 6.2395e-01, 6.1865e-01, 9.9180e-02, + 6.4531e-01, 9.9102e-01, 9.0777e-02, 4.6811e-01, + 7.0523e-01, 3.6579e-01, 3.4625e-01, 5.8710e-01, + 5.9811e-01, 1.8949e-01, 3.9311e-02, 3.9617e-01, + 5.0071e-01, 3.7570e-01, 2.4552e-01, 2.6117e-01, + 9.3254e-01, 5.2420e-01, 5.7593e-01, 1.4900e-01, + 5.4496e-01, 5.2456e-01, 3.9216e-01, 8.1017e-01, + 5.2870e-01, 1.2899e-01, 1.9976e-01, 7.0247e-01, + 9.7196e-01, 3.5450e-02, 9.8821e-01, 4.5041e-01, + 6.9886e-01, 9.3043e-01, 8.8904e-01, 3.0349e-01, + 7.1390e-01, 3.5820e-01, 9.0010e-01, 3.6511e-01, + 8.6431e-01, 2.6833e-01, 6.5077e-01, 4.3159e-01, + 2.3637e-03, 3.6962e-01, 6.8563e-01, 3.9226e-01, + 4.2306e-01, 9.3246e-01, 6.2636e-01, 6.6569e-01, + 5.1769e-01, 1.0954e-01, 6.6563e-01, 5.6084e-01, + 6.2615e-01, 1.9512e-01, 6.7166e-01, 3.0464e-01, + 8.1481e-01, 2.0383e-01, 6.8527e-01, 6.0140e-01, + 9.1940e-01, 2.9010e-01, 1.9323e-01, 7.5308e-01, + 5.1655e-01, 7.7545e-01, 4.0819e-01, 4.6914e-01, + 1.0762e-01, 5.9083e-01, 4.1005e-02, 8.1673e-01, + 1.7977e-01, 4.4015e-01, 4.6866e-01, 6.1661e-01, + 8.0717e-01, 2.2196e-01, 3.1843e-01, 1.2570e-01, + 3.8681e-01, 5.1235e-02, 8.9325e-01, 9.5700e-01, + 9.3673e-01, 4.1681e-01, 4.6472e-01, 9.2014e-01, + 6.3272e-01, 6.6815e-01, 8.7993e-01, 4.9261e-01, + 4.1488e-01, 4.1331e-01, 8.3137e-01, 1.4267e-01, + 6.7118e-01, 7.3294e-01, 4.6344e-01, 3.4125e-01, + 8.1212e-01, 5.9438e-01, 9.2301e-01, 9.4274e-01, + 4.2425e-01, 2.0890e-01, 8.1161e-01, 6.6813e-01, + 7.7586e-01, 1.7382e-01, 4.0429e-01, 1.3756e-01, + 4.8850e-01, 7.7635e-01, 3.1478e-01, 7.9157e-01, + 3.8260e-01, 9.7448e-01, 2.5158e-01, 
2.5456e-01, + 3.7263e-01, 9.1997e-01, 7.1236e-01, 9.9155e-01, + 5.7040e-01, 8.1640e-01, 8.7423e-01, 5.7996e-01, + 5.8532e-01, 3.1681e-01, 5.1521e-01, 4.5605e-01, + 6.8051e-01, 7.1903e-01, 3.3910e-01, 2.6719e-01, + 3.5073e-01, 8.3548e-01, 1.2868e-01, 4.6235e-01, + 7.4859e-01, 4.4090e-01, 5.6777e-01, 7.4301e-01, + 8.9441e-01, 3.3698e-01, 1.4379e-01, 8.6005e-01, + 6.9398e-01, 7.0285e-04, 7.4110e-02, 1.9000e-01, + 3.7174e-02, 7.2348e-01, 2.9599e-01, 9.8261e-02, + 7.7121e-01, 6.8846e-01, 5.4729e-01, 5.8436e-01, + 3.4513e-01, 3.0918e-01, 2.8966e-01, 6.9551e-01, + 9.0674e-01, 2.9109e-01, 9.6780e-01, 9.4296e-01, + 8.6366e-01, 9.3170e-01, 1.1810e-01, 9.6329e-01, + 3.4415e-01, 3.3271e-01, 8.9077e-01, 4.5828e-01, + 7.8079e-01, 3.5690e-01, 6.8891e-01, 1.0958e-01, + 7.4747e-01, 6.1440e-01, 5.5642e-01, 6.7334e-01, + 9.5122e-01, 4.5693e-01, 6.5286e-01, 6.8427e-01, + 6.5640e-01, 1.3610e-01, 5.1817e-01, 4.5218e-01, + 4.9750e-01, 6.9949e-01, 1.8679e-01, 4.7306e-01, + 6.2149e-01, 6.3191e-01, 6.8227e-01, 9.5340e-01, + 6.9214e-01, 4.8327e-01, 6.2025e-01, 7.2734e-01, + 3.4948e-01, 9.1951e-01, 1.5345e-01, 9.4844e-01, + 7.9645e-01, 4.2942e-01, 3.0837e-01, 3.2774e-01, + 4.7750e-01, 3.2074e-01, 2.3827e-01, 9.4556e-01, + 8.5400e-01, 4.2681e-02, 9.5386e-01, 3.3202e-01, + 2.9276e-01, 1.8871e-01, 8.4606e-01, 6.1973e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4250, 0.1408, 0.2235, ..., 0.5768, 0.6841, 0.8384]) +tensor([0.7175, 0.8461, 0.0931, ..., 0.1554, 0.1137, 0.6137]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1026,378 +919,375 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 9.730679035186768 seconds - -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '366482', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.093939065933228} +Time: 10.3000328540802 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5850, 8366, 5592, 2122, 6679, 1664, 2469, 7640, 8756, - 2915, 7211, 1515, 5525, 1719, 3439, 4040, 8857, 5364, - 6132, 895, 3852, 156, 8811, 9741, 357, 3518, 4399, - 7286, 180, 9528, 5416, 6953, 1721, 2595, 7283, 9795, - 7825, 5079, 6282, 136, 4669, 6702, 3303, 3010, 2380, - 2256, 7997, 5883, 9682, 9926, 8146, 9663, 3941, 6670, - 9041, 817, 4692, 3045, 7544, 9848, 5442, 871, 3702, - 7376, 94, 6327, 3751, 7760, 7748, 920, 3358, 4678, - 8037, 93, 1218, 9719, 1249, 634, 196, 2894, 7487, - 1159, 8188, 332, 5966, 4396, 4268, 8665, 6886, 7804, - 7445, 4388, 7265, 7541, 8403, 5348, 9890, 2631, 5205, - 8499, 4507, 2643, 7756, 857, 4020, 8538, 7850, 4986, - 7312, 3766, 1567, 2852, 7235, 6927, 653, 379, 1077, - 6334, 4614, 1987, 3571, 6355, 1814, 6184, 9414, 2153, - 8882, 3562, 6625, 5036, 2967, 1233, 9339, 4935, 5597, - 9689, 4093, 5663, 8161, 6587, 3112, 612, 6031, 2698, - 3403, 9263, 9319, 8735, 104, 2810, 3672, 5639, 8827, - 5048, 8805, 1031, 9837, 9524, 6912, 3766, 3659, 7073, - 737, 797, 5226, 7104, 3103, 6515, 4992, 1665, 5226, - 6199, 6712, 6310, 7443, 3988, 2676, 2322, 244, 3312, - 4295, 2693, 9044, 9455, 7558, 2076, 364, 3468, 4685, - 2690, 1818, 1795, 8073, 8740, 3982, 5591, 7437, 4434, - 6272, 8601, 5896, 7206, 452, 124, 6062, 3722, 2858, - 3166, 6119, 1841, 8789, 9596, 5567, 5894, 7970, 5360, - 9933, 6197, 3790, 8303, 9150, 3759, 378, 6338, 8692, - 3037, 225, 8540, 8302, 3869, 1980, 7102, 3526, 84, - 2428, 5319, 4508, 9845, 2414, 8840, 833, 2752, 1575, - 8598, 7280, 7141, 2098, 7332, 9471, 7172, 6064, 5586, - 6496, 4719, 7246, 9857, 461, 1971, 1577, 2883, 521, - 887, 7601, 1096, 7587, 8032, 9839, 9759, 3694, 4208, - 2775, 4747, 9442, 6136, 6253, 7707, 4233, 955, 7815, - 9711, 2360, 974, 4097, 9684, 7356, 2815, 7178, 3217, - 6396, 7951, 6798, 3086, 4009, 4560, 5258, 4636, 5136, - 484, 5572, 9213, 7528, 8274, 5548, 1629, 8245, 2276, - 9250, 7073, 612, 1264, 8789, 9413, 9556, 4024, 8035, - 4389, 2075, 2015, 6780, 9946, 8182, 3173, 4875, 4374, - 7146, 6566, 1212, 3788, 6737, 1525, 4118, 597, 1164, - 95, 8578, 5382, 7, 4863, 8244, 8410, 5238, 6413, - 1176, 8621, 5893, 5470, 5285, 4907, 1576, 9176, 6701, - 5861, 5303, 5175, 2021, 3075, 7189, 9086, 6729, 1443, - 4021, 3184, 1282, 194, 3967, 741, 191, 6596, 1251, - 1613, 4577, 1550, 7988, 7486, 1702, 7074, 9193, 1036, - 2186, 5023, 2564, 6337, 940, 1519, 8948, 8449, 9328, - 479, 6279, 8562, 8923, 2685, 1574, 5617, 5722, 970, - 640, 8699, 1647, 1059, 8229, 1516, 892, 441, 4405, - 4899, 5037, 6072, 8409, 3457, 6678, 8778, 5168, 4725, - 1582, 2499, 3623, 3399, 2800, 6268, 2077, 192, 5047, - 1851, 3023, 9282, 7259, 5334, 5942, 1157, 8925, 7158, - 2680, 8224, 1863, 9792, 1845, 5045, 6307, 5364, 7590, - 3501, 3167, 5703, 4100, 6744, 3117, 6631, 2272, 3513, - 1946, 3024, 5825, 1185, 1348, 6559, 74, 4615, 3419, - 4395, 7611, 6167, 8539, 9714, 7761, 9431, 2318, 6641, - 1733, 4522, 5800, 6094, 9427, 1318, 4807, 6951, 6988, - 2813, 985, 7549, 9176, 9180, 4980, 2792, 4241, 7249, - 1163, 61, 7125, 2555, 6886, 8991, 196, 9270, 35, - 1529, 3941, 7793, 7718, 7033, 4030, 6810, 850, 9329, - 5979, 6544, 751, 4582, 8368, 565, 7754, 7052, 3290, - 8677, 983, 7257, 1909, 8475, 6667, 894, 6714, 9857, - 238, 8379, 8656, 2402, 83, 4439, 4472, 7734, 2074, - 3272, 618, 1715, 8742, 745, 2543, 7748, 5408, 6663, - 8178, 2886, 7594, 1196, 5498, 3894, 893, 8965, 6034, - 6443, 2520, 5737, 483, 2405, 4283, 6209, 1176, 448, - 606, 958, 8428, 
3541, 8984, 1463, 4865, 173, 2388, - 4515, 610, 3428, 834, 1897, 3416, 3506, 6850, 7650, - 1567, 2088, 4588, 3271, 6086, 2429, 5007, 1641, 3405, - 5664, 2262, 1925, 5269, 9056, 8204, 5139, 609, 3768, - 1372, 8780, 3638, 7690, 2428, 8787, 4274, 9543, 7138, - 8745, 2025, 668, 6826, 8790, 6413, 9920, 6540, 348, - 4430, 4748, 8976, 1596, 914, 2089, 5266, 7048, 2343, - 5663, 5852, 3682, 5650, 2554, 2359, 5522, 968, 4538, - 556, 900, 332, 6036, 4210, 6798, 9668, 7214, 3249, - 934, 7260, 1264, 9553, 9573, 1341, 5076, 7088, 723, - 8670, 6474, 3175, 9943, 1202, 328, 6087, 194, 8768, - 258, 7554, 9741, 2069, 3477, 9790, 8686, 2875, 4984, - 7848, 4751, 5568, 9759, 8560, 7437, 4239, 7813, 4618, - 6633, 990, 5961, 926, 7923, 4657, 2745, 2212, 9361, - 5154, 9425, 4873, 6600, 5372, 8037, 7620, 2502, 3781, - 7433, 6170, 819, 1336, 484, 3513, 4395, 19, 8130, - 7693, 6153, 6352, 4202, 7488, 1192, 4764, 7301, 6821, - 6865, 6976, 937, 1234, 1710, 549, 5075, 6580, 8729, - 4805, 1375, 3711, 1305, 4371, 3503, 8608, 3833, 3074, - 1072, 7655, 4667, 966, 192, 6852, 7586, 1491, 9585, - 8090, 3373, 1438, 2850, 8318, 8429, 9764, 8245, 7536, - 1862, 6568, 9277, 3906, 4401, 671, 9709, 9370, 6046, - 9211, 9673, 4331, 921, 674, 7979, 374, 7224, 8804, - 5223, 9172, 2758, 3516, 3647, 8604, 8106, 9070, 2337, - 3498, 3861, 2253, 9022, 3709, 4466, 2031, 9776, 7382, - 6631, 6983, 229, 9213, 4744, 5134, 2773, 3818, 2045, - 3684, 5192, 2488, 8503, 6845, 9997, 5752, 1325, 8942, - 352, 1614, 4161, 7427, 7988, 1814, 8864, 5443, 6505, - 9390, 1752, 5714, 9392, 1996, 5576, 5760, 390, 6435, - 7898, 7417, 7401, 1217, 7128, 1412, 6756, 7275, 5202, - 2409, 1660, 2736, 5263, 8336, 8044, 3978, 3020, 5519, - 7853, 8427, 824, 1792, 4254, 4572, 5695, 1587, 1988, - 4588, 728, 1627, 3761, 5276, 8300, 992, 8389, 3069, - 3780, 9133, 145, 9462, 7403, 1537, 9647, 5057, 3362, - 9039, 6376, 8446, 4900, 5977, 7345, 4098, 447, 1820, - 252, 9481, 6404, 6592, 1295, 8043, 4753, 3605, 3518, - 2606, 2287, 9068, 9762, 699, 1959, 5988, 6313, 7817, - 5094, 8321, 2706, 7762, 8810, 5984, 698, 2889, 3562, - 3511, 7298, 3188, 9974, 5668, 6020, 8731, 9606, 2958, - 8385, 4702, 6639, 3111, 8106, 914, 952, 5956, 6944, - 6389, 6930, 4511, 693, 2408, 8878, 4458, 6005, 4687, - 5262, 264, 7556, 7239, 6348, 319, 3127, 4870, 9830, - 6495, 9430, 82, 2845, 2158, 7553, 2485, 4718, 9487, - 9863, 2351, 9993, 9611, 7651, 6752, 9890, 4226, 7914, - 1090, 3994, 273, 9314, 2869, 8105, 8063, 8350, 5143, - 2771, 6914, 6289, 1069, 6603, 7055, 1101, 2035, 1775, - 4440, 1100, 9988, 6654, 8779, 3492, 65, 3068, 7601, - 6582, 5059, 9136, 6153, 4692, 6999, 662, 8797, 8102, - 5680, 9525, 2546, 5756, 4348, 7370, 557, 1546, 4320, - 5734]), - values=tensor([1.6341e-01, 5.4230e-01, 5.9412e-01, 6.2649e-01, - 6.1492e-01, 1.1463e-02, 8.8850e-01, 3.1827e-01, - 1.4850e-01, 1.7623e-01, 5.0631e-01, 8.7356e-01, - 2.6593e-01, 3.7335e-01, 6.7543e-01, 8.1424e-01, - 3.7460e-01, 6.9153e-01, 8.0907e-01, 8.3914e-01, - 9.7417e-02, 3.4410e-04, 1.3414e-01, 2.2155e-01, - 6.0112e-02, 8.4328e-01, 2.7848e-01, 1.4958e-01, - 7.9439e-01, 7.7172e-01, 2.7028e-01, 1.6852e-01, - 6.2794e-01, 3.7628e-01, 7.9275e-01, 2.0746e-01, - 6.5757e-02, 6.6970e-01, 1.3521e-02, 6.0790e-01, - 5.0451e-01, 9.2714e-01, 7.8707e-01, 4.1523e-01, - 5.5244e-01, 9.7376e-01, 6.7472e-01, 9.9321e-01, - 9.5176e-01, 6.2001e-01, 7.8968e-01, 3.0430e-01, - 8.2453e-01, 9.4042e-02, 5.1519e-01, 8.5738e-01, - 5.8535e-01, 8.7087e-01, 6.5326e-01, 1.7395e-01, - 3.0247e-01, 7.3945e-02, 4.1833e-01, 8.0312e-01, - 8.8320e-01, 5.1558e-01, 2.5666e-01, 5.6112e-01, - 
5.5084e-01, 7.0649e-01, 3.5059e-01, 7.8879e-01, - 5.2108e-02, 9.8239e-01, 3.4744e-01, 5.8319e-01, - 9.1928e-02, 4.9802e-02, 8.4140e-01, 2.0825e-01, - 6.5549e-01, 3.3967e-02, 4.5861e-01, 3.1781e-01, - 7.2623e-01, 4.3890e-01, 9.1717e-01, 6.7930e-01, - 2.2711e-01, 5.3114e-01, 9.4610e-01, 1.2012e-01, - 3.8896e-01, 9.0657e-01, 8.7514e-01, 9.4601e-01, - 6.8092e-01, 4.2792e-01, 7.8983e-01, 7.0867e-01, - 6.8668e-01, 7.3584e-01, 4.8951e-01, 1.1601e-01, - 5.8369e-01, 8.1548e-02, 4.6998e-01, 8.2472e-01, - 8.1798e-01, 6.4792e-01, 7.9098e-01, 4.6883e-01, - 5.3531e-01, 9.9356e-01, 2.4036e-01, 4.8060e-01, - 2.8861e-01, 8.7768e-01, 7.6854e-01, 2.6962e-01, - 9.4503e-01, 7.5491e-01, 6.3333e-01, 3.9724e-01, - 6.1673e-01, 9.9300e-01, 8.4452e-02, 6.1927e-01, - 7.8274e-01, 9.4288e-01, 5.5703e-01, 9.9018e-01, - 8.7030e-01, 6.7458e-01, 7.1377e-01, 3.9155e-01, - 4.0923e-01, 1.9269e-01, 7.8647e-01, 1.7260e-01, - 1.9351e-01, 1.6644e-02, 6.3159e-01, 4.2569e-01, - 3.5414e-01, 1.5447e-01, 6.0382e-01, 1.1039e-01, - 7.4996e-01, 9.7809e-01, 4.9703e-01, 6.8052e-02, - 4.0995e-03, 1.5993e-01, 2.5589e-01, 6.1582e-01, - 4.1141e-01, 8.3175e-01, 9.2280e-02, 6.6768e-01, - 2.1373e-01, 7.4338e-01, 3.8856e-01, 4.0554e-01, - 2.4237e-01, 6.8970e-01, 5.1570e-01, 2.2133e-04, - 8.5322e-01, 4.7984e-01, 4.1539e-01, 8.1641e-01, - 5.1215e-01, 7.5280e-01, 4.9588e-04, 8.3219e-01, - 1.3510e-01, 5.6800e-01, 8.8518e-01, 9.6846e-01, - 2.4576e-01, 9.1717e-01, 4.6194e-01, 4.1655e-01, - 8.5447e-01, 1.2544e-01, 6.0240e-01, 5.7586e-01, - 2.2428e-01, 4.5450e-01, 2.4531e-01, 3.5914e-01, - 5.8131e-01, 4.4973e-01, 3.0343e-01, 8.7101e-01, - 5.4478e-01, 5.0386e-01, 6.8135e-01, 4.2381e-01, - 3.1555e-02, 3.0972e-01, 2.4608e-01, 1.9970e-01, - 4.6368e-01, 9.9803e-01, 8.2862e-01, 8.5141e-01, - 8.7867e-01, 4.8625e-01, 5.2020e-01, 4.1959e-01, - 1.7473e-01, 2.5225e-01, 7.2666e-01, 9.2040e-01, - 1.8559e-01, 6.3520e-01, 4.5196e-01, 9.4681e-01, - 9.5216e-01, 1.0019e-01, 9.7707e-01, 4.5094e-01, - 5.0805e-01, 5.1974e-01, 5.3486e-01, 1.8077e-01, - 8.5364e-01, 7.2740e-01, 4.8004e-01, 6.8966e-01, - 2.0804e-01, 7.7792e-01, 6.3289e-01, 1.2722e-01, - 9.0620e-01, 4.9687e-01, 5.6847e-01, 1.3671e-01, - 4.0281e-01, 1.7219e-01, 1.9050e-01, 6.2485e-01, - 1.0260e-01, 2.5271e-02, 9.4031e-01, 9.4275e-01, - 7.8410e-03, 3.3465e-02, 3.6601e-01, 2.9329e-01, - 2.0289e-01, 8.1331e-01, 9.6038e-01, 7.8543e-01, - 8.1769e-01, 9.2929e-01, 5.1055e-01, 3.5358e-01, - 4.8515e-01, 3.5044e-01, 1.9924e-01, 8.1918e-01, - 2.9889e-01, 3.7500e-02, 2.3185e-01, 6.2795e-01, - 5.8909e-01, 1.2007e-01, 7.5640e-01, 8.0080e-01, - 8.4206e-01, 2.3327e-01, 8.8223e-01, 9.9512e-01, - 2.9164e-01, 4.3867e-01, 7.8315e-01, 9.6653e-01, - 4.0617e-01, 6.9039e-01, 4.3199e-01, 8.2813e-01, - 3.5563e-01, 9.1892e-01, 5.9521e-01, 8.9108e-01, - 3.7947e-01, 2.0591e-01, 6.7351e-01, 5.7368e-01, - 2.8254e-01, 6.7739e-02, 4.4276e-01, 9.7228e-01, - 4.6186e-01, 6.9010e-01, 8.4715e-01, 1.5150e-01, - 2.0227e-01, 3.6577e-01, 9.5994e-01, 2.5249e-01, - 6.5535e-02, 5.6708e-01, 7.2591e-01, 7.0050e-01, - 4.6753e-01, 9.1326e-02, 6.7496e-01, 5.8336e-01, - 3.1459e-01, 4.7441e-01, 6.5107e-01, 2.7592e-01, - 1.4742e-01, 4.5510e-01, 7.4301e-01, 4.8033e-01, - 1.4801e-01, 7.7726e-01, 9.3909e-01, 8.1948e-01, - 2.9724e-01, 8.1459e-01, 4.7430e-01, 3.8389e-01, - 5.8885e-02, 3.1119e-01, 5.3076e-01, 1.4421e-01, - 9.6859e-01, 9.4119e-01, 8.4274e-01, 8.1548e-02, - 4.2885e-02, 1.4390e-01, 9.9599e-01, 9.8519e-01, - 5.8954e-01, 4.9409e-01, 6.5081e-01, 4.7729e-01, - 7.1507e-01, 2.3377e-01, 3.5997e-01, 9.6579e-01, - 3.9773e-01, 2.1377e-02, 3.5134e-01, 6.4577e-01, - 
9.9705e-01, 4.8704e-01, 4.2052e-01, 3.9684e-01, - 9.2759e-01, 3.0995e-01, 3.1089e-01, 7.6678e-01, - 5.4422e-04, 9.2904e-01, 1.6571e-01, 9.8144e-01, - 7.6337e-01, 8.8536e-01, 1.6786e-01, 9.4677e-01, - 3.5525e-01, 3.5127e-01, 1.3314e-01, 8.7067e-01, - 2.8725e-01, 6.7870e-01, 3.3831e-01, 4.6605e-01, - 7.4196e-01, 8.8785e-01, 9.6258e-01, 7.7168e-01, - 4.8538e-01, 2.0843e-01, 6.4007e-01, 1.9033e-01, - 3.4627e-01, 1.1059e-01, 7.1554e-01, 5.5574e-01, - 7.6910e-01, 7.3835e-01, 2.8739e-01, 6.9284e-01, - 7.5175e-02, 9.9640e-01, 9.1137e-01, 2.2022e-01, - 4.9205e-01, 9.0083e-01, 5.1433e-01, 4.3040e-01, - 3.8550e-01, 4.9743e-01, 2.3713e-01, 2.5804e-01, - 8.8147e-02, 9.0523e-02, 8.5804e-01, 1.4407e-01, - 3.2087e-01, 6.9830e-01, 5.0311e-02, 5.9795e-01, - 2.3588e-01, 9.7651e-01, 1.1260e-01, 6.7643e-01, - 7.7883e-01, 4.7514e-01, 7.6449e-01, 3.5045e-01, - 7.3614e-01, 8.8137e-01, 4.6469e-01, 4.9258e-01, - 8.1075e-01, 6.2703e-01, 1.2233e-01, 2.7440e-01, - 6.9122e-01, 7.3033e-01, 3.4342e-02, 1.2576e-01, - 8.0974e-01, 4.0072e-01, 6.0245e-01, 1.5077e-02, - 4.1940e-01, 2.0633e-01, 7.1347e-01, 8.3114e-01, - 3.1728e-01, 3.9989e-01, 6.0468e-01, 2.0407e-01, - 8.7073e-01, 4.3857e-01, 8.9954e-02, 6.5643e-01, - 5.8418e-01, 3.5790e-01, 6.7886e-01, 9.5451e-01, - 3.6580e-01, 3.0585e-01, 7.6034e-01, 6.5825e-02, - 2.9720e-01, 3.1525e-01, 3.2830e-01, 7.9663e-02, - 7.4797e-01, 4.5439e-01, 4.4041e-01, 6.1706e-01, - 1.4869e-01, 3.7743e-01, 6.0526e-01, 7.4958e-01, - 1.3105e-01, 3.9610e-01, 8.5301e-01, 8.1549e-01, - 8.5854e-02, 7.4522e-01, 7.4950e-01, 3.7042e-01, - 2.4261e-01, 1.7942e-01, 8.2764e-01, 1.6097e-01, - 2.2542e-01, 5.2571e-01, 1.0887e-01, 3.8000e-02, - 8.1497e-01, 8.0201e-01, 9.4936e-01, 2.6775e-01, - 9.4312e-01, 9.3715e-01, 6.7901e-02, 4.0906e-01, - 1.5251e-01, 2.4190e-01, 2.4089e-01, 2.6151e-01, - 6.2245e-01, 4.6082e-01, 5.0570e-01, 2.5937e-01, - 3.5881e-01, 2.2385e-01, 2.8770e-02, 1.7716e-01, - 5.4713e-02, 5.1027e-01, 3.1178e-02, 6.5533e-01, - 2.2465e-01, 6.9285e-01, 2.5103e-01, 3.0570e-01, - 3.7755e-03, 8.7853e-01, 6.5763e-01, 7.6528e-02, - 9.2357e-01, 9.3172e-01, 1.4133e-01, 9.0949e-01, - 3.7744e-01, 2.6587e-01, 8.7043e-01, 1.0112e-02, - 2.8448e-01, 7.2542e-02, 1.5147e-01, 2.2166e-01, - 9.7594e-01, 3.2101e-01, 3.2571e-01, 9.1645e-01, - 5.1688e-01, 9.8729e-02, 3.8775e-01, 7.4951e-01, - 2.4091e-01, 2.2275e-01, 8.9300e-01, 4.1567e-01, - 8.1383e-01, 7.1688e-01, 7.5982e-01, 9.9028e-02, - 1.0199e-01, 1.7657e-01, 8.1854e-01, 9.5101e-01, - 8.6789e-01, 7.9254e-01, 9.6088e-01, 8.2911e-01, - 7.9367e-01, 5.3301e-01, 8.6699e-02, 6.5008e-01, - 2.4111e-01, 4.0949e-01, 7.9876e-01, 6.0784e-01, - 8.6621e-01, 7.1156e-01, 3.6004e-01, 1.5720e-02, - 6.4658e-01, 3.4960e-01, 8.6761e-01, 8.2966e-01, - 9.8307e-01, 1.8464e-01, 5.4622e-01, 2.8174e-01, - 4.2027e-01, 6.2481e-01, 2.5893e-01, 5.8887e-01, - 8.4609e-01, 2.6754e-01, 5.9884e-01, 1.1711e-01, - 6.7826e-01, 2.8852e-01, 6.4017e-01, 1.2806e-02, - 3.1902e-03, 4.7820e-01, 8.9185e-01, 5.7234e-02, - 6.7506e-01, 8.4710e-01, 6.0623e-01, 2.2402e-01, - 4.0295e-01, 6.4904e-01, 9.2718e-01, 3.1307e-01, - 6.5796e-01, 3.5345e-01, 1.9087e-01, 2.1269e-01, - 5.7823e-01, 3.9982e-01, 4.3154e-01, 6.7568e-01, - 6.8414e-01, 2.4388e-01, 8.7462e-01, 4.2036e-02, - 3.5337e-01, 5.3395e-01, 9.2982e-01, 8.6034e-01, - 9.3195e-01, 6.8440e-01, 2.5818e-01, 7.2423e-01, - 1.2544e-01, 2.7728e-01, 6.8784e-01, 6.0665e-01, - 1.3165e-01, 8.5017e-01, 4.3265e-03, 8.0981e-01, - 7.9983e-01, 3.2596e-01, 4.9482e-01, 1.1663e-01, - 4.6665e-01, 9.3435e-01, 7.9541e-01, 7.1414e-01, - 5.3506e-02, 4.0912e-01, 6.8238e-01, 3.4773e-01, - 
5.6049e-01, 7.1487e-01, 2.1138e-01, 8.1397e-01, - 5.4932e-01, 2.0457e-02, 5.6340e-02, 5.7141e-02, - 4.6515e-01, 1.4683e-01, 7.4366e-01, 4.1761e-02, - 5.1584e-02, 1.2634e-01, 4.6336e-01, 5.9261e-01, - 5.6130e-01, 7.8972e-02, 7.9983e-01, 2.2448e-01, - 1.8797e-01, 2.5057e-01, 7.7180e-01, 4.0901e-01, - 3.7017e-01, 4.0182e-01, 8.7125e-01, 2.8655e-01, - 1.4119e-01, 4.3303e-01, 6.6902e-01, 1.5943e-01, - 2.5892e-01, 5.0928e-01, 6.7761e-01, 3.2212e-02, - 3.0824e-01, 9.2674e-02, 3.6193e-02, 4.4219e-01, - 1.6390e-01, 5.9400e-01, 8.2224e-01, 6.9900e-01, - 5.2481e-01, 9.5165e-01, 4.4474e-01, 2.4445e-01, - 1.0981e-01, 9.3663e-01, 8.4897e-01, 9.5652e-01, - 4.6435e-01, 1.7098e-01, 5.9974e-01, 4.3018e-01, - 2.1721e-01, 3.8205e-01, 2.6684e-01, 2.6286e-01, - 8.1812e-01, 5.7012e-01, 6.3330e-01, 6.6896e-02, - 8.6486e-02, 5.7371e-01, 2.1333e-01, 5.1711e-01, - 3.0138e-01, 2.1686e-01, 9.4270e-01, 3.0485e-01, - 4.4156e-01, 9.5896e-01, 6.0745e-01, 1.5570e-01, - 4.1088e-01, 3.7352e-01, 3.6528e-01, 1.2019e-01, - 7.4565e-02, 5.5689e-01, 5.1790e-01, 1.3767e-01, - 1.2340e-01, 9.6204e-01, 1.6892e-01, 2.9572e-01, - 2.1066e-01, 3.3179e-01, 3.3274e-01, 8.4143e-01, - 8.3193e-01, 7.2407e-01, 7.0281e-01, 6.8041e-01, - 8.2730e-01, 1.8872e-01, 2.6244e-01, 2.1234e-01, - 7.4230e-01, 2.6976e-01, 6.3711e-01, 4.3716e-01, - 6.9298e-01, 8.1356e-01, 6.0939e-01, 7.0288e-01, - 3.5558e-01, 9.2964e-01, 1.1833e-01, 4.6906e-01, - 8.0664e-01, 4.8526e-01, 9.6169e-01, 4.1867e-01, - 8.3224e-01, 9.6285e-01, 9.5262e-01, 8.6424e-01, - 4.9817e-01, 5.4699e-01, 6.1704e-01, 3.9186e-01, - 6.8935e-01, 5.9439e-01, 7.3227e-01, 4.1190e-01, - 9.0233e-01, 3.2790e-01, 4.3206e-01, 5.5893e-01, - 5.8605e-02, 9.4473e-01, 2.4772e-01, 3.1700e-01, - 7.7245e-01, 5.5508e-01, 7.4692e-01, 1.2111e-01, - 8.0345e-01, 4.4160e-01, 6.1610e-01, 4.0302e-01, - 7.2755e-01, 5.2552e-01, 5.2678e-01, 1.3783e-01, - 6.7301e-01, 8.4666e-02, 9.7543e-01, 5.6391e-01, - 6.8981e-01, 5.0252e-01, 3.1345e-01, 8.3631e-01, - 5.1942e-01, 4.1463e-01, 3.8551e-01, 5.6612e-01, - 9.0226e-02, 4.5281e-01, 1.3323e-01, 8.3660e-01, - 3.3939e-01, 2.9707e-01, 3.8862e-01, 7.2849e-01, - 8.2419e-01, 6.6920e-01, 4.7306e-01, 8.6893e-01, - 7.5703e-01, 5.7027e-01, 5.4140e-01, 8.8231e-01, - 3.8567e-02, 3.5538e-01, 6.1680e-01, 5.9111e-01, - 4.5875e-01, 6.0458e-01, 7.5967e-02, 3.2091e-01, - 3.9756e-01, 5.3389e-01, 4.5029e-01, 1.5185e-01, - 6.9442e-01, 7.6304e-01, 4.6109e-01, 5.2312e-02, - 7.7110e-01, 4.8813e-01, 1.8485e-01, 9.4212e-01, - 2.0752e-01, 2.7050e-01, 8.0133e-01, 5.6501e-01, - 7.0480e-01, 3.7931e-01, 4.9377e-01, 7.7738e-01, - 6.5307e-01, 5.0050e-01, 1.0036e-01, 3.6651e-01, - 4.6488e-01, 6.5613e-01, 3.1970e-01, 5.0090e-01, - 2.3238e-01, 4.7361e-01, 4.2273e-01, 1.9837e-01, - 4.1443e-01, 2.8358e-01, 5.9909e-01, 4.6853e-01, - 1.5862e-01, 5.4860e-01, 9.4413e-01, 6.4800e-03, - 2.0553e-01, 6.7561e-01, 1.6860e-01, 5.9291e-01, - 2.2881e-01, 3.9992e-01, 6.9442e-01, 7.9259e-01, - 6.9638e-01, 7.6941e-01, 2.3646e-01, 7.2314e-01, - 1.4276e-01, 4.5362e-01, 5.5788e-01, 2.2118e-01, - 3.1804e-01, 6.9545e-01, 4.2483e-01, 3.3560e-01, - 3.3597e-01, 4.0398e-02, 1.9135e-01, 5.4384e-01, - 5.9982e-01, 9.7372e-01, 1.5970e-01, 6.0079e-01, - 5.9714e-01, 9.5044e-01, 5.0232e-01, 3.6790e-01, - 7.3455e-01, 8.0673e-01, 4.0026e-01, 6.3795e-02, - 8.2192e-01, 1.8676e-01, 1.9070e-01, 6.2954e-02, - 1.8591e-01, 1.6582e-01, 8.1810e-01, 1.1599e-02, - 4.7558e-01, 8.4737e-01, 7.5306e-01, 1.2992e-01, - 1.5127e-01, 2.0163e-01, 4.6257e-01, 3.4956e-01, - 9.0495e-01, 2.1175e-03, 9.4719e-01, 3.1681e-01, - 2.0418e-01, 5.3479e-02, 3.5683e-01, 5.8247e-02, - 
9.3423e-01, 1.1090e-01, 4.3007e-01, 6.1117e-01, - 4.0593e-01, 1.1448e-02, 8.5302e-01, 2.5408e-01, - 9.1190e-01, 6.4738e-01, 2.7596e-01, 2.8235e-01, - 4.6522e-01, 8.7885e-01, 7.0320e-01, 4.2790e-01, - 2.9479e-02, 3.8500e-01, 3.9245e-01, 1.0004e-01, - 4.2397e-01, 7.1833e-01, 6.6614e-01, 4.6682e-02, - 5.8017e-01, 6.0782e-01, 4.8419e-01, 5.5802e-01, - 2.3916e-01, 1.4114e-01, 8.3739e-01, 1.0626e-01, - 5.1946e-01, 9.4847e-01, 4.1767e-01, 3.7856e-01, - 1.1090e-01, 1.5010e-01, 4.3945e-01, 7.5067e-02, - 9.8959e-01, 2.8002e-01, 5.0613e-01, 8.0707e-01, - 6.1595e-01, 8.2005e-01, 9.9749e-01, 1.1749e-01, - 6.5959e-01, 2.3371e-01, 8.3971e-01, 4.3270e-03, - 6.2581e-01, 8.0238e-01, 2.8393e-01, 7.0314e-01, - 2.0960e-01, 3.2954e-02, 6.5011e-01, 8.0206e-01, - 9.2215e-01, 8.1873e-01, 3.4350e-01, 2.8733e-01, - 1.9274e-01, 3.4014e-01, 3.0741e-01, 3.4144e-01, - 2.7448e-02, 7.6554e-01, 6.2323e-01, 3.0307e-01, - 4.5175e-01, 3.9421e-01, 8.5280e-01, 6.5476e-01, - 3.1057e-01, 3.6455e-01, 8.0890e-01, 2.7987e-01]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([7518, 9696, 6300, 262, 1821, 8285, 8778, 3322, 8898, + 7577, 7218, 6194, 7006, 9614, 4003, 4685, 5395, 3357, + 7430, 5632, 6273, 2540, 3870, 8002, 5621, 5158, 4894, + 8789, 8631, 475, 6021, 25, 1478, 2181, 9967, 8972, + 7148, 7237, 1324, 2589, 6704, 1661, 3441, 9044, 8782, + 3179, 2340, 6434, 8915, 9575, 1895, 668, 5800, 3712, + 3120, 4497, 3003, 8475, 1656, 5317, 534, 1209, 750, + 4867, 5146, 2934, 2534, 4093, 7033, 3431, 1147, 8260, + 6120, 2085, 4612, 4043, 7468, 6975, 1754, 8217, 4410, + 1740, 172, 9577, 8177, 556, 6215, 2860, 2656, 3624, + 7154, 9768, 4398, 1947, 2674, 6729, 4022, 4851, 453, + 8512, 6182, 9666, 2673, 1056, 2252, 4391, 7487, 5177, + 4350, 1020, 3998, 6787, 4433, 4098, 3043, 4595, 2036, + 200, 3964, 7514, 4037, 5869, 3218, 7690, 6624, 488, + 1881, 9085, 9814, 4334, 6660, 1075, 1040, 4720, 1813, + 9907, 1998, 1000, 2004, 5683, 1137, 3602, 5453, 9566, + 2417, 9925, 7869, 179, 8191, 7442, 2336, 1365, 7166, + 7354, 6152, 2462, 5973, 6124, 5756, 3298, 1414, 4077, + 7759, 305, 7151, 2537, 5054, 8993, 7229, 2942, 3483, + 4002, 2693, 2448, 5771, 826, 1254, 7055, 4953, 66, + 2508, 2351, 3206, 4067, 5669, 9712, 6741, 1066, 7637, + 278, 5450, 3287, 1711, 9900, 4655, 3158, 1712, 6656, + 6936, 9886, 6105, 1083, 2126, 1861, 6749, 4491, 5376, + 3481, 234, 4214, 4686, 7277, 8302, 2751, 3845, 468, + 4378, 3280, 8845, 7340, 4948, 2744, 5128, 5325, 2078, + 2907, 395, 3547, 3669, 8960, 8102, 8374, 6349, 3441, + 4948, 3355, 6904, 4725, 3674, 6960, 2790, 5677, 2877, + 5123, 2601, 7479, 643, 1952, 585, 168, 2413, 3560, + 9344, 9051, 864, 328, 9846, 3635, 7758, 7148, 9253, + 6009, 2555, 5884, 204, 4047, 201, 9134, 7649, 2615, + 9583, 9172, 9003, 780, 7486, 9587, 9477, 5154, 4457, + 1105, 3365, 6202, 8659, 5414, 2426, 1087, 1957, 6749, + 4929, 1049, 3270, 246, 8629, 2498, 8221, 4504, 9911, + 9966, 3014, 4245, 6556, 3305, 589, 4137, 318, 6145, + 8838, 3468, 1976, 6641, 9251, 2588, 2851, 8127, 8656, + 2062, 2975, 6380, 2214, 4190, 5031, 9304, 614, 6944, + 562, 4917, 969, 3937, 2534, 4263, 7288, 9607, 4730, + 444, 2886, 3273, 3567, 3830, 5410, 8598, 1862, 5598, + 4933, 8354, 2174, 8958, 5295, 846, 3272, 353, 2969, + 8177, 7603, 8213, 9944, 5897, 2136, 4476, 7073, 1115, + 8358, 8989, 9345, 6021, 6613, 901, 8445, 784, 8596, + 4500, 9360, 5470, 2745, 6764, 1266, 8614, 2516, 1972, + 8005, 3834, 2717, 7044, 7458, 1886, 9756, 5069, 5734, + 6821, 745, 6731, 1536, 9880, 9319, 8126, 6805, 7453, + 39, 3024, 3069, 9783, 2264, 1259, 738, 3734, 
9004, + 2095, 3027, 5789, 1350, 4750, 5589, 5192, 9977, 2146, + 5549, 8117, 6364, 9272, 4409, 5362, 7387, 7399, 2899, + 7420, 5273, 7946, 7115, 9377, 5767, 8802, 1095, 9889, + 8008, 7921, 1671, 1137, 5636, 5204, 9943, 2483, 1303, + 9433, 2389, 9126, 7106, 1954, 8519, 236, 6693, 2794, + 7942, 5186, 904, 5057, 7613, 5066, 8772, 1477, 4880, + 4179, 2818, 5601, 8106, 1178, 3735, 7444, 9960, 1324, + 2727, 2041, 1199, 5652, 3114, 9046, 3713, 861, 4463, + 1424, 126, 4816, 8641, 7447, 5664, 8120, 6968, 9745, + 7132, 5777, 8388, 3282, 8980, 1088, 7293, 9885, 4266, + 6309, 5469, 6966, 60, 1227, 5501, 999, 7686, 448, + 8913, 1964, 9037, 316, 5677, 3113, 2190, 347, 8164, + 7928, 9716, 4357, 5297, 5956, 2459, 7237, 656, 182, + 7452, 3500, 3896, 5526, 2240, 3745, 33, 1094, 4722, + 4207, 1962, 5999, 8357, 4119, 6009, 7333, 6925, 3159, + 7769, 1317, 8086, 4883, 3762, 7069, 6702, 4912, 5628, + 6146, 62, 2316, 433, 2097, 6311, 24, 347, 8288, + 2746, 1342, 2404, 6710, 5242, 178, 5849, 8133, 6624, + 1815, 5332, 533, 7044, 3421, 9752, 6107, 1812, 8227, + 6724, 5442, 2308, 798, 2453, 8060, 6419, 6234, 6264, + 4931, 435, 2405, 2270, 5349, 1220, 848, 2804, 8394, + 3337, 5183, 5812, 2367, 5067, 5907, 5088, 7972, 460, + 2064, 5097, 9032, 8929, 7371, 1494, 149, 4759, 7969, + 968, 5248, 9684, 4863, 9748, 9909, 8626, 6754, 6340, + 6417, 61, 6772, 600, 1936, 7091, 7755, 9124, 8882, + 8496, 3711, 9212, 9435, 8216, 7685, 8063, 5288, 9492, + 1160, 4958, 7970, 4718, 6769, 5717, 1553, 4760, 9541, + 6566, 6456, 7743, 3822, 6876, 5097, 2671, 3209, 3226, + 2793, 7122, 5735, 6937, 1731, 1634, 4869, 5674, 6361, + 2516, 6708, 9293, 3974, 5537, 1905, 6525, 2223, 7014, + 2692, 1876, 9845, 157, 3891, 113, 7490, 2632, 6626, + 6686, 7895, 5360, 8738, 4685, 3986, 1130, 2869, 3422, + 4812, 2624, 6916, 9474, 1705, 5886, 4866, 6374, 9366, + 9761, 9618, 1119, 6229, 6311, 8794, 8597, 4133, 6121, + 9540, 6417, 1594, 3884, 2402, 1780, 5984, 4536, 6567, + 3892, 5500, 7849, 6302, 1302, 4311, 6900, 5784, 7487, + 203, 191, 8618, 8956, 318, 7779, 3949, 5494, 8492, + 1675, 5732, 9756, 9204, 5091, 6134, 4301, 5904, 184, + 395, 1607, 3648, 1844, 643, 5663, 6820, 5174, 2484, + 2223, 2313, 396, 3592, 96, 467, 7251, 717, 5592, + 9116, 3470, 342, 8485, 6092, 1390, 7765, 2309, 8318, + 4123, 9225, 9856, 8144, 9651, 2603, 192, 7575, 6749, + 7640, 6468, 2711, 5163, 838, 4488, 9754, 2066, 6247, + 4650, 6711, 6711, 6741, 5236, 8684, 1992, 6812, 9945, + 5083, 7942, 5645, 8754, 2371, 7966, 1027, 670, 8926, + 6913, 5100, 8627, 9784, 7560, 5325, 5546, 2186, 4255, + 3502, 2906, 7383, 6744, 5666, 3739, 2679, 2518, 4617, + 8685, 5307, 9705, 9940, 2328, 8384, 8001, 6555, 5871, + 2468, 7100, 5998, 5692, 4802, 4015, 2628, 7408, 5224, + 1078, 6410, 2468, 2961, 9945, 6353, 5587, 3018, 8393, + 1265, 8258, 7873, 8344, 313, 5155, 2929, 3301, 2147, + 6945, 9130, 9351, 284, 2838, 7395, 3957, 4616, 2128, + 9332, 7315, 742, 5824, 4092, 9802, 8791, 1788, 7391, + 290, 4777, 1756, 6447, 2315, 2564, 4732, 4959, 23, + 2501, 5485, 6508, 6099, 4580, 6549, 2248, 9033, 5440, + 6995, 6959, 9024, 3235, 5667, 4553, 1072, 4400, 5450, + 7840, 965, 5626, 2280, 9236, 4299, 7041, 7596, 5077, + 9967, 9369, 2731, 3895, 1718, 5713, 2354, 8425, 3764, + 1992, 8248, 668, 8845, 5098, 7778, 1796, 8703, 6523, + 6952, 2673, 4419, 8055, 7289, 9732, 6094, 7227, 1426, + 4580, 6981, 2751, 1273, 3522, 3234, 1740, 7604, 3606, + 3783, 2377, 4681, 7158, 3865, 4704, 678, 1050, 5256, + 5611, 5719, 7630, 667, 5582, 2950, 6855, 9179, 8765, + 8370, 109, 2915, 6690, 8972, 8846, 8957, 2389, 7623, + 2847, 459, 8803, 
9389, 6178, 5855, 792, 337, 4567, + 9620, 3552, 2412, 8717, 2207, 7057, 1364, 3667, 5420, + 3386]), + values=tensor([1.3041e-01, 8.3167e-01, 2.6285e-01, 6.6326e-01, + 8.6360e-01, 6.5715e-02, 9.5991e-01, 7.7502e-01, + 1.5176e-01, 3.7598e-01, 8.5691e-01, 8.8353e-01, + 4.2667e-01, 8.6303e-01, 7.8944e-01, 5.3616e-01, + 3.6177e-01, 2.8447e-01, 5.8604e-01, 3.6488e-01, + 3.9226e-01, 4.0463e-02, 6.4984e-01, 6.8213e-01, + 3.0299e-01, 1.5221e-01, 8.6401e-01, 5.6470e-01, + 6.8510e-01, 5.4418e-01, 6.6205e-01, 2.9520e-01, + 5.0068e-01, 6.0881e-01, 2.7799e-01, 8.1099e-01, + 8.1696e-01, 6.5516e-01, 9.3995e-01, 2.7441e-01, + 4.3716e-01, 8.8088e-01, 3.7994e-01, 6.0887e-01, + 9.4212e-01, 1.9859e-01, 6.6642e-01, 2.9686e-01, + 7.7572e-02, 9.2566e-01, 6.0326e-01, 1.5910e-01, + 3.9549e-01, 1.2085e-01, 2.4669e-01, 6.6434e-01, + 5.4194e-01, 5.9233e-01, 4.7001e-01, 8.4553e-01, + 6.5650e-01, 6.0743e-01, 5.2093e-01, 2.1412e-01, + 7.3438e-01, 8.5971e-01, 8.0479e-01, 5.5348e-01, + 7.6964e-01, 9.0794e-01, 4.3225e-01, 1.9787e-02, + 4.8258e-01, 3.3029e-01, 4.5348e-01, 4.5391e-01, + 7.7268e-01, 8.0093e-01, 3.5913e-01, 3.2663e-01, + 7.2599e-01, 5.0492e-01, 6.2514e-01, 4.5307e-01, + 9.2036e-01, 9.6876e-01, 6.5593e-01, 1.7458e-02, + 9.0660e-01, 8.9408e-01, 4.4944e-01, 9.0032e-02, + 1.5076e-01, 3.1631e-01, 6.7634e-01, 1.6319e-01, + 7.3819e-01, 5.2802e-03, 3.2231e-01, 2.6514e-01, + 2.9081e-01, 1.6334e-01, 5.9931e-01, 8.6034e-01, + 7.8469e-01, 4.0042e-01, 3.3385e-01, 9.5543e-01, + 4.1403e-01, 7.1569e-01, 3.3312e-01, 2.8133e-01, + 1.6364e-01, 2.7316e-01, 4.9372e-01, 9.1315e-01, + 1.1311e-01, 6.3113e-01, 7.4994e-01, 5.8650e-01, + 9.1784e-01, 9.8427e-03, 1.1119e-01, 8.4274e-01, + 6.7618e-01, 6.5958e-01, 7.9249e-01, 6.0204e-01, + 2.0975e-01, 2.9920e-01, 4.3059e-01, 4.8873e-01, + 5.5887e-01, 2.0476e-01, 9.0267e-01, 7.5136e-01, + 8.4611e-01, 3.6738e-01, 5.2257e-01, 5.6348e-01, + 4.3848e-01, 3.8505e-01, 9.4682e-01, 6.4161e-01, + 9.5527e-01, 8.4654e-01, 7.1221e-01, 7.3888e-01, + 3.2743e-01, 5.4002e-01, 9.6379e-01, 5.9026e-01, + 3.7294e-01, 4.2773e-01, 8.8207e-01, 6.2644e-01, + 9.5097e-01, 1.3478e-01, 2.4806e-02, 2.2406e-01, + 6.2953e-01, 5.4355e-01, 3.3764e-01, 6.3472e-01, + 4.5224e-01, 8.4833e-02, 8.2615e-01, 2.1149e-01, + 9.4310e-01, 7.3199e-01, 1.5563e-01, 6.0103e-01, + 1.3393e-01, 2.9121e-01, 2.6980e-01, 2.9433e-01, + 6.6666e-01, 7.0104e-01, 4.9024e-01, 6.3149e-01, + 4.8279e-01, 1.4330e-01, 3.1238e-01, 6.0270e-01, + 7.4806e-01, 2.3777e-01, 4.9348e-01, 9.5404e-01, + 1.4182e-01, 6.2322e-01, 9.6427e-01, 3.8601e-01, + 8.9012e-02, 2.9387e-01, 4.8207e-01, 9.1009e-01, + 8.4314e-01, 5.8469e-01, 9.7026e-01, 3.8950e-01, + 5.0291e-01, 4.0643e-01, 9.1954e-01, 2.6164e-01, + 5.6263e-01, 2.1560e-01, 4.7691e-01, 5.7944e-01, + 8.1369e-03, 1.9692e-02, 9.9720e-01, 5.4116e-01, + 3.5419e-01, 1.6319e-02, 7.0477e-01, 2.0930e-01, + 2.7323e-01, 8.2105e-01, 2.6184e-01, 5.2832e-01, + 9.5236e-01, 8.5917e-01, 2.8397e-02, 4.3195e-01, + 8.5812e-01, 1.9410e-02, 7.8711e-01, 8.0554e-01, + 2.3720e-01, 9.8231e-01, 1.2190e-01, 9.7974e-01, + 2.1714e-01, 3.9420e-01, 3.7000e-01, 5.2780e-01, + 5.1473e-01, 6.5503e-01, 7.8142e-01, 8.3461e-01, + 2.8050e-01, 6.0711e-01, 7.6428e-01, 1.1920e-01, + 4.7358e-01, 3.4573e-01, 4.0436e-01, 7.9038e-01, + 6.8841e-01, 1.1705e-01, 7.6729e-01, 3.5260e-01, + 1.2207e-02, 3.2093e-01, 7.8719e-01, 5.3983e-01, + 5.8843e-01, 6.9710e-01, 5.4039e-01, 7.6871e-01, + 6.2762e-01, 5.4210e-01, 4.5772e-01, 8.3344e-01, + 8.3078e-02, 5.1283e-01, 7.1850e-01, 5.3715e-01, + 9.1040e-01, 9.4004e-01, 5.8271e-02, 5.1221e-01, + 6.7820e-01, 5.2404e-01, 4.7045e-01, 
9.5798e-01, + 3.7354e-02, 8.5443e-01, 2.5596e-01, 8.9251e-02, + 4.2693e-02, 6.4525e-01, 3.5553e-01, 3.1609e-01, + 5.7268e-01, 4.7798e-01, 4.2599e-03, 6.5780e-01, + 6.6970e-01, 3.6429e-01, 9.9605e-01, 4.7338e-01, + 1.6846e-01, 1.9407e-01, 3.2190e-01, 3.8114e-01, + 1.0284e-01, 3.9789e-01, 1.9114e-01, 3.5616e-01, + 1.4658e-01, 2.9873e-01, 2.3371e-01, 5.1649e-01, + 7.8222e-01, 4.7979e-02, 6.0290e-01, 2.2453e-01, + 9.7654e-01, 8.3028e-01, 4.5758e-01, 7.7794e-01, + 3.7506e-01, 1.1357e-01, 5.2466e-01, 3.6868e-01, + 8.2834e-02, 8.7007e-01, 4.9073e-02, 7.9799e-01, + 8.6213e-02, 3.2764e-01, 7.6745e-01, 6.2858e-01, + 3.4245e-01, 1.4275e-01, 4.3392e-01, 5.5763e-01, + 7.9863e-01, 2.6162e-01, 8.5317e-01, 6.4938e-01, + 9.3924e-01, 2.2927e-03, 4.8312e-01, 1.8976e-01, + 9.7836e-01, 5.1857e-01, 3.7434e-01, 4.9652e-01, + 7.6274e-01, 8.8894e-01, 4.9709e-01, 9.9719e-01, + 8.5105e-01, 7.2986e-01, 3.4902e-01, 2.5750e-01, + 1.0834e-01, 4.8259e-01, 2.2992e-01, 2.9985e-01, + 5.3388e-01, 5.9629e-01, 9.1799e-01, 6.3888e-01, + 9.0394e-01, 2.1252e-01, 2.0328e-01, 8.5727e-01, + 8.1034e-01, 2.4269e-01, 9.1198e-02, 6.6476e-01, + 4.5387e-02, 6.4642e-02, 3.0902e-01, 4.9403e-01, + 5.4217e-01, 6.0907e-01, 3.2529e-01, 5.7728e-01, + 7.4848e-01, 8.5174e-01, 2.9542e-01, 2.1850e-02, + 8.8083e-01, 8.6589e-01, 5.3352e-01, 8.7861e-01, + 9.0461e-01, 1.0456e-01, 1.4724e-01, 4.3868e-01, + 7.9412e-01, 5.3568e-01, 9.0452e-01, 5.5981e-01, + 2.2401e-01, 9.8722e-01, 2.4030e-01, 5.1078e-01, + 4.0485e-01, 8.0013e-02, 4.1112e-01, 3.5888e-01, + 4.7125e-01, 8.3656e-02, 4.8604e-01, 8.1332e-01, + 4.7391e-03, 7.0845e-01, 6.5425e-01, 6.0492e-01, + 7.5155e-01, 6.5385e-01, 2.2705e-01, 7.3624e-01, + 6.2400e-01, 2.4495e-01, 7.5005e-01, 2.6759e-01, + 4.3052e-01, 8.5914e-01, 8.3231e-01, 1.2598e-01, + 8.4112e-02, 1.9060e-01, 2.3896e-01, 2.1126e-01, + 3.5790e-01, 1.9189e-01, 4.1223e-01, 9.8631e-01, + 9.8919e-01, 3.4538e-01, 5.3891e-01, 6.4956e-02, + 4.8657e-01, 4.4859e-01, 5.5118e-01, 6.3965e-01, + 7.0850e-01, 5.9231e-02, 1.2184e-01, 4.5675e-01, + 4.7291e-01, 1.9259e-01, 3.1329e-01, 8.6873e-01, + 1.6203e-01, 9.2237e-01, 3.9722e-01, 3.5045e-01, + 1.4911e-01, 7.9886e-01, 6.8584e-01, 9.4234e-01, + 3.0581e-01, 6.0700e-01, 7.9474e-01, 5.1465e-01, + 9.6574e-01, 9.7807e-01, 1.5252e-01, 4.3097e-01, + 9.2783e-01, 7.1882e-01, 7.2712e-01, 1.6256e-01, + 2.4866e-01, 3.7688e-01, 3.3680e-01, 8.5072e-01, + 2.3886e-01, 6.2812e-01, 7.7314e-01, 7.6736e-01, + 5.6288e-01, 3.5074e-01, 6.4221e-01, 5.2145e-01, + 6.6666e-01, 8.4864e-01, 6.1669e-01, 4.6744e-01, + 8.0764e-01, 4.8521e-01, 1.1360e-01, 8.7306e-01, + 8.8180e-01, 8.1429e-01, 9.7086e-01, 7.6475e-02, + 8.1469e-01, 1.6678e-01, 1.1948e-01, 6.2224e-01, + 9.1905e-01, 7.9098e-01, 2.4631e-01, 5.4597e-01, + 5.5153e-01, 1.0818e-01, 1.4021e-01, 3.3312e-01, + 2.2719e-01, 5.6543e-02, 7.0716e-01, 1.9460e-01, + 6.4614e-01, 5.2791e-01, 8.9249e-01, 6.3819e-01, + 5.9491e-01, 4.6935e-01, 6.1055e-01, 4.0601e-02, + 1.3569e-01, 9.6450e-01, 1.0903e-01, 9.7617e-01, + 7.2183e-01, 8.3720e-01, 2.8106e-01, 7.8349e-01, + 8.4787e-01, 7.5451e-01, 5.6493e-01, 1.6383e-01, + 1.2315e-01, 6.6605e-01, 8.4475e-01, 7.3330e-01, + 2.9853e-01, 7.9348e-01, 7.2687e-01, 6.0237e-01, + 5.8598e-01, 8.8539e-01, 3.5050e-01, 3.6175e-01, + 6.9359e-01, 6.2558e-01, 9.2810e-01, 1.3114e-01, + 3.1580e-02, 7.0586e-01, 7.9015e-01, 5.8570e-01, + 1.3430e-01, 2.3354e-01, 6.3306e-01, 8.4906e-01, + 5.5013e-01, 8.5825e-02, 9.2716e-01, 5.4252e-01, + 6.2825e-01, 4.3084e-01, 5.6270e-02, 1.1638e-01, + 1.3731e-01, 7.7385e-01, 3.9576e-01, 8.6116e-02, + 6.2543e-01, 7.3140e-01, 3.6416e-01, 
7.3622e-01, + 7.5471e-01, 6.4980e-01, 1.0022e-01, 3.3981e-01, + 2.2794e-01, 4.8078e-01, 4.2247e-01, 8.1942e-01, + 1.8379e-01, 5.6845e-01, 5.0755e-01, 3.0208e-01, + 8.2405e-01, 2.3302e-01, 1.9578e-01, 9.7528e-01, + 5.9984e-01, 1.6057e-01, 1.3373e-01, 6.8478e-01, + 5.0837e-03, 5.8461e-01, 5.0733e-01, 5.1389e-01, + 1.9036e-01, 4.2977e-01, 7.9566e-01, 7.1653e-01, + 9.7823e-02, 8.1174e-01, 2.2992e-02, 9.0406e-01, + 5.0683e-01, 4.4278e-01, 4.1399e-01, 6.2416e-02, + 1.9389e-01, 2.4011e-01, 7.1807e-01, 3.3743e-01, + 8.4328e-01, 7.2589e-01, 2.1221e-01, 5.5074e-01, + 1.8528e-01, 4.6362e-01, 1.9508e-01, 3.3855e-01, + 1.5190e-01, 8.4130e-01, 4.7200e-01, 6.8265e-01, + 5.4146e-01, 2.6877e-01, 9.7685e-01, 9.8942e-01, + 5.4228e-01, 1.0248e-02, 6.3921e-01, 3.8272e-01, + 7.4936e-01, 1.1421e-01, 5.9560e-01, 8.5940e-01, + 5.3870e-01, 4.4622e-01, 4.4721e-01, 8.3619e-01, + 6.2216e-02, 5.7225e-01, 4.4771e-01, 5.8433e-01, + 7.9385e-01, 9.4364e-01, 8.4174e-02, 6.5572e-01, + 7.1904e-01, 7.9004e-01, 2.1308e-01, 6.8903e-01, + 9.3781e-01, 1.3756e-01, 8.5481e-01, 9.6659e-01, + 3.6367e-01, 4.8802e-01, 3.1375e-01, 1.1494e-01, + 5.5924e-01, 2.8019e-02, 1.5756e-01, 5.2825e-01, + 4.3190e-01, 4.3413e-01, 7.6832e-02, 6.1781e-01, + 3.0693e-01, 4.4782e-01, 1.8348e-01, 6.5436e-01, + 9.5736e-01, 9.4978e-01, 2.2203e-01, 7.1895e-01, + 7.7491e-01, 1.7078e-01, 2.7903e-01, 4.1889e-01, + 1.4853e-01, 3.4897e-03, 2.1731e-01, 2.4215e-01, + 3.8969e-01, 4.1215e-01, 1.3361e-01, 4.6432e-02, + 4.5127e-01, 2.9005e-01, 6.0938e-01, 5.9054e-01, + 4.1539e-01, 3.6537e-01, 2.0181e-01, 2.9838e-02, + 1.8519e-01, 2.4171e-02, 1.3415e-01, 9.1166e-01, + 5.8295e-01, 9.1723e-01, 1.7492e-01, 9.0159e-01, + 3.7645e-01, 3.4025e-01, 2.9614e-01, 2.1353e-01, + 4.3318e-01, 5.6879e-01, 2.5781e-01, 8.3416e-01, + 7.4130e-01, 1.0773e-01, 3.3222e-01, 8.9479e-01, + 9.2591e-01, 6.4297e-01, 3.4223e-01, 6.5716e-01, + 8.5955e-01, 6.9486e-01, 9.0945e-01, 2.2563e-01, + 4.8421e-01, 8.6055e-02, 2.4828e-01, 4.5846e-01, + 5.5823e-01, 3.8470e-02, 8.0408e-02, 8.7295e-01, + 1.1442e-01, 6.7883e-01, 8.1315e-01, 3.7696e-01, + 1.6574e-02, 1.6713e-02, 2.4466e-01, 6.7002e-01, + 6.2239e-02, 1.4361e-01, 3.4630e-01, 6.6300e-01, + 7.0281e-01, 2.7165e-01, 5.0032e-01, 7.2116e-01, + 1.5788e-01, 2.8297e-01, 2.3188e-01, 8.9092e-01, + 9.3911e-01, 7.4703e-01, 7.4243e-01, 8.6912e-01, + 6.4955e-01, 2.5734e-01, 6.7459e-01, 6.1048e-01, + 9.0751e-01, 6.9611e-01, 8.5546e-01, 2.4057e-01, + 1.3409e-01, 7.1376e-01, 7.0472e-01, 4.9040e-01, + 3.5536e-01, 6.6420e-01, 8.1200e-01, 4.5435e-01, + 2.1666e-01, 4.1887e-01, 6.4981e-01, 2.3226e-01, + 6.9725e-02, 7.3732e-01, 2.6577e-01, 2.3758e-01, + 9.4229e-01, 6.2395e-01, 6.1865e-01, 9.9180e-02, + 6.4531e-01, 9.9102e-01, 9.0777e-02, 4.6811e-01, + 7.0523e-01, 3.6579e-01, 3.4625e-01, 5.8710e-01, + 5.9811e-01, 1.8949e-01, 3.9311e-02, 3.9617e-01, + 5.0071e-01, 3.7570e-01, 2.4552e-01, 2.6117e-01, + 9.3254e-01, 5.2420e-01, 5.7593e-01, 1.4900e-01, + 5.4496e-01, 5.2456e-01, 3.9216e-01, 8.1017e-01, + 5.2870e-01, 1.2899e-01, 1.9976e-01, 7.0247e-01, + 9.7196e-01, 3.5450e-02, 9.8821e-01, 4.5041e-01, + 6.9886e-01, 9.3043e-01, 8.8904e-01, 3.0349e-01, + 7.1390e-01, 3.5820e-01, 9.0010e-01, 3.6511e-01, + 8.6431e-01, 2.6833e-01, 6.5077e-01, 4.3159e-01, + 2.3637e-03, 3.6962e-01, 6.8563e-01, 3.9226e-01, + 4.2306e-01, 9.3246e-01, 6.2636e-01, 6.6569e-01, + 5.1769e-01, 1.0954e-01, 6.6563e-01, 5.6084e-01, + 6.2615e-01, 1.9512e-01, 6.7166e-01, 3.0464e-01, + 8.1481e-01, 2.0383e-01, 6.8527e-01, 6.0140e-01, + 9.1940e-01, 2.9010e-01, 1.9323e-01, 7.5308e-01, + 5.1655e-01, 7.7545e-01, 4.0819e-01, 
4.6914e-01, + 1.0762e-01, 5.9083e-01, 4.1005e-02, 8.1673e-01, + 1.7977e-01, 4.4015e-01, 4.6866e-01, 6.1661e-01, + 8.0717e-01, 2.2196e-01, 3.1843e-01, 1.2570e-01, + 3.8681e-01, 5.1235e-02, 8.9325e-01, 9.5700e-01, + 9.3673e-01, 4.1681e-01, 4.6472e-01, 9.2014e-01, + 6.3272e-01, 6.6815e-01, 8.7993e-01, 4.9261e-01, + 4.1488e-01, 4.1331e-01, 8.3137e-01, 1.4267e-01, + 6.7118e-01, 7.3294e-01, 4.6344e-01, 3.4125e-01, + 8.1212e-01, 5.9438e-01, 9.2301e-01, 9.4274e-01, + 4.2425e-01, 2.0890e-01, 8.1161e-01, 6.6813e-01, + 7.7586e-01, 1.7382e-01, 4.0429e-01, 1.3756e-01, + 4.8850e-01, 7.7635e-01, 3.1478e-01, 7.9157e-01, + 3.8260e-01, 9.7448e-01, 2.5158e-01, 2.5456e-01, + 3.7263e-01, 9.1997e-01, 7.1236e-01, 9.9155e-01, + 5.7040e-01, 8.1640e-01, 8.7423e-01, 5.7996e-01, + 5.8532e-01, 3.1681e-01, 5.1521e-01, 4.5605e-01, + 6.8051e-01, 7.1903e-01, 3.3910e-01, 2.6719e-01, + 3.5073e-01, 8.3548e-01, 1.2868e-01, 4.6235e-01, + 7.4859e-01, 4.4090e-01, 5.6777e-01, 7.4301e-01, + 8.9441e-01, 3.3698e-01, 1.4379e-01, 8.6005e-01, + 6.9398e-01, 7.0285e-04, 7.4110e-02, 1.9000e-01, + 3.7174e-02, 7.2348e-01, 2.9599e-01, 9.8261e-02, + 7.7121e-01, 6.8846e-01, 5.4729e-01, 5.8436e-01, + 3.4513e-01, 3.0918e-01, 2.8966e-01, 6.9551e-01, + 9.0674e-01, 2.9109e-01, 9.6780e-01, 9.4296e-01, + 8.6366e-01, 9.3170e-01, 1.1810e-01, 9.6329e-01, + 3.4415e-01, 3.3271e-01, 8.9077e-01, 4.5828e-01, + 7.8079e-01, 3.5690e-01, 6.8891e-01, 1.0958e-01, + 7.4747e-01, 6.1440e-01, 5.5642e-01, 6.7334e-01, + 9.5122e-01, 4.5693e-01, 6.5286e-01, 6.8427e-01, + 6.5640e-01, 1.3610e-01, 5.1817e-01, 4.5218e-01, + 4.9750e-01, 6.9949e-01, 1.8679e-01, 4.7306e-01, + 6.2149e-01, 6.3191e-01, 6.8227e-01, 9.5340e-01, + 6.9214e-01, 4.8327e-01, 6.2025e-01, 7.2734e-01, + 3.4948e-01, 9.1951e-01, 1.5345e-01, 9.4844e-01, + 7.9645e-01, 4.2942e-01, 3.0837e-01, 3.2774e-01, + 4.7750e-01, 3.2074e-01, 2.3827e-01, 9.4556e-01, + 8.5400e-01, 4.2681e-02, 9.5386e-01, 3.3202e-01, + 2.9276e-01, 1.8871e-01, 8.4606e-01, 6.1973e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7740, 0.9703, 0.1840, ..., 0.7477, 0.1526, 0.5369]) +tensor([0.7175, 0.8461, 0.0931, ..., 0.1554, 0.1137, 0.6137]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1405,389 +1295,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 11.093939065933228 seconds +Time: 10.3000328540802 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5850, 8366, 5592, 2122, 6679, 1664, 2469, 7640, 8756, - 2915, 7211, 1515, 5525, 1719, 3439, 4040, 8857, 5364, - 6132, 895, 3852, 156, 8811, 9741, 357, 3518, 4399, - 7286, 180, 9528, 5416, 6953, 1721, 2595, 7283, 9795, - 7825, 5079, 6282, 136, 4669, 6702, 3303, 3010, 2380, - 2256, 7997, 5883, 9682, 9926, 8146, 9663, 3941, 6670, - 9041, 817, 4692, 3045, 7544, 9848, 5442, 871, 3702, - 7376, 94, 6327, 3751, 7760, 7748, 920, 3358, 4678, - 8037, 93, 1218, 9719, 1249, 634, 196, 2894, 7487, - 1159, 8188, 332, 5966, 4396, 4268, 8665, 6886, 7804, - 7445, 4388, 7265, 7541, 8403, 5348, 9890, 2631, 5205, - 8499, 4507, 2643, 7756, 857, 4020, 8538, 7850, 4986, - 7312, 3766, 1567, 2852, 7235, 6927, 653, 379, 1077, - 6334, 4614, 1987, 3571, 6355, 1814, 6184, 9414, 2153, - 8882, 3562, 6625, 5036, 2967, 1233, 9339, 4935, 5597, - 9689, 4093, 5663, 8161, 6587, 3112, 612, 6031, 2698, - 3403, 9263, 9319, 8735, 104, 2810, 3672, 5639, 8827, - 5048, 8805, 1031, 9837, 9524, 6912, 3766, 3659, 7073, - 737, 797, 5226, 7104, 3103, 6515, 4992, 1665, 5226, - 6199, 6712, 6310, 7443, 3988, 2676, 2322, 244, 3312, - 4295, 2693, 9044, 9455, 7558, 2076, 364, 3468, 4685, - 2690, 1818, 1795, 8073, 8740, 3982, 5591, 7437, 4434, - 6272, 8601, 5896, 7206, 452, 124, 6062, 3722, 2858, - 3166, 6119, 1841, 8789, 9596, 5567, 5894, 7970, 5360, - 9933, 6197, 3790, 8303, 9150, 3759, 378, 6338, 8692, - 3037, 225, 8540, 8302, 3869, 1980, 7102, 3526, 84, - 2428, 5319, 4508, 9845, 2414, 8840, 833, 2752, 1575, - 8598, 7280, 7141, 2098, 7332, 9471, 7172, 6064, 5586, - 6496, 4719, 7246, 9857, 461, 1971, 1577, 2883, 521, - 887, 7601, 1096, 7587, 8032, 9839, 9759, 3694, 4208, - 2775, 4747, 9442, 6136, 6253, 7707, 4233, 955, 7815, - 9711, 2360, 974, 4097, 9684, 7356, 2815, 7178, 3217, - 6396, 7951, 6798, 3086, 4009, 4560, 5258, 4636, 5136, - 484, 5572, 9213, 7528, 8274, 5548, 1629, 8245, 2276, - 9250, 7073, 612, 1264, 8789, 9413, 9556, 4024, 8035, - 4389, 2075, 2015, 6780, 9946, 8182, 3173, 4875, 4374, - 7146, 6566, 1212, 3788, 6737, 1525, 4118, 597, 1164, - 95, 8578, 5382, 7, 4863, 8244, 8410, 5238, 6413, - 1176, 8621, 5893, 5470, 5285, 4907, 1576, 9176, 6701, - 5861, 5303, 5175, 2021, 3075, 7189, 9086, 6729, 1443, - 4021, 3184, 1282, 194, 3967, 741, 191, 6596, 1251, - 1613, 4577, 1550, 7988, 7486, 1702, 7074, 9193, 1036, - 2186, 5023, 2564, 6337, 940, 1519, 8948, 8449, 9328, - 479, 6279, 8562, 8923, 2685, 1574, 5617, 5722, 970, - 640, 8699, 1647, 1059, 8229, 1516, 892, 441, 4405, - 4899, 5037, 6072, 8409, 3457, 6678, 8778, 5168, 4725, - 1582, 2499, 3623, 3399, 2800, 6268, 2077, 192, 5047, - 1851, 3023, 9282, 7259, 5334, 5942, 1157, 8925, 7158, - 2680, 8224, 1863, 9792, 1845, 5045, 6307, 5364, 7590, - 3501, 3167, 5703, 4100, 6744, 3117, 6631, 2272, 3513, - 1946, 3024, 5825, 1185, 1348, 6559, 74, 4615, 3419, - 4395, 7611, 6167, 8539, 9714, 7761, 9431, 2318, 6641, - 1733, 4522, 5800, 6094, 9427, 1318, 4807, 6951, 6988, - 2813, 985, 7549, 9176, 9180, 4980, 2792, 4241, 7249, - 1163, 61, 7125, 2555, 6886, 8991, 196, 9270, 35, - 1529, 3941, 7793, 7718, 7033, 4030, 6810, 850, 9329, - 5979, 6544, 751, 4582, 8368, 565, 7754, 7052, 3290, - 8677, 983, 7257, 1909, 8475, 6667, 894, 6714, 9857, - 238, 8379, 8656, 2402, 83, 4439, 4472, 7734, 2074, - 3272, 618, 1715, 8742, 745, 2543, 7748, 5408, 6663, - 8178, 2886, 7594, 1196, 5498, 3894, 893, 8965, 6034, - 6443, 2520, 5737, 483, 2405, 4283, 6209, 1176, 448, - 606, 958, 8428, 
3541, 8984, 1463, 4865, 173, 2388, - 4515, 610, 3428, 834, 1897, 3416, 3506, 6850, 7650, - 1567, 2088, 4588, 3271, 6086, 2429, 5007, 1641, 3405, - 5664, 2262, 1925, 5269, 9056, 8204, 5139, 609, 3768, - 1372, 8780, 3638, 7690, 2428, 8787, 4274, 9543, 7138, - 8745, 2025, 668, 6826, 8790, 6413, 9920, 6540, 348, - 4430, 4748, 8976, 1596, 914, 2089, 5266, 7048, 2343, - 5663, 5852, 3682, 5650, 2554, 2359, 5522, 968, 4538, - 556, 900, 332, 6036, 4210, 6798, 9668, 7214, 3249, - 934, 7260, 1264, 9553, 9573, 1341, 5076, 7088, 723, - 8670, 6474, 3175, 9943, 1202, 328, 6087, 194, 8768, - 258, 7554, 9741, 2069, 3477, 9790, 8686, 2875, 4984, - 7848, 4751, 5568, 9759, 8560, 7437, 4239, 7813, 4618, - 6633, 990, 5961, 926, 7923, 4657, 2745, 2212, 9361, - 5154, 9425, 4873, 6600, 5372, 8037, 7620, 2502, 3781, - 7433, 6170, 819, 1336, 484, 3513, 4395, 19, 8130, - 7693, 6153, 6352, 4202, 7488, 1192, 4764, 7301, 6821, - 6865, 6976, 937, 1234, 1710, 549, 5075, 6580, 8729, - 4805, 1375, 3711, 1305, 4371, 3503, 8608, 3833, 3074, - 1072, 7655, 4667, 966, 192, 6852, 7586, 1491, 9585, - 8090, 3373, 1438, 2850, 8318, 8429, 9764, 8245, 7536, - 1862, 6568, 9277, 3906, 4401, 671, 9709, 9370, 6046, - 9211, 9673, 4331, 921, 674, 7979, 374, 7224, 8804, - 5223, 9172, 2758, 3516, 3647, 8604, 8106, 9070, 2337, - 3498, 3861, 2253, 9022, 3709, 4466, 2031, 9776, 7382, - 6631, 6983, 229, 9213, 4744, 5134, 2773, 3818, 2045, - 3684, 5192, 2488, 8503, 6845, 9997, 5752, 1325, 8942, - 352, 1614, 4161, 7427, 7988, 1814, 8864, 5443, 6505, - 9390, 1752, 5714, 9392, 1996, 5576, 5760, 390, 6435, - 7898, 7417, 7401, 1217, 7128, 1412, 6756, 7275, 5202, - 2409, 1660, 2736, 5263, 8336, 8044, 3978, 3020, 5519, - 7853, 8427, 824, 1792, 4254, 4572, 5695, 1587, 1988, - 4588, 728, 1627, 3761, 5276, 8300, 992, 8389, 3069, - 3780, 9133, 145, 9462, 7403, 1537, 9647, 5057, 3362, - 9039, 6376, 8446, 4900, 5977, 7345, 4098, 447, 1820, - 252, 9481, 6404, 6592, 1295, 8043, 4753, 3605, 3518, - 2606, 2287, 9068, 9762, 699, 1959, 5988, 6313, 7817, - 5094, 8321, 2706, 7762, 8810, 5984, 698, 2889, 3562, - 3511, 7298, 3188, 9974, 5668, 6020, 8731, 9606, 2958, - 8385, 4702, 6639, 3111, 8106, 914, 952, 5956, 6944, - 6389, 6930, 4511, 693, 2408, 8878, 4458, 6005, 4687, - 5262, 264, 7556, 7239, 6348, 319, 3127, 4870, 9830, - 6495, 9430, 82, 2845, 2158, 7553, 2485, 4718, 9487, - 9863, 2351, 9993, 9611, 7651, 6752, 9890, 4226, 7914, - 1090, 3994, 273, 9314, 2869, 8105, 8063, 8350, 5143, - 2771, 6914, 6289, 1069, 6603, 7055, 1101, 2035, 1775, - 4440, 1100, 9988, 6654, 8779, 3492, 65, 3068, 7601, - 6582, 5059, 9136, 6153, 4692, 6999, 662, 8797, 8102, - 5680, 9525, 2546, 5756, 4348, 7370, 557, 1546, 4320, - 5734]), - values=tensor([1.6341e-01, 5.4230e-01, 5.9412e-01, 6.2649e-01, - 6.1492e-01, 1.1463e-02, 8.8850e-01, 3.1827e-01, - 1.4850e-01, 1.7623e-01, 5.0631e-01, 8.7356e-01, - 2.6593e-01, 3.7335e-01, 6.7543e-01, 8.1424e-01, - 3.7460e-01, 6.9153e-01, 8.0907e-01, 8.3914e-01, - 9.7417e-02, 3.4410e-04, 1.3414e-01, 2.2155e-01, - 6.0112e-02, 8.4328e-01, 2.7848e-01, 1.4958e-01, - 7.9439e-01, 7.7172e-01, 2.7028e-01, 1.6852e-01, - 6.2794e-01, 3.7628e-01, 7.9275e-01, 2.0746e-01, - 6.5757e-02, 6.6970e-01, 1.3521e-02, 6.0790e-01, - 5.0451e-01, 9.2714e-01, 7.8707e-01, 4.1523e-01, - 5.5244e-01, 9.7376e-01, 6.7472e-01, 9.9321e-01, - 9.5176e-01, 6.2001e-01, 7.8968e-01, 3.0430e-01, - 8.2453e-01, 9.4042e-02, 5.1519e-01, 8.5738e-01, - 5.8535e-01, 8.7087e-01, 6.5326e-01, 1.7395e-01, - 3.0247e-01, 7.3945e-02, 4.1833e-01, 8.0312e-01, - 8.8320e-01, 5.1558e-01, 2.5666e-01, 5.6112e-01, - 
5.5084e-01, 7.0649e-01, 3.5059e-01, 7.8879e-01, - 5.2108e-02, 9.8239e-01, 3.4744e-01, 5.8319e-01, - 9.1928e-02, 4.9802e-02, 8.4140e-01, 2.0825e-01, - 6.5549e-01, 3.3967e-02, 4.5861e-01, 3.1781e-01, - 7.2623e-01, 4.3890e-01, 9.1717e-01, 6.7930e-01, - 2.2711e-01, 5.3114e-01, 9.4610e-01, 1.2012e-01, - 3.8896e-01, 9.0657e-01, 8.7514e-01, 9.4601e-01, - 6.8092e-01, 4.2792e-01, 7.8983e-01, 7.0867e-01, - 6.8668e-01, 7.3584e-01, 4.8951e-01, 1.1601e-01, - 5.8369e-01, 8.1548e-02, 4.6998e-01, 8.2472e-01, - 8.1798e-01, 6.4792e-01, 7.9098e-01, 4.6883e-01, - 5.3531e-01, 9.9356e-01, 2.4036e-01, 4.8060e-01, - 2.8861e-01, 8.7768e-01, 7.6854e-01, 2.6962e-01, - 9.4503e-01, 7.5491e-01, 6.3333e-01, 3.9724e-01, - 6.1673e-01, 9.9300e-01, 8.4452e-02, 6.1927e-01, - 7.8274e-01, 9.4288e-01, 5.5703e-01, 9.9018e-01, - 8.7030e-01, 6.7458e-01, 7.1377e-01, 3.9155e-01, - 4.0923e-01, 1.9269e-01, 7.8647e-01, 1.7260e-01, - 1.9351e-01, 1.6644e-02, 6.3159e-01, 4.2569e-01, - 3.5414e-01, 1.5447e-01, 6.0382e-01, 1.1039e-01, - 7.4996e-01, 9.7809e-01, 4.9703e-01, 6.8052e-02, - 4.0995e-03, 1.5993e-01, 2.5589e-01, 6.1582e-01, - 4.1141e-01, 8.3175e-01, 9.2280e-02, 6.6768e-01, - 2.1373e-01, 7.4338e-01, 3.8856e-01, 4.0554e-01, - 2.4237e-01, 6.8970e-01, 5.1570e-01, 2.2133e-04, - 8.5322e-01, 4.7984e-01, 4.1539e-01, 8.1641e-01, - 5.1215e-01, 7.5280e-01, 4.9588e-04, 8.3219e-01, - 1.3510e-01, 5.6800e-01, 8.8518e-01, 9.6846e-01, - 2.4576e-01, 9.1717e-01, 4.6194e-01, 4.1655e-01, - 8.5447e-01, 1.2544e-01, 6.0240e-01, 5.7586e-01, - 2.2428e-01, 4.5450e-01, 2.4531e-01, 3.5914e-01, - 5.8131e-01, 4.4973e-01, 3.0343e-01, 8.7101e-01, - 5.4478e-01, 5.0386e-01, 6.8135e-01, 4.2381e-01, - 3.1555e-02, 3.0972e-01, 2.4608e-01, 1.9970e-01, - 4.6368e-01, 9.9803e-01, 8.2862e-01, 8.5141e-01, - 8.7867e-01, 4.8625e-01, 5.2020e-01, 4.1959e-01, - 1.7473e-01, 2.5225e-01, 7.2666e-01, 9.2040e-01, - 1.8559e-01, 6.3520e-01, 4.5196e-01, 9.4681e-01, - 9.5216e-01, 1.0019e-01, 9.7707e-01, 4.5094e-01, - 5.0805e-01, 5.1974e-01, 5.3486e-01, 1.8077e-01, - 8.5364e-01, 7.2740e-01, 4.8004e-01, 6.8966e-01, - 2.0804e-01, 7.7792e-01, 6.3289e-01, 1.2722e-01, - 9.0620e-01, 4.9687e-01, 5.6847e-01, 1.3671e-01, - 4.0281e-01, 1.7219e-01, 1.9050e-01, 6.2485e-01, - 1.0260e-01, 2.5271e-02, 9.4031e-01, 9.4275e-01, - 7.8410e-03, 3.3465e-02, 3.6601e-01, 2.9329e-01, - 2.0289e-01, 8.1331e-01, 9.6038e-01, 7.8543e-01, - 8.1769e-01, 9.2929e-01, 5.1055e-01, 3.5358e-01, - 4.8515e-01, 3.5044e-01, 1.9924e-01, 8.1918e-01, - 2.9889e-01, 3.7500e-02, 2.3185e-01, 6.2795e-01, - 5.8909e-01, 1.2007e-01, 7.5640e-01, 8.0080e-01, - 8.4206e-01, 2.3327e-01, 8.8223e-01, 9.9512e-01, - 2.9164e-01, 4.3867e-01, 7.8315e-01, 9.6653e-01, - 4.0617e-01, 6.9039e-01, 4.3199e-01, 8.2813e-01, - 3.5563e-01, 9.1892e-01, 5.9521e-01, 8.9108e-01, - 3.7947e-01, 2.0591e-01, 6.7351e-01, 5.7368e-01, - 2.8254e-01, 6.7739e-02, 4.4276e-01, 9.7228e-01, - 4.6186e-01, 6.9010e-01, 8.4715e-01, 1.5150e-01, - 2.0227e-01, 3.6577e-01, 9.5994e-01, 2.5249e-01, - 6.5535e-02, 5.6708e-01, 7.2591e-01, 7.0050e-01, - 4.6753e-01, 9.1326e-02, 6.7496e-01, 5.8336e-01, - 3.1459e-01, 4.7441e-01, 6.5107e-01, 2.7592e-01, - 1.4742e-01, 4.5510e-01, 7.4301e-01, 4.8033e-01, - 1.4801e-01, 7.7726e-01, 9.3909e-01, 8.1948e-01, - 2.9724e-01, 8.1459e-01, 4.7430e-01, 3.8389e-01, - 5.8885e-02, 3.1119e-01, 5.3076e-01, 1.4421e-01, - 9.6859e-01, 9.4119e-01, 8.4274e-01, 8.1548e-02, - 4.2885e-02, 1.4390e-01, 9.9599e-01, 9.8519e-01, - 5.8954e-01, 4.9409e-01, 6.5081e-01, 4.7729e-01, - 7.1507e-01, 2.3377e-01, 3.5997e-01, 9.6579e-01, - 3.9773e-01, 2.1377e-02, 3.5134e-01, 6.4577e-01, - 
9.9705e-01, 4.8704e-01, 4.2052e-01, 3.9684e-01, - 9.2759e-01, 3.0995e-01, 3.1089e-01, 7.6678e-01, - 5.4422e-04, 9.2904e-01, 1.6571e-01, 9.8144e-01, - 7.6337e-01, 8.8536e-01, 1.6786e-01, 9.4677e-01, - 3.5525e-01, 3.5127e-01, 1.3314e-01, 8.7067e-01, - 2.8725e-01, 6.7870e-01, 3.3831e-01, 4.6605e-01, - 7.4196e-01, 8.8785e-01, 9.6258e-01, 7.7168e-01, - 4.8538e-01, 2.0843e-01, 6.4007e-01, 1.9033e-01, - 3.4627e-01, 1.1059e-01, 7.1554e-01, 5.5574e-01, - 7.6910e-01, 7.3835e-01, 2.8739e-01, 6.9284e-01, - 7.5175e-02, 9.9640e-01, 9.1137e-01, 2.2022e-01, - 4.9205e-01, 9.0083e-01, 5.1433e-01, 4.3040e-01, - 3.8550e-01, 4.9743e-01, 2.3713e-01, 2.5804e-01, - 8.8147e-02, 9.0523e-02, 8.5804e-01, 1.4407e-01, - 3.2087e-01, 6.9830e-01, 5.0311e-02, 5.9795e-01, - 2.3588e-01, 9.7651e-01, 1.1260e-01, 6.7643e-01, - 7.7883e-01, 4.7514e-01, 7.6449e-01, 3.5045e-01, - 7.3614e-01, 8.8137e-01, 4.6469e-01, 4.9258e-01, - 8.1075e-01, 6.2703e-01, 1.2233e-01, 2.7440e-01, - 6.9122e-01, 7.3033e-01, 3.4342e-02, 1.2576e-01, - 8.0974e-01, 4.0072e-01, 6.0245e-01, 1.5077e-02, - 4.1940e-01, 2.0633e-01, 7.1347e-01, 8.3114e-01, - 3.1728e-01, 3.9989e-01, 6.0468e-01, 2.0407e-01, - 8.7073e-01, 4.3857e-01, 8.9954e-02, 6.5643e-01, - 5.8418e-01, 3.5790e-01, 6.7886e-01, 9.5451e-01, - 3.6580e-01, 3.0585e-01, 7.6034e-01, 6.5825e-02, - 2.9720e-01, 3.1525e-01, 3.2830e-01, 7.9663e-02, - 7.4797e-01, 4.5439e-01, 4.4041e-01, 6.1706e-01, - 1.4869e-01, 3.7743e-01, 6.0526e-01, 7.4958e-01, - 1.3105e-01, 3.9610e-01, 8.5301e-01, 8.1549e-01, - 8.5854e-02, 7.4522e-01, 7.4950e-01, 3.7042e-01, - 2.4261e-01, 1.7942e-01, 8.2764e-01, 1.6097e-01, - 2.2542e-01, 5.2571e-01, 1.0887e-01, 3.8000e-02, - 8.1497e-01, 8.0201e-01, 9.4936e-01, 2.6775e-01, - 9.4312e-01, 9.3715e-01, 6.7901e-02, 4.0906e-01, - 1.5251e-01, 2.4190e-01, 2.4089e-01, 2.6151e-01, - 6.2245e-01, 4.6082e-01, 5.0570e-01, 2.5937e-01, - 3.5881e-01, 2.2385e-01, 2.8770e-02, 1.7716e-01, - 5.4713e-02, 5.1027e-01, 3.1178e-02, 6.5533e-01, - 2.2465e-01, 6.9285e-01, 2.5103e-01, 3.0570e-01, - 3.7755e-03, 8.7853e-01, 6.5763e-01, 7.6528e-02, - 9.2357e-01, 9.3172e-01, 1.4133e-01, 9.0949e-01, - 3.7744e-01, 2.6587e-01, 8.7043e-01, 1.0112e-02, - 2.8448e-01, 7.2542e-02, 1.5147e-01, 2.2166e-01, - 9.7594e-01, 3.2101e-01, 3.2571e-01, 9.1645e-01, - 5.1688e-01, 9.8729e-02, 3.8775e-01, 7.4951e-01, - 2.4091e-01, 2.2275e-01, 8.9300e-01, 4.1567e-01, - 8.1383e-01, 7.1688e-01, 7.5982e-01, 9.9028e-02, - 1.0199e-01, 1.7657e-01, 8.1854e-01, 9.5101e-01, - 8.6789e-01, 7.9254e-01, 9.6088e-01, 8.2911e-01, - 7.9367e-01, 5.3301e-01, 8.6699e-02, 6.5008e-01, - 2.4111e-01, 4.0949e-01, 7.9876e-01, 6.0784e-01, - 8.6621e-01, 7.1156e-01, 3.6004e-01, 1.5720e-02, - 6.4658e-01, 3.4960e-01, 8.6761e-01, 8.2966e-01, - 9.8307e-01, 1.8464e-01, 5.4622e-01, 2.8174e-01, - 4.2027e-01, 6.2481e-01, 2.5893e-01, 5.8887e-01, - 8.4609e-01, 2.6754e-01, 5.9884e-01, 1.1711e-01, - 6.7826e-01, 2.8852e-01, 6.4017e-01, 1.2806e-02, - 3.1902e-03, 4.7820e-01, 8.9185e-01, 5.7234e-02, - 6.7506e-01, 8.4710e-01, 6.0623e-01, 2.2402e-01, - 4.0295e-01, 6.4904e-01, 9.2718e-01, 3.1307e-01, - 6.5796e-01, 3.5345e-01, 1.9087e-01, 2.1269e-01, - 5.7823e-01, 3.9982e-01, 4.3154e-01, 6.7568e-01, - 6.8414e-01, 2.4388e-01, 8.7462e-01, 4.2036e-02, - 3.5337e-01, 5.3395e-01, 9.2982e-01, 8.6034e-01, - 9.3195e-01, 6.8440e-01, 2.5818e-01, 7.2423e-01, - 1.2544e-01, 2.7728e-01, 6.8784e-01, 6.0665e-01, - 1.3165e-01, 8.5017e-01, 4.3265e-03, 8.0981e-01, - 7.9983e-01, 3.2596e-01, 4.9482e-01, 1.1663e-01, - 4.6665e-01, 9.3435e-01, 7.9541e-01, 7.1414e-01, - 5.3506e-02, 4.0912e-01, 6.8238e-01, 3.4773e-01, - 
5.6049e-01, 7.1487e-01, 2.1138e-01, 8.1397e-01, - 5.4932e-01, 2.0457e-02, 5.6340e-02, 5.7141e-02, - 4.6515e-01, 1.4683e-01, 7.4366e-01, 4.1761e-02, - 5.1584e-02, 1.2634e-01, 4.6336e-01, 5.9261e-01, - 5.6130e-01, 7.8972e-02, 7.9983e-01, 2.2448e-01, - 1.8797e-01, 2.5057e-01, 7.7180e-01, 4.0901e-01, - 3.7017e-01, 4.0182e-01, 8.7125e-01, 2.8655e-01, - 1.4119e-01, 4.3303e-01, 6.6902e-01, 1.5943e-01, - 2.5892e-01, 5.0928e-01, 6.7761e-01, 3.2212e-02, - 3.0824e-01, 9.2674e-02, 3.6193e-02, 4.4219e-01, - 1.6390e-01, 5.9400e-01, 8.2224e-01, 6.9900e-01, - 5.2481e-01, 9.5165e-01, 4.4474e-01, 2.4445e-01, - 1.0981e-01, 9.3663e-01, 8.4897e-01, 9.5652e-01, - 4.6435e-01, 1.7098e-01, 5.9974e-01, 4.3018e-01, - 2.1721e-01, 3.8205e-01, 2.6684e-01, 2.6286e-01, - 8.1812e-01, 5.7012e-01, 6.3330e-01, 6.6896e-02, - 8.6486e-02, 5.7371e-01, 2.1333e-01, 5.1711e-01, - 3.0138e-01, 2.1686e-01, 9.4270e-01, 3.0485e-01, - 4.4156e-01, 9.5896e-01, 6.0745e-01, 1.5570e-01, - 4.1088e-01, 3.7352e-01, 3.6528e-01, 1.2019e-01, - 7.4565e-02, 5.5689e-01, 5.1790e-01, 1.3767e-01, - 1.2340e-01, 9.6204e-01, 1.6892e-01, 2.9572e-01, - 2.1066e-01, 3.3179e-01, 3.3274e-01, 8.4143e-01, - 8.3193e-01, 7.2407e-01, 7.0281e-01, 6.8041e-01, - 8.2730e-01, 1.8872e-01, 2.6244e-01, 2.1234e-01, - 7.4230e-01, 2.6976e-01, 6.3711e-01, 4.3716e-01, - 6.9298e-01, 8.1356e-01, 6.0939e-01, 7.0288e-01, - 3.5558e-01, 9.2964e-01, 1.1833e-01, 4.6906e-01, - 8.0664e-01, 4.8526e-01, 9.6169e-01, 4.1867e-01, - 8.3224e-01, 9.6285e-01, 9.5262e-01, 8.6424e-01, - 4.9817e-01, 5.4699e-01, 6.1704e-01, 3.9186e-01, - 6.8935e-01, 5.9439e-01, 7.3227e-01, 4.1190e-01, - 9.0233e-01, 3.2790e-01, 4.3206e-01, 5.5893e-01, - 5.8605e-02, 9.4473e-01, 2.4772e-01, 3.1700e-01, - 7.7245e-01, 5.5508e-01, 7.4692e-01, 1.2111e-01, - 8.0345e-01, 4.4160e-01, 6.1610e-01, 4.0302e-01, - 7.2755e-01, 5.2552e-01, 5.2678e-01, 1.3783e-01, - 6.7301e-01, 8.4666e-02, 9.7543e-01, 5.6391e-01, - 6.8981e-01, 5.0252e-01, 3.1345e-01, 8.3631e-01, - 5.1942e-01, 4.1463e-01, 3.8551e-01, 5.6612e-01, - 9.0226e-02, 4.5281e-01, 1.3323e-01, 8.3660e-01, - 3.3939e-01, 2.9707e-01, 3.8862e-01, 7.2849e-01, - 8.2419e-01, 6.6920e-01, 4.7306e-01, 8.6893e-01, - 7.5703e-01, 5.7027e-01, 5.4140e-01, 8.8231e-01, - 3.8567e-02, 3.5538e-01, 6.1680e-01, 5.9111e-01, - 4.5875e-01, 6.0458e-01, 7.5967e-02, 3.2091e-01, - 3.9756e-01, 5.3389e-01, 4.5029e-01, 1.5185e-01, - 6.9442e-01, 7.6304e-01, 4.6109e-01, 5.2312e-02, - 7.7110e-01, 4.8813e-01, 1.8485e-01, 9.4212e-01, - 2.0752e-01, 2.7050e-01, 8.0133e-01, 5.6501e-01, - 7.0480e-01, 3.7931e-01, 4.9377e-01, 7.7738e-01, - 6.5307e-01, 5.0050e-01, 1.0036e-01, 3.6651e-01, - 4.6488e-01, 6.5613e-01, 3.1970e-01, 5.0090e-01, - 2.3238e-01, 4.7361e-01, 4.2273e-01, 1.9837e-01, - 4.1443e-01, 2.8358e-01, 5.9909e-01, 4.6853e-01, - 1.5862e-01, 5.4860e-01, 9.4413e-01, 6.4800e-03, - 2.0553e-01, 6.7561e-01, 1.6860e-01, 5.9291e-01, - 2.2881e-01, 3.9992e-01, 6.9442e-01, 7.9259e-01, - 6.9638e-01, 7.6941e-01, 2.3646e-01, 7.2314e-01, - 1.4276e-01, 4.5362e-01, 5.5788e-01, 2.2118e-01, - 3.1804e-01, 6.9545e-01, 4.2483e-01, 3.3560e-01, - 3.3597e-01, 4.0398e-02, 1.9135e-01, 5.4384e-01, - 5.9982e-01, 9.7372e-01, 1.5970e-01, 6.0079e-01, - 5.9714e-01, 9.5044e-01, 5.0232e-01, 3.6790e-01, - 7.3455e-01, 8.0673e-01, 4.0026e-01, 6.3795e-02, - 8.2192e-01, 1.8676e-01, 1.9070e-01, 6.2954e-02, - 1.8591e-01, 1.6582e-01, 8.1810e-01, 1.1599e-02, - 4.7558e-01, 8.4737e-01, 7.5306e-01, 1.2992e-01, - 1.5127e-01, 2.0163e-01, 4.6257e-01, 3.4956e-01, - 9.0495e-01, 2.1175e-03, 9.4719e-01, 3.1681e-01, - 2.0418e-01, 5.3479e-02, 3.5683e-01, 5.8247e-02, - 
9.3423e-01, 1.1090e-01, 4.3007e-01, 6.1117e-01, - 4.0593e-01, 1.1448e-02, 8.5302e-01, 2.5408e-01, - 9.1190e-01, 6.4738e-01, 2.7596e-01, 2.8235e-01, - 4.6522e-01, 8.7885e-01, 7.0320e-01, 4.2790e-01, - 2.9479e-02, 3.8500e-01, 3.9245e-01, 1.0004e-01, - 4.2397e-01, 7.1833e-01, 6.6614e-01, 4.6682e-02, - 5.8017e-01, 6.0782e-01, 4.8419e-01, 5.5802e-01, - 2.3916e-01, 1.4114e-01, 8.3739e-01, 1.0626e-01, - 5.1946e-01, 9.4847e-01, 4.1767e-01, 3.7856e-01, - 1.1090e-01, 1.5010e-01, 4.3945e-01, 7.5067e-02, - 9.8959e-01, 2.8002e-01, 5.0613e-01, 8.0707e-01, - 6.1595e-01, 8.2005e-01, 9.9749e-01, 1.1749e-01, - 6.5959e-01, 2.3371e-01, 8.3971e-01, 4.3270e-03, - 6.2581e-01, 8.0238e-01, 2.8393e-01, 7.0314e-01, - 2.0960e-01, 3.2954e-02, 6.5011e-01, 8.0206e-01, - 9.2215e-01, 8.1873e-01, 3.4350e-01, 2.8733e-01, - 1.9274e-01, 3.4014e-01, 3.0741e-01, 3.4144e-01, - 2.7448e-02, 7.6554e-01, 6.2323e-01, 3.0307e-01, - 4.5175e-01, 3.9421e-01, 8.5280e-01, 6.5476e-01, - 3.1057e-01, 3.6455e-01, 8.0890e-01, 2.7987e-01]), - size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.7740, 0.9703, 0.1840, ..., 0.7477, 0.1526, 0.5369]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 1000 -Density: 1e-05 -Time: 11.093939065933228 seconds - -[40.33, 40.0, 39.88, 40.14, 39.62, 39.53, 39.86, 39.53, 39.55, 39.82] -[98.79] -12.79684853553772 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 366482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.093939065933228, 'TIME_S_1KI': 0.0302714432521467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1264.2006668257713, 'W': 98.79} -[40.33, 40.0, 39.88, 40.14, 39.62, 39.53, 39.86, 39.53, 39.55, 39.82, 40.52, 39.54, 40.01, 39.91, 40.14, 39.87, 40.07, 39.9, 44.97, 39.44] -722.575 -36.128750000000004 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 366482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.093939065933228, 'TIME_S_1KI': 0.0302714432521467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1264.2006668257713, 'W': 98.79, 'J_1KI': 3.4495573229402026, 'W_1KI': 0.26956303447372587, 'W_D': 62.66125, 'J_D': 801.866525297463, 'W_D_1KI': 0.17098043014390885, 'J_D_1KI': 0.0004665452331735497} +[46.41, 39.72, 39.82, 39.5, 39.71, 39.86, 41.37, 39.83, 39.54, 39.79] +[97.68] +12.76127815246582 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 349528, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3000328540802, 'TIME_S_1KI': 0.029468405547138424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1246.5216499328615, 'W': 97.68} +[46.41, 39.72, 39.82, 39.5, 39.71, 39.86, 41.37, 39.83, 39.54, 39.79, 40.31, 39.5, 40.19, 39.88, 40.21, 39.51, 39.5, 39.4, 39.78, 39.7] +720.4250000000001 +36.02125 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 349528, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3000328540802, 'TIME_S_1KI': 0.029468405547138424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1246.5216499328615, 'W': 97.68, 'J_1KI': 3.566299838447453, 
'W_1KI': 0.27946258954933517, 'W_D': 61.658750000000005, 'J_D': 786.844459283352, 'W_D_1KI': 0.17640575290105515, 'J_D_1KI': 0.0005046970568911651} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.json index c87bd7a..0f29e81 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 305580, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.080953121185303, "TIME_S_1KI": 0.032989571049104334, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1230.7398206591606, "W": 98.25, "J_1KI": 4.027553572416914, "W_1KI": 0.32151973296681724, "W_D": 62.38325, "J_D": 781.450889741838, "W_D_1KI": 0.20414703187381372, "J_D_1KI": 0.0006680641137306555} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 316836, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.895925283432007, "TIME_S_1KI": 0.03438979561486702, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1245.413042907715, "W": 97.92, "J_1KI": 3.9307813597814483, "W_1KI": 0.3090557891148733, "W_D": 62.073, "J_D": 789.4865585417748, "W_D_1KI": 0.19591523690489715, "J_D_1KI": 0.0006183490414753915} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.output index 9e7563d..5bc678d 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.018548250198364258} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01957559585571289} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 5000, 5000, 5000]), - col_indices=tensor([1572, 8127, 3303, ..., 3635, 8012, 8701]), - values=tensor([0.7029, 0.2681, 0.5472, ..., 0.1372, 0.6564, 0.9870]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4998, 4999, 5000]), + col_indices=tensor([1720, 7937, 2049, ..., 9577, 9983, 1353]), + values=tensor([0.0345, 0.4506, 0.1189, ..., 0.6520, 0.2135, 0.0491]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.6228, 0.5154, 0.0077, ..., 0.6369, 0.2601, 0.0192]) +tensor([0.9951, 0.3552, 0.9985, ..., 0.5681, 0.6856, 0.6031]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.018548250198364258 seconds +Time: 0.01957559585571289 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '56609', '-ss', '10000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9451327323913574} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '53638', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.7775709629058838} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 4999, 5000]), - col_indices=tensor([3842, 387, 3686, ..., 4115, 6419, 2917]), - values=tensor([0.9231, 0.0215, 0.0697, ..., 0.2708, 0.2879, 0.7516]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4996, 4998, 5000]), + col_indices=tensor([4959, 918, 4053, ..., 9398, 67, 9521]), + values=tensor([0.3778, 0.3237, 0.9504, ..., 0.9486, 0.3785, 0.3072]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.2403, 0.0214, 0.1380, ..., 0.6094, 0.2095, 0.9923]) +tensor([0.9157, 0.0973, 0.6254, ..., 0.8224, 0.9770, 0.1355]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 1.9451327323913574 seconds +Time: 1.7775709629058838 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '305580', '-ss', '10000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.080953121185303} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '316836', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.895925283432007} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 4999, 5000]), - col_indices=tensor([1481, 9557, 9045, ..., 186, 1024, 519]), - values=tensor([0.0681, 0.8562, 0.9064, ..., 0.7770, 0.2010, 0.9088]), +tensor(crow_indices=tensor([ 0, 0, 3, ..., 4997, 4997, 5000]), + col_indices=tensor([2376, 3852, 7380, ..., 878, 7696, 9514]), + values=tensor([0.3306, 0.8481, 0.4988, ..., 0.7577, 0.2140, 0.3496]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.5005, 0.6529, 0.0782, ..., 0.6202, 0.1736, 0.9901]) +tensor([0.0988, 0.8059, 0.5662, ..., 0.3195, 0.3665, 0.4368]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.080953121185303 seconds +Time: 10.895925283432007 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 4999, 5000]), - col_indices=tensor([1481, 9557, 9045, ..., 186, 1024, 519]), - values=tensor([0.0681, 0.8562, 0.9064, ..., 0.7770, 0.2010, 0.9088]), +tensor(crow_indices=tensor([ 0, 0, 3, ..., 4997, 4997, 5000]), + col_indices=tensor([2376, 3852, 7380, ..., 878, 7696, 9514]), + values=tensor([0.3306, 0.8481, 0.4988, ..., 0.7577, 0.2140, 0.3496]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.5005, 0.6529, 0.0782, ..., 0.6202, 0.1736, 0.9901]) +tensor([0.0988, 0.8059, 0.5662, ..., 0.3195, 0.3665, 0.4368]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.080953121185303 seconds +Time: 10.895925283432007 seconds -[40.69, 39.86, 40.65, 39.74, 39.56, 39.52, 39.78, 39.47, 39.45, 39.45] -[98.25] -12.52661395072937 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 305580, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.080953121185303, 'TIME_S_1KI': 0.032989571049104334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.7398206591606, 'W': 98.25} -[40.69, 39.86, 40.65, 39.74, 39.56, 39.52, 39.78, 39.47, 39.45, 39.45, 40.13, 40.88, 39.46, 41.14, 39.99, 39.46, 39.46, 39.44, 39.59, 39.5] -717.335 -35.86675 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 305580, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.080953121185303, 'TIME_S_1KI': 0.032989571049104334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.7398206591606, 'W': 98.25, 'J_1KI': 4.027553572416914, 'W_1KI': 0.32151973296681724, 'W_D': 62.38325, 'J_D': 781.450889741838, 'W_D_1KI': 0.20414703187381372, 'J_D_1KI': 0.0006680641137306555} +[41.11, 39.47, 41.15, 40.05, 39.56, 39.44, 39.86, 39.66, 39.7, 39.8] +[97.92] +12.718678951263428 +{'CPU': 'Epyc 7313P', 'CORES': 16, 
'ITERATIONS': 316836, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.895925283432007, 'TIME_S_1KI': 0.03438979561486702, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.413042907715, 'W': 97.92} +[41.11, 39.47, 41.15, 40.05, 39.56, 39.44, 39.86, 39.66, 39.7, 39.8, 40.79, 39.83, 39.98, 39.4, 39.49, 39.43, 39.88, 39.48, 39.95, 39.52] +716.94 +35.847 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 316836, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.895925283432007, 'TIME_S_1KI': 0.03438979561486702, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1245.413042907715, 'W': 97.92, 'J_1KI': 3.9307813597814483, 'W_1KI': 0.3090557891148733, 'W_D': 62.073, 'J_D': 789.4865585417748, 'W_D_1KI': 0.19591523690489715, 'J_D_1KI': 0.0006183490414753915} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.json index 0a007ba..a1deb3a 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1273, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.290857076644897, "TIME_S_1KI": 8.083941144261507, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2248.191835269928, "W": 120.44000000000001, "J_1KI": 1766.0580009975868, "W_1KI": 94.61115475255303, "W_D": 84.17750000000001, "J_D": 1571.2983079826834, "W_D_1KI": 66.12529457973291, "J_D_1KI": 51.944457643152326} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1257, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.0600004196167, "TIME_S_1KI": 8.003182513617103, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2214.012642827034, "W": 119.14000000000001, "J_1KI": 1761.3465734503056, "W_1KI": 94.78122513922038, "W_D": 83.14675000000001, "J_D": 1545.1397994794252, "W_D_1KI": 66.14697692919651, "J_D_1KI": 52.622893340649576} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.output index 1b9241c..fb8d5a0 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,15 +1,15 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.824350118637085} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 
250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.8350811004638672} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 55, 100, ..., 24999899, - 24999953, 25000000]), - col_indices=tensor([ 1283, 31647, 40047, ..., 487577, 491974, - 492635]), - values=tensor([0.2687, 0.0076, 0.0743, ..., 0.5051, 0.4444, 0.1527]), +tensor(crow_indices=tensor([ 0, 56, 98, ..., 24999894, + 24999946, 25000000]), + col_indices=tensor([ 21390, 30692, 42474, ..., 464465, 482205, + 499540]), + values=tensor([0.3993, 0.3433, 0.4515, ..., 0.0260, 0.6112, 0.7953]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.2422, 0.7782, 0.2817, ..., 0.4809, 0.1219, 0.8722]) +tensor([0.3970, 0.7382, 0.9549, ..., 0.7739, 0.4106, 0.9250]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 0.824350118637085 seconds +Time: 0.8350811004638672 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1273', '-ss', '500000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.290857076644897} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1257', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.0600004196167} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 101, ..., 24999883, - 24999939, 25000000]), - col_indices=tensor([ 9313, 23523, 43031, ..., 488537, 498363, - 498593]), - values=tensor([0.9134, 0.4019, 0.3601, ..., 0.2723, 0.3306, 0.0527]), +tensor(crow_indices=tensor([ 0, 45, 99, ..., 24999907, + 24999959, 25000000]), + col_indices=tensor([ 7707, 18388, 21123, ..., 463120, 474527, + 488339]), + values=tensor([0.7704, 0.9806, 0.4404, ..., 0.7048, 0.0947, 0.7119]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.9993, 0.9766, 0.6194, ..., 0.0672, 0.4807, 0.1643]) +tensor([0.2271, 0.4870, 0.9892, ..., 0.5903, 0.4477, 0.1543]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.290857076644897 seconds +Time: 10.0600004196167 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 101, ..., 24999883, - 24999939, 25000000]), - col_indices=tensor([ 9313, 23523, 43031, ..., 488537, 498363, - 498593]), - values=tensor([0.9134, 0.4019, 0.3601, ..., 0.2723, 0.3306, 0.0527]), +tensor(crow_indices=tensor([ 0, 45, 99, ..., 24999907, + 24999959, 25000000]), + col_indices=tensor([ 7707, 18388, 21123, ..., 463120, 474527, + 488339]), + values=tensor([0.7704, 0.9806, 0.4404, ..., 0.7048, 0.0947, 0.7119]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.9993, 0.9766, 0.6194, ..., 0.0672, 0.4807, 0.1643]) +tensor([0.2271, 0.4870, 0.9892, ..., 0.5903, 0.4477, 0.1543]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.290857076644897 seconds +Time: 10.0600004196167 seconds -[40.99, 40.04, 40.78, 39.98, 40.28, 39.98, 39.98, 39.94, 40.19, 39.9] -[120.44] -18.66648817062378 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1273, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.290857076644897, 'TIME_S_1KI': 8.083941144261507, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2248.191835269928, 'W': 120.44000000000001} -[40.99, 40.04, 40.78, 39.98, 40.28, 39.98, 39.98, 39.94, 40.19, 39.9, 41.12, 40.02, 40.17, 39.62, 39.57, 39.49, 39.52, 45.4, 39.55, 39.47] -725.25 -36.2625 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1273, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.290857076644897, 'TIME_S_1KI': 8.083941144261507, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2248.191835269928, 'W': 120.44000000000001, 'J_1KI': 1766.0580009975868, 'W_1KI': 94.61115475255303, 'W_D': 84.17750000000001, 'J_D': 1571.2983079826834, 'W_D_1KI': 66.12529457973291, 'J_D_1KI': 51.944457643152326} +[40.4, 39.62, 39.65, 40.0, 39.69, 39.61, 40.44, 39.99, 40.11, 40.12] +[119.14] +18.583285570144653 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1257, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.0600004196167, 'TIME_S_1KI': 8.003182513617103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2214.012642827034, 'W': 119.14000000000001} +[40.4, 39.62, 39.65, 40.0, 39.69, 39.61, 40.44, 39.99, 40.11, 40.12, 41.13, 40.27, 39.63, 40.2, 39.68, 40.25, 40.02, 40.19, 39.95, 39.48] +719.865 +35.99325 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1257, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.0600004196167, 'TIME_S_1KI': 8.003182513617103, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2214.012642827034, 'W': 119.14000000000001, 'J_1KI': 1761.3465734503056, 'W_1KI': 94.78122513922038, 'W_D': 83.14675000000001, 'J_D': 1545.1397994794252, 'W_D_1KI': 66.14697692919651, 'J_D_1KI': 52.622893340649576} diff --git 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json index 22e8b8e..f01e1a9 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 21531, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.381713628768921, "TIME_S_1KI": 0.48217517202029264, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2048.753864378929, "W": 153.79, "J_1KI": 95.15367908499043, "W_1KI": 7.142724443825182, "W_D": 117.79275, "J_D": 1569.2070470012427, "W_D_1KI": 5.4708443639403646, "J_D_1KI": 0.25409151288562376} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 21626, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.539944410324097, "TIME_S_1KI": 0.4873737357959908, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2051.971848464012, "W": 154.45, "J_1KI": 94.88448388347416, "W_1KI": 7.141866272079903, "W_D": 118.43974999999999, "J_D": 1573.5515230761764, "W_D_1KI": 5.476729399796541, "J_D_1KI": 0.2532474521315334} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output index da8bd5c..fa5a239 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08279204368591309} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.07795858383178711} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 10, ..., 2499988, - 2499995, 2500000]), - col_indices=tensor([111354, 133493, 148601, ..., 214459, 291734, - 295580]), - values=tensor([0.7692, 0.5972, 0.6345, ..., 0.2595, 0.9828, 0.2512]), +tensor(crow_indices=tensor([ 0, 2, 4, ..., 2499985, + 2499990, 2500000]), + col_indices=tensor([187524, 340597, 50465, ..., 396691, 431780, + 493873]), + values=tensor([0.8920, 0.9244, 0.9041, ..., 0.0928, 0.3980, 0.2410]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1544, 0.9362, 0.6152, ..., 0.8648, 0.4518, 0.0330]) +tensor([0.0594, 0.5344, 0.6686, ..., 0.0375, 0.0136, 0.3063]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,41 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 0.08279204368591309 seconds +Time: 0.07795858383178711 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12682', '-ss', '500000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.184589385986328} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '13468', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.538946628570557} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 12, ..., 2499985, +tensor(crow_indices=tensor([ 0, 8, 9, ..., 2499991, + 2499997, 2500000]), + col_indices=tensor([ 29252, 48539, 177292, ..., 25321, 399663, + 441240]), + values=tensor([0.5601, 0.9391, 0.4233, ..., 0.6965, 0.5428, 0.9275]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4893, 0.1531, 0.9914, ..., 0.7153, 0.0010, 0.4806]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 6.538946628570557 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21626', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.539944410324097} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 10, ..., 2499985, 2499992, 2500000]), - col_indices=tensor([135614, 168986, 215859, ..., 402290, 443216, - 486549]), - values=tensor([0.4455, 0.2288, 0.5445, ..., 0.6029, 0.8332, 0.9959]), + col_indices=tensor([ 98671, 180976, 345489, ..., 382848, 425676, + 484883]), + values=tensor([0.5764, 0.7272, 0.4446, ..., 0.6586, 0.9064, 0.2568]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.5969, 0.6605, 0.1157, ..., 0.5750, 0.9019, 0.4949]) +tensor([0.7158, 0.9003, 0.8372, ..., 0.1054, 0.6378, 0.8680]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,20 +59,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 6.184589385986328 seconds - -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21531', '-ss', '500000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.381713628768921} +Time: 10.539944410324097 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 6, ..., 2499988, - 2499995, 2500000]), - col_indices=tensor([192037, 290494, 298239, ..., 203209, 269872, - 299833]), - values=tensor([0.2087, 0.5501, 0.3490, ..., 0.1907, 0.4204, 0.3032]), +tensor(crow_indices=tensor([ 0, 4, 10, ..., 2499985, + 2499992, 2500000]), + col_indices=tensor([ 98671, 180976, 345489, ..., 382848, 425676, + 484883]), + values=tensor([0.5764, 0.7272, 0.4446, ..., 0.6586, 0.9064, 0.2568]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3717, 0.6256, 0.6803, ..., 0.1727, 0.9290, 0.7130]) +tensor([0.7158, 0.9003, 0.8372, ..., 0.1054, 0.6378, 0.8680]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -59,31 +77,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.381713628768921 seconds +Time: 10.539944410324097 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 6, ..., 2499988, - 2499995, 2500000]), - col_indices=tensor([192037, 290494, 298239, ..., 203209, 269872, - 299833]), - values=tensor([0.2087, 0.5501, 0.3490, ..., 0.1907, 0.4204, 0.3032]), - size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3717, 0.6256, 0.6803, ..., 0.1727, 0.9290, 0.7130]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 2500000 -Density: 1e-05 -Time: 10.381713628768921 seconds - -[40.84, 39.87, 40.71, 40.09, 39.75, 39.79, 39.67, 39.6, 39.76, 40.21] -[153.79] -13.321762561798096 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21531, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.381713628768921, 'TIME_S_1KI': 0.48217517202029264, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2048.753864378929, 'W': 153.79} -[40.84, 39.87, 40.71, 40.09, 39.75, 39.79, 39.67, 39.6, 39.76, 40.21, 41.58, 40.07, 40.12, 40.08, 39.68, 39.76, 39.59, 39.74, 40.65, 39.4] -719.9449999999999 -35.997249999999994 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21531, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.381713628768921, 'TIME_S_1KI': 0.48217517202029264, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2048.753864378929, 'W': 153.79, 'J_1KI': 95.15367908499043, 'W_1KI': 7.142724443825182, 'W_D': 117.79275, 'J_D': 1569.2070470012427, 'W_D_1KI': 5.4708443639403646, 'J_D_1KI': 0.25409151288562376} +[40.43, 40.12, 40.51, 39.58, 39.78, 40.16, 39.92, 39.88, 39.89, 39.65] +[154.45] +13.285670757293701 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21626, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.539944410324097, 'TIME_S_1KI': 0.4873737357959908, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2051.971848464012, 'W': 154.45} +[40.43, 40.12, 40.51, 39.58, 39.78, 40.16, 39.92, 39.88, 39.89, 39.65, 40.17, 39.98, 41.76, 39.72, 40.03, 40.16, 39.55, 39.35, 39.92, 39.54] +720.2049999999999 +36.01025 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21626, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.539944410324097, 'TIME_S_1KI': 0.4873737357959908, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2051.971848464012, 'W': 154.45, 'J_1KI': 94.88448388347416, 'W_1KI': 7.141866272079903, 'W_D': 118.43974999999999, 'J_D': 1573.5515230761764, 'W_D_1KI': 5.476729399796541, 'J_D_1KI': 0.2532474521315334} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.json index 2972d33..1829510 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2288, "MATRIX_TYPE": "synthetic", 
"MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.664037466049194, "TIME_S_1KI": 4.660855535860661, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2026.948439025879, "W": 124.48, "J_1KI": 885.9040380357864, "W_1KI": 54.40559440559441, "W_D": 87.53675000000001, "J_D": 1425.389450272322, "W_D_1KI": 38.25906905594406, "J_D_1KI": 16.721621090884643} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2237, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.043452024459839, "TIME_S_1KI": 4.489696926446061, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1969.1841315150261, "W": 123.27, "J_1KI": 880.2790038064488, "W_1KI": 55.10505140813589, "W_D": 87.08525, "J_D": 1391.1486362376809, "W_D_1KI": 38.92948144836835, "J_D_1KI": 17.402539762346155} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.output index 16454d4..6208bf5 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.45888853073120117} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.4693167209625244} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 24, 46, ..., 12499954, - 12499981, 12500000]), - col_indices=tensor([ 49072, 112972, 116125, ..., 361100, 370525, - 412609]), - values=tensor([0.2354, 0.3643, 0.0075, ..., 0.8603, 0.9033, 0.4787]), +tensor(crow_indices=tensor([ 0, 25, 50, ..., 12499953, + 12499973, 12500000]), + col_indices=tensor([ 4880, 16891, 23384, ..., 468600, 493050, + 493066]), + values=tensor([0.6197, 0.4687, 0.7888, ..., 0.4796, 0.1972, 0.6492]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.8534, 0.6179, 0.0838, ..., 0.4832, 0.1451, 0.6650]) +tensor([0.1250, 0.0763, 0.3340, ..., 0.1944, 0.6244, 0.7099]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 0.45888853073120117 seconds +Time: 0.4693167209625244 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2288', '-ss', '500000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.664037466049194} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2237', '-ss', '500000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.043452024459839} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 27, 48, ..., 12499952, - 12499972, 12500000]), - col_indices=tensor([ 3842, 7633, 8971, ..., 455163, 462741, - 476944]), - values=tensor([0.8075, 0.4724, 0.8976, ..., 0.5541, 0.2969, 0.9431]), +tensor(crow_indices=tensor([ 0, 22, 43, ..., 12499965, + 12499980, 12500000]), + col_indices=tensor([ 4985, 20152, 29473, ..., 475854, 476409, + 499068]), + values=tensor([0.8596, 0.1215, 0.4132, ..., 0.1501, 0.7884, 0.5323]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.0930, 0.1654, 0.5776, ..., 0.1397, 0.2168, 0.6873]) +tensor([0.6345, 0.7237, 0.9663, ..., 0.0286, 0.9659, 0.0785]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.664037466049194 seconds +Time: 10.043452024459839 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 27, 48, ..., 12499952, - 12499972, 12500000]), - col_indices=tensor([ 3842, 7633, 8971, ..., 455163, 462741, - 476944]), - values=tensor([0.8075, 0.4724, 0.8976, ..., 0.5541, 0.2969, 0.9431]), +tensor(crow_indices=tensor([ 0, 22, 43, ..., 12499965, + 12499980, 12500000]), + col_indices=tensor([ 4985, 20152, 29473, ..., 475854, 476409, + 499068]), + values=tensor([0.8596, 0.1215, 0.4132, ..., 0.1501, 0.7884, 0.5323]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.0930, 0.1654, 0.5776, ..., 0.1397, 0.2168, 0.6873]) +tensor([0.6345, 0.7237, 0.9663, ..., 0.0286, 0.9659, 0.0785]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.664037466049194 seconds +Time: 10.043452024459839 seconds -[40.58, 39.64, 39.65, 39.68, 39.54, 40.08, 40.01, 55.98, 39.52, 39.5] -[124.48] -16.283326148986816 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2288, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.664037466049194, 'TIME_S_1KI': 4.660855535860661, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2026.948439025879, 'W': 124.48} -[40.58, 39.64, 39.65, 39.68, 39.54, 40.08, 40.01, 55.98, 39.52, 39.5, 40.22, 40.13, 39.62, 39.51, 44.89, 40.37, 40.29, 40.46, 39.51, 39.67] -738.865 -36.94325 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2288, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.664037466049194, 'TIME_S_1KI': 4.660855535860661, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2026.948439025879, 'W': 124.48, 'J_1KI': 885.9040380357864, 'W_1KI': 54.40559440559441, 'W_D': 87.53675000000001, 'J_D': 1425.389450272322, 'W_D_1KI': 38.25906905594406, 'J_D_1KI': 16.721621090884643} +[41.8, 39.96, 40.04, 39.89, 39.75, 39.54, 39.48, 39.64, 39.96, 39.86] +[123.27] +15.974560976028442 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2237, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.043452024459839, 'TIME_S_1KI': 4.489696926446061, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1969.1841315150261, 'W': 123.27} +[41.8, 39.96, 40.04, 39.89, 39.75, 39.54, 39.48, 39.64, 39.96, 39.86, 41.14, 39.7, 39.69, 39.51, 40.0, 40.34, 39.59, 40.7, 43.54, 41.93] +723.695 +36.18475 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2237, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.043452024459839, 'TIME_S_1KI': 4.489696926446061, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1969.1841315150261, 'W': 123.27, 'J_1KI': 880.2790038064488, 'W_1KI': 55.10505140813589, 'W_D': 87.08525, 'J_D': 1391.1486362376809, 'W_D_1KI': 38.92948144836835, 'J_D_1KI': 17.402539762346155} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json index f1d4670..c8d1576 100644 
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 94004, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.384105920791626, "TIME_S_1KI": 0.12110235650388947, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1783.8070621061327, "W": 114.34, "J_1KI": 18.975863389920992, "W_1KI": 1.2163312199480873, "W_D": 78.24925, "J_D": 1220.758831157148, "W_D_1KI": 0.8324034083656016, "J_D_1KI": 0.008854978600544674} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 99811, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.109006404876709, "TIME_S_1KI": 0.11130042184605614, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1630.7188550758362, "W": 118.48, "J_1KI": 16.338067498330208, "W_1KI": 1.1870435122381302, "W_D": 82.57724999999999, "J_D": 1136.5654842615722, "W_D_1KI": 0.8273361653525162, "J_D_1KI": 0.008289027916286945} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output index 0b0a270..944fbec 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04173541069030762} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03710818290710449} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 9, ..., 249994, 249996, +tensor(crow_indices=tensor([ 0, 7, 11, ..., 249988, 249993, 250000]), - col_indices=tensor([ 2875, 11250, 41033, ..., 32140, 46339, 48534]), - values=tensor([0.9791, 0.8918, 0.3698, ..., 0.3708, 0.0646, 0.7857]), + col_indices=tensor([ 1823, 2401, 20507, ..., 29927, 35717, 41362]), + values=tensor([0.7021, 0.2937, 0.0368, ..., 0.8013, 0.6342, 0.2053]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.2100, 0.1946, 0.2511, ..., 0.6374, 0.0985, 0.4430]) +tensor([0.5957, 0.0820, 0.5341, ..., 0.0547, 0.1765, 0.4646]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.04173541069030762 seconds +Time: 0.03710818290710449 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25158', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.8100624084472656} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28295', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.9765748977661133} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2, 7, ..., 249990, 249993, +tensor(crow_indices=tensor([ 0, 6, 14, ..., 249988, 249993, 250000]), - col_indices=tensor([41615, 42906, 15488, ..., 31340, 31947, 35417]), - values=tensor([0.0772, 0.0729, 0.2688, ..., 0.4463, 0.5032, 0.2162]), + col_indices=tensor([ 5249, 20961, 36115, ..., 24265, 39085, 41781]), + values=tensor([0.9608, 0.3689, 0.1001, ..., 0.3784, 0.3303, 0.4761]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.5388, 0.5198, 0.1392, ..., 0.7254, 0.7688, 0.9922]) +tensor([0.0189, 0.3335, 0.9964, ..., 0.1565, 0.8638, 0.1091]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 2.8100624084472656 seconds +Time: 2.9765748977661133 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '94004', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.384105920791626} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '99811', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.109006404876709} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 13, ..., 249985, 249990, +tensor(crow_indices=tensor([ 0, 6, 13, ..., 249992, 249998, 250000]), - col_indices=tensor([ 2939, 14473, 20084, ..., 45023, 47616, 49448]), - values=tensor([0.6894, 0.8051, 0.8240, ..., 0.7425, 0.1769, 0.4023]), + col_indices=tensor([12128, 14735, 23784, ..., 48408, 17109, 32963]), + values=tensor([0.9788, 0.4966, 0.1177, ..., 0.0710, 0.0367, 0.3291]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4931, 0.5539, 0.3107, ..., 0.8523, 0.9706, 0.6879]) +tensor([0.8833, 0.9491, 0.8566, ..., 0.2783, 0.1791, 0.8533]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 11.384105920791626 seconds +Time: 11.109006404876709 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 13, ..., 249985, 249990, +tensor(crow_indices=tensor([ 0, 6, 13, ..., 249992, 249998, 250000]), - col_indices=tensor([ 2939, 14473, 20084, ..., 45023, 47616, 49448]), - values=tensor([0.6894, 0.8051, 0.8240, ..., 0.7425, 0.1769, 0.4023]), + col_indices=tensor([12128, 14735, 23784, ..., 48408, 17109, 32963]), + values=tensor([0.9788, 0.4966, 0.1177, ..., 0.0710, 0.0367, 0.3291]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4931, 0.5539, 0.3107, ..., 0.8523, 0.9706, 0.6879]) +tensor([0.8833, 0.9491, 0.8566, ..., 0.2783, 0.1791, 0.8533]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 11.384105920791626 seconds +Time: 11.109006404876709 seconds -[40.36, 39.91, 39.67, 39.83, 39.58, 39.72, 40.16, 39.72, 39.91, 39.98] -[114.34] -15.600901365280151 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 94004, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.384105920791626, 'TIME_S_1KI': 0.12110235650388947, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1783.8070621061327, 'W': 114.34} -[40.36, 39.91, 39.67, 39.83, 39.58, 39.72, 40.16, 39.72, 39.91, 39.98, 40.89, 39.87, 39.99, 39.51, 39.5, 39.56, 39.75, 45.01, 39.69, 39.64] -721.815 -36.09075 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 94004, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.384105920791626, 'TIME_S_1KI': 0.12110235650388947, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1783.8070621061327, 'W': 114.34, 'J_1KI': 18.975863389920992, 'W_1KI': 1.2163312199480873, 'W_D': 78.24925, 'J_D': 1220.758831157148, 'W_D_1KI': 0.8324034083656016, 'J_D_1KI': 0.008854978600544674} +[40.97, 40.76, 39.98, 39.9, 39.51, 39.39, 40.3, 39.56, 39.43, 40.13] +[118.48] 
+13.763663530349731 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 99811, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.109006404876709, 'TIME_S_1KI': 0.11130042184605614, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1630.7188550758362, 'W': 118.48} +[40.97, 40.76, 39.98, 39.9, 39.51, 39.39, 40.3, 39.56, 39.43, 40.13, 40.86, 39.49, 40.21, 40.32, 39.93, 39.79, 39.93, 39.38, 39.42, 39.55] +718.0550000000001 +35.902750000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 99811, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.109006404876709, 'TIME_S_1KI': 0.11130042184605614, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1630.7188550758362, 'W': 118.48, 'J_1KI': 16.338067498330208, 'W_1KI': 1.1870435122381302, 'W_D': 82.57724999999999, 'J_D': 1136.5654842615722, 'W_D_1KI': 0.8273361653525162, 'J_D_1KI': 0.008289027916286945} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json index ca129d7..0ed93bf 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 46418, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.356630086898804, "TIME_S_1KI": 0.22311668074666732, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1954.5975055408476, "W": 147.82, "J_1KI": 42.10861100307742, "W_1KI": 3.1845404799862123, "W_D": 111.69725, "J_D": 1476.952822525859, "W_D_1KI": 2.4063348270067646, "J_D_1KI": 0.051840553815476} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 45970, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.176043510437012, "TIME_S_1KI": 0.22136270416439008, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1947.0663536453249, "W": 147.36, "J_1KI": 42.355152352519575, "W_1KI": 3.2055688492495107, "W_D": 110.93175000000002, "J_D": 1465.7402142779233, "W_D_1KI": 2.413133565368719, "J_D_1KI": 0.052493660329969966} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output index 0dda590..40eeb90 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06997370719909668} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], 
"MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07438278198242188} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 52, 102, ..., 2499883, - 2499945, 2500000]), - col_indices=tensor([ 266, 347, 3014, ..., 46062, 47055, 47354]), - values=tensor([0.8937, 0.7241, 0.1967, ..., 0.6923, 0.3348, 0.4624]), +tensor(crow_indices=tensor([ 0, 43, 95, ..., 2499906, + 2499957, 2500000]), + col_indices=tensor([ 93, 772, 1278, ..., 48633, 48867, 48966]), + values=tensor([0.9058, 0.1037, 0.2766, ..., 0.4493, 0.5334, 0.4126]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3743, 0.1265, 0.3703, ..., 0.6234, 0.9781, 0.6963]) +tensor([0.1532, 0.4835, 0.1070, ..., 0.8056, 0.9315, 0.2414]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,39 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 0.06997370719909668 seconds +Time: 0.07438278198242188 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15005', '-ss', '50000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.3941891193389893} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14116', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.2241876125335693} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 54, 97, ..., 2499911, - 2499955, 2500000]), - col_indices=tensor([ 2370, 4930, 5051, ..., 41423, 44524, 44646]), - values=tensor([0.0412, 0.5807, 0.8088, ..., 0.8046, 0.7553, 0.5801]), - size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6465, 0.4915, 0.1151, ..., 0.6682, 0.4745, 0.9594]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 2500000 -Density: 0.001 -Time: 3.3941891193389893 seconds - -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '46418', '-ss', '50000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.356630086898804} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 46, 86, ..., 2499896, +tensor(crow_indices=tensor([ 0, 34, 92, ..., 2499900, 2499949, 2500000]), - col_indices=tensor([ 1254, 3268, 3363, ..., 48004, 48805, 49373]), - values=tensor([0.4618, 0.8696, 0.7740, ..., 0.9354, 0.3130, 0.0156]), + col_indices=tensor([ 1404, 3513, 5830, ..., 44159, 44569, 48306]), + values=tensor([0.1373, 0.2439, 0.5744, ..., 0.3908, 0.2364, 0.3591]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3463, 0.0749, 0.0037, ..., 0.8223, 0.0446, 0.2738]) +tensor([0.1362, 0.8781, 0.3760, ..., 0.5068, 0.4476, 0.2011]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.356630086898804 seconds +Time: 3.2241876125335693 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '45970', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.176043510437012} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 46, 86, ..., 2499896, - 2499949, 2500000]), - col_indices=tensor([ 1254, 3268, 3363, ..., 48004, 48805, 49373]), - values=tensor([0.4618, 0.8696, 0.7740, ..., 0.9354, 0.3130, 0.0156]), +tensor(crow_indices=tensor([ 0, 44, 88, ..., 2499900, + 2499948, 2500000]), + col_indices=tensor([ 1615, 2373, 2611, ..., 46930, 47083, 48076]), + values=tensor([0.9230, 0.1594, 0.5959, ..., 0.9087, 0.9174, 0.2268]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.3463, 0.0749, 0.0037, ..., 0.8223, 0.0446, 0.2738]) +tensor([0.0874, 0.4273, 0.5493, ..., 0.5645, 0.1367, 0.9834]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +56,30 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.356630086898804 seconds +Time: 10.176043510437012 seconds -[40.8, 41.43, 39.77, 40.05, 40.2, 40.01, 40.25, 39.8, 39.91, 39.83] -[147.82] -13.222821712493896 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.356630086898804, 'TIME_S_1KI': 0.22311668074666732, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1954.5975055408476, 'W': 147.82} -[40.8, 41.43, 39.77, 40.05, 40.2, 40.01, 40.25, 39.8, 39.91, 39.83, 40.83, 39.7, 40.25, 40.38, 40.19, 39.67, 41.03, 39.72, 39.6, 39.53] -722.4549999999999 -36.122749999999996 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.356630086898804, 'TIME_S_1KI': 0.22311668074666732, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1954.5975055408476, 'W': 147.82, 
'J_1KI': 42.10861100307742, 'W_1KI': 3.1845404799862123, 'W_D': 111.69725, 'J_D': 1476.952822525859, 'W_D_1KI': 2.4063348270067646, 'J_D_1KI': 0.051840553815476} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 44, 88, ..., 2499900, + 2499948, 2500000]), + col_indices=tensor([ 1615, 2373, 2611, ..., 46930, 47083, 48076]), + values=tensor([0.9230, 0.1594, 0.5959, ..., 0.9087, 0.9174, 0.2268]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0874, 0.4273, 0.5493, ..., 0.5645, 0.1367, 0.9834]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.176043510437012 seconds + +[41.09, 39.77, 39.56, 40.76, 39.72, 40.02, 40.67, 40.29, 44.66, 39.48] +[147.36] +13.212990999221802 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 45970, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.176043510437012, 'TIME_S_1KI': 0.22136270416439008, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1947.0663536453249, 'W': 147.36} +[41.09, 39.77, 39.56, 40.76, 39.72, 40.02, 40.67, 40.29, 44.66, 39.48, 40.16, 39.91, 39.65, 39.38, 45.28, 39.52, 39.9, 39.57, 39.84, 39.4] +728.5649999999999 +36.42825 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 45970, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.176043510437012, 'TIME_S_1KI': 0.22136270416439008, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1947.0663536453249, 'W': 147.36, 'J_1KI': 42.355152352519575, 'W_1KI': 3.2055688492495107, 'W_D': 110.93175000000002, 'J_D': 1465.7402142779233, 'W_D_1KI': 2.413133565368719, 'J_D_1KI': 0.052493660329969966} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.json index ce3aa00..4012a55 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1681, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.72816252708435, "TIME_S_1KI": 6.382012211233998, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2210.374014186859, "W": 116.29999999999998, "J_1KI": 1314.9161297958708, "W_1KI": 69.18500892325996, "W_D": 80.01049999999998, "J_D": 1520.6631991581912, "W_D_1KI": 47.596966091612124, "J_D_1KI": 28.314673463183894} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1683, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.336533784866333, "TIME_S_1KI": 6.141731304139236, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2269.7345922088625, "W": 116.64000000000001, "J_1KI": 1348.6242377949272, "W_1KI": 69.30481283422462, "W_D": 80.63875000000002, "J_D": 1569.1749000984432, "W_D_1KI": 47.91369578134285, "J_D_1KI": 28.469219121415836} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.output index fde54b7..c3a231d 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.output @@ -1,14 +1,54 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.6244547367095947} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.6703436374664307} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 524, 1064, ..., 24999000, +tensor(crow_indices=tensor([ 0, 490, 1012, ..., 24999072, + 24999522, 25000000]), + col_indices=tensor([ 139, 180, 438, ..., 49771, 49774, 49863]), + values=tensor([0.3902, 0.9074, 0.1633, ..., 0.6016, 0.0459, 0.5003]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.5558, 0.4609, 0.2036, ..., 0.5663, 0.8384, 0.9866]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 0.6703436374664307 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1566', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.764458417892456} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 488, 992, ..., 24998975, + 24999476, 25000000]), + col_indices=tensor([ 69, 85, 277, ..., 49779, 49916, 49923]), + values=tensor([0.9712, 0.8967, 0.3986, ..., 0.6840, 0.3057, 0.8093]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2667, 0.3205, 0.1054, ..., 0.7906, 0.2246, 0.0319]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 9.764458417892456 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1683', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.336533784866333} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 528, 1023, ..., 24999023, 24999502, 25000000]), - col_indices=tensor([ 60, 76, 165, ..., 49872, 49944, 49977]), - values=tensor([0.3464, 0.5127, 0.2524, ..., 0.4585, 0.6152, 0.8409]), + col_indices=tensor([ 37, 51, 116, ..., 49907, 49981, 49995]), + values=tensor([0.5431, 0.8645, 0.1060, ..., 0.5554, 0.9629, 0.3371]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3169, 0.2467, 0.7317, ..., 0.4966, 0.9013, 0.2021]) +tensor([0.4196, 0.0024, 0.2413, ..., 0.5179, 0.0396, 0.4997]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 0.6244547367095947 seconds - -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1681', '-ss', '50000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.72816252708435} +Time: 10.336533784866333 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 503, 984, ..., 24998995, - 24999487, 25000000]), - col_indices=tensor([ 80, 111, 167, ..., 49695, 49904, 49943]), - values=tensor([0.9741, 0.4832, 0.1000, ..., 0.9253, 0.4991, 0.7681]), +tensor(crow_indices=tensor([ 0, 528, 1023, ..., 24999023, + 24999502, 25000000]), + col_indices=tensor([ 37, 51, 116, ..., 49907, 49981, 49995]), + values=tensor([0.5431, 0.8645, 0.1060, ..., 0.5554, 0.9629, 0.3371]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.5734, 0.0323, 0.0030, ..., 0.5787, 0.7337, 0.7260]) +tensor([0.4196, 0.0024, 0.2413, ..., 0.5179, 0.0396, 0.4997]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,30 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 10.72816252708435 seconds +Time: 10.336533784866333 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 503, 984, ..., 24998995, - 24999487, 25000000]), - col_indices=tensor([ 80, 111, 167, ..., 49695, 49904, 49943]), - values=tensor([0.9741, 0.4832, 0.1000, ..., 0.9253, 0.4991, 0.7681]), - size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.5734, 0.0323, 0.0030, ..., 0.5787, 0.7337, 0.7260]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000000 -Density: 0.01 -Time: 10.72816252708435 seconds - -[40.52, 40.33, 46.14, 39.67, 40.04, 40.05, 39.67, 39.99, 39.82, 39.93] -[116.3] -19.0057954788208 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1681, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.72816252708435, 'TIME_S_1KI': 6.382012211233998, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2210.374014186859, 'W': 116.29999999999998} -[40.52, 40.33, 46.14, 39.67, 40.04, 40.05, 39.67, 39.99, 39.82, 39.93, 42.61, 39.87, 39.87, 39.75, 39.67, 39.77, 39.75, 39.54, 40.39, 39.88] -725.7900000000001 -36.289500000000004 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1681, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.72816252708435, 'TIME_S_1KI': 6.382012211233998, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2210.374014186859, 'W': 116.29999999999998, 'J_1KI': 1314.9161297958708, 'W_1KI': 69.18500892325996, 'W_D': 80.01049999999998, 'J_D': 1520.6631991581912, 'W_D_1KI': 47.596966091612124, 'J_D_1KI': 28.314673463183894} +[40.99, 40.03, 39.7, 40.56, 41.71, 40.15, 39.61, 39.6, 39.59, 39.5] +[116.64] +19.45931577682495 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1683, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.336533784866333, 'TIME_S_1KI': 6.141731304139236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2269.7345922088625, 'W': 116.64000000000001} +[40.99, 40.03, 39.7, 40.56, 
41.71, 40.15, 39.61, 39.6, 39.59, 39.5, 41.03, 39.74, 40.04, 40.09, 40.48, 39.46, 39.52, 39.69, 39.52, 39.55] +720.0250000000001 +36.001250000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1683, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.336533784866333, 'TIME_S_1KI': 6.141731304139236, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2269.7345922088625, 'W': 116.64000000000001, 'J_1KI': 1348.6242377949272, 'W_1KI': 69.30481283422462, 'W_D': 80.63875000000002, 'J_D': 1569.1749000984432, 'W_D_1KI': 47.91369578134285, 'J_D_1KI': 28.469219121415836} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..f7df380 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 370, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 13.602883338928223, "TIME_S_1KI": 36.76454956467087, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3923.328741321564, "W": 95.93, "J_1KI": 10603.591192760983, "W_1KI": 259.27027027027026, "W_D": 59.633500000000005, "J_D": 2438.8806889982225, "W_D_1KI": 161.17162162162163, "J_D_1KI": 435.5989773557341} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..a0994fc --- /dev/null +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.831510305404663} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2449, 4912, ..., + 124994935, 124997480, 125000000]), + col_indices=tensor([ 8, 35, 55, ..., 49956, 49985, 49993]), + values=tensor([0.2569, 0.7242, 0.2453, ..., 0.3891, 0.2871, 0.8384]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.0743, 0.9130, 0.1660, ..., 0.7346, 0.0952, 0.8944]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 2.831510305404663 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '370', '-ss', '50000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 13.602883338928223} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2444, 4961, ..., + 124995104, 124997530, 125000000]), + col_indices=tensor([ 73, 77, 95, ..., 49959, 49981, 49999]), + values=tensor([0.2571, 0.0881, 0.2055, ..., 0.2189, 0.9227, 0.1207]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.4151, 0.6477, 0.9996, ..., 0.9228, 0.9960, 0.2452]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 13.602883338928223 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2444, 4961, ..., + 124995104, 124997530, 125000000]), + col_indices=tensor([ 73, 77, 95, ..., 49959, 49981, 49999]), + values=tensor([0.2571, 0.0881, 0.2055, ..., 0.2189, 0.9227, 0.1207]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.4151, 0.6477, 0.9996, ..., 0.9228, 0.9960, 0.2452]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 13.602883338928223 seconds + +[40.88, 40.42, 39.82, 39.83, 39.92, 39.86, 40.1, 40.12, 41.06, 40.17] +[95.93] +40.89782905578613 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 370, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 13.602883338928223, 'TIME_S_1KI': 36.76454956467087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3923.328741321564, 'W': 95.93} +[40.88, 40.42, 39.82, 39.83, 39.92, 39.86, 40.1, 40.12, 41.06, 40.17, 41.33, 39.79, 39.98, 39.62, 40.05, 39.91, 39.89, 41.64, 40.15, 45.16] +725.9300000000001 +36.2965 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 370, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 13.602883338928223, 'TIME_S_1KI': 36.76454956467087, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3923.328741321564, 'W': 95.93, 'J_1KI': 10603.591192760983, 'W_1KI': 259.27027027027026, 'W_D': 59.633500000000005, 'J_D': 2438.8806889982225, 'W_D_1KI': 161.17162162162163, 'J_D_1KI': 435.5989773557341} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json index 73ac2d2..0959bd4 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 128261, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.553161382675171, "TIME_S_1KI": 0.08227880168309283, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1360.8756747579575, "W": 104.26, "J_1KI": 10.610206335191194, "W_1KI": 0.8128737496199158, "W_D": 68.19500000000001, "J_D": 890.1296435844899, "W_D_1KI": 0.5316892898075019, "J_D_1KI": 0.004145369908292481} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 126053, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.525958776473999, "TIME_S_1KI": 0.08350423057344132, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1333.1147343921662, "W": 104.19, "J_1KI": 10.57582710758305, "W_1KI": 0.8265570831316986, "W_D": 68.29400000000001, "J_D": 873.8241450290682, "W_D_1KI": 0.5417879780727155, "J_D_1KI": 0.004298096658331936} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output index b70442e..4772a3e 100644 
--- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,32 +1,13 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0555570125579834} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([42051, 34515, 37611, ..., 41473, 46289, 26191]), - values=tensor([0.0144, 0.4378, 0.1715, ..., 0.0832, 0.5030, 0.6687]), - size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1124, 0.4102, 0.7912, ..., 0.3553, 0.2259, 0.3847]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 0.0555570125579834 seconds - -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18899', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.547147274017334} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.041290998458862305} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), - col_indices=tensor([ 9684, 40954, 42907, ..., 26506, 37971, 35337]), - values=tensor([0.0766, 0.5354, 0.2778, ..., 0.4912, 0.6494, 0.7856]), + col_indices=tensor([39202, 12250, 26322, ..., 37318, 47951, 8445]), + values=tensor([0.5740, 0.6345, 0.0638, ..., 0.9586, 0.6580, 0.7048]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.2325, 0.6288, 0.8060, ..., 0.4059, 0.0257, 0.5351]) +tensor([0.6102, 0.5871, 0.5346, ..., 0.2772, 0.0260, 0.5471]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 1.547147274017334 seconds +Time: 0.041290998458862305 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '128261', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.553161382675171} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25429', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.1181838512420654} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 24999, 24999, 25000]), - col_indices=tensor([43490, 45422, 41208, ..., 48729, 34812, 29106]), - values=tensor([0.6729, 0.4582, 0.1719, ..., 0.9792, 0.1938, 0.4197]), +tensor(crow_indices=tensor([ 0, 3, 3, ..., 25000, 25000, 25000]), + col_indices=tensor([14352, 21301, 37384, ..., 4637, 8775, 16739]), + values=tensor([0.2683, 0.5206, 0.7101, ..., 0.1299, 0.1065, 0.5952]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.3715, 0.8721, 0.7070, ..., 0.0207, 0.1985, 0.3006]) +tensor([0.5496, 0.8003, 0.5774, ..., 0.1083, 0.3526, 0.4589]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,15 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.553161382675171 seconds +Time: 2.1181838512420654 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '126053', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.525958776473999} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 24999, 24999, 25000]), - col_indices=tensor([43490, 45422, 41208, ..., 48729, 34812, 29106]), - values=tensor([0.6729, 0.4582, 0.1719, ..., 0.9792, 0.1938, 0.4197]), +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24997, 25000, 25000]), + col_indices=tensor([20587, 37923, 20731, ..., 134, 7495, 29489]), + values=tensor([0.6053, 0.5048, 0.9193, ..., 0.2382, 0.3077, 0.8468]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.3715, 0.8721, 0.7070, ..., 0.0207, 0.1985, 0.3006]) +tensor([0.0915, 0.4356, 0.4649, ..., 0.6897, 0.4993, 0.9107]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -69,13 +53,29 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.553161382675171 seconds +Time: 10.525958776473999 seconds -[42.38, 39.84, 39.75, 39.86, 39.86, 40.05, 40.23, 39.88, 39.93, 39.73] -[104.26] -13.052711248397827 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 128261, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.553161382675171, 'TIME_S_1KI': 0.08227880168309283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1360.8756747579575, 'W': 104.26} -[42.38, 39.84, 39.75, 39.86, 39.86, 40.05, 40.23, 39.88, 39.93, 39.73, 40.22, 39.71, 39.72, 39.44, 39.86, 40.03, 39.57, 39.83, 40.02, 45.11] -721.3 -36.065 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 128261, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.553161382675171, 'TIME_S_1KI': 0.08227880168309283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1360.8756747579575, 'W': 104.26, 'J_1KI': 10.610206335191194, 'W_1KI': 0.8128737496199158, 'W_D': 68.19500000000001, 'J_D': 890.1296435844899, 'W_D_1KI': 0.5316892898075019, 'J_D_1KI': 0.004145369908292481} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24997, 25000, 25000]), + col_indices=tensor([20587, 37923, 20731, ..., 134, 7495, 29489]), + values=tensor([0.6053, 0.5048, 0.9193, ..., 0.2382, 0.3077, 0.8468]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0915, 0.4356, 0.4649, ..., 0.6897, 0.4993, 0.9107]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.525958776473999 seconds + +[40.82, 39.94, 40.07, 39.92, 39.76, 39.88, 39.51, 39.48, 40.97, 39.53] +[104.19] +12.795035362243652 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 126053, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.525958776473999, 'TIME_S_1KI': 0.08350423057344132, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.1147343921662, 'W': 104.19} +[40.82, 39.94, 40.07, 39.92, 39.76, 39.88, 39.51, 39.48, 40.97, 39.53, 40.7, 39.51, 40.31, 39.76, 39.51, 39.78, 40.09, 39.51, 39.68, 39.43] +717.9199999999998 +35.895999999999994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 126053, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.525958776473999, 'TIME_S_1KI': 0.08350423057344132, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1333.1147343921662, 'W': 104.19, 'J_1KI': 10.57582710758305, 'W_1KI': 0.8265570831316986, 'W_D': 68.29400000000001, 'J_D': 873.8241450290682, 'W_D_1KI': 0.5417879780727155, 'J_D_1KI': 0.004298096658331936} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.json index b465c9c..cab8894 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 110115, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.965436697006226, "TIME_S_1KI": 0.1086630949190049, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1446.9478559374809, "W": 111.69, "J_1KI": 13.14033379591773, "W_1KI": 1.0143032284429914, "W_D": 75.71975, "J_D": 980.9520092633368, "W_D_1KI": 0.6876424646959997, "J_D_1KI": 0.006244766514062568} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 109904, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.213285207748413, "TIME_S_1KI": 0.09292914914605849, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1450.8303464508058, "W": 111.68, "J_1KI": 13.200887560514683, "W_1KI": 1.0161595574319406, "W_D": 75.4555, "J_D": 980.2393374518156, "W_D_1KI": 0.686558269034794, "J_D_1KI": 0.0062468906412395725} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.output index b1f3c1f..57edacd 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.05198168754577637} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.029732704162597656} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 124998, 124999, +tensor(crow_indices=tensor([ 0, 4, 6, ..., 124997, 124998, 125000]), - col_indices=tensor([ 927, 8914, 5646, ..., 41839, 2622, 37662]), - values=tensor([0.2093, 0.3505, 0.4434, ..., 0.7585, 0.2953, 0.8139]), + col_indices=tensor([15960, 34782, 36176, ..., 12559, 34529, 38985]), + values=tensor([0.0791, 0.6156, 0.0818, ..., 0.8529, 0.2215, 0.9416]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.7158, 0.3261, 0.8838, ..., 0.1644, 0.9864, 0.1779]) +tensor([0.3698, 0.6501, 0.4173, ..., 0.1077, 0.8642, 0.5336]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.05198168754577637 seconds +Time: 0.029732704162597656 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20199', '-ss', '50000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9260566234588623} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '35314', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.3738198280334473} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 9, ..., 124993, 124995, +tensor(crow_indices=tensor([ 0, 3, 5, ..., 124994, 124995, 125000]), - col_indices=tensor([ 5280, 12669, 18309, ..., 32915, 33761, 44585]), - values=tensor([0.1104, 0.6442, 0.1166, ..., 0.0611, 0.5204, 0.6774]), + col_indices=tensor([21922, 29755, 43390, ..., 22404, 26277, 33750]), + values=tensor([0.2859, 0.5691, 0.3202, ..., 0.6287, 0.7111, 0.2865]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.3981, 0.9235, 0.5295, ..., 0.9056, 0.3690, 0.2596]) +tensor([0.8597, 0.0062, 0.6406, ..., 0.7576, 0.3863, 0.5631]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 1.9260566234588623 seconds +Time: 3.3738198280334473 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '110115', '-ss', '50000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.965436697006226} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '109904', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.213285207748413} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 7, ..., 124990, 124994, +tensor(crow_indices=tensor([ 0, 4, 5, ..., 124997, 124999, 125000]), - col_indices=tensor([ 332, 15911, 38702, ..., 27905, 36936, 47310]), - values=tensor([0.9967, 0.4995, 0.1475, ..., 0.0565, 0.7404, 0.0608]), + col_indices=tensor([ 7419, 31029, 43158, ..., 3143, 44044, 12484]), + values=tensor([0.5434, 0.7479, 0.4321, ..., 0.0308, 0.7117, 0.2391]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.3103, 0.3240, 0.6987, ..., 0.1758, 0.7445, 0.7079]) +tensor([0.4874, 0.6180, 0.9672, ..., 0.1180, 0.9110, 0.1759]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 11.965436697006226 seconds +Time: 10.213285207748413 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 7, ..., 124990, 124994, +tensor(crow_indices=tensor([ 0, 4, 5, ..., 124997, 124999, 125000]), - col_indices=tensor([ 332, 15911, 38702, ..., 27905, 36936, 47310]), - values=tensor([0.9967, 0.4995, 0.1475, ..., 0.0565, 0.7404, 0.0608]), + col_indices=tensor([ 7419, 31029, 43158, ..., 3143, 44044, 12484]), + values=tensor([0.5434, 0.7479, 0.4321, ..., 0.0308, 0.7117, 0.2391]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.3103, 0.3240, 0.6987, ..., 0.1758, 0.7445, 0.7079]) +tensor([0.4874, 0.6180, 0.9672, ..., 0.1180, 0.9110, 0.1759]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 11.965436697006226 seconds +Time: 10.213285207748413 seconds -[40.59, 40.33, 40.2, 40.19, 39.75, 39.57, 39.56, 39.52, 39.58, 39.6] -[111.69] -12.955034971237183 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 110115, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.965436697006226, 'TIME_S_1KI': 0.1086630949190049, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1446.9478559374809, 'W': 111.69} -[40.59, 40.33, 40.2, 40.19, 39.75, 39.57, 39.56, 39.52, 39.58, 39.6, 40.22, 41.59, 39.67, 40.02, 39.96, 39.85, 39.52, 40.16, 39.94, 39.58] -719.405 -35.97025 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 110115, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.965436697006226, 'TIME_S_1KI': 0.1086630949190049, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1446.9478559374809, 'W': 111.69, 'J_1KI': 13.14033379591773, 'W_1KI': 1.0143032284429914, 'W_D': 75.71975, 'J_D': 980.9520092633368, 'W_D_1KI': 0.6876424646959997, 'J_D_1KI': 0.006244766514062568} +[40.83, 39.5, 39.52, 39.34, 39.66, 39.79, 39.96, 39.74, 39.52, 44.63] +[111.68] +12.990959405899048 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 109904, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.213285207748413, 'TIME_S_1KI': 0.09292914914605849, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1450.8303464508058, 'W': 111.68} +[40.83, 39.5, 39.52, 39.34, 39.66, 39.79, 39.96, 39.74, 39.52, 44.63, 40.65, 40.28, 39.58, 39.41, 40.46, 40.44, 44.79, 39.34, 40.26, 39.69] +724.49 +36.2245 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 109904, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.213285207748413, 'TIME_S_1KI': 0.09292914914605849, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1450.8303464508058, 'W': 111.68, 'J_1KI': 13.200887560514683, 'W_1KI': 1.0161595574319406, 'W_D': 75.4555, 'J_D': 980.2393374518156, 'W_D_1KI': 0.686558269034794, 'J_D_1KI': 0.0062468906412395725} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json index 2c10823..3b6d222 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 447788, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.585598468780518, "TIME_S_1KI": 0.023639754680296294, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1210.6447257900238, "W": 96.76, "J_1KI": 2.7036113647306848, "W_1KI": 0.21608439708076144, "W_D": 60.779250000000005, "J_D": 760.4596780691744, "W_D_1KI": 0.13573219916567664, "J_D_1KI": 0.00030311709819306603} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 449815, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.705693244934082, "TIME_S_1KI": 0.023800213965594924, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1273.803324151039, "W": 97.05, "J_1KI": 2.8318382538399987, "W_1KI": 0.21575536609495014, "W_D": 60.271499999999996, "J_D": 791.0771463325024, "W_D_1KI": 0.13399175216477885, "J_D_1KI": 0.0002978819118188118} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output index 38edffa..40f219b 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.01688361167907715} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.01718902587890625} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2499, 2500]), - col_indices=tensor([2619, 4724, 4043, ..., 721, 4005, 3452]), - values=tensor([0.3560, 0.4737, 0.9490, ..., 0.6650, 0.5511, 0.5102]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2499, 2500]), + col_indices=tensor([1031, 2134, 3925, ..., 1116, 1736, 1811]), + values=tensor([0.7566, 0.0335, 0.3769, ..., 0.8150, 0.7471, 0.5019]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.9035, 0.6347, 0.7264, ..., 0.8885, 0.4271, 0.9746]) +tensor([0.4116, 0.1385, 0.3532, ..., 0.9353, 0.1525, 0.3623]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.01688361167907715 seconds +Time: 0.01718902587890625 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '62190', '-ss', '5000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.4582650661468506} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '61085', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.4259023666381836} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2500, 2500]), - col_indices=tensor([ 785, 2078, 964, ..., 3093, 2409, 4914]), - values=tensor([0.2674, 0.7127, 0.0446, ..., 0.5887, 0.3242, 0.2984]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), + col_indices=tensor([2306, 3269, 4125, ..., 3228, 4729, 1910]), + values=tensor([0.0516, 0.6989, 0.3045, ..., 0.6684, 0.0411, 0.3500]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.2053, 0.2435, 0.6452, ..., 0.2463, 0.9693, 0.2980]) +tensor([0.4004, 0.2061, 0.1055, ..., 0.1168, 0.0661, 0.6525]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 1.4582650661468506 seconds +Time: 1.4259023666381836 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '447788', '-ss', '5000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.585598468780518} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '449815', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.705693244934082} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2500, 2500]), - col_indices=tensor([3170, 56, 953, ..., 2101, 4088, 4138]), - values=tensor([0.7441, 0.4324, 0.6982, ..., 0.2565, 0.3946, 0.1156]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([4152, 2644, 2174, ..., 3211, 4021, 16]), + values=tensor([0.0598, 0.0855, 0.2128, ..., 0.5120, 0.7970, 0.1348]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.7808, 0.5836, 0.6876, ..., 0.2450, 0.1275, 0.2911]) +tensor([0.4898, 0.1566, 0.9091, ..., 0.4315, 0.1963, 0.8990]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.585598468780518 seconds +Time: 10.705693244934082 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2500, 2500]), - col_indices=tensor([3170, 56, 953, ..., 2101, 4088, 4138]), - values=tensor([0.7441, 0.4324, 0.6982, ..., 0.2565, 0.3946, 0.1156]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([4152, 2644, 2174, ..., 3211, 4021, 16]), + values=tensor([0.0598, 0.0855, 0.2128, ..., 0.5120, 0.7970, 0.1348]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.7808, 0.5836, 0.6876, ..., 0.2450, 0.1275, 0.2911]) +tensor([0.4898, 0.1566, 0.9091, ..., 0.4315, 0.1963, 0.8990]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.585598468780518 seconds +Time: 10.705693244934082 seconds -[40.68, 39.36, 44.27, 39.07, 39.18, 39.33, 39.3, 39.56, 39.47, 39.52] -[96.76] -12.511830568313599 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 447788, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.585598468780518, 'TIME_S_1KI': 0.023639754680296294, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1210.6447257900238, 'W': 96.76} -[40.68, 39.36, 44.27, 39.07, 39.18, 39.33, 39.3, 39.56, 39.47, 39.52, 45.98, 39.49, 39.82, 39.41, 39.22, 39.42, 41.09, 39.62, 39.26, 39.31] -719.615 -35.98075 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 447788, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.585598468780518, 'TIME_S_1KI': 0.023639754680296294, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1210.6447257900238, 'W': 96.76, 'J_1KI': 2.7036113647306848, 'W_1KI': 0.21608439708076144, 'W_D': 60.779250000000005, 'J_D': 760.4596780691744, 'W_D_1KI': 0.13573219916567664, 'J_D_1KI': 0.00030311709819306603} +[41.07, 39.76, 54.14, 40.27, 39.53, 40.02, 39.52, 39.39, 39.7, 39.29] +[97.05] +13.125227451324463 +{'CPU': 'Epyc 7313P', 'CORES': 16, 
'ITERATIONS': 449815, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.705693244934082, 'TIME_S_1KI': 0.023800213965594924, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1273.803324151039, 'W': 97.05} +[41.07, 39.76, 54.14, 40.27, 39.53, 40.02, 39.52, 39.39, 39.7, 39.29, 40.12, 40.22, 39.78, 39.17, 40.41, 39.73, 39.72, 45.19, 39.21, 39.14] +735.57 +36.7785 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 449815, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.705693244934082, 'TIME_S_1KI': 0.023800213965594924, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1273.803324151039, 'W': 97.05, 'J_1KI': 2.8318382538399987, 'W_1KI': 0.21575536609495014, 'W_D': 60.271499999999996, 'J_D': 791.0771463325024, 'W_D_1KI': 0.13399175216477885, 'J_D_1KI': 0.0002978819118188118} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json index f972143..434a764 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 251242, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.495816707611084, "TIME_S_1KI": 0.041775725028502735, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1274.650104198456, "W": 98.84, "J_1KI": 5.073395786526361, "W_1KI": 0.393405561172097, "W_D": 63.200750000000006, "J_D": 815.0429236434699, "W_D_1KI": 0.25155328328862214, "J_D_1KI": 0.0010012389779122206} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 255321, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.671942234039307, "TIME_S_1KI": 0.041798137380157946, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1286.0644676828385, "W": 98.73, "J_1KI": 5.037049313150264, "W_1KI": 0.38668969649970036, "W_D": 62.91625, "J_D": 819.5518440681695, "W_D_1KI": 0.24642019262027018, "J_D_1KI": 0.000965138757173402} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output index 489fd07..72f6dea 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.020148515701293945} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 
0.02073979377746582} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 10, ..., 24991, 24995, 25000]), - col_indices=tensor([ 119, 931, 3406, ..., 3461, 3840, 3846]), - values=tensor([0.6773, 0.1678, 0.7190, ..., 0.1084, 0.6735, 0.7339]), +tensor(crow_indices=tensor([ 0, 8, 13, ..., 24986, 24997, 25000]), + col_indices=tensor([ 210, 616, 1664, ..., 1859, 2406, 3744]), + values=tensor([0.8494, 0.3277, 0.6011, ..., 0.0442, 0.0395, 0.0883]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0928, 0.4745, 0.9490, ..., 0.8279, 0.2614, 0.8062]) +tensor([0.9625, 0.8000, 0.6179, ..., 0.0228, 0.2614, 0.9913]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.020148515701293945 seconds +Time: 0.02073979377746582 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52113', '-ss', '5000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.177922248840332} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '50627', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.082019805908203} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 11, ..., 24987, 24992, 25000]), - col_indices=tensor([1164, 1818, 2007, ..., 3806, 4515, 4674]), - values=tensor([0.7293, 0.4677, 0.0557, ..., 0.9608, 0.8022, 0.8772]), +tensor(crow_indices=tensor([ 0, 3, 7, ..., 24989, 24993, 25000]), + col_indices=tensor([ 617, 730, 1271, ..., 3299, 4296, 4682]), + values=tensor([0.5887, 0.2909, 0.9734, ..., 0.1489, 0.9540, 0.8941]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.5594, 0.0674, 0.3236, ..., 0.9684, 0.1982, 0.3579]) +tensor([0.9697, 0.5962, 0.4255, ..., 0.8171, 0.4802, 0.2715]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 2.177922248840332 seconds +Time: 2.082019805908203 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '251242', '-ss', '5000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.495816707611084} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '255321', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.671942234039307} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 24992, 24997, 25000]), - col_indices=tensor([1486, 3242, 3522, ..., 1754, 2627, 4146]), - values=tensor([0.1836, 0.4006, 0.2197, ..., 0.0536, 0.9699, 0.8761]), +tensor(crow_indices=tensor([ 0, 7, 12, ..., 24991, 24996, 25000]), + col_indices=tensor([ 914, 959, 3594, ..., 2558, 2872, 4517]), + values=tensor([0.7861, 0.8355, 0.9023, ..., 0.3734, 0.5279, 0.9813]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8727, 0.2072, 0.9866, ..., 0.7187, 0.7974, 0.6886]) +tensor([0.7691, 0.3774, 0.5048, ..., 0.0036, 0.9716, 0.0233]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.495816707611084 seconds +Time: 10.671942234039307 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 8, ..., 24992, 24997, 25000]), - col_indices=tensor([1486, 3242, 3522, ..., 1754, 2627, 4146]), - values=tensor([0.1836, 0.4006, 0.2197, ..., 0.0536, 0.9699, 0.8761]), +tensor(crow_indices=tensor([ 0, 7, 12, ..., 24991, 24996, 25000]), + col_indices=tensor([ 914, 959, 3594, ..., 2558, 2872, 4517]), + values=tensor([0.7861, 0.8355, 0.9023, ..., 0.3734, 0.5279, 0.9813]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8727, 0.2072, 0.9866, ..., 0.7187, 0.7974, 0.6886]) +tensor([0.7691, 0.3774, 0.5048, ..., 0.0036, 0.9716, 0.0233]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.495816707611084 seconds +Time: 10.671942234039307 seconds -[40.75, 39.26, 39.42, 39.22, 39.47, 40.61, 39.23, 39.53, 39.63, 39.38] -[98.84] -12.896095752716064 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 251242, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.495816707611084, 'TIME_S_1KI': 0.041775725028502735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.650104198456, 'W': 98.84} -[40.75, 39.26, 39.42, 39.22, 39.47, 40.61, 39.23, 39.53, 39.63, 39.38, 40.47, 39.12, 40.14, 40.63, 39.59, 39.14, 39.11, 39.22, 39.39, 39.55] -712.785 -35.63925 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 251242, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.495816707611084, 'TIME_S_1KI': 0.041775725028502735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.650104198456, 'W': 98.84, 'J_1KI': 5.073395786526361, 'W_1KI': 0.393405561172097, 'W_D': 63.200750000000006, 'J_D': 815.0429236434699, 'W_D_1KI': 0.25155328328862214, 'J_D_1KI': 0.0010012389779122206} +[40.31, 39.66, 39.89, 39.32, 41.78, 39.24, 39.51, 39.33, 39.28, 39.57] +[98.73] +13.026075839996338 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 255321, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.671942234039307, 'TIME_S_1KI': 0.041798137380157946, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1286.0644676828385, 'W': 98.73} +[40.31, 39.66, 39.89, 39.32, 41.78, 39.24, 39.51, 39.33, 39.28, 39.57, 40.91, 39.42, 39.75, 40.05, 39.94, 40.05, 40.16, 39.13, 39.34, 40.06] +716.2750000000001 +35.813750000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 255321, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.671942234039307, 'TIME_S_1KI': 0.041798137380157946, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1286.0644676828385, 'W': 98.73, 'J_1KI': 5.037049313150264, 'W_1KI': 0.38668969649970036, 'W_D': 62.91625, 'J_D': 819.5518440681695, 'W_D_1KI': 0.24642019262027018, 'J_D_1KI': 0.000965138757173402} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json index 7edf7f8..71c10d6 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 151851, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.677409887313843, "TIME_S_1KI": 0.07031504492768466, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1443.7484180927277, "W": 117.47, "J_1KI": 9.507664869462351, "W_1KI": 0.7735872664651534, "W_D": 81.37549999999999, "J_D": 1000.1340716481208, "W_D_1KI": 0.5358904452390829, "J_D_1KI": 0.0035290544365139706} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 159777, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 12.599762678146362, "TIME_S_1KI": 0.0788584256691912, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1625.0090211582183, "W": 118.91, "J_1KI": 10.17048149081669, "W_1KI": 0.7442247632637988, "W_D": 83.03999999999999, "J_D": 1134.81413772583, "W_D_1KI": 0.5197243658348824, "J_D_1KI": 0.0032528108916482492} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output index f6f6dab..cbddaed 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.027439117431640625} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.028786182403564453} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 55, 107, ..., 249902, 249955, +tensor(crow_indices=tensor([ 0, 59, 104, ..., 249898, 249953, 250000]), - col_indices=tensor([ 26, 155, 397, ..., 4652, 4756, 4760]), - values=tensor([0.9134, 0.8993, 0.8423, ..., 0.2444, 0.0288, 0.7023]), + col_indices=tensor([ 221, 274, 328, ..., 4878, 4976, 4999]), + values=tensor([0.0342, 0.1493, 0.9761, ..., 0.6975, 0.3452, 0.2059]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.3636, 0.7288, 0.4529, ..., 0.6147, 0.2907, 0.3015]) +tensor([0.5412, 0.2977, 0.2231, ..., 0.9873, 0.5104, 0.9074]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.027439117431640625 seconds +Time: 0.028786182403564453 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '38266', '-ss', '5000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.645953893661499} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '36475', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.560610294342041} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 49, 92, ..., 249897, 249949, +tensor(crow_indices=tensor([ 0, 55, 109, ..., 249892, 249939, 250000]), - col_indices=tensor([ 507, 568, 655, ..., 4839, 4844, 4959]), - values=tensor([0.4543, 0.3787, 0.6932, ..., 0.4487, 0.3087, 0.1431]), + col_indices=tensor([ 97, 194, 218, ..., 4655, 4840, 4910]), + values=tensor([0.5896, 0.1215, 0.2623, ..., 0.5727, 0.7694, 0.3887]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.4167, 0.3931, 0.0326, ..., 0.8288, 0.4472, 0.1506]) +tensor([0.3335, 0.3388, 0.8775, ..., 0.7612, 0.9315, 0.4075]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 2.645953893661499 seconds +Time: 2.560610294342041 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '151851', '-ss', '5000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.677409887313843} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '149568', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 9.829088926315308} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 62, 123, ..., 249890, 249948, +tensor(crow_indices=tensor([ 0, 53, 101, ..., 249907, 249953, 250000]), - col_indices=tensor([ 53, 60, 101, ..., 4781, 4787, 4941]), - values=tensor([0.8546, 0.9316, 0.6470, ..., 0.1212, 0.6179, 0.4318]), + col_indices=tensor([ 20, 135, 201, ..., 4697, 4718, 4926]), + values=tensor([0.9828, 0.0133, 0.5809, ..., 0.0122, 0.8671, 0.9428]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7208, 0.6556, 0.2590, ..., 0.8294, 0.6979, 0.2347]) +tensor([0.3418, 0.3993, 0.8591, ..., 0.6204, 0.2914, 0.3019]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.677409887313843 seconds +Time: 9.829088926315308 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '159777', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 12.599762678146362} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 62, 123, ..., 249890, 249948, +tensor(crow_indices=tensor([ 0, 55, 105, ..., 249903, 249945, 250000]), - col_indices=tensor([ 53, 60, 101, ..., 4781, 4787, 4941]), - values=tensor([0.8546, 0.9316, 0.6470, ..., 0.1212, 0.6179, 0.4318]), + col_indices=tensor([ 114, 208, 220, ..., 4787, 4793, 4955]), + values=tensor([0.0792, 0.6264, 0.4827, ..., 0.0840, 0.3114, 0.5425]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7208, 0.6556, 0.2590, ..., 0.8294, 0.6979, 0.2347]) +tensor([0.4452, 0.0456, 0.3361, ..., 0.6556, 0.9661, 0.5009]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +76,30 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.677409887313843 seconds +Time: 12.599762678146362 seconds -[39.83, 39.62, 39.22, 39.2, 39.58, 39.98, 44.8, 39.52, 39.58, 39.44] -[117.47] -12.290358543395996 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 151851, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.677409887313843, 'TIME_S_1KI': 0.07031504492768466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1443.7484180927277, 'W': 117.47} -[39.83, 39.62, 39.22, 39.2, 39.58, 39.98, 44.8, 39.52, 39.58, 39.44, 40.11, 39.32, 39.47, 44.63, 39.38, 39.24, 39.42, 39.88, 39.66, 39.4] -721.8900000000001 -36.094500000000004 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 151851, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.677409887313843, 'TIME_S_1KI': 0.07031504492768466, 'BASELINE_TIME_S': 10, 
'BASELINE_DELAY_S': 10, 'J': 1443.7484180927277, 'W': 117.47, 'J_1KI': 9.507664869462351, 'W_1KI': 0.7735872664651534, 'W_D': 81.37549999999999, 'J_D': 1000.1340716481208, 'W_D_1KI': 0.5358904452390829, 'J_D_1KI': 0.0035290544365139706} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 55, 105, ..., 249903, 249945, + 250000]), + col_indices=tensor([ 114, 208, 220, ..., 4787, 4793, 4955]), + values=tensor([0.0792, 0.6264, 0.4827, ..., 0.0840, 0.3114, 0.5425]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4452, 0.0456, 0.3361, ..., 0.6556, 0.9661, 0.5009]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 12.599762678146362 seconds + +[41.42, 39.73, 39.61, 39.49, 39.71, 39.39, 39.69, 39.84, 39.88, 39.81] +[118.91] +13.665873527526855 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 159777, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 12.599762678146362, 'TIME_S_1KI': 0.0788584256691912, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1625.0090211582183, 'W': 118.91} +[41.42, 39.73, 39.61, 39.49, 39.71, 39.39, 39.69, 39.84, 39.88, 39.81, 40.25, 41.73, 39.59, 39.81, 39.53, 39.41, 39.53, 39.92, 39.91, 39.78] +717.4000000000001 +35.870000000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 159777, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 12.599762678146362, 'TIME_S_1KI': 0.0788584256691912, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1625.0090211582183, 'W': 118.91, 'J_1KI': 10.17048149081669, 'W_1KI': 0.7442247632637988, 'W_D': 83.03999999999999, 'J_D': 1134.81413772583, 'W_D_1KI': 0.5197243658348824, 'J_D_1KI': 0.0032528108916482492} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json index 0dcc054..0c87859 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 91742, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.460400104522705, "TIME_S_1KI": 0.11401975218027409, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1797.7089305996894, "W": 131.45, "J_1KI": 19.595266405786766, "W_1KI": 1.4328224804342613, "W_D": 95.54724999999999, "J_D": 1306.7032683091759, "W_D_1KI": 1.0414777310283185, "J_D_1KI": 0.011352245765607012} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 91807, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 
10.539710760116577, "TIME_S_1KI": 0.11480291001902444, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1775.26453387022, "W": 132.79, "J_1KI": 19.33691912240047, "W_1KI": 1.446403868986025, "W_D": 96.68149999999999, "J_D": 1292.5313504885432, "W_D_1KI": 1.0530950798958683, "J_D_1KI": 0.011470749288135636} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output index ee62fae..5e9885f 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03591585159301758} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03616905212402344} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 282, 523, ..., 1249534, - 1249768, 1250000]), - col_indices=tensor([ 14, 20, 65, ..., 4981, 4988, 4994]), - values=tensor([0.5427, 0.7626, 0.3688, ..., 0.1462, 0.4395, 0.5084]), +tensor(crow_indices=tensor([ 0, 239, 475, ..., 1249454, + 1249729, 1250000]), + col_indices=tensor([ 10, 41, 59, ..., 4943, 4952, 4982]), + values=tensor([0.5383, 0.7420, 0.9700, ..., 0.7402, 0.6039, 0.9535]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.3604, 0.6234, 0.1526, ..., 0.4035, 0.4868, 0.5530]) +tensor([0.9383, 0.0034, 0.5962, ..., 0.7530, 0.7746, 0.7053]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,39 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 0.03591585159301758 seconds +Time: 0.03616905212402344 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '29235', '-ss', '5000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 3.345984697341919} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '29030', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 3.320157527923584} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 247, 522, ..., 1249509, - 1249766, 1250000]), - col_indices=tensor([ 11, 41, 47, ..., 4983, 4993, 4996]), - values=tensor([0.4860, 0.1371, 0.2214, ..., 0.6634, 0.1469, 0.9637]), - size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.5776, 0.4725, 0.0368, ..., 0.6036, 0.3775, 0.0011]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 1250000 -Density: 0.05 -Time: 3.345984697341919 seconds - -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '91742', '-ss', '5000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.460400104522705} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 245, 499, ..., 1249503, +tensor(crow_indices=tensor([ 0, 264, 493, ..., 1249506, 1249746, 1250000]), - col_indices=tensor([ 11, 13, 56, ..., 4961, 4967, 4994]), - values=tensor([0.1643, 0.9353, 0.3976, ..., 0.1683, 0.6963, 0.8462]), + col_indices=tensor([ 38, 65, 78, ..., 4953, 4995, 4997]), + values=tensor([0.9639, 0.6446, 0.1864, ..., 0.7835, 0.7572, 0.9616]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.6649, 0.4073, 0.4211, ..., 0.1078, 0.2188, 0.0388]) +tensor([0.0443, 0.2357, 0.4128, ..., 0.0404, 0.6082, 0.2175]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.460400104522705 seconds +Time: 3.320157527923584 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '91807', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.539710760116577} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 245, 499, ..., 1249503, - 1249746, 1250000]), - col_indices=tensor([ 11, 13, 56, ..., 4961, 4967, 4994]), - values=tensor([0.1643, 0.9353, 0.3976, ..., 0.1683, 0.6963, 0.8462]), +tensor(crow_indices=tensor([ 0, 239, 477, ..., 1249471, + 1249754, 1250000]), + col_indices=tensor([ 7, 12, 31, ..., 4957, 4958, 4966]), + values=tensor([0.8697, 0.7101, 0.2841, ..., 0.5399, 0.7906, 0.6573]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.6649, 0.4073, 0.4211, ..., 0.1078, 0.2188, 0.0388]) +tensor([0.4895, 0.4712, 0.1549, ..., 0.9337, 0.8190, 0.0696]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +56,30 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.460400104522705 seconds +Time: 10.539710760116577 seconds -[40.58, 39.92, 39.93, 39.37, 39.59, 39.9, 39.71, 40.07, 40.76, 39.67] -[131.45] -13.675990343093872 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91742, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.460400104522705, 'TIME_S_1KI': 0.11401975218027409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1797.7089305996894, 'W': 131.45} -[40.58, 39.92, 39.93, 39.37, 39.59, 39.9, 39.71, 40.07, 40.76, 39.67, 40.34, 39.45, 39.48, 40.31, 39.86, 39.84, 39.9, 39.88, 39.9, 39.78] -718.055 -35.90275 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91742, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.460400104522705, 'TIME_S_1KI': 0.11401975218027409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1797.7089305996894, 'W': 131.45, 'J_1KI': 19.595266405786766, 'W_1KI': 1.4328224804342613, 'W_D': 95.54724999999999, 'J_D': 1306.7032683091759, 'W_D_1KI': 1.0414777310283185, 'J_D_1KI': 0.011352245765607012} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 239, 477, ..., 1249471, + 1249754, 1250000]), + col_indices=tensor([ 7, 12, 31, ..., 4957, 4958, 4966]), + values=tensor([0.8697, 0.7101, 0.2841, ..., 0.5399, 0.7906, 0.6573]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.4895, 0.4712, 0.1549, ..., 0.9337, 0.8190, 0.0696]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.539710760116577 seconds + +[41.24, 39.56, 39.78, 39.5, 40.15, 40.08, 40.05, 39.48, 39.7, 39.48] +[132.79] +13.368962526321411 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91807, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.539710760116577, 'TIME_S_1KI': 0.11480291001902444, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1775.26453387022, 'W': 132.79} +[41.24, 39.56, 39.78, 39.5, 40.15, 40.08, 40.05, 39.48, 39.7, 39.48, 40.21, 39.89, 39.7, 39.85, 39.71, 40.16, 40.06, 41.81, 39.99, 44.47] +722.1700000000001 +36.10850000000001 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91807, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.539710760116577, 'TIME_S_1KI': 0.11480291001902444, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1775.26453387022, 'W': 132.79, 'J_1KI': 19.33691912240047, 'W_1KI': 1.446403868986025, 'W_D': 96.68149999999999, 'J_D': 1292.5313504885432, 'W_D_1KI': 1.0530950798958683, 'J_D_1KI': 0.011470749288135636} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json index 44fa72d..987b9ad 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 52297, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.236442565917969, "TIME_S_1KI": 0.195736706998833, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1815.1719595336915, "W": 138.62, "J_1KI": 34.70891178334688, "W_1KI": 2.650630055261296, "W_D": 102.202, "J_D": 1338.293208831787, "W_D_1KI": 1.9542612386943803, "J_D_1KI": 0.0373685151862321} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 52912, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.354979991912842, "TIME_S_1KI": 0.19570192001649608, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1873.7867654466627, "W": 137.14, "J_1KI": 35.41326665872888, "W_1KI": 2.5918506198971873, "W_D": 101.08874999999999, "J_D": 1381.2072472330926, "W_D_1KI": 1.9105070683398848, "J_D_1KI": 0.03610725484464554} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output index 4fe4648..4ac2b45 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.053244590759277344} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.057938575744628906} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 487, 976, ..., 2498986, - 2499481, 2500000]), - col_indices=tensor([ 13, 19, 40, ..., 4975, 4977, 4981]), - values=tensor([0.0276, 0.4992, 0.8339, ..., 0.1235, 0.3053, 0.8819]), +tensor(crow_indices=tensor([ 0, 516, 999, ..., 2499028, + 2499504, 2500000]), + col_indices=tensor([ 10, 15, 27, ..., 4971, 4978, 4980]), + values=tensor([0.9206, 0.4241, 0.6810, ..., 0.1765, 0.0854, 0.9488]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6982, 0.6943, 0.5654, ..., 0.0343, 0.1924, 0.4615]) +tensor([0.2709, 0.2891, 0.7340, ..., 0.3721, 0.9776, 0.3534]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 0.053244590759277344 seconds +Time: 0.057938575744628906 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19720', '-ss', '5000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 3.959235429763794} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18122', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 3.59611177444458} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 500, 997, ..., 2499001, - 2499525, 2500000]), - col_indices=tensor([ 22, 31, 36, ..., 4976, 4979, 4990]), - values=tensor([0.9139, 0.4529, 0.5623, ..., 0.7413, 0.5022, 0.1210]), +tensor(crow_indices=tensor([ 0, 480, 982, ..., 2499049, + 2499529, 2500000]), + col_indices=tensor([ 5, 11, 37, ..., 4957, 4962, 4990]), + values=tensor([0.8715, 0.2207, 0.6494, ..., 0.4488, 0.1939, 0.3906]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.1909, 0.5057, 0.7269, ..., 0.6307, 0.9165, 0.6325]) +tensor([0.4996, 0.4879, 0.9575, ..., 0.2501, 0.0484, 0.4784]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 3.959235429763794 seconds +Time: 3.59611177444458 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52297', '-ss', '5000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.236442565917969} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52912', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.354979991912842} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 495, 967, ..., 2499009, - 2499501, 2500000]), - col_indices=tensor([ 2, 3, 29, ..., 4974, 4984, 4998]), - values=tensor([0.7947, 0.6825, 0.2906, ..., 0.1208, 0.9049, 0.2265]), +tensor(crow_indices=tensor([ 0, 512, 1033, ..., 2498964, + 2499478, 2500000]), + col_indices=tensor([ 2, 17, 23, ..., 4986, 4995, 4996]), + values=tensor([0.3497, 0.1928, 0.5659, ..., 0.9965, 0.8054, 0.3638]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7067, 0.8426, 0.8818, ..., 0.1022, 0.5608, 0.1343]) +tensor([0.5347, 0.2716, 0.2224, ..., 0.2917, 0.3905, 0.3272]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.236442565917969 seconds +Time: 10.354979991912842 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 495, 967, ..., 2499009, - 2499501, 2500000]), - col_indices=tensor([ 2, 3, 29, ..., 4974, 4984, 4998]), - values=tensor([0.7947, 0.6825, 0.2906, ..., 0.1208, 0.9049, 0.2265]), +tensor(crow_indices=tensor([ 0, 512, 1033, ..., 2498964, + 2499478, 2500000]), + col_indices=tensor([ 2, 17, 23, ..., 4986, 4995, 4996]), + values=tensor([0.3497, 0.1928, 0.5659, ..., 0.9965, 0.8054, 0.3638]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7067, 0.8426, 0.8818, ..., 0.1022, 0.5608, 0.1343]) +tensor([0.5347, 0.2716, 0.2224, ..., 0.2917, 0.3905, 0.3272]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.236442565917969 seconds +Time: 10.354979991912842 seconds -[40.97, 39.52, 39.62, 39.4, 39.87, 45.09, 39.68, 39.56, 39.77, 39.67] -[138.62] -13.094589233398438 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.236442565917969, 'TIME_S_1KI': 0.195736706998833, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1815.1719595336915, 'W': 138.62} -[40.97, 39.52, 39.62, 39.4, 39.87, 45.09, 39.68, 39.56, 39.77, 39.67, 41.69, 43.46, 41.5, 39.76, 39.97, 39.88, 39.62, 39.85, 40.84, 39.61] -728.3600000000001 -36.418000000000006 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.236442565917969, 'TIME_S_1KI': 0.195736706998833, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1815.1719595336915, 'W': 138.62, 'J_1KI': 34.70891178334688, 'W_1KI': 2.650630055261296, 'W_D': 102.202, 'J_D': 1338.293208831787, 'W_D_1KI': 1.9542612386943803, 'J_D_1KI': 0.0373685151862321} +[40.41, 39.61, 40.39, 39.58, 41.83, 39.58, 39.84, 39.74, 39.57, 39.81] +[137.14] +13.663313150405884 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52912, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.354979991912842, 'TIME_S_1KI': 0.19570192001649608, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1873.7867654466627, 'W': 137.14} +[40.41, 39.61, 40.39, 39.58, 41.83, 39.58, 39.84, 39.74, 39.57, 39.81, 40.38, 40.03, 40.18, 40.08, 39.88, 40.18, 40.14, 39.98, 39.77, 40.69] +721.025 +36.051249999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52912, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.354979991912842, 'TIME_S_1KI': 0.19570192001649608, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1873.7867654466627, 'W': 137.14, 'J_1KI': 35.41326665872888, 'W_1KI': 2.5918506198971873, 'W_D': 101.08874999999999, 'J_D': 1381.2072472330926, 'W_D_1KI': 1.9105070683398848, 'J_D_1KI': 0.03610725484464554} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.json index c26577c..870c328 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28289, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.465468645095825, "TIME_S_1KI": 0.36994834193841514, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1925.348158209324, "W": 138.77, "J_1KI": 68.05995822437428, "W_1KI": 4.905440277139524, "W_D": 102.84225, "J_D": 1426.8727867954374, "W_D_1KI": 3.6354148255505674, "J_D_1KI": 0.1285098386493184} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28846, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.404276132583618, "TIME_S_1KI": 0.3606834962415454, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1932.1177334833146, "W": 139.74, "J_1KI": 66.98043865642774, "W_1KI": 4.8443458365111285, "W_D": 103.11100000000002, "J_D": 1425.6661773092749, "W_D_1KI": 3.5745337308465652, "J_D_1KI": 0.12391783023110882} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.output index 4cf3fef..66e4164 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.07305312156677246} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.0717768669128418} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1021, 1969, ..., 4997949, - 4999001, 5000000]), - col_indices=tensor([ 1, 7, 18, ..., 4990, 4995, 4998]), - values=tensor([0.6231, 0.9951, 0.8252, ..., 0.9756, 0.2344, 0.2983]), +tensor(crow_indices=tensor([ 0, 982, 1957, ..., 4997963, + 4998987, 5000000]), + col_indices=tensor([ 5, 6, 7, ..., 4993, 4997, 4999]), + values=tensor([0.8941, 0.9992, 0.8012, ..., 0.3746, 0.6607, 0.5735]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0355, 0.1933, 0.9788, ..., 0.6197, 0.2365, 0.0393]) +tensor([0.3587, 0.9276, 0.9527, ..., 0.4878, 0.1315, 0.8391]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 0.07305312156677246 seconds +Time: 0.0717768669128418 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14373', '-ss', '5000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.334789037704468} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14628', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.324462413787842} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1024, 2040, ..., 4998012, - 4999011, 5000000]), - col_indices=tensor([ 1, 7, 11, ..., 4993, 4994, 4998]), - values=tensor([0.0452, 0.7491, 0.1728, ..., 0.4616, 0.1426, 0.7347]), +tensor(crow_indices=tensor([ 0, 969, 2010, ..., 4998017, + 4998963, 5000000]), + col_indices=tensor([ 2, 7, 11, ..., 4968, 4973, 4981]), + values=tensor([0.1932, 0.9028, 0.0773, ..., 0.1884, 0.2681, 0.3774]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.8794, 0.6343, 0.4463, ..., 0.6355, 0.8597, 0.5087]) +tensor([0.7410, 0.6895, 0.0960, ..., 0.1902, 0.8343, 0.6611]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 5.334789037704468 seconds +Time: 5.324462413787842 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28289', '-ss', '5000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.465468645095825} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28846', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.404276132583618} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 999, 2016, ..., 4997966, - 4998976, 5000000]), - col_indices=tensor([ 2, 9, 14, ..., 4993, 4994, 4995]), - values=tensor([0.1135, 0.5000, 0.4923, ..., 0.1880, 0.5290, 0.9229]), +tensor(crow_indices=tensor([ 0, 985, 2039, ..., 4997936, + 4998963, 5000000]), + col_indices=tensor([ 8, 12, 28, ..., 4988, 4996, 4997]), + values=tensor([0.4804, 0.1658, 0.3798, ..., 0.8036, 0.6527, 0.9757]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0610, 0.8156, 0.2755, ..., 0.7165, 0.9008, 0.9624]) +tensor([0.2723, 0.5864, 0.6330, ..., 0.7738, 0.3370, 0.9006]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.465468645095825 seconds +Time: 10.404276132583618 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 999, 2016, ..., 4997966, - 4998976, 5000000]), - col_indices=tensor([ 2, 9, 14, ..., 4993, 4994, 4995]), - values=tensor([0.1135, 0.5000, 0.4923, ..., 0.1880, 0.5290, 0.9229]), +tensor(crow_indices=tensor([ 0, 985, 2039, ..., 4997936, + 4998963, 5000000]), + col_indices=tensor([ 8, 12, 28, ..., 4988, 4996, 4997]), + values=tensor([0.4804, 0.1658, 0.3798, ..., 0.8036, 0.6527, 0.9757]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.0610, 0.8156, 0.2755, ..., 0.7165, 0.9008, 0.9624]) +tensor([0.2723, 0.5864, 0.6330, ..., 0.7738, 0.3370, 0.9006]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.465468645095825 seconds +Time: 10.404276132583618 seconds -[40.8, 40.7, 39.71, 40.23, 39.76, 39.64, 39.79, 39.68, 39.73, 39.72] -[138.77] -13.874383211135864 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28289, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.465468645095825, 'TIME_S_1KI': 0.36994834193841514, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1925.348158209324, 'W': 138.77} -[40.8, 40.7, 39.71, 40.23, 39.76, 39.64, 39.79, 39.68, 39.73, 39.72, 40.37, 39.65, 40.3, 39.61, 40.3, 40.15, 39.64, 39.78, 39.65, 39.58] -718.555 -35.927749999999996 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28289, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.465468645095825, 'TIME_S_1KI': 0.36994834193841514, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1925.348158209324, 'W': 138.77, 'J_1KI': 68.05995822437428, 'W_1KI': 4.905440277139524, 'W_D': 102.84225, 'J_D': 1426.8727867954374, 'W_D_1KI': 3.6354148255505674, 'J_D_1KI': 0.1285098386493184} +[41.19, 40.35, 39.91, 40.05, 41.57, 40.01, 40.37, 44.94, 40.26, 39.6] +[139.74] +13.826518774032593 
+{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28846, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.404276132583618, 'TIME_S_1KI': 0.3606834962415454, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1932.1177334833146, 'W': 139.74} +[41.19, 40.35, 39.91, 40.05, 41.57, 40.01, 40.37, 44.94, 40.26, 39.6, 41.27, 39.63, 39.67, 45.04, 39.7, 39.68, 40.15, 40.06, 40.17, 39.98] +732.5799999999999 +36.629 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28846, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.404276132583618, 'TIME_S_1KI': 0.3606834962415454, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1932.1177334833146, 'W': 139.74, 'J_1KI': 66.98043865642774, 'W_1KI': 4.8443458365111285, 'W_D': 103.11100000000002, 'J_D': 1425.6661773092749, 'W_D_1KI': 3.5745337308465652, 'J_D_1KI': 0.12391783023110882} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.json index edb21c0..4f78c17 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 19365, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.708929538726807, "TIME_S_1KI": 0.5530043655423086, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1994.3257465744018, "W": 137.56, "J_1KI": 102.98609587267761, "W_1KI": 7.103537309579138, "W_D": 101.59625, "J_D": 1472.9283013260365, "W_D_1KI": 5.246385231087013, "J_D_1KI": 0.270921003412704} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 19144, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.589388370513916, "TIME_S_1KI": 0.5531439809085832, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2009.800785241127, "W": 137.31, "J_1KI": 104.98332559763514, "W_1KI": 7.172482239866277, "W_D": 101.2175, "J_D": 1481.5163570034504, "W_D_1KI": 5.287165691600501, "J_D_1KI": 0.27617873441289703} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.output index 4cf90e5..32dc9a9 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.0932457447052002} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.09389424324035645} 
/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1472, 2953, ..., 7496947, - 7498464, 7500000]), - col_indices=tensor([ 7, 9, 10, ..., 4989, 4991, 4994]), - values=tensor([0.9868, 0.9731, 0.3711, ..., 0.9277, 0.6596, 0.4560]), +tensor(crow_indices=tensor([ 0, 1453, 2925, ..., 7497044, + 7498511, 7500000]), + col_indices=tensor([ 2, 8, 11, ..., 4986, 4987, 4995]), + values=tensor([0.6050, 0.7562, 0.0795, ..., 0.5439, 0.7811, 0.7483]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.4150, 0.1407, 0.7534, ..., 0.5098, 0.0887, 0.6433]) +tensor([0.3581, 0.7659, 0.8458, ..., 0.9868, 0.1607, 0.0117]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 0.0932457447052002 seconds +Time: 0.09389424324035645 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '11260', '-ss', '5000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.105090618133545} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '11182', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.13297963142395} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1440, 2968, ..., 7497040, - 7498552, 7500000]), - col_indices=tensor([ 4, 15, 16, ..., 4993, 4997, 4998]), - values=tensor([0.0079, 0.6033, 0.5837, ..., 0.4070, 0.1537, 0.2862]), +tensor(crow_indices=tensor([ 0, 1535, 3010, ..., 7496960, + 7498491, 7500000]), + col_indices=tensor([ 0, 1, 4, ..., 4993, 4997, 4999]), + values=tensor([0.0177, 0.3777, 0.8712, ..., 0.1638, 0.9917, 0.2791]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.9942, 0.8351, 0.2634, ..., 0.1203, 0.3761, 0.2393]) +tensor([0.5968, 0.6885, 0.5169, ..., 0.8778, 0.7014, 0.4474]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 6.105090618133545 seconds +Time: 6.13297963142395 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19365', '-ss', '5000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.708929538726807} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19144', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.589388370513916} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1483, 3029, ..., 7497007, - 7498484, 7500000]), - col_indices=tensor([ 0, 1, 2, ..., 4991, 4992, 4999]), - values=tensor([0.2037, 0.3378, 0.5245, ..., 0.5597, 0.6700, 0.6684]), +tensor(crow_indices=tensor([ 0, 1480, 2995, ..., 7496986, + 7498507, 7500000]), + col_indices=tensor([ 2, 7, 10, ..., 4984, 4996, 4997]), + values=tensor([0.8500, 0.4156, 0.3807, ..., 0.4612, 0.1880, 0.4263]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.7739, 0.8705, 0.9400, ..., 0.4166, 0.9328, 0.4141]) +tensor([0.5811, 0.5191, 0.7210, ..., 0.8257, 0.8574, 0.1320]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.708929538726807 seconds +Time: 10.589388370513916 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1483, 3029, ..., 7497007, - 7498484, 7500000]), - col_indices=tensor([ 0, 1, 2, ..., 4991, 4992, 4999]), - values=tensor([0.2037, 0.3378, 0.5245, ..., 0.5597, 0.6700, 0.6684]), +tensor(crow_indices=tensor([ 0, 1480, 2995, ..., 7496986, + 7498507, 7500000]), + col_indices=tensor([ 2, 7, 10, ..., 4984, 4996, 4997]), + values=tensor([0.8500, 0.4156, 0.3807, ..., 0.4612, 0.1880, 0.4263]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.7739, 0.8705, 0.9400, ..., 0.4166, 0.9328, 0.4141]) +tensor([0.5811, 0.5191, 0.7210, ..., 0.8257, 0.8574, 0.1320]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.708929538726807 seconds +Time: 10.589388370513916 seconds -[42.29, 39.85, 39.72, 39.53, 39.58, 40.0, 40.36, 39.5, 39.8, 40.06] -[137.56] -14.4978609085083 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.708929538726807, 'TIME_S_1KI': 0.5530043655423086, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1994.3257465744018, 'W': 137.56} -[42.29, 39.85, 39.72, 39.53, 39.58, 40.0, 40.36, 39.5, 39.8, 40.06, 40.39, 40.11, 40.04, 39.88, 41.07, 39.92, 39.57, 39.46, 39.73, 39.57] -719.275 -35.96375 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.708929538726807, 'TIME_S_1KI': 0.5530043655423086, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1994.3257465744018, 'W': 137.56, 'J_1KI': 102.98609587267761, 'W_1KI': 7.103537309579138, 'W_D': 101.59625, 'J_D': 1472.9283013260365, 'W_D_1KI': 5.246385231087013, 'J_D_1KI': 0.270921003412704} +[41.93, 40.2, 39.72, 39.76, 39.9, 40.29, 40.01, 40.65, 39.8, 39.55] +[137.31] +14.636958599090576 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19144, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.589388370513916, 'TIME_S_1KI': 0.5531439809085832, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2009.800785241127, 'W': 137.31} +[41.93, 40.2, 39.72, 39.76, 39.9, 40.29, 40.01, 40.65, 39.8, 39.55, 40.75, 39.74, 39.86, 39.82, 39.94, 40.37, 39.63, 40.91, 40.17, 39.93] +721.85 +36.0925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19144, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.589388370513916, 'TIME_S_1KI': 0.5531439809085832, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2009.800785241127, 'W': 137.31, 'J_1KI': 104.98332559763514, 'W_1KI': 7.172482239866277, 'W_D': 101.2175, 'J_D': 1481.5163570034504, 'W_D_1KI': 5.287165691600501, 'J_D_1KI': 0.27617873441289703} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.json index d0ddde8..b2ebd01 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.json +++ 
b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4054, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.937983989715576, "TIME_S_1KI": 2.6980720250901764, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1733.5753089761736, "W": 118.18, "J_1KI": 427.62094449338275, "W_1KI": 29.15145535273804, "W_D": 74.48075000000001, "J_D": 1092.5536401593092, "W_D_1KI": 18.37216329551061, "J_D_1KI": 4.531860704368675} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 5230, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.803205013275146, "TIME_S_1KI": 2.0656223734751715, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1996.9131930828094, "W": 124.05, "J_1KI": 381.8189661726213, "W_1KI": 23.718929254302104, "W_D": 87.92925, "J_D": 1415.454086117506, "W_D_1KI": 16.812476099426387, "J_D_1KI": 3.214622581152273} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.output index fbc9c8c..e008023 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.25896668434143066} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.2405378818511963} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1967, 3942, ..., 9996007, - 9997987, 10000000]), - col_indices=tensor([ 0, 1, 3, ..., 4989, 4995, 4996]), - values=tensor([0.9037, 0.0824, 0.8127, ..., 0.2074, 0.6033, 0.5497]), +tensor(crow_indices=tensor([ 0, 2034, 4065, ..., 9995925, + 9997951, 10000000]), + col_indices=tensor([ 0, 3, 4, ..., 4996, 4997, 4999]), + values=tensor([0.2630, 0.0419, 0.7378, ..., 0.8916, 0.9324, 0.9315]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6227, 0.6614, 0.9902, ..., 0.2660, 0.9614, 0.3260]) +tensor([0.7523, 0.5983, 0.1032, ..., 0.8622, 0.3372, 0.3026]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 0.25896668434143066 seconds +Time: 0.2405378818511963 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4054', '-ss', '5000', '-sd', '0.4'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.937983989715576} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4365', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 8.763044357299805} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2032, 3978, ..., 9995983, - 9998000, 10000000]), - col_indices=tensor([ 1, 2, 3, ..., 4988, 4993, 4994]), - values=tensor([0.2457, 0.5907, 0.9941, ..., 0.3357, 0.2301, 0.2269]), +tensor(crow_indices=tensor([ 0, 1961, 3998, ..., 9995938, + 9997917, 10000000]), + col_indices=tensor([ 3, 4, 5, ..., 4996, 4998, 4999]), + values=tensor([0.0989, 0.8910, 0.1341, ..., 0.2098, 0.3959, 0.3132]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2230, 0.0807, 0.5998, ..., 0.1430, 0.1498, 0.1360]) +tensor([0.4158, 0.1466, 0.9233, ..., 0.3419, 0.5659, 0.6621]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,16 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.937983989715576 seconds +Time: 8.763044357299805 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '5230', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.803205013275146} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2032, 3978, ..., 9995983, - 9998000, 10000000]), - col_indices=tensor([ 1, 2, 3, ..., 4988, 4993, 4994]), - values=tensor([0.2457, 0.5907, 0.9941, ..., 0.3357, 0.2301, 0.2269]), +tensor(crow_indices=tensor([ 0, 1987, 4084, ..., 9995959, + 9998006, 10000000]), + col_indices=tensor([ 5, 9, 13, ..., 4991, 4995, 4998]), + values=tensor([0.2998, 0.3930, 0.1334, ..., 0.0607, 0.7503, 0.6485]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2230, 0.0807, 0.5998, ..., 0.1430, 0.1498, 0.1360]) +tensor([0.5362, 0.2142, 0.9699, ..., 0.8371, 0.3959, 0.8353]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,13 +56,30 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 10.937983989715576 seconds +Time: 10.803205013275146 seconds -[40.89, 40.0, 39.72, 39.6, 39.91, 39.64, 39.68, 39.81, 39.88, 45.41] -[118.18] -14.66893982887268 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4054, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.937983989715576, 'TIME_S_1KI': 2.6980720250901764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1733.5753089761736, 'W': 118.18} -[40.89, 40.0, 39.72, 39.6, 39.91, 39.64, 39.68, 39.81, 39.88, 45.41, 40.78, 40.13, 44.65, 73.6, 81.22, 73.52, 67.52, 42.65, 48.49, 40.85] -873.9849999999999 -43.69924999999999 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4054, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.937983989715576, 'TIME_S_1KI': 2.6980720250901764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1733.5753089761736, 'W': 118.18, 'J_1KI': 427.62094449338275, 'W_1KI': 29.15145535273804, 'W_D': 74.48075000000001, 'J_D': 1092.5536401593092, 'W_D_1KI': 18.37216329551061, 'J_D_1KI': 4.531860704368675} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1987, 4084, ..., 9995959, + 9998006, 10000000]), + col_indices=tensor([ 5, 9, 13, ..., 4991, 4995, 4998]), + values=tensor([0.2998, 0.3930, 0.1334, ..., 0.0607, 0.7503, 0.6485]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.5362, 0.2142, 0.9699, ..., 0.8371, 0.3959, 0.8353]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.803205013275146 seconds + +[41.24, 40.18, 39.91, 41.32, 39.92, 41.22, 40.71, 39.64, 39.82, 39.65] +[124.05] +16.097647666931152 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 5230, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.803205013275146, 'TIME_S_1KI': 2.0656223734751715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1996.9131930828094, 'W': 124.05} +[41.24, 40.18, 39.91, 41.32, 39.92, 41.22, 40.71, 39.64, 39.82, 39.65, 41.69, 39.74, 40.19, 39.63, 40.0, 39.64, 39.86, 39.81, 39.72, 39.63] +722.415 +36.12075 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 5230, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.803205013275146, 'TIME_S_1KI': 2.0656223734751715, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1996.9131930828094, 'W': 124.05, 'J_1KI': 381.8189661726213, 'W_1KI': 23.718929254302104, 'W_D': 87.92925, 'J_D': 1415.454086117506, 'W_D_1KI': 16.812476099426387, 'J_D_1KI': 3.214622581152273} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.json index a25c6eb..cf977fd 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3758, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.1781005859375, "TIME_S_1KI": 2.708382274065327, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1956.303444838524, "W": 122.9, "J_1KI": 520.570368504131, "W_1KI": 32.70356572645024, "W_D": 86.40525000000001, "J_D": 1375.3855836219193, "W_D_1KI": 22.992349654071315, "J_D_1KI": 6.118240993632601} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3816, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.633152484893799, "TIME_S_1KI": 2.7864655358736368, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2084.05164506197, "W": 121.69, "J_1KI": 546.1351271126755, "W_1KI": 31.889412997903563, "W_D": 84.58225, "J_D": 1448.5477628033757, "W_D_1KI": 22.165159853249474, "J_D_1KI": 5.808480045400806} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.output index fd072be..bc52cdb 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.output +++ 
b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.29501914978027344} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.3140254020690918} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2490, 5019, ..., 12495032, - 12497514, 12500000]), - col_indices=tensor([ 0, 1, 2, ..., 4992, 4993, 4994]), - values=tensor([0.4521, 0.6419, 0.1807, ..., 0.6429, 0.2936, 0.1963]), +tensor(crow_indices=tensor([ 0, 2477, 4942, ..., 12494970, + 12497427, 12500000]), + col_indices=tensor([ 2, 9, 10, ..., 4997, 4998, 4999]), + values=tensor([0.2607, 0.2897, 0.6113, ..., 0.8033, 0.0026, 0.0427]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.6822, 0.2314, 0.5095, ..., 0.2635, 0.2792, 0.8048]) +tensor([0.0281, 0.1769, 0.9631, ..., 0.7506, 0.8075, 0.4975]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 0.29501914978027344 seconds +Time: 0.3140254020690918 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3559', '-ss', '5000', '-sd', '0.5'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.942713260650635} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3343', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.196410894393921} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2478, 4936, ..., 12495093, - 12497610, 12500000]), - col_indices=tensor([ 1, 2, 3, ..., 4992, 4995, 4998]), - values=tensor([0.6608, 0.6509, 0.8650, ..., 0.2551, 0.6130, 0.3679]), +tensor(crow_indices=tensor([ 0, 2432, 4927, ..., 12494982, + 12497476, 12500000]), + col_indices=tensor([ 4, 6, 7, ..., 4997, 4998, 4999]), + values=tensor([0.9429, 0.5134, 0.0149, ..., 0.4540, 0.0443, 0.1429]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.9258, 0.3878, 0.0027, ..., 0.4707, 0.4169, 0.1792]) +tensor([0.5035, 0.8624, 0.0188, ..., 0.3732, 0.5744, 0.8509]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 9.942713260650635 seconds +Time: 9.196410894393921 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3758', '-ss', '5000', '-sd', '0.5'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.1781005859375} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3816', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.633152484893799} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2527, 4930, ..., 12495017, - 12497540, 12500000]), - col_indices=tensor([ 1, 2, 3, ..., 4995, 4997, 4999]), - values=tensor([0.2576, 0.5438, 0.7818, ..., 0.1593, 0.4265, 0.7530]), +tensor(crow_indices=tensor([ 0, 2504, 4975, ..., 12494911, + 12497444, 12500000]), + col_indices=tensor([ 3, 4, 8, ..., 4988, 4993, 4995]), + values=tensor([0.1232, 0.7788, 0.1132, ..., 0.9068, 0.1197, 0.2972]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.0335, 0.1835, 0.7330, ..., 0.5684, 0.8047, 0.4810]) +tensor([0.8363, 0.8796, 0.7824, ..., 0.7053, 0.2246, 0.6262]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.1781005859375 seconds +Time: 10.633152484893799 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2527, 4930, ..., 12495017, - 12497540, 12500000]), - col_indices=tensor([ 1, 2, 3, ..., 4995, 4997, 4999]), - values=tensor([0.2576, 0.5438, 0.7818, ..., 0.1593, 0.4265, 0.7530]), +tensor(crow_indices=tensor([ 0, 2504, 4975, ..., 12494911, + 12497444, 12500000]), + col_indices=tensor([ 3, 4, 8, ..., 4988, 4993, 4995]), + values=tensor([0.1232, 0.7788, 0.1132, ..., 0.9068, 0.1197, 0.2972]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.0335, 0.1835, 0.7330, ..., 0.5684, 0.8047, 0.4810]) +tensor([0.8363, 0.8796, 0.7824, ..., 0.7053, 0.2246, 0.6262]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.1781005859375 seconds +Time: 10.633152484893799 seconds -[41.47, 40.36, 39.99, 40.33, 39.95, 40.28, 39.98, 39.88, 39.78, 45.28] -[122.9] -15.917847394943237 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3758, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.1781005859375, 'TIME_S_1KI': 2.708382274065327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1956.303444838524, 'W': 122.9} -[41.47, 40.36, 39.99, 40.33, 39.95, 40.28, 39.98, 39.88, 39.78, 45.28, 40.38, 39.63, 39.74, 44.87, 40.1, 40.15, 40.75, 40.1, 40.4, 40.08] -729.895 -36.494749999999996 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3758, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.1781005859375, 'TIME_S_1KI': 2.708382274065327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1956.303444838524, 'W': 122.9, 'J_1KI': 520.570368504131, 'W_1KI': 32.70356572645024, 'W_D': 86.40525000000001, 'J_D': 1375.3855836219193, 'W_D_1KI': 22.992349654071315, 'J_D_1KI': 6.118240993632601} +[40.51, 40.07, 40.7, 39.76, 40.61, 40.3, 40.25, 40.11, 56.38, 40.41] +[121.69] +17.12590718269348 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3816, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.633152484893799, 'TIME_S_1KI': 2.7864655358736368, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2084.05164506197, 'W': 121.69} +[40.51, 40.07, 40.7, 39.76, 40.61, 40.3, 40.25, 40.11, 56.38, 40.41, 40.56, 39.77, 40.21, 39.71, 40.16, 40.19, 39.82, 40.88, 39.84, 45.31] +742.155 +37.107749999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3816, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.633152484893799, 'TIME_S_1KI': 2.7864655358736368, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2084.05164506197, 'W': 121.69, 'J_1KI': 546.1351271126755, 'W_1KI': 31.889412997903563, 'W_D': 84.58225, 'J_D': 1448.5477628033757, 'W_D_1KI': 22.165159853249474, 'J_D_1KI': 5.808480045400806} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json index c2948bd..2dcdfe3 100644 --- 
a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 505155, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3298921585083, "TIME_S_1KI": 0.020448955584935914, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1240.863141503334, "W": 95.58, "J_1KI": 2.4564007908529737, "W_1KI": 0.1892092526056359, "W_D": 59.942499999999995, "J_D": 778.2008669131993, "W_D_1KI": 0.11866159891518444, "J_D_1KI": 0.0002349013647596964} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 496427, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.657760858535767, "TIME_S_1KI": 0.021468938753403354, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1249.036185359955, "W": 96.43, "J_1KI": 2.5160520788755543, "W_1KI": 0.19424809690045064, "W_D": 60.4765, "J_D": 783.3385550546647, "W_D_1KI": 0.12182355109613296, "J_D_1KI": 0.0002454007358506547} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output index 107ac32..0a44d98 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,75 +1,75 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.013948440551757812} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.013672351837158203} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([3548, 3508, 4386, 3528, 2702, 3004, 3629, 4756, 1243, - 213, 2804, 1698, 689, 4639, 4580, 1578, 3327, 694, - 1408, 2610, 4665, 1701, 4464, 632, 2037, 2500, 1517, - 2177, 1389, 4628, 306, 1568, 3761, 3194, 3074, 2522, - 3705, 2681, 4246, 249, 1916, 3633, 4678, 1217, 107, - 2703, 1648, 2700, 2961, 4336, 1084, 4254, 396, 3740, - 3046, 2671, 2061, 1766, 3209, 4565, 1985, 2700, 4834, - 2805, 875, 2910, 2400, 2621, 4389, 955, 1399, 578, - 2242, 4964, 3239, 222, 1256, 3099, 3567, 2886, 3721, - 1671, 1246, 4445, 3748, 4434, 1765, 983, 1353, 3314, - 2249, 2525, 4314, 2896, 2171, 3775, 3320, 730, 2027, - 2731, 3976, 3825, 4171, 1978, 4468, 2371, 386, 1118, - 3263, 840, 3509, 4865, 3412, 2573, 1668, 4140, 1828, - 1203, 819, 4214, 2533, 3446, 643, 4924, 2902, 1393, - 4975, 841, 1924, 1159, 1396, 1327, 3531, 2008, 2330, - 3344, 0, 1785, 2268, 4522, 1792, 2828, 305, 4487, - 4986, 3210, 3476, 4418, 3986, 3188, 1206, 4837, 2877, - 2143, 1316, 3014, 3807, 339, 1928, 4332, 1721, 1955, - 1430, 1820, 1733, 132, 1124, 4910, 399, 4998, 3203, - 1066, 4770, 3787, 2390, 4240, 862, 2987, 1396, 4199, - 2140, 4278, 4725, 3767, 4419, 1019, 3708, 90, 2851, - 2610, 3655, 3402, 2040, 1712, 1375, 4589, 2905, 1572, - 545, 3985, 3399, 582, 4328, 3912, 2552, 83, 2255, - 1709, 772, 4299, 2146, 3329, 2442, 3295, 60, 173, - 543, 4997, 2966, 3912, 1602, 135, 2282, 3935, 2764, - 2342, 3756, 4573, 3705, 1470, 3025, 1498, 4276, 668, - 3561, 4033, 260, 3652, 775, 4020, 1031, 2617, 2294, - 2109, 2487, 3590, 1199, 2797, 1290, 3990]), - values=tensor([0.6994, 0.2438, 0.4802, 0.0829, 0.0677, 0.0178, 0.7638, - 0.1665, 0.8626, 0.8633, 0.8809, 0.3889, 0.5842, 0.4728, - 0.4918, 0.0860, 0.7324, 0.8491, 0.3798, 0.3500, 0.4975, - 0.0872, 0.8650, 0.3555, 0.4399, 0.2630, 0.0729, 0.3054, - 0.9674, 0.7941, 0.9749, 0.5236, 0.8844, 0.2916, 0.4218, - 0.0889, 0.1637, 0.0411, 0.1963, 0.8167, 0.6130, 0.2282, - 0.0754, 0.2471, 0.0778, 0.4752, 0.2737, 0.1262, 0.2451, - 0.2934, 0.3944, 0.0397, 0.3394, 0.7909, 0.5453, 0.0895, - 0.2329, 0.3870, 0.5830, 0.0888, 0.8460, 0.7742, 0.7374, - 0.8528, 0.2281, 0.9068, 0.0092, 0.0150, 0.9568, 0.4508, - 0.2063, 0.9542, 0.6049, 0.5147, 0.9346, 0.5104, 0.1196, - 0.8281, 0.2227, 0.7282, 0.2980, 0.7830, 0.6065, 0.2936, - 0.6589, 0.1956, 0.8884, 0.6244, 0.8765, 0.9279, 0.5777, - 0.8162, 0.0894, 0.3744, 0.1591, 0.3051, 0.9299, 0.1618, - 0.7383, 0.9907, 0.5121, 0.6397, 0.8338, 0.9391, 0.2607, - 0.4098, 0.6073, 0.2048, 0.8476, 0.1799, 0.1533, 0.5127, - 0.3612, 0.8614, 0.5878, 0.7167, 0.1917, 0.2581, 0.3381, - 0.5246, 0.2437, 0.9851, 0.9032, 0.6527, 0.5590, 0.5454, - 0.0253, 0.0710, 0.1587, 0.3574, 0.7354, 0.6182, 0.5365, - 0.0479, 0.8974, 0.6075, 0.5864, 0.7635, 0.4139, 0.6734, - 0.0016, 0.0763, 0.3633, 0.3792, 0.6630, 0.0919, 0.1222, - 0.5443, 0.8587, 0.0627, 0.1060, 0.4814, 0.8481, 0.2733, - 0.7553, 0.9339, 0.1865, 0.2260, 0.9547, 0.8541, 0.1158, - 0.0258, 0.5314, 0.6595, 0.5573, 0.7953, 0.3786, 0.1641, - 0.8997, 0.2507, 0.1855, 0.6951, 0.2863, 0.1627, 0.3079, - 0.5000, 0.3625, 0.8186, 0.3705, 0.2957, 0.1551, 0.0216, - 0.3714, 0.8284, 0.9522, 0.8937, 0.5141, 0.0703, 0.2182, - 0.9274, 0.7097, 0.4349, 0.6001, 0.7581, 0.1855, 0.1138, - 0.0069, 0.0143, 0.6779, 0.4223, 0.2934, 0.1234, 0.5974, - 0.7303, 0.9182, 0.4432, 0.6166, 0.0534, 0.9601, 0.1664, - 0.7453, 0.2693, 0.7496, 0.1561, 0.1695, 0.4247, 0.5083, - 0.7464, 0.9108, 0.9708, 0.4346, 0.1849, 0.3357, 0.6306, - 0.3234, 0.0643, 0.0684, 0.2529, 
0.3070, 0.5381, 0.4691, - 0.3912, 0.0111, 0.6019, 0.4700, 0.8282, 0.9967, 0.0138, - 0.0331, 0.4050, 0.1544, 0.2207, 0.6016, 0.9303, 0.9139, - 0.9840, 0.7431, 0.3482, 0.1124, 0.6413]), + col_indices=tensor([1607, 857, 2707, 1655, 377, 2123, 3867, 1019, 3779, + 1224, 3942, 1277, 4415, 370, 1274, 1912, 3600, 4641, + 1750, 697, 1327, 1211, 499, 4016, 4984, 1379, 2975, + 1653, 1285, 4610, 1155, 3614, 4322, 1915, 2500, 381, + 2693, 414, 2249, 2860, 3449, 4768, 1634, 2272, 4594, + 4927, 1252, 3880, 4750, 3810, 3647, 533, 2751, 865, + 3062, 1306, 4573, 4711, 231, 3330, 1715, 3202, 10, + 1964, 137, 4753, 1165, 3470, 2205, 1031, 379, 3888, + 3125, 447, 2837, 4878, 221, 3121, 2192, 4788, 3286, + 3799, 3263, 978, 244, 137, 1687, 2798, 3845, 3613, + 4502, 4918, 745, 4732, 930, 560, 4649, 3658, 574, + 2850, 3592, 999, 2304, 3011, 3859, 3739, 2260, 249, + 1343, 1725, 1219, 1500, 3496, 3857, 1945, 3082, 4689, + 3901, 1685, 3601, 3921, 1961, 4582, 3901, 4195, 3832, + 956, 4465, 3899, 2309, 703, 4389, 268, 4275, 3941, + 2231, 1877, 1504, 444, 1274, 1929, 3642, 2552, 4, + 4677, 4523, 2296, 1889, 3349, 552, 1599, 596, 3330, + 1512, 2750, 2350, 3985, 3895, 4121, 4275, 3023, 104, + 2980, 2906, 3556, 2364, 2777, 4911, 1938, 236, 4193, + 1160, 570, 2998, 2440, 4191, 1008, 1621, 1138, 4025, + 4629, 1420, 266, 4048, 3171, 498, 4165, 4636, 2836, + 1770, 3832, 4918, 4729, 1903, 1852, 788, 2871, 3098, + 292, 2589, 2376, 1842, 3969, 2347, 880, 2247, 2426, + 3020, 1440, 1968, 4986, 569, 1816, 4279, 4525, 1371, + 3422, 417, 2204, 2316, 1379, 4875, 385, 833, 1277, + 2243, 3297, 1469, 457, 4179, 3341, 4135, 287, 4452, + 987, 43, 4578, 3887, 1501, 3041, 2186, 666, 4406, + 1704, 1302, 3868, 1796, 3646, 1683, 1522]), + values=tensor([0.2935, 0.0859, 0.9304, 0.7948, 0.9190, 0.3884, 0.7893, + 0.1355, 0.3290, 0.3483, 0.2099, 0.4594, 0.7582, 0.6084, + 0.9762, 0.9532, 0.7552, 0.1813, 0.6683, 0.3421, 0.1868, + 0.5633, 0.7820, 0.2497, 0.8361, 0.4643, 0.3834, 0.9878, + 0.8160, 0.4262, 0.1524, 0.0504, 0.6857, 0.1713, 0.4663, + 0.5653, 0.8330, 0.6854, 0.2580, 0.4840, 0.3685, 0.6138, + 0.8690, 0.3028, 0.2150, 0.5241, 0.9010, 0.5133, 0.3491, + 0.4275, 0.5819, 0.5560, 0.7292, 0.4234, 0.4762, 0.6477, + 0.5506, 0.7339, 0.3070, 0.7254, 0.3249, 0.8759, 0.4423, + 0.8702, 0.6634, 0.2213, 0.4275, 0.1365, 0.6728, 0.5386, + 0.9000, 0.5939, 0.2778, 0.8131, 0.3006, 0.8190, 0.3607, + 0.3168, 0.6483, 0.1173, 0.8231, 0.0485, 0.8550, 0.6294, + 0.4451, 0.5817, 0.2900, 0.1671, 0.4239, 0.8222, 0.5487, + 0.4980, 0.9519, 0.0910, 0.8955, 0.5052, 0.5213, 0.3088, + 0.2997, 0.1236, 0.4790, 0.8734, 0.4126, 0.8474, 0.1387, + 0.5407, 0.3477, 0.3248, 0.2457, 0.4639, 0.8765, 0.2862, + 0.5571, 0.1455, 0.6300, 0.5035, 0.4532, 0.8256, 0.7647, + 0.1876, 0.8035, 0.5568, 0.1494, 0.0858, 0.9868, 0.3051, + 0.5198, 0.9205, 0.2230, 0.3627, 0.6087, 0.5034, 0.0968, + 0.7338, 0.3205, 0.3348, 0.0030, 0.0722, 0.3672, 0.3951, + 0.1451, 0.1398, 0.2322, 0.0853, 0.6617, 0.9107, 0.3505, + 0.8042, 0.7946, 0.3165, 0.9474, 0.4198, 0.2242, 0.7137, + 0.8159, 0.6310, 0.5006, 0.9372, 0.0408, 0.6015, 0.6639, + 0.7554, 0.5447, 0.9076, 0.1438, 0.7296, 0.0666, 0.1252, + 0.4065, 0.9534, 0.1652, 0.3494, 0.8457, 0.5340, 0.3004, + 0.3538, 0.5555, 0.2346, 0.6812, 0.4412, 0.2548, 0.9333, + 0.6195, 0.7748, 0.1408, 0.4175, 0.4239, 0.6040, 0.2050, + 0.3661, 0.7942, 0.7772, 0.0067, 0.0521, 0.0969, 0.9952, + 0.6633, 0.0709, 0.1616, 0.9515, 0.4494, 0.1592, 0.1760, + 0.3132, 0.6727, 0.1114, 0.0992, 0.7514, 0.2252, 0.3175, + 0.7564, 0.8663, 0.5340, 0.3748, 0.5192, 0.5988, 0.5768, + 0.7467, 0.9139, 0.2977, 
0.5734, 0.1970, 0.9873, 0.2625, + 0.6294, 0.9209, 0.6985, 0.9089, 0.7503, 0.3946, 0.2695, + 0.2452, 0.8985, 0.8680, 0.3407, 0.7951, 0.5222, 0.4675, + 0.3550, 0.9854, 0.8600, 0.2723, 0.6599, 0.7884, 0.5958, + 0.3548, 0.4698, 0.0088, 0.2374, 0.3375]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.3949, 0.9428, 0.0102, ..., 0.0310, 0.9492, 0.7070]) +tensor([0.8445, 0.0461, 0.7177, ..., 0.0951, 0.9998, 0.0843]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -77,80 +77,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.013948440551757812 seconds +Time: 0.013672351837158203 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75277', '-ss', '5000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.5646839141845703} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '76797', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.6243436336517334} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([3746, 1654, 2453, 1836, 1687, 4126, 577, 989, 1161, - 2149, 3910, 3295, 4472, 133, 3358, 1352, 3096, 3601, - 3758, 2512, 1092, 4489, 1464, 1660, 3070, 3361, 4966, - 822, 3500, 236, 2632, 1344, 3148, 1004, 2075, 4538, - 1923, 4311, 3791, 3093, 1373, 470, 112, 1162, 2705, - 3514, 4485, 3748, 3597, 4486, 4629, 78, 32, 3433, - 1822, 3440, 1230, 93, 1755, 4162, 1309, 3789, 3501, - 2710, 1926, 2165, 381, 2357, 4887, 3442, 1756, 2858, - 2903, 4359, 3016, 2687, 1689, 4625, 1621, 3805, 3094, - 1702, 3528, 1035, 4698, 4982, 1451, 1771, 2089, 3195, - 4919, 4133, 1397, 4984, 2564, 4549, 4619, 2832, 4040, - 4237, 2079, 1796, 1577, 4625, 3108, 1608, 19, 3574, - 3985, 1287, 3355, 4562, 3138, 4018, 4235, 751, 3240, - 1452, 49, 2916, 1280, 2827, 2493, 4891, 2490, 4843, - 2541, 1858, 112, 4172, 3878, 2893, 375, 3701, 1061, - 2843, 3468, 53, 4322, 1606, 2648, 4201, 4904, 4969, - 3035, 4661, 1890, 3624, 2603, 426, 3014, 1375, 437, - 1036, 1237, 4055, 3154, 3403, 3642, 4899, 4262, 474, - 2778, 534, 2901, 1174, 2089, 4005, 2324, 163, 146, - 519, 4127, 1342, 708, 4532, 125, 4427, 212, 2077, - 4307, 3955, 3476, 203, 3487, 4805, 2720, 1985, 727, - 17, 534, 1652, 733, 3208, 1391, 4340, 3700, 1893, - 162, 3604, 4298, 2419, 2136, 802, 564, 129, 2585, - 1324, 2167, 2710, 4323, 116, 1401, 831, 2891, 2913, - 1398, 372, 4148, 3575, 2563, 4021, 40, 3400, 4783, - 824, 3068, 2795, 4664, 2245, 2717, 3120, 1588, 2273, - 2302, 3501, 1853, 4736, 2336, 2390, 3070, 2778, 2782, - 746, 4888, 427, 933, 3852, 4383, 1889]), - values=tensor([0.9927, 0.0340, 0.4488, 0.4635, 0.2230, 0.1466, 0.4603, - 0.2785, 0.5078, 0.0027, 0.7560, 0.4421, 0.3531, 0.7573, - 0.7663, 0.5287, 0.4866, 0.0160, 0.0811, 0.2667, 0.0905, - 0.2771, 0.9262, 0.8504, 0.9933, 0.1455, 0.8531, 0.9069, - 0.5790, 0.0929, 0.4671, 0.6608, 0.9664, 
0.1896, 0.7953, - 0.9833, 0.4291, 0.7412, 0.1386, 0.5823, 0.9668, 0.6184, - 0.6067, 0.5096, 0.9072, 0.2000, 0.6029, 0.7709, 0.3337, - 0.1225, 0.3581, 0.4923, 0.5670, 0.5531, 0.2219, 0.3460, - 0.1648, 0.9635, 0.0634, 0.0066, 0.2847, 0.4656, 0.8836, - 0.9572, 0.7046, 0.5077, 0.6573, 0.4027, 0.0704, 0.6249, - 0.7137, 0.0161, 0.2786, 0.6964, 0.9732, 0.0101, 0.9546, - 0.7324, 0.8008, 0.6620, 0.3443, 0.9296, 0.6939, 0.5327, - 0.2792, 0.5590, 0.4343, 0.4000, 0.5836, 0.6246, 0.3293, - 0.4244, 0.5279, 0.7005, 0.2154, 0.0350, 0.9262, 0.9148, - 0.3048, 0.0077, 0.5649, 0.4772, 0.8216, 0.6110, 0.2610, - 0.1616, 0.7469, 0.5265, 0.3053, 0.8103, 0.5940, 0.9742, - 0.5862, 0.5529, 0.3872, 0.3034, 0.6804, 0.6806, 0.6660, - 0.5771, 0.6250, 0.9114, 0.6649, 0.3603, 0.1633, 0.2146, - 0.1054, 0.1040, 0.0319, 0.4499, 0.9767, 0.8617, 0.7495, - 0.1652, 0.5616, 0.5919, 0.4704, 0.2766, 0.9966, 0.7157, - 0.1989, 0.6419, 0.8518, 0.4191, 0.4983, 0.6045, 0.7545, - 0.3583, 0.1657, 0.2221, 0.8782, 0.5595, 0.9120, 0.4869, - 0.2789, 0.3396, 0.9955, 0.5682, 0.9596, 0.9327, 0.6012, - 0.2415, 0.5228, 0.5492, 0.9986, 0.8668, 0.0412, 0.8168, - 0.4480, 0.6255, 0.6348, 0.6366, 0.4660, 0.8412, 0.9469, - 0.3182, 0.0180, 0.7704, 0.3602, 0.4075, 0.8083, 0.5267, - 0.5330, 0.4008, 0.8286, 0.6612, 0.5353, 0.6215, 0.4553, - 0.1920, 0.3166, 0.3250, 0.3744, 0.5410, 0.8495, 0.8267, - 0.2666, 0.2654, 0.6447, 0.8392, 0.9176, 0.4756, 0.9542, - 0.8318, 0.5561, 0.5761, 0.1449, 0.0902, 0.9651, 0.4745, - 0.1336, 0.4136, 0.1136, 0.8153, 0.3693, 0.4404, 0.2291, - 0.9951, 0.7922, 0.8470, 0.5195, 0.9072, 0.0501, 0.5628, - 0.6200, 0.3160, 0.6988, 0.7319, 0.9009, 0.3185, 0.5934, - 0.7917, 0.9332, 0.5038, 0.7465, 0.1646, 0.8555, 0.0988, - 0.4002, 0.8098, 0.8642, 0.7419, 0.3377, 0.6378, 0.4276, - 0.2050, 0.6970, 0.0429, 0.1896, 0.1443]), + col_indices=tensor([2224, 901, 560, 4487, 2401, 4268, 1055, 3174, 2514, + 4628, 417, 1293, 283, 2617, 1273, 1300, 954, 2243, + 665, 4978, 900, 1399, 4956, 3320, 254, 3754, 3259, + 1245, 2943, 3607, 1955, 1136, 2644, 2065, 2057, 3643, + 737, 2059, 2, 2824, 2643, 4084, 2539, 899, 76, + 1778, 2706, 103, 4034, 2685, 2496, 3279, 2172, 2988, + 4687, 1400, 1617, 2291, 2671, 4892, 1535, 1569, 1946, + 1331, 4529, 426, 3100, 3373, 2068, 2529, 2281, 4153, + 4330, 2816, 2719, 517, 1330, 3451, 1337, 1345, 4571, + 969, 2427, 2353, 4835, 3581, 1541, 1980, 4045, 4808, + 744, 1382, 2231, 2582, 10, 4459, 1042, 1662, 1663, + 408, 1826, 918, 1933, 1436, 4819, 4205, 4263, 2250, + 781, 3250, 1582, 4791, 1519, 405, 3575, 4281, 3480, + 52, 3408, 1781, 2609, 1686, 3110, 1152, 882, 2887, + 2046, 706, 1393, 3262, 2943, 715, 560, 446, 368, + 2993, 262, 2130, 2870, 2462, 2556, 4751, 4706, 517, + 4798, 4841, 232, 4242, 4325, 1651, 4102, 2649, 1082, + 4845, 1319, 3814, 4728, 4864, 4319, 266, 2699, 300, + 4406, 4721, 1321, 691, 1022, 4639, 3522, 2092, 867, + 1289, 2134, 1110, 4147, 3643, 2026, 654, 3282, 4848, + 4856, 4042, 2653, 2361, 3169, 591, 658, 3314, 2550, + 2003, 721, 3780, 1784, 3281, 3487, 1239, 589, 4011, + 3794, 2166, 2518, 2088, 2171, 280, 342, 338, 1293, + 3167, 4666, 2901, 4022, 4194, 7, 4820, 2924, 1651, + 1063, 1631, 435, 2097, 3342, 1741, 4703, 565, 4713, + 1667, 1287, 4676, 3789, 515, 4561, 1404, 2038, 1735, + 574, 1523, 4313, 3858, 4692, 4352, 739, 3981, 1329, + 3785, 950, 233, 1431, 286, 4479, 642]), + values=tensor([0.6941, 0.3677, 0.5240, 0.4610, 0.0202, 0.6382, 0.9529, + 0.4945, 0.0160, 0.8633, 0.1117, 0.3876, 0.9143, 0.4952, + 0.2721, 0.5970, 0.1783, 0.4260, 0.1500, 0.9569, 0.2314, + 0.3098, 0.0146, 0.5883, 0.1683, 0.7106, 
0.7539, 0.9040, + 0.0577, 0.1833, 0.1629, 0.4590, 0.6075, 0.6114, 0.7252, + 0.8482, 0.7719, 0.1553, 0.9991, 0.8762, 0.2404, 0.5361, + 0.4548, 0.4277, 0.4169, 0.3537, 0.4541, 0.1809, 0.7326, + 0.4183, 0.2039, 0.7961, 0.1242, 0.3250, 0.7532, 0.6406, + 0.3883, 0.9653, 0.1961, 0.9691, 0.9711, 0.9660, 0.0846, + 0.1272, 0.0027, 0.6300, 0.3468, 0.5874, 0.1433, 0.9471, + 0.5533, 0.9603, 0.5477, 0.7905, 0.4073, 0.1762, 0.6108, + 0.6014, 0.4826, 0.8434, 0.1882, 0.1969, 0.2073, 0.6108, + 0.8953, 0.4950, 0.0907, 0.5290, 0.4499, 0.4099, 0.2818, + 0.6970, 0.1777, 0.5340, 0.7938, 0.6380, 0.9189, 0.3527, + 0.0457, 0.4255, 0.0173, 0.7114, 0.0874, 0.4291, 0.2530, + 0.1327, 0.8612, 0.1410, 0.0463, 0.6314, 0.9230, 0.4217, + 0.8635, 0.9699, 0.9087, 0.0365, 0.9133, 0.2560, 0.6137, + 0.6826, 0.7329, 0.2289, 0.4035, 0.9633, 0.8263, 0.1760, + 0.4585, 0.6410, 0.1561, 0.0363, 0.1268, 0.5676, 0.7234, + 0.0420, 0.7554, 0.1950, 0.0735, 0.9761, 0.6034, 0.1078, + 0.4689, 0.0620, 0.5899, 0.6638, 0.8250, 0.9025, 0.9772, + 0.5160, 0.0814, 0.2275, 0.7463, 0.9167, 0.0340, 0.6620, + 0.6094, 0.3934, 0.3484, 0.5317, 0.0033, 0.8545, 0.5061, + 0.6083, 0.6540, 0.5515, 0.2964, 0.1120, 0.6021, 0.6347, + 0.3907, 0.2739, 0.3659, 0.3709, 0.6294, 0.4562, 0.7659, + 0.5239, 0.5849, 0.2870, 0.6230, 0.1707, 0.0297, 0.9162, + 0.3474, 0.4202, 0.5536, 0.7120, 0.0212, 0.2778, 0.0910, + 0.7705, 0.5502, 0.7796, 0.5337, 0.3633, 0.7268, 0.9379, + 0.1172, 0.6666, 0.1324, 0.4072, 0.3612, 0.1135, 0.0575, + 0.1471, 0.4608, 0.5917, 0.8388, 0.4952, 0.0199, 0.6047, + 0.9696, 0.1826, 0.1339, 0.7125, 0.7144, 0.2370, 0.3706, + 0.8566, 0.3904, 0.0904, 0.1191, 0.1595, 0.7751, 0.7993, + 0.7943, 0.0017, 0.3810, 0.0843, 0.0930, 0.2029, 0.8088, + 0.0161, 0.4011, 0.1640, 0.3499, 0.7508, 0.6681, 0.8892, + 0.2737, 0.7342, 0.9081, 0.5222, 0.0526, 0.8172, 0.6113, + 0.5203, 0.1084, 0.8601, 0.6553, 0.9590]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.0748, 0.8455, 0.5581, ..., 0.9449, 0.9600, 0.8816]) +tensor([0.9332, 0.9689, 0.0023, ..., 0.6824, 0.5938, 0.3117]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -158,80 +158,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 1.5646839141845703 seconds +Time: 1.6243436336517334 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '505155', '-ss', '5000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3298921585083} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '496427', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.657760858535767} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1247, 4227, 2384, 1324, 2688, 777, 3611, 2096, 3777, - 4017, 3060, 3853, 275, 2455, 1734, 3565, 37, 2244, - 1733, 4350, 1234, 2896, 3213, 1561, 1980, 4472, 4663, - 506, 4951, 3982, 1179, 751, 4549, 4447, 2215, 404, - 243, 3834, 4527, 3698, 4091, 3198, 2812, 3614, 4523, - 1643, 1713, 1084, 2707, 4, 4057, 1938, 3151, 2591, - 736, 3345, 437, 2659, 175, 3499, 4582, 1472, 654, - 3080, 4209, 728, 2500, 4977, 4359, 4315, 4550, 1180, - 65, 1612, 4551, 4050, 2941, 4296, 4434, 1223, 1238, - 1086, 3880, 4530, 1316, 2102, 4761, 3908, 1050, 3705, - 3194, 4362, 3882, 122, 751, 3713, 3799, 1795, 2520, - 1510, 2238, 4244, 1885, 2083, 3076, 2704, 4833, 3679, - 4672, 3718, 2996, 4020, 4241, 4395, 853, 4475, 4071, - 1868, 2791, 21, 4190, 1530, 3067, 1932, 3665, 3853, - 1426, 1044, 3601, 1259, 2412, 133, 3883, 2099, 3331, - 2256, 2974, 3456, 1750, 1889, 4841, 2200, 4977, 1425, - 1536, 158, 2655, 3709, 3160, 4144, 3380, 1076, 1185, - 3818, 1983, 1852, 4777, 1300, 431, 3274, 3575, 4837, - 325, 4137, 4592, 671, 998, 3275, 4613, 992, 3846, - 2916, 2447, 2605, 2672, 2276, 4850, 4965, 1297, 694, - 259, 1199, 4640, 4880, 3240, 1438, 11, 2797, 2545, - 3917, 4254, 214, 1182, 3713, 4287, 4785, 2360, 1203, - 4398, 3290, 2436, 3411, 3739, 928, 1526, 663, 2059, - 3931, 404, 1876, 3217, 4800, 4377, 4206, 877, 1100, - 1979, 2229, 3016, 180, 945, 4382, 2217, 3064, 609, - 1772, 4045, 3022, 1213, 4864, 3162, 1697, 2645, 550, - 2485, 4784, 4550, 726, 4920, 1331, 357, 3709, 4153, - 3528, 2484, 3864, 1866, 4322, 727, 4703]), - values=tensor([0.4663, 0.4340, 0.1210, 0.0765, 0.6336, 0.1902, 0.3091, - 0.4203, 0.8220, 0.0850, 0.1025, 0.1361, 0.5908, 0.4685, - 0.3710, 0.6463, 0.1664, 0.6973, 0.9489, 0.6122, 0.1354, - 0.4027, 0.3554, 0.9045, 0.3464, 0.4691, 0.3320, 0.2792, - 0.0515, 0.5607, 0.4585, 0.0483, 0.1589, 0.9453, 0.6795, - 0.0575, 0.3527, 0.3951, 0.3470, 0.3333, 0.5355, 0.7562, - 0.1077, 0.3035, 0.4048, 0.6937, 0.2967, 0.6561, 0.1737, - 0.9035, 0.4547, 0.3603, 0.1408, 0.4830, 0.8065, 0.3582, - 0.5455, 0.5723, 0.4523, 0.5057, 0.6324, 0.9028, 0.0577, - 0.4956, 0.6657, 0.9848, 0.6391, 0.6395, 0.4769, 0.0056, - 0.1044, 0.2976, 0.6221, 0.9725, 0.0205, 0.3420, 0.9773, - 0.7515, 0.8006, 0.2336, 0.4394, 0.9756, 0.3057, 0.6186, - 0.7004, 0.6806, 0.0982, 0.4285, 0.8466, 0.0638, 0.7804, - 0.2686, 0.1423, 0.1658, 0.0440, 0.8558, 0.2543, 0.9629, - 0.0443, 0.2812, 0.7112, 0.2679, 0.0325, 0.1914, 0.4697, - 0.5212, 0.5036, 0.8278, 0.2212, 0.7637, 0.6411, 0.8324, - 0.1085, 0.7903, 0.3041, 0.7685, 0.7610, 0.1247, 0.0153, - 0.0898, 0.0347, 0.7987, 0.9119, 0.5255, 0.4670, 0.2413, - 0.8008, 0.0124, 0.0719, 0.3268, 0.5991, 0.1237, 0.5734, - 0.5860, 0.8964, 0.6851, 0.7980, 0.9940, 0.8001, 0.3866, - 0.4921, 0.3129, 0.6180, 0.1949, 0.6009, 0.4868, 0.5484, - 0.3765, 0.9472, 0.7499, 0.8386, 0.8281, 0.0445, 0.8521, - 0.1827, 0.1424, 0.9964, 0.2930, 0.6405, 0.8880, 0.8455, - 0.8730, 0.0256, 0.3303, 0.4997, 0.7068, 0.9986, 0.4515, - 0.8492, 0.9495, 0.7252, 0.1393, 0.3125, 0.0955, 0.0411, - 0.0855, 0.4594, 0.0571, 0.4760, 0.9753, 0.9680, 0.6765, - 0.6632, 0.7732, 0.2205, 0.7219, 0.1231, 0.5507, 0.9386, - 0.9853, 0.1484, 0.5471, 0.2415, 0.3412, 0.3970, 0.9721, - 0.4075, 0.7397, 0.6041, 0.4919, 0.1150, 0.1028, 0.3707, - 0.5907, 0.4305, 0.9162, 0.9956, 0.3282, 0.6112, 0.6540, - 0.0961, 0.8665, 0.2552, 0.6175, 0.4850, 0.4310, 0.1165, - 0.3274, 0.7923, 0.1515, 0.5293, 0.8418, 0.1450, 0.8268, - 0.9665, 0.7626, 0.7605, 0.9986, 
0.9489, 0.8011, 0.9290, - 0.5451, 0.8590, 0.5389, 0.0080, 0.8363, 0.8570, 0.5734, - 0.7613, 0.9018, 0.0697, 0.9293, 0.2543, 0.2531, 0.2854, - 0.3722, 0.6889, 0.4487, 0.3475, 0.2897]), + col_indices=tensor([1456, 4108, 1825, 4211, 4006, 2009, 4794, 4629, 3634, + 4848, 1328, 2172, 3777, 2426, 4781, 1153, 688, 3242, + 2166, 3793, 832, 1187, 491, 2413, 2550, 3996, 4467, + 4197, 4228, 4763, 2049, 3721, 3333, 1832, 2295, 1871, + 2507, 1917, 2328, 386, 2663, 3427, 92, 2215, 884, + 831, 4189, 3965, 2582, 4594, 2311, 1985, 3332, 3783, + 1166, 2640, 712, 1543, 3187, 4411, 2385, 76, 4213, + 3202, 109, 3740, 4838, 4865, 1693, 4224, 4919, 3814, + 2015, 3428, 1352, 4341, 1472, 2797, 2890, 4076, 4688, + 3016, 2258, 2816, 3403, 2779, 3008, 2741, 3872, 4297, + 2744, 3061, 1128, 1053, 2061, 4640, 4375, 720, 711, + 4960, 2985, 1442, 2777, 3995, 402, 2650, 2504, 3872, + 3802, 4082, 1303, 3783, 3433, 706, 1390, 3838, 2208, + 4107, 3973, 1734, 1507, 272, 1533, 615, 4787, 3665, + 2779, 1666, 3893, 329, 484, 3682, 3581, 3861, 1712, + 3602, 3370, 1566, 3351, 4695, 1100, 3450, 1169, 219, + 1866, 2008, 732, 2608, 2038, 1688, 4812, 3051, 3823, + 1823, 939, 3738, 2330, 4889, 4110, 4784, 1533, 4611, + 2059, 1617, 2978, 3099, 1232, 2100, 335, 4107, 2680, + 3128, 3276, 4068, 418, 1349, 3022, 3207, 239, 1757, + 4965, 2122, 1181, 2639, 3705, 1459, 440, 3554, 3043, + 4807, 3603, 4909, 3556, 523, 4148, 3900, 4779, 4147, + 3592, 3815, 3834, 346, 1568, 2888, 1453, 4360, 2938, + 2512, 2526, 770, 1001, 1840, 3310, 2261, 2368, 871, + 4581, 2300, 1652, 99, 3175, 3134, 1698, 1430, 2502, + 796, 3677, 1704, 1667, 1603, 4332, 674, 1627, 3977, + 2340, 2522, 3477, 650, 1785, 2894, 3925, 153, 849, + 3219, 884, 2150, 4713, 4341, 386, 2129]), + values=tensor([0.7648, 0.0388, 0.1971, 0.1656, 0.3027, 0.2125, 0.3792, + 0.4125, 0.6482, 0.6670, 0.7095, 0.4821, 0.4863, 0.5822, + 0.6538, 0.0534, 0.6044, 0.3759, 0.6694, 0.9058, 0.8034, + 0.2582, 0.5391, 0.3103, 0.1562, 0.1893, 0.1390, 0.4308, + 0.2867, 0.5767, 0.7856, 0.3429, 0.4270, 0.8922, 0.9477, + 0.6459, 0.8706, 0.3536, 0.3872, 0.6240, 0.3744, 0.1308, + 0.7032, 0.6133, 0.4417, 0.0247, 0.6177, 0.7538, 0.9642, + 0.7692, 0.3201, 0.7630, 0.1763, 0.9162, 0.2149, 0.5298, + 0.9979, 0.1331, 0.0152, 0.8651, 0.1298, 0.9376, 0.6580, + 0.3607, 0.7740, 0.8075, 0.7657, 0.5525, 0.6175, 0.2880, + 0.6706, 0.0327, 0.9945, 0.6498, 0.7256, 0.5117, 0.5980, + 0.3927, 0.7003, 0.3495, 0.5625, 0.7323, 0.1430, 0.8774, + 0.3007, 0.5835, 0.6654, 0.7253, 0.3632, 0.3645, 0.8047, + 0.4157, 0.7866, 0.4430, 0.2446, 0.2544, 0.2621, 0.6264, + 0.4385, 0.3533, 0.5965, 0.7609, 0.3438, 0.5220, 0.2899, + 0.2775, 0.2450, 0.8517, 0.5321, 0.0807, 0.2339, 0.9026, + 0.0430, 0.7174, 0.3348, 0.8876, 0.6517, 0.5060, 0.4026, + 0.0860, 0.4949, 0.8231, 0.6247, 0.1961, 0.6284, 0.9780, + 0.3565, 0.3141, 0.1558, 0.1707, 0.9956, 0.8038, 0.2783, + 0.2084, 0.5138, 0.6444, 0.7914, 0.3804, 0.7614, 0.5121, + 0.6257, 0.6564, 0.2095, 0.6303, 0.2489, 0.7484, 0.7121, + 0.4636, 0.5455, 0.2133, 0.5314, 0.7354, 0.9506, 0.1953, + 0.6846, 0.0267, 0.7328, 0.3577, 0.1689, 0.9587, 0.6119, + 0.8490, 0.2600, 0.6883, 0.5099, 0.9089, 0.4716, 0.1370, + 0.1882, 0.4826, 0.1770, 0.4748, 0.7396, 0.2977, 0.4567, + 0.1595, 0.0502, 0.1923, 0.3134, 0.3518, 0.5690, 0.7860, + 0.7362, 0.5825, 0.7561, 0.9342, 0.9995, 0.7983, 0.5089, + 0.1453, 0.3062, 0.0429, 0.2336, 0.8068, 0.1462, 0.1833, + 0.1836, 0.6762, 0.4474, 0.9413, 0.8858, 0.7649, 0.1029, + 0.0340, 0.8541, 0.2876, 0.7440, 0.9437, 0.4278, 0.2873, + 0.6276, 0.1069, 0.0430, 0.7398, 0.2766, 0.0315, 0.1646, + 0.8701, 0.9575, 
0.6657, 0.2313, 0.8344, 0.9596, 0.4326, + 0.3935, 0.2810, 0.4460, 0.1700, 0.0739, 0.9546, 0.3029, + 0.9901, 0.3485, 0.9910, 0.9908, 0.3133, 0.2767, 0.6857, + 0.6285, 0.8413, 0.3472, 0.1282, 0.1747, 0.8309, 0.1162, + 0.5105, 0.4325, 0.0653, 0.1299, 0.1987]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.8985, 0.0813, 0.9894, ..., 0.1805, 0.5543, 0.3501]) +tensor([0.7200, 0.1618, 0.9398, ..., 0.9808, 0.9900, 0.2750]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -239,77 +239,77 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.3298921585083 seconds +Time: 10.657760858535767 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1247, 4227, 2384, 1324, 2688, 777, 3611, 2096, 3777, - 4017, 3060, 3853, 275, 2455, 1734, 3565, 37, 2244, - 1733, 4350, 1234, 2896, 3213, 1561, 1980, 4472, 4663, - 506, 4951, 3982, 1179, 751, 4549, 4447, 2215, 404, - 243, 3834, 4527, 3698, 4091, 3198, 2812, 3614, 4523, - 1643, 1713, 1084, 2707, 4, 4057, 1938, 3151, 2591, - 736, 3345, 437, 2659, 175, 3499, 4582, 1472, 654, - 3080, 4209, 728, 2500, 4977, 4359, 4315, 4550, 1180, - 65, 1612, 4551, 4050, 2941, 4296, 4434, 1223, 1238, - 1086, 3880, 4530, 1316, 2102, 4761, 3908, 1050, 3705, - 3194, 4362, 3882, 122, 751, 3713, 3799, 1795, 2520, - 1510, 2238, 4244, 1885, 2083, 3076, 2704, 4833, 3679, - 4672, 3718, 2996, 4020, 4241, 4395, 853, 4475, 4071, - 1868, 2791, 21, 4190, 1530, 3067, 1932, 3665, 3853, - 1426, 1044, 3601, 1259, 2412, 133, 3883, 2099, 3331, - 2256, 2974, 3456, 1750, 1889, 4841, 2200, 4977, 1425, - 1536, 158, 2655, 3709, 3160, 4144, 3380, 1076, 1185, - 3818, 1983, 1852, 4777, 1300, 431, 3274, 3575, 4837, - 325, 4137, 4592, 671, 998, 3275, 4613, 992, 3846, - 2916, 2447, 2605, 2672, 2276, 4850, 4965, 1297, 694, - 259, 1199, 4640, 4880, 3240, 1438, 11, 2797, 2545, - 3917, 4254, 214, 1182, 3713, 4287, 4785, 2360, 1203, - 4398, 3290, 2436, 3411, 3739, 928, 1526, 663, 2059, - 3931, 404, 1876, 3217, 4800, 4377, 4206, 877, 1100, - 1979, 2229, 3016, 180, 945, 4382, 2217, 3064, 609, - 1772, 4045, 3022, 1213, 4864, 3162, 1697, 2645, 550, - 2485, 4784, 4550, 726, 4920, 1331, 357, 3709, 4153, - 3528, 2484, 3864, 1866, 4322, 727, 4703]), - values=tensor([0.4663, 0.4340, 0.1210, 0.0765, 0.6336, 0.1902, 0.3091, - 0.4203, 0.8220, 0.0850, 0.1025, 0.1361, 0.5908, 0.4685, - 0.3710, 0.6463, 0.1664, 0.6973, 0.9489, 0.6122, 0.1354, - 0.4027, 0.3554, 0.9045, 0.3464, 0.4691, 0.3320, 0.2792, - 0.0515, 0.5607, 0.4585, 0.0483, 0.1589, 0.9453, 0.6795, - 0.0575, 0.3527, 0.3951, 0.3470, 0.3333, 0.5355, 0.7562, - 0.1077, 0.3035, 0.4048, 0.6937, 0.2967, 0.6561, 0.1737, - 0.9035, 0.4547, 0.3603, 0.1408, 0.4830, 0.8065, 0.3582, - 0.5455, 0.5723, 0.4523, 0.5057, 0.6324, 0.9028, 0.0577, - 0.4956, 0.6657, 0.9848, 0.6391, 0.6395, 0.4769, 0.0056, - 0.1044, 0.2976, 0.6221, 0.9725, 0.0205, 0.3420, 0.9773, - 0.7515, 0.8006, 0.2336, 0.4394, 0.9756, 0.3057, 0.6186, - 0.7004, 0.6806, 0.0982, 0.4285, 0.8466, 0.0638, 0.7804, - 0.2686, 0.1423, 0.1658, 0.0440, 0.8558, 0.2543, 0.9629, - 0.0443, 0.2812, 0.7112, 0.2679, 0.0325, 0.1914, 0.4697, - 0.5212, 0.5036, 0.8278, 0.2212, 0.7637, 0.6411, 0.8324, - 
0.1085, 0.7903, 0.3041, 0.7685, 0.7610, 0.1247, 0.0153, - 0.0898, 0.0347, 0.7987, 0.9119, 0.5255, 0.4670, 0.2413, - 0.8008, 0.0124, 0.0719, 0.3268, 0.5991, 0.1237, 0.5734, - 0.5860, 0.8964, 0.6851, 0.7980, 0.9940, 0.8001, 0.3866, - 0.4921, 0.3129, 0.6180, 0.1949, 0.6009, 0.4868, 0.5484, - 0.3765, 0.9472, 0.7499, 0.8386, 0.8281, 0.0445, 0.8521, - 0.1827, 0.1424, 0.9964, 0.2930, 0.6405, 0.8880, 0.8455, - 0.8730, 0.0256, 0.3303, 0.4997, 0.7068, 0.9986, 0.4515, - 0.8492, 0.9495, 0.7252, 0.1393, 0.3125, 0.0955, 0.0411, - 0.0855, 0.4594, 0.0571, 0.4760, 0.9753, 0.9680, 0.6765, - 0.6632, 0.7732, 0.2205, 0.7219, 0.1231, 0.5507, 0.9386, - 0.9853, 0.1484, 0.5471, 0.2415, 0.3412, 0.3970, 0.9721, - 0.4075, 0.7397, 0.6041, 0.4919, 0.1150, 0.1028, 0.3707, - 0.5907, 0.4305, 0.9162, 0.9956, 0.3282, 0.6112, 0.6540, - 0.0961, 0.8665, 0.2552, 0.6175, 0.4850, 0.4310, 0.1165, - 0.3274, 0.7923, 0.1515, 0.5293, 0.8418, 0.1450, 0.8268, - 0.9665, 0.7626, 0.7605, 0.9986, 0.9489, 0.8011, 0.9290, - 0.5451, 0.8590, 0.5389, 0.0080, 0.8363, 0.8570, 0.5734, - 0.7613, 0.9018, 0.0697, 0.9293, 0.2543, 0.2531, 0.2854, - 0.3722, 0.6889, 0.4487, 0.3475, 0.2897]), + col_indices=tensor([1456, 4108, 1825, 4211, 4006, 2009, 4794, 4629, 3634, + 4848, 1328, 2172, 3777, 2426, 4781, 1153, 688, 3242, + 2166, 3793, 832, 1187, 491, 2413, 2550, 3996, 4467, + 4197, 4228, 4763, 2049, 3721, 3333, 1832, 2295, 1871, + 2507, 1917, 2328, 386, 2663, 3427, 92, 2215, 884, + 831, 4189, 3965, 2582, 4594, 2311, 1985, 3332, 3783, + 1166, 2640, 712, 1543, 3187, 4411, 2385, 76, 4213, + 3202, 109, 3740, 4838, 4865, 1693, 4224, 4919, 3814, + 2015, 3428, 1352, 4341, 1472, 2797, 2890, 4076, 4688, + 3016, 2258, 2816, 3403, 2779, 3008, 2741, 3872, 4297, + 2744, 3061, 1128, 1053, 2061, 4640, 4375, 720, 711, + 4960, 2985, 1442, 2777, 3995, 402, 2650, 2504, 3872, + 3802, 4082, 1303, 3783, 3433, 706, 1390, 3838, 2208, + 4107, 3973, 1734, 1507, 272, 1533, 615, 4787, 3665, + 2779, 1666, 3893, 329, 484, 3682, 3581, 3861, 1712, + 3602, 3370, 1566, 3351, 4695, 1100, 3450, 1169, 219, + 1866, 2008, 732, 2608, 2038, 1688, 4812, 3051, 3823, + 1823, 939, 3738, 2330, 4889, 4110, 4784, 1533, 4611, + 2059, 1617, 2978, 3099, 1232, 2100, 335, 4107, 2680, + 3128, 3276, 4068, 418, 1349, 3022, 3207, 239, 1757, + 4965, 2122, 1181, 2639, 3705, 1459, 440, 3554, 3043, + 4807, 3603, 4909, 3556, 523, 4148, 3900, 4779, 4147, + 3592, 3815, 3834, 346, 1568, 2888, 1453, 4360, 2938, + 2512, 2526, 770, 1001, 1840, 3310, 2261, 2368, 871, + 4581, 2300, 1652, 99, 3175, 3134, 1698, 1430, 2502, + 796, 3677, 1704, 1667, 1603, 4332, 674, 1627, 3977, + 2340, 2522, 3477, 650, 1785, 2894, 3925, 153, 849, + 3219, 884, 2150, 4713, 4341, 386, 2129]), + values=tensor([0.7648, 0.0388, 0.1971, 0.1656, 0.3027, 0.2125, 0.3792, + 0.4125, 0.6482, 0.6670, 0.7095, 0.4821, 0.4863, 0.5822, + 0.6538, 0.0534, 0.6044, 0.3759, 0.6694, 0.9058, 0.8034, + 0.2582, 0.5391, 0.3103, 0.1562, 0.1893, 0.1390, 0.4308, + 0.2867, 0.5767, 0.7856, 0.3429, 0.4270, 0.8922, 0.9477, + 0.6459, 0.8706, 0.3536, 0.3872, 0.6240, 0.3744, 0.1308, + 0.7032, 0.6133, 0.4417, 0.0247, 0.6177, 0.7538, 0.9642, + 0.7692, 0.3201, 0.7630, 0.1763, 0.9162, 0.2149, 0.5298, + 0.9979, 0.1331, 0.0152, 0.8651, 0.1298, 0.9376, 0.6580, + 0.3607, 0.7740, 0.8075, 0.7657, 0.5525, 0.6175, 0.2880, + 0.6706, 0.0327, 0.9945, 0.6498, 0.7256, 0.5117, 0.5980, + 0.3927, 0.7003, 0.3495, 0.5625, 0.7323, 0.1430, 0.8774, + 0.3007, 0.5835, 0.6654, 0.7253, 0.3632, 0.3645, 0.8047, + 0.4157, 0.7866, 0.4430, 0.2446, 0.2544, 0.2621, 0.6264, + 0.4385, 0.3533, 0.5965, 0.7609, 0.3438, 
0.5220, 0.2899, + 0.2775, 0.2450, 0.8517, 0.5321, 0.0807, 0.2339, 0.9026, + 0.0430, 0.7174, 0.3348, 0.8876, 0.6517, 0.5060, 0.4026, + 0.0860, 0.4949, 0.8231, 0.6247, 0.1961, 0.6284, 0.9780, + 0.3565, 0.3141, 0.1558, 0.1707, 0.9956, 0.8038, 0.2783, + 0.2084, 0.5138, 0.6444, 0.7914, 0.3804, 0.7614, 0.5121, + 0.6257, 0.6564, 0.2095, 0.6303, 0.2489, 0.7484, 0.7121, + 0.4636, 0.5455, 0.2133, 0.5314, 0.7354, 0.9506, 0.1953, + 0.6846, 0.0267, 0.7328, 0.3577, 0.1689, 0.9587, 0.6119, + 0.8490, 0.2600, 0.6883, 0.5099, 0.9089, 0.4716, 0.1370, + 0.1882, 0.4826, 0.1770, 0.4748, 0.7396, 0.2977, 0.4567, + 0.1595, 0.0502, 0.1923, 0.3134, 0.3518, 0.5690, 0.7860, + 0.7362, 0.5825, 0.7561, 0.9342, 0.9995, 0.7983, 0.5089, + 0.1453, 0.3062, 0.0429, 0.2336, 0.8068, 0.1462, 0.1833, + 0.1836, 0.6762, 0.4474, 0.9413, 0.8858, 0.7649, 0.1029, + 0.0340, 0.8541, 0.2876, 0.7440, 0.9437, 0.4278, 0.2873, + 0.6276, 0.1069, 0.0430, 0.7398, 0.2766, 0.0315, 0.1646, + 0.8701, 0.9575, 0.6657, 0.2313, 0.8344, 0.9596, 0.4326, + 0.3935, 0.2810, 0.4460, 0.1700, 0.0739, 0.9546, 0.3029, + 0.9901, 0.3485, 0.9910, 0.9908, 0.3133, 0.2767, 0.6857, + 0.6285, 0.8413, 0.3472, 0.1282, 0.1747, 0.8309, 0.1162, + 0.5105, 0.4325, 0.0653, 0.1299, 0.1987]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.8985, 0.0813, 0.9894, ..., 0.1805, 0.5543, 0.3501]) +tensor([0.7200, 0.1618, 0.9398, ..., 0.9808, 0.9900, 0.2750]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -317,13 +317,13 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.3298921585083 seconds +Time: 10.657760858535767 seconds -[40.32, 39.94, 39.79, 39.67, 40.95, 39.71, 39.34, 39.43, 39.56, 39.61] -[95.58] -12.982455968856812 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 505155, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3298921585083, 'TIME_S_1KI': 0.020448955584935914, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1240.863141503334, 'W': 95.58} -[40.32, 39.94, 39.79, 39.67, 40.95, 39.71, 39.34, 39.43, 39.56, 39.61, 39.84, 39.42, 39.39, 39.45, 39.34, 39.21, 39.4, 39.18, 39.13, 39.91] -712.75 -35.6375 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 505155, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3298921585083, 'TIME_S_1KI': 0.020448955584935914, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1240.863141503334, 'W': 95.58, 'J_1KI': 2.4564007908529737, 'W_1KI': 0.1892092526056359, 'W_D': 59.942499999999995, 'J_D': 778.2008669131993, 'W_D_1KI': 0.11866159891518444, 'J_D_1KI': 0.0002349013647596964} +[42.32, 39.93, 39.62, 40.13, 39.61, 40.04, 40.59, 39.88, 40.09, 39.51] +[96.43] +12.952775955200195 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 496427, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.657760858535767, 'TIME_S_1KI': 0.021468938753403354, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1249.036185359955, 'W': 96.43} +[42.32, 39.93, 39.62, 40.13, 39.61, 40.04, 40.59, 39.88, 40.09, 39.51, 41.79, 39.51, 39.66, 39.66, 39.52, 39.55, 39.42, 40.24, 39.94, 39.74] +719.07 +35.953500000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 496427, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 
'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.657760858535767, 'TIME_S_1KI': 0.021468938753403354, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1249.036185359955, 'W': 96.43, 'J_1KI': 2.5160520788755543, 'W_1KI': 0.19424809690045064, 'W_D': 60.4765, 'J_D': 783.3385550546647, 'W_D_1KI': 0.12182355109613296, 'J_D_1KI': 0.0002454007358506547} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.json index 1cf76cc..a1e614e 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 461197, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.877047538757324, "TIME_S_1KI": 0.023584384848030937, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1221.180625166893, "W": 95.74, "J_1KI": 2.6478503224585004, "W_1KI": 0.20759024885244265, "W_D": 60.15774999999999, "J_D": 767.3227360939383, "W_D_1KI": 0.13043829426470682, "J_D_1KI": 0.0002828255480081328} +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 479482, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.686605215072632, "TIME_S_1KI": 0.02228781312973716, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1256.431904683113, "W": 97.02, "J_1KI": 2.6203943102829994, "W_1KI": 0.20234336221172014, "W_D": 61.007, "J_D": 790.0550526592731, "W_D_1KI": 0.12723522467996715, "J_D_1KI": 0.0002653597521491258} diff --git a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.output index 09b3a13..74cfd6b 100644 --- a/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.0320582389831543} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.018622159957885742} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1249, 1250]), - col_indices=tensor([2956, 558, 3504, ..., 1528, 4784, 1878]), - values=tensor([0.5224, 0.1438, 0.5941, ..., 0.0368, 0.6760, 0.3012]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([4747, 1400, 364, ..., 4145, 1033, 2512]), + values=tensor([0.2725, 0.0769, 0.1246, ..., 0.7043, 0.6053, 0.3533]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.8976, 0.5094, 0.6995, ..., 0.0327, 0.1649, 0.7937]) +tensor([0.7933, 0.6184, 0.7441, ..., 0.6976, 0.5256, 0.3905]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 0.0320582389831543 seconds +Time: 0.018622159957885742 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '32752', '-ss', '5000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.745659589767456} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '56384', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.3166499137878418} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([1743, 4461, 346, ..., 1137, 3893, 4349]), - values=tensor([0.7861, 0.9854, 0.5411, ..., 0.5282, 0.2898, 0.9587]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([4757, 3329, 1263, ..., 1469, 3329, 2701]), + values=tensor([0.1246, 0.9627, 0.7339, ..., 0.9069, 0.1668, 0.3333]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.5708, 0.3567, 0.0850, ..., 0.6472, 0.1624, 0.7150]) +tensor([0.2123, 0.9228, 0.8495, ..., 0.6306, 0.5525, 0.0271]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 0.745659589767456 seconds +Time: 1.3166499137878418 seconds -['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '461197', '-ss', '5000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.877047538757324} +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '449650', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.846704244613647} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([4244, 4483, 4692, ..., 3607, 4429, 290]), - values=tensor([0.6080, 0.0136, 0.3918, ..., 0.5066, 0.3391, 0.6977]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1248, 1249, 1250]), + col_indices=tensor([3866, 4415, 541, ..., 3166, 1514, 574]), + values=tensor([0.2175, 0.0585, 0.9595, ..., 0.1738, 0.3089, 0.4824]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.1101, 0.0872, 0.5048, ..., 0.5059, 0.1642, 0.4124]) +tensor([0.0321, 0.8662, 0.1160, ..., 0.1960, 0.2258, 0.6699]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.877047538757324 seconds +Time: 9.846704244613647 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '479482', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.686605215072632} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), - col_indices=tensor([4244, 4483, 4692, ..., 3607, 4429, 290]), - values=tensor([0.6080, 0.0136, 0.3918, ..., 0.5066, 0.3391, 0.6977]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 1250, 1250, 1250]), + col_indices=tensor([4848, 292, 4243, ..., 2482, 3447, 2721]), + values=tensor([0.8445, 0.4097, 0.2554, ..., 0.4119, 0.7548, 0.6186]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.1101, 0.0872, 0.5048, ..., 0.5059, 0.1642, 0.4124]) +tensor([0.9146, 0.3592, 0.2749, ..., 0.2096, 0.7041, 0.4735]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +72,29 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.877047538757324 seconds +Time: 10.686605215072632 seconds -[42.19, 40.35, 39.27, 40.36, 39.17, 39.06, 39.17, 39.21, 39.04, 39.45] -[95.74] -12.755176782608032 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 461197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.877047538757324, 'TIME_S_1KI': 0.023584384848030937, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1221.180625166893, 'W': 95.74} -[42.19, 40.35, 39.27, 40.36, 39.17, 39.06, 39.17, 39.21, 39.04, 39.45, 39.84, 39.7, 39.64, 39.58, 39.75, 39.24, 39.17, 39.36, 39.12, 39.43] -711.645 -35.58225 -{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 461197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.877047538757324, 'TIME_S_1KI': 0.023584384848030937, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
1221.180625166893, 'W': 95.74, 'J_1KI': 2.6478503224585004, 'W_1KI': 0.20759024885244265, 'W_D': 60.15774999999999, 'J_D': 767.3227360939383, 'W_D_1KI': 0.13043829426470682, 'J_D_1KI': 0.0002828255480081328} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 1250, 1250, 1250]), + col_indices=tensor([4848, 292, 4243, ..., 2482, 3447, 2721]), + values=tensor([0.8445, 0.4097, 0.2554, ..., 0.4119, 0.7548, 0.6186]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.9146, 0.3592, 0.2749, ..., 0.2096, 0.7041, 0.4735]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.686605215072632 seconds + +[40.33, 39.49, 39.61, 39.68, 39.98, 40.15, 40.37, 39.78, 40.33, 39.71] +[97.02] +12.950236082077026 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 479482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.686605215072632, 'TIME_S_1KI': 0.02228781312973716, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1256.431904683113, 'W': 97.02} +[40.33, 39.49, 39.61, 39.68, 39.98, 40.15, 40.37, 39.78, 40.33, 39.71, 40.16, 39.74, 40.01, 39.44, 39.5, 40.68, 39.86, 40.07, 41.79, 39.36] +720.26 +36.013 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 479482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.686605215072632, 'TIME_S_1KI': 0.02228781312973716, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1256.431904683113, 'W': 97.02, 'J_1KI': 2.6203943102829994, 'W_1KI': 0.20234336221172014, 'W_D': 61.007, 'J_D': 790.0550526592731, 'W_D_1KI': 0.12723522467996715, 'J_D_1KI': 0.0002653597521491258} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..b134077 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 277, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.484652519226074, "TIME_S_1KI": 37.85073111633962, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 815.8254318380356, "W": 58.18, "J_1KI": 2945.2181654802725, "W_1KI": 210.0361010830325, "W_D": 41.336749999999995, "J_D": 579.6420061796307, "W_D_1KI": 149.2301444043321, "J_D_1KI": 538.7369834091411} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..a88f49b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 
'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 3.777904748916626} + +tensor(indices=tensor([[70224, 77791, 54738, ..., 90159, 78472, 84710], + [89635, 18305, 28352, ..., 20029, 92022, 1919]]), + values=tensor([0.0841, 0.8913, 0.5780, ..., 0.2242, 0.4150, 0.0064]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.9684, 0.4732, 0.2685, ..., 0.9364, 0.9632, 0.8366]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 3.777904748916626 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '277', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.484652519226074} + +tensor(indices=tensor([[75718, 77882, 53333, ..., 55126, 29590, 52929], + [24993, 5986, 8335, ..., 73350, 51485, 6339]]), + values=tensor([0.8153, 0.9561, 0.4734, ..., 0.5384, 0.6480, 0.6223]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.4470, 0.7756, 0.9870, ..., 0.8175, 0.5678, 0.3606]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.484652519226074 seconds + +tensor(indices=tensor([[75718, 77882, 53333, ..., 55126, 29590, 52929], + [24993, 5986, 8335, ..., 73350, 51485, 6339]]), + values=tensor([0.8153, 0.9561, 0.4734, ..., 0.5384, 0.6480, 0.6223]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.4470, 0.7756, 0.9870, ..., 0.8175, 0.5678, 0.3606]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.484652519226074 seconds + +[18.87, 18.51, 18.66, 18.8, 18.81, 18.54, 18.56, 19.08, 18.93, 18.51] +[58.18] +14.022437810897827 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 277, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.484652519226074, 'TIME_S_1KI': 37.85073111633962, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 815.8254318380356, 'W': 58.18} +[18.87, 18.51, 18.66, 18.8, 18.81, 18.54, 18.56, 19.08, 18.93, 18.51, 18.87, 18.79, 18.69, 18.45, 19.31, 18.41, 18.78, 18.51, 18.68, 18.46] +336.865 +16.84325 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 277, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.484652519226074, 'TIME_S_1KI': 37.85073111633962, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 815.8254318380356, 'W': 58.18, 'J_1KI': 2945.2181654802725, 'W_1KI': 210.0361010830325, 'W_D': 41.336749999999995, 'J_D': 579.6420061796307, 'W_D_1KI': 149.2301444043321, 'J_D_1KI': 538.7369834091411} diff --git 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..cb88306 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 40.743112087249756, "TIME_S_1KI": 407.43112087249756, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2558.607043964863, "W": 53.89, "J_1KI": 25586.07043964863, "W_1KI": 538.9000000000001, "W_D": 36.80825, "J_D": 1747.594131119311, "W_D_1KI": 368.08250000000004, "J_D_1KI": 3680.8250000000003} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..751fa7d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 40.743112087249756} + +tensor(indices=tensor([[55139, 39270, 91918, ..., 49140, 39015, 49909], + [ 5329, 58648, 67130, ..., 91740, 79288, 38914]]), + values=tensor([0.0968, 0.3684, 0.7307, ..., 0.0250, 0.0960, 0.9822]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9208, 0.9876, 0.2395, ..., 0.1912, 0.6607, 0.3160]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 40.743112087249756 seconds + +tensor(indices=tensor([[55139, 39270, 91918, ..., 49140, 39015, 49909], + [ 5329, 58648, 67130, ..., 91740, 79288, 38914]]), + values=tensor([0.0968, 0.3684, 0.7307, ..., 0.0250, 0.0960, 0.9822]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.9208, 0.9876, 0.2395, ..., 0.1912, 0.6607, 0.3160]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 40.743112087249756 seconds + +[18.93, 18.59, 18.84, 19.25, 18.52, 18.56, 22.71, 19.79, 18.64, 18.99] +[53.89] +47.47832703590393 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 40.743112087249756, 'TIME_S_1KI': 407.43112087249756, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2558.607043964863, 'W': 53.89} +[18.93, 18.59, 18.84, 19.25, 18.52, 18.56, 22.71, 19.79, 18.64, 18.99, 19.06, 18.37, 19.01, 18.37, 18.62, 18.63, 18.41, 18.49, 18.89, 18.91] +341.635 +17.08175 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 
'TIME_S': 40.743112087249756, 'TIME_S_1KI': 407.43112087249756, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2558.607043964863, 'W': 53.89, 'J_1KI': 25586.07043964863, 'W_1KI': 538.9000000000001, 'W_D': 36.80825, 'J_D': 1747.594131119311, 'W_D_1KI': 368.08250000000004, 'J_D_1KI': 3680.8250000000003} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..e03b0a9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 408.6323621273041, "TIME_S_1KI": 4086.323621273041, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 24437.81099384308, "W": 53.28, "J_1KI": 244378.10993843077, "W_1KI": 532.8000000000001, "W_D": 36.44775, "J_D": 16717.402883837163, "W_D_1KI": 364.4775, "J_D_1KI": 3644.775} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..bfef7db --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 408.6323621273041} + +tensor(indices=tensor([[32054, 2702, 33950, ..., 1264, 99287, 9659], + [48537, 24014, 15665, ..., 63340, 97079, 68679]]), + values=tensor([0.3553, 0.4131, 0.3021, ..., 0.5462, 0.8786, 0.8288]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.7950, 0.4850, 0.6219, ..., 0.8943, 0.5958, 0.2665]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 408.6323621273041 seconds + +tensor(indices=tensor([[32054, 2702, 33950, ..., 1264, 99287, 9659], + [48537, 24014, 15665, ..., 63340, 97079, 68679]]), + values=tensor([0.3553, 0.4131, 0.3021, ..., 0.5462, 0.8786, 0.8288]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_coo) +tensor([0.7950, 0.4850, 0.6219, ..., 0.8943, 0.5958, 0.2665]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 408.6323621273041 seconds + +[19.18, 18.59, 19.02, 18.5, 18.56, 18.88, 18.64, 18.58, 18.7, 18.52] +[53.28] +458.66762375831604 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 408.6323621273041, 'TIME_S_1KI': 4086.323621273041, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 24437.81099384308, 'W': 53.28} +[19.18, 18.59, 19.02, 18.5, 18.56, 18.88, 18.64, 18.58, 18.7, 18.52, 18.96, 18.54, 18.84, 18.87, 18.8, 18.49, 18.58, 
18.69, 18.62, 18.83] +336.645 +16.83225 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 408.6323621273041, 'TIME_S_1KI': 4086.323621273041, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 24437.81099384308, 'W': 53.28, 'J_1KI': 244378.10993843077, 'W_1KI': 532.8000000000001, 'W_D': 36.44775, 'J_D': 16717.402883837163, 'W_D_1KI': 364.4775, 'J_D_1KI': 3644.775} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..8e00da4 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2195, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.226952075958252, "TIME_S_1KI": 4.659203679252051, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1105.514514913559, "W": 81.42, "J_1KI": 503.65125964171256, "W_1KI": 37.09339407744875, "W_D": 64.4795, "J_D": 875.4977052857876, "W_D_1KI": 29.375626423690207, "J_D_1KI": 13.38297331375408} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..6359401 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.4783329963684082} + +tensor(indices=tensor([[60378, 99759, 58143, ..., 52332, 4184, 28118], + [75557, 63960, 47353, ..., 15162, 61961, 13123]]), + values=tensor([0.5422, 0.6321, 0.2848, ..., 0.0428, 0.6024, 0.5692]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.0318, 0.3638, 0.8355, ..., 0.7691, 0.7210, 0.7447]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.4783329963684082 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '2195', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.226952075958252} + +tensor(indices=tensor([[11656, 96706, 99691, ..., 34294, 79366, 13820], + [44828, 66833, 29805, ..., 53593, 98973, 56424]]), + values=tensor([0.3625, 0.3756, 0.2782, ..., 0.8037, 0.2669, 0.1572]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.1773, 0.6994, 0.9086, ..., 0.5655, 0.9948, 0.4469]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 
10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.226952075958252 seconds + +tensor(indices=tensor([[11656, 96706, 99691, ..., 34294, 79366, 13820], + [44828, 66833, 29805, ..., 53593, 98973, 56424]]), + values=tensor([0.3625, 0.3756, 0.2782, ..., 0.8037, 0.2669, 0.1572]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_coo) +tensor([0.1773, 0.6994, 0.9086, ..., 0.5655, 0.9948, 0.4469]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.226952075958252 seconds + +[19.11, 18.77, 18.6, 18.43, 18.75, 18.62, 18.57, 18.75, 18.61, 18.83] +[81.42] +13.57792329788208 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.226952075958252, 'TIME_S_1KI': 4.659203679252051, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1105.514514913559, 'W': 81.42} +[19.11, 18.77, 18.6, 18.43, 18.75, 18.62, 18.57, 18.75, 18.61, 18.83, 19.58, 18.61, 19.45, 18.9, 18.74, 19.56, 18.53, 18.59, 19.3, 18.54] +338.81000000000006 +16.940500000000004 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2195, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.226952075958252, 'TIME_S_1KI': 4.659203679252051, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1105.514514913559, 'W': 81.42, 'J_1KI': 503.65125964171256, 'W_1KI': 37.09339407744875, 'W_D': 64.4795, 'J_D': 875.4977052857876, 'W_D_1KI': 29.375626423690207, 'J_D_1KI': 13.38297331375408} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..00308c8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 538, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.484038829803467, "TIME_S_1KI": 19.48706102193953, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 863.0392302846909, "W": 61.82000000000001, "J_1KI": 1604.1621380756337, "W_1KI": 114.90706319702603, "W_D": 44.82350000000001, "J_D": 625.7592840288879, "W_D_1KI": 83.3150557620818, "J_D_1KI": 154.86069844253123} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..60409dd --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9489986896514893} + +tensor(indices=tensor([[18716, 95790, 3688, ..., 56893, 32268, 74877], + [42427, 
48584, 1937, ..., 87175, 73241, 255]]), + values=tensor([0.3833, 0.3090, 0.7937, ..., 0.4799, 0.0064, 0.0236]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.8209, 0.9524, 0.2720, ..., 0.9711, 0.5452, 0.8633]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 1.9489986896514893 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '538', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.484038829803467} + +tensor(indices=tensor([[75111, 77851, 77098, ..., 3514, 96030, 86193], + [96984, 22441, 53852, ..., 11600, 61034, 85238]]), + values=tensor([0.6727, 0.5360, 0.3440, ..., 0.2635, 0.8115, 0.4966]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.4576, 0.3890, 0.1313, ..., 0.1543, 0.9807, 0.2103]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.484038829803467 seconds + +tensor(indices=tensor([[75111, 77851, 77098, ..., 3514, 96030, 86193], + [96984, 22441, 53852, ..., 11600, 61034, 85238]]), + values=tensor([0.6727, 0.5360, 0.3440, ..., 0.2635, 0.8115, 0.4966]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_coo) +tensor([0.4576, 0.3890, 0.1313, ..., 0.1543, 0.9807, 0.2103]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.484038829803467 seconds + +[19.3, 18.46, 18.59, 21.3, 19.04, 18.54, 19.16, 18.53, 18.83, 18.51] +[61.82] +13.96051812171936 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 538, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.484038829803467, 'TIME_S_1KI': 19.48706102193953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 863.0392302846909, 'W': 61.82000000000001} +[19.3, 18.46, 18.59, 21.3, 19.04, 18.54, 19.16, 18.53, 18.83, 18.51, 19.05, 18.47, 19.14, 18.47, 18.74, 18.38, 18.87, 19.18, 18.58, 18.44] +339.93 +16.9965 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 538, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.484038829803467, 'TIME_S_1KI': 19.48706102193953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 863.0392302846909, 'W': 61.82000000000001, 'J_1KI': 1604.1621380756337, 'W_1KI': 114.90706319702603, 'W_D': 44.82350000000001, 'J_D': 625.7592840288879, 'W_D_1KI': 83.3150557620818, 'J_D_1KI': 154.86069844253123} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..7a98492 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 29183, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, 
"MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.467581272125244, "TIME_S_1KI": 0.35868763568259754, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 748.1673318600655, "W": 53.97, "J_1KI": 25.637094605080545, "W_1KI": 1.849364355960662, "W_D": 37.1335, "J_D": 514.7687903951406, "W_D_1KI": 1.2724360072645033, "J_D_1KI": 0.04360196029416109} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..ee8c335 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04817700386047363} + +tensor(indices=tensor([[3335, 3322, 7196, ..., 7191, 508, 8110], + [4063, 8322, 5133, ..., 8201, 7246, 3231]]), + values=tensor([0.8712, 0.9569, 0.0566, ..., 0.3023, 0.9617, 0.7515]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.6349, 0.6664, 0.2561, ..., 0.5040, 0.4042, 0.6694]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.04817700386047363 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '21794', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.841289520263672} + +tensor(indices=tensor([[4097, 1874, 5344, ..., 2314, 7969, 5757], + [3711, 1851, 7307, ..., 198, 8537, 3843]]), + values=tensor([0.1438, 0.8339, 0.1228, ..., 0.2158, 0.8895, 0.0423]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.7810, 0.6510, 0.1460, ..., 0.2722, 0.2417, 0.7158]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 7.841289520263672 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '29183', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.467581272125244} + +tensor(indices=tensor([[3977, 2006, 5423, ..., 9180, 4980, 3169], + [ 734, 104, 1049, ..., 520, 426, 6809]]), + values=tensor([0.0325, 0.6933, 0.0833, ..., 0.4344, 0.1915, 0.4176]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.0472, 0.8371, 0.9207, ..., 0.6624, 0.9733, 0.2751]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.467581272125244 seconds + +tensor(indices=tensor([[3977, 2006, 5423, ..., 9180, 4980, 3169], + [ 734, 104, 1049, ..., 520, 426, 6809]]), + values=tensor([0.0325, 0.6933, 0.0833, ..., 0.4344, 0.1915, 0.4176]), + size=(10000, 
10000), nnz=10000, layout=torch.sparse_coo) +tensor([0.0472, 0.8371, 0.9207, ..., 0.6624, 0.9733, 0.2751]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.467581272125244 seconds + +[18.95, 18.66, 18.64, 18.46, 18.63, 18.79, 18.71, 18.47, 18.54, 18.65] +[53.97] +13.862652063369751 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 29183, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.467581272125244, 'TIME_S_1KI': 0.35868763568259754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 748.1673318600655, 'W': 53.97} +[18.95, 18.66, 18.64, 18.46, 18.63, 18.79, 18.71, 18.47, 18.54, 18.65, 19.39, 18.65, 18.66, 18.71, 19.01, 18.67, 18.65, 18.85, 18.88, 18.51] +336.73 +16.8365 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 29183, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.467581272125244, 'TIME_S_1KI': 0.35868763568259754, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 748.1673318600655, 'W': 53.97, 'J_1KI': 25.637094605080545, 'W_1KI': 1.849364355960662, 'W_D': 37.1335, 'J_D': 514.7687903951406, 'W_D_1KI': 1.2724360072645033, 'J_D_1KI': 0.04360196029416109} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..d33b6e1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2873, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.13928747177124, "TIME_S_1KI": 3.529163756272621, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 733.8819551849365, "W": 54.04, "J_1KI": 255.4409868377781, "W_1KI": 18.80960668290985, "W_D": 29.094, "J_D": 395.10661739730836, "W_D_1KI": 10.126696832579185, "J_D_1KI": 3.5247813548831135} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..bf0841f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.3654141426086426} + +tensor(indices=tensor([[ 285, 9508, 997, ..., 3993, 9092, 9278], + [1151, 9186, 4538, ..., 6102, 5484, 9538]]), + values=tensor([0.7916, 0.9038, 0.6322, ..., 0.6891, 0.7898, 0.9190]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.3644, 0.5010, 0.4580, ..., 0.2484, 0.5683, 0.7454]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 
0.001 +Time: 0.3654141426086426 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '2873', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.13928747177124} + +tensor(indices=tensor([[6060, 7206, 2987, ..., 1631, 2200, 7913], + [4578, 9706, 8191, ..., 4185, 595, 6385]]), + values=tensor([0.1653, 0.5324, 0.3694, ..., 0.4292, 0.2763, 0.2494]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6125, 0.0366, 0.2882, ..., 0.5917, 0.2052, 0.6661]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.13928747177124 seconds + +tensor(indices=tensor([[6060, 7206, 2987, ..., 1631, 2200, 7913], + [4578, 9706, 8191, ..., 4185, 595, 6385]]), + values=tensor([0.1653, 0.5324, 0.3694, ..., 0.4292, 0.2763, 0.2494]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_coo) +tensor([0.6125, 0.0366, 0.2882, ..., 0.5917, 0.2052, 0.6661]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.13928747177124 seconds + +[18.99, 18.97, 18.91, 18.52, 18.6, 19.1, 18.69, 19.29, 19.1, 18.84] +[54.04] +13.580347061157227 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2873, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.13928747177124, 'TIME_S_1KI': 3.529163756272621, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 733.8819551849365, 'W': 54.04} +[18.99, 18.97, 18.91, 18.52, 18.6, 19.1, 18.69, 19.29, 19.1, 18.84, 19.4, 18.67, 30.47, 45.76, 45.45, 45.62, 46.53, 43.62, 33.55, 18.91] +498.91999999999996 +24.945999999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2873, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.13928747177124, 'TIME_S_1KI': 3.529163756272621, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 733.8819551849365, 'W': 54.04, 'J_1KI': 255.4409868377781, 'W_1KI': 18.80960668290985, 'W_D': 29.094, 'J_D': 395.10661739730836, 'W_D_1KI': 10.126696832579185, 'J_D_1KI': 3.5247813548831135} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..52ea94b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 298, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.55695390701294, "TIME_S_1KI": 35.42601982219107, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 763.1404219007492, "W": 54.26, "J_1KI": 2560.873898995803, "W_1KI": 182.08053691275165, "W_D": 37.38225, "J_D": 525.7631042498946, "W_D_1KI": 125.44379194630872, "J_D_1KI": 420.95232196747895} diff --git 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..f00641f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.5225820541381836} + +tensor(indices=tensor([[1131, 1047, 2841, ..., 8180, 5269, 4463], + [8625, 461, 2854, ..., 2984, 2605, 6313]]), + values=tensor([0.1914, 0.3221, 0.8157, ..., 0.1394, 0.3711, 0.1104]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.4933, 0.6059, 0.4292, ..., 0.9377, 0.0326, 0.6811]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 3.5225820541381836 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '298', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.55695390701294} + +tensor(indices=tensor([[7496, 806, 8472, ..., 8403, 2373, 2384], + [1826, 9342, 202, ..., 685, 9867, 1439]]), + values=tensor([0.7012, 0.7592, 0.3138, ..., 0.2722, 0.9031, 0.6235]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.0070, 0.5660, 0.8775, ..., 0.0859, 0.7008, 0.4276]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.55695390701294 seconds + +tensor(indices=tensor([[7496, 806, 8472, ..., 8403, 2373, 2384], + [1826, 9342, 202, ..., 685, 9867, 1439]]), + values=tensor([0.7012, 0.7592, 0.3138, ..., 0.2722, 0.9031, 0.6235]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_coo) +tensor([0.0070, 0.5660, 0.8775, ..., 0.0859, 0.7008, 0.4276]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.55695390701294 seconds + +[19.87, 18.83, 19.52, 18.4, 18.45, 18.4, 19.99, 18.35, 18.79, 18.42] +[54.26] +14.064512014389038 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 298, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.55695390701294, 'TIME_S_1KI': 35.42601982219107, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 763.1404219007492, 'W': 54.26} +[19.87, 18.83, 19.52, 18.4, 18.45, 18.4, 19.99, 18.35, 18.79, 18.42, 19.11, 18.44, 18.42, 18.4, 18.97, 18.42, 18.89, 18.45, 18.87, 18.53] +337.55499999999995 +16.87775 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 298, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.55695390701294, 'TIME_S_1KI': 35.42601982219107, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 
763.1404219007492, 'W': 54.26, 'J_1KI': 2560.873898995803, 'W_1KI': 182.08053691275165, 'W_D': 37.38225, 'J_D': 525.7631042498946, 'W_D_1KI': 125.44379194630872, 'J_D_1KI': 420.95232196747895} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..72fd5cb --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 17.491714477539062, "TIME_S_1KI": 174.91714477539062, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1206.7315879487992, "W": 53.99, "J_1KI": 12067.315879487993, "W_1KI": 539.9000000000001, "W_D": 36.95700000000001, "J_D": 826.0266585631372, "W_D_1KI": 369.57000000000005, "J_D_1KI": 3695.7000000000003} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..9f803ce --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 17.491714477539062} + +tensor(indices=tensor([[4602, 4926, 4680, ..., 2456, 368, 8039], + [6233, 1066, 4674, ..., 8264, 3705, 27]]), + values=tensor([0.2728, 0.9920, 0.7149, ..., 0.4971, 0.8454, 0.5216]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9812, 0.1863, 0.2385, ..., 0.0766, 0.1018, 0.6024]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 17.491714477539062 seconds + +tensor(indices=tensor([[4602, 4926, 4680, ..., 2456, 368, 8039], + [6233, 1066, 4674, ..., 8264, 3705, 27]]), + values=tensor([0.2728, 0.9920, 0.7149, ..., 0.4971, 0.8454, 0.5216]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.9812, 0.1863, 0.2385, ..., 0.0766, 0.1018, 0.6024]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 17.491714477539062 seconds + +[18.94, 18.73, 18.92, 18.49, 18.62, 18.32, 19.15, 18.51, 18.5, 18.78] +[53.99] +22.351020336151123 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 17.491714477539062, 'TIME_S_1KI': 174.91714477539062, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1206.7315879487992, 'W': 53.99} +[18.94, 18.73, 18.92, 18.49, 18.62, 18.32, 19.15, 18.51, 18.5, 18.78, 22.54, 19.38, 18.88, 19.24, 18.72, 18.43, 18.66, 19.75, 18.83, 18.8] +340.65999999999997 +17.032999999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 
'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 17.491714477539062, 'TIME_S_1KI': 174.91714477539062, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1206.7315879487992, 'W': 53.99, 'J_1KI': 12067.315879487993, 'W_1KI': 539.9000000000001, 'W_D': 36.95700000000001, 'J_D': 826.0266585631372, 'W_D_1KI': 369.57000000000005, 'J_D_1KI': 3695.7000000000003} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..90fb139 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 35.10873603820801, "TIME_S_1KI": 351.0873603820801, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2215.3251207351686, "W": 53.92, "J_1KI": 22153.25120735169, "W_1KI": 539.2, "W_D": 36.96425, "J_D": 1518.6912387636303, "W_D_1KI": 369.6425, "J_D_1KI": 3696.4249999999997} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..9dbb705 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 35.10873603820801} + +tensor(indices=tensor([[7269, 7692, 6140, ..., 9759, 2967, 8326], + [8877, 8587, 2475, ..., 4500, 550, 47]]), + values=tensor([0.9914, 0.2635, 0.0873, ..., 0.0059, 0.9314, 0.8927]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.4101, 0.2801, 0.7810, ..., 0.0990, 0.8629, 0.7092]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 35.10873603820801 seconds + +tensor(indices=tensor([[7269, 7692, 6140, ..., 9759, 2967, 8326], + [8877, 8587, 2475, ..., 4500, 550, 47]]), + values=tensor([0.9914, 0.2635, 0.0873, ..., 0.0059, 0.9314, 0.8927]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.4101, 0.2801, 0.7810, ..., 0.0990, 0.8629, 0.7092]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 35.10873603820801 seconds + +[19.66, 18.43, 19.24, 18.72, 18.44, 18.29, 19.51, 18.64, 18.79, 18.38] +[53.92] +41.08540654182434 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 35.10873603820801, 'TIME_S_1KI': 351.0873603820801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2215.3251207351686, 'W': 53.92} +[19.66, 18.43, 19.24, 18.72, 18.44, 
18.29, 19.51, 18.64, 18.79, 18.38, 19.25, 19.04, 18.86, 19.1, 18.5, 19.71, 18.42, 18.79, 18.55, 18.88] +339.115 +16.955750000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 35.10873603820801, 'TIME_S_1KI': 351.0873603820801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2215.3251207351686, 'W': 53.92, 'J_1KI': 22153.25120735169, 'W_1KI': 539.2, 'W_D': 36.96425, 'J_D': 1518.6912387636303, 'W_D_1KI': 369.6425, 'J_D_1KI': 3696.4249999999997} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..139f671 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 70.1952166557312, "TIME_S_1KI": 701.952166557312, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 4218.440363705158, "W": 53.75, "J_1KI": 42184.40363705158, "W_1KI": 537.5, "W_D": 36.86875, "J_D": 2893.555779708922, "W_D_1KI": 368.6875, "J_D_1KI": 3686.875} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..8708d5e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 70.1952166557312} + +tensor(indices=tensor([[4254, 5119, 5933, ..., 1877, 1852, 6863], + [6529, 8257, 5254, ..., 9840, 1155, 7431]]), + values=tensor([0.6297, 0.7087, 0.9469, ..., 0.6709, 0.7173, 0.7057]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.2571, 0.7598, 0.4541, ..., 0.9196, 0.5656, 0.9314]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 70.1952166557312 seconds + +tensor(indices=tensor([[4254, 5119, 5933, ..., 1877, 1852, 6863], + [6529, 8257, 5254, ..., 9840, 1155, 7431]]), + values=tensor([0.6297, 0.7087, 0.9469, ..., 0.6709, 0.7173, 0.7057]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_coo) +tensor([0.2571, 0.7598, 0.4541, ..., 0.9196, 0.5656, 0.9314]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 70.1952166557312 seconds + +[19.6, 18.54, 18.89, 19.01, 18.63, 18.61, 18.73, 19.13, 18.89, 18.45] +[53.75] +78.48261141777039 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 
70.1952166557312, 'TIME_S_1KI': 701.952166557312, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4218.440363705158, 'W': 53.75} +[19.6, 18.54, 18.89, 19.01, 18.63, 18.61, 18.73, 19.13, 18.89, 18.45, 19.2, 18.53, 18.6, 18.5, 19.09, 18.65, 18.78, 18.52, 18.66, 18.48] +337.625 +16.88125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 70.1952166557312, 'TIME_S_1KI': 701.952166557312, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 4218.440363705158, 'W': 53.75, 'J_1KI': 42184.40363705158, 'W_1KI': 537.5, 'W_D': 36.86875, 'J_D': 2893.555779708922, 'W_D_1KI': 368.6875, 'J_D_1KI': 3686.875} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..95b513d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 105.20825099945068, "TIME_S_1KI": 1052.0825099945068, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6216.137630634308, "W": 53.74, "J_1KI": 62161.37630634308, "W_1KI": 537.4, "W_D": 36.70375, "J_D": 4245.544502426385, "W_D_1KI": 367.0375, "J_D_1KI": 3670.3750000000005} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..b729e98 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 105.20825099945068} + +tensor(indices=tensor([[9203, 5315, 423, ..., 837, 1807, 1484], + [ 770, 4877, 1229, ..., 7240, 2928, 2325]]), + values=tensor([0.5493, 0.8611, 0.7531, ..., 0.5884, 0.4715, 0.7271]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.2219, 0.5576, 0.8123, ..., 0.7190, 0.1183, 0.3943]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 105.20825099945068 seconds + +tensor(indices=tensor([[9203, 5315, 423, ..., 837, 1807, 1484], + [ 770, 4877, 1229, ..., 7240, 2928, 2325]]), + values=tensor([0.5493, 0.8611, 0.7531, ..., 0.5884, 0.4715, 0.7271]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_coo) +tensor([0.2219, 0.5576, 0.8123, ..., 0.7190, 0.1183, 0.3943]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 105.20825099945068 seconds + +[19.44, 18.5, 18.99, 18.52, 19.0, 18.5, 18.6, 18.4, 18.9, 18.53] +[53.74] +115.67059230804443 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 
'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 105.20825099945068, 'TIME_S_1KI': 1052.0825099945068, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6216.137630634308, 'W': 53.74} +[19.44, 18.5, 18.99, 18.52, 19.0, 18.5, 18.6, 18.4, 18.9, 18.53, 19.49, 18.63, 20.07, 18.73, 19.01, 18.6, 18.74, 18.68, 18.87, 22.51] +340.72499999999997 +17.03625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 105.20825099945068, 'TIME_S_1KI': 1052.0825099945068, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6216.137630634308, 'W': 53.74, 'J_1KI': 62161.37630634308, 'W_1KI': 537.4, 'W_D': 36.70375, 'J_D': 4245.544502426385, 'W_D_1KI': 367.0375, 'J_D_1KI': 3670.3750000000005} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..1173009 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 140.30631065368652, "TIME_S_1KI": 1403.0631065368652, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 8230.189362194538, "W": 53.730000000000004, "J_1KI": 82301.89362194538, "W_1KI": 537.3, "W_D": 36.82475000000001, "J_D": 5640.697296025934, "W_D_1KI": 368.24750000000006, "J_D_1KI": 3682.475000000001} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..d3accaa --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 140.30631065368652} + +tensor(indices=tensor([[2342, 3802, 3183, ..., 8973, 4530, 7273], + [9136, 6273, 5082, ..., 194, 8649, 5236]]), + values=tensor([0.5938, 0.2838, 0.4476, ..., 0.9252, 0.8369, 0.9096]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.4076, 0.0326, 0.3759, ..., 0.8982, 0.9745, 0.8362]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 140.30631065368652 seconds + +tensor(indices=tensor([[2342, 3802, 3183, ..., 8973, 4530, 7273], + [9136, 6273, 5082, ..., 194, 8649, 5236]]), + values=tensor([0.5938, 0.2838, 0.4476, ..., 0.9252, 0.8369, 0.9096]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_coo) +tensor([0.4076, 0.0326, 0.3759, ..., 0.8982, 0.9745, 0.8362]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 
+Time: 140.30631065368652 seconds + +[19.11, 18.6, 18.62, 18.93, 18.47, 18.58, 18.5, 19.08, 18.64, 19.76] +[53.73] +153.17679810523987 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 140.30631065368652, 'TIME_S_1KI': 1403.0631065368652, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8230.189362194538, 'W': 53.730000000000004} +[19.11, 18.6, 18.62, 18.93, 18.47, 18.58, 18.5, 19.08, 18.64, 19.76, 19.02, 18.92, 19.25, 18.55, 18.65, 18.84, 19.03, 18.72, 18.49, 18.58] +338.10499999999996 +16.90525 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 140.30631065368652, 'TIME_S_1KI': 1403.0631065368652, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 8230.189362194538, 'W': 53.730000000000004, 'J_1KI': 82301.89362194538, 'W_1KI': 537.3, 'W_D': 36.82475000000001, 'J_D': 5640.697296025934, 'W_D_1KI': 368.24750000000006, 'J_D_1KI': 3682.475000000001} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..dbe73aa --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 175.6148717403412, "TIME_S_1KI": 1756.1487174034119, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 10264.798701245783, "W": 53.72999999999999, "J_1KI": 102647.98701245783, "W_1KI": 537.2999999999998, "W_D": 36.97524999999999, "J_D": 7063.902813665328, "W_D_1KI": 369.7524999999999, "J_D_1KI": 3697.5249999999987} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..4134bcd --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 175.6148717403412} + +tensor(indices=tensor([[ 918, 6806, 3007, ..., 4702, 6826, 1070], + [8048, 9840, 9535, ..., 9805, 4117, 613]]), + values=tensor([0.5990, 0.4903, 0.7254, ..., 0.8083, 0.3792, 0.9317]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.8552, 0.1665, 0.3212, ..., 0.5582, 0.9750, 0.2568]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 175.6148717403412 seconds + +tensor(indices=tensor([[ 918, 6806, 3007, ..., 4702, 6826, 1070], + [8048, 9840, 9535, ..., 9805, 4117, 613]]), + values=tensor([0.5990, 0.4903, 0.7254, ..., 0.8083, 
0.3792, 0.9317]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_coo) +tensor([0.8552, 0.1665, 0.3212, ..., 0.5582, 0.9750, 0.2568]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 175.6148717403412 seconds + +[19.08, 18.43, 18.93, 18.34, 18.62, 18.36, 18.76, 18.49, 18.5, 18.35] +[53.73] +191.04408526420593 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 175.6148717403412, 'TIME_S_1KI': 1756.1487174034119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10264.798701245783, 'W': 53.72999999999999} +[19.08, 18.43, 18.93, 18.34, 18.62, 18.36, 18.76, 18.49, 18.5, 18.35, 18.93, 18.95, 18.52, 18.75, 18.5, 18.54, 18.59, 18.71, 18.64, 18.57] +335.095 +16.75475 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 175.6148717403412, 'TIME_S_1KI': 1756.1487174034119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 10264.798701245783, 'W': 53.72999999999999, 'J_1KI': 102647.98701245783, 'W_1KI': 537.2999999999998, 'W_D': 36.97524999999999, 'J_D': 7063.902813665328, 'W_D_1KI': 369.7524999999999, 'J_D_1KI': 3697.5249999999987} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..f341b39 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 245183, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.55661129951477, "TIME_S_1KI": 0.04305604915314182, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 747.9292433834075, "W": 54.11, "J_1KI": 3.0504938898023415, "W_1KI": 0.22069229922139788, "W_D": 36.87025, "J_D": 509.6347844364643, "W_D_1KI": 0.15037849279925605, "J_D_1KI": 0.0006133316453394242} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..8398219 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,962 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.016073942184448242} + +tensor(indices=tensor([[9373, 2165, 9059, ..., 713, 9728, 3730], + [8704, 8595, 2729, ..., 2984, 8729, 1709]]), + values=tensor([3.4033e-01, 2.3666e-01, 9.4833e-01, 5.6351e-02, + 7.0513e-01, 7.3453e-01, 3.8730e-01, 6.5880e-01, + 8.8273e-01, 8.4499e-02, 1.2427e-01, 6.5511e-01, + 3.4915e-02, 3.7088e-01, 9.3675e-02, 6.4468e-02, + 6.5562e-01, 2.0195e-01, 9.3968e-01, 
3.7975e-01, + 8.0104e-01, 4.7106e-01, 4.6743e-01, 6.5311e-02, + 7.1032e-01, 7.1108e-01, 9.5428e-01, 2.7893e-01, + 8.9309e-01, 5.6771e-01, 4.2308e-01, 5.5889e-01, + 7.4354e-01, 3.0400e-01, 9.2145e-01, 4.6651e-01, + 7.7753e-01, 2.9401e-01, 9.5822e-01, 8.3421e-01, + 3.4098e-01, 9.8809e-01, 4.9210e-01, 3.8344e-01, + 5.5398e-01, 9.4981e-02, 4.5270e-01, 6.3251e-01, + 9.9672e-01, 9.2261e-01, 9.9262e-01, 2.9938e-01, + 5.4691e-01, 1.9161e-01, 9.6637e-02, 4.9912e-01, + 4.7533e-01, 1.2817e-01, 1.7708e-01, 8.3389e-01, + 2.8751e-01, 8.8296e-01, 9.5788e-01, 9.0460e-01, + 1.3403e-01, 4.7175e-01, 6.2210e-01, 5.8336e-01, + 3.7231e-01, 2.1312e-01, 8.9929e-01, 1.4801e-01, + 8.4331e-01, 6.9229e-01, 3.2573e-01, 4.2644e-01, + 1.7320e-01, 8.3745e-01, 7.3182e-01, 1.8749e-01, + 2.1187e-01, 6.2687e-01, 1.8615e-01, 7.3521e-01, + 7.3372e-01, 6.4005e-01, 8.2787e-02, 1.3079e-01, + 1.0990e-01, 3.8578e-01, 6.4379e-01, 7.2523e-01, + 6.0380e-01, 8.5846e-01, 7.0040e-01, 3.0436e-02, + 1.8714e-01, 6.1512e-01, 2.0343e-01, 8.0583e-01, + 1.4576e-01, 6.6920e-01, 7.4532e-01, 3.4415e-02, + 5.2542e-01, 1.0769e-01, 5.0265e-01, 5.4246e-01, + 9.3141e-01, 4.4871e-02, 6.0034e-01, 4.0684e-01, + 8.1141e-01, 3.1932e-01, 5.1561e-01, 1.6017e-01, + 6.2635e-02, 9.8150e-01, 1.9732e-01, 3.1549e-01, + 3.1425e-01, 9.2298e-02, 6.7114e-01, 5.5670e-02, + 1.8850e-01, 5.5200e-01, 4.7243e-01, 3.3749e-01, + 3.6954e-01, 2.6993e-01, 1.1426e-01, 8.3926e-01, + 1.8910e-01, 6.5240e-01, 5.4269e-01, 3.0396e-01, + 5.9835e-01, 8.7169e-01, 3.3902e-01, 4.2052e-01, + 3.7081e-01, 9.1857e-02, 1.9821e-01, 1.0209e-01, + 3.8307e-01, 7.9189e-01, 2.8728e-01, 7.3534e-01, + 1.5172e-01, 2.8306e-01, 9.9801e-01, 4.3052e-01, + 5.8945e-01, 5.6858e-01, 8.8873e-01, 7.9609e-01, + 2.5893e-01, 2.7664e-01, 1.0147e-01, 2.5258e-01, + 2.2558e-01, 7.2871e-01, 3.3157e-01, 4.1055e-01, + 9.5711e-01, 8.0965e-01, 1.8228e-01, 9.6547e-01, + 4.7651e-01, 2.9561e-01, 4.0397e-01, 7.2661e-01, + 1.4609e-01, 9.2851e-01, 4.6319e-01, 3.4727e-02, + 3.6253e-01, 3.0754e-01, 8.0140e-01, 8.7005e-01, + 9.7552e-01, 4.9735e-01, 2.4346e-01, 4.4602e-01, + 8.1043e-01, 1.7603e-02, 8.7111e-01, 6.1433e-01, + 9.7406e-01, 1.2812e-01, 7.8253e-01, 3.3928e-01, + 6.9765e-01, 3.6275e-01, 5.3162e-01, 4.5051e-01, + 1.4380e-01, 8.9222e-01, 1.0650e-01, 7.0237e-01, + 3.0375e-01, 1.0105e-01, 9.3085e-01, 7.5717e-01, + 3.9398e-01, 9.5051e-02, 1.6895e-01, 4.0677e-01, + 8.8499e-01, 3.9949e-01, 9.3352e-01, 1.3450e-02, + 9.7567e-01, 5.3094e-01, 8.9470e-01, 7.5108e-03, + 8.8476e-01, 4.2370e-01, 1.3455e-01, 3.6178e-01, + 2.7308e-02, 6.9456e-01, 9.7995e-01, 3.7759e-01, + 7.8649e-01, 5.4844e-01, 7.3179e-01, 7.9573e-01, + 3.0207e-01, 9.2022e-01, 8.9347e-01, 6.7918e-01, + 7.8971e-01, 8.3952e-01, 1.7600e-01, 7.7054e-01, + 9.6307e-01, 3.8258e-01, 9.9376e-01, 1.6891e-01, + 2.2448e-01, 5.7619e-01, 3.8017e-01, 7.8466e-01, + 7.4771e-01, 9.5764e-01, 6.1499e-01, 7.5494e-02, + 1.1161e-01, 3.5663e-01, 1.3368e-01, 9.9824e-02, + 8.6012e-01, 5.8958e-01, 3.5613e-01, 7.5993e-03, + 2.6922e-01, 8.4086e-01, 9.9724e-01, 2.8675e-01, + 5.3665e-01, 2.9377e-01, 5.6535e-01, 5.8522e-01, + 9.7039e-01, 9.6330e-01, 7.3693e-01, 9.8558e-01, + 9.0186e-02, 5.3985e-01, 1.4002e-01, 8.5158e-01, + 3.7300e-01, 4.9995e-01, 3.5352e-01, 4.6800e-02, + 8.3178e-02, 3.4512e-01, 8.7799e-02, 2.4977e-01, + 4.8437e-01, 9.9381e-01, 9.3993e-01, 2.1914e-01, + 5.5585e-01, 6.3316e-01, 6.1014e-01, 3.5682e-01, + 7.9797e-01, 7.0754e-01, 3.6114e-01, 3.6111e-01, + 2.9417e-01, 3.2525e-01, 3.5184e-01, 7.6011e-01, + 2.9267e-02, 8.0699e-01, 4.8509e-02, 2.2742e-01, + 6.1838e-01, 1.1014e-01, 6.2205e-01, 
6.9485e-01, + 6.9269e-01, 1.2485e-01, 7.5115e-01, 6.3086e-01, + 1.8315e-01, 3.1457e-01, 5.2536e-01, 8.5088e-01, + 6.0446e-01, 4.6129e-01, 1.6277e-01, 2.9969e-01, + 2.4939e-01, 7.7108e-01, 7.0278e-01, 8.2214e-02, + 2.6862e-01, 5.9596e-01, 8.7281e-01, 2.8774e-01, + 4.9565e-01, 1.0256e-01, 2.1659e-01, 6.7308e-01, + 2.5637e-01, 4.9735e-01, 3.8610e-01, 2.8365e-01, + 1.3328e-01, 9.4157e-01, 6.8543e-01, 4.5039e-02, + 9.2215e-01, 4.4758e-01, 5.6342e-01, 8.1328e-01, + 8.4498e-02, 2.4994e-01, 9.5964e-01, 5.9584e-01, + 9.6684e-01, 1.0296e-01, 5.5950e-01, 6.2896e-01, + 6.8157e-01, 8.7366e-01, 8.2252e-01, 1.5905e-01, + 3.0810e-01, 6.5515e-02, 1.1798e-01, 3.9771e-01, + 3.0401e-02, 6.9393e-01, 1.9514e-01, 9.7038e-01, + 9.6271e-01, 2.5712e-01, 4.0278e-01, 9.8047e-01, + 1.2499e-01, 5.2841e-01, 4.3228e-01, 4.1095e-01, + 6.2178e-01, 3.0168e-01, 9.3697e-01, 8.7675e-01, + 5.4277e-01, 1.0030e-01, 1.7888e-01, 7.3451e-01, + 6.3908e-01, 4.7142e-01, 8.2648e-01, 7.5247e-01, + 6.0525e-01, 5.9642e-01, 6.0505e-01, 2.4098e-01, + 5.1414e-01, 9.8299e-01, 3.5033e-01, 8.9115e-01, + 1.3537e-01, 9.1528e-01, 2.9770e-01, 8.3841e-01, + 2.3762e-01, 3.7276e-01, 9.9207e-01, 2.1198e-01, + 2.9089e-01, 2.4295e-01, 9.8822e-01, 7.1841e-01, + 9.1054e-01, 3.1048e-01, 5.7447e-01, 2.0490e-01, + 6.8643e-01, 3.3879e-01, 8.6100e-01, 1.5154e-01, + 5.1022e-01, 4.6077e-01, 5.1800e-01, 6.0061e-01, + 1.0777e-01, 5.0233e-01, 4.6088e-01, 4.9776e-01, + 5.4651e-01, 5.6521e-01, 7.9322e-01, 5.7045e-01, + 4.1044e-01, 4.5608e-01, 4.9319e-01, 9.2042e-01, + 9.9035e-01, 5.3841e-01, 1.1438e-01, 9.2836e-01, + 6.3976e-01, 2.7302e-01, 6.1191e-01, 5.9814e-01, + 2.4016e-01, 5.6697e-01, 6.0474e-01, 1.5541e-01, + 3.1099e-01, 6.1928e-02, 5.7367e-01, 9.4506e-01, + 2.8699e-01, 9.6127e-01, 6.4695e-01, 6.2339e-01, + 3.8076e-01, 1.0273e-01, 8.6252e-01, 1.8652e-01, + 2.3576e-03, 9.3792e-01, 1.7126e-02, 1.2670e-01, + 2.2515e-01, 7.8025e-01, 8.6387e-01, 7.6618e-01, + 2.6913e-01, 2.7021e-01, 6.9083e-01, 7.1079e-01, + 6.7900e-01, 5.3830e-01, 7.1736e-01, 7.4489e-02, + 7.5624e-01, 4.3482e-01, 3.8815e-01, 9.6691e-01, + 9.8959e-02, 9.5918e-01, 8.9578e-01, 9.9208e-01, + 2.8627e-01, 8.6395e-01, 8.0737e-01, 3.5647e-01, + 6.0473e-01, 8.2002e-01, 2.7636e-02, 3.4086e-01, + 8.6791e-01, 3.0643e-01, 6.6757e-01, 5.3958e-01, + 6.8490e-01, 7.7367e-01, 9.2205e-01, 2.4502e-01, + 9.3637e-01, 2.5530e-01, 5.6843e-01, 6.9660e-01, + 9.9634e-01, 4.8607e-01, 7.9777e-01, 3.2717e-02, + 6.0299e-01, 3.2811e-01, 6.4522e-01, 8.3205e-01, + 6.4952e-01, 8.2705e-01, 8.5122e-01, 6.4463e-03, + 4.1838e-01, 3.2575e-01, 2.9316e-01, 8.1773e-01, + 3.9470e-01, 7.1721e-01, 8.8213e-01, 4.8337e-01, + 5.4965e-01, 2.4773e-01, 6.9928e-01, 9.4339e-01, + 7.7622e-01, 7.8122e-01, 7.2144e-01, 1.2670e-01, + 9.7451e-02, 4.7228e-01, 8.6684e-01, 6.3935e-01, + 8.6708e-01, 2.5505e-01, 8.0235e-01, 6.3876e-01, + 1.1947e-02, 7.1452e-02, 4.7050e-01, 7.0359e-01, + 7.5385e-01, 4.7189e-01, 9.2069e-01, 4.4603e-01, + 6.4545e-01, 2.5065e-01, 1.8024e-01, 6.2509e-01, + 1.6709e-02, 1.0829e-01, 1.3880e-01, 5.3075e-01, + 1.0144e-01, 6.3650e-01, 7.2676e-01, 6.0243e-01, + 3.9749e-01, 5.3280e-01, 9.4635e-01, 6.7063e-01, + 1.5216e-01, 3.7304e-02, 5.1341e-01, 3.1306e-01, + 1.7644e-02, 5.3735e-01, 1.4474e-01, 2.4462e-01, + 2.5239e-01, 2.8940e-01, 8.6288e-01, 4.3810e-01, + 5.3531e-01, 2.7571e-01, 8.4034e-01, 7.1103e-01, + 9.5149e-01, 4.2084e-01, 2.3432e-01, 2.4171e-01, + 2.4066e-01, 8.1300e-01, 9.8376e-01, 5.8135e-01, + 8.0106e-01, 2.0747e-01, 3.4461e-01, 5.8772e-01, + 9.7579e-01, 3.9550e-01, 9.1353e-01, 4.2644e-01, + 3.4443e-01, 5.5316e-01, 3.0078e-01, 
4.2695e-01, + 4.5110e-01, 7.6352e-01, 2.4171e-02, 2.5891e-01, + 1.5271e-01, 1.6222e-01, 6.4355e-01, 3.5854e-01, + 9.4553e-01, 9.7519e-01, 8.3050e-01, 7.9358e-01, + 2.7826e-01, 6.1361e-01, 3.5265e-01, 6.4283e-01, + 4.0772e-01, 8.5406e-01, 3.8425e-01, 4.5487e-01, + 4.8772e-02, 7.6197e-02, 9.0000e-01, 7.9935e-01, + 8.3076e-01, 3.4742e-01, 8.6281e-01, 7.4470e-01, + 4.8634e-01, 5.9386e-01, 7.6451e-01, 5.8800e-01, + 5.4037e-02, 1.7086e-01, 5.3850e-01, 5.0628e-01, + 4.7126e-01, 8.5564e-01, 5.2356e-01, 3.9327e-01, + 1.5137e-01, 3.8288e-01, 9.3699e-01, 3.2822e-01, + 2.7750e-01, 9.0765e-01, 5.9550e-01, 7.7995e-01, + 6.6931e-01, 8.0291e-02, 5.4460e-01, 8.8976e-01, + 7.1184e-01, 8.8953e-01, 5.6516e-01, 9.2605e-01, + 3.8563e-01, 7.4381e-02, 4.1779e-02, 8.2726e-01, + 2.2215e-01, 8.9742e-01, 9.9038e-01, 3.4615e-01, + 4.3918e-01, 4.3300e-01, 2.0679e-01, 5.4446e-01, + 3.0474e-01, 8.8962e-01, 1.7431e-02, 3.9679e-01, + 9.1444e-01, 4.4519e-01, 7.8022e-01, 7.4974e-01, + 4.7409e-01, 9.3710e-02, 8.1683e-01, 4.7594e-01, + 5.9911e-01, 6.9128e-01, 9.3413e-01, 1.5473e-01, + 1.0594e-01, 8.7418e-01, 8.9329e-01, 7.0816e-01, + 4.2885e-01, 3.3314e-01, 7.7554e-01, 7.8768e-01, + 4.4532e-01, 2.8838e-01, 8.3056e-01, 7.8894e-02, + 6.7438e-01, 5.2526e-01, 5.3804e-01, 5.3940e-01, + 1.5530e-01, 8.7971e-01, 9.5495e-01, 4.2758e-01, + 3.9954e-01, 2.8896e-01, 5.0383e-01, 1.2474e-01, + 5.8276e-01, 2.2549e-01, 4.9951e-01, 6.1025e-01, + 2.8035e-01, 2.3226e-01, 9.2268e-01, 3.8351e-01, + 9.1898e-01, 8.2957e-01, 8.7848e-01, 7.7463e-01, + 3.6112e-03, 5.0195e-01, 9.2972e-01, 6.4420e-01, + 4.1643e-01, 5.6056e-01, 7.3409e-01, 7.8964e-01, + 2.1712e-01, 6.1967e-01, 6.2410e-01, 5.0033e-01, + 6.6843e-01, 1.2218e-01, 1.1264e-01, 5.2660e-01, + 9.5265e-01, 6.4995e-03, 9.3253e-01, 3.7688e-01, + 5.9142e-01, 2.2190e-02, 1.8417e-01, 1.0514e-01, + 8.2363e-02, 2.1877e-01, 5.6613e-01, 9.3501e-01, + 8.4785e-01, 8.7990e-01, 1.1172e-01, 3.7591e-02, + 4.4505e-01, 5.8834e-01, 6.5676e-01, 2.2034e-01, + 3.9136e-01, 8.4265e-01, 6.8447e-01, 3.2436e-01, + 3.2757e-01, 4.1153e-01, 5.1449e-01, 4.0785e-01, + 2.7211e-01, 1.4612e-02, 3.3070e-01, 3.9539e-01, + 5.0982e-02, 8.1165e-01, 6.9545e-01, 6.9065e-01, + 6.2335e-01, 7.7385e-01, 5.8596e-01, 8.7279e-01, + 4.5990e-01, 5.1090e-01, 3.5904e-01, 2.8110e-01, + 6.1394e-01, 9.3612e-01, 5.4767e-01, 2.2842e-01, + 3.3445e-01, 6.5650e-01, 2.3492e-01, 2.8820e-01, + 8.5802e-01, 6.7740e-01, 2.4585e-01, 3.5830e-01, + 4.3597e-01, 4.6737e-02, 9.4537e-01, 7.5263e-01, + 8.8980e-01, 5.8964e-01, 7.7219e-01, 5.5875e-01, + 6.0009e-01, 6.1974e-01, 3.2523e-01, 9.6290e-01, + 6.6280e-02, 8.0043e-01, 2.8749e-01, 5.3457e-01, + 5.1654e-01, 3.6195e-01, 4.2030e-02, 9.2195e-01, + 7.9858e-01, 7.3206e-01, 9.4648e-01, 3.8362e-01, + 7.6205e-01, 6.9152e-01, 3.3032e-01, 8.3202e-01, + 9.4024e-02, 1.4656e-01, 4.9888e-01, 1.6077e-01, + 6.6330e-01, 5.0890e-01, 6.9949e-01, 3.2023e-01, + 2.2584e-01, 6.7717e-01, 1.5749e-01, 9.5531e-01, + 6.5289e-01, 3.1713e-01, 8.0572e-01, 8.9284e-01, + 2.3869e-01, 3.4888e-01, 8.3617e-02, 5.4371e-02, + 9.5014e-01, 1.0815e-02, 8.3273e-02, 8.4162e-01, + 8.2627e-01, 4.2498e-01, 8.8613e-01, 3.2871e-01, + 9.2535e-01, 7.2150e-01, 7.7965e-01, 1.2352e-01, + 8.3457e-01, 5.9917e-01, 1.2768e-01, 7.3159e-01, + 8.4179e-01, 9.5788e-01, 1.3652e-01, 7.8610e-01, + 8.8992e-01, 1.8021e-02, 1.6190e-01, 9.1865e-01, + 1.7199e-01, 8.3828e-01, 3.6832e-01, 6.7848e-01, + 2.1704e-01, 8.1440e-01, 9.7333e-01, 8.6566e-02, + 3.8851e-01, 7.8593e-01, 4.3997e-01, 6.3456e-01, + 8.4949e-01, 6.2091e-01, 4.7846e-01, 2.3361e-01, + 8.7000e-01, 6.1718e-01, 7.9627e-02, 
9.7897e-01, + 3.0811e-02, 7.5622e-01, 6.9108e-01, 8.5701e-01, + 3.8581e-01, 2.0174e-02, 8.9276e-01, 8.8886e-01, + 9.7180e-01, 7.5459e-01, 6.3337e-01, 1.9004e-01, + 9.6820e-01, 9.1071e-01, 4.2850e-01, 7.9597e-01, + 7.4503e-01, 5.3616e-01, 5.8154e-01, 7.1545e-01, + 6.7247e-01, 9.8018e-03, 1.1843e-01, 4.8832e-01, + 7.5805e-01, 7.3667e-01, 9.3805e-01, 5.6202e-02, + 6.9843e-02, 8.7303e-01, 5.4003e-01, 7.8698e-01, + 8.8890e-01, 1.4862e-01, 6.7355e-01, 9.4779e-01, + 7.7617e-01, 2.1173e-01, 2.7942e-01, 7.6872e-01, + 5.6656e-01, 5.0257e-01, 8.1713e-01, 9.5463e-01, + 1.7771e-01, 7.7790e-01, 3.3105e-01, 3.2579e-01, + 3.7500e-01, 9.1460e-03, 7.0852e-01, 6.6830e-01, + 8.0289e-01, 4.0223e-01, 2.9249e-01, 5.6736e-01, + 5.1908e-02, 9.7098e-01, 8.4325e-01, 7.2387e-01, + 6.6623e-01, 4.7845e-01, 5.1829e-01, 8.3621e-02, + 6.2935e-01, 6.6121e-02, 2.6136e-01, 8.7305e-01, + 8.2208e-01, 3.0317e-01, 5.2587e-01, 3.6265e-01, + 2.9588e-01, 9.7620e-01, 6.8948e-01, 7.2708e-01, + 7.8233e-01, 6.9132e-01, 4.4851e-01, 8.2923e-01, + 5.1040e-01, 1.9401e-01, 8.3332e-02, 6.9028e-04, + 9.3902e-01, 4.9914e-01, 4.6653e-01, 2.8656e-01, + 8.7552e-01, 1.4916e-01, 1.6063e-01, 5.2878e-02, + 9.2014e-01, 3.5762e-01, 9.4623e-01, 1.5599e-01, + 8.9114e-01, 1.8627e-01, 8.6958e-01, 6.6281e-01, + 1.3693e-01, 3.3151e-01, 1.0599e-01, 1.0311e-01, + 4.6561e-01, 4.8506e-01, 1.5497e-01, 7.2390e-02, + 8.2487e-01, 8.4539e-01, 2.0023e-03, 9.5817e-01, + 3.6720e-01, 2.7811e-01, 7.7869e-01, 8.1398e-01, + 2.6217e-01, 3.3394e-01, 9.5069e-01, 1.1776e-02, + 1.6240e-01, 5.7105e-01, 3.3724e-01, 1.8734e-01, + 3.2556e-02, 7.8838e-01, 1.0993e-01, 5.9183e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.3736, 0.7525, 0.2304, ..., 0.3246, 0.4617, 0.6737]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.016073942184448242 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '65323', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.797457695007324} + +tensor(indices=tensor([[ 589, 1764, 5520, ..., 7256, 5914, 9675], + [1977, 1759, 1265, ..., 1313, 9661, 8931]]), + values=tensor([0.2603, 0.5902, 0.5454, 0.8806, 0.2506, 0.3517, 0.5517, + 0.2729, 0.7051, 0.8326, 0.6325, 0.0684, 0.9789, 0.6709, + 0.2514, 0.0415, 0.0622, 0.0677, 0.9957, 0.7292, 0.2465, + 0.6749, 0.3557, 0.5262, 0.7980, 0.4723, 0.3453, 0.2930, + 0.3593, 0.1971, 0.6596, 0.4214, 0.6757, 0.6746, 0.2118, + 0.7915, 0.4666, 0.8176, 0.1478, 0.0686, 0.3320, 0.5530, + 0.6402, 0.2982, 0.0811, 0.8869, 0.5025, 0.0302, 0.5285, + 0.2181, 0.6757, 0.8563, 0.6104, 0.2256, 0.8699, 0.9533, + 0.1485, 0.7984, 0.6573, 0.7526, 0.6634, 0.5048, 0.6094, + 0.7905, 0.0129, 0.7342, 0.8822, 0.5563, 0.8886, 0.8767, + 0.3595, 0.8032, 0.8000, 0.2973, 0.0418, 0.2258, 0.1024, + 0.4011, 0.2140, 0.1716, 0.1253, 0.1571, 0.9104, 0.6932, + 0.2738, 0.2115, 0.8372, 0.6091, 0.0698, 0.2245, 0.9486, + 0.3971, 0.4425, 0.1067, 0.6545, 0.3759, 0.7361, 0.2372, + 0.1280, 0.7199, 0.2065, 0.1190, 0.6254, 0.6743, 0.9740, + 0.1181, 0.4386, 0.5462, 0.3643, 0.7477, 0.2869, 0.9243, + 0.7240, 0.8317, 0.2496, 0.1626, 0.3208, 0.7357, 0.8642, + 0.5825, 0.3635, 0.6462, 0.7334, 0.4782, 0.4200, 0.8024, + 0.7909, 0.3904, 0.1685, 0.9564, 0.1741, 0.8669, 0.5573, + 0.3353, 0.5162, 0.4769, 
0.1084, 0.6968, 0.5535, 0.2182, + 0.6286, 0.0210, 0.3877, 0.3359, 0.1634, 0.2361, 0.3865, + 0.4481, 0.6061, 0.5599, 0.3752, 0.1603, 0.3548, 0.2497, + 0.4852, 0.5616, 0.4479, 0.2831, 0.2374, 0.9405, 0.3661, + 0.3581, 0.0371, 0.3942, 0.9982, 0.5398, 0.5749, 0.3334, + 0.2759, 0.8186, 0.7867, 0.2476, 0.3940, 0.2166, 0.7435, + 0.5753, 0.8701, 0.0342, 0.9844, 0.7626, 0.5202, 0.9708, + 0.5931, 0.8464, 0.7715, 0.1184, 0.5740, 0.3139, 0.5370, + 0.0595, 0.7600, 0.1623, 0.7204, 0.9580, 0.9237, 0.2223, + 0.8138, 0.0632, 0.1818, 0.5632, 0.2736, 0.9324, 0.1061, + 0.0732, 0.9091, 0.4293, 0.6869, 0.0022, 0.1255, 0.9615, + 0.8829, 0.5193, 0.4919, 0.6085, 0.2546, 0.7144, 0.0596, + 0.8772, 0.8716, 0.9092, 0.4657, 0.3267, 0.7908, 0.6036, + 0.1763, 0.9789, 0.1154, 0.0044, 0.5092, 0.1565, 0.4434, + 0.9924, 0.7622, 0.0635, 0.8360, 0.2619, 0.8447, 0.1241, + 0.0707, 0.1239, 0.6441, 0.2200, 0.6369, 0.0416, 0.3607, + 0.0938, 0.1058, 0.7258, 0.0759, 0.4793, 0.3204, 0.8273, + 0.3494, 0.5903, 0.6560, 0.1454, 0.8531, 0.9109, 0.9099, + 0.7263, 0.3098, 0.3614, 0.8069, 0.0282, 0.6460, 0.3708, + 0.6177, 0.3883, 0.1872, 0.5373, 0.9899, 0.1279, 0.3378, + 0.6597, 0.1246, 0.9094, 0.5385, 0.2867, 0.5516, 0.5054, + 0.7436, 0.7408, 0.4503, 0.3732, 0.0239, 0.4006, 0.3187, + 0.7358, 0.7778, 0.8429, 0.8541, 0.9792, 0.5826, 0.7551, + 0.4024, 0.4579, 0.7102, 0.3406, 0.4861, 0.8838, 0.0251, + 0.5805, 0.1249, 0.5027, 0.3063, 0.4745, 0.1840, 0.5224, + 0.5368, 0.7852, 0.9751, 0.6336, 0.2948, 0.1668, 0.2853, + 0.3514, 0.2797, 0.8206, 0.3392, 0.5008, 0.4243, 0.3326, + 0.5349, 0.5581, 0.2748, 0.9658, 0.9551, 0.0952, 0.1449, + 0.9247, 0.4038, 0.0855, 0.3807, 0.4195, 0.9114, 0.3944, + 0.6929, 0.9106, 0.1182, 0.4878, 0.1693, 0.8538, 0.4574, + 0.5934, 0.1703, 0.0460, 0.9525, 0.9902, 0.3186, 0.5593, + 0.5721, 0.9335, 0.8330, 0.1941, 0.4963, 0.3801, 0.3470, + 0.2826, 0.2292, 0.5672, 0.6547, 0.8397, 0.3333, 0.4394, + 0.0027, 0.7206, 0.3116, 0.5941, 0.8498, 0.4955, 0.6250, + 0.6212, 0.2849, 0.7234, 0.2934, 0.6009, 0.0761, 0.1563, + 0.3093, 0.2156, 0.1738, 0.7500, 0.6307, 0.1999, 0.7837, + 0.9242, 0.3135, 0.2378, 0.8821, 0.6245, 0.2587, 0.7027, + 0.3211, 0.1443, 0.0107, 0.2783, 0.1362, 0.6739, 0.5755, + 0.9876, 0.7078, 0.5970, 0.4134, 0.2534, 0.5801, 0.9812, + 0.8788, 0.0696, 0.1885, 0.5008, 0.2149, 0.9850, 0.8167, + 0.2402, 0.9181, 0.1542, 0.6352, 0.8430, 0.0472, 0.0616, + 0.2864, 0.0493, 0.5164, 0.4359, 0.1466, 0.2914, 0.1848, + 0.5060, 0.3255, 0.1908, 0.3395, 0.8915, 0.5798, 0.4838, + 0.7171, 0.2653, 0.6909, 0.8254, 0.6551, 0.7208, 0.7548, + 0.0633, 0.7816, 0.7257, 0.3228, 0.2889, 0.7652, 0.9883, + 0.9888, 0.1108, 0.3122, 0.2652, 0.2868, 0.9207, 0.1363, + 0.2490, 0.4899, 0.3673, 0.5731, 0.5344, 0.1011, 0.2860, + 0.0553, 0.0391, 0.6978, 0.8081, 0.7662, 0.6983, 0.5840, + 0.2532, 0.7557, 0.7707, 0.2688, 0.2056, 0.0983, 0.4522, + 0.5702, 0.4068, 0.4903, 0.5290, 0.6675, 0.8470, 0.9655, + 0.3660, 0.9195, 0.9451, 0.6472, 0.4881, 0.5865, 0.8811, + 0.3913, 0.0497, 0.1619, 0.0110, 0.1472, 0.8063, 0.9257, + 0.7967, 0.3320, 0.3776, 0.3467, 0.7492, 0.1669, 0.7974, + 0.9578, 0.2456, 0.9079, 0.4732, 0.6436, 0.6383, 0.4513, + 0.3530, 0.6465, 0.5455, 0.0584, 0.0182, 0.9203, 0.6544, + 0.4550, 0.1182, 0.2124, 0.9399, 0.5991, 0.0791, 0.5440, + 0.3874, 0.2903, 0.0562, 0.5691, 0.3119, 0.1830, 0.8593, + 0.8671, 0.6705, 0.1337, 0.1294, 0.7941, 0.7394, 0.6093, + 0.7902, 0.6591, 0.2434, 0.0542, 0.3688, 0.7102, 0.7271, + 0.9764, 0.6429, 0.2997, 0.4475, 0.9571, 0.2408, 0.3206, + 0.3154, 0.5771, 0.6994, 0.7254, 0.9931, 0.7994, 0.5343, + 0.6368, 0.4492, 0.5463, 0.9200, 0.9827, 
0.5285, 0.0082, + 0.6855, 0.2529, 0.6487, 0.2167, 0.4467, 0.7616, 0.0701, + 0.2792, 0.9924, 0.4005, 0.7734, 0.1859, 0.4105, 0.7533, + 0.0610, 0.1703, 0.0892, 0.0366, 0.2515, 0.4718, 0.4047, + 0.7247, 0.7599, 0.7855, 0.6917, 0.2725, 0.5311, 0.5150, + 0.0375, 0.5786, 0.3150, 0.7972, 0.3967, 0.6749, 0.5106, + 0.0464, 0.2261, 0.3208, 0.8112, 0.0340, 0.2809, 0.5396, + 0.1120, 0.1043, 0.2775, 0.2734, 0.9547, 0.2004, 0.1646, + 0.7421, 0.1792, 0.8905, 0.9033, 0.6653, 0.3151, 0.1600, + 0.4166, 0.9188, 0.1514, 0.2968, 0.2691, 0.6984, 0.6642, + 0.9114, 0.8285, 0.8748, 0.3601, 0.5100, 0.7936, 0.6684, + 0.2215, 0.1240, 0.9027, 0.6246, 0.7995, 0.4405, 0.7062, + 0.4363, 0.1678, 0.4781, 0.5753, 0.3624, 0.1053, 0.9963, + 0.2215, 0.7811, 0.1063, 0.4680, 0.2507, 0.0040, 0.9768, + 0.1230, 0.4809, 0.7342, 0.3060, 0.8579, 0.5291, 0.8571, + 0.1157, 0.3352, 0.4214, 0.1079, 0.8478, 0.4036, 0.2784, + 0.5793, 0.2429, 0.1089, 0.8098, 0.8881, 0.9789, 0.5087, + 0.8944, 0.7528, 0.9080, 0.4739, 0.1698, 0.9116, 0.4142, + 0.0468, 0.8733, 0.1936, 0.2312, 0.1551, 0.8943, 0.9269, + 0.4255, 0.6786, 0.0865, 0.9206, 0.1043, 0.5079, 0.8854, + 0.4361, 0.0632, 0.7602, 0.1094, 0.1457, 0.1845, 0.4600, + 0.5055, 0.3712, 0.5126, 0.9562, 0.6997, 0.2738, 0.0649, + 0.1796, 0.9706, 0.8853, 0.9551, 0.1789, 0.9347, 0.5765, + 0.6076, 0.6892, 0.9938, 0.3478, 0.5985, 0.2436, 0.4482, + 0.2996, 0.7583, 0.3463, 0.5331, 0.1173, 0.2162, 0.5224, + 0.0973, 0.2586, 0.7439, 0.0024, 0.3542, 0.5387, 0.0185, + 0.7532, 0.8026, 0.8150, 0.7994, 0.0457, 0.1397, 0.3190, + 0.9426, 0.8479, 0.0218, 0.9470, 0.1536, 0.3880, 0.8526, + 0.0993, 0.7749, 0.1090, 0.2558, 0.2917, 0.2686, 0.1593, + 0.6591, 0.0251, 0.1142, 0.9021, 0.5746, 0.8243, 0.9786, + 0.8465, 0.2811, 0.3464, 0.7225, 0.0512, 0.7432, 0.1541, + 0.9797, 0.3361, 0.9676, 0.9373, 0.2366, 0.5868, 0.8692, + 0.6894, 0.0101, 0.9429, 0.4004, 0.6932, 0.7658, 0.0097, + 0.8132, 0.8427, 0.0766, 0.5016, 0.4400, 0.3686, 0.7400, + 0.7164, 0.1714, 0.0458, 0.7197, 0.7738, 0.6767, 0.4736, + 0.2540, 0.5120, 0.9856, 0.2889, 0.8450, 0.0368, 0.0936, + 0.6407, 0.6008, 0.0102, 0.3632, 0.8085, 0.9453, 0.2485, + 0.9136, 0.2135, 0.3111, 0.5199, 0.4420, 0.9142, 0.2427, + 0.3540, 0.1864, 0.6446, 0.1582, 0.1765, 0.8136, 0.0275, + 0.0375, 0.9596, 0.5271, 0.6951, 0.4705, 0.7228, 0.9810, + 0.0921, 0.3391, 0.3671, 0.8567, 0.8475, 0.9080, 0.5431, + 0.6992, 0.1358, 0.9308, 0.7639, 0.6821, 0.2799, 0.5679, + 0.6914, 0.7899, 0.1981, 0.3100, 0.6899, 0.4611, 0.4084, + 0.1294, 0.3887, 0.3416, 0.1240, 0.9496, 0.4338, 0.4544, + 0.5621, 0.2115, 0.7856, 0.9347, 0.6313, 0.9150, 0.4401, + 0.1711, 0.9091, 0.0949, 0.0789, 0.9539, 0.0653, 0.5428, + 0.2695, 0.8801, 0.4942, 0.5628, 0.0574, 0.0689, 0.3106, + 0.2346, 0.6366, 0.6259, 0.4034, 0.6666, 0.8327, 0.1611, + 0.0373, 0.9308, 0.0312, 0.8797, 0.1369, 0.0042, 0.2142, + 0.8062, 0.2923, 0.2453, 0.1829, 0.5478, 0.0497, 0.6989, + 0.7550, 0.7926, 0.6913, 0.6250, 0.2850, 0.6533, 0.3572, + 0.4706, 0.0526, 0.6229, 0.0050, 0.9386, 0.8860, 0.4782, + 0.7346, 0.4194, 0.8811, 0.9659, 0.0887, 0.4273, 0.8455, + 0.7226, 0.2643, 0.8912, 0.1977, 0.1035, 0.1855, 0.7692, + 0.7007, 0.0415, 0.5392, 0.2349, 0.7738, 0.0665, 0.0412, + 0.8485, 0.4921, 0.7630, 0.1012, 0.8267, 0.0025, 0.3823, + 0.6334, 0.2329, 0.5567, 0.7190, 0.3640, 0.5819, 0.7375, + 0.1784, 0.7293, 0.9252, 0.6080, 0.1914, 0.2609, 0.3091, + 0.2669, 0.0728, 0.9340, 0.2120, 0.5330, 0.5773, 0.7122, + 0.2306, 0.4837, 0.1283, 0.4540, 0.2659, 0.1953, 0.7191, + 0.5604, 0.0910, 0.3010, 0.0258, 0.3380, 0.0066, 0.7138, + 0.4937, 0.8689, 0.5079, 0.5370, 0.4622, 0.6292, 0.7287, 
+ 0.5783, 0.5078, 0.8115, 0.3236, 0.1839, 0.0596]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.1280, 0.5013, 0.2459, ..., 0.6482, 0.9894, 0.8969]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 2.797457695007324 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '245183', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.55661129951477} + +tensor(indices=tensor([[5180, 1048, 486, ..., 1778, 3729, 8510], + [4819, 6172, 7204, ..., 2691, 9502, 8604]]), + values=tensor([3.1997e-01, 1.9746e-03, 5.9659e-01, 6.4667e-01, + 6.1573e-01, 6.8280e-01, 8.7912e-01, 1.0541e-01, + 8.7244e-01, 3.5387e-01, 8.4858e-01, 9.1804e-01, + 1.3120e-02, 8.7006e-01, 4.4250e-01, 7.2471e-01, + 8.6395e-02, 1.5554e-01, 3.3804e-01, 2.1524e-01, + 5.3286e-01, 9.7773e-01, 3.7090e-01, 3.2094e-01, + 1.2021e-01, 7.8241e-01, 7.0275e-01, 3.1333e-01, + 1.5571e-01, 2.9996e-01, 1.3413e-01, 8.7322e-01, + 4.8018e-01, 5.6815e-01, 1.8030e-01, 3.6525e-01, + 8.5921e-02, 7.9634e-01, 2.8817e-01, 1.6498e-01, + 3.6921e-01, 8.9090e-01, 5.6953e-02, 8.7505e-01, + 2.5643e-01, 4.0341e-01, 7.8463e-01, 3.1022e-01, + 5.8772e-01, 1.4088e-01, 2.9764e-01, 3.1519e-01, + 6.4486e-01, 6.2083e-01, 1.6716e-01, 8.8200e-01, + 6.0151e-01, 9.9652e-01, 9.4318e-01, 6.9753e-01, + 5.7504e-01, 1.9732e-01, 5.4904e-01, 9.6490e-01, + 9.7912e-01, 3.3823e-01, 3.2420e-01, 8.2193e-01, + 5.7144e-01, 4.1675e-01, 5.9054e-01, 7.1548e-01, + 2.0599e-01, 8.6016e-01, 1.2423e-01, 6.4112e-01, + 2.8729e-01, 1.9319e-01, 6.8258e-01, 5.5365e-01, + 9.2505e-01, 8.9817e-02, 7.3210e-01, 5.4585e-01, + 6.8264e-01, 9.7527e-01, 1.8873e-02, 3.0728e-02, + 4.1134e-01, 1.2944e-01, 6.2916e-01, 8.7786e-01, + 8.2807e-01, 2.8300e-02, 1.0710e-01, 4.6868e-01, + 9.3658e-01, 7.1088e-01, 1.3950e-01, 9.4530e-01, + 1.8050e-01, 3.6170e-01, 4.0026e-01, 8.2397e-01, + 1.9659e-01, 7.0902e-01, 2.7460e-01, 7.6228e-01, + 2.8841e-01, 8.3735e-01, 7.7157e-01, 5.8508e-01, + 3.9936e-01, 2.3258e-01, 4.3851e-01, 5.1270e-01, + 1.8743e-01, 1.7600e-01, 1.3603e-01, 1.8335e-01, + 8.9530e-01, 4.0183e-01, 6.1375e-01, 6.1444e-01, + 9.3122e-01, 7.8277e-01, 2.1815e-01, 8.4317e-01, + 1.3314e-02, 3.2190e-01, 2.5987e-03, 9.8635e-02, + 1.5428e-01, 5.7047e-01, 2.4165e-01, 5.6144e-01, + 6.5588e-04, 4.3570e-01, 2.5930e-01, 7.9358e-01, + 6.2788e-01, 4.7540e-01, 5.5470e-02, 4.3391e-01, + 3.0397e-01, 2.3440e-01, 3.7442e-01, 6.4266e-01, + 5.7099e-01, 5.5223e-01, 1.6745e-04, 6.1158e-01, + 2.5334e-01, 7.7991e-02, 8.9946e-01, 3.0472e-01, + 5.9711e-01, 8.1378e-01, 7.9797e-01, 3.1265e-01, + 5.9195e-01, 4.3496e-01, 4.3677e-01, 3.7001e-01, + 9.7937e-01, 3.2002e-01, 2.2448e-01, 9.7242e-01, + 8.3715e-01, 9.4956e-01, 6.2662e-01, 1.5816e-01, + 3.4639e-01, 4.1324e-01, 5.0900e-01, 1.6984e-01, + 5.4434e-01, 6.9472e-01, 2.6718e-01, 8.7769e-01, + 2.6303e-01, 5.1788e-01, 9.5824e-01, 1.8087e-01, + 8.4632e-01, 6.2227e-01, 5.2599e-02, 1.7981e-01, + 9.8327e-01, 2.7832e-01, 8.6783e-01, 7.0803e-01, + 6.5027e-01, 6.0471e-01, 5.5191e-01, 2.9915e-01, + 5.2965e-01, 8.1406e-01, 8.4521e-02, 1.3297e-03, + 8.2493e-01, 6.2908e-01, 2.6660e-01, 1.0937e-01, + 7.0156e-02, 8.0394e-01, 4.6158e-01, 7.7297e-01, + 7.2187e-01, 5.3053e-01, 2.1392e-01, 5.1454e-01, + 5.1817e-01, 4.1849e-01, 7.6278e-01, 
3.0078e-01, + 1.4763e-01, 1.7104e-01, 6.8369e-01, 1.2397e-01, + 9.3694e-01, 8.1823e-01, 5.4515e-01, 1.2595e-01, + 7.9169e-01, 8.9641e-01, 4.2940e-01, 4.1400e-01, + 9.7619e-01, 7.3579e-01, 4.5034e-01, 6.5426e-02, + 5.8285e-01, 7.0870e-01, 7.9733e-02, 8.1539e-02, + 3.5876e-01, 6.2046e-01, 4.5471e-01, 6.0330e-01, + 6.1647e-01, 9.4212e-01, 5.7901e-01, 5.6991e-01, + 4.5541e-01, 8.6414e-01, 3.8840e-01, 9.5995e-01, + 9.3172e-01, 8.1941e-01, 5.6711e-01, 6.6220e-01, + 6.8317e-01, 5.5823e-01, 2.5782e-01, 2.7744e-01, + 7.7498e-01, 7.8945e-01, 8.3817e-01, 6.1985e-01, + 6.5936e-01, 9.9342e-01, 7.2182e-01, 2.8905e-01, + 4.2240e-01, 3.4969e-01, 3.5269e-01, 7.6074e-01, + 1.2974e-01, 7.0944e-01, 9.1205e-01, 5.2262e-01, + 2.5481e-01, 2.8284e-02, 9.6610e-01, 8.3725e-01, + 8.2813e-01, 9.7172e-01, 8.6142e-01, 8.0989e-02, + 7.3999e-01, 1.1119e-02, 4.1762e-01, 6.9778e-01, + 4.7556e-01, 1.2793e-01, 7.1165e-01, 6.7424e-01, + 3.1456e-01, 7.8275e-01, 3.9322e-01, 4.2940e-01, + 7.2392e-01, 2.2833e-01, 8.6519e-01, 3.6761e-01, + 2.1310e-01, 7.2418e-01, 8.8593e-01, 1.8587e-01, + 5.9960e-02, 4.4120e-01, 7.5640e-01, 8.7636e-01, + 9.1339e-01, 9.4496e-02, 9.3445e-01, 3.1463e-01, + 6.1444e-01, 3.1249e-01, 4.4814e-01, 9.4495e-01, + 4.9352e-01, 1.1641e-01, 5.1358e-01, 8.9332e-01, + 7.2345e-01, 7.7966e-01, 1.0461e-01, 5.1303e-01, + 3.8073e-01, 8.9708e-01, 5.0025e-01, 3.5962e-01, + 7.2550e-01, 3.9106e-01, 2.6945e-01, 8.6193e-01, + 6.2566e-01, 6.4358e-01, 1.2075e-01, 6.4043e-01, + 1.8887e-01, 2.7371e-01, 6.8889e-01, 5.2959e-01, + 9.3922e-01, 7.6833e-01, 2.0337e-01, 8.5067e-01, + 8.4082e-01, 3.0508e-01, 5.1279e-03, 7.2337e-01, + 3.1687e-01, 5.1859e-01, 7.7884e-01, 3.3823e-01, + 3.4881e-01, 2.9010e-01, 8.4461e-01, 2.9699e-01, + 1.2862e-01, 6.1963e-01, 8.3243e-01, 4.2408e-01, + 4.2219e-01, 3.9833e-01, 9.8246e-01, 6.5087e-01, + 2.7243e-01, 3.8587e-01, 1.8217e-01, 5.2833e-01, + 8.5813e-01, 5.9056e-01, 5.9287e-01, 1.5427e-02, + 8.6024e-04, 4.7559e-01, 1.2080e-01, 3.2432e-01, + 2.1262e-01, 8.9642e-01, 8.1713e-01, 4.6919e-01, + 8.8752e-01, 3.0883e-01, 7.3607e-02, 5.9033e-01, + 9.2496e-01, 3.8816e-01, 7.4830e-01, 2.0095e-02, + 4.9071e-01, 8.0820e-01, 6.1401e-01, 2.0704e-01, + 1.6333e-01, 3.7218e-01, 3.7576e-01, 4.4775e-01, + 9.4177e-01, 4.5022e-02, 2.6573e-01, 2.7714e-02, + 5.7748e-01, 5.2448e-01, 6.6488e-01, 8.2912e-01, + 4.9601e-01, 5.0720e-01, 7.2601e-01, 1.0621e-01, + 4.2250e-01, 9.2966e-02, 8.5351e-01, 2.0504e-01, + 8.1093e-01, 8.3556e-01, 7.8504e-01, 4.0147e-01, + 5.6616e-01, 9.6457e-01, 3.2385e-01, 8.3005e-01, + 2.3756e-01, 8.5434e-01, 4.1758e-01, 5.0322e-01, + 6.3325e-01, 8.5014e-01, 5.3306e-01, 9.1161e-01, + 7.3395e-01, 5.2626e-01, 6.5514e-01, 4.5030e-01, + 5.7902e-02, 7.4652e-02, 1.6787e-01, 9.3899e-01, + 4.4926e-01, 3.2424e-01, 2.0516e-01, 7.9427e-01, + 6.0377e-01, 7.0112e-01, 1.9099e-01, 9.5815e-01, + 8.5400e-01, 6.4927e-01, 5.3307e-01, 7.5548e-01, + 7.6920e-01, 6.4285e-01, 4.1006e-01, 5.6378e-01, + 7.6262e-01, 9.7070e-01, 8.7387e-01, 3.9961e-01, + 8.3150e-01, 5.1260e-02, 5.0960e-01, 7.3429e-01, + 2.1448e-01, 1.0229e-01, 6.3353e-01, 6.9008e-01, + 2.6848e-01, 2.0064e-01, 3.7248e-01, 6.8503e-01, + 8.4792e-01, 8.9476e-01, 3.8570e-01, 4.0796e-01, + 4.7005e-02, 9.4317e-01, 4.1644e-01, 7.7817e-01, + 5.6127e-02, 1.8637e-03, 5.1723e-01, 1.6884e-01, + 6.2960e-01, 8.8758e-01, 3.0337e-01, 9.7621e-03, + 3.6028e-01, 5.8110e-01, 3.2113e-01, 4.0011e-01, + 5.6132e-01, 6.8610e-01, 2.5600e-01, 3.2670e-01, + 4.7734e-02, 1.5159e-01, 6.8225e-01, 4.8681e-01, + 6.9762e-01, 1.2518e-01, 6.5938e-01, 8.7233e-01, + 1.7476e-01, 5.0644e-01, 4.7666e-02, 
6.9836e-01, + 6.0922e-01, 4.5662e-02, 4.2049e-01, 4.4406e-01, + 9.7782e-01, 5.0854e-01, 8.3445e-01, 8.2087e-01, + 5.0709e-01, 8.2261e-01, 1.8601e-01, 8.8353e-01, + 2.4993e-01, 5.7679e-01, 7.3403e-01, 1.0413e-01, + 4.4929e-01, 1.5724e-01, 9.3116e-01, 2.0452e-01, + 8.0055e-01, 6.9072e-01, 8.9440e-01, 1.9210e-01, + 1.0550e-01, 1.8541e-01, 3.8552e-01, 2.3805e-01, + 5.5397e-01, 8.7905e-02, 2.7094e-01, 8.0064e-01, + 7.3497e-01, 1.9059e-01, 9.6061e-01, 3.6064e-01, + 3.1711e-01, 9.4295e-01, 6.1345e-01, 6.1819e-02, + 9.1868e-01, 2.9214e-01, 5.8011e-01, 1.6636e-01, + 7.6270e-03, 7.9050e-01, 7.7118e-02, 2.4240e-01, + 9.7349e-01, 2.0897e-01, 9.8220e-01, 7.9315e-01, + 1.4601e-01, 1.6695e-01, 9.1372e-01, 3.4421e-01, + 7.2173e-01, 4.6515e-02, 7.9507e-01, 6.7404e-01, + 8.5423e-01, 2.4917e-01, 2.8158e-01, 9.6294e-01, + 9.9095e-01, 2.1974e-01, 5.9621e-01, 2.5061e-01, + 8.6609e-01, 3.7982e-01, 5.2088e-01, 3.2935e-01, + 5.9758e-01, 8.2698e-01, 2.6372e-01, 7.5123e-01, + 8.2643e-01, 4.9489e-01, 8.6653e-02, 5.3341e-01, + 9.7573e-01, 2.0084e-02, 5.2517e-01, 6.8384e-01, + 3.9016e-01, 7.1193e-01, 7.8214e-01, 4.5562e-01, + 3.2283e-01, 3.6359e-01, 4.8657e-01, 3.1188e-01, + 8.8621e-01, 9.9689e-01, 4.1360e-01, 5.5442e-01, + 3.6502e-01, 1.9032e-01, 5.5607e-01, 3.1676e-02, + 1.5120e-01, 3.4306e-03, 6.8152e-01, 7.8672e-01, + 4.9689e-01, 4.2707e-01, 2.0425e-02, 8.6422e-01, + 3.3566e-01, 1.3103e-01, 3.9053e-01, 9.5497e-01, + 2.5713e-01, 2.0668e-01, 7.0355e-01, 5.0968e-01, + 8.5080e-01, 2.6333e-03, 9.6484e-01, 8.6585e-01, + 4.3058e-02, 9.4803e-01, 4.1004e-01, 6.4614e-01, + 9.1539e-01, 5.4054e-01, 8.8573e-01, 2.7023e-01, + 5.8173e-01, 9.6819e-01, 3.6370e-01, 6.9719e-01, + 7.6173e-01, 6.3543e-01, 3.2216e-01, 6.7911e-02, + 4.3353e-01, 5.6489e-02, 2.2183e-01, 5.4688e-01, + 1.7106e-01, 6.9986e-01, 4.6502e-01, 2.7154e-02, + 6.2371e-01, 7.1726e-01, 1.4559e-01, 7.4359e-01, + 7.7748e-01, 2.5105e-01, 9.5926e-01, 1.3082e-01, + 1.2513e-01, 3.9857e-01, 5.1402e-01, 9.5098e-02, + 8.3973e-01, 4.4276e-01, 4.0071e-01, 7.4557e-01, + 2.5198e-01, 1.9019e-01, 7.6547e-01, 3.0204e-01, + 2.1113e-01, 5.9267e-01, 3.9936e-01, 1.0894e-01, + 8.5273e-01, 8.2794e-02, 6.8463e-01, 4.1722e-02, + 6.1317e-01, 9.1495e-01, 7.0160e-01, 9.1917e-01, + 1.6630e-01, 6.0670e-02, 5.8223e-01, 4.7812e-01, + 3.5680e-01, 7.7356e-01, 2.7646e-01, 3.3752e-01, + 1.3034e-01, 1.0181e-01, 1.7587e-01, 6.2484e-01, + 9.9420e-01, 8.9889e-01, 8.8424e-01, 9.0037e-01, + 5.4107e-01, 8.4741e-01, 6.2526e-01, 7.8273e-01, + 1.2344e-01, 3.5713e-02, 7.2895e-01, 1.5326e-01, + 8.7766e-01, 9.5493e-01, 3.9965e-01, 6.4888e-01, + 6.1241e-01, 6.5210e-01, 4.5854e-01, 6.4873e-01, + 2.0541e-01, 8.8693e-01, 1.2111e-01, 9.8831e-01, + 3.6272e-01, 3.1110e-01, 6.3708e-01, 4.4504e-01, + 8.4632e-01, 8.1129e-02, 1.7016e-01, 8.5292e-01, + 4.8763e-01, 4.1077e-01, 1.1570e-01, 1.2043e-01, + 4.5817e-01, 1.3937e-01, 9.6375e-01, 7.2373e-01, + 9.0364e-01, 2.5248e-01, 5.5106e-01, 6.5355e-01, + 1.2493e-01, 3.1448e-01, 4.1515e-01, 3.6840e-01, + 8.7500e-01, 5.2610e-01, 1.1679e-01, 9.1071e-01, + 7.6768e-01, 8.4405e-01, 7.1252e-01, 1.5579e-02, + 8.6947e-01, 2.5205e-01, 7.8084e-01, 3.1516e-01, + 3.9010e-01, 5.1827e-01, 4.2507e-01, 8.0829e-01, + 3.3252e-01, 5.9999e-01, 5.4800e-01, 8.4359e-01, + 6.1631e-01, 9.9186e-01, 2.9499e-01, 6.8305e-01, + 5.5698e-02, 3.2931e-01, 3.9575e-01, 9.3730e-01, + 1.9220e-01, 6.3075e-01, 8.2865e-01, 6.7016e-01, + 5.7389e-01, 8.8668e-01, 5.6603e-01, 3.5288e-01, + 7.7506e-01, 8.9399e-02, 1.0483e-01, 2.2150e-01, + 1.2467e-01, 6.5797e-02, 1.0620e-01, 3.6483e-01, + 1.4575e-01, 7.4815e-01, 6.7555e-01, 
2.0286e-01, + 9.8160e-01, 9.5716e-01, 7.8317e-01, 6.4937e-01, + 3.7362e-01, 2.5155e-01, 7.4803e-01, 2.9272e-01, + 4.3410e-01, 4.6518e-01, 4.6838e-01, 7.0889e-01, + 6.5950e-01, 5.3321e-01, 9.1244e-01, 4.8615e-01, + 2.9727e-01, 6.3229e-01, 2.6608e-02, 6.4733e-01, + 1.9861e-01, 8.4264e-01, 5.4853e-01, 1.1629e-01, + 4.0906e-02, 1.6713e-01, 4.6641e-01, 2.0275e-01, + 1.1296e-01, 2.4448e-01, 9.9154e-01, 9.4060e-01, + 8.1157e-01, 3.2336e-03, 3.8032e-01, 4.1005e-02, + 7.8229e-03, 4.8765e-01, 3.0631e-01, 7.8100e-01, + 3.4436e-01, 5.7776e-01, 1.3339e-01, 6.1713e-01, + 2.5451e-01, 3.7217e-01, 5.1742e-01, 6.6309e-02, + 9.2402e-01, 6.5062e-01, 4.0702e-01, 3.1084e-01, + 9.3688e-01, 2.0330e-01, 2.3533e-02, 1.3243e-01, + 3.9436e-01, 9.4356e-01, 2.9807e-01, 4.0190e-01, + 7.1822e-01, 2.5064e-01, 5.6780e-01, 3.8962e-01, + 9.0767e-01, 7.8906e-01, 2.3549e-01, 8.2594e-01, + 6.7202e-01, 7.8066e-01, 7.7064e-01, 8.5614e-02, + 9.4784e-01, 7.0525e-01, 8.1039e-02, 9.1669e-01, + 4.3664e-01, 7.2152e-01, 5.9359e-01, 5.1760e-01, + 6.2193e-01, 7.3835e-01, 7.4508e-01, 1.7890e-01, + 9.7857e-01, 5.1210e-01, 3.5115e-02, 3.9984e-01, + 5.3707e-02, 5.1640e-02, 2.8842e-01, 6.4464e-01, + 7.8607e-01, 8.6037e-01, 5.4085e-01, 8.1980e-01, + 8.5711e-01, 5.9070e-01, 1.9969e-01, 1.3851e-01, + 1.9757e-01, 4.9309e-01, 7.3440e-01, 4.5765e-01, + 4.3762e-02, 2.6087e-01, 6.1885e-01, 5.0318e-01, + 6.6678e-01, 2.9223e-01, 2.1849e-01, 2.6399e-02, + 3.1193e-01, 1.9224e-01, 6.2125e-01, 6.0625e-01, + 5.3459e-01, 1.4952e-01, 2.1281e-01, 8.4982e-02, + 7.7520e-01, 4.0304e-01, 7.2543e-01, 5.1551e-01, + 8.3280e-02, 6.7569e-01, 3.4369e-01, 7.4041e-01, + 1.0004e-01, 1.0762e-01, 5.9025e-01, 5.4491e-01, + 8.8908e-01, 3.7324e-01, 7.6994e-01, 2.8208e-01, + 2.3736e-01, 8.2145e-01, 4.5593e-01, 7.8129e-01, + 6.6486e-01, 1.9230e-01, 3.7945e-01, 8.5661e-01, + 5.4141e-02, 7.7827e-01, 1.4318e-01, 4.3511e-01, + 4.2378e-01, 9.1421e-01, 6.5131e-01, 5.3034e-01, + 1.5762e-01, 7.8575e-01, 4.3106e-01, 6.8552e-01, + 6.4713e-01, 8.8703e-01, 2.4503e-01, 5.5727e-01, + 1.9591e-01, 2.7913e-01, 8.4321e-01, 8.4356e-01, + 7.2790e-02, 1.9888e-01, 1.1536e-01, 7.6779e-01, + 1.1003e-01, 9.2989e-01, 3.1070e-02, 1.6601e-01, + 3.5563e-01, 7.8691e-01, 1.0997e-01, 3.1523e-02, + 2.1244e-02, 3.1859e-01, 2.3313e-01, 1.9623e-01, + 4.8432e-01, 7.9227e-01, 8.7568e-01, 1.1524e-01, + 6.1360e-01, 3.1755e-01, 2.0388e-01, 7.1163e-01, + 9.0922e-01, 6.4170e-01, 8.8209e-01, 4.4022e-01, + 6.3262e-01, 4.2812e-01, 6.0416e-01, 1.9969e-01, + 5.5992e-01, 2.6823e-01, 3.2539e-01, 1.4408e-01, + 7.8603e-01, 5.1714e-01, 8.4088e-01, 2.3799e-01, + 1.1020e-01, 3.6550e-01, 9.6830e-01, 7.6098e-01, + 4.2587e-01, 5.0003e-01, 3.0625e-01, 7.5848e-01, + 3.8679e-01, 2.5223e-02, 7.4226e-01, 8.7979e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.2419, 0.7684, 0.5881, ..., 0.2482, 0.4751, 0.6245]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.55661129951477 seconds + +tensor(indices=tensor([[5180, 1048, 486, ..., 1778, 3729, 8510], + [4819, 6172, 7204, ..., 2691, 9502, 8604]]), + values=tensor([3.1997e-01, 1.9746e-03, 5.9659e-01, 6.4667e-01, + 6.1573e-01, 6.8280e-01, 8.7912e-01, 1.0541e-01, + 8.7244e-01, 3.5387e-01, 8.4858e-01, 9.1804e-01, + 1.3120e-02, 8.7006e-01, 4.4250e-01, 7.2471e-01, + 8.6395e-02, 1.5554e-01, 3.3804e-01, 2.1524e-01, + 5.3286e-01, 9.7773e-01, 3.7090e-01, 3.2094e-01, + 1.2021e-01, 7.8241e-01, 7.0275e-01, 3.1333e-01, + 1.5571e-01, 2.9996e-01, 1.3413e-01, 8.7322e-01, + 4.8018e-01, 
5.6815e-01, 1.8030e-01, 3.6525e-01, + 8.5921e-02, 7.9634e-01, 2.8817e-01, 1.6498e-01, + 3.6921e-01, 8.9090e-01, 5.6953e-02, 8.7505e-01, + 2.5643e-01, 4.0341e-01, 7.8463e-01, 3.1022e-01, + 5.8772e-01, 1.4088e-01, 2.9764e-01, 3.1519e-01, + 6.4486e-01, 6.2083e-01, 1.6716e-01, 8.8200e-01, + 6.0151e-01, 9.9652e-01, 9.4318e-01, 6.9753e-01, + 5.7504e-01, 1.9732e-01, 5.4904e-01, 9.6490e-01, + 9.7912e-01, 3.3823e-01, 3.2420e-01, 8.2193e-01, + 5.7144e-01, 4.1675e-01, 5.9054e-01, 7.1548e-01, + 2.0599e-01, 8.6016e-01, 1.2423e-01, 6.4112e-01, + 2.8729e-01, 1.9319e-01, 6.8258e-01, 5.5365e-01, + 9.2505e-01, 8.9817e-02, 7.3210e-01, 5.4585e-01, + 6.8264e-01, 9.7527e-01, 1.8873e-02, 3.0728e-02, + 4.1134e-01, 1.2944e-01, 6.2916e-01, 8.7786e-01, + 8.2807e-01, 2.8300e-02, 1.0710e-01, 4.6868e-01, + 9.3658e-01, 7.1088e-01, 1.3950e-01, 9.4530e-01, + 1.8050e-01, 3.6170e-01, 4.0026e-01, 8.2397e-01, + 1.9659e-01, 7.0902e-01, 2.7460e-01, 7.6228e-01, + 2.8841e-01, 8.3735e-01, 7.7157e-01, 5.8508e-01, + 3.9936e-01, 2.3258e-01, 4.3851e-01, 5.1270e-01, + 1.8743e-01, 1.7600e-01, 1.3603e-01, 1.8335e-01, + 8.9530e-01, 4.0183e-01, 6.1375e-01, 6.1444e-01, + 9.3122e-01, 7.8277e-01, 2.1815e-01, 8.4317e-01, + 1.3314e-02, 3.2190e-01, 2.5987e-03, 9.8635e-02, + 1.5428e-01, 5.7047e-01, 2.4165e-01, 5.6144e-01, + 6.5588e-04, 4.3570e-01, 2.5930e-01, 7.9358e-01, + 6.2788e-01, 4.7540e-01, 5.5470e-02, 4.3391e-01, + 3.0397e-01, 2.3440e-01, 3.7442e-01, 6.4266e-01, + 5.7099e-01, 5.5223e-01, 1.6745e-04, 6.1158e-01, + 2.5334e-01, 7.7991e-02, 8.9946e-01, 3.0472e-01, + 5.9711e-01, 8.1378e-01, 7.9797e-01, 3.1265e-01, + 5.9195e-01, 4.3496e-01, 4.3677e-01, 3.7001e-01, + 9.7937e-01, 3.2002e-01, 2.2448e-01, 9.7242e-01, + 8.3715e-01, 9.4956e-01, 6.2662e-01, 1.5816e-01, + 3.4639e-01, 4.1324e-01, 5.0900e-01, 1.6984e-01, + 5.4434e-01, 6.9472e-01, 2.6718e-01, 8.7769e-01, + 2.6303e-01, 5.1788e-01, 9.5824e-01, 1.8087e-01, + 8.4632e-01, 6.2227e-01, 5.2599e-02, 1.7981e-01, + 9.8327e-01, 2.7832e-01, 8.6783e-01, 7.0803e-01, + 6.5027e-01, 6.0471e-01, 5.5191e-01, 2.9915e-01, + 5.2965e-01, 8.1406e-01, 8.4521e-02, 1.3297e-03, + 8.2493e-01, 6.2908e-01, 2.6660e-01, 1.0937e-01, + 7.0156e-02, 8.0394e-01, 4.6158e-01, 7.7297e-01, + 7.2187e-01, 5.3053e-01, 2.1392e-01, 5.1454e-01, + 5.1817e-01, 4.1849e-01, 7.6278e-01, 3.0078e-01, + 1.4763e-01, 1.7104e-01, 6.8369e-01, 1.2397e-01, + 9.3694e-01, 8.1823e-01, 5.4515e-01, 1.2595e-01, + 7.9169e-01, 8.9641e-01, 4.2940e-01, 4.1400e-01, + 9.7619e-01, 7.3579e-01, 4.5034e-01, 6.5426e-02, + 5.8285e-01, 7.0870e-01, 7.9733e-02, 8.1539e-02, + 3.5876e-01, 6.2046e-01, 4.5471e-01, 6.0330e-01, + 6.1647e-01, 9.4212e-01, 5.7901e-01, 5.6991e-01, + 4.5541e-01, 8.6414e-01, 3.8840e-01, 9.5995e-01, + 9.3172e-01, 8.1941e-01, 5.6711e-01, 6.6220e-01, + 6.8317e-01, 5.5823e-01, 2.5782e-01, 2.7744e-01, + 7.7498e-01, 7.8945e-01, 8.3817e-01, 6.1985e-01, + 6.5936e-01, 9.9342e-01, 7.2182e-01, 2.8905e-01, + 4.2240e-01, 3.4969e-01, 3.5269e-01, 7.6074e-01, + 1.2974e-01, 7.0944e-01, 9.1205e-01, 5.2262e-01, + 2.5481e-01, 2.8284e-02, 9.6610e-01, 8.3725e-01, + 8.2813e-01, 9.7172e-01, 8.6142e-01, 8.0989e-02, + 7.3999e-01, 1.1119e-02, 4.1762e-01, 6.9778e-01, + 4.7556e-01, 1.2793e-01, 7.1165e-01, 6.7424e-01, + 3.1456e-01, 7.8275e-01, 3.9322e-01, 4.2940e-01, + 7.2392e-01, 2.2833e-01, 8.6519e-01, 3.6761e-01, + 2.1310e-01, 7.2418e-01, 8.8593e-01, 1.8587e-01, + 5.9960e-02, 4.4120e-01, 7.5640e-01, 8.7636e-01, + 9.1339e-01, 9.4496e-02, 9.3445e-01, 3.1463e-01, + 6.1444e-01, 3.1249e-01, 4.4814e-01, 9.4495e-01, + 4.9352e-01, 1.1641e-01, 5.1358e-01, 8.9332e-01, + 7.2345e-01, 
7.7966e-01, 1.0461e-01, 5.1303e-01, + 3.8073e-01, 8.9708e-01, 5.0025e-01, 3.5962e-01, + 7.2550e-01, 3.9106e-01, 2.6945e-01, 8.6193e-01, + 6.2566e-01, 6.4358e-01, 1.2075e-01, 6.4043e-01, + 1.8887e-01, 2.7371e-01, 6.8889e-01, 5.2959e-01, + 9.3922e-01, 7.6833e-01, 2.0337e-01, 8.5067e-01, + 8.4082e-01, 3.0508e-01, 5.1279e-03, 7.2337e-01, + 3.1687e-01, 5.1859e-01, 7.7884e-01, 3.3823e-01, + 3.4881e-01, 2.9010e-01, 8.4461e-01, 2.9699e-01, + 1.2862e-01, 6.1963e-01, 8.3243e-01, 4.2408e-01, + 4.2219e-01, 3.9833e-01, 9.8246e-01, 6.5087e-01, + 2.7243e-01, 3.8587e-01, 1.8217e-01, 5.2833e-01, + 8.5813e-01, 5.9056e-01, 5.9287e-01, 1.5427e-02, + 8.6024e-04, 4.7559e-01, 1.2080e-01, 3.2432e-01, + 2.1262e-01, 8.9642e-01, 8.1713e-01, 4.6919e-01, + 8.8752e-01, 3.0883e-01, 7.3607e-02, 5.9033e-01, + 9.2496e-01, 3.8816e-01, 7.4830e-01, 2.0095e-02, + 4.9071e-01, 8.0820e-01, 6.1401e-01, 2.0704e-01, + 1.6333e-01, 3.7218e-01, 3.7576e-01, 4.4775e-01, + 9.4177e-01, 4.5022e-02, 2.6573e-01, 2.7714e-02, + 5.7748e-01, 5.2448e-01, 6.6488e-01, 8.2912e-01, + 4.9601e-01, 5.0720e-01, 7.2601e-01, 1.0621e-01, + 4.2250e-01, 9.2966e-02, 8.5351e-01, 2.0504e-01, + 8.1093e-01, 8.3556e-01, 7.8504e-01, 4.0147e-01, + 5.6616e-01, 9.6457e-01, 3.2385e-01, 8.3005e-01, + 2.3756e-01, 8.5434e-01, 4.1758e-01, 5.0322e-01, + 6.3325e-01, 8.5014e-01, 5.3306e-01, 9.1161e-01, + 7.3395e-01, 5.2626e-01, 6.5514e-01, 4.5030e-01, + 5.7902e-02, 7.4652e-02, 1.6787e-01, 9.3899e-01, + 4.4926e-01, 3.2424e-01, 2.0516e-01, 7.9427e-01, + 6.0377e-01, 7.0112e-01, 1.9099e-01, 9.5815e-01, + 8.5400e-01, 6.4927e-01, 5.3307e-01, 7.5548e-01, + 7.6920e-01, 6.4285e-01, 4.1006e-01, 5.6378e-01, + 7.6262e-01, 9.7070e-01, 8.7387e-01, 3.9961e-01, + 8.3150e-01, 5.1260e-02, 5.0960e-01, 7.3429e-01, + 2.1448e-01, 1.0229e-01, 6.3353e-01, 6.9008e-01, + 2.6848e-01, 2.0064e-01, 3.7248e-01, 6.8503e-01, + 8.4792e-01, 8.9476e-01, 3.8570e-01, 4.0796e-01, + 4.7005e-02, 9.4317e-01, 4.1644e-01, 7.7817e-01, + 5.6127e-02, 1.8637e-03, 5.1723e-01, 1.6884e-01, + 6.2960e-01, 8.8758e-01, 3.0337e-01, 9.7621e-03, + 3.6028e-01, 5.8110e-01, 3.2113e-01, 4.0011e-01, + 5.6132e-01, 6.8610e-01, 2.5600e-01, 3.2670e-01, + 4.7734e-02, 1.5159e-01, 6.8225e-01, 4.8681e-01, + 6.9762e-01, 1.2518e-01, 6.5938e-01, 8.7233e-01, + 1.7476e-01, 5.0644e-01, 4.7666e-02, 6.9836e-01, + 6.0922e-01, 4.5662e-02, 4.2049e-01, 4.4406e-01, + 9.7782e-01, 5.0854e-01, 8.3445e-01, 8.2087e-01, + 5.0709e-01, 8.2261e-01, 1.8601e-01, 8.8353e-01, + 2.4993e-01, 5.7679e-01, 7.3403e-01, 1.0413e-01, + 4.4929e-01, 1.5724e-01, 9.3116e-01, 2.0452e-01, + 8.0055e-01, 6.9072e-01, 8.9440e-01, 1.9210e-01, + 1.0550e-01, 1.8541e-01, 3.8552e-01, 2.3805e-01, + 5.5397e-01, 8.7905e-02, 2.7094e-01, 8.0064e-01, + 7.3497e-01, 1.9059e-01, 9.6061e-01, 3.6064e-01, + 3.1711e-01, 9.4295e-01, 6.1345e-01, 6.1819e-02, + 9.1868e-01, 2.9214e-01, 5.8011e-01, 1.6636e-01, + 7.6270e-03, 7.9050e-01, 7.7118e-02, 2.4240e-01, + 9.7349e-01, 2.0897e-01, 9.8220e-01, 7.9315e-01, + 1.4601e-01, 1.6695e-01, 9.1372e-01, 3.4421e-01, + 7.2173e-01, 4.6515e-02, 7.9507e-01, 6.7404e-01, + 8.5423e-01, 2.4917e-01, 2.8158e-01, 9.6294e-01, + 9.9095e-01, 2.1974e-01, 5.9621e-01, 2.5061e-01, + 8.6609e-01, 3.7982e-01, 5.2088e-01, 3.2935e-01, + 5.9758e-01, 8.2698e-01, 2.6372e-01, 7.5123e-01, + 8.2643e-01, 4.9489e-01, 8.6653e-02, 5.3341e-01, + 9.7573e-01, 2.0084e-02, 5.2517e-01, 6.8384e-01, + 3.9016e-01, 7.1193e-01, 7.8214e-01, 4.5562e-01, + 3.2283e-01, 3.6359e-01, 4.8657e-01, 3.1188e-01, + 8.8621e-01, 9.9689e-01, 4.1360e-01, 5.5442e-01, + 3.6502e-01, 1.9032e-01, 5.5607e-01, 3.1676e-02, + 1.5120e-01, 
3.4306e-03, 6.8152e-01, 7.8672e-01, + 4.9689e-01, 4.2707e-01, 2.0425e-02, 8.6422e-01, + 3.3566e-01, 1.3103e-01, 3.9053e-01, 9.5497e-01, + 2.5713e-01, 2.0668e-01, 7.0355e-01, 5.0968e-01, + 8.5080e-01, 2.6333e-03, 9.6484e-01, 8.6585e-01, + 4.3058e-02, 9.4803e-01, 4.1004e-01, 6.4614e-01, + 9.1539e-01, 5.4054e-01, 8.8573e-01, 2.7023e-01, + 5.8173e-01, 9.6819e-01, 3.6370e-01, 6.9719e-01, + 7.6173e-01, 6.3543e-01, 3.2216e-01, 6.7911e-02, + 4.3353e-01, 5.6489e-02, 2.2183e-01, 5.4688e-01, + 1.7106e-01, 6.9986e-01, 4.6502e-01, 2.7154e-02, + 6.2371e-01, 7.1726e-01, 1.4559e-01, 7.4359e-01, + 7.7748e-01, 2.5105e-01, 9.5926e-01, 1.3082e-01, + 1.2513e-01, 3.9857e-01, 5.1402e-01, 9.5098e-02, + 8.3973e-01, 4.4276e-01, 4.0071e-01, 7.4557e-01, + 2.5198e-01, 1.9019e-01, 7.6547e-01, 3.0204e-01, + 2.1113e-01, 5.9267e-01, 3.9936e-01, 1.0894e-01, + 8.5273e-01, 8.2794e-02, 6.8463e-01, 4.1722e-02, + 6.1317e-01, 9.1495e-01, 7.0160e-01, 9.1917e-01, + 1.6630e-01, 6.0670e-02, 5.8223e-01, 4.7812e-01, + 3.5680e-01, 7.7356e-01, 2.7646e-01, 3.3752e-01, + 1.3034e-01, 1.0181e-01, 1.7587e-01, 6.2484e-01, + 9.9420e-01, 8.9889e-01, 8.8424e-01, 9.0037e-01, + 5.4107e-01, 8.4741e-01, 6.2526e-01, 7.8273e-01, + 1.2344e-01, 3.5713e-02, 7.2895e-01, 1.5326e-01, + 8.7766e-01, 9.5493e-01, 3.9965e-01, 6.4888e-01, + 6.1241e-01, 6.5210e-01, 4.5854e-01, 6.4873e-01, + 2.0541e-01, 8.8693e-01, 1.2111e-01, 9.8831e-01, + 3.6272e-01, 3.1110e-01, 6.3708e-01, 4.4504e-01, + 8.4632e-01, 8.1129e-02, 1.7016e-01, 8.5292e-01, + 4.8763e-01, 4.1077e-01, 1.1570e-01, 1.2043e-01, + 4.5817e-01, 1.3937e-01, 9.6375e-01, 7.2373e-01, + 9.0364e-01, 2.5248e-01, 5.5106e-01, 6.5355e-01, + 1.2493e-01, 3.1448e-01, 4.1515e-01, 3.6840e-01, + 8.7500e-01, 5.2610e-01, 1.1679e-01, 9.1071e-01, + 7.6768e-01, 8.4405e-01, 7.1252e-01, 1.5579e-02, + 8.6947e-01, 2.5205e-01, 7.8084e-01, 3.1516e-01, + 3.9010e-01, 5.1827e-01, 4.2507e-01, 8.0829e-01, + 3.3252e-01, 5.9999e-01, 5.4800e-01, 8.4359e-01, + 6.1631e-01, 9.9186e-01, 2.9499e-01, 6.8305e-01, + 5.5698e-02, 3.2931e-01, 3.9575e-01, 9.3730e-01, + 1.9220e-01, 6.3075e-01, 8.2865e-01, 6.7016e-01, + 5.7389e-01, 8.8668e-01, 5.6603e-01, 3.5288e-01, + 7.7506e-01, 8.9399e-02, 1.0483e-01, 2.2150e-01, + 1.2467e-01, 6.5797e-02, 1.0620e-01, 3.6483e-01, + 1.4575e-01, 7.4815e-01, 6.7555e-01, 2.0286e-01, + 9.8160e-01, 9.5716e-01, 7.8317e-01, 6.4937e-01, + 3.7362e-01, 2.5155e-01, 7.4803e-01, 2.9272e-01, + 4.3410e-01, 4.6518e-01, 4.6838e-01, 7.0889e-01, + 6.5950e-01, 5.3321e-01, 9.1244e-01, 4.8615e-01, + 2.9727e-01, 6.3229e-01, 2.6608e-02, 6.4733e-01, + 1.9861e-01, 8.4264e-01, 5.4853e-01, 1.1629e-01, + 4.0906e-02, 1.6713e-01, 4.6641e-01, 2.0275e-01, + 1.1296e-01, 2.4448e-01, 9.9154e-01, 9.4060e-01, + 8.1157e-01, 3.2336e-03, 3.8032e-01, 4.1005e-02, + 7.8229e-03, 4.8765e-01, 3.0631e-01, 7.8100e-01, + 3.4436e-01, 5.7776e-01, 1.3339e-01, 6.1713e-01, + 2.5451e-01, 3.7217e-01, 5.1742e-01, 6.6309e-02, + 9.2402e-01, 6.5062e-01, 4.0702e-01, 3.1084e-01, + 9.3688e-01, 2.0330e-01, 2.3533e-02, 1.3243e-01, + 3.9436e-01, 9.4356e-01, 2.9807e-01, 4.0190e-01, + 7.1822e-01, 2.5064e-01, 5.6780e-01, 3.8962e-01, + 9.0767e-01, 7.8906e-01, 2.3549e-01, 8.2594e-01, + 6.7202e-01, 7.8066e-01, 7.7064e-01, 8.5614e-02, + 9.4784e-01, 7.0525e-01, 8.1039e-02, 9.1669e-01, + 4.3664e-01, 7.2152e-01, 5.9359e-01, 5.1760e-01, + 6.2193e-01, 7.3835e-01, 7.4508e-01, 1.7890e-01, + 9.7857e-01, 5.1210e-01, 3.5115e-02, 3.9984e-01, + 5.3707e-02, 5.1640e-02, 2.8842e-01, 6.4464e-01, + 7.8607e-01, 8.6037e-01, 5.4085e-01, 8.1980e-01, + 8.5711e-01, 5.9070e-01, 1.9969e-01, 1.3851e-01, + 1.9757e-01, 
4.9309e-01, 7.3440e-01, 4.5765e-01, + 4.3762e-02, 2.6087e-01, 6.1885e-01, 5.0318e-01, + 6.6678e-01, 2.9223e-01, 2.1849e-01, 2.6399e-02, + 3.1193e-01, 1.9224e-01, 6.2125e-01, 6.0625e-01, + 5.3459e-01, 1.4952e-01, 2.1281e-01, 8.4982e-02, + 7.7520e-01, 4.0304e-01, 7.2543e-01, 5.1551e-01, + 8.3280e-02, 6.7569e-01, 3.4369e-01, 7.4041e-01, + 1.0004e-01, 1.0762e-01, 5.9025e-01, 5.4491e-01, + 8.8908e-01, 3.7324e-01, 7.6994e-01, 2.8208e-01, + 2.3736e-01, 8.2145e-01, 4.5593e-01, 7.8129e-01, + 6.6486e-01, 1.9230e-01, 3.7945e-01, 8.5661e-01, + 5.4141e-02, 7.7827e-01, 1.4318e-01, 4.3511e-01, + 4.2378e-01, 9.1421e-01, 6.5131e-01, 5.3034e-01, + 1.5762e-01, 7.8575e-01, 4.3106e-01, 6.8552e-01, + 6.4713e-01, 8.8703e-01, 2.4503e-01, 5.5727e-01, + 1.9591e-01, 2.7913e-01, 8.4321e-01, 8.4356e-01, + 7.2790e-02, 1.9888e-01, 1.1536e-01, 7.6779e-01, + 1.1003e-01, 9.2989e-01, 3.1070e-02, 1.6601e-01, + 3.5563e-01, 7.8691e-01, 1.0997e-01, 3.1523e-02, + 2.1244e-02, 3.1859e-01, 2.3313e-01, 1.9623e-01, + 4.8432e-01, 7.9227e-01, 8.7568e-01, 1.1524e-01, + 6.1360e-01, 3.1755e-01, 2.0388e-01, 7.1163e-01, + 9.0922e-01, 6.4170e-01, 8.8209e-01, 4.4022e-01, + 6.3262e-01, 4.2812e-01, 6.0416e-01, 1.9969e-01, + 5.5992e-01, 2.6823e-01, 3.2539e-01, 1.4408e-01, + 7.8603e-01, 5.1714e-01, 8.4088e-01, 2.3799e-01, + 1.1020e-01, 3.6550e-01, 9.6830e-01, 7.6098e-01, + 4.2587e-01, 5.0003e-01, 3.0625e-01, 7.5848e-01, + 3.8679e-01, 2.5223e-02, 7.4226e-01, 8.7979e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_coo) +tensor([0.2419, 0.7684, 0.5881, ..., 0.2482, 0.4751, 0.6245]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.55661129951477 seconds + +[19.25, 18.5, 19.71, 18.65, 18.84, 18.5, 18.52, 18.7, 18.67, 22.57] +[54.11] +13.82238483428955 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 245183, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.55661129951477, 'TIME_S_1KI': 0.04305604915314182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 747.9292433834075, 'W': 54.11} +[19.25, 18.5, 19.71, 18.65, 18.84, 18.5, 18.52, 18.7, 18.67, 22.57, 19.13, 18.86, 18.61, 18.81, 18.72, 21.91, 19.83, 18.77, 19.45, 18.54] +344.795 +17.23975 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 245183, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.55661129951477, 'TIME_S_1KI': 0.04305604915314182, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 747.9292433834075, 'W': 54.11, 'J_1KI': 3.0504938898023415, 'W_1KI': 0.22069229922139788, 'W_D': 36.87025, 'J_D': 509.6347844364643, 'W_D_1KI': 0.15037849279925605, 'J_D_1KI': 0.0006133316453394242} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..bd832dc --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 57363, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.50459599494934, "TIME_S_1KI": 
0.18312494107611774, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 753.622943906784, "W": 54.14, "J_1KI": 13.137788189369175, "W_1KI": 0.9438139567316912, "W_D": 37.02675, "J_D": 515.4083549741507, "W_D_1KI": 0.645481407876157, "J_D_1KI": 0.011252574096127418} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..f750d8a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.030348777770996094} + +tensor(indices=tensor([[4846, 6971, 4024, ..., 7332, 4076, 1257], + [5976, 464, 1309, ..., 4636, 9910, 4307]]), + values=tensor([0.9878, 0.7522, 0.0095, ..., 0.3411, 0.1684, 0.2687]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.8149, 0.1038, 0.3865, ..., 0.1082, 0.2004, 0.1529]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.030348777770996094 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '34597', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.3327813148498535} + +tensor(indices=tensor([[4939, 543, 3594, ..., 4966, 937, 9676], + [2932, 3176, 1355, ..., 5501, 1855, 7861]]), + values=tensor([0.6029, 0.9170, 0.2831, ..., 0.1681, 0.5282, 0.4261]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.1818, 0.2705, 0.8269, ..., 0.6435, 0.1848, 0.7774]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 6.3327813148498535 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '57363', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.50459599494934} + +tensor(indices=tensor([[8860, 3670, 9773, ..., 5962, 5086, 1165], + [4368, 1939, 643, ..., 5074, 6592, 7194]]), + values=tensor([0.5149, 0.0342, 0.4245, ..., 0.8220, 0.8164, 0.0557]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.3688, 0.9903, 0.6282, ..., 0.1273, 0.7465, 0.8413]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.50459599494934 seconds + +tensor(indices=tensor([[8860, 3670, 9773, ..., 5962, 5086, 1165], + [4368, 1939, 643, ..., 5074, 6592, 7194]]), + values=tensor([0.5149, 0.0342, 0.4245, ..., 0.8220, 0.8164, 0.0557]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_coo) +tensor([0.3688, 0.9903, 0.6282, ..., 0.1273, 0.7465, 0.8413]) +Matrix Type: synthetic 
+Matrix Format: coo +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.50459599494934 seconds + +[19.21, 18.78, 18.49, 18.9, 18.82, 18.38, 18.56, 18.82, 18.66, 18.77] +[54.14] +13.919891834259033 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.50459599494934, 'TIME_S_1KI': 0.18312494107611774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.622943906784, 'W': 54.14} +[19.21, 18.78, 18.49, 18.9, 18.82, 18.38, 18.56, 18.82, 18.66, 18.77, 19.18, 19.36, 18.69, 19.35, 21.93, 18.87, 18.49, 19.52, 18.64, 18.85] +342.265 +17.11325 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.50459599494934, 'TIME_S_1KI': 0.18312494107611774, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 753.622943906784, 'W': 54.14, 'J_1KI': 13.137788189369175, 'W_1KI': 0.9438139567316912, 'W_D': 37.02675, 'J_D': 515.4083549741507, 'W_D_1KI': 0.645481407876157, 'J_D_1KI': 0.011252574096127418} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..fb2c845 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 121.14329671859741, "TIME_S_1KI": 1211.4329671859741, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 6950.835893559456, "W": 53.42, "J_1KI": 69508.35893559456, "W_1KI": 534.2, "W_D": 36.53725, "J_D": 4754.107614226044, "W_D_1KI": 365.3725, "J_D_1KI": 3653.725} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..6ea52ae --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 121.14329671859741} + +tensor(indices=tensor([[422178, 34759, 147995, ..., 313076, 100996, 192246], + [188830, 222551, 381104, ..., 117948, 357802, 271920]]), + values=tensor([0.2774, 0.2708, 0.4114, ..., 0.3449, 0.7705, 0.4350]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.5644, 0.3670, 0.8009, ..., 0.8151, 0.6579, 0.5120]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 121.14329671859741 seconds + +tensor(indices=tensor([[422178, 34759, 147995, ..., 313076, 
100996, 192246], + [188830, 222551, 381104, ..., 117948, 357802, 271920]]), + values=tensor([0.2774, 0.2708, 0.4114, ..., 0.3449, 0.7705, 0.4350]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.5644, 0.3670, 0.8009, ..., 0.8151, 0.6579, 0.5120]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 121.14329671859741 seconds + +[19.1, 18.5, 19.21, 18.61, 19.3, 18.73, 18.66, 18.43, 18.49, 19.51] +[53.42] +130.1167333126068 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 121.14329671859741, 'TIME_S_1KI': 1211.4329671859741, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6950.835893559456, 'W': 53.42} +[19.1, 18.5, 19.21, 18.61, 19.3, 18.73, 18.66, 18.43, 18.49, 19.51, 19.13, 18.27, 18.85, 18.54, 18.74, 18.84, 18.83, 18.58, 18.92, 18.57] +337.655 +16.882749999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 121.14329671859741, 'TIME_S_1KI': 1211.4329671859741, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 6950.835893559456, 'W': 53.42, 'J_1KI': 69508.35893559456, 'W_1KI': 534.2, 'W_D': 36.53725, 'J_D': 4754.107614226044, 'W_D_1KI': 365.3725, 'J_D_1KI': 3653.725} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..9567b0b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.522922277450562, "TIME_S_1KI": 115.22922277450562, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 855.605645942688, "W": 55.2, "J_1KI": 8556.05645942688, "W_1KI": 552.0, "W_D": 38.134, "J_D": 591.080900405407, "W_D_1KI": 381.34000000000003, "J_D_1KI": 3813.4} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..463d785 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.522922277450562} + +tensor(indices=tensor([[217732, 58415, 93380, ..., 115267, 253811, 340595], + [467583, 65398, 226188, ..., 480266, 38557, 431173]]), + values=tensor([0.6817, 0.8510, 0.4459, ..., 0.2462, 0.9760, 0.8436]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1753, 
0.2117, 0.0575, ..., 0.5883, 0.1306, 0.9376]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.522922277450562 seconds + +tensor(indices=tensor([[217732, 58415, 93380, ..., 115267, 253811, 340595], + [467583, 65398, 226188, ..., 480266, 38557, 431173]]), + values=tensor([0.6817, 0.8510, 0.4459, ..., 0.2462, 0.9760, 0.8436]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.1753, 0.2117, 0.0575, ..., 0.5883, 0.1306, 0.9376]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 11.522922277450562 seconds + +[18.85, 18.75, 18.63, 18.97, 18.59, 19.48, 18.74, 18.75, 18.59, 18.53] +[55.2] +15.500102281570435 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.522922277450562, 'TIME_S_1KI': 115.22922277450562, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.605645942688, 'W': 55.2} +[18.85, 18.75, 18.63, 18.97, 18.59, 19.48, 18.74, 18.75, 18.59, 18.53, 19.36, 18.69, 18.62, 18.49, 18.8, 18.42, 18.5, 22.68, 18.95, 18.6] +341.32 +17.066 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.522922277450562, 'TIME_S_1KI': 115.22922277450562, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 855.605645942688, 'W': 55.2, 'J_1KI': 8556.05645942688, 'W_1KI': 552.0, 'W_D': 38.134, 'J_D': 591.080900405407, 'W_D_1KI': 381.34000000000003, 'J_D_1KI': 3813.4} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..2d2f22b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 59.20624232292175, "TIME_S_1KI": 592.0624232292175, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3582.822951974869, "W": 53.59, "J_1KI": 35828.22951974869, "W_1KI": 535.9000000000001, "W_D": 36.65625000000001, "J_D": 2450.6970299184327, "W_D_1KI": 366.56250000000006, "J_D_1KI": 3665.6250000000005} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..4d2ed5e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '500000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 
59.20624232292175} + +tensor(indices=tensor([[325985, 30518, 106877, ..., 79533, 381077, 428139], + [196071, 214277, 116287, ..., 166834, 355430, 285764]]), + values=tensor([0.7046, 0.9287, 0.1445, ..., 0.4690, 0.6749, 0.4586]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3540, 0.0915, 0.7365, ..., 0.0225, 0.5897, 0.6865]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 59.20624232292175 seconds + +tensor(indices=tensor([[325985, 30518, 106877, ..., 79533, 381077, 428139], + [196071, 214277, 116287, ..., 166834, 355430, 285764]]), + values=tensor([0.7046, 0.9287, 0.1445, ..., 0.4690, 0.6749, 0.4586]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.3540, 0.0915, 0.7365, ..., 0.0225, 0.5897, 0.6865]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 59.20624232292175 seconds + +[18.98, 19.37, 18.75, 19.33, 19.1, 19.13, 18.74, 18.64, 19.02, 18.56] +[53.59] +66.85618495941162 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 59.20624232292175, 'TIME_S_1KI': 592.0624232292175, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3582.822951974869, 'W': 53.59} +[18.98, 19.37, 18.75, 19.33, 19.1, 19.13, 18.74, 18.64, 19.02, 18.56, 18.93, 18.47, 18.67, 18.59, 18.76, 18.47, 18.93, 18.69, 18.56, 18.44] +338.67499999999995 +16.933749999999996 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 59.20624232292175, 'TIME_S_1KI': 592.0624232292175, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3582.822951974869, 'W': 53.59, 'J_1KI': 35828.22951974869, 'W_1KI': 535.9000000000001, 'W_D': 36.65625000000001, 'J_D': 2450.6970299184327, 'W_D_1KI': 366.56250000000006, 'J_D_1KI': 3665.6250000000005} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..e7a72d5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1037, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.359702587127686, "TIME_S_1KI": 9.990069997230169, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 957.6284668636321, "W": 69.21, "J_1KI": 923.4604309196067, "W_1KI": 66.74059787849565, "W_D": 52.023999999999994, "J_D": 719.833309638977, "W_D_1KI": 50.16779170684667, "J_D_1KI": 48.37781263919641} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..53d5bcc --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,56 @@ 
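The derived fields in these *.json records (J, J_1KI, W_D, J_D and the other *_1KI variants) line up with the raw numbers printed at the tail of each matching *.output file: the 20-entry idle-power list with its printed sum and average, the single bracketed load-power reading, and the wall time of the measured run. The same .output files also show the iteration-scaling pattern: a probe at 100 iterations followed by one or two reruns with ITERATIONS scaled up until TIME_S approaches the 10-second BASELINE_TIME_S target. The sketch below is a minimal reconstruction of the metric relationships as inferred from the printed values alone; it is not the spmv.py or batch.py code touched by this patch, and derive_metrics is a hypothetical helper.

```python
# Hedged sketch (not the actual spmv.py / batch.py code in this patch): how the
# derived fields in the *.json records appear to follow from the raw lines at the
# end of the matching *.output files. Dictionary keys mirror the JSON field names;
# the helper derive_metrics() itself is hypothetical.

def derive_metrics(iterations, time_s, load_w, elapsed_s, idle_sum_w, n_idle=20):
    """Reproduce the derived power/energy fields of one benchmark record.

    iterations -- ITERATIONS from the JSON record
    time_s     -- TIME_S, total SpMV time of the final run
    load_w     -- the single bracketed power reading, e.g. [69.21]
    elapsed_s  -- the bare float printed right after it (wall time under load)
    idle_sum_w -- the bare float printed after the 20-entry idle list (e.g. 343.72)
    n_idle     -- number of idle samples the sum is divided by (20 in these files)
    """
    per_1k = iterations / 1000.0
    idle_w = idle_sum_w / n_idle           # e.g. 343.72 / 20 = 17.186
    w_d = load_w - idle_w                  # power above idle
    return {
        "TIME_S_1KI": time_s / per_1k,
        "J": load_w * elapsed_s,           # energy of the measured run
        "W": load_w,
        "J_1KI": load_w * elapsed_s / per_1k,
        "W_1KI": load_w / per_1k,
        "W_D": w_d,
        "J_D": w_d * elapsed_s,
        "W_D_1KI": w_d / per_1k,
        # In these records J_D_1KI tracks W_D_1KI / (ITERATIONS/1000),
        # not J_D / (ITERATIONS/1000); reproduced here as observed.
        "J_D_1KI": w_d / per_1k / per_1k,
    }

# Spot-check against the synthetic_50000_0.0001 record above:
m = derive_metrics(1037, 10.359702587127686, 69.21, 13.836562156677246, 343.72)
assert abs(m["J"]    - 957.6284668636321)  < 1e-6
assert abs(m["W_D"]  - 52.024)             < 1e-9
assert abs(m["J_D"]  - 719.833309638977)   < 1e-6
assert abs(m["J_1KI"] - 923.4604309196067) < 1e-6
```

The relation between the printed 20-entry sample list and the printed sum is not asserted here; the sketch takes the sum line as given, since that is the value the averages and W_D figures in these records are consistent with.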
+['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.0121476650238037} + +tensor(indices=tensor([[ 9909, 22087, 18728, ..., 18694, 12656, 35092], + [46988, 34313, 8438, ..., 37, 15568, 17063]]), + values=tensor([0.9924, 0.5288, 0.8573, ..., 0.0094, 0.3622, 0.5780]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.3871, 0.9103, 0.3671, ..., 0.9976, 0.3643, 0.4960]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 1.0121476650238037 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '1037', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.359702587127686} + +tensor(indices=tensor([[45444, 7002, 19345, ..., 26253, 17561, 4166], + [48565, 24379, 11081, ..., 137, 48830, 21538]]), + values=tensor([0.9231, 0.4544, 0.8754, ..., 0.5715, 0.9921, 0.3081]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.8740, 0.8886, 0.8697, ..., 0.1342, 0.3527, 0.4608]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.359702587127686 seconds + +tensor(indices=tensor([[45444, 7002, 19345, ..., 26253, 17561, 4166], + [48565, 24379, 11081, ..., 137, 48830, 21538]]), + values=tensor([0.9231, 0.4544, 0.8754, ..., 0.5715, 0.9921, 0.3081]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_coo) +tensor([0.8740, 0.8886, 0.8697, ..., 0.1342, 0.3527, 0.4608]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.359702587127686 seconds + +[18.86, 18.67, 18.56, 18.4, 18.42, 18.52, 22.45, 18.8, 18.3, 18.82] +[69.21] +13.836562156677246 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1037, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.359702587127686, 'TIME_S_1KI': 9.990069997230169, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 957.6284668636321, 'W': 69.21} +[18.86, 18.67, 18.56, 18.4, 18.42, 18.52, 22.45, 18.8, 18.3, 18.82, 19.19, 18.46, 22.82, 19.43, 18.53, 18.88, 18.58, 18.71, 18.55, 18.41] +343.72 +17.186 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1037, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.359702587127686, 'TIME_S_1KI': 9.990069997230169, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 957.6284668636321, 'W': 69.21, 'J_1KI': 923.4604309196067, 'W_1KI': 66.74059787849565, 'W_D': 52.023999999999994, 'J_D': 719.833309638977, 'W_D_1KI': 50.16779170684667, 'J_D_1KI': 48.37781263919641} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.001.json 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..8fa8bde --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 116, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.449341297149658, "TIME_S_1KI": 90.08052842370395, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 810.9393253564834, "W": 55.79, "J_1KI": 6990.856253073133, "W_1KI": 480.94827586206895, "W_D": 39.080749999999995, "J_D": 568.0608897548913, "W_D_1KI": 336.9030172413793, "J_D_1KI": 2904.3363555291317} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..f47a11e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.985235452651978} + +tensor(indices=tensor([[ 5290, 16580, 14916, ..., 49650, 27368, 26297], + [44583, 17858, 34578, ..., 4808, 1006, 6960]]), + values=tensor([0.1260, 0.7287, 0.9517, ..., 0.2338, 0.1951, 0.2707]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8780, 0.2845, 0.1213, ..., 0.9374, 0.2644, 0.3211]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 8.985235452651978 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '116', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.449341297149658} + +tensor(indices=tensor([[18357, 7388, 45331, ..., 12520, 3245, 28486], + [47969, 39919, 26342, ..., 16129, 18626, 30806]]), + values=tensor([0.2619, 0.6744, 0.8310, ..., 0.2520, 0.1545, 0.0220]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5889, 0.9614, 0.8321, ..., 0.9556, 0.7688, 0.5458]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.449341297149658 seconds + +tensor(indices=tensor([[18357, 7388, 45331, ..., 12520, 3245, 28486], + [47969, 39919, 26342, ..., 16129, 18626, 30806]]), + values=tensor([0.2619, 0.6744, 0.8310, ..., 0.2520, 0.1545, 0.0220]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.5889, 0.9614, 0.8321, ..., 0.9556, 0.7688, 0.5458]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.449341297149658 seconds + +[19.37, 18.39, 18.6, 18.34, 18.63, 18.36, 18.62, 18.39, 18.55, 18.41] +[55.79] +14.53556776046753 +{'CPU': 'Xeon 4216', 
'CORES': 16, 'ITERATIONS': 116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.449341297149658, 'TIME_S_1KI': 90.08052842370395, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 810.9393253564834, 'W': 55.79} +[19.37, 18.39, 18.6, 18.34, 18.63, 18.36, 18.62, 18.39, 18.55, 18.41, 18.83, 18.74, 18.68, 19.32, 18.47, 18.56, 18.36, 18.28, 18.34, 18.5] +334.185 +16.70925 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.449341297149658, 'TIME_S_1KI': 90.08052842370395, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 810.9393253564834, 'W': 55.79, 'J_1KI': 6990.856253073133, 'W_1KI': 480.94827586206895, 'W_D': 39.080749999999995, 'J_D': 568.0608897548913, 'W_D_1KI': 336.9030172413793, 'J_D_1KI': 2904.3363555291317} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..a67b03b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 88.68171787261963, "TIME_S_1KI": 886.8171787261963, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 5383.488392295838, "W": 53.92, "J_1KI": 53834.883922958375, "W_1KI": 539.2, "W_D": 36.96875, "J_D": 3691.0392526462674, "W_D_1KI": 369.6875, "J_D_1KI": 3696.875} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..1b9d963 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 88.68171787261963} + +tensor(indices=tensor([[16337, 13200, 28840, ..., 1122, 21635, 29443], + [14011, 22948, 46700, ..., 24469, 16174, 8849]]), + values=tensor([0.5557, 0.1776, 0.7789, ..., 0.3275, 0.2390, 0.1807]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3798, 0.0872, 0.0090, ..., 0.4829, 0.5933, 0.1234]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 88.68171787261963 seconds + +tensor(indices=tensor([[16337, 13200, 28840, ..., 1122, 21635, 29443], + [14011, 22948, 46700, ..., 24469, 16174, 8849]]), + values=tensor([0.5557, 0.1776, 0.7789, ..., 0.3275, 0.2390, 0.1807]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_coo) +tensor([0.3798, 0.0872, 0.0090, ..., 0.4829, 0.5933, 0.1234]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 88.68171787261963 seconds + +[18.87, 18.87, 18.54, 19.48, 19.21, 18.66, 18.6, 18.32, 18.46, 18.65] +[53.92] +99.84214377403259 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 88.68171787261963, 'TIME_S_1KI': 886.8171787261963, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5383.488392295838, 'W': 53.92} +[18.87, 18.87, 18.54, 19.48, 19.21, 18.66, 18.6, 18.32, 18.46, 18.65, 19.08, 18.75, 18.82, 19.48, 18.76, 18.51, 18.6, 18.42, 19.88, 18.73] +339.02500000000003 +16.95125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 88.68171787261963, 'TIME_S_1KI': 886.8171787261963, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 5383.488392295838, 'W': 53.92, 'J_1KI': 53834.883922958375, 'W_1KI': 539.2, 'W_D': 36.96875, 'J_D': 3691.0392526462674, 'W_D_1KI': 369.6875, 'J_D_1KI': 3696.875} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..331c318 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 443.95745277404785, "TIME_S_1KI": 4439.5745277404785, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 25918.761705281733, "W": 53.709999999999994, "J_1KI": 259187.61705281737, "W_1KI": 537.0999999999999, "W_D": 36.823249999999994, "J_D": 17769.745707764207, "W_D_1KI": 368.23249999999996, "J_D_1KI": 3682.325} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..4a9a4f1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 443.95745277404785} + +tensor(indices=tensor([[19704, 10194, 3807, ..., 1154, 30396, 17192], + [34683, 3483, 611, ..., 23977, 29555, 40734]]), + values=tensor([0.3957, 0.2278, 0.5768, ..., 0.4253, 0.9451, 0.7508]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.5601, 0.6188, 0.9618, ..., 0.0709, 0.0376, 0.0290]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 443.95745277404785 seconds + +tensor(indices=tensor([[19704, 10194, 3807, ..., 1154, 30396, 17192], + [34683, 3483, 611, ..., 23977, 29555, 
40734]]), + values=tensor([0.3957, 0.2278, 0.5768, ..., 0.4253, 0.9451, 0.7508]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_coo) +tensor([0.5601, 0.6188, 0.9618, ..., 0.0709, 0.0376, 0.0290]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 443.95745277404785 seconds + +[19.46, 19.22, 18.68, 18.69, 18.6, 18.8, 18.69, 18.76, 18.74, 18.75] +[53.71] +482.5686409473419 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 443.95745277404785, 'TIME_S_1KI': 4439.5745277404785, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25918.761705281733, 'W': 53.709999999999994} +[19.46, 19.22, 18.68, 18.69, 18.6, 18.8, 18.69, 18.76, 18.74, 18.75, 18.86, 18.59, 18.83, 18.88, 18.75, 18.61, 18.69, 18.69, 18.7, 18.56] +337.735 +16.88675 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 443.95745277404785, 'TIME_S_1KI': 4439.5745277404785, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 25918.761705281733, 'W': 53.709999999999994, 'J_1KI': 259187.61705281737, 'W_1KI': 537.0999999999999, 'W_D': 36.823249999999994, 'J_D': 17769.745707764207, 'W_D_1KI': 368.23249999999996, 'J_D_1KI': 3682.325} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..267612a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 9450, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.515788555145264, "TIME_S_1KI": 1.1127818576873296, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1132.3027784729004, "W": 81.36, "J_1KI": 119.82039983840217, "W_1KI": 8.60952380952381, "W_D": 64.46575, "J_D": 897.1822497706413, "W_D_1KI": 6.821772486772487, "J_D_1KI": 0.721880686430951} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..5556e43 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1246950626373291} + +tensor(indices=tensor([[ 4185, 15622, 24541, ..., 28724, 15469, 49456], + [12615, 49688, 38427, ..., 20777, 44404, 44221]]), + values=tensor([0.6399, 0.7706, 0.4873, ..., 0.3770, 0.4441, 0.4099]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.3336, 0.9335, 0.1291, ..., 0.8357, 0.2742, 
0.7631]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.1246950626373291 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '8420', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.355165243148804} + +tensor(indices=tensor([[28558, 12041, 29559, ..., 26650, 44218, 19780], + [21304, 5343, 21467, ..., 16760, 49284, 38458]]), + values=tensor([0.4399, 0.1543, 0.9943, ..., 0.5036, 0.2929, 0.1908]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5581, 0.7914, 0.0539, ..., 0.2174, 0.9504, 0.5132]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.355165243148804 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '9450', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.515788555145264} + +tensor(indices=tensor([[46359, 17987, 36171, ..., 43601, 26540, 19947], + [22154, 24580, 25519, ..., 40130, 43580, 37830]]), + values=tensor([0.0836, 0.0342, 0.5001, ..., 0.0565, 0.1549, 0.6055]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5967, 0.2929, 0.5954, ..., 0.6991, 0.9306, 0.5854]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.515788555145264 seconds + +tensor(indices=tensor([[46359, 17987, 36171, ..., 43601, 26540, 19947], + [22154, 24580, 25519, ..., 40130, 43580, 37830]]), + values=tensor([0.0836, 0.0342, 0.5001, ..., 0.0565, 0.1549, 0.6055]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5967, 0.2929, 0.5954, ..., 0.6991, 0.9306, 0.5854]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.515788555145264 seconds + +[21.58, 18.96, 18.56, 18.93, 18.42, 18.65, 18.46, 18.4, 18.6, 18.82] +[81.36] +13.917192459106445 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9450, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.515788555145264, 'TIME_S_1KI': 1.1127818576873296, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.3027784729004, 'W': 81.36} +[21.58, 18.96, 18.56, 18.93, 18.42, 18.65, 18.46, 18.4, 18.6, 18.82, 18.89, 18.58, 18.54, 19.68, 18.83, 18.71, 18.58, 18.65, 18.41, 18.56] +337.885 +16.89425 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9450, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.515788555145264, 'TIME_S_1KI': 1.1127818576873296, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.3027784729004, 'W': 81.36, 'J_1KI': 119.82039983840217, 'W_1KI': 8.60952380952381, 'W_D': 64.46575, 'J_D': 
897.1822497706413, 'W_D_1KI': 6.821772486772487, 'J_D_1KI': 0.721880686430951} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..d8c4e49 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1941, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.266668558120728, "TIME_S_1KI": 5.28937071515751, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1112.5756088733672, "W": 81.29999999999998, "J_1KI": 573.1971194607764, "W_1KI": 41.8856259659969, "W_D": 64.52974999999998, "J_D": 883.0778093074557, "W_D_1KI": 33.24562081401338, "J_D_1KI": 17.128089033494785} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..0b97858 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.5409510135650635} + +tensor(indices=tensor([[42692, 14348, 11313, ..., 33652, 32084, 40612], + [22443, 43380, 7504, ..., 2081, 22916, 19008]]), + values=tensor([0.3867, 0.3902, 0.3623, ..., 0.7348, 0.0650, 0.6782]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.5476, 0.6166, 0.1184, ..., 0.4257, 0.4154, 0.5533]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.5409510135650635 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '1941', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.266668558120728} + +tensor(indices=tensor([[31228, 14908, 33095, ..., 7137, 34898, 18371], + [29699, 21971, 12394, ..., 5297, 10739, 8454]]), + values=tensor([0.8764, 0.3018, 0.4909, ..., 0.1157, 0.9417, 0.9152]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.3670, 0.1006, 0.3802, ..., 0.8465, 0.6607, 0.6419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.266668558120728 seconds + +tensor(indices=tensor([[31228, 14908, 33095, ..., 7137, 34898, 18371], + [29699, 21971, 12394, ..., 5297, 10739, 8454]]), + values=tensor([0.8764, 0.3018, 0.4909, ..., 0.1157, 0.9417, 0.9152]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_coo) +tensor([0.3670, 0.1006, 0.3802, ..., 0.8465, 0.6607, 0.6419]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 
125000 +Density: 5e-05 +Time: 10.266668558120728 seconds + +[18.8, 18.8, 18.47, 18.55, 18.67, 18.63, 18.62, 18.51, 18.61, 18.89] +[81.3] +13.684816837310791 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1941, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.266668558120728, 'TIME_S_1KI': 5.28937071515751, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1112.5756088733672, 'W': 81.29999999999998} +[18.8, 18.8, 18.47, 18.55, 18.67, 18.63, 18.62, 18.51, 18.61, 18.89, 19.22, 18.45, 18.49, 18.61, 18.61, 18.63, 18.66, 18.58, 18.81, 18.5] +335.405 +16.770249999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1941, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.266668558120728, 'TIME_S_1KI': 5.28937071515751, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1112.5756088733672, 'W': 81.29999999999998, 'J_1KI': 573.1971194607764, 'W_1KI': 41.8856259659969, 'W_D': 64.52974999999998, 'J_D': 883.0778093074557, 'W_D_1KI': 33.24562081401338, 'J_D_1KI': 17.128089033494785} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..effc445 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 110741, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.555631637573242, "TIME_S_1KI": 0.09531818962780941, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 749.4532634925843, "W": 54.06, "J_1KI": 6.767622321385795, "W_1KI": 0.4881660812165323, "W_D": 31.128750000000004, "J_D": 431.54908020615585, "W_D_1KI": 0.2810950777038315, "J_D_1KI": 0.0025383108126514255} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..25f1a7c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02156972885131836} + +tensor(indices=tensor([[3528, 3983, 4227, ..., 4541, 4270, 2862], + [1081, 629, 3833, ..., 4216, 1084, 732]]), + values=tensor([0.7350, 0.4226, 0.6312, ..., 0.6078, 0.9096, 0.2601]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.9224, 0.7713, 0.6372, ..., 0.1080, 0.4847, 0.7627]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.02156972885131836 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '48679', 
'-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.6155078411102295} + +tensor(indices=tensor([[1659, 2087, 2826, ..., 3767, 4878, 4707], + [3169, 3716, 4838, ..., 4982, 4563, 825]]), + values=tensor([0.0476, 0.3564, 0.1056, ..., 0.4292, 0.2227, 0.1622]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.6634, 0.5987, 0.2139, ..., 0.1357, 0.4590, 0.0793]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 4.6155078411102295 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '110741', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.555631637573242} + +tensor(indices=tensor([[3518, 3152, 1558, ..., 2136, 1054, 1055], + [ 837, 3026, 1092, ..., 1204, 4761, 2690]]), + values=tensor([0.1700, 0.0498, 0.0661, ..., 0.6307, 0.2456, 0.5131]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8773, 0.4981, 0.9800, ..., 0.2529, 0.5144, 0.4144]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.555631637573242 seconds + +tensor(indices=tensor([[3518, 3152, 1558, ..., 2136, 1054, 1055], + [ 837, 3026, 1092, ..., 1204, 4761, 2690]]), + values=tensor([0.1700, 0.0498, 0.0661, ..., 0.6307, 0.2456, 0.5131]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_coo) +tensor([0.8773, 0.4981, 0.9800, ..., 0.2529, 0.5144, 0.4144]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.555631637573242 seconds + +[45.23, 41.02, 42.34, 44.11, 43.01, 27.49, 18.7, 18.73, 18.72, 18.5] +[54.06] +13.863360404968262 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 110741, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.555631637573242, 'TIME_S_1KI': 0.09531818962780941, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.4532634925843, 'W': 54.06} +[45.23, 41.02, 42.34, 44.11, 43.01, 27.49, 18.7, 18.73, 18.72, 18.5, 19.01, 18.95, 18.77, 18.66, 18.67, 18.75, 18.68, 18.54, 22.56, 19.11] +458.625 +22.93125 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 110741, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.555631637573242, 'TIME_S_1KI': 0.09531818962780941, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 749.4532634925843, 'W': 54.06, 'J_1KI': 6.767622321385795, 'W_1KI': 0.4881660812165323, 'W_D': 31.128750000000004, 'J_D': 431.54908020615585, 'W_D_1KI': 0.2810950777038315, 'J_D_1KI': 0.0025383108126514255} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..c331d55 --- /dev/null +++ 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 11854, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.450348138809204, "TIME_S_1KI": 0.8815883363260675, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 754.4207804775237, "W": 54.17999999999999, "J_1KI": 63.642718110133615, "W_1KI": 4.570609077104773, "W_D": 37.17474999999999, "J_D": 517.6338853646515, "W_D_1KI": 3.1360511219841394, "J_D_1KI": 0.26455636257669474} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..0e3cfb9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.10063052177429199} + +tensor(indices=tensor([[4256, 1775, 1486, ..., 25, 3947, 231], + [4149, 860, 4868, ..., 1565, 1200, 2326]]), + values=tensor([0.9676, 0.8494, 0.2258, ..., 0.5842, 0.4702, 0.7325]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.5644, 0.5237, 0.6338, ..., 0.5138, 0.4184, 0.6920]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.10063052177429199 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '10434', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.241670370101929} + +tensor(indices=tensor([[2083, 4342, 4759, ..., 1488, 915, 2572], + [ 176, 1258, 3325, ..., 951, 2675, 893]]), + values=tensor([0.1490, 0.3326, 0.0891, ..., 0.4835, 0.1493, 0.3566]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.9739, 0.3867, 0.2610, ..., 0.7551, 0.4642, 0.9236]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.241670370101929 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '11854', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.450348138809204} + +tensor(indices=tensor([[2712, 3456, 1371, ..., 1637, 3247, 1345], + [3614, 4904, 2296, ..., 3072, 1741, 2726]]), + values=tensor([0.4445, 0.1174, 0.8070, ..., 0.9544, 0.3783, 0.6994]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0373, 0.6085, 0.2319, ..., 0.2260, 0.9153, 0.3442]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 
+Time: 10.450348138809204 seconds + +tensor(indices=tensor([[2712, 3456, 1371, ..., 1637, 3247, 1345], + [3614, 4904, 2296, ..., 3072, 1741, 2726]]), + values=tensor([0.4445, 0.1174, 0.8070, ..., 0.9544, 0.3783, 0.6994]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_coo) +tensor([0.0373, 0.6085, 0.2319, ..., 0.2260, 0.9153, 0.3442]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.450348138809204 seconds + +[20.23, 19.1, 18.68, 18.89, 18.65, 18.63, 18.98, 18.98, 18.81, 18.57] +[54.18] +13.924340724945068 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 11854, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.450348138809204, 'TIME_S_1KI': 0.8815883363260675, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 754.4207804775237, 'W': 54.17999999999999} +[20.23, 19.1, 18.68, 18.89, 18.65, 18.63, 18.98, 18.98, 18.81, 18.57, 19.18, 18.79, 18.57, 18.63, 18.76, 19.39, 18.93, 19.6, 18.46, 18.53] +340.105 +17.00525 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 11854, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.450348138809204, 'TIME_S_1KI': 0.8815883363260675, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 754.4207804775237, 'W': 54.17999999999999, 'J_1KI': 63.642718110133615, 'W_1KI': 4.570609077104773, 'W_D': 37.17474999999999, 'J_D': 517.6338853646515, 'W_D_1KI': 3.1360511219841394, 'J_D_1KI': 0.26455636257669474} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..49bd6d7 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1179, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.32565951347351, "TIME_S_1KI": 8.75798092745845, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 746.9021103000641, "W": 54.06, "J_1KI": 633.5047585242274, "W_1KI": 45.85241730279898, "W_D": 36.764, "J_D": 507.9376467456818, "W_D_1KI": 31.182357930449538, "J_D_1KI": 26.448140738294775} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..596f748 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.8898429870605469} + +tensor(indices=tensor([[3110, 3732, 2713, ..., 2410, 1490, 3009], + [ 725, 935, 2493, ..., 4297, 2924, 1981]]), + values=tensor([0.4370, 0.7619, 0.3247, ..., 0.8306, 0.1457, 0.4236]), + size=(5000, 5000), nnz=250000, 
layout=torch.sparse_coo) +tensor([0.6529, 0.9199, 0.3212, ..., 0.5524, 0.5340, 0.6557]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.8898429870605469 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '1179', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.32565951347351} + +tensor(indices=tensor([[ 945, 43, 4319, ..., 2458, 4174, 3700], + [2643, 312, 3461, ..., 1181, 347, 2067]]), + values=tensor([0.7757, 0.2701, 0.6926, ..., 0.9982, 0.5521, 0.1080]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.3222, 0.5031, 0.8235, ..., 0.4477, 0.7161, 0.8463]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.32565951347351 seconds + +tensor(indices=tensor([[ 945, 43, 4319, ..., 2458, 4174, 3700], + [2643, 312, 3461, ..., 1181, 347, 2067]]), + values=tensor([0.7757, 0.2701, 0.6926, ..., 0.9982, 0.5521, 0.1080]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_coo) +tensor([0.3222, 0.5031, 0.8235, ..., 0.4477, 0.7161, 0.8463]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.32565951347351 seconds + +[19.07, 18.59, 18.48, 18.71, 18.55, 18.44, 23.06, 18.75, 18.63, 19.1] +[54.06] +13.816169261932373 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1179, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.32565951347351, 'TIME_S_1KI': 8.75798092745845, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 746.9021103000641, 'W': 54.06} +[19.07, 18.59, 18.48, 18.71, 18.55, 18.44, 23.06, 18.75, 18.63, 19.1, 19.01, 18.57, 22.93, 18.59, 18.51, 19.01, 18.91, 18.61, 19.51, 18.96] +345.92 +17.296 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1179, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.32565951347351, 'TIME_S_1KI': 8.75798092745845, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 746.9021103000641, 'W': 54.06, 'J_1KI': 633.5047585242274, 'W_1KI': 45.85241730279898, 'W_D': 36.764, 'J_D': 507.9376467456818, 'W_D_1KI': 31.182357930449538, 'J_D_1KI': 26.448140738294775} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..a26033c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 238, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.398764610290527, "TIME_S_1KI": 43.69228827853163, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 768.664651837349, "W": 54.21, "J_1KI": 3229.683411081298, "W_1KI": 227.7731092436975, "W_D": 37.3915, "J_D": 
530.1886059615612, "W_D_1KI": 157.10714285714286, "J_D_1KI": 660.1140456182472} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..82fa51c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 4.399606227874756} + +tensor(indices=tensor([[2867, 1756, 3485, ..., 3190, 3837, 2483], + [2033, 4, 2131, ..., 3597, 1094, 3566]]), + values=tensor([0.4044, 0.5805, 0.8116, ..., 0.9848, 0.1224, 0.0692]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.6126, 0.0131, 0.5129, ..., 0.4403, 0.2722, 0.4357]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 4.399606227874756 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '238', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.398764610290527} + +tensor(indices=tensor([[1146, 1098, 2540, ..., 2909, 2713, 1234], + [4436, 3812, 3579, ..., 390, 4373, 3552]]), + values=tensor([0.8685, 0.2657, 0.8315, ..., 0.0585, 0.1561, 0.8894]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.7976, 0.6602, 0.2638, ..., 0.0325, 0.2340, 0.7009]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.398764610290527 seconds + +tensor(indices=tensor([[1146, 1098, 2540, ..., 2909, 2713, 1234], + [4436, 3812, 3579, ..., 390, 4373, 3552]]), + values=tensor([0.8685, 0.2657, 0.8315, ..., 0.0585, 0.1561, 0.8894]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_coo) +tensor([0.7976, 0.6602, 0.2638, ..., 0.0325, 0.2340, 0.7009]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.398764610290527 seconds + +[19.0, 18.7, 18.69, 18.68, 18.63, 18.61, 18.91, 18.61, 18.48, 18.34] +[54.21] +14.179388523101807 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.398764610290527, 'TIME_S_1KI': 43.69228827853163, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 768.664651837349, 'W': 54.21} +[19.0, 18.7, 18.69, 18.68, 18.63, 18.61, 18.91, 18.61, 18.48, 18.34, 19.09, 18.63, 18.48, 18.89, 18.82, 18.52, 18.88, 18.49, 18.86, 18.55] +336.37 +16.8185 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 238, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.398764610290527, 'TIME_S_1KI': 43.69228827853163, 
'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 768.664651837349, 'W': 54.21, 'J_1KI': 3229.683411081298, 'W_1KI': 227.7731092436975, 'W_D': 37.3915, 'J_D': 530.1886059615612, 'W_D_1KI': 157.10714285714286, 'J_D_1KI': 660.1140456182472} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..95e5391 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 119, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.439661026000977, "TIME_S_1KI": 87.7282439159746, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 784.9499820899963, "W": 54.32, "J_1KI": 6596.218336890725, "W_1KI": 456.47058823529414, "W_D": 37.411249999999995, "J_D": 540.6104568752646, "W_D_1KI": 314.3802521008403, "J_D_1KI": 2641.8508579902546} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..e81e68c --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,56 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.788380146026611} + +tensor(indices=tensor([[2704, 377, 1244, ..., 2748, 2394, 1983], + [4108, 442, 2057, ..., 620, 3311, 4815]]), + values=tensor([0.0888, 0.5061, 0.9526, ..., 0.5140, 0.5074, 0.3214]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.8105, 0.4101, 0.1509, ..., 0.5654, 0.9890, 0.8612]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 8.788380146026611 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '119', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.439661026000977} + +tensor(indices=tensor([[4559, 121, 4751, ..., 2323, 2876, 4070], + [2508, 4026, 117, ..., 65, 2129, 2793]]), + values=tensor([0.8883, 0.4655, 0.3071, ..., 0.8654, 0.9133, 0.7122]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7533, 0.2735, 0.5181, ..., 0.7274, 0.2157, 0.0507]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.439661026000977 seconds + +tensor(indices=tensor([[4559, 121, 4751, ..., 2323, 2876, 4070], + [2508, 4026, 117, ..., 65, 2129, 2793]]), + values=tensor([0.8883, 0.4655, 0.3071, ..., 0.8654, 0.9133, 0.7122]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_coo) +tensor([0.7533, 0.2735, 0.5181, ..., 0.7274, 0.2157, 0.0507]) +Matrix Type: synthetic +Matrix Format: coo +Shape: 
torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.439661026000977 seconds + +[19.22, 19.2, 18.65, 18.66, 19.52, 18.96, 18.67, 18.8, 18.68, 18.88] +[54.32] +14.450478315353394 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 119, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.439661026000977, 'TIME_S_1KI': 87.7282439159746, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 784.9499820899963, 'W': 54.32} +[19.22, 19.2, 18.65, 18.66, 19.52, 18.96, 18.67, 18.8, 18.68, 18.88, 19.12, 18.44, 18.77, 18.73, 18.82, 18.48, 18.75, 18.55, 18.59, 18.59] +338.17500000000007 +16.908750000000005 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 119, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.439661026000977, 'TIME_S_1KI': 87.7282439159746, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 784.9499820899963, 'W': 54.32, 'J_1KI': 6596.218336890725, 'W_1KI': 456.47058823529414, 'W_D': 37.411249999999995, 'J_D': 540.6104568752646, 'W_D_1KI': 314.3802521008403, 'J_D_1KI': 2641.8508579902546} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..ad9dd5b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 17.55470323562622, "TIME_S_1KI": 175.5470323562622, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1197.6621975588798, "W": 54.07, "J_1KI": 11976.6219755888, "W_1KI": 540.6999999999999, "W_D": 37.16175, "J_D": 823.1408021108508, "W_D_1KI": 371.6175, "J_D_1KI": 3716.175} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..327cee0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 17.55470323562622} + +tensor(indices=tensor([[3980, 1656, 791, ..., 1031, 1680, 3850], + [3372, 116, 1065, ..., 2013, 2320, 3899]]), + values=tensor([0.4484, 0.5629, 0.7993, ..., 0.3118, 0.0206, 0.0150]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.8098, 0.1203, 0.7958, ..., 0.8798, 0.4475, 0.0461]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 17.55470323562622 seconds + +tensor(indices=tensor([[3980, 1656, 791, ..., 1031, 1680, 3850], + [3372, 116, 1065, ..., 2013, 2320, 3899]]), + values=tensor([0.4484, 0.5629, 0.7993, ..., 0.3118, 0.0206, 
0.0150]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_coo) +tensor([0.8098, 0.1203, 0.7958, ..., 0.8798, 0.4475, 0.0461]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 17.55470323562622 seconds + +[19.55, 18.49, 18.65, 18.64, 18.92, 18.54, 18.68, 18.63, 18.73, 18.59] +[54.07] +22.150216341018677 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 17.55470323562622, 'TIME_S_1KI': 175.5470323562622, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1197.6621975588798, 'W': 54.07} +[19.55, 18.49, 18.65, 18.64, 18.92, 18.54, 18.68, 18.63, 18.73, 18.59, 19.25, 18.64, 19.32, 18.55, 18.83, 19.07, 19.4, 18.45, 18.65, 18.56] +338.16499999999996 +16.90825 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 17.55470323562622, 'TIME_S_1KI': 175.5470323562622, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1197.6621975588798, 'W': 54.07, 'J_1KI': 11976.6219755888, 'W_1KI': 540.6999999999999, 'W_D': 37.16175, 'J_D': 823.1408021108508, 'W_D_1KI': 371.6175, 'J_D_1KI': 3716.175} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..57347d6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 26.31482243537903, "TIME_S_1KI": 263.1482243537903, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1707.684480586052, "W": 54.06, "J_1KI": 17076.844805860517, "W_1KI": 540.6, "W_D": 36.87325, "J_D": 1164.7775947793125, "W_D_1KI": 368.73249999999996, "J_D_1KI": 3687.3249999999994} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..dee7431 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 26.31482243537903} + +tensor(indices=tensor([[1297, 2423, 242, ..., 2397, 2749, 0], + [4910, 4296, 2548, ..., 2712, 2797, 3453]]), + values=tensor([0.2126, 0.6056, 0.7577, ..., 0.9172, 0.5725, 0.9408]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.5568, 0.6512, 0.5164, ..., 0.9022, 0.9379, 0.3982]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 26.31482243537903 seconds + 
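The derived fields in each summary dict appear to follow directly from the three raw lines logged just before it: a list of idle power samples (W), the package power under load (W), and the elapsed wall time (s). A minimal sketch of that arithmetic, assuming the field semantics below and a per-1000-iterations ("1KI") scaling by ITERATIONS; the helper name derive_metrics is illustrative and not taken from spmv.py:

    def derive_metrics(idle_samples, load_w, elapsed_s, iterations):
        # Baseline (idle) power: mean of the sampled readings.
        idle_w = sum(idle_samples) / len(idle_samples)
        j = load_w * elapsed_s        # total energy under load
        w_d = load_w - idle_w         # dynamic power above idle
        j_d = w_d * elapsed_s         # dynamic energy
        scale = 1000.0 / iterations   # "per 1000 iterations" scaling
        return {
            "W": load_w, "J": j, "W_D": w_d, "J_D": j_d,
            "W_1KI": load_w * scale, "J_1KI": j * scale,
            "W_D_1KI": w_d * scale, "J_D_1KI": j_d * scale,
        }

    # E.g. the 5000 x 5000, density 0.2 run above: 54.07 W for ~22.15 s over a
    # ~16.908 W idle baseline gives J ~= 1197.7, W_D ~= 37.16, J_D ~= 823.1,
    # matching the values recorded in its summary dict.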
+tensor(indices=tensor([[1297, 2423, 242, ..., 2397, 2749, 0], + [4910, 4296, 2548, ..., 2712, 2797, 3453]]), + values=tensor([0.2126, 0.6056, 0.7577, ..., 0.9172, 0.5725, 0.9408]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_coo) +tensor([0.5568, 0.6512, 0.5164, ..., 0.9022, 0.9379, 0.3982]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 26.31482243537903 seconds + +[20.35, 18.51, 18.82, 18.56, 19.09, 19.38, 18.79, 22.37, 19.39, 18.59] +[54.06] +31.588688135147095 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 26.31482243537903, 'TIME_S_1KI': 263.1482243537903, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1707.684480586052, 'W': 54.06} +[20.35, 18.51, 18.82, 18.56, 19.09, 19.38, 18.79, 22.37, 19.39, 18.59, 19.11, 18.64, 18.96, 18.71, 18.62, 18.46, 18.94, 18.84, 19.35, 18.56] +343.735 +17.18675 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 26.31482243537903, 'TIME_S_1KI': 263.1482243537903, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1707.684480586052, 'W': 54.06, 'J_1KI': 17076.844805860517, 'W_1KI': 540.6, 'W_D': 36.87325, 'J_D': 1164.7775947793125, 'W_D_1KI': 368.73249999999996, 'J_D_1KI': 3687.3249999999994} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..b442a3d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 35.09114933013916, "TIME_S_1KI": 350.9114933013916, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2210.063315458298, "W": 53.96, "J_1KI": 22100.633154582978, "W_1KI": 539.6, "W_D": 37.03875000000001, "J_D": 1517.0122799375656, "W_D_1KI": 370.3875000000001, "J_D_1KI": 3703.875000000001} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..3cf5003 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 35.09114933013916} + +tensor(indices=tensor([[1904, 344, 4934, ..., 2409, 1204, 2029], + [3634, 3170, 503, ..., 3590, 2808, 4816]]), + values=tensor([0.1073, 0.4089, 0.7789, ..., 0.0013, 0.6385, 0.7193]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.2941, 0.0801, 0.1888, ..., 0.6193, 0.4358, 0.9474]) +Matrix 
Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 35.09114933013916 seconds + +tensor(indices=tensor([[1904, 344, 4934, ..., 2409, 1204, 2029], + [3634, 3170, 503, ..., 3590, 2808, 4816]]), + values=tensor([0.1073, 0.4089, 0.7789, ..., 0.0013, 0.6385, 0.7193]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_coo) +tensor([0.2941, 0.0801, 0.1888, ..., 0.6193, 0.4358, 0.9474]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 35.09114933013916 seconds + +[19.17, 18.63, 18.69, 19.02, 18.66, 18.63, 18.93, 18.49, 19.76, 18.45] +[53.96] +40.95743727684021 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 35.09114933013916, 'TIME_S_1KI': 350.9114933013916, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2210.063315458298, 'W': 53.96} +[19.17, 18.63, 18.69, 19.02, 18.66, 18.63, 18.93, 18.49, 19.76, 18.45, 19.61, 18.58, 18.5, 18.3, 18.85, 18.76, 18.64, 19.4, 18.49, 18.96] +338.42499999999995 +16.921249999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 35.09114933013916, 'TIME_S_1KI': 350.9114933013916, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2210.063315458298, 'W': 53.96, 'J_1KI': 22100.633154582978, 'W_1KI': 539.6, 'W_D': 37.03875000000001, 'J_D': 1517.0122799375656, 'W_D_1KI': 370.3875000000001, 'J_D_1KI': 3703.875000000001} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..7336dc3 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 43.84387135505676, "TIME_S_1KI": 438.4387135505676, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2710.0879724621773, "W": 53.89, "J_1KI": 27100.879724621773, "W_1KI": 538.9000000000001, "W_D": 36.80575, "J_D": 1850.9337612256409, "W_D_1KI": 368.05750000000006, "J_D_1KI": 3680.5750000000007} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..f65ba0a --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,39 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 43.84387135505676} + +tensor(indices=tensor([[1253, 4667, 3771, ..., 1794, 3251, 2372], + [1944, 625, 4971, ..., 3925, 1009, 4877]]), + 
values=tensor([0.9799, 0.7006, 0.4017, ..., 0.0089, 0.2742, 0.6115]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.6820, 0.6329, 0.3544, ..., 0.4760, 0.8808, 0.3681]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 43.84387135505676 seconds + +tensor(indices=tensor([[1253, 4667, 3771, ..., 1794, 3251, 2372], + [1944, 625, 4971, ..., 3925, 1009, 4877]]), + values=tensor([0.9799, 0.7006, 0.4017, ..., 0.0089, 0.2742, 0.6115]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_coo) +tensor([0.6820, 0.6329, 0.3544, ..., 0.4760, 0.8808, 0.3681]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 43.84387135505676 seconds + +[19.01, 18.51, 18.76, 22.58, 19.23, 18.51, 19.61, 18.48, 18.65, 18.91] +[53.89] +50.28925538063049 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 43.84387135505676, 'TIME_S_1KI': 438.4387135505676, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2710.0879724621773, 'W': 53.89} +[19.01, 18.51, 18.76, 22.58, 19.23, 18.51, 19.61, 18.48, 18.65, 18.91, 19.92, 18.52, 18.77, 18.63, 19.02, 18.34, 18.6, 18.43, 18.82, 18.61] +341.685 +17.08425 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 43.84387135505676, 'TIME_S_1KI': 438.4387135505676, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2710.0879724621773, 'W': 53.89, 'J_1KI': 27100.879724621773, 'W_1KI': 538.9000000000001, 'W_D': 36.80575, 'J_D': 1850.9337612256409, 'W_D_1KI': 368.05750000000006, 'J_D_1KI': 3680.5750000000007} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..bfc909f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 643628, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.04805874824524, "TIME_S_1KI": 0.015611593573065869, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 727.0622814679145, "W": 54.01, "J_1KI": 1.1296312178275565, "W_1KI": 0.0839149322279329, "W_D": 37.126999999999995, "J_D": 499.78969309496875, "W_D_1KI": 0.05768394165573902, "J_D_1KI": 8.962310784449872e-05} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..a6da299 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,456 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 
5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.014279842376708984} + +tensor(indices=tensor([[4096, 618, 1284, 3568, 3186, 4536, 1711, 3686, 1447, + 1297, 2328, 1395, 1080, 3040, 3188, 1716, 679, 4226, + 3371, 3379, 1190, 669, 419, 3630, 70, 2941, 2450, + 861, 4517, 77, 2868, 3004, 563, 1234, 1788, 4867, + 1778, 2848, 3276, 2831, 2894, 4486, 3377, 1991, 277, + 4030, 945, 2827, 3135, 4805, 1352, 2078, 4180, 1187, + 4343, 447, 4578, 4992, 3314, 1236, 245, 3939, 2187, + 3596, 2980, 2569, 1823, 4014, 1324, 4788, 1894, 4725, + 2017, 1956, 3644, 4720, 2727, 3501, 3077, 3446, 3841, + 1881, 2310, 1324, 1679, 2619, 757, 2321, 1342, 622, + 3839, 3054, 4665, 2024, 746, 1605, 1348, 1245, 1362, + 4241, 1592, 582, 4131, 4328, 3467, 1601, 3863, 2880, + 1920, 4147, 3285, 2543, 4637, 1348, 762, 4220, 3068, + 2098, 608, 1055, 3700, 1839, 3300, 4131, 3072, 1196, + 3086, 381, 4769, 834, 3240, 1030, 2252, 157, 1018, + 2631, 4666, 3578, 1273, 3971, 488, 2434, 3954, 3284, + 2363, 4327, 2118, 3142, 2685, 2792, 4032, 3405, 1206, + 690, 3804, 1265, 1900, 2126, 2387, 2170, 340, 1524, + 1372, 4932, 1618, 695, 4943, 1008, 2146, 884, 3448, + 1673, 378, 3109, 3216, 294, 3153, 3928, 4444, 4807, + 3605, 1200, 4344, 4577, 927, 296, 2772, 1385, 814, + 1123, 435, 1657, 3052, 1976, 384, 3311, 4011, 2014, + 4001, 3482, 4796, 4376, 949, 39, 1580, 4495, 295, + 3576, 4155, 1908, 3171, 3336, 1079, 580, 2958, 2223, + 2113, 457, 3417, 2479, 4264, 1444, 269, 1475, 3828, + 3836, 564, 4115, 470, 2015, 741, 2536, 1200, 1221, + 161, 2130, 3583, 309, 865, 2390, 3881, 1004, 1458, + 2167, 4222, 149, 4097, 3041, 1857, 3717], + [ 615, 4113, 3926, 229, 2417, 1794, 229, 4500, 2589, + 623, 2153, 3984, 4612, 269, 1324, 405, 9, 3064, + 3202, 65, 396, 3400, 4032, 4929, 4721, 2765, 4040, + 3975, 238, 905, 1043, 2205, 53, 3285, 1772, 3578, + 4861, 933, 4613, 1891, 3193, 336, 3085, 3484, 146, + 2163, 3360, 3205, 4051, 1522, 359, 2784, 3700, 4235, + 3634, 441, 4432, 2098, 2909, 2497, 2953, 2758, 1633, + 4460, 2001, 2212, 3199, 2083, 2688, 3031, 714, 4914, + 251, 4095, 1339, 1518, 1343, 4027, 4593, 4715, 2749, + 1654, 4414, 1395, 2901, 2434, 4130, 3325, 2937, 1722, + 4475, 124, 3046, 4083, 185, 253, 4979, 250, 788, + 433, 3761, 4289, 235, 2959, 2537, 611, 149, 2900, + 1691, 4785, 111, 3118, 1156, 2470, 365, 3057, 921, + 3505, 4572, 237, 1804, 3160, 943, 2034, 3160, 663, + 671, 1918, 2394, 1546, 4381, 3490, 4948, 4404, 714, + 623, 4665, 445, 3223, 4146, 4132, 3456, 3118, 700, + 3914, 3377, 2139, 4507, 825, 1067, 4724, 4778, 2109, + 1119, 137, 1256, 4672, 4041, 1633, 4188, 2767, 4475, + 3239, 696, 894, 339, 4052, 2293, 151, 1463, 2173, + 147, 1912, 4909, 189, 882, 2072, 44, 4956, 3755, + 1306, 4831, 4122, 4204, 3068, 2434, 1394, 3306, 257, + 3031, 4587, 2841, 4108, 3933, 4312, 70, 4382, 86, + 4713, 2375, 2666, 2038, 4972, 2508, 3441, 330, 1088, + 3006, 2960, 473, 675, 4351, 4450, 3556, 531, 4330, + 825, 2055, 923, 4823, 1507, 983, 3735, 1656, 1941, + 1742, 3877, 2338, 528, 1582, 3373, 2767, 2422, 782, + 1435, 2785, 3118, 4894, 518, 3195, 4675, 3624, 1558, + 718, 4838, 3322, 2585, 1132, 3933, 3249]]), + values=tensor([7.5372e-01, 2.5786e-02, 1.4885e-01, 5.0062e-01, + 8.8178e-01, 4.0196e-01, 6.3836e-01, 4.0773e-01, + 2.1599e-01, 9.5506e-01, 9.6160e-01, 8.7076e-01, + 8.4546e-01, 5.1592e-02, 4.2212e-01, 5.7938e-01, + 3.9993e-01, 4.2015e-01, 6.7004e-01, 8.9568e-01, + 6.3995e-01, 3.5165e-01, 6.0381e-01, 1.2525e-01, + 7.7683e-01, 2.0997e-01, 8.6845e-01, 9.2082e-01, + 1.6308e-02, 2.1372e-01, 2.9707e-01, 1.8950e-01, + 
4.6206e-02, 2.4826e-01, 7.1576e-01, 5.8209e-01, + 1.5328e-02, 1.3575e-01, 7.8451e-01, 3.0895e-01, + 6.0830e-01, 4.9090e-01, 2.2171e-01, 7.2804e-01, + 9.0960e-01, 6.7651e-04, 9.0467e-01, 8.0238e-01, + 8.7324e-01, 6.4909e-01, 7.0730e-01, 9.7453e-01, + 9.5871e-01, 8.0205e-01, 9.6022e-01, 4.3657e-01, + 4.8564e-01, 5.4870e-01, 1.9896e-01, 3.1720e-01, + 7.5474e-01, 7.7513e-01, 7.3053e-01, 1.1060e-01, + 1.4872e-01, 3.0242e-01, 5.6952e-01, 2.0146e-01, + 3.0070e-01, 8.6621e-01, 8.4984e-02, 5.4779e-01, + 7.0835e-01, 9.7024e-01, 5.9656e-01, 6.7023e-01, + 6.5335e-02, 6.9048e-01, 2.8995e-01, 7.0210e-03, + 4.9871e-01, 2.2543e-01, 7.8023e-01, 2.5462e-01, + 9.4360e-01, 5.4981e-02, 7.4709e-02, 9.9422e-01, + 7.6628e-01, 4.7395e-01, 9.6343e-01, 7.1394e-01, + 4.4781e-01, 9.6488e-01, 1.9286e-01, 9.5945e-01, + 3.2788e-01, 1.7819e-01, 2.2324e-01, 7.0886e-01, + 3.4652e-01, 9.7384e-01, 8.4453e-01, 2.9735e-01, + 5.5890e-01, 3.3375e-01, 4.1585e-01, 4.9348e-01, + 8.6323e-01, 5.4179e-01, 8.5141e-01, 1.5492e-01, + 3.0947e-01, 2.0041e-01, 3.7336e-01, 7.2468e-01, + 5.9331e-01, 1.9083e-02, 3.5137e-01, 4.1698e-01, + 2.8646e-03, 3.2185e-01, 3.4594e-01, 7.6465e-01, + 2.1823e-02, 7.8406e-01, 9.4163e-01, 8.1641e-01, + 9.3414e-01, 7.5336e-01, 4.9072e-01, 4.4494e-01, + 4.0417e-01, 4.5657e-01, 5.8169e-01, 3.6069e-01, + 2.3511e-01, 5.6847e-01, 8.7815e-03, 3.9153e-01, + 9.9201e-01, 4.4100e-01, 8.9841e-01, 8.0546e-02, + 4.3109e-01, 8.8772e-01, 1.8462e-01, 5.0689e-02, + 7.4894e-01, 9.4071e-01, 7.1176e-01, 9.9533e-01, + 5.1038e-01, 6.3466e-01, 8.5285e-01, 7.7104e-01, + 1.1601e-01, 9.4578e-01, 3.6726e-02, 5.2496e-01, + 1.6256e-01, 8.9277e-01, 4.6236e-01, 1.2755e-01, + 1.0244e-01, 3.2708e-01, 2.7415e-01, 3.5691e-01, + 4.4364e-02, 7.1372e-01, 9.5404e-01, 4.5883e-01, + 2.3937e-01, 2.6861e-01, 3.6065e-01, 4.3498e-01, + 3.9521e-01, 7.9987e-01, 2.1619e-01, 9.7354e-01, + 9.1788e-01, 6.3399e-01, 3.6619e-01, 8.9590e-02, + 8.7062e-01, 3.0492e-01, 6.0813e-01, 5.1040e-01, + 4.6834e-01, 2.8179e-01, 7.1185e-01, 4.5609e-01, + 1.0255e-01, 7.8322e-01, 9.8037e-01, 5.8090e-02, + 7.0096e-01, 1.0199e-01, 1.3265e-01, 7.8838e-01, + 7.5520e-01, 7.0065e-01, 5.1147e-01, 3.8829e-01, + 7.5036e-01, 4.0759e-01, 8.2136e-01, 8.4073e-01, + 3.9147e-02, 6.4151e-02, 7.1015e-01, 4.3050e-01, + 9.3080e-01, 3.8100e-01, 2.5013e-01, 1.9075e-01, + 3.3512e-01, 8.5354e-01, 2.0474e-02, 2.6901e-01, + 6.4724e-01, 4.7504e-01, 8.5889e-01, 6.2243e-01, + 3.1038e-01, 7.6066e-01, 2.8500e-01, 5.7560e-01, + 3.1100e-01, 8.6658e-01, 7.3868e-01, 6.4684e-01, + 7.6666e-01, 6.7294e-02, 2.4450e-02, 2.5454e-02, + 6.9147e-01, 7.1633e-01, 3.1909e-01, 7.3567e-01, + 8.3513e-01, 9.0354e-01, 4.5157e-01, 4.5276e-01, + 7.8081e-01, 7.3622e-02, 1.6021e-01, 2.4602e-01, + 8.7689e-01, 7.1512e-01]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.4014, 0.2348, 0.3497, ..., 0.6610, 0.7575, 0.2932]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.014279842376708984 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '73530', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.1995508670806885} + +tensor(indices=tensor([[4361, 962, 1468, 4443, 831, 342, 2833, 848, 3784, + 480, 227, 3923, 3239, 4033, 2174, 4604, 1596, 154, + 3804, 2432, 861, 1960, 4809, 
1268, 3961, 3243, 1966, + 1793, 3958, 2209, 607, 2963, 2233, 3137, 1148, 323, + 4537, 4374, 1115, 4035, 3227, 3551, 3055, 1364, 1192, + 3688, 1830, 1578, 4466, 4639, 1561, 3274, 1698, 3211, + 4281, 731, 4704, 143, 3785, 4685, 3973, 2980, 4480, + 56, 1849, 746, 897, 4648, 2785, 4538, 1611, 3627, + 4631, 252, 1709, 1747, 3200, 2326, 2084, 4398, 3424, + 4503, 665, 4549, 3482, 4771, 1784, 2219, 4630, 1550, + 3580, 4455, 816, 2482, 389, 1391, 3979, 3016, 3514, + 2864, 4314, 726, 3791, 1663, 3923, 4069, 2526, 2922, + 1388, 1403, 3277, 3822, 372, 2252, 4822, 1088, 2450, + 580, 3869, 3392, 1919, 2318, 2286, 3704, 3495, 3188, + 3123, 4494, 3099, 4022, 1590, 3762, 2501, 4399, 234, + 53, 1738, 2234, 4089, 3185, 801, 2804, 2313, 4747, + 281, 4523, 3016, 2039, 3590, 1050, 927, 1141, 2310, + 1539, 2881, 1811, 2943, 3404, 4146, 1709, 1620, 1279, + 4228, 4121, 1894, 2273, 3615, 4248, 500, 1589, 4890, + 3189, 1093, 1834, 2337, 4220, 2632, 3112, 4647, 4216, + 3937, 1073, 4770, 1608, 2542, 2046, 543, 316, 1169, + 2393, 4610, 3844, 279, 4080, 1697, 2943, 2062, 3567, + 2094, 3856, 679, 1156, 252, 1659, 2334, 3282, 774, + 2932, 155, 4188, 4607, 4924, 3475, 4871, 2845, 159, + 3636, 4044, 1091, 2351, 1315, 72, 4649, 2868, 2745, + 4818, 1, 4250, 2838, 1192, 1988, 1012, 291, 3601, + 2437, 3791, 3017, 993, 2047, 601, 1571, 3824, 1279, + 3877, 3027, 720, 3505, 4078, 1591, 4921], + [1092, 2117, 4327, 1202, 1791, 4009, 4128, 4531, 3399, + 3150, 1346, 4458, 4429, 2265, 1675, 4419, 3667, 174, + 3106, 4575, 3090, 1392, 31, 2519, 3700, 2842, 4716, + 4251, 3945, 3948, 993, 1352, 1140, 755, 3761, 3901, + 4802, 1950, 3376, 952, 4123, 1528, 253, 3684, 2405, + 604, 4311, 1941, 2075, 989, 225, 4917, 2262, 4342, + 4891, 3581, 1674, 158, 1862, 1129, 316, 2606, 867, + 1903, 2323, 3607, 4235, 4533, 1866, 4161, 3756, 886, + 2571, 4738, 2448, 1928, 2459, 2202, 1243, 4040, 3984, + 3653, 36, 2707, 1811, 3872, 418, 237, 4953, 3734, + 1917, 4366, 2723, 1342, 377, 922, 4238, 4109, 3898, + 1103, 1980, 222, 3652, 309, 2529, 221, 4682, 984, + 1185, 3983, 3072, 1166, 4890, 3314, 3556, 327, 4654, + 3306, 3045, 257, 575, 1469, 4706, 1255, 2295, 4212, + 1630, 671, 2796, 3903, 1620, 2158, 462, 219, 996, + 4666, 1754, 3055, 3648, 2155, 3219, 4337, 4674, 673, + 3598, 2682, 4312, 2193, 2568, 3270, 1859, 82, 2643, + 814, 970, 683, 4794, 3827, 4969, 4983, 4514, 1250, + 2906, 4288, 4028, 3010, 420, 2676, 2157, 1692, 2643, + 964, 1509, 1724, 4198, 282, 2706, 2756, 1494, 608, + 4967, 19, 3171, 1702, 699, 219, 4871, 3488, 2544, + 824, 4148, 2170, 978, 3646, 922, 3068, 2299, 595, + 2947, 3728, 1032, 1280, 32, 4048, 2823, 1349, 2712, + 1605, 2549, 371, 1847, 2364, 4550, 2445, 4397, 2201, + 3736, 3963, 4779, 3699, 4918, 684, 568, 4003, 412, + 639, 488, 496, 4510, 4249, 2715, 4163, 4324, 365, + 1571, 2935, 1855, 2304, 1108, 3891, 4438, 1364, 1808, + 3062, 2804, 4620, 488, 987, 3726, 3792]]), + values=tensor([0.1688, 0.5725, 0.8235, 0.1236, 0.9926, 0.0678, 0.4982, + 0.5271, 0.8778, 0.2947, 0.8398, 0.5174, 0.0032, 0.6319, + 0.4775, 0.7035, 0.2773, 0.3499, 0.3030, 0.6828, 0.1610, + 0.7468, 0.2301, 0.9603, 0.2713, 0.8529, 0.1964, 0.5076, + 0.7351, 0.5497, 0.8251, 0.5178, 0.7511, 0.8466, 0.8107, + 0.4003, 0.5343, 0.1522, 0.1571, 0.8583, 0.5374, 0.3356, + 0.9014, 0.5934, 0.8904, 0.8477, 0.1883, 0.4357, 0.7247, + 0.8273, 0.7155, 0.4708, 0.5524, 0.0716, 0.4042, 0.2132, + 0.0176, 0.6597, 0.3255, 0.9472, 0.4265, 0.3889, 0.2010, + 0.0352, 0.2639, 0.2646, 0.9505, 0.9836, 0.9277, 0.6763, + 0.2780, 0.1443, 0.9110, 0.0984, 0.9783, 0.2040, 0.3633, + 0.1734, 0.7847, 0.0013, 
0.5311, 0.8340, 0.1726, 0.2746, + 0.6431, 0.8480, 0.0360, 0.0655, 0.8619, 0.5926, 0.1570, + 0.9448, 0.7463, 0.0479, 0.8294, 0.1854, 0.2895, 0.5110, + 0.5855, 0.2523, 0.5798, 0.0695, 0.7625, 1.0000, 0.0298, + 0.9612, 0.5452, 0.0970, 0.7985, 0.8396, 0.7774, 0.8589, + 0.6521, 0.7314, 0.6690, 0.6552, 0.8555, 0.7174, 0.1621, + 0.2298, 0.2358, 0.1878, 0.9573, 0.5250, 0.8117, 0.2462, + 0.3924, 0.6886, 0.8553, 0.5234, 0.6656, 0.9054, 0.6261, + 0.3933, 0.6121, 0.9604, 0.8424, 0.7385, 0.4238, 0.3066, + 0.0153, 0.6536, 0.9054, 0.6327, 0.6410, 0.3852, 0.8062, + 0.4120, 0.9156, 0.7409, 0.2682, 0.7247, 0.7992, 0.9126, + 0.1275, 0.7335, 0.0826, 0.1433, 0.3653, 0.6234, 0.1398, + 0.1257, 0.6174, 0.4352, 0.7994, 0.9565, 0.8412, 0.8974, + 0.5049, 0.0601, 0.9556, 0.5454, 0.7828, 0.2826, 0.1041, + 0.6196, 0.1299, 0.0952, 0.5349, 0.1536, 0.1506, 0.3447, + 0.3019, 0.3856, 0.3131, 0.4826, 0.5354, 0.0347, 0.5754, + 0.2756, 0.5679, 0.4784, 0.2117, 0.0488, 0.5878, 0.8144, + 0.2219, 0.5546, 0.2791, 0.2847, 0.1625, 0.2652, 0.9209, + 0.4867, 0.3160, 0.9620, 0.0602, 0.1103, 0.0021, 0.5019, + 0.3431, 0.9562, 0.6517, 0.8939, 0.1662, 0.4799, 0.7253, + 0.0629, 0.6574, 0.8677, 0.6144, 0.9970, 0.2430, 0.4089, + 0.7972, 0.6784, 0.9449, 0.8226, 0.7206, 0.6835, 0.9986, + 0.7978, 0.4452, 0.8065, 0.0046, 0.5081, 0.2133, 0.9917, + 0.3168, 0.6061, 0.1763, 0.0968, 0.0760, 0.7781, 0.3732, + 0.5273, 0.2298, 0.7594, 0.0028, 0.5947]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.8524, 0.6711, 0.6804, ..., 0.7471, 0.0695, 0.5662]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.1995508670806885 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '643628', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.04805874824524} + +tensor(indices=tensor([[1149, 3609, 2685, 4475, 4916, 3264, 4434, 368, 1852, + 4205, 4125, 2502, 338, 436, 3552, 4171, 2459, 1812, + 3687, 3586, 1481, 859, 207, 4920, 2899, 400, 3957, + 2905, 1719, 1894, 4989, 1506, 4001, 1620, 2103, 4387, + 4493, 4219, 4077, 3872, 1672, 3004, 1748, 4376, 1640, + 827, 2533, 4639, 4232, 2289, 3962, 3376, 2719, 3483, + 3363, 1865, 3442, 4327, 4536, 3610, 1533, 2697, 614, + 2772, 1853, 3383, 301, 4865, 2372, 730, 4460, 3560, + 2660, 4413, 3056, 2728, 3627, 185, 1058, 1427, 2922, + 1019, 3949, 2509, 3571, 627, 4197, 837, 4360, 1359, + 1186, 3721, 367, 90, 4541, 1466, 3001, 3487, 4843, + 673, 3373, 2267, 1719, 4362, 1112, 2656, 4139, 800, + 4668, 1177, 1607, 2185, 4458, 1779, 1421, 2743, 3914, + 198, 4338, 2302, 362, 395, 2520, 1127, 557, 2936, + 1988, 1615, 91, 410, 1723, 4470, 4124, 2632, 612, + 3836, 4222, 3511, 732, 3394, 2461, 2466, 3059, 3786, + 1126, 1329, 1974, 4979, 1719, 773, 1389, 3467, 4128, + 319, 3459, 2302, 3329, 2034, 1886, 3864, 4368, 4120, + 360, 3483, 3132, 170, 4391, 4696, 4829, 2915, 985, + 3482, 3653, 1322, 2192, 4079, 2189, 403, 4473, 4670, + 4858, 621, 3927, 4712, 568, 1437, 2327, 1349, 3218, + 4522, 4355, 459, 3517, 2364, 4230, 2802, 3504, 2477, + 3028, 1962, 3659, 1857, 2192, 1273, 4690, 4191, 2955, + 1267, 4067, 4163, 1461, 2358, 3980, 4739, 2277, 3290, + 2478, 941, 3037, 2459, 2433, 1831, 74, 2339, 2355, + 1203, 2122, 2830, 4732, 3619, 2984, 1637, 3553, 3408, + 3363, 63, 3690, 4469, 4293, 1047, 
1051, 4420, 857, + 4950, 2227, 648, 4004, 1907, 485, 849], + [1243, 362, 1734, 2235, 422, 4630, 4119, 4365, 2842, + 3141, 391, 2526, 4693, 1279, 3122, 1756, 1421, 1491, + 3858, 995, 441, 3110, 2604, 3919, 904, 4580, 4366, + 931, 2812, 4020, 60, 3330, 2154, 4832, 2571, 4376, + 4582, 1597, 4938, 4887, 420, 2535, 3956, 1960, 3411, + 3931, 374, 4829, 1816, 1259, 4778, 2817, 3957, 4910, + 3426, 1955, 341, 451, 2979, 588, 48, 3392, 142, + 4544, 4084, 1003, 2031, 4521, 4214, 1139, 868, 3169, + 2407, 870, 2583, 611, 1419, 4118, 371, 711, 2276, + 524, 2883, 1602, 4296, 2806, 2865, 2537, 2828, 1281, + 1032, 1169, 2696, 2131, 2982, 1589, 825, 91, 4635, + 514, 3673, 4618, 739, 2153, 2650, 4101, 4405, 334, + 2370, 2705, 2763, 1661, 130, 1098, 158, 3573, 2312, + 173, 2936, 4537, 4589, 1806, 1783, 4221, 45, 2463, + 4845, 2048, 784, 2660, 250, 2636, 1718, 839, 1949, + 522, 1017, 753, 4246, 2146, 2749, 4388, 4638, 1948, + 4638, 3007, 4104, 1311, 394, 2075, 3232, 754, 1937, + 3252, 407, 541, 2597, 3139, 2815, 1439, 4258, 855, + 2161, 3262, 1837, 2875, 2232, 464, 729, 2492, 509, + 2642, 2066, 577, 3000, 3610, 1312, 1316, 4837, 4744, + 3229, 2472, 4433, 3959, 3682, 722, 1906, 2797, 938, + 2172, 4422, 4237, 3739, 4934, 2555, 1125, 2229, 141, + 633, 2570, 4912, 2316, 2685, 4026, 4923, 222, 1292, + 2761, 3026, 254, 820, 1976, 4241, 1378, 429, 2429, + 2058, 3720, 2368, 4480, 4211, 1888, 4453, 1806, 4299, + 1300, 1977, 4326, 3019, 2918, 4691, 2405, 62, 1082, + 2900, 3023, 3695, 2129, 244, 4684, 3119, 2814, 2328, + 4930, 214, 1697, 1285, 2339, 3169, 2911]]), + values=tensor([0.7440, 0.8345, 0.6142, 0.9813, 0.7813, 0.3918, 0.0101, + 0.3950, 0.4991, 0.7743, 0.7462, 0.0277, 0.7973, 0.4630, + 0.6060, 0.2918, 0.8529, 0.8346, 0.3395, 0.8673, 0.6898, + 0.1805, 0.0744, 0.4950, 0.2805, 0.1082, 0.4682, 0.5390, + 0.5099, 0.4006, 0.1151, 0.0793, 0.1420, 0.1632, 0.1988, + 0.4389, 0.0174, 0.5935, 0.7591, 0.0103, 0.7488, 0.8448, + 0.2271, 0.8417, 0.5965, 0.5167, 0.2451, 0.2244, 0.7771, + 0.2444, 0.4532, 0.1923, 0.3147, 0.7996, 0.8813, 0.6992, + 0.4923, 0.6785, 0.8817, 0.1203, 0.5633, 0.7572, 0.9884, + 0.0497, 0.5024, 0.0569, 0.1229, 0.3316, 0.9454, 0.8881, + 0.9851, 0.9243, 0.0843, 0.6207, 0.0226, 0.0968, 0.6016, + 0.5959, 0.2111, 0.7881, 0.9693, 0.6697, 0.4688, 0.7746, + 0.3554, 0.1077, 0.0416, 0.7084, 0.6641, 0.9464, 0.2858, + 0.3567, 0.5540, 0.8830, 0.3549, 0.2578, 0.8943, 0.8503, + 0.5938, 0.5870, 0.0139, 0.4886, 0.9293, 0.6538, 0.1614, + 0.3533, 0.6345, 0.4179, 0.3842, 0.3957, 0.2715, 0.2500, + 0.5166, 0.8908, 0.9548, 0.4531, 0.1797, 0.3837, 0.4599, + 0.4535, 0.5541, 0.4601, 0.6611, 0.9859, 0.3452, 0.3096, + 0.7036, 0.2401, 0.0819, 0.5769, 0.5062, 0.6194, 0.9382, + 0.0295, 0.3348, 0.5122, 0.4529, 0.0255, 0.0758, 0.7708, + 0.0166, 0.6989, 0.0186, 0.4532, 0.1057, 0.5888, 0.3377, + 0.7542, 0.5158, 0.2588, 0.1427, 0.6251, 0.6878, 0.2496, + 0.4325, 0.8025, 0.3421, 0.4433, 0.0822, 0.2641, 0.5402, + 0.7306, 0.1482, 0.2825, 0.3152, 0.0700, 0.4700, 0.9297, + 0.3751, 0.8982, 0.1483, 0.9186, 0.2743, 0.9597, 0.3340, + 0.2999, 0.6533, 0.7404, 0.0130, 0.2933, 0.9789, 0.9071, + 0.0175, 0.3156, 0.1345, 0.2753, 0.7749, 0.2503, 0.8305, + 0.3034, 0.7305, 0.5373, 0.3992, 0.1960, 0.2907, 0.1535, + 0.1316, 0.2194, 0.1014, 0.9269, 0.9889, 0.9128, 0.0976, + 0.3482, 0.2877, 0.6267, 0.4150, 0.2616, 0.4682, 0.0299, + 0.0836, 0.7458, 0.2793, 0.4768, 0.9958, 0.2488, 0.4201, + 0.2869, 0.3186, 0.7942, 0.8403, 0.5786, 0.4007, 0.6150, + 0.4495, 0.2526, 0.2152, 0.5546, 0.7149, 0.4606, 0.9469, + 0.2333, 0.3018, 0.5568, 0.1244, 0.1971, 0.4252, 0.7876, + 
0.7888, 0.8426, 0.5772, 0.3188, 0.4090, 0.9424, 0.2536, + 0.8672, 0.2919, 0.0356, 0.5001, 0.2679]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.3876, 0.7864, 0.7691, ..., 0.7892, 0.3784, 0.7593]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.04805874824524 seconds + +tensor(indices=tensor([[1149, 3609, 2685, 4475, 4916, 3264, 4434, 368, 1852, + 4205, 4125, 2502, 338, 436, 3552, 4171, 2459, 1812, + 3687, 3586, 1481, 859, 207, 4920, 2899, 400, 3957, + 2905, 1719, 1894, 4989, 1506, 4001, 1620, 2103, 4387, + 4493, 4219, 4077, 3872, 1672, 3004, 1748, 4376, 1640, + 827, 2533, 4639, 4232, 2289, 3962, 3376, 2719, 3483, + 3363, 1865, 3442, 4327, 4536, 3610, 1533, 2697, 614, + 2772, 1853, 3383, 301, 4865, 2372, 730, 4460, 3560, + 2660, 4413, 3056, 2728, 3627, 185, 1058, 1427, 2922, + 1019, 3949, 2509, 3571, 627, 4197, 837, 4360, 1359, + 1186, 3721, 367, 90, 4541, 1466, 3001, 3487, 4843, + 673, 3373, 2267, 1719, 4362, 1112, 2656, 4139, 800, + 4668, 1177, 1607, 2185, 4458, 1779, 1421, 2743, 3914, + 198, 4338, 2302, 362, 395, 2520, 1127, 557, 2936, + 1988, 1615, 91, 410, 1723, 4470, 4124, 2632, 612, + 3836, 4222, 3511, 732, 3394, 2461, 2466, 3059, 3786, + 1126, 1329, 1974, 4979, 1719, 773, 1389, 3467, 4128, + 319, 3459, 2302, 3329, 2034, 1886, 3864, 4368, 4120, + 360, 3483, 3132, 170, 4391, 4696, 4829, 2915, 985, + 3482, 3653, 1322, 2192, 4079, 2189, 403, 4473, 4670, + 4858, 621, 3927, 4712, 568, 1437, 2327, 1349, 3218, + 4522, 4355, 459, 3517, 2364, 4230, 2802, 3504, 2477, + 3028, 1962, 3659, 1857, 2192, 1273, 4690, 4191, 2955, + 1267, 4067, 4163, 1461, 2358, 3980, 4739, 2277, 3290, + 2478, 941, 3037, 2459, 2433, 1831, 74, 2339, 2355, + 1203, 2122, 2830, 4732, 3619, 2984, 1637, 3553, 3408, + 3363, 63, 3690, 4469, 4293, 1047, 1051, 4420, 857, + 4950, 2227, 648, 4004, 1907, 485, 849], + [1243, 362, 1734, 2235, 422, 4630, 4119, 4365, 2842, + 3141, 391, 2526, 4693, 1279, 3122, 1756, 1421, 1491, + 3858, 995, 441, 3110, 2604, 3919, 904, 4580, 4366, + 931, 2812, 4020, 60, 3330, 2154, 4832, 2571, 4376, + 4582, 1597, 4938, 4887, 420, 2535, 3956, 1960, 3411, + 3931, 374, 4829, 1816, 1259, 4778, 2817, 3957, 4910, + 3426, 1955, 341, 451, 2979, 588, 48, 3392, 142, + 4544, 4084, 1003, 2031, 4521, 4214, 1139, 868, 3169, + 2407, 870, 2583, 611, 1419, 4118, 371, 711, 2276, + 524, 2883, 1602, 4296, 2806, 2865, 2537, 2828, 1281, + 1032, 1169, 2696, 2131, 2982, 1589, 825, 91, 4635, + 514, 3673, 4618, 739, 2153, 2650, 4101, 4405, 334, + 2370, 2705, 2763, 1661, 130, 1098, 158, 3573, 2312, + 173, 2936, 4537, 4589, 1806, 1783, 4221, 45, 2463, + 4845, 2048, 784, 2660, 250, 2636, 1718, 839, 1949, + 522, 1017, 753, 4246, 2146, 2749, 4388, 4638, 1948, + 4638, 3007, 4104, 1311, 394, 2075, 3232, 754, 1937, + 3252, 407, 541, 2597, 3139, 2815, 1439, 4258, 855, + 2161, 3262, 1837, 2875, 2232, 464, 729, 2492, 509, + 2642, 2066, 577, 3000, 3610, 1312, 1316, 4837, 4744, + 3229, 2472, 4433, 3959, 3682, 722, 1906, 2797, 938, + 2172, 4422, 4237, 3739, 4934, 2555, 1125, 2229, 141, + 633, 2570, 4912, 2316, 2685, 4026, 4923, 222, 1292, + 2761, 3026, 254, 820, 1976, 4241, 1378, 429, 2429, + 2058, 3720, 2368, 4480, 4211, 1888, 4453, 1806, 4299, + 1300, 1977, 4326, 3019, 2918, 4691, 2405, 62, 1082, + 2900, 3023, 3695, 2129, 244, 4684, 3119, 2814, 2328, + 4930, 214, 1697, 1285, 2339, 3169, 2911]]), + values=tensor([0.7440, 0.8345, 0.6142, 0.9813, 0.7813, 0.3918, 0.0101, + 0.3950, 0.4991, 0.7743, 0.7462, 0.0277, 0.7973, 0.4630, + 
0.6060, 0.2918, 0.8529, 0.8346, 0.3395, 0.8673, 0.6898, + 0.1805, 0.0744, 0.4950, 0.2805, 0.1082, 0.4682, 0.5390, + 0.5099, 0.4006, 0.1151, 0.0793, 0.1420, 0.1632, 0.1988, + 0.4389, 0.0174, 0.5935, 0.7591, 0.0103, 0.7488, 0.8448, + 0.2271, 0.8417, 0.5965, 0.5167, 0.2451, 0.2244, 0.7771, + 0.2444, 0.4532, 0.1923, 0.3147, 0.7996, 0.8813, 0.6992, + 0.4923, 0.6785, 0.8817, 0.1203, 0.5633, 0.7572, 0.9884, + 0.0497, 0.5024, 0.0569, 0.1229, 0.3316, 0.9454, 0.8881, + 0.9851, 0.9243, 0.0843, 0.6207, 0.0226, 0.0968, 0.6016, + 0.5959, 0.2111, 0.7881, 0.9693, 0.6697, 0.4688, 0.7746, + 0.3554, 0.1077, 0.0416, 0.7084, 0.6641, 0.9464, 0.2858, + 0.3567, 0.5540, 0.8830, 0.3549, 0.2578, 0.8943, 0.8503, + 0.5938, 0.5870, 0.0139, 0.4886, 0.9293, 0.6538, 0.1614, + 0.3533, 0.6345, 0.4179, 0.3842, 0.3957, 0.2715, 0.2500, + 0.5166, 0.8908, 0.9548, 0.4531, 0.1797, 0.3837, 0.4599, + 0.4535, 0.5541, 0.4601, 0.6611, 0.9859, 0.3452, 0.3096, + 0.7036, 0.2401, 0.0819, 0.5769, 0.5062, 0.6194, 0.9382, + 0.0295, 0.3348, 0.5122, 0.4529, 0.0255, 0.0758, 0.7708, + 0.0166, 0.6989, 0.0186, 0.4532, 0.1057, 0.5888, 0.3377, + 0.7542, 0.5158, 0.2588, 0.1427, 0.6251, 0.6878, 0.2496, + 0.4325, 0.8025, 0.3421, 0.4433, 0.0822, 0.2641, 0.5402, + 0.7306, 0.1482, 0.2825, 0.3152, 0.0700, 0.4700, 0.9297, + 0.3751, 0.8982, 0.1483, 0.9186, 0.2743, 0.9597, 0.3340, + 0.2999, 0.6533, 0.7404, 0.0130, 0.2933, 0.9789, 0.9071, + 0.0175, 0.3156, 0.1345, 0.2753, 0.7749, 0.2503, 0.8305, + 0.3034, 0.7305, 0.5373, 0.3992, 0.1960, 0.2907, 0.1535, + 0.1316, 0.2194, 0.1014, 0.9269, 0.9889, 0.9128, 0.0976, + 0.3482, 0.2877, 0.6267, 0.4150, 0.2616, 0.4682, 0.0299, + 0.0836, 0.7458, 0.2793, 0.4768, 0.9958, 0.2488, 0.4201, + 0.2869, 0.3186, 0.7942, 0.8403, 0.5786, 0.4007, 0.6150, + 0.4495, 0.2526, 0.2152, 0.5546, 0.7149, 0.4606, 0.9469, + 0.2333, 0.3018, 0.5568, 0.1244, 0.1971, 0.4252, 0.7876, + 0.7888, 0.8426, 0.5772, 0.3188, 0.4090, 0.9424, 0.2536, + 0.8672, 0.2919, 0.0356, 0.5001, 0.2679]), + size=(5000, 5000), nnz=250, layout=torch.sparse_coo) +tensor([0.3876, 0.7864, 0.7691, ..., 0.7892, 0.3784, 0.7593]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.04805874824524 seconds + +[19.34, 18.38, 18.73, 18.44, 18.74, 18.5, 18.52, 18.48, 18.88, 18.5] +[54.01] +13.461623430252075 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 643628, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.04805874824524, 'TIME_S_1KI': 0.015611593573065869, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 727.0622814679145, 'W': 54.01} +[19.34, 18.38, 18.73, 18.44, 18.74, 18.5, 18.52, 18.48, 18.88, 18.5, 19.07, 18.73, 18.74, 18.96, 18.54, 18.52, 19.3, 19.13, 19.34, 18.55] +337.65999999999997 +16.883 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 643628, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.04805874824524, 'TIME_S_1KI': 0.015611593573065869, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 727.0622814679145, 'W': 54.01, 'J_1KI': 1.1296312178275565, 'W_1KI': 0.0839149322279329, 'W_D': 37.126999999999995, 'J_D': 499.78969309496875, 'W_D_1KI': 0.05768394165573902, 'J_D_1KI': 8.962310784449872e-05} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_5e-05.json 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..5c133dd --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 206949, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.472784996032715, "TIME_S_1KI": 0.05060563228637353, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 748.6759082412719, "W": 53.96, "J_1KI": 3.617683140490034, "W_1KI": 0.2607405689324423, "W_D": 27.490750000000002, "J_D": 381.4244296605587, "W_D_1KI": 0.13283828382838284, "J_D_1KI": 0.0006418889863124867} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..2a2ec1b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_coo_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,73 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '100', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.016951322555541992} + +tensor(indices=tensor([[3443, 2313, 2242, ..., 4800, 1483, 3473], + [4032, 3763, 4471, ..., 3085, 1057, 1825]]), + values=tensor([0.1068, 0.9154, 0.8800, ..., 0.3341, 0.8102, 0.7867]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.7557, 0.3910, 0.5109, ..., 0.1555, 0.7629, 0.3514]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.016951322555541992 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '61942', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.1427536010742188} + +tensor(indices=tensor([[1243, 1602, 2784, ..., 26, 4298, 1321], + [2502, 2812, 977, ..., 1424, 4897, 3223]]), + values=tensor([0.6050, 0.9563, 0.3311, ..., 0.5352, 0.0550, 0.6866]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.4920, 0.4870, 0.4099, ..., 0.8715, 0.2306, 0.1514]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 3.1427536010742188 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'coo', '206949', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "coo", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.472784996032715} + +tensor(indices=tensor([[1387, 500, 4790, ..., 1497, 2199, 1869], + [4439, 2421, 2246, ..., 2989, 67, 3327]]), + values=tensor([0.3098, 0.8075, 0.7779, ..., 0.1098, 0.5386, 0.9688]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.6317, 0.1493, 0.2614, ..., 0.4209, 0.4836, 
0.9840]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.472784996032715 seconds + +tensor(indices=tensor([[1387, 500, 4790, ..., 1497, 2199, 1869], + [4439, 2421, 2246, ..., 2989, 67, 3327]]), + values=tensor([0.3098, 0.8075, 0.7779, ..., 0.1098, 0.5386, 0.9688]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_coo) +tensor([0.6317, 0.1493, 0.2614, ..., 0.4209, 0.4836, 0.9840]) +Matrix Type: synthetic +Matrix Format: coo +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.472784996032715 seconds + +[18.98, 22.05, 19.95, 18.57, 19.29, 18.62, 18.75, 18.51, 18.71, 18.66] +[53.96] +13.874646186828613 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 206949, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.472784996032715, 'TIME_S_1KI': 0.05060563228637353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 748.6759082412719, 'W': 53.96} +[18.98, 22.05, 19.95, 18.57, 19.29, 18.62, 18.75, 18.51, 18.71, 18.66, 46.37, 44.01, 43.56, 43.72, 44.21, 42.86, 34.87, 24.18, 33.73, 43.58] +529.385 +26.46925 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 206949, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'coo', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.472784996032715, 'TIME_S_1KI': 0.05060563228637353, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 748.6759082412719, 'W': 53.96, 'J_1KI': 3.617683140490034, 'W_1KI': 0.2607405689324423, 'W_D': 27.490750000000002, 'J_D': 381.4244296605587, 'W_D_1KI': 0.13283828382838284, 'J_D_1KI': 0.0006418889863124867} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json index b0c2a19..26904cb 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 33525, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.643972158432007, "TIME_S_1KI": 0.3174935766870099, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1221.4049115991593, "W": 88.39, "J_1KI": 36.43265955553048, "W_1KI": 2.6365398956002983, "W_D": 72.1155, "J_D": 996.5179986698627, "W_D_1KI": 2.151096196868009, "J_D_1KI": 0.06416394323245365} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 31035, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.417267084121704, "TIME_S_1KI": 0.335661900567801, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1250.4872237539291, "W": 90.01, "J_1KI": 40.292805663087776, "W_1KI": 2.9002738843241502, "W_D": 73.13425000000001, "J_D": 1016.0364986537696, "W_D_1KI": 2.3565087804092157, "J_D_1KI": 0.0759306840795623} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output index dc5d053..6056924 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -1,34 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04636812210083008} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04634833335876465} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 13, ..., 999983, - 999994, 1000000]), - col_indices=tensor([ 8176, 34026, 54478, ..., 84998, 92494, 98961]), - values=tensor([0.4351, 0.9999, 0.3437, ..., 0.3684, 0.7357, 0.5729]), - size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8699, 0.2767, 0.3378, ..., 0.5349, 0.7243, 0.7857]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([100000, 100000]) -Rows: 100000 -Size: 10000000000 -NNZ: 1000000 -Density: 0.0001 -Time: 0.04636812210083008 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22644', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.091935634613037} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 23, ..., 999978, +tensor(crow_indices=tensor([ 0, 10, 18, ..., 999987, 999991, 1000000]), - col_indices=tensor([26374, 42582, 44652, ..., 65952, 74293, 78884]), - values=tensor([0.4256, 0.1611, 0.6127, ..., 0.5242, 0.3400, 0.0348]), + col_indices=tensor([ 9174, 26658, 55661, ..., 82470, 90862, 91615]), + values=tensor([0.8356, 0.1194, 0.9715, ..., 0.4592, 0.1175, 0.5527]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.0369, 0.6183, 0.8933, ..., 0.8293, 0.4628, 0.9829]) +tensor([0.8705, 0.7209, 0.9154, ..., 0.3203, 0.9994, 0.8441]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 7.091935634613037 seconds +Time: 0.04634833335876465 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33525', '-ss', '100000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.643972158432007} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22654', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.664235591888428} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 17, ..., 999978, - 999994, 1000000]), - col_indices=tensor([ 9594, 11946, 25379, ..., 52892, 57506, 73818]), - values=tensor([0.4978, 0.8076, 0.6002, ..., 0.2925, 0.4675, 0.5122]), +tensor(crow_indices=tensor([ 0, 8, 20, ..., 999979, + 999993, 1000000]), + col_indices=tensor([ 4251, 20168, 21247, ..., 95888, 96747, 99036]), + values=tensor([0.8194, 0.4549, 0.8524, ..., 0.6296, 0.3982, 0.3368]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8765, 0.8954, 0.8874, ..., 0.8137, 0.3245, 0.8007]) +tensor([0.9515, 0.7824, 0.2226, ..., 0.1315, 0.9870, 0.7032]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.643972158432007 seconds +Time: 7.664235591888428 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '31035', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.417267084121704} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 17, ..., 999978, - 999994, 1000000]), - col_indices=tensor([ 9594, 11946, 25379, ..., 52892, 57506, 73818]), - values=tensor([0.4978, 0.8076, 0.6002, ..., 0.2925, 0.4675, 0.5122]), +tensor(crow_indices=tensor([ 0, 9, 21, ..., 999970, + 999984, 1000000]), + col_indices=tensor([ 7852, 7977, 10481, ..., 74970, 78788, 85228]), + values=tensor([0.0442, 0.5786, 0.0396, ..., 0.2412, 0.9148, 0.4837]), size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8765, 0.8954, 0.8874, ..., 0.8137, 0.3245, 0.8007]) +tensor([0.5025, 0.3489, 0.0390, ..., 0.2046, 0.9645, 0.0609]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +56,30 @@ Rows: 100000 Size: 10000000000 NNZ: 1000000 Density: 0.0001 -Time: 10.643972158432007 seconds +Time: 10.417267084121704 seconds -[18.37, 17.76, 17.76, 17.79, 18.13, 17.85, 18.03, 17.72, 18.33, 18.48] -[88.39] -13.818360805511475 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33525, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.643972158432007, 'TIME_S_1KI': 0.3174935766870099, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1221.4049115991593, 'W': 88.39} -[18.37, 17.76, 17.76, 17.79, 18.13, 17.85, 18.03, 17.72, 18.33, 18.48, 18.41, 18.08, 18.64, 18.55, 18.21, 18.02, 18.07, 18.01, 17.96, 17.9] -325.49 -16.2745 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33525, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.643972158432007, 'TIME_S_1KI': 0.3174935766870099, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1221.4049115991593, 'W': 88.39, 'J_1KI': 36.43265955553048, 'W_1KI': 2.6365398956002983, 'W_D': 72.1155, 'J_D': 996.5179986698627, 'W_D_1KI': 2.151096196868009, 'J_D_1KI': 0.06416394323245365} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 21, ..., 999970, + 999984, 1000000]), + col_indices=tensor([ 7852, 7977, 10481, ..., 74970, 78788, 85228]), + values=tensor([0.0442, 0.5786, 0.0396, ..., 0.2412, 0.9148, 0.4837]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.5025, 0.3489, 0.0390, ..., 0.2046, 0.9645, 0.0609]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.417267084121704 seconds + +[19.13, 18.7, 18.69, 18.52, 18.58, 18.87, 18.64, 18.46, 18.79, 18.85] +[90.01] +13.892758846282959 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 31035, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.417267084121704, 'TIME_S_1KI': 0.335661900567801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1250.4872237539291, 'W': 90.01} +[19.13, 18.7, 18.69, 18.52, 18.58, 18.87, 18.64, 18.46, 18.79, 18.85, 19.33, 18.93, 18.71, 18.54, 18.93, 18.87, 18.59, 18.87, 18.96, 18.42] +337.515 +16.87575 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 31035, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.417267084121704, 'TIME_S_1KI': 0.335661900567801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1250.4872237539291, 'W': 90.01, 'J_1KI': 40.292805663087776, 'W_1KI': 2.9002738843241502, 'W_D': 73.13425000000001, 'J_D': 1016.0364986537696, 'W_D_1KI': 2.3565087804092157, 'J_D_1KI': 0.0759306840795623} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json index 7fe967e..54d8019 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2660, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.18576693534851, "TIME_S_1KI": 3.829235689980643, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1328.5220910072328, "W": 81.2, "J_1KI": 499.44439511550104, "W_1KI": 30.526315789473685, "W_D": 64.453, "J_D": 1054.5225902917387, "W_D_1KI": 24.23045112781955, "J_D_1KI": 9.10919215331562} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2624, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.40669846534729, "TIME_S_1KI": 3.965967402952473, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1365.3930686187744, "W": 83.52, "J_1KI": 520.3479682236183, "W_1KI": 31.829268292682926, "W_D": 65.95025, "J_D": 1078.161089842856, "W_D_1KI": 25.133479420731707, "J_D_1KI": 9.578307706071534} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output index 8cde86c..5fd31c6 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.39462947845458984} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.40013909339904785} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 104, 208, ..., 9999814, - 9999906, 10000000]), - col_indices=tensor([ 1924, 2222, 4663, ..., 98435, 98556, 99127]), - values=tensor([0.9193, 0.2961, 0.8826, ..., 0.2999, 0.4100, 0.0457]), +tensor(crow_indices=tensor([ 0, 98, 192, ..., 9999815, + 9999910, 10000000]), + col_indices=tensor([ 373, 1519, 1794, ..., 95818, 96513, 98259]), + values=tensor([0.4006, 0.4074, 0.2483, ..., 0.9856, 0.5266, 0.6598]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8186, 0.3714, 0.9798, ..., 0.9009, 0.9275, 0.2252]) +tensor([0.1859, 0.5690, 0.3331, ..., 0.2308, 0.3812, 0.0144]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 0.39462947845458984 seconds +Time: 0.40013909339904785 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2660', '-ss', '100000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.18576693534851} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2624', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.40669846534729} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 111, 209, ..., 9999785, - 9999908, 10000000]), - col_indices=tensor([ 4849, 5332, 5597, ..., 99100, 99293, 99777]), - values=tensor([0.3984, 0.3126, 0.3684, ..., 0.2469, 0.5703, 0.8605]), +tensor(crow_indices=tensor([ 0, 117, 216, ..., 9999790, + 9999902, 10000000]), + col_indices=tensor([ 442, 3973, 5538, ..., 98494, 99309, 99448]), + values=tensor([0.2013, 0.4098, 0.8755, ..., 0.3384, 0.3727, 0.3410]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6012, 0.7247, 0.1820, ..., 0.8515, 0.6518, 0.6577]) +tensor([0.9970, 0.2280, 0.7030, ..., 0.1949, 0.4690, 0.9782]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,16 +36,16 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.18576693534851 seconds +Time: 10.40669846534729 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 111, 209, ..., 9999785, - 9999908, 10000000]), - col_indices=tensor([ 4849, 5332, 5597, ..., 99100, 99293, 99777]), - values=tensor([0.3984, 0.3126, 0.3684, ..., 0.2469, 0.5703, 0.8605]), +tensor(crow_indices=tensor([ 0, 117, 216, ..., 9999790, + 9999902, 10000000]), + col_indices=tensor([ 442, 3973, 5538, ..., 98494, 99309, 99448]), + values=tensor([0.2013, 0.4098, 0.8755, ..., 0.3384, 0.3727, 0.3410]), size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.6012, 0.7247, 0.1820, ..., 0.8515, 0.6518, 0.6577]) +tensor([0.9970, 0.2280, 0.7030, ..., 0.1949, 0.4690, 0.9782]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -53,13 +53,13 @@ Rows: 100000 Size: 10000000000 NNZ: 10000000 Density: 0.001 -Time: 10.18576693534851 seconds +Time: 10.40669846534729 seconds -[18.33, 18.01, 18.45, 18.12, 17.88, 17.91, 18.33, 22.42, 18.21, 17.9] -[81.2] -16.361109495162964 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2660, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.18576693534851, 'TIME_S_1KI': 3.829235689980643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1328.5220910072328, 'W': 81.2} -[18.33, 18.01, 18.45, 18.12, 17.88, 17.91, 18.33, 22.42, 18.21, 17.9, 22.63, 20.65, 18.09, 18.03, 18.02, 18.26, 18.0, 18.17, 17.93, 18.06] -334.94000000000005 -16.747000000000003 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2660, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.18576693534851, 'TIME_S_1KI': 3.829235689980643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1328.5220910072328, 'W': 81.2, 'J_1KI': 499.44439511550104, 'W_1KI': 30.526315789473685, 'W_D': 64.453, 'J_D': 1054.5225902917387, 'W_D_1KI': 24.23045112781955, 'J_D_1KI': 9.10919215331562} +[19.16, 22.25, 18.95, 19.07, 19.39, 18.46, 18.69, 18.46, 18.92, 18.39] +[83.52] +16.34809708595276 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2624, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 
'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.40669846534729, 'TIME_S_1KI': 3.965967402952473, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1365.3930686187744, 'W': 83.52} +[19.16, 22.25, 18.95, 19.07, 19.39, 18.46, 18.69, 18.46, 18.92, 18.39, 19.12, 26.91, 20.35, 18.41, 18.57, 18.57, 19.27, 18.75, 18.82, 18.44] +351.395 +17.56975 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2624, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.40669846534729, 'TIME_S_1KI': 3.965967402952473, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1365.3930686187744, 'W': 83.52, 'J_1KI': 520.3479682236183, 'W_1KI': 31.829268292682926, 'W_D': 65.95025, 'J_D': 1078.161089842856, 'W_D_1KI': 25.133479420731707, 'J_D_1KI': 9.578307706071534} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.01.json new file mode 100644 index 0000000..45279f5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 305, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.495978593826294, "TIME_S_1KI": 34.41304456992227, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2866.215010499954, "W": 65.24, "J_1KI": 9397.426263934276, "W_1KI": 213.9016393442623, "W_D": 48.390249999999995, "J_D": 2125.948205270469, "W_D_1KI": 158.65655737704918, "J_D_1KI": 520.1854340231122} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.01.output new file mode 100644 index 0000000..59f7c6e --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.439396381378174} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1014, 1961, ..., + 99997966, 99998994, 100000000]), + col_indices=tensor([ 103, 169, 257, ..., 99672, 99692, 99993]), + values=tensor([0.3305, 0.0600, 0.8724, ..., 0.9250, 0.3666, 0.7528]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.3599, 0.7206, 0.4374, ..., 0.1863, 0.8638, 0.5657]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 3.439396381378174 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '305', '-ss', '100000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.495978593826294} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 990, 2000, ..., + 99998044, 99999023, 100000000]), + col_indices=tensor([ 187, 214, 378, ..., 99725, 99778, 99993]), + values=tensor([0.2904, 0.8403, 0.4739, ..., 0.8094, 0.3153, 0.0027]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.6334, 0.8307, 0.2969, ..., 0.6260, 0.4306, 0.6036]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.495978593826294 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 990, 2000, ..., + 99998044, 99999023, 100000000]), + col_indices=tensor([ 187, 214, 378, ..., 99725, 99778, 99993]), + values=tensor([0.2904, 0.8403, 0.4739, ..., 0.8094, 0.3153, 0.0027]), + size=(100000, 100000), nnz=100000000, layout=torch.sparse_csr) +tensor([0.6334, 0.8307, 0.2969, ..., 0.6260, 0.4306, 0.6036]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000000 +Density: 0.01 +Time: 10.495978593826294 seconds + +[18.83, 18.53, 18.8, 18.61, 18.65, 18.57, 19.18, 18.51, 18.59, 18.76] +[65.24] +43.93339991569519 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 305, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.495978593826294, 'TIME_S_1KI': 34.41304456992227, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2866.215010499954, 'W': 65.24} +[18.83, 18.53, 18.8, 18.61, 18.65, 18.57, 19.18, 18.51, 18.59, 18.76, 19.05, 18.56, 18.94, 18.42, 18.92, 18.36, 18.78, 18.44, 18.63, 20.37] +336.995 +16.84975 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 305, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.495978593826294, 'TIME_S_1KI': 34.41304456992227, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2866.215010499954, 'W': 65.24, 'J_1KI': 9397.426263934276, 'W_1KI': 213.9016393442623, 'W_D': 48.390249999999995, 'J_D': 2125.948205270469, 'W_D_1KI': 158.65655737704918, 'J_D_1KI': 520.1854340231122} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json index fdd737d..5c14190 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 64522, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3600492477417, "TIME_S_1KI": 0.1605661518201807, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1143.5515343093873, "W": 83.36000000000001, "J_1KI": 17.723435949124134, "W_1KI": 1.291962431418741, "W_D": 66.67275000000001, "J_D": 914.6320244616867, "W_D_1KI": 1.0333335916431607, "J_D_1KI": 0.01601521328605996} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 66137, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.77026081085205, "TIME_S_1KI": 0.16284773743671546, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1274.6182242155076, "W": 90.22, "J_1KI": 19.27239252181846, "W_1KI": 1.3641380770219393, "W_D": 73.104, "J_D": 1032.8052611732483, "W_D_1KI": 1.1053419417270212, "J_D_1KI": 0.016712913221449736} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output index 8218685..abb77a3 100644 
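For reference, the derived fields in the result dicts above fit a small set of arithmetic relationships: J is the average package power W multiplied by the measured wall time, W_D subtracts the logged idle-power average, J_D is the idle-subtracted energy over the same window, and the *_1KI fields divide by thousands of iterations (in the logged data, J_D_1KI matches W_D_1KI divided by thousands of iterations rather than J_D). The sketch below is a hedged reconstruction, not the repository's batch.py or spmv.py; the function and argument names are illustrative, and the idle average is taken as given from the log rather than rederived from the 20 baseline samples.

# Hedged reconstruction of the derived metrics in the dicts above; not the
# actual batch.py. The derivations are inferred from the logged numbers,
# e.g. 90.01 W * 13.892758846 s = 1250.487 J in the 100000/0.0001 record.
def summarize(result, avg_w, elapsed_s, idle_w):
    """result: the spmv.py dict (ITERATIONS, TIME_S, MATRIX_* ...).
    avg_w: average power during the timed run, e.g. 90.01.
    elapsed_s: wall time of that run, e.g. 13.892758846282959.
    idle_w: the logged idle average, e.g. 16.87575 (taken as given here)."""
    per_ki = result["ITERATIONS"] / 1000.0          # "per 1000 iterations" scale
    out = dict(result)
    out["TIME_S_1KI"] = result["TIME_S"] / per_ki
    out["W"] = avg_w
    out["J"] = avg_w * elapsed_s                    # energy over the timed window
    out["J_1KI"] = out["J"] / per_ki
    out["W_1KI"] = avg_w / per_ki
    out["W_D"] = avg_w - idle_w                     # dynamic power above idle
    out["J_D"] = out["W_D"] * elapsed_s
    out["W_D_1KI"] = out["W_D"] / per_ki
    # The logged J_D_1KI equals W_D_1KI / per_ki, not J_D / per_ki:
    # 2.3565087804 / 31.035 = 0.0759306840795623 in the record above.
    out["J_D_1KI"] = out["W_D_1KI"] / per_ki
    return out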
--- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03250718116760254} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.034224748611450195} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 1, ..., 99998, 99998, 100000]), - col_indices=tensor([12882, 21465, 63858, ..., 96153, 4715, 69382]), - values=tensor([0.1495, 0.9028, 0.7353, ..., 0.9651, 0.0553, 0.0388]), + col_indices=tensor([80329, 97577, 78460, ..., 47469, 32746, 34200]), + values=tensor([0.4872, 0.5808, 0.9619, ..., 0.7805, 0.1079, 0.3610]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6296, 0.2324, 0.7696, ..., 0.0819, 0.5051, 0.6795]) +tensor([0.1059, 0.5996, 0.0895, ..., 0.5540, 0.1364, 0.6817]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 0.03250718116760254 seconds +Time: 0.034224748611450195 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '32300', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.256327390670776} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '30679', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.870610475540161} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 3, ..., 99998, 99999, +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99999, 100000, 100000]), - col_indices=tensor([24100, 22524, 41698, ..., 71518, 54296, 46275]), - values=tensor([0.6729, 0.0195, 0.1396, ..., 0.6516, 0.4177, 0.7883]), + col_indices=tensor([55380, 3096, 82701, ..., 85443, 78227, 98923]), + values=tensor([0.5422, 0.0604, 0.4016, ..., 0.7321, 0.2904, 0.5556]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.6330, 0.0132, 0.4522, ..., 0.1249, 0.8426, 0.7168]) +tensor([0.5851, 0.5126, 0.9039, ..., 0.8660, 0.2417, 0.9391]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 5.256327390670776 seconds +Time: 4.870610475540161 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '64522', '-ss', '100000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3600492477417} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '66137', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.77026081085205} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99999, +tensor(crow_indices=tensor([ 0, 2, 4, ..., 99998, 99998, 100000]), - col_indices=tensor([63372, 90175, 43637, ..., 48404, 84175, 41742]), - values=tensor([0.0143, 0.7083, 0.4138, ..., 0.7171, 0.1589, 0.0907]), + col_indices=tensor([54463, 88672, 26831, ..., 14962, 24051, 50951]), + values=tensor([0.0312, 0.6855, 0.2959, ..., 0.0652, 0.5066, 0.5715]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2672, 0.1313, 0.6231, ..., 0.4829, 0.5251, 0.7815]) +tensor([0.6086, 0.3249, 0.3975, ..., 0.5070, 0.0839, 0.6292]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.3600492477417 seconds +Time: 10.77026081085205 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
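The ladder of iteration counts in the commands above (100, then 30679, then 66137 for this density; 100 then 2624 for density 0.001) is consistent with a simple self-calibration loop: rescale the count by (10 s / measured TIME_S) with roughly 5% headroom until a run reaches the 10-second target. The sketch below is an inference from those logged counts, not the actual batch.py; run_spmv, TARGET_S, and HEADROOM are illustrative names, and the real driver may parse its output differently.

# Hedged sketch of the calibration implied by the command ladder above, e.g.
# int(100 * 10 / 0.034224748611450195 * 1.05) = 30679 and
# int(30679 * 10 / 4.870610475540161 * 1.05) = 66137.
import json
import subprocess

TARGET_S = 10.0   # every record converges on a ~10 s timed run
HEADROOM = 1.05   # the logged counts match a ~5% overshoot factor

def run_spmv(iterations, size, density):
    """Launch one spmv.py trial and return the TIME_S it reports."""
    cmd = ["apptainer", "run", "pytorch-xeon_4216.sif",
           "numactl", "--cpunodebind=0", "--membind=0",
           "python3", "spmv.py", "synthetic", "csr", str(iterations),
           "-ss", str(size), "-sd", str(density)]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    for line in proc.stdout.splitlines():
        try:
            return json.loads(line)["TIME_S"]   # the {"MATRIX_TYPE": ...} line
        except (json.JSONDecodeError, KeyError, TypeError):
            continue
    raise RuntimeError("no TIME_S line found in spmv.py output")

def calibrate(size, density, start=100):
    iterations = start
    elapsed = run_spmv(iterations, size, density)
    while elapsed < TARGET_S:
        iterations = int(iterations * TARGET_S / elapsed * HEADROOM)
        elapsed = run_spmv(iterations, size, density)
    return iterations   # e.g. 66137 for size=100000, density=1e-05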
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99999, +tensor(crow_indices=tensor([ 0, 2, 4, ..., 99998, 99998, 100000]), - col_indices=tensor([63372, 90175, 43637, ..., 48404, 84175, 41742]), - values=tensor([0.0143, 0.7083, 0.4138, ..., 0.7171, 0.1589, 0.0907]), + col_indices=tensor([54463, 88672, 26831, ..., 14962, 24051, 50951]), + values=tensor([0.0312, 0.6855, 0.2959, ..., 0.0652, 0.5066, 0.5715]), size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) -tensor([0.2672, 0.1313, 0.6231, ..., 0.4829, 0.5251, 0.7815]) +tensor([0.6086, 0.3249, 0.3975, ..., 0.5070, 0.0839, 0.6292]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 100000 Density: 1e-05 -Time: 10.3600492477417 seconds +Time: 10.77026081085205 seconds -[18.53, 17.8, 18.06, 18.37, 18.25, 17.96, 18.14, 21.36, 18.43, 17.88] -[83.36] -13.718228578567505 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 64522, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3600492477417, 'TIME_S_1KI': 0.1605661518201807, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1143.5515343093873, 'W': 83.36000000000001} -[18.53, 17.8, 18.06, 18.37, 18.25, 17.96, 18.14, 21.36, 18.43, 17.88, 18.55, 17.91, 18.58, 19.12, 20.8, 18.14, 18.33, 17.85, 18.25, 17.83] -333.745 -16.68725 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 64522, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3600492477417, 'TIME_S_1KI': 0.1605661518201807, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1143.5515343093873, 'W': 83.36000000000001, 'J_1KI': 17.723435949124134, 'W_1KI': 1.291962431418741, 'W_D': 66.67275000000001, 'J_D': 914.6320244616867, 'W_D_1KI': 1.0333335916431607, 'J_D_1KI': 0.01601521328605996} +[19.11, 18.48, 18.88, 20.9, 19.07, 19.66, 19.46, 18.54, 19.32, 18.58] +[90.22] +14.12788987159729 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 66137, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.77026081085205, 'TIME_S_1KI': 0.16284773743671546, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.6182242155076, 'W': 90.22} +[19.11, 18.48, 18.88, 20.9, 19.07, 19.66, 19.46, 18.54, 19.32, 18.58, 19.93, 18.36, 18.99, 18.76, 18.99, 18.64, 18.96, 18.38, 18.86, 18.52] +342.32 +17.116 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 66137, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.77026081085205, 'TIME_S_1KI': 0.16284773743671546, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.6182242155076, 'W': 90.22, 'J_1KI': 19.27239252181846, 'W_1KI': 1.3641380770219393, 'W_D': 73.104, 'J_D': 1032.8052611732483, 'W_D_1KI': 1.1053419417270212, 'J_D_1KI': 0.016712913221449736} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.json index 07c28cc..06bc5c6 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 46682, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.460205316543579, "TIME_S_1KI": 0.22407363258951157, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1228.6304648017883, "W": 87.08, "J_1KI": 26.319147954281917, "W_1KI": 1.865387087099953, "W_D": 70.695, "J_D": 997.4509727740286, "W_D_1KI": 1.51439527012553, "J_D_1KI": 0.03244066814030097} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 45734, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.604183673858643, "TIME_S_1KI": 0.2318665254265676, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1283.9346827626227, "W": 90.49, "J_1KI": 28.073964288333027, "W_1KI": 1.9786154720776665, "W_D": 73.52924999999999, "J_D": 1043.283835479319, "W_D_1KI": 1.6077589976822493, "J_D_1KI": 0.03515456766699281} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.output index 0398e28..630ea89 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.03882646560668945} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.03907012939453125} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 12, ..., 499993, 499995, +tensor(crow_indices=tensor([ 0, 1, 7, ..., 499989, 499996, 500000]), - col_indices=tensor([20130, 29829, 49027, ..., 32515, 51857, 99803]), - values=tensor([0.4194, 0.2208, 0.8236, ..., 0.3620, 0.7637, 0.5129]), + col_indices=tensor([19574, 2426, 28612, ..., 14575, 58009, 84398]), + values=tensor([0.7192, 0.7689, 0.7517, ..., 0.7221, 0.6786, 0.5171]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.7254, 0.4636, 0.7256, ..., 0.4819, 0.1264, 0.5273]) +tensor([0.6963, 0.4880, 0.9045, ..., 0.8071, 0.4138, 0.4778]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -16,19 +16,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 0.03882646560668945 seconds +Time: 0.03907012939453125 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27043', '-ss', '100000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.0826029777526855} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '26874', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.169933795928955} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 14, ..., 499987, 499992, +tensor(crow_indices=tensor([ 0, 6, 14, ..., 499992, 499996, 500000]), - col_indices=tensor([19007, 19428, 24486, ..., 87536, 92504, 96559]), - values=tensor([0.8398, 0.0370, 0.5128, ..., 0.3625, 0.4907, 0.6853]), + col_indices=tensor([27921, 32385, 39293, ..., 21232, 30711, 67562]), + values=tensor([0.2888, 0.7317, 0.9554, ..., 0.7337, 0.6318, 0.6969]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.6134, 0.6519, 0.5356, ..., 0.0589, 0.6530, 0.3358]) +tensor([0.2864, 0.2821, 0.7119, ..., 0.8913, 0.8685, 0.7106]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -36,19 +36,19 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 6.0826029777526855 seconds +Time: 6.169933795928955 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46682', '-ss', '100000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.460205316543579} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '45734', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.604183673858643} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 19, ..., 499992, 499995, +tensor(crow_indices=tensor([ 0, 8, 16, ..., 499993, 499996, 500000]), - col_indices=tensor([ 9218, 31473, 33160, ..., 57052, 72094, 94375]), - values=tensor([0.1819, 0.5310, 0.0116, ..., 0.3541, 0.3048, 0.3110]), + col_indices=tensor([ 3064, 12217, 39810, ..., 67896, 79198, 93701]), + values=tensor([0.1389, 0.9146, 0.3747, ..., 0.3963, 0.8562, 0.9389]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.6007, 0.7506, 0.6846, ..., 0.1657, 0.4869, 0.7821]) +tensor([0.9110, 0.2994, 0.1061, ..., 0.1379, 0.8132, 0.9589]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -56,16 +56,16 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.460205316543579 seconds +Time: 10.604183673858643 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 19, ..., 499992, 499995, +tensor(crow_indices=tensor([ 0, 8, 16, ..., 499993, 499996, 500000]), - col_indices=tensor([ 9218, 31473, 33160, ..., 57052, 72094, 94375]), - values=tensor([0.1819, 0.5310, 0.0116, ..., 0.3541, 0.3048, 0.3110]), + col_indices=tensor([ 3064, 12217, 39810, ..., 67896, 79198, 93701]), + values=tensor([0.1389, 0.9146, 0.3747, ..., 0.3963, 0.8562, 0.9389]), size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) -tensor([0.6007, 0.7506, 0.6846, ..., 0.1657, 0.4869, 0.7821]) +tensor([0.9110, 0.2994, 0.1061, ..., 0.1379, 0.8132, 0.9589]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([100000, 100000]) @@ -73,13 +73,13 @@ Rows: 100000 Size: 10000000000 NNZ: 500000 Density: 5e-05 -Time: 10.460205316543579 seconds +Time: 10.604183673858643 seconds -[18.32, 17.95, 18.46, 18.2, 18.15, 17.94, 18.28, 18.06, 18.31, 18.04] -[87.08] -14.109215259552002 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.460205316543579, 'TIME_S_1KI': 0.22407363258951157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1228.6304648017883, 'W': 87.08} -[18.32, 17.95, 18.46, 18.2, 18.15, 17.94, 18.28, 18.06, 18.31, 18.04, 18.26, 18.28, 17.89, 18.48, 18.48, 18.03, 17.99, 18.35, 18.44, 18.2] -327.69999999999993 -16.384999999999998 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.460205316543579, 'TIME_S_1KI': 0.22407363258951157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1228.6304648017883, 'W': 87.08, 'J_1KI': 26.319147954281917, 'W_1KI': 1.865387087099953, 'W_D': 70.695, 'J_D': 997.4509727740286, 'W_D_1KI': 1.51439527012553, 'J_D_1KI': 0.03244066814030097} +[19.02, 18.68, 18.7, 18.5, 18.62, 18.64, 18.56, 18.68, 18.53, 18.56] +[90.49] +14.18869137763977 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 45734, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.604183673858643, 'TIME_S_1KI': 0.2318665254265676, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1283.9346827626227, 'W': 90.49} +[19.02, 18.68, 18.7, 18.5, 18.62, 18.64, 18.56, 18.68, 18.53, 18.56, 20.14, 18.55, 18.7, 18.52, 18.59, 18.35, 19.17, 21.09, 18.78, 19.39] +339.21500000000003 +16.96075 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 45734, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.604183673858643, 'TIME_S_1KI': 0.2318665254265676, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1283.9346827626227, 'W': 90.49, 'J_1KI': 28.073964288333027, 'W_1KI': 1.9786154720776665, 'W_D': 73.52924999999999, 'J_D': 1043.283835479319, 'W_D_1KI': 1.6077589976822493, 'J_D_1KI': 0.03515456766699281} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json index 1b64231..0b1246f 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 253108, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.677898168563843, "TIME_S_1KI": 0.0421871223689644, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1070.6826658773423, "W": 74.61, "J_1KI": 4.230141543836395, "W_1KI": 0.2947753528138186, "W_D": 58.27775, "J_D": 836.3084939194918, "W_D_1KI": 0.23024855002607583, "J_D_1KI": 0.0009096849962311576} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 250235, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.277639150619507, "TIME_S_1KI": 0.04107194897044581, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1144.8245710134506, "W": 82.67, "J_1KI": 4.574997786134836, "W_1KI": 0.3303694527144484, "W_D": 65.82124999999999, "J_D": 911.5009591728448, "W_D_1KI": 0.26303774452015105, "J_D_1KI": 0.0010511628849687335} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output index 1ec4855..793dab9 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02065730094909668} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.019877910614013672} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3, 4, ..., 9998, 9998, 10000]), - col_indices=tensor([6728, 7614, 8179, ..., 1004, 2058, 8025]), - values=tensor([0.0279, 0.0803, 0.4096, ..., 0.0871, 0.5549, 0.2943]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9999, 9999, 10000]), + col_indices=tensor([ 874, 1837, 3843, ..., 101, 741, 1075]), + values=tensor([0.5637, 0.3942, 0.0462, ..., 0.4346, 0.2450, 0.2131]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.5482, 0.4991, 0.5547, ..., 0.6547, 0.2547, 0.5094]) +tensor([0.7228, 0.7678, 0.4139, ..., 0.0364, 0.2913, 0.8096]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 0.02065730094909668 seconds +Time: 0.019877910614013672 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50829', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.10860276222229} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52822', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.2164390087127686} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 10000, 10000, 10000]), - col_indices=tensor([5153, 2587, 4463, ..., 5061, 9520, 1424]), - values=tensor([0.2204, 0.6183, 0.5613, ..., 0.3086, 0.3306, 0.5938]), +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9999, 10000, 10000]), + col_indices=tensor([1121, 8248, 9956, ..., 4131, 2037, 5129]), + values=tensor([0.0817, 0.3609, 0.8105, ..., 0.0978, 0.3572, 0.8156]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.9842, 0.7142, 0.2121, ..., 0.3434, 0.1561, 0.6145]) +tensor([0.2927, 0.3945, 0.6867, ..., 0.0610, 0.2872, 0.3991]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 2.10860276222229 seconds +Time: 2.2164390087127686 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '253108', '-ss', '10000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.677898168563843} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '250235', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.277639150619507} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 9999, 9999, 10000]), - col_indices=tensor([7291, 527, 4481, ..., 6785, 7922, 1484]), - values=tensor([0.3434, 0.3822, 0.2401, ..., 0.8298, 0.4309, 0.4668]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9997, 9998, 10000]), + col_indices=tensor([2242, 8860, 3854, ..., 6182, 4773, 9388]), + values=tensor([0.8365, 0.4219, 0.7780, ..., 0.3976, 0.7864, 0.8763]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.2882, 0.0253, 0.9805, ..., 0.1323, 0.6315, 0.0794]) +tensor([0.3427, 0.7236, 0.7947, ..., 0.1998, 0.1922, 0.6155]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.677898168563843 seconds +Time: 10.277639150619507 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 4, ..., 9999, 9999, 10000]), - col_indices=tensor([7291, 527, 4481, ..., 6785, 7922, 1484]), - values=tensor([0.3434, 0.3822, 0.2401, ..., 0.8298, 0.4309, 0.4668]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 9997, 9998, 10000]), + col_indices=tensor([2242, 8860, 3854, ..., 6182, 4773, 9388]), + values=tensor([0.8365, 0.4219, 0.7780, ..., 0.3976, 0.7864, 0.8763]), size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) -tensor([0.2882, 0.0253, 0.9805, ..., 0.1323, 0.6315, 0.0794]) +tensor([0.3427, 0.7236, 0.7947, ..., 0.1998, 0.1922, 0.6155]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000 Density: 0.0001 -Time: 10.677898168563843 seconds +Time: 10.277639150619507 seconds -[18.38, 17.89, 18.34, 17.77, 18.06, 17.77, 18.13, 17.86, 17.98, 18.23] -[74.61] -14.350390911102295 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253108, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.677898168563843, 'TIME_S_1KI': 0.0421871223689644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1070.6826658773423, 'W': 74.61} -[18.38, 17.89, 18.34, 17.77, 18.06, 17.77, 18.13, 17.86, 17.98, 18.23, 18.53, 18.0, 18.04, 18.11, 18.16, 18.08, 18.2, 18.27, 19.29, 18.25] -326.645 -16.33225 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253108, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.677898168563843, 'TIME_S_1KI': 0.0421871223689644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1070.6826658773423, 'W': 74.61, 'J_1KI': 4.230141543836395, 'W_1KI': 0.2947753528138186, 'W_D': 58.27775, 'J_D': 836.3084939194918, 'W_D_1KI': 0.23024855002607583, 'J_D_1KI': 0.0009096849962311576} +[19.27, 19.16, 18.76, 18.51, 18.49, 18.73, 18.68, 19.0, 18.57, 18.98] +[82.67] +13.84812593460083 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 250235, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.277639150619507, 'TIME_S_1KI': 0.04107194897044581, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1144.8245710134506, 'W': 82.67} +[19.27, 19.16, 18.76, 18.51, 18.49, 18.73, 18.68, 19.0, 18.57, 18.98, 19.5, 18.48, 18.63, 18.9, 18.47, 18.55, 18.63, 18.86, 18.44, 18.48] +336.975 +16.848750000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 250235, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.277639150619507, 'TIME_S_1KI': 0.04107194897044581, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1144.8245710134506, 'W': 82.67, 'J_1KI': 4.574997786134836, 'W_1KI': 0.3303694527144484, 'W_D': 65.82124999999999, 'J_D': 911.5009591728448, 'W_D_1KI': 0.26303774452015105, 'J_D_1KI': 0.0010511628849687335} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json index 090450b..4039661 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 194593, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.473386764526367, "TIME_S_1KI": 0.053822011914747024, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1083.1964892935753, "W": 79.61, "J_1KI": 5.566472017459905, "W_1KI": 0.40911029687604383, "W_D": 62.8515, "J_D": 855.175532556653, "W_D_1KI": 0.3229895217196919, "J_D_1KI": 0.001659820865702733} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 195448, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.777343273162842, "TIME_S_1KI": 0.05514174242337012, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1223.9583963775635, "W": 87.73, "J_1KI": 6.2623224406367095, "W_1KI": 0.4488661945888421, "W_D": 70.56325000000001, "J_D": 984.4577945194246, "W_D_1KI": 0.3610333694895829, "J_D_1KI": 0.001847209331840607} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output index 8858416..b7388c3 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.020806312561035156} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.022109508514404297} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 7, 14, ..., 99985, 99993, +tensor(crow_indices=tensor([ 0, 12, 20, ..., 99984, 99991, 100000]), - col_indices=tensor([5438, 7119, 8479, ..., 6797, 6979, 8109]), - values=tensor([0.2056, 0.5255, 0.6332, ..., 0.1682, 0.9365, 0.4633]), + col_indices=tensor([ 774, 3258, 4252, ..., 8619, 8738, 9232]), + values=tensor([0.1489, 0.7206, 0.7178, ..., 0.3422, 0.0839, 0.6783]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.0316, 0.8438, 0.1562, ..., 0.6730, 0.8332, 0.9126]) +tensor([0.2668, 0.6762, 0.7372, ..., 0.4714, 0.8259, 0.5362]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 0.020806312561035156 seconds +Time: 0.022109508514404297 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50465', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.723015785217285} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '47490', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.551285982131958} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 18, 34, ..., 99978, 99989, +tensor(crow_indices=tensor([ 0, 10, 18, ..., 99986, 99993, 100000]), - col_indices=tensor([ 818, 1321, 1616, ..., 5603, 7366, 9704]), - values=tensor([0.8713, 0.7316, 0.2331, ..., 0.6687, 0.3725, 0.7818]), + col_indices=tensor([ 697, 699, 1538, ..., 3424, 5514, 9931]), + values=tensor([0.0664, 0.0593, 0.3496, ..., 0.3706, 0.2364, 0.3729]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.5742, 0.6233, 0.0987, ..., 0.1452, 0.2067, 0.1195]) +tensor([0.2684, 0.6711, 0.5550, ..., 0.5107, 0.2291, 0.1240]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 2.723015785217285 seconds +Time: 2.551285982131958 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '194593', '-ss', '10000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.473386764526367} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '195448', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.777343273162842} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 16, ..., 99982, 99989, +tensor(crow_indices=tensor([ 0, 9, 16, ..., 99969, 99987, 100000]), - col_indices=tensor([1034, 3380, 4243, ..., 7428, 9116, 9600]), - values=tensor([0.4227, 0.1092, 0.7794, ..., 0.2113, 0.3090, 0.9237]), + col_indices=tensor([ 874, 3785, 4640, ..., 8539, 9539, 9843]), + values=tensor([0.7163, 0.6418, 0.7384, ..., 0.3361, 0.8554, 0.3985]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7442, 0.3504, 0.6358, ..., 0.6138, 0.7536, 0.9226]) +tensor([0.3512, 0.5595, 0.5579, ..., 0.0047, 0.4046, 0.7037]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.473386764526367 seconds +Time: 10.777343273162842 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 11, 16, ..., 99982, 99989, +tensor(crow_indices=tensor([ 0, 9, 16, ..., 99969, 99987, 100000]), - col_indices=tensor([1034, 3380, 4243, ..., 7428, 9116, 9600]), - values=tensor([0.4227, 0.1092, 0.7794, ..., 0.2113, 0.3090, 0.9237]), + col_indices=tensor([ 874, 3785, 4640, ..., 8539, 9539, 9843]), + values=tensor([0.7163, 0.6418, 0.7384, ..., 0.3361, 0.8554, 0.3985]), size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) -tensor([0.7442, 0.3504, 0.6358, ..., 0.6138, 0.7536, 0.9226]) +tensor([0.3512, 0.5595, 0.5579, ..., 0.0047, 0.4046, 0.7037]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 100000 Density: 0.001 -Time: 10.473386764526367 seconds +Time: 10.777343273162842 seconds -[19.08, 18.28, 17.95, 17.9, 19.17, 18.22, 19.03, 18.14, 18.71, 20.45] -[79.61] -13.606286764144897 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 194593, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.473386764526367, 'TIME_S_1KI': 0.053822011914747024, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1083.1964892935753, 'W': 79.61} -[19.08, 18.28, 17.95, 17.9, 19.17, 18.22, 19.03, 18.14, 18.71, 20.45, 18.56, 18.36, 18.2, 17.96, 18.09, 18.17, 22.32, 18.37, 18.26, 17.99] -335.17 -16.7585 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 194593, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.473386764526367, 'TIME_S_1KI': 0.053822011914747024, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1083.1964892935753, 'W': 79.61, 'J_1KI': 5.566472017459905, 'W_1KI': 0.40911029687604383, 'W_D': 62.8515, 'J_D': 855.175532556653, 'W_D_1KI': 0.3229895217196919, 'J_D_1KI': 0.001659820865702733} +[19.04, 19.62, 18.71, 18.67, 18.54, 18.38, 18.47, 18.71, 20.97, 19.17] +[87.73] +13.951423645019531 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 195448, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.777343273162842, 'TIME_S_1KI': 0.05514174242337012, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1223.9583963775635, 'W': 87.73} +[19.04, 19.62, 18.71, 18.67, 18.54, 18.38, 18.47, 18.71, 20.97, 19.17, 18.99, 18.59, 18.55, 18.4, 22.57, 19.05, 18.53, 19.0, 18.59, 18.77] +343.33500000000004 +17.16675 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 195448, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.777343273162842, 'TIME_S_1KI': 0.05514174242337012, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1223.9583963775635, 'W': 87.73, 'J_1KI': 6.2623224406367095, 'W_1KI': 0.4488661945888421, 'W_D': 70.56325000000001, 'J_D': 984.4577945194246, 'W_D_1KI': 0.3610333694895829, 'J_D_1KI': 0.001847209331840607} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json index 8c6811d..033e5d4 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 57740, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.343472957611084, "TIME_S_1KI": 0.17913877654331634, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1219.6624715328217, "W": 87.4, "J_1KI": 21.123354200429883, "W_1KI": 1.5136820228611017, "W_D": 71.01700000000001, "J_D": 991.0385553872587, "W_D_1KI": 1.2299445791479044, "J_D_1KI": 0.021301430189606934} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 55803, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.610237836837769, "TIME_S_1KI": 0.19013740904320142, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1270.6491134166718, "W": 89.9, "J_1KI": 22.770265279943224, "W_1KI": 1.6110244968908483, "W_D": 73.03275000000001, "J_D": 1032.246930343509, "W_D_1KI": 1.3087602817052848, "J_D_1KI": 0.023453224409176653} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output index ff97c3a..7cde805 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.03377032279968262} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.03371858596801758} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 110, 219, ..., 999820, - 999911, 1000000]), - col_indices=tensor([ 0, 38, 265, ..., 9703, 9904, 9960]), - values=tensor([0.3683, 0.9828, 0.4174, ..., 0.4331, 0.2376, 0.7467]), +tensor(crow_indices=tensor([ 0, 99, 214, ..., 999812, + 999912, 1000000]), + col_indices=tensor([ 44, 58, 94, ..., 9363, 9662, 9795]), + values=tensor([0.8470, 0.3153, 0.3716, ..., 0.2634, 0.7038, 0.5929]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8519, 0.1445, 0.2230, ..., 0.9278, 0.7204, 0.3614]) +tensor([0.6867, 0.9635, 0.5151, ..., 0.5948, 0.1867, 0.0815]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 0.03377032279968262 seconds +Time: 0.03371858596801758 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '31092', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.654046535491943} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '31140', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.859341859817505} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 102, 199, ..., 999795, - 999900, 1000000]), - col_indices=tensor([ 113, 165, 189, ..., 9912, 9940, 9996]), - values=tensor([0.2048, 0.9236, 0.8269, ..., 0.2195, 0.4387, 0.6731]), +tensor(crow_indices=tensor([ 0, 96, 195, ..., 999807, + 999901, 1000000]), + col_indices=tensor([ 7, 260, 452, ..., 9620, 9734, 9843]), + values=tensor([0.9634, 0.1244, 0.5347, ..., 0.1267, 0.3210, 0.5779]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.8364, 0.2950, 0.2365, ..., 0.2102, 0.7661, 0.3156]) +tensor([0.0916, 0.0848, 0.6723, ..., 0.9740, 0.3856, 0.4112]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,19 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 5.654046535491943 seconds +Time: 5.859341859817505 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '57740', '-ss', '10000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.343472957611084} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '55803', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.610237836837769} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 106, 216, ..., 999804, - 999899, 1000000]), - col_indices=tensor([ 61, 88, 117, ..., 9666, 9676, 9799]), - values=tensor([0.7050, 0.8533, 0.9508, ..., 0.3667, 0.6991, 0.8071]), +tensor(crow_indices=tensor([ 0, 90, 184, ..., 999818, + 999908, 1000000]), + col_indices=tensor([ 1, 41, 423, ..., 9632, 9973, 9980]), + values=tensor([0.5680, 0.3426, 0.4538, ..., 0.2093, 0.7467, 0.4938]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9939, 0.1893, 0.9694, ..., 0.0779, 0.3428, 0.9229]) +tensor([0.0892, 0.5048, 0.6849, ..., 0.8658, 0.9878, 0.2650]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,16 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.343472957611084 seconds +Time: 10.610237836837769 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 106, 216, ..., 999804, - 999899, 1000000]), - col_indices=tensor([ 61, 88, 117, ..., 9666, 9676, 9799]), - values=tensor([0.7050, 0.8533, 0.9508, ..., 0.3667, 0.6991, 0.8071]), +tensor(crow_indices=tensor([ 0, 90, 184, ..., 999818, + 999908, 1000000]), + col_indices=tensor([ 1, 41, 423, ..., 9632, 9973, 9980]), + values=tensor([0.5680, 0.3426, 0.4538, ..., 0.2093, 0.7467, 0.4938]), size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) -tensor([0.9939, 0.1893, 0.9694, ..., 0.0779, 0.3428, 0.9229]) +tensor([0.0892, 0.5048, 0.6849, ..., 0.8658, 0.9878, 0.2650]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -73,13 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000000 Density: 0.01 -Time: 10.343472957611084 seconds +Time: 10.610237836837769 seconds -[18.73, 18.07, 18.16, 18.31, 18.98, 17.81, 18.18, 18.17, 18.06, 18.83] -[87.4] -13.95494818687439 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57740, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.343472957611084, 'TIME_S_1KI': 0.17913877654331634, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1219.6624715328217, 'W': 87.4} -[18.73, 18.07, 18.16, 18.31, 18.98, 17.81, 18.18, 18.17, 18.06, 18.83, 18.31, 18.07, 18.19, 17.94, 18.17, 18.13, 18.04, 18.09, 18.22, 18.27] -327.65999999999997 -16.383 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57740, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.343472957611084, 'TIME_S_1KI': 0.17913877654331634, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1219.6624715328217, 'W': 87.4, 'J_1KI': 21.123354200429883, 'W_1KI': 1.5136820228611017, 'W_D': 71.01700000000001, 'J_D': 991.0385553872587, 'W_D_1KI': 1.2299445791479044, 'J_D_1KI': 0.021301430189606934} +[18.89, 18.41, 18.79, 18.72, 18.62, 18.97, 18.94, 18.42, 18.71, 18.3] +[89.9] +14.13402795791626 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 55803, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.610237836837769, 'TIME_S_1KI': 0.19013740904320142, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1270.6491134166718, 'W': 89.9} +[18.89, 18.41, 18.79, 18.72, 18.62, 18.97, 18.94, 18.42, 18.71, 18.3, 19.28, 18.73, 18.54, 18.62, 18.82, 18.47, 19.62, 18.44, 19.04, 18.5] +337.345 +16.867250000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 55803, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.610237836837769, 'TIME_S_1KI': 0.19013740904320142, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1270.6491134166718, 'W': 89.9, 'J_1KI': 22.770265279943224, 'W_1KI': 1.6110244968908483, 'W_D': 73.03275000000001, 'J_D': 1032.246930343509, 'W_D_1KI': 1.3087602817052848, 'J_D_1KI': 0.023453224409176653} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json index 9882866..350b8f3 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8902, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.860300064086914, "TIME_S_1KI": 1.2199842803961933, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1340.0520877552033, "W": 84.52, "J_1KI": 150.53382248429602, "W_1KI": 9.49449561896203, "W_D": 68.1745, "J_D": 1080.896605024457, "W_D_1KI": 7.658335205571781, "J_D_1KI": 0.8602937773053} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8698, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.610017538070679, "TIME_S_1KI": 1.2198226647586432, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1341.4551094627382, "W": 87.67, "J_1KI": 154.22569665011937, "W_1KI": 10.079328581283054, "W_D": 70.861, "J_D": 1084.257448518753, "W_D_1KI": 8.146815359852841, "J_D_1KI": 0.9366308760465442} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output index 266a604..a8e5825 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.13720321655273438} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.13405466079711914} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 497, 987, ..., 4998984, - 4999490, 5000000]), - col_indices=tensor([ 31, 32, 48, ..., 9945, 9978, 9990]), - values=tensor([0.2379, 0.1839, 0.8156, ..., 0.3545, 0.1897, 0.0490]), +tensor(crow_indices=tensor([ 0, 480, 945, ..., 4998944, + 4999481, 5000000]), + col_indices=tensor([ 1, 12, 19, ..., 9947, 9963, 9973]), + values=tensor([0.5890, 0.9205, 0.0614, ..., 0.7220, 0.9709, 0.4172]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7954, 0.2457, 0.2337, ..., 0.1008, 0.2602, 0.7172]) +tensor([0.5818, 0.9691, 0.7591, ..., 0.9152, 0.5043, 0.7665]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,39 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 0.13720321655273438 seconds +Time: 0.13405466079711914 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7652', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.025035858154297} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7832', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.454247951507568} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 488, 990, ..., 4999027, +tensor(crow_indices=tensor([ 0, 485, 1011, ..., 4998959, + 4999487, 5000000]), + col_indices=tensor([ 3, 54, 65, ..., 9920, 9953, 9995]), + values=tensor([0.3879, 0.5951, 0.7663, ..., 0.8899, 0.2864, 0.6094]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7416, 0.0102, 0.0449, ..., 0.4285, 0.7394, 0.6178]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 9.454247951507568 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8698', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.610017538070679} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 506, 993, ..., 4999012, 4999510, 5000000]), - col_indices=tensor([ 13, 24, 30, ..., 9939, 9983, 9997]), - values=tensor([0.8521, 0.2131, 0.0790, ..., 0.0763, 0.7991, 0.5452]), + col_indices=tensor([ 26, 45, 57, ..., 9916, 9958, 9969]), + values=tensor([0.1934, 0.7889, 0.4552, ..., 0.2644, 0.1399, 0.0800]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7324, 0.5581, 0.6877, ..., 0.3893, 0.7172, 0.4223]) +tensor([0.4812, 0.2509, 0.7085, ..., 0.2318, 0.2285, 0.0711]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +56,16 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 9.025035858154297 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8902', '-ss', '10000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.860300064086914} +Time: 10.610017538070679 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 512, 999, ..., 4999039, - 4999553, 5000000]), - col_indices=tensor([ 40, 52, 78, ..., 9943, 9982, 9986]), - values=tensor([0.6271, 0.3251, 0.6536, ..., 0.8006, 0.2414, 0.7322]), +tensor(crow_indices=tensor([ 0, 506, 993, ..., 4999012, + 4999510, 5000000]), + col_indices=tensor([ 26, 45, 57, ..., 9916, 9958, 9969]), + values=tensor([0.1934, 0.7889, 0.4552, ..., 0.2644, 0.1399, 0.0800]), size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7511, 0.5744, 0.2092, ..., 0.0810, 0.1870, 0.5605]) +tensor([0.4812, 0.2509, 0.7085, ..., 0.2318, 0.2285, 0.0711]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,30 +73,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000000 Density: 0.05 -Time: 10.860300064086914 seconds +Time: 10.610017538070679 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 512, 999, ..., 4999039, - 4999553, 5000000]), - col_indices=tensor([ 40, 52, 78, ..., 9943, 9982, 9986]), - values=tensor([0.6271, 0.3251, 0.6536, ..., 0.8006, 0.2414, 0.7322]), - size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7511, 0.5744, 0.2092, ..., 0.0810, 0.1870, 0.5605]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 5000000 -Density: 0.05 -Time: 10.860300064086914 seconds - -[18.66, 18.42, 18.15, 17.87, 18.05, 18.01, 18.31, 17.87, 17.97, 18.0] -[84.52] -15.854851961135864 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8902, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.860300064086914, 'TIME_S_1KI': 1.2199842803961933, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1340.0520877552033, 'W': 84.52} -[18.66, 18.42, 18.15, 17.87, 18.05, 18.01, 18.31, 17.87, 17.97, 18.0, 18.28, 17.95, 18.41, 17.97, 19.13, 18.12, 18.3, 17.86, 18.09, 17.92] -326.91 -16.3455 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8902, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.860300064086914, 'TIME_S_1KI': 1.2199842803961933, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1340.0520877552033, 'W': 84.52, 'J_1KI': 150.53382248429602, 'W_1KI': 9.49449561896203, 'W_D': 68.1745, 'J_D': 1080.896605024457, 'W_D_1KI': 7.658335205571781, 'J_D_1KI': 0.8602937773053} +[19.53, 18.93, 18.62, 18.4, 18.88, 18.89, 18.55, 18.63, 18.57, 18.76] +[87.67] +15.301187515258789 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8698, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.610017538070679, 'TIME_S_1KI': 1.2198226647586432, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1341.4551094627382, 'W': 87.67} +[19.53, 18.93, 18.62, 18.4, 18.88, 18.89, 18.55, 18.63, 18.57, 18.76, 18.75, 18.47, 18.63, 18.5, 18.62, 18.29, 18.85, 18.58, 19.0, 18.5] +336.17999999999995 +16.808999999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8698, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.610017538070679, 'TIME_S_1KI': 1.2198226647586432, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1341.4551094627382, 'W': 87.67, 'J_1KI': 154.22569665011937, 'W_1KI': 10.079328581283054, 'W_D': 70.861, 'J_D': 1084.257448518753, 'W_D_1KI': 8.146815359852841, 'J_D_1KI': 0.9366308760465442} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json index 4142b7d..c89ffcb 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2952, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 
10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.514222383499146, "TIME_S_1KI": 3.5617284496948325, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1409.0968302583694, "W": 80.82, "J_1KI": 477.3363246132688, "W_1KI": 27.378048780487802, "W_D": 64.16999999999999, "J_D": 1118.8040534234044, "W_D_1KI": 21.737804878048774, "J_D_1KI": 7.363755039989422} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2720, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.711690187454224, "TIME_S_1KI": 3.9381213924464054, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1312.412057888508, "W": 83.95, "J_1KI": 482.5044330472455, "W_1KI": 30.863970588235293, "W_D": 66.99475000000001, "J_D": 1047.3462503302694, "W_D_1KI": 24.630422794117653, "J_D_1KI": 9.055302497837372} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output index f33aa65..0a03f8e 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.3909003734588623} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.3859541416168213} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 946, 1929, ..., 9998013, - 9999030, 10000000]), - col_indices=tensor([ 1, 29, 66, ..., 9951, 9961, 9963]), - values=tensor([0.1920, 0.8019, 0.0618, ..., 0.8349, 0.9652, 0.3956]), +tensor(crow_indices=tensor([ 0, 1029, 2000, ..., 9998003, + 9998985, 10000000]), + col_indices=tensor([ 0, 28, 29, ..., 9954, 9987, 9996]), + values=tensor([0.6829, 0.1416, 0.5262, ..., 0.8908, 0.2734, 0.8338]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.5191, 0.4835, 0.4753, ..., 0.1633, 0.4541, 0.9422]) +tensor([0.3530, 0.8966, 0.4993, ..., 0.7172, 0.3219, 0.6793]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 0.3909003734588623 seconds +Time: 0.3859541416168213 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2686', '-ss', '10000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.552263259887695} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2720', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.711690187454224} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1009, 2066, ..., 9998034, - 9999022, 10000000]), - col_indices=tensor([ 1, 6, 11, ..., 9982, 9996, 9999]), - values=tensor([0.5825, 0.5025, 0.2695, ..., 0.8369, 0.3596, 0.5616]), +tensor(crow_indices=tensor([ 0, 981, 1980, ..., 9997989, + 9998997, 10000000]), + col_indices=tensor([ 12, 27, 39, ..., 9985, 9991, 9993]), + values=tensor([0.3850, 0.3757, 0.0662, ..., 0.5591, 0.0330, 0.0089]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.8229, 0.3714, 0.9138, ..., 0.5827, 0.9903, 0.1706]) +tensor([0.5245, 0.1235, 0.9662, ..., 0.6225, 0.7512, 0.9707]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 9.552263259887695 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2952', '-ss', '10000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.514222383499146} +Time: 10.711690187454224 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1012, 2027, ..., 9998023, - 9999029, 10000000]), - col_indices=tensor([ 20, 21, 65, ..., 9939, 9966, 9982]), - values=tensor([0.6149, 0.2165, 0.5741, ..., 0.9222, 0.5603, 0.9724]), +tensor(crow_indices=tensor([ 0, 981, 1980, ..., 9997989, + 9998997, 10000000]), + col_indices=tensor([ 12, 27, 39, ..., 9985, 9991, 9993]), + values=tensor([0.3850, 0.3757, 0.0662, ..., 0.5591, 0.0330, 0.0089]), size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1720, 0.3458, 0.5186, ..., 0.7493, 0.6588, 0.1643]) +tensor([0.5245, 0.1235, 0.9662, ..., 0.6225, 0.7512, 0.9707]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,30 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 10000000 Density: 0.1 -Time: 10.514222383499146 seconds +Time: 10.711690187454224 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1012, 2027, ..., 9998023, - 9999029, 10000000]), - col_indices=tensor([ 20, 21, 65, ..., 9939, 9966, 9982]), - values=tensor([0.6149, 0.2165, 0.5741, ..., 0.9222, 0.5603, 0.9724]), - size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.1720, 0.3458, 0.5186, ..., 0.7493, 0.6588, 0.1643]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 10000000 -Density: 0.1 -Time: 10.514222383499146 seconds - -[18.22, 18.02, 18.22, 18.83, 18.28, 18.32, 18.36, 18.07, 18.03, 17.97] -[80.82] -17.435001611709595 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2952, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.514222383499146, 'TIME_S_1KI': 3.5617284496948325, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1409.0968302583694, 'W': 80.82} -[18.22, 18.02, 18.22, 18.83, 18.28, 18.32, 18.36, 18.07, 18.03, 17.97, 19.01, 19.01, 17.78, 18.5, 20.32, 20.23, 18.29, 18.21, 17.96, 17.94] -333.0 -16.65 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2952, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.514222383499146, 'TIME_S_1KI': 3.5617284496948325, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1409.0968302583694, 'W': 80.82, 'J_1KI': 477.3363246132688, 'W_1KI': 27.378048780487802, 'W_D': 64.16999999999999, 'J_D': 1118.8040534234044, 'W_D_1KI': 21.737804878048774, 'J_D_1KI': 7.363755039989422} +[22.49, 19.33, 18.98, 18.95, 18.77, 18.44, 19.01, 18.66, 18.62, 18.34] +[83.95] +15.633258581161499 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2720, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.711690187454224, 'TIME_S_1KI': 3.9381213924464054, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1312.412057888508, 'W': 83.95} +[22.49, 19.33, 18.98, 18.95, 18.77, 18.44, 19.01, 18.66, 18.62, 18.34, 18.86, 18.43, 18.64, 18.64, 18.49, 19.07, 
18.7, 18.78, 18.49, 18.52] +339.10499999999996 +16.95525 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2720, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.711690187454224, 'TIME_S_1KI': 3.9381213924464054, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1312.412057888508, 'W': 83.95, 'J_1KI': 482.5044330472455, 'W_1KI': 30.863970588235293, 'W_D': 66.99475000000001, 'J_D': 1047.3462503302694, 'W_D_1KI': 24.630422794117653, 'J_D_1KI': 9.055302497837372} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.json index 047807c..9c915c5 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1497, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.375812530517578, "TIME_S_1KI": 6.931070494667721, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2016.6524360942842, "W": 63.56, "J_1KI": 1347.1292158278452, "W_1KI": 42.458249832999336, "W_D": 46.965500000000006, "J_D": 1490.1367210098506, "W_D_1KI": 31.373079492317977, "J_D_1KI": 20.95730093007213} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1480, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.938852071762085, "TIME_S_1KI": 7.391116264704111, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1528.7213900470733, "W": 80.94, "J_1KI": 1032.9198581399144, "W_1KI": 54.689189189189186, "W_D": 63.9845, "J_D": 1208.4812673766612, "W_D_1KI": 43.232770270270265, "J_D_1KI": 29.211331263696124} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.output index e1c6993..4b03a7c 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.7011280059814453} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.7089850902557373} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1997, 4011, ..., 19995978, - 19998029, 20000000]), - col_indices=tensor([ 0, 6, 10, ..., 9977, 9981, 9997]), - values=tensor([0.1409, 0.7742, 0.3684, ..., 0.6455, 0.2528, 0.1779]), +tensor(crow_indices=tensor([ 0, 1931, 3950, ..., 19996023, + 19998022, 20000000]), + col_indices=tensor([ 10, 17, 25, ..., 9994, 9995, 9996]), + values=tensor([0.0457, 0.3054, 0.6832, ..., 0.8985, 0.1646, 0.8277]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.4126, 0.0545, 0.9867, ..., 0.1672, 0.9147, 0.2153]) +tensor([0.2768, 0.9830, 0.6147, ..., 0.3597, 0.6699, 0.4242]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 0.7011280059814453 seconds +Time: 0.7089850902557373 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1497', '-ss', '10000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.375812530517578} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1480', '-ss', '10000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.938852071762085} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2004, 3927, ..., 19995948, - 19997998, 20000000]), - col_indices=tensor([ 0, 7, 8, ..., 9981, 9983, 9993]), - values=tensor([0.6769, 0.3987, 0.0257, ..., 0.1977, 0.2040, 0.8027]), +tensor(crow_indices=tensor([ 0, 2034, 4022, ..., 19995925, + 19997982, 20000000]), + col_indices=tensor([ 2, 15, 18, ..., 9987, 9998, 9999]), + values=tensor([0.7872, 0.2298, 0.4521, ..., 0.3351, 0.9033, 0.4825]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.3084, 0.6863, 0.1716, ..., 0.2861, 0.5214, 0.4353]) +tensor([0.4230, 0.2852, 0.9332, ..., 0.3899, 0.8304, 0.1579]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,16 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.375812530517578 seconds +Time: 10.938852071762085 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2004, 3927, ..., 19995948, - 19997998, 20000000]), - col_indices=tensor([ 0, 7, 8, ..., 9981, 9983, 9993]), - values=tensor([0.6769, 0.3987, 0.0257, ..., 0.1977, 0.2040, 0.8027]), +tensor(crow_indices=tensor([ 0, 2034, 4022, ..., 19995925, + 19997982, 20000000]), + col_indices=tensor([ 2, 15, 18, ..., 9987, 9998, 9999]), + values=tensor([0.7872, 0.2298, 0.4521, ..., 0.3351, 0.9033, 0.4825]), size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) -tensor([0.3084, 0.6863, 0.1716, ..., 0.2861, 0.5214, 0.4353]) +tensor([0.4230, 0.2852, 0.9332, ..., 0.3899, 0.8304, 0.1579]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,13 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 20000000 Density: 0.2 -Time: 10.375812530517578 seconds +Time: 10.938852071762085 seconds -[18.42, 17.88, 18.24, 17.8, 17.91, 17.84, 18.25, 18.2, 17.93, 17.84] -[63.56] -31.728326559066772 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.375812530517578, 'TIME_S_1KI': 6.931070494667721, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2016.6524360942842, 'W': 63.56} -[18.42, 17.88, 18.24, 17.8, 17.91, 17.84, 18.25, 18.2, 17.93, 17.84, 18.76, 17.93, 18.32, 21.74, 19.38, 18.29, 18.56, 18.06, 18.67, 18.76] -331.89 -16.5945 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.375812530517578, 'TIME_S_1KI': 6.931070494667721, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2016.6524360942842, 'W': 63.56, 'J_1KI': 1347.1292158278452, 'W_1KI': 42.458249832999336, 'W_D': 46.965500000000006, 'J_D': 1490.1367210098506, 'W_D_1KI': 31.373079492317977, 'J_D_1KI': 20.95730093007213} +[18.93, 18.5, 18.56, 18.73, 18.62, 21.16, 20.02, 18.59, 19.0, 18.39] +[80.94] +18.887094020843506 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1480, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.938852071762085, 'TIME_S_1KI': 7.391116264704111, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1528.7213900470733, 'W': 80.94} +[18.93, 18.5, 18.56, 18.73, 18.62, 21.16, 20.02, 18.59, 19.0, 18.39, 19.32, 18.67, 18.63, 18.35, 18.48, 18.43, 18.55, 18.62, 18.66, 18.44] +339.11 +16.9555 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1480, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.938852071762085, 'TIME_S_1KI': 7.391116264704111, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1528.7213900470733, 'W': 80.94, 'J_1KI': 1032.9198581399144, 'W_1KI': 54.689189189189186, 'W_D': 63.9845, 'J_D': 1208.4812673766612, 'W_D_1KI': 43.232770270270265, 'J_D_1KI': 29.211331263696124} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.json index b205393..3429945 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 949, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.941918134689331, "TIME_S_1KI": 11.529945347407093, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3564.162018098831, "W": 52.14, "J_1KI": 3755.7028641715815, "W_1KI": 54.94204425711275, "W_D": 35.83725, "J_D": 2449.7461696032283, "W_D_1KI": 37.763171759747095, "J_D_1KI": 39.79259405663551} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1005, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.332000494003296, "TIME_S_1KI": 10.28059750647094, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1650.0738431143761, "W": 78.11, "J_1KI": 1641.8645205118169, "W_1KI": 77.72139303482587, "W_D": 39.656000000000006, "J_D": 837.733047273636, "W_D_1KI": 39.4587064676617, "J_D_1KI": 39.262394495185774} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.output index d986f3d..8a579df 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.9010562896728516} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.0439045429229736} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2954, 5989, ..., 29993992, - 29996937, 30000000]), - col_indices=tensor([ 2, 3, 4, ..., 9991, 9993, 9999]), - values=tensor([0.8641, 0.5764, 0.3491, ..., 0.5822, 0.6256, 0.4859]), +tensor(crow_indices=tensor([ 0, 3022, 5961, ..., 29994092, + 29997073, 30000000]), + col_indices=tensor([ 11, 13, 15, ..., 9992, 9993, 9996]), + values=tensor([0.4005, 0.5190, 0.3009, ..., 0.2678, 0.6328, 0.6513]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.5324, 0.9227, 0.4901, ..., 0.4747, 0.1770, 0.2536]) +tensor([0.1783, 0.2981, 0.9794, ..., 0.1699, 0.0540, 0.0711]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -16,19 +16,19 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 1.9010562896728516 seconds +Time: 1.0439045429229736 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '552', '-ss', '10000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.5163209438323975} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1005', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.332000494003296} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3039, 6048, ..., 29994088, - 29997031, 30000000]), - col_indices=tensor([ 5, 17, 19, ..., 9983, 9988, 9994]), - values=tensor([0.5897, 0.1355, 0.4586, ..., 0.2138, 0.9666, 0.8141]), +tensor(crow_indices=tensor([ 0, 2967, 5964, ..., 29993953, + 29997003, 30000000]), + col_indices=tensor([ 0, 6, 12, ..., 9987, 9996, 9999]), + values=tensor([0.1521, 0.9589, 0.9546, ..., 0.2072, 0.8415, 0.4716]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.3973, 0.5491, 0.6398, ..., 0.0595, 0.1069, 0.4910]) +tensor([0.1527, 0.9738, 0.6748, ..., 0.1588, 0.8399, 0.1982]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -36,19 +36,16 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 6.5163209438323975 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '889', '-ss', '10000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.831693172454834} +Time: 10.332000494003296 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3022, 6006, ..., 29994154, - 29996992, 30000000]), - col_indices=tensor([ 2, 5, 11, ..., 9993, 9994, 9997]), - values=tensor([0.4549, 0.7646, 0.3501, ..., 0.7301, 0.5346, 0.2783]), +tensor(crow_indices=tensor([ 0, 2967, 5964, ..., 29993953, + 29997003, 30000000]), + col_indices=tensor([ 0, 6, 12, ..., 9987, 9996, 9999]), + values=tensor([0.1521, 0.9589, 0.9546, ..., 0.2072, 0.8415, 0.4716]), size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.0238, 0.3406, 0.5500, ..., 0.0227, 0.0108, 0.7785]) +tensor([0.1527, 0.9738, 0.6748, ..., 0.1588, 0.8399, 0.1982]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -56,50 +53,13 @@ Rows: 10000 Size: 100000000 NNZ: 30000000 Density: 0.3 -Time: 9.831693172454834 seconds +Time: 10.332000494003296 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '949', '-ss', '10000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.941918134689331} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3007, 5955, ..., 29993893, - 29996918, 30000000]), - col_indices=tensor([ 0, 1, 2, ..., 9996, 9997, 9998]), - values=tensor([0.2348, 0.5341, 0.6896, ..., 0.7208, 0.8300, 0.7790]), - size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.7258, 0.9160, 0.6029, ..., 0.0530, 0.7513, 0.2296]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 30000000 -Density: 0.3 -Time: 10.941918134689331 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 3007, 5955, ..., 29993893, - 29996918, 30000000]), - col_indices=tensor([ 0, 1, 2, ..., 9996, 9997, 9998]), - values=tensor([0.2348, 0.5341, 0.6896, ..., 0.7208, 0.8300, 0.7790]), - size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) -tensor([0.7258, 0.9160, 0.6029, ..., 0.0530, 0.7513, 0.2296]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([10000, 10000]) -Rows: 10000 -Size: 100000000 -NNZ: 30000000 -Density: 0.3 -Time: 10.941918134689331 seconds - -[18.62, 18.03, 18.0, 17.99, 18.26, 17.85, 18.27, 17.94, 18.3, 18.09] -[52.14] -68.35753774642944 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 949, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.941918134689331, 'TIME_S_1KI': 11.529945347407093, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3564.162018098831, 'W': 52.14} -[18.62, 18.03, 18.0, 17.99, 18.26, 17.85, 18.27, 17.94, 18.3, 18.09, 18.58, 17.83, 17.98, 17.87, 18.18, 18.09, 18.02, 17.83, 18.92, 18.1] -326.055 -16.30275 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 949, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.941918134689331, 'TIME_S_1KI': 11.529945347407093, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3564.162018098831, 'W': 52.14, 'J_1KI': 3755.7028641715815, 'W_1KI': 54.94204425711275, 'W_D': 35.83725, 'J_D': 2449.7461696032283, 'W_D_1KI': 37.763171759747095, 'J_D_1KI': 39.79259405663551} +[18.66, 20.38, 43.8, 42.6, 42.06, 45.08, 46.1, 44.23, 43.43, 43.61] +[78.11] +21.125001192092896 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1005, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.332000494003296, 'TIME_S_1KI': 10.28059750647094, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1650.0738431143761, 'W': 78.11} +[18.66, 20.38, 43.8, 42.6, 42.06, 45.08, 46.1, 44.23, 43.43, 43.61, 47.07, 44.12, 45.97, 47.08, 46.91, 45.01, 44.24, 46.19, 44.65, 45.12] +769.0799999999999 +38.45399999999999 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1005, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.332000494003296, 'TIME_S_1KI': 10.28059750647094, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1650.0738431143761, 'W': 78.11, 'J_1KI': 1641.8645205118169, 'W_1KI': 77.72139303482587, 'W_D': 39.656000000000006, 'J_D': 837.733047273636, 'W_D_1KI': 39.4587064676617, 'J_D_1KI': 39.262394495185774} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.4.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.4.json new file mode 100644 index 0000000..19adb0f --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 780, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 
10.529170036315918, "TIME_S_1KI": 13.498935943994766, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1779.8577748131752, "W": 75.39, "J_1KI": 2281.8689420681735, "W_1KI": 96.65384615384616, "W_D": 58.513, "J_D": 1381.4142190959453, "W_D_1KI": 75.01666666666667, "J_D_1KI": 96.17521367521367} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.4.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.4.output new file mode 100644 index 0000000..bf4ac07 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 1.3460514545440674} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3928, 7878, ..., 39991924, + 39996011, 40000000]), + col_indices=tensor([ 3, 4, 5, ..., 9989, 9994, 9999]), + values=tensor([0.6477, 0.4724, 0.8443, ..., 0.0902, 0.8701, 0.4257]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.5531, 0.3437, 0.9958, ..., 0.3243, 0.6186, 0.7377]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 1.3460514545440674 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '780', '-ss', '10000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 40000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.529170036315918} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3921, 7937, ..., 39992035, + 39996055, 40000000]), + col_indices=tensor([ 1, 2, 3, ..., 9989, 9990, 9998]), + values=tensor([0.7086, 0.2169, 0.2366, ..., 0.5166, 0.6786, 0.0291]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.2287, 0.7303, 0.8328, ..., 0.0492, 0.5795, 0.5066]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.529170036315918 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3921, 7937, ..., 39992035, + 39996055, 40000000]), + col_indices=tensor([ 1, 2, 3, ..., 9989, 9990, 9998]), + values=tensor([0.7086, 0.2169, 0.2366, ..., 0.5166, 0.6786, 0.0291]), + size=(10000, 10000), nnz=40000000, layout=torch.sparse_csr) +tensor([0.2287, 0.7303, 0.8328, ..., 0.0492, 0.5795, 0.5066]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 40000000 +Density: 0.4 +Time: 10.529170036315918 seconds + +[19.09, 18.66, 18.59, 18.91, 18.66, 18.68, 19.36, 18.92, 18.61, 18.43] +[75.39] +23.60867190361023 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 780, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.529170036315918, 'TIME_S_1KI': 13.498935943994766, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1779.8577748131752, 'W': 75.39} +[19.09, 18.66, 18.59, 18.91, 18.66, 18.68, 19.36, 18.92, 18.61, 18.43, 19.13, 18.43, 18.7, 18.63, 18.66, 18.43, 18.54, 18.93, 19.04, 18.93] +337.5400000000001 +16.877000000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 780, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 40000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.529170036315918, 'TIME_S_1KI': 13.498935943994766, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1779.8577748131752, 'W': 75.39, 'J_1KI': 2281.8689420681735, 'W_1KI': 96.65384615384616, 'W_D': 58.513, 'J_D': 1381.4142190959453, 'W_D_1KI': 75.01666666666667, 'J_D_1KI': 96.17521367521367} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.5.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.5.json new file mode 100644 index 0000000..6163120 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 637, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.681277513504028, "TIME_S_1KI": 16.76809656751025, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1913.3989510321617, "W": 73.81, "J_1KI": 3003.7660141792176, "W_1KI": 115.87127158555731, "W_D": 56.800250000000005, "J_D": 1472.4500578290822, "W_D_1KI": 89.16836734693878, "J_D_1KI": 139.98173837823984} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.5.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.5.output new file mode 100644 index 0000000..dce3a0d --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_0.5.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 1.6458442211151123} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4989, 10016, ..., 49989995, + 49995029, 50000000]), + col_indices=tensor([ 2, 3, 4, ..., 9996, 9998, 9999]), + values=tensor([0.7496, 0.9774, 0.8983, ..., 0.1532, 0.4365, 0.5482]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.5310, 0.4242, 0.2635, ..., 0.7577, 0.7181, 0.5961]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 1.6458442211151123 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '637', '-ss', '10000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 50000000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.681277513504028} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5020, 9944, ..., 49990121, + 49995010, 50000000]), + col_indices=tensor([ 0, 3, 4, ..., 9992, 9994, 9999]), + values=tensor([0.6012, 0.4574, 0.7579, ..., 0.9185, 0.5177, 0.4374]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.0792, 0.5429, 0.9377, ..., 0.0686, 0.6722, 0.7827]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.681277513504028 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5020, 9944, ..., 49990121, + 49995010, 50000000]), + col_indices=tensor([ 0, 3, 4, ..., 9992, 9994, 9999]), + values=tensor([0.6012, 0.4574, 0.7579, ..., 0.9185, 0.5177, 0.4374]), + size=(10000, 10000), nnz=50000000, layout=torch.sparse_csr) +tensor([0.0792, 0.5429, 0.9377, ..., 0.0686, 0.6722, 0.7827]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 50000000 +Density: 0.5 +Time: 10.681277513504028 seconds + +[19.19, 18.64, 18.62, 18.5, 18.66, 18.4, 18.46, 22.93, 19.17, 18.75] +[73.81] +25.92330241203308 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 637, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.681277513504028, 'TIME_S_1KI': 16.76809656751025, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1913.3989510321617, 'W': 73.81} +[19.19, 18.64, 18.62, 18.5, 18.66, 18.4, 18.46, 22.93, 19.17, 18.75, 19.6, 18.26, 18.95, 18.49, 18.71, 18.4, 19.05, 18.45, 18.45, 18.57] +340.195 +17.00975 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 637, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 50000000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.681277513504028, 'TIME_S_1KI': 16.76809656751025, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1913.3989510321617, 'W': 73.81, 'J_1KI': 3003.7660141792176, 'W_1KI': 115.87127158555731, 'W_D': 56.800250000000005, 'J_D': 1472.4500578290822, 'W_D_1KI': 89.16836734693878, 'J_D_1KI': 139.98173837823984} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json index 00c8b98..c9e0d89 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 284305, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.350545883178711, "TIME_S_1KI": 0.03640648558125503, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1004.1447986841201, "W": 73.05, "J_1KI": 3.5319280304043903, "W_1KI": 0.2569423682313009, "W_D": 56.574, "J_D": 777.6658157529831, "W_D_1KI": 0.19899052074356766, "J_D_1KI": 0.0006999191739278861} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 283407, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.425815343856812, "TIME_S_1KI": 0.03678743059930351, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1115.7269095683098, "W": 81.61, "J_1KI": 3.9368361034424333, "W_1KI": 0.2879604244073012, "W_D": 64.691, "J_D": 884.4196729185581, "W_D_1KI": 0.22826182839520548, "J_D_1KI": 0.0008054205732222757} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output index cbcf9b7..a197da3 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output @@ -1,266 +1,373 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019997835159301758} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019604206085205078} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 1000, 1000]), - col_indices=tensor([7109, 2385, 2417, 5435, 3828, 4759, 3881, 3125, 6359, - 8100, 8295, 5616, 9929, 4079, 7787, 359, 8328, 6707, - 7136, 309, 5171, 1453, 2735, 9306, 3785, 4541, 8081, - 2235, 61, 4688, 6276, 2644, 5471, 837, 3908, 9598, - 9530, 9506, 8707, 10, 4532, 534, 4561, 3707, 1054, - 2351, 8898, 694, 8085, 1481, 2176, 6847, 2046, 5123, - 1350, 2400, 2135, 4037, 9549, 4749, 612, 1214, 9582, - 9826, 1033, 8057, 3787, 6733, 5484, 5013, 513, 3997, - 9224, 768, 6259, 8198, 4081, 8587, 7654, 6997, 3629, - 5266, 8440, 8098, 4353, 7259, 6119, 7369, 9095, 5623, - 7383, 8433, 7910, 8608, 4139, 6690, 2987, 5688, 7896, - 7911, 6242, 6429, 722, 2362, 2947, 7072, 2748, 1401, - 7063, 9804, 2718, 4440, 4754, 9463, 9666, 7272, 8681, - 5892, 7815, 4523, 800, 9689, 6398, 686, 9595, 7223, - 252, 5697, 861, 6369, 1019, 2052, 8774, 3827, 3582, - 5597, 1734, 6986, 7447, 6272, 3623, 3565, 4417, 277, - 3843, 3333, 5620, 6442, 4003, 7391, 9554, 8526, 2101, - 9854, 8955, 4012, 5691, 3570, 9292, 3509, 3148, 5405, - 2599, 5760, 6655, 1397, 5002, 378, 9560, 8896, 7138, - 8297, 8567, 8541, 8058, 9553, 2217, 475, 973, 6966, - 8382, 9130, 3653, 6730, 5404, 1171, 6230, 4421, 8787, - 1013, 7148, 6496, 6029, 92, 1710, 3227, 4417, 1415, - 6028, 7960, 1123, 8741, 7693, 8010, 7518, 4472, 6141, - 7642, 3828, 9726, 5089, 1835, 1514, 2431, 8797, 5042, - 676, 3698, 5937, 3628, 6068, 1887, 5909, 6973, 5973, - 477, 5895, 1885, 7449, 3691, 1472, 679, 7389, 1532, - 192, 1918, 9427, 2430, 3048, 5494, 2607, 9231, 1796, - 5499, 9054, 3543, 8995, 5396, 1535, 682, 2719, 1631, - 5250, 5969, 556, 4778, 6509, 839, 3254, 5655, 3541, - 9791, 8791, 8465, 9659, 643, 5662, 6522, 1226, 6110, - 8738, 2729, 9937, 1594, 5216, 1576, 322, 138, 1174, - 8026, 182, 2386, 8814, 3055, 5706, 1632, 3053, 1250, - 5293, 4479, 2534, 9457, 8636, 4963, 8810, 6513, 3303, - 2068, 7365, 4824, 6966, 3195, 1321, 1454, 3116, 1891, - 4602, 103, 1064, 1824, 4653, 5493, 2856, 7214, 4796, - 8079, 259, 3544, 8673, 5026, 1149, 8318, 1462, 4443, - 1549, 5100, 9230, 1260, 6291, 510, 2869, 7765, 852, - 2488, 6706, 4128, 5493, 7619, 4287, 6605, 7113, 7503, - 9635, 3819, 3422, 9417, 6701, 4678, 7292, 2009, 2745, - 5180, 902, 7517, 8196, 9257, 3699, 5950, 411, 2498, - 9110, 1182, 7464, 8023, 3683, 9203, 4592, 2000, 9730, - 3403, 953, 7193, 6806, 597, 9712, 7122, 7344, 
9620, - 5762, 2940, 6890, 5706, 4675, 1376, 4625, 5803, 9790, - 8162, 563, 3992, 4806, 1869, 419, 4808, 6093, 8645, - 7788, 7684, 43, 5821, 4794, 4820, 4004, 9919, 4616, - 3804, 9946, 5014, 1305, 7851, 5946, 4091, 2485, 728, - 6971, 3549, 5585, 2412, 5888, 4457, 2799, 9818, 9618, - 2750, 2922, 131, 8551, 3306, 311, 2869, 5197, 1613, - 4841, 1155, 9841, 5868, 9266, 5281, 1337, 8753, 4768, - 9229, 9711, 9344, 3792, 7689, 9243, 3075, 8565, 7580, - 5999, 5247, 6273, 1505, 5520, 7307, 2076, 4532, 2693, - 6116, 7270, 3433, 8912, 5182, 9345, 8104, 6702, 450, - 9103, 8031, 6959, 6210, 5641, 9318, 4784, 626, 6063, - 6803, 5977, 7007, 8105, 2597, 6168, 9640, 5148, 1688, - 1007, 3856, 2440, 1610, 46, 3388, 8832, 9487, 77, - 9414, 713, 2259, 3015, 2404, 4677, 2375, 9203, 6270, - 6686, 5362, 8403, 149, 4599, 8799, 3044, 9352, 6320, - 9182, 3494, 5862, 697, 9208, 5897, 6456, 742, 9438, - 8241, 7310, 2664, 1140, 2638, 6331, 2836, 2339, 5416, - 6386, 5007, 3424, 4140, 8418, 2768, 7025, 8519, 4071, - 2786, 7259, 6812, 4479, 9408, 3790, 6230, 7656, 9695, - 7286, 975, 8931, 3543, 222, 7839, 9094, 3012, 9084, - 5657, 4120, 1509, 8871, 3510, 6988, 7621, 3914, 9254, - 7142, 2284, 5420, 893, 5854, 8200, 2689, 257, 9233, - 367, 2764, 2893, 1881, 3564, 7955, 9774, 2290, 4849, - 2776, 7436, 5689, 7041, 6283, 9905, 9032, 4403, 634, - 1762, 6101, 1433, 5426, 5594, 1114, 903, 7462, 6667, - 5106, 7360, 5373, 7882, 4664, 8370, 3731, 173, 2530, - 2913, 8577, 9326, 1993, 372, 3877, 3134, 5685, 5591, - 3164, 8523, 4532, 1524, 5876, 2337, 1910, 6825, 1701, - 3345, 9908, 1129, 356, 162, 6909, 8511, 9623, 5538, - 2227, 7108, 5801, 5376, 9922, 4394, 3505, 955, 7447, - 1821, 2264, 2665, 4362, 6017, 10, 1864, 589, 9835, - 2738, 3409, 7929, 445, 7809, 8506, 4478, 5175, 1642, - 6654, 4156, 4637, 493, 8641, 4849, 9459, 1260, 1722, - 3709, 7871, 294, 9014, 3592, 2234, 1761, 3148, 2059, - 5761, 4292, 2390, 8145, 3925, 4523, 4323, 5192, 2668, - 5700, 4434, 4088, 1368, 2304, 5701, 1648, 4535, 7667, - 1044, 9357, 9493, 3733, 2278, 133, 5541, 1244, 1991, - 6410, 8224, 9359, 3118, 3172, 8411, 5956, 6213, 2834, - 5982, 6131, 5315, 9976, 9217, 3251, 7863, 191, 6442, - 949, 3581, 6996, 1148, 1590, 3581, 8669, 6526, 7295, - 8556, 631, 6698, 4352, 244, 9874, 5590, 8576, 2401, - 1573, 3011, 2458, 9030, 4245, 4065, 2172, 6903, 7079, - 180, 6734, 6853, 2898, 2302, 7261, 5268, 6537, 9604, - 2335, 8773, 923, 6612, 3305, 4601, 6384, 7525, 2043, - 5654, 6990, 6475, 3112, 7425, 5617, 5040, 5416, 1325, - 6773, 9569, 5311, 6897, 8039, 8302, 1711, 1443, 196, - 4009, 1601, 330, 2436, 6513, 5857, 7676, 6398, 2101, - 1528, 4145, 5694, 8965, 6555, 727, 1560, 7665, 4841, - 9592, 6158, 320, 8749, 6157, 8725, 3024, 8319, 4003, - 8281, 4317, 3464, 6228, 1404, 1578, 4680, 708, 1351, - 5822, 8145, 2056, 581, 3036, 7910, 1186, 9116, 6024, - 1434, 1121, 2457, 6394, 749, 7915, 803, 8518, 7707, - 4500, 2148, 2294, 5223, 1435, 4429, 6437, 2976, 3334, - 8071, 6080, 8489, 4989, 9346, 5095, 7683, 5920, 8664, - 8772, 9150, 8104, 7035, 3166, 4753, 5176, 7494, 3694, - 9055, 8323, 5783, 9581, 6133, 7181, 4449, 117, 1431, - 1310, 266, 1137, 641, 4783, 4011, 9704, 8990, 9913, - 2411, 6941, 1035, 6630, 99, 4087, 4073, 98, 8673, - 9180, 1336, 2464, 8321, 3258, 7885, 1201, 4488, 1290, - 3441, 3470, 3653, 1653, 1475, 9906, 7698, 6670, 6752, - 2289, 4337, 3018, 852, 1912, 1673, 4537, 1138, 4596, - 7841, 7168, 8678, 7894, 9826, 4471, 2161, 7831, 2817, - 1699, 5434, 614, 6337, 9902, 5326, 7325, 5049, 225, - 8836, 5786, 4286, 4756, 4125, 2729, 2426, 1236, 6028, - 1462, 9172, 3237, 
1796, 628, 5307, 403, 6318, 2926, - 4691, 4352, 6358, 7028, 7697, 1313, 5381, 6924, 8207, - 3263, 6004, 9719, 5541, 3521, 162, 72, 8395, 7836, - 2764, 8356, 2955, 3340, 3636, 5608, 97, 858, 6715, - 7963, 8977, 8904, 1286, 2929, 52, 9172, 966, 8016, - 7012]), - values=tensor([0.0046, 0.9711, 0.5783, 0.3964, 0.2053, 0.1726, 0.9437, - 0.5542, 0.1093, 0.4547, 0.1845, 0.3445, 0.8403, 0.4506, - 0.0902, 0.5378, 0.5966, 0.3221, 0.2087, 0.6069, 0.9250, - 0.3018, 0.8874, 0.5907, 0.8418, 0.6404, 0.9959, 0.3059, - 0.6226, 0.0368, 0.8190, 0.1303, 0.5256, 0.5555, 0.1522, - 0.6237, 0.0781, 0.5043, 0.0689, 0.0302, 0.2280, 0.2510, - 0.9391, 0.7562, 0.6143, 0.6495, 0.3458, 0.6716, 0.1218, - 0.4951, 0.5813, 0.4377, 0.3803, 0.5588, 0.7278, 0.7107, - 0.7077, 0.3647, 0.7869, 0.9654, 0.7183, 0.8252, 0.4085, - 0.6874, 0.5162, 0.6114, 0.2250, 0.1783, 0.2852, 0.3965, - 0.8459, 0.5182, 0.7181, 0.4470, 0.6154, 0.2685, 0.7435, - 0.8775, 0.0173, 0.8189, 0.0651, 0.4964, 0.7706, 0.1775, - 0.6594, 0.4169, 0.0757, 0.1719, 0.6911, 0.1189, 0.3832, - 0.8399, 0.3009, 0.9319, 0.3858, 0.4031, 0.3018, 0.8705, - 0.5963, 0.7087, 0.4444, 0.2108, 0.7106, 0.9488, 0.1619, - 0.3416, 0.0156, 0.2551, 0.5023, 0.9762, 0.6355, 0.9340, - 0.8159, 0.3185, 0.7462, 0.9256, 0.7851, 0.0782, 0.8295, - 0.8717, 0.6926, 0.4574, 0.6799, 0.4534, 0.6691, 0.9648, - 0.8736, 0.5727, 0.3189, 0.4649, 0.9263, 0.7281, 0.0818, - 0.5704, 0.3615, 0.0346, 0.8665, 0.5480, 0.2733, 0.6432, - 0.8981, 0.9119, 0.2568, 0.5919, 0.8320, 0.2875, 0.1674, - 0.5919, 0.1054, 0.6833, 0.1399, 0.6362, 0.3561, 0.7467, - 0.9977, 0.1907, 0.4833, 0.7635, 0.1246, 0.8287, 0.5890, - 0.3699, 0.5934, 0.4119, 0.6383, 0.0205, 0.6354, 0.0025, - 0.3055, 0.5465, 0.8335, 0.9974, 0.4798, 0.9157, 0.2277, - 0.9812, 0.4215, 0.6840, 0.5241, 0.0527, 0.5703, 0.9916, - 0.4607, 0.4300, 0.6671, 0.7892, 0.1737, 0.1225, 0.0598, - 0.8796, 0.4030, 0.2456, 0.1589, 0.8107, 0.8549, 0.7206, - 0.2242, 0.4514, 0.2530, 0.9423, 0.8787, 0.4399, 0.3810, - 0.5901, 0.7345, 0.4668, 0.3478, 0.9963, 0.2961, 0.9270, - 0.0301, 0.4210, 0.5019, 0.9625, 0.5041, 0.7790, 0.5782, - 0.6252, 0.3890, 0.3274, 0.9770, 0.8231, 0.4966, 0.5155, - 0.5583, 0.1196, 0.3282, 0.5482, 0.7136, 0.0064, 0.9655, - 0.6750, 0.3247, 0.2048, 0.1243, 0.1485, 0.7152, 0.4737, - 0.3270, 0.2946, 0.1922, 0.2685, 0.5926, 0.8430, 0.0407, - 0.4774, 0.0717, 0.6093, 0.7594, 0.9792, 0.5158, 0.2854, - 0.0632, 0.5014, 0.9048, 0.8406, 0.1082, 0.0133, 0.0800, - 0.2800, 0.2617, 0.2001, 0.9723, 0.1130, 0.9439, 0.4990, - 0.9070, 0.6093, 0.2296, 0.6950, 0.7193, 0.1734, 0.8450, - 0.0447, 0.7501, 0.4548, 0.6695, 0.9652, 0.8606, 0.7535, - 0.0773, 0.7629, 0.0103, 0.0948, 0.7605, 0.9538, 0.4662, - 0.4806, 0.8728, 0.0392, 0.7303, 0.8236, 0.3805, 0.4985, - 0.1028, 0.6346, 0.5210, 0.1024, 0.8731, 0.5873, 0.7105, - 0.2108, 0.4969, 0.9031, 0.1387, 0.8343, 0.4993, 0.5455, - 0.3538, 0.1324, 0.9783, 0.5165, 0.4133, 0.9455, 0.6371, - 0.7301, 0.2826, 0.6823, 0.7616, 0.3904, 0.8366, 0.6816, - 0.9409, 0.7877, 0.5158, 0.1541, 0.1117, 0.4563, 0.1217, - 0.9525, 0.4021, 0.7397, 0.2559, 0.7187, 0.5250, 0.4043, - 0.2738, 0.9184, 0.0736, 0.1076, 0.2850, 0.5552, 0.1645, - 0.2585, 0.5505, 0.6136, 0.2864, 0.1428, 0.3411, 0.7240, - 0.2285, 0.7929, 0.6109, 0.9222, 0.1483, 0.5863, 0.2524, - 0.4750, 0.1637, 0.5790, 0.5447, 0.3454, 0.7649, 0.1908, - 0.3288, 0.5701, 0.5499, 0.0369, 0.9922, 0.3383, 0.4741, - 0.1304, 0.9901, 0.7400, 0.9626, 0.4575, 0.2628, 0.5791, - 0.5795, 0.8287, 0.9675, 0.5851, 0.5863, 0.4631, 0.4520, - 0.6908, 0.8302, 0.4709, 0.1620, 0.2509, 0.4523, 0.9606, - 0.8422, 0.9930, 0.3544, 
0.9618, 0.4633, 0.8424, 0.3012, - 0.5074, 0.7673, 0.9314, 0.0486, 0.6586, 0.7796, 0.8682, - 0.1246, 0.1399, 0.3038, 0.0213, 0.2975, 0.1825, 0.5585, - 0.5935, 0.0441, 0.6723, 0.6869, 0.1223, 0.3975, 0.4510, - 0.4005, 0.3733, 0.5220, 0.4286, 0.5743, 0.5693, 0.9693, - 0.4086, 0.3833, 0.3952, 0.3540, 0.0647, 0.9855, 0.3131, - 0.1430, 0.2211, 0.2883, 0.2379, 0.8994, 0.5349, 0.6071, - 0.0012, 0.7795, 0.9085, 0.8059, 0.8457, 0.2047, 0.3553, - 0.4264, 0.4073, 0.7708, 0.1265, 0.5393, 0.9354, 0.1862, - 0.8447, 0.1265, 0.0465, 0.9024, 0.1589, 0.4910, 0.2636, - 0.3445, 0.1545, 0.4366, 0.0582, 0.3173, 0.5045, 0.7525, - 0.2332, 0.9149, 0.7252, 0.7029, 0.1697, 0.1214, 0.8720, - 0.7803, 0.4878, 0.7906, 0.7439, 0.1899, 0.9880, 0.0016, - 0.8939, 0.2914, 0.7113, 0.9239, 0.8221, 0.8266, 0.3684, - 0.5005, 0.8793, 0.5734, 0.2862, 0.4043, 0.7599, 0.4516, - 0.4459, 0.5388, 0.8147, 0.3911, 0.4150, 0.0336, 0.3314, - 0.5237, 0.5194, 0.3540, 0.6803, 0.0259, 0.3157, 0.1146, - 0.7491, 0.8857, 0.2876, 0.6475, 0.0696, 0.1554, 0.9070, - 0.5700, 0.0259, 0.9973, 0.2760, 0.1917, 0.3491, 0.2604, - 0.2647, 0.7207, 0.8895, 0.6272, 0.5734, 0.1372, 0.5916, - 0.1230, 0.6040, 0.2042, 0.4307, 0.6738, 0.3775, 0.2795, - 0.7792, 0.4760, 0.5589, 0.1711, 0.0766, 0.4005, 0.1397, - 0.4940, 0.6769, 0.4257, 0.6752, 0.0242, 0.5821, 0.7391, - 0.1474, 0.0300, 0.9215, 0.5654, 0.7654, 0.6347, 0.1786, - 0.5402, 0.8466, 0.6369, 0.9853, 0.3023, 0.7607, 0.1105, - 0.3121, 0.2599, 0.7204, 0.9168, 0.3226, 0.5478, 0.0268, - 0.5417, 0.0114, 0.3890, 0.7421, 0.8261, 0.6148, 0.3356, - 0.9823, 0.4833, 0.8964, 0.0377, 0.3211, 0.7509, 0.8021, - 0.7647, 0.4392, 0.6333, 0.7826, 0.0068, 0.0507, 0.8981, - 0.1710, 0.5027, 0.0944, 0.1727, 0.3980, 0.1663, 0.4815, - 0.5099, 0.0482, 0.4204, 0.5662, 0.4082, 0.0087, 0.3299, - 0.7731, 0.2476, 0.4990, 0.2030, 0.3567, 0.4913, 0.1398, - 0.7595, 0.5107, 0.5222, 0.1537, 0.7700, 0.5986, 0.2341, - 0.1186, 0.9382, 0.4366, 0.4841, 0.6114, 0.1605, 0.4472, - 0.8581, 0.4342, 0.2576, 0.0087, 0.5861, 0.5826, 0.6503, - 0.7493, 0.7005, 0.0734, 0.4947, 0.2408, 0.7256, 0.1668, - 0.0344, 0.0192, 0.7524, 0.4331, 0.7951, 0.0257, 0.8771, - 0.8072, 0.0260, 0.4420, 0.0808, 0.6151, 0.0167, 0.8132, - 0.7864, 0.9344, 0.3589, 0.8588, 0.5190, 0.0941, 0.3937, - 0.8457, 0.0223, 0.2472, 0.0820, 0.6375, 0.0538, 0.4322, - 0.7601, 0.6627, 0.0062, 0.0971, 0.7569, 0.9276, 0.3232, - 0.8488, 0.9506, 0.0535, 0.2724, 0.3163, 0.3730, 0.1016, - 0.7062, 0.4016, 0.3210, 0.3024, 0.4988, 0.2671, 0.4939, - 0.4041, 0.2020, 0.5827, 0.9863, 0.2068, 0.6956, 0.4398, - 0.8649, 0.3693, 0.0382, 0.5895, 0.1246, 0.3779, 0.4021, - 0.5374, 0.0896, 0.5865, 0.0911, 0.1552, 0.7309, 0.7310, - 0.2105, 0.1533, 0.2641, 0.5318, 0.8937, 0.7014, 0.2487, - 0.1566, 0.4421, 0.8097, 0.3049, 0.0983, 0.7323, 0.3418, - 0.2636, 0.9905, 0.7297, 0.1602, 0.5620, 0.7455, 0.1122, - 0.2778, 0.8976, 0.4302, 0.8190, 0.8163, 0.5642, 0.1086, - 0.1870, 0.4408, 0.7743, 0.8301, 0.3106, 0.4607, 0.1685, - 0.0543, 0.2486, 0.7269, 0.7754, 0.8640, 0.8996, 0.1225, - 0.2326, 0.0306, 0.0742, 0.7702, 0.5675, 0.7799, 0.9378, - 0.5182, 0.8150, 0.0442, 0.1759, 0.3578, 0.2557, 0.3991, - 0.6127, 0.7081, 0.3617, 0.4428, 0.3630, 0.7568, 0.7306, - 0.3583, 0.4751, 0.1731, 0.4017, 0.8439, 0.5007, 0.7755, - 0.1364, 0.1487, 0.0013, 0.4861, 0.3305, 0.4277, 0.8493, - 0.7152, 0.9337, 0.8933, 0.3178, 0.0571, 0.4652, 0.1484, - 0.1146, 0.5186, 0.0231, 0.7337, 0.2879, 0.2599, 0.0899, - 0.0770, 0.6050, 0.7205, 0.0087, 0.4341, 0.8062, 0.5612, - 0.1963, 0.5542, 0.4445, 0.5707, 0.2487, 0.0387, 0.5727, - 0.6643, 0.9010, 0.1369, 0.6983, 0.6861, 
0.3137, 0.4893, - 0.4623, 0.0331, 0.5923, 0.7494, 0.8065, 0.6913, 0.0840, - 0.0472, 0.5097, 0.9016, 0.0515, 0.3195, 0.9293, 0.7461, - 0.1002, 0.7266, 0.1789, 0.8944, 0.7784, 0.2704, 0.3854, - 0.4940, 0.9700, 0.7281, 0.8176, 0.6416, 0.3232, 0.8090, - 0.8313, 0.2872, 0.7163, 0.7261, 0.2980, 0.0117, 0.1334, - 0.9487, 0.8411, 0.1255, 0.1021, 0.4857, 0.9363, 0.6866, - 0.0451, 0.9201, 0.6535, 0.4626, 0.0913, 0.1620, 0.4656, - 0.4797, 0.5295, 0.5314, 0.4883, 0.4602, 0.8743, 0.5607, - 0.2065, 0.7862, 0.3051, 0.8028, 0.0092, 0.2581, 0.3033, - 0.5664, 0.6606, 0.8288, 0.0928, 0.1772, 0.1866, 0.3586, - 0.1155, 0.4118, 0.0623, 0.7029, 0.5408, 0.9844, 0.7491, - 0.3391, 0.6861, 0.5463, 0.9448, 0.4787, 0.4255, 0.1814, - 0.5560, 0.4709, 0.7970, 0.6626, 0.4346, 0.7159, 0.4083, - 0.2911, 0.8691, 0.8557, 0.2129, 0.4209, 0.7430, 0.6979, - 0.9694, 0.7088, 0.4947, 0.8038, 0.6058, 0.9515, 0.7892, - 0.7707, 0.4757, 0.4024, 0.6333, 0.3033, 0.1606, 0.0297, - 0.7386, 0.5267, 0.5079, 0.4638, 0.7216, 0.8308, 0.8888, - 0.3272, 0.2089, 0.9250, 0.8630, 0.4374, 0.2899, 0.1274, - 0.6033, 0.2213, 0.0608, 0.3072, 0.5266, 0.1377, 0.2166, - 0.9680, 0.0325, 0.7625, 0.3333, 0.5851, 0.2286, 0.8361, - 0.7176, 0.8044, 0.5438, 0.9445, 0.8563, 0.5310, 0.1555, - 0.9713, 0.4299, 0.7669, 0.0890, 0.3225, 0.6580, 0.0724, - 0.4834, 0.7834, 0.9243, 0.1867, 0.6811, 0.0298, 0.4897, - 0.4802, 0.4453, 0.3359, 0.1067, 0.8065, 0.3394, 0.3666, - 0.4986, 0.5179, 0.7221, 0.2831, 0.8118, 0.0090]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 999, 1000, 1000]), + col_indices=tensor([2455, 7819, 4102, 11, 8695, 9312, 8790, 5610, 1397, + 3661, 5143, 504, 9108, 6989, 6186, 8887, 7677, 3136, + 1718, 8126, 3770, 2053, 2494, 5633, 9529, 2671, 6606, + 9739, 4848, 5639, 6230, 6801, 4053, 6834, 7564, 6887, + 5195, 3595, 1264, 8027, 6208, 7336, 8866, 7238, 1261, + 8527, 9475, 4536, 4655, 3128, 2268, 894, 1342, 4798, + 8965, 2042, 7649, 1839, 5223, 8003, 9643, 7290, 6812, + 4403, 6800, 4483, 7789, 9456, 7829, 9125, 1965, 2676, + 7730, 5794, 1079, 7453, 5378, 9278, 2002, 5970, 9614, + 417, 1021, 6332, 4813, 514, 7115, 6483, 5078, 4615, + 7384, 9346, 5797, 4828, 4357, 9452, 9713, 3, 4849, + 1350, 6093, 3304, 3630, 1476, 2267, 6549, 1083, 2896, + 4440, 5080, 3364, 9638, 798, 6600, 1879, 6698, 8523, + 5090, 5978, 2900, 4916, 6017, 6800, 3359, 4820, 5421, + 6403, 6339, 8079, 6359, 3707, 5801, 875, 8882, 9799, + 6856, 5952, 9451, 208, 1648, 5332, 4732, 1003, 9081, + 3941, 4986, 12, 9697, 3168, 4727, 1925, 306, 2290, + 6620, 4258, 6589, 4386, 2769, 3019, 7243, 6130, 5388, + 1315, 8149, 3251, 8347, 9057, 2158, 1520, 2026, 1970, + 2293, 4446, 5302, 7513, 9963, 8572, 4292, 8884, 3802, + 1261, 1430, 9667, 2597, 9477, 2748, 8281, 279, 4088, + 5748, 9457, 5786, 2696, 2277, 2068, 8111, 153, 7994, + 6343, 4082, 9711, 8663, 7242, 1034, 1480, 241, 1093, + 4416, 7571, 9589, 987, 9131, 4024, 1333, 2607, 695, + 6535, 9649, 578, 2764, 8305, 1492, 507, 4723, 2104, + 2918, 7803, 8543, 8044, 1129, 8735, 2908, 102, 8629, + 1656, 8136, 9691, 7413, 4910, 657, 3514, 5043, 1961, + 7225, 4955, 870, 2127, 300, 6733, 6261, 6340, 8459, + 1491, 4245, 3003, 3399, 1656, 3099, 7728, 3044, 7800, + 3060, 8357, 612, 3241, 9567, 1021, 7085, 7980, 6018, + 8586, 5041, 3973, 6134, 1429, 7113, 2692, 990, 7655, + 8999, 3894, 6560, 2116, 2885, 8594, 9729, 301, 8248, + 9562, 1500, 5030, 1433, 1957, 5509, 5515, 8768, 8231, + 5671, 2694, 6186, 7401, 9259, 8839, 2523, 1616, 1946, + 3129, 8769, 9377, 2342, 5488, 9438, 462, 1368, 7523, + 790, 7136, 6985, 3322, 1165, 8513, 2670, 1861, 3083, + 9144, 8103, 811, 7319, 
4158, 3025, 1397, 4532, 7221, + 2170, 8508, 3177, 1956, 9513, 2196, 4614, 7910, 6685, + 7584, 5183, 3530, 8474, 9186, 8346, 699, 335, 8599, + 35, 9962, 1729, 9624, 5037, 4497, 8897, 5552, 8644, + 9978, 7581, 6600, 9982, 6602, 8609, 5626, 7506, 2964, + 619, 5948, 7867, 9187, 9145, 2005, 2353, 7029, 2259, + 5814, 1773, 3178, 677, 5538, 8251, 4350, 9090, 2921, + 7955, 4236, 4183, 7686, 2312, 5667, 7286, 4685, 3518, + 9031, 1873, 176, 4758, 5896, 9327, 9140, 6265, 6523, + 8937, 9156, 2990, 572, 4122, 2010, 6914, 6845, 7650, + 3548, 5734, 9359, 9121, 6095, 2381, 6302, 4812, 7383, + 1743, 774, 359, 3818, 4897, 335, 2429, 7823, 4763, + 6890, 8610, 4275, 2580, 4103, 3469, 3125, 8654, 9392, + 577, 263, 4650, 7392, 7870, 2222, 8330, 4272, 8885, + 7879, 7519, 1135, 1692, 5213, 9701, 1226, 1278, 7365, + 6561, 7087, 5302, 8495, 684, 4815, 459, 5028, 6220, + 4930, 1207, 2150, 3603, 2051, 520, 3267, 8478, 8363, + 6328, 3553, 496, 5120, 6910, 6403, 5896, 6698, 776, + 1760, 7857, 1293, 1289, 971, 7115, 3955, 2155, 1652, + 6608, 6805, 6934, 5082, 5404, 7894, 390, 1593, 4892, + 8792, 4242, 9610, 8448, 6219, 4081, 22, 1962, 5888, + 5692, 7516, 3483, 1696, 3523, 7288, 3365, 7442, 9506, + 4513, 1122, 7768, 4954, 6274, 2042, 7455, 2205, 9189, + 4732, 1152, 9756, 5281, 2650, 6518, 4755, 2023, 700, + 7982, 2457, 6476, 5781, 8630, 1793, 5174, 2704, 4178, + 4460, 8369, 861, 9722, 1352, 2540, 4388, 6372, 436, + 1601, 6673, 3036, 4053, 6067, 8003, 2348, 9564, 6557, + 2855, 1604, 3438, 8033, 8642, 2301, 9299, 1181, 8369, + 8599, 7944, 6690, 4073, 6510, 6963, 8202, 7527, 3281, + 735, 4230, 4843, 9977, 4269, 5248, 2561, 7155, 3347, + 5630, 3587, 2208, 3775, 4255, 3661, 5177, 2793, 5336, + 9415, 818, 8936, 8908, 6372, 3383, 448, 9406, 8291, + 4826, 653, 8758, 6371, 6737, 8060, 8833, 6141, 1684, + 3083, 5642, 7466, 8178, 1781, 3582, 2465, 3351, 9559, + 4129, 2865, 714, 8819, 7926, 7861, 5400, 9629, 5472, + 3706, 8972, 3748, 4867, 5996, 7983, 7395, 1224, 5989, + 7910, 3787, 9604, 5074, 7584, 9305, 574, 6681, 945, + 1391, 7417, 1470, 5952, 8047, 9128, 5848, 4777, 9727, + 2386, 8276, 5524, 1601, 7283, 5477, 855, 5714, 4006, + 1616, 7412, 7487, 1097, 4921, 7440, 1923, 4905, 4051, + 8158, 1907, 1418, 6744, 8280, 8071, 8017, 3323, 496, + 9752, 7760, 3257, 4866, 4596, 6892, 1721, 4253, 7304, + 3838, 2922, 8639, 3321, 6640, 6875, 3732, 2118, 8854, + 4514, 7588, 4642, 9657, 364, 3618, 8319, 7601, 8144, + 2053, 1073, 6739, 1923, 9750, 1176, 2103, 2491, 4943, + 1773, 9042, 5056, 9622, 414, 7734, 1814, 547, 2741, + 6680, 2442, 2796, 7468, 9734, 7866, 345, 3580, 8342, + 6945, 2346, 9605, 4653, 3614, 1176, 6012, 5216, 9812, + 8475, 602, 5549, 9984, 3360, 2762, 5135, 2078, 6148, + 6514, 7773, 7579, 9355, 4131, 377, 2584, 8697, 8233, + 183, 7189, 2450, 329, 9442, 8773, 2134, 62, 2122, + 8510, 9129, 5598, 5188, 7944, 5762, 5590, 6403, 1480, + 9272, 7402, 747, 2238, 8495, 6364, 1215, 9591, 1309, + 1442, 9180, 3078, 1712, 736, 262, 9296, 2409, 4974, + 7835, 584, 450, 1317, 8527, 379, 8130, 9973, 1385, + 2519, 2759, 811, 8930, 20, 5805, 7038, 1309, 3314, + 5874, 1790, 925, 9559, 7471, 7725, 2642, 2828, 6795, + 1292, 8391, 549, 6527, 8311, 3209, 5869, 8257, 1093, + 9116, 6635, 4473, 4364, 6060, 9730, 4943, 1266, 2897, + 4515, 1492, 7608, 753, 3143, 4575, 8450, 672, 60, + 5470, 102, 2843, 8892, 1400, 6099, 8864, 1991, 7927, + 758, 3444, 3100, 6425, 2493, 2238, 4660, 7244, 2498, + 7065, 3569, 6024, 2481, 5782, 8609, 4854, 4808, 7362, + 470, 5615, 886, 3896, 3737, 1061, 9184, 8486, 3725, + 4486, 4782, 1510, 4360, 1430, 3844, 5545, 1952, 5531, + 
7991, 6967, 3918, 9282, 6834, 8278, 7232, 1042, 5577, + 7140, 5682, 6235, 2426, 4367, 3046, 1284, 324, 5779, + 7660, 6147, 9347, 2818, 8288, 6309, 7822, 2992, 2138, + 6137, 4667, 2534, 267, 855, 9377, 1080, 2572, 4185, + 1159, 9189, 235, 5551, 5442, 8615, 5941, 2333, 5603, + 8823, 9751, 8238, 7055, 6875, 3139, 4539, 4450, 6280, + 8383, 7878, 8710, 7316, 297, 6101, 1862, 6135, 9399, + 1231, 1042, 406, 5918, 3920, 2100, 6593, 9382, 540, + 3267, 3804, 6269, 6574, 7347, 3806, 6920, 6007, 5439, + 1054, 6835, 3143, 348, 2957, 2293, 6244, 6556, 6106, + 3548]), + values=tensor([7.7676e-01, 4.3511e-01, 8.6888e-01, 6.6196e-01, + 5.5843e-01, 1.2317e-01, 6.4590e-01, 9.2068e-02, + 4.7157e-01, 7.4742e-01, 1.5601e-01, 2.4945e-01, + 9.3800e-01, 7.0295e-01, 8.0418e-01, 7.2391e-01, + 7.3510e-01, 8.8153e-01, 1.6479e-01, 2.5319e-01, + 6.6497e-02, 3.8588e-01, 7.8093e-01, 6.8969e-01, + 6.9003e-01, 2.9211e-01, 7.5497e-01, 4.3915e-01, + 2.7127e-01, 8.7178e-01, 7.9460e-01, 3.0448e-02, + 2.7363e-01, 7.9556e-04, 5.0699e-01, 9.3637e-01, + 8.1513e-02, 3.0579e-01, 1.7203e-01, 3.0116e-01, + 9.6728e-01, 8.8479e-01, 9.3263e-02, 6.5457e-01, + 8.3097e-01, 5.4422e-01, 6.4257e-01, 8.7856e-01, + 8.3579e-01, 5.3100e-01, 3.2650e-01, 5.0038e-01, + 3.7469e-01, 8.7251e-01, 2.5638e-01, 2.1243e-01, + 6.7771e-01, 7.3737e-01, 9.4053e-01, 8.4649e-01, + 8.3938e-01, 3.3832e-01, 1.5003e-01, 6.7943e-01, + 1.4736e-01, 9.8582e-01, 3.1957e-02, 1.8458e-03, + 4.6329e-01, 6.5000e-01, 1.5208e-01, 6.9857e-01, + 9.3001e-01, 2.7433e-01, 1.0225e-01, 6.9030e-01, + 1.4823e-01, 4.3322e-01, 6.6941e-01, 5.1920e-01, + 4.0252e-01, 5.2531e-02, 1.0175e-01, 3.2019e-01, + 4.7568e-01, 5.8094e-01, 3.6249e-01, 4.1272e-01, + 5.2113e-01, 1.9351e-01, 9.3180e-01, 2.8211e-01, + 1.3038e-01, 6.7011e-01, 9.7987e-01, 8.2692e-02, + 7.2597e-01, 3.9772e-01, 2.4438e-01, 7.2234e-01, + 6.3945e-01, 9.3909e-02, 5.0401e-01, 6.9082e-01, + 1.7982e-01, 1.1665e-01, 1.7199e-01, 4.5750e-01, + 3.3053e-01, 3.5072e-01, 3.1737e-01, 6.6392e-01, + 5.8412e-01, 2.6140e-01, 8.1328e-01, 1.1981e-01, + 1.2810e-01, 4.4727e-01, 2.4684e-01, 3.5376e-01, + 4.8500e-01, 5.2411e-01, 7.1029e-01, 2.6461e-01, + 6.8870e-01, 4.3587e-01, 2.0553e-01, 8.0055e-01, + 4.8545e-01, 8.3936e-01, 4.5492e-02, 2.1382e-01, + 1.1255e-01, 8.3228e-01, 5.9997e-01, 5.6041e-01, + 7.8188e-01, 5.3049e-01, 7.2644e-02, 3.8511e-01, + 5.7070e-01, 8.7824e-02, 1.1937e-01, 2.4162e-01, + 2.8478e-02, 6.3002e-01, 5.7014e-01, 7.4388e-01, + 5.4403e-01, 8.5873e-01, 5.0125e-01, 4.9210e-01, + 1.5489e-01, 3.4691e-02, 7.5386e-01, 2.6913e-01, + 2.7861e-02, 3.7296e-01, 4.8211e-02, 5.2535e-01, + 3.3896e-01, 7.6334e-01, 9.3027e-01, 8.1778e-01, + 2.1985e-01, 1.9693e-01, 2.4941e-01, 7.5663e-01, + 1.7368e-01, 5.1750e-01, 4.4755e-01, 3.3057e-01, + 8.7724e-01, 7.6346e-01, 2.7773e-01, 8.1424e-01, + 4.4711e-01, 3.7928e-01, 4.6945e-01, 8.4690e-02, + 5.6030e-01, 6.1715e-01, 2.4712e-01, 4.9675e-01, + 5.8504e-01, 1.8716e-01, 6.9505e-01, 7.6761e-01, + 7.3662e-01, 2.0824e-01, 9.1979e-01, 2.2216e-01, + 6.7197e-01, 5.9727e-01, 6.2578e-01, 6.1583e-01, + 7.7257e-01, 9.1955e-01, 1.3263e-01, 5.4695e-01, + 9.0869e-01, 8.9243e-01, 6.1230e-01, 5.7473e-01, + 8.0174e-01, 8.7921e-01, 4.3263e-01, 2.0552e-01, + 9.7366e-01, 6.2141e-01, 4.4388e-01, 9.3679e-01, + 9.9671e-01, 3.9168e-01, 3.6268e-01, 1.4304e-01, + 8.6320e-01, 7.9524e-01, 3.5040e-01, 2.8178e-01, + 6.9912e-01, 3.4802e-01, 4.6408e-01, 5.6252e-01, + 4.0066e-01, 1.5590e-01, 6.7353e-01, 9.2757e-01, + 6.3032e-01, 1.8838e-01, 4.0044e-01, 2.3705e-01, + 5.9221e-01, 4.3845e-01, 4.7472e-01, 3.3218e-02, + 1.4352e-01, 9.9074e-01, 
5.9595e-01, 7.2031e-01, + 2.6817e-01, 9.7580e-01, 8.0192e-01, 9.6142e-01, + 8.5786e-01, 5.8535e-01, 8.0424e-01, 7.7231e-01, + 1.1383e-02, 9.2793e-01, 4.2474e-01, 7.3181e-01, + 4.3667e-01, 1.2831e-01, 6.9425e-01, 7.8069e-01, + 3.4216e-01, 1.0665e-01, 2.6695e-01, 1.4655e-04, + 6.3274e-01, 1.8741e-01, 6.7420e-01, 1.2858e-01, + 2.9153e-01, 8.7456e-01, 4.4179e-02, 8.7575e-01, + 7.8988e-01, 3.1394e-02, 5.6542e-01, 7.2339e-01, + 3.9510e-01, 2.2042e-01, 4.5121e-01, 4.5310e-01, + 5.5384e-01, 4.6224e-01, 4.5436e-01, 1.5627e-01, + 5.0330e-01, 6.2324e-01, 1.0787e-01, 5.1919e-01, + 2.4520e-01, 4.9417e-01, 4.7587e-01, 7.0407e-01, + 9.4495e-01, 3.1828e-01, 3.7069e-01, 2.4529e-01, + 8.8737e-01, 6.8823e-01, 3.1416e-01, 9.0215e-01, + 1.7515e-01, 8.6449e-01, 7.1815e-01, 7.7776e-02, + 4.3146e-01, 1.9027e-01, 4.7730e-01, 8.5590e-01, + 6.7255e-01, 7.3999e-01, 7.0103e-01, 9.6060e-01, + 2.1550e-01, 3.9491e-01, 8.4842e-01, 8.4683e-01, + 7.3256e-01, 1.1784e-01, 3.7123e-01, 4.4846e-02, + 3.5491e-01, 6.7555e-01, 3.3165e-01, 4.1122e-01, + 3.1491e-01, 3.3548e-01, 4.9884e-01, 5.7625e-01, + 5.0081e-01, 1.2757e-01, 1.6288e-01, 3.7376e-01, + 7.5129e-01, 3.5225e-01, 8.2955e-01, 3.4082e-01, + 1.3445e-01, 4.3502e-01, 6.3588e-01, 5.1752e-01, + 4.4866e-01, 9.5135e-01, 4.0662e-01, 7.2239e-01, + 7.9154e-01, 7.9358e-01, 4.1463e-01, 2.9136e-01, + 2.4307e-01, 1.8568e-01, 8.5983e-02, 4.1495e-01, + 6.4960e-02, 4.8637e-01, 2.4141e-01, 1.3391e-01, + 4.4530e-01, 6.2302e-01, 2.2171e-01, 3.0942e-01, + 8.4810e-02, 6.4619e-01, 2.8361e-01, 2.5008e-02, + 6.6062e-01, 3.4635e-01, 4.2529e-02, 4.6604e-01, + 5.4757e-01, 7.4412e-02, 3.7201e-01, 4.4894e-01, + 8.1211e-01, 8.2777e-01, 5.2399e-01, 5.2487e-01, + 5.1385e-01, 7.6098e-01, 7.0300e-01, 7.0205e-01, + 4.3516e-01, 1.2198e-01, 7.7144e-01, 8.3459e-01, + 1.5101e-01, 4.7727e-01, 2.2759e-01, 2.1749e-01, + 4.4153e-01, 2.3228e-01, 2.3774e-01, 3.6341e-01, + 8.6034e-01, 7.9816e-01, 7.6733e-01, 3.0819e-02, + 2.3331e-01, 2.2463e-01, 4.6389e-01, 1.3386e-01, + 9.3030e-01, 5.6475e-01, 7.5111e-02, 9.8787e-01, + 6.0849e-01, 8.1161e-02, 6.9162e-01, 7.7839e-01, + 8.5151e-01, 8.3747e-01, 7.0901e-01, 6.8326e-01, + 5.2657e-01, 8.1226e-01, 5.4544e-01, 9.5973e-01, + 7.5017e-01, 6.9777e-01, 9.4972e-01, 1.5947e-01, + 9.4547e-01, 4.4375e-01, 2.6915e-01, 5.9024e-01, + 6.9379e-01, 2.6997e-01, 8.3665e-01, 8.0079e-01, + 3.0245e-01, 6.2096e-01, 6.5008e-01, 7.6217e-01, + 6.9442e-01, 7.0586e-01, 4.0735e-01, 4.5942e-01, + 8.4771e-01, 6.7411e-01, 6.6768e-01, 2.6609e-01, + 6.2920e-01, 8.1976e-01, 7.6653e-01, 4.2076e-01, + 4.9544e-01, 4.3095e-01, 3.3572e-01, 8.0708e-01, + 3.2921e-02, 8.3263e-02, 1.2540e-01, 4.8972e-01, + 6.9167e-01, 5.8683e-01, 2.7344e-01, 4.9202e-01, + 5.3368e-01, 7.1380e-01, 1.2776e-01, 1.3424e-01, + 2.3664e-01, 8.2030e-01, 8.0897e-01, 9.2853e-01, + 9.9391e-01, 2.6781e-01, 8.5981e-01, 8.7793e-01, + 2.3385e-01, 5.3038e-01, 1.6029e-01, 8.9730e-01, + 5.3098e-03, 5.9810e-01, 1.8878e-01, 4.6584e-03, + 3.4507e-01, 9.1896e-01, 4.7582e-01, 1.8855e-01, + 1.3640e-01, 4.5983e-01, 8.5768e-02, 5.4879e-01, + 2.6936e-01, 8.4450e-01, 7.2920e-02, 1.8512e-01, + 1.2069e-01, 7.6284e-01, 3.9856e-01, 7.4686e-01, + 2.5836e-01, 9.4895e-01, 8.7406e-01, 4.8012e-01, + 6.4741e-01, 4.8078e-02, 3.9278e-01, 6.6585e-02, + 8.4963e-01, 9.0068e-01, 4.2686e-01, 6.0768e-01, + 5.7149e-01, 1.4935e-01, 4.2352e-02, 7.1520e-01, + 3.4158e-01, 5.2219e-01, 4.2486e-01, 4.9195e-01, + 6.0575e-01, 3.8744e-01, 9.3583e-01, 9.5461e-01, + 9.9223e-01, 4.2332e-01, 4.6746e-01, 2.4429e-01, + 6.7972e-01, 1.6418e-01, 7.2066e-01, 2.9051e-01, + 3.9715e-01, 9.4188e-01, 
6.8416e-01, 1.1637e-01, + 1.8471e-01, 6.2789e-01, 6.1511e-02, 5.4907e-01, + 5.8160e-01, 1.2735e-01, 1.2778e-01, 6.3218e-01, + 6.9455e-01, 8.7680e-01, 1.1211e-02, 9.6886e-01, + 6.1382e-01, 7.1922e-01, 9.9704e-01, 2.2893e-01, + 8.3228e-01, 2.0461e-01, 9.8206e-01, 2.9661e-02, + 2.2006e-01, 5.8015e-01, 1.4472e-01, 3.6556e-01, + 4.6818e-01, 5.9563e-01, 3.4451e-01, 5.7371e-01, + 9.7343e-01, 8.5663e-01, 8.0647e-01, 2.9515e-01, + 4.2561e-01, 3.4578e-01, 7.8561e-01, 3.6486e-01, + 2.6098e-01, 3.1347e-01, 4.6913e-01, 6.3644e-01, + 8.1747e-01, 7.9888e-01, 6.3703e-01, 1.9528e-02, + 8.8912e-01, 1.2001e-01, 5.1057e-01, 3.2942e-01, + 7.7753e-01, 9.8504e-01, 1.5403e-01, 4.9710e-01, + 9.3179e-01, 9.0216e-01, 7.6519e-01, 5.0104e-01, + 4.1583e-01, 2.6015e-01, 7.9118e-01, 3.2977e-01, + 9.5088e-01, 7.7098e-01, 5.2764e-01, 3.4649e-03, + 9.8023e-01, 2.2515e-01, 2.6750e-01, 9.5444e-01, + 7.5797e-01, 2.4744e-01, 1.3378e-01, 2.4865e-01, + 5.7225e-01, 4.1671e-01, 3.8611e-01, 6.1987e-01, + 2.3395e-01, 1.2290e-01, 9.6253e-01, 6.6917e-01, + 9.1858e-01, 4.3766e-01, 7.9084e-01, 2.4035e-01, + 9.3945e-01, 4.6980e-02, 8.7737e-01, 4.8172e-01, + 3.4784e-01, 5.1366e-01, 7.4246e-02, 2.7170e-01, + 1.6606e-01, 9.2560e-02, 7.7249e-01, 9.7385e-01, + 4.2946e-01, 2.1819e-01, 6.0570e-01, 6.7881e-01, + 2.9891e-01, 2.3591e-01, 1.1986e-01, 4.0182e-01, + 3.3311e-01, 6.5760e-01, 4.0092e-01, 3.3428e-01, + 3.0436e-01, 3.8298e-02, 7.8331e-01, 8.2341e-01, + 7.2465e-01, 8.8633e-01, 3.2223e-01, 7.2366e-01, + 1.4537e-01, 9.2660e-01, 4.0429e-01, 6.0776e-01, + 5.9607e-01, 5.3746e-01, 2.7986e-01, 5.1694e-01, + 6.8009e-01, 8.0536e-01, 8.6904e-01, 7.9306e-03, + 1.8754e-01, 5.9778e-01, 7.5633e-01, 3.5498e-01, + 8.5091e-01, 2.6029e-01, 9.3134e-01, 9.1663e-01, + 2.7486e-01, 1.5024e-01, 2.1246e-01, 5.3574e-02, + 9.1413e-01, 9.3189e-01, 2.4992e-01, 2.0985e-01, + 2.6185e-01, 3.5681e-01, 6.7411e-01, 4.6985e-01, + 7.9582e-01, 1.1844e-01, 6.8420e-01, 9.7119e-01, + 5.7735e-02, 2.1207e-01, 6.1599e-01, 8.6878e-01, + 1.7741e-01, 4.9707e-01, 8.3678e-01, 2.7851e-01, + 1.8648e-01, 3.7907e-01, 6.1436e-02, 2.3541e-01, + 9.9184e-02, 2.9398e-01, 4.5070e-01, 3.8325e-01, + 3.0780e-01, 5.9145e-01, 3.1728e-01, 3.8761e-01, + 3.1099e-02, 5.0500e-01, 8.8723e-02, 9.7749e-01, + 5.2743e-01, 7.6288e-01, 1.8793e-01, 9.7921e-01, + 2.3544e-01, 8.9800e-01, 1.3632e-01, 8.0262e-01, + 1.1002e-01, 2.6545e-01, 1.7963e-01, 6.4444e-01, + 8.6409e-01, 5.5078e-01, 9.3341e-01, 3.6584e-01, + 5.4052e-01, 9.2209e-01, 8.2597e-01, 1.4435e-01, + 9.2414e-01, 2.3170e-01, 9.5805e-01, 4.5031e-01, + 1.0963e-01, 7.9741e-01, 7.2194e-01, 4.0888e-01, + 6.9907e-01, 2.0338e-01, 4.6063e-01, 7.3418e-01, + 4.0908e-01, 2.1076e-01, 4.9394e-01, 7.3095e-01, + 5.6257e-01, 5.1694e-01, 4.0732e-01, 4.4202e-01, + 3.9726e-01, 8.4443e-01, 6.3177e-02, 6.8296e-01, + 1.8101e-02, 4.3115e-01, 6.7036e-02, 8.9924e-01, + 8.6910e-01, 9.6544e-01, 2.0639e-01, 7.0232e-01, + 9.5863e-01, 8.9233e-01, 6.1123e-01, 8.2955e-01, + 5.0081e-01, 9.2243e-01, 1.0615e-01, 4.6438e-01, + 2.5112e-01, 8.4127e-01, 7.0326e-01, 4.7692e-01, + 2.4126e-01, 6.2701e-01, 4.0637e-02, 6.2064e-01, + 2.1577e-01, 1.3727e-01, 3.4352e-01, 5.5173e-01, + 5.4981e-01, 2.5404e-01, 2.2391e-01, 4.1549e-01, + 9.0643e-01, 1.2852e-02, 4.7026e-01, 2.4272e-01, + 3.1344e-01, 5.7995e-01, 7.0274e-01, 5.4708e-01, + 2.6722e-01, 8.4244e-01, 8.4486e-01, 7.4120e-02, + 4.6878e-01, 4.7483e-01, 2.4536e-01, 3.7943e-01, + 9.7435e-01, 5.0425e-01, 2.7584e-01, 8.3479e-01, + 1.2475e-01, 2.1930e-01, 8.8416e-02, 5.6616e-01, + 8.2163e-01, 8.9859e-01, 6.6747e-01, 8.6213e-01, + 4.1250e-01, 2.7047e-02, 
4.6532e-02, 2.7662e-01, + 4.8968e-01, 2.0572e-01, 1.8243e-01, 2.6597e-01, + 9.6227e-01, 6.3386e-01, 2.2450e-01, 1.6625e-01, + 3.8834e-01, 6.2735e-01, 2.9908e-01, 9.2673e-01, + 1.2484e-01, 2.4485e-01, 2.7378e-01, 7.4354e-01, + 5.8350e-01, 2.2908e-01, 3.5881e-01, 3.6670e-01, + 4.2856e-01, 3.1033e-01, 1.1014e-01, 4.8822e-01, + 3.9354e-01, 8.5040e-01, 8.8043e-01, 7.5289e-01, + 1.1283e-01, 8.7769e-03, 2.2590e-01, 1.1235e-01, + 1.3109e-01, 7.5020e-01, 6.1948e-01, 7.6640e-02, + 1.2627e-01, 9.3323e-01, 6.8595e-01, 8.9559e-04, + 8.2376e-01, 9.6352e-01, 6.3502e-01, 2.2454e-02, + 8.5547e-01, 9.0791e-01, 7.7667e-01, 3.6451e-01, + 3.7978e-01, 2.2107e-02, 6.3255e-02, 1.2361e-01, + 8.4631e-01, 2.9196e-01, 7.5503e-01, 8.6620e-01, + 1.8951e-01, 3.7778e-01, 8.2630e-01, 5.6822e-01, + 8.8886e-01, 4.1011e-01, 6.6500e-01, 2.7268e-01, + 3.3897e-01, 2.8256e-01, 6.2309e-01, 1.6693e-01, + 9.0682e-02, 4.9400e-01, 3.9069e-01, 4.2011e-01, + 7.3933e-02, 8.6066e-01, 6.5882e-02, 2.0986e-01, + 7.7232e-01, 6.6905e-01, 3.4353e-01, 6.4074e-01, + 6.6949e-01, 1.8573e-01, 8.7289e-03, 1.3303e-02, + 3.7874e-01, 1.9783e-01, 8.0846e-01, 1.2415e-01, + 5.4398e-01, 5.5923e-03, 1.1586e-01, 7.5563e-01, + 6.2400e-01, 7.7912e-01, 6.9674e-01, 6.4832e-01, + 2.6663e-01, 5.7342e-01, 8.9855e-01, 2.6327e-01, + 2.5815e-01, 8.4640e-01, 8.9034e-01, 5.2617e-01, + 4.1899e-01, 2.8794e-01, 7.5614e-01, 2.0480e-01, + 3.9531e-01, 3.9802e-01, 4.4215e-02, 5.0609e-01, + 6.1596e-01, 4.5139e-01, 5.2405e-01, 6.8649e-01, + 5.5328e-01, 7.3148e-01, 6.5039e-01, 8.2393e-01, + 2.2245e-01, 7.2356e-01, 5.4437e-01, 6.0669e-01, + 7.1873e-01, 9.3593e-01, 7.7873e-01, 6.1708e-02, + 4.3216e-01, 9.6912e-01, 8.6860e-01, 1.9832e-02, + 3.7569e-01, 1.8863e-01, 3.0588e-01, 9.5695e-01, + 9.5145e-01, 7.3340e-01, 1.2812e-01, 8.3056e-01, + 5.7506e-02, 6.8723e-01, 5.7414e-01, 8.6495e-01, + 9.9606e-01, 5.3592e-01, 6.8730e-01, 1.7371e-01, + 1.6898e-01, 1.7544e-02, 6.5220e-01, 8.8187e-01, + 8.6912e-01, 2.0081e-01, 2.9717e-01, 9.6531e-01, + 4.8634e-01, 1.4913e-01, 8.7862e-01, 4.5351e-01, + 8.7376e-01, 3.7109e-01, 8.0176e-01, 6.4843e-01, + 1.0545e-01, 9.8334e-01, 5.7656e-01, 7.2629e-01, + 2.8377e-01, 5.8040e-01, 8.0468e-01, 1.8276e-01, + 4.6288e-01, 5.1447e-01, 9.1392e-01, 7.7753e-01, + 1.4587e-02, 2.5953e-01, 9.0037e-02, 4.9753e-01, + 4.4106e-02, 3.5476e-01, 6.5408e-02, 1.0350e-01, + 6.0746e-02, 9.6332e-01, 9.0745e-01, 8.1438e-01, + 4.7396e-01, 2.0444e-01, 1.8101e-01, 8.1941e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.8851, 0.6493, 0.1793, ..., 0.2200, 0.7121, 0.0570]) +tensor([0.2083, 0.4700, 0.0751, ..., 0.0945, 0.0306, 0.1951]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -268,271 +375,378 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 0.019997835159301758 seconds +Time: 0.019604206085205078 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52505', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.9391217231750488} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53559', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, 
"MATRIX_DENSITY": 1e-05, "TIME_S": 1.9843134880065918} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5864, 6409, 3268, 9372, 8851, 1349, 7924, 3157, 113, - 9640, 7379, 4185, 6988, 7306, 9094, 3198, 1145, 6328, - 5627, 8866, 1416, 350, 9287, 2849, 3030, 612, 443, - 8128, 5292, 2326, 3640, 1427, 2108, 7969, 9609, 7236, - 449, 1646, 9388, 8979, 3518, 778, 6273, 9500, 148, - 8296, 6022, 1119, 7603, 6748, 5170, 7685, 82, 4394, - 1107, 6222, 8327, 9701, 1319, 6392, 2965, 9029, 77, - 7995, 2221, 994, 2914, 8588, 327, 853, 3415, 919, - 9810, 4408, 2821, 7082, 5424, 5361, 480, 245, 7586, - 5204, 9870, 329, 4522, 6404, 2012, 935, 9448, 4082, - 3761, 8854, 4617, 2954, 1700, 2460, 8360, 6564, 5100, - 3903, 5238, 470, 9756, 8808, 586, 4317, 2554, 4487, - 2746, 991, 8986, 854, 1896, 4114, 4630, 2853, 8275, - 2623, 6046, 9508, 843, 3243, 5691, 444, 3056, 2850, - 3932, 3623, 259, 8407, 4684, 5172, 642, 1359, 2285, - 474, 5560, 5107, 8311, 8207, 4317, 4153, 5714, 4003, - 4520, 3174, 6645, 1349, 9932, 49, 4730, 4119, 9928, - 924, 9713, 1216, 50, 1671, 1300, 8418, 6864, 8358, - 4746, 4247, 7492, 9246, 7928, 4332, 8375, 9814, 9129, - 6833, 517, 824, 8642, 884, 7236, 7434, 7005, 4885, - 1510, 7918, 3471, 5546, 6237, 6012, 5086, 1592, 1134, - 9459, 4579, 5292, 366, 2218, 1740, 6379, 1799, 5994, - 759, 9988, 1345, 1070, 3745, 943, 8461, 5082, 1933, - 443, 7004, 3136, 7242, 6373, 1139, 5779, 3999, 5632, - 9482, 7292, 5796, 4340, 662, 2787, 1120, 4116, 3039, - 1174, 4242, 5490, 1535, 6250, 1065, 51, 3412, 460, - 9840, 5194, 4843, 7403, 7445, 4382, 8727, 8774, 6609, - 3247, 8201, 755, 8291, 3275, 8064, 4388, 6768, 2576, - 7034, 7091, 7595, 8900, 6407, 6040, 2696, 6321, 4834, - 1840, 3927, 864, 6511, 7817, 7242, 3035, 6998, 4134, - 9868, 182, 4140, 6908, 717, 2392, 8434, 3505, 8667, - 4427, 1108, 8007, 8132, 9145, 9203, 8310, 7166, 6141, - 9370, 4104, 9680, 212, 8669, 6370, 4838, 4629, 457, - 90, 2059, 4579, 7993, 5081, 3590, 42, 8026, 1256, - 9078, 2167, 2789, 4777, 2980, 8000, 7728, 4259, 788, - 9381, 4203, 4616, 6598, 6785, 9230, 3073, 7215, 4492, - 5558, 6560, 4239, 6465, 3254, 9443, 396, 9087, 9830, - 2837, 8876, 5671, 8685, 1134, 9981, 4038, 2521, 8217, - 3054, 3580, 3011, 2548, 4358, 9379, 9530, 6078, 4586, - 2961, 7702, 9843, 1093, 5655, 4332, 7551, 7559, 7229, - 9775, 8733, 7492, 4725, 2683, 5859, 6974, 5769, 413, - 7759, 3920, 4343, 8482, 2185, 3683, 5666, 5694, 9484, - 6805, 8125, 6011, 7452, 4625, 1996, 5674, 8346, 9829, - 5348, 6153, 4294, 7632, 675, 4714, 7735, 690, 8377, - 8366, 8956, 6093, 9462, 520, 8335, 6971, 9049, 3198, - 2406, 1525, 3742, 9835, 2100, 6818, 61, 7216, 7574, - 6125, 8245, 226, 3951, 8198, 6726, 1159, 3284, 4090, - 6559, 2427, 2922, 1917, 2184, 8419, 8320, 6172, 7937, - 3032, 5477, 1970, 2511, 9106, 3545, 5702, 8387, 6240, - 4527, 5936, 2989, 4798, 44, 5593, 8588, 9801, 6339, - 270, 4836, 4663, 7863, 5753, 5138, 1468, 2135, 8533, - 8874, 7550, 9948, 511, 8382, 5075, 5125, 621, 8037, - 2970, 7317, 955, 5187, 9266, 8012, 9116, 8287, 5819, - 6798, 6108, 9485, 7294, 6292, 3050, 9766, 3371, 6775, - 8624, 323, 9518, 993, 713, 4232, 4555, 9512, 4219, - 172, 7436, 1908, 9551, 5355, 2034, 7444, 
6222, 5358, - 415, 4523, 8497, 1159, 6546, 2982, 9657, 7284, 2823, - 9948, 1307, 293, 9936, 9908, 9836, 9326, 6563, 2566, - 6336, 7704, 5608, 6306, 9283, 5085, 1829, 8868, 5796, - 2330, 5352, 2073, 2907, 4221, 2317, 4045, 1974, 6528, - 4428, 9165, 9570, 1902, 7934, 3584, 5642, 5226, 6064, - 2401, 627, 4348, 5433, 7193, 9010, 3873, 1074, 3547, - 2036, 1502, 8524, 1516, 8038, 6483, 5421, 9090, 1282, - 6461, 3107, 4786, 5722, 2040, 3948, 1805, 1340, 2167, - 8524, 873, 7912, 9277, 615, 2598, 3584, 5232, 6528, - 4041, 1307, 4984, 7493, 7893, 2441, 8985, 9241, 545, - 8462, 5218, 1131, 1603, 1467, 916, 391, 1880, 6303, - 236, 88, 6641, 9343, 1938, 644, 6243, 6324, 3102, - 4046, 4842, 9256, 877, 1050, 7198, 6510, 675, 7712, - 3992, 5524, 5461, 8463, 2767, 8024, 9124, 3831, 3301, - 5238, 839, 822, 8427, 8192, 7249, 6075, 7734, 8472, - 8865, 4636, 8397, 393, 3339, 9477, 1559, 9233, 1895, - 3219, 9244, 2768, 1620, 5867, 3834, 6459, 6900, 8402, - 4353, 4101, 1060, 3930, 3593, 9426, 100, 2578, 4861, - 5687, 6175, 9073, 9873, 7765, 4191, 8697, 5284, 4715, - 9219, 7611, 5627, 3329, 9064, 6928, 3037, 7722, 7680, - 8059, 8098, 5508, 1795, 6236, 2332, 9755, 2995, 556, - 7522, 7740, 4501, 6741, 6961, 3755, 958, 7285, 897, - 2682, 3633, 9522, 6519, 7381, 4140, 6325, 9071, 7470, - 6696, 5233, 3651, 7869, 774, 543, 8750, 4426, 5429, - 485, 730, 3337, 2506, 4162, 4944, 9298, 3907, 2447, - 8819, 8506, 6050, 3923, 8654, 5401, 6376, 181, 7284, - 5906, 8062, 5293, 5482, 1078, 6794, 7475, 2224, 7260, - 3627, 9570, 135, 9578, 8302, 8006, 6725, 2538, 6437, - 594, 9238, 4645, 7390, 6678, 2155, 7680, 9552, 1670, - 2472, 2676, 5726, 3946, 527, 7726, 2240, 9083, 9592, - 3444, 3546, 376, 9469, 8897, 9577, 5076, 4801, 9727, - 566, 4934, 7459, 7690, 5216, 4131, 7019, 8688, 2923, - 2438, 4950, 4206, 1087, 5226, 3034, 6574, 6662, 9559, - 5466, 9185, 1863, 2990, 6618, 392, 2496, 9534, 5645, - 2208, 4474, 5079, 8377, 5730, 1470, 1051, 9292, 67, - 8958, 5488, 3313, 2560, 3838, 7969, 4761, 7727, 4225, - 3905, 3354, 8114, 5546, 4451, 1620, 530, 3536, 5417, - 292, 5393, 7449, 3344, 2366, 832, 2586, 1114, 1164, - 7745, 1601, 5885, 6819, 9950, 7296, 956, 7785, 751, - 9927, 4784, 6024, 1885, 597, 3327, 6636, 4759, 2636, - 1681, 5642, 9192, 251, 2937, 9361, 5124, 5779, 3731, - 998, 1458, 3334, 3423, 3618, 2367, 2989, 6484, 6596, - 3275, 3629, 1246, 5835, 3014, 480, 3197, 9504, 4739, - 7848, 5807, 5889, 6941, 5807, 5884, 9995, 7302, 9589, - 3118, 8179, 1634, 7475, 493, 2924, 4475, 3955, 9825, - 4774, 5869, 8402, 5728, 9453, 2751, 6979, 8042, 1938, - 255, 5871, 8287, 8146, 4117, 6879, 8378, 1539, 4897, - 6347, 5086, 8422, 8986, 4509, 9294, 2883, 9942, 8480, - 6013, 8174, 977, 1721, 2494, 5501, 2010, 8785, 4090, - 3984, 7019, 8716, 3571, 3228, 7733, 6351, 2624, 2714, - 8580, 9476, 747, 7962, 8294, 3951, 2439, 8475, 4886, - 941, 8870, 8639, 4412, 7879, 4719, 1308, 6188, 3204, - 5487, 2922, 9803, 6526, 1340, 8108, 1809, 5276, 4996, - 2606, 5863, 2844, 128, 5891, 2949, 2522, 2371, 374, - 9131, 1337, 2279, 3177, 3776, 5283, 9934, 312, 8545, - 8088]), - values=tensor([0.8990, 0.9202, 0.0544, 0.3278, 0.1036, 0.9834, 0.5405, - 0.8387, 0.1303, 0.8299, 0.9099, 0.8006, 0.3383, 0.3639, - 0.2173, 0.0563, 0.7372, 0.1968, 0.4499, 0.6369, 0.1211, - 0.5305, 0.7026, 0.0869, 0.4015, 0.0938, 0.8149, 0.6744, - 0.4215, 0.0815, 0.4822, 0.0786, 0.4204, 0.6460, 0.6276, - 0.2883, 0.4330, 0.3983, 0.2719, 0.4502, 0.7063, 0.0086, - 0.6751, 0.5856, 0.5977, 0.3533, 0.7195, 0.3114, 0.7411, - 0.5056, 0.2852, 0.3437, 0.7335, 0.9652, 0.3676, 0.1371, - 0.6895, 0.6801, 
0.1441, 0.6072, 0.2913, 0.9589, 0.4646, - 0.7267, 0.3881, 0.6695, 0.4149, 0.4926, 0.6183, 0.3275, - 0.6292, 0.3672, 0.7396, 0.0629, 0.7870, 0.8240, 0.0651, - 0.7638, 0.1125, 0.7864, 0.0952, 0.8313, 0.9512, 0.1631, - 0.0751, 0.7252, 0.2013, 0.8188, 0.2883, 0.7422, 0.6655, - 0.8235, 0.3703, 0.8712, 0.1832, 0.1385, 0.5564, 0.7498, - 0.7736, 0.6575, 0.7905, 0.1372, 0.1047, 0.2516, 0.5275, - 0.0779, 0.8681, 0.3296, 0.6114, 0.4478, 0.1522, 0.3051, - 0.0405, 0.3528, 0.7471, 0.2608, 0.1704, 0.9466, 0.6985, - 0.0406, 0.9413, 0.2522, 0.5107, 0.2012, 0.1769, 0.8491, - 0.1199, 0.4970, 0.5967, 0.3353, 0.0061, 0.9854, 0.8058, - 0.2915, 0.5621, 0.6851, 0.9985, 0.0085, 0.7012, 0.0603, - 0.8973, 0.4404, 0.2983, 0.8140, 0.0895, 0.2012, 0.7432, - 0.9272, 0.9516, 0.8375, 0.3237, 0.4042, 0.2740, 0.8401, - 0.1138, 0.0969, 0.5800, 0.4304, 0.8447, 0.6543, 0.9229, - 0.0170, 0.9115, 0.8422, 0.3169, 0.2633, 0.5759, 0.5969, - 0.1088, 0.3461, 0.7896, 0.4253, 0.4344, 0.4992, 0.6827, - 0.6879, 0.8758, 0.6928, 0.8708, 0.9897, 0.8788, 0.0817, - 0.7722, 0.0931, 0.3481, 0.9886, 0.8853, 0.2278, 0.8570, - 0.9045, 0.1065, 0.2685, 0.6322, 0.7042, 0.6308, 0.2807, - 0.6931, 0.2248, 0.5412, 0.1164, 0.3317, 0.6726, 0.8543, - 0.5206, 0.9983, 0.7612, 0.9817, 0.2254, 0.2596, 0.3813, - 0.6946, 0.1231, 0.1329, 0.8727, 0.5792, 0.5145, 0.9953, - 0.8468, 0.6893, 0.7309, 0.5070, 0.6686, 0.8055, 0.1055, - 0.9254, 0.4455, 0.4767, 0.6225, 0.0209, 0.2930, 0.1811, - 0.8374, 0.8892, 0.9045, 0.1674, 0.8744, 0.7283, 0.0811, - 0.5372, 0.2055, 0.4944, 0.7620, 0.6061, 0.2733, 0.4358, - 0.1363, 0.3064, 0.0238, 0.7458, 0.8337, 0.8253, 0.5030, - 0.0973, 0.5618, 0.2875, 0.7789, 0.8665, 0.9501, 0.5176, - 0.4426, 0.1897, 0.1486, 0.1669, 0.2509, 0.1659, 0.8053, - 0.4105, 0.5846, 0.9191, 0.5151, 0.0551, 0.8020, 0.3620, - 0.9344, 0.3530, 0.0386, 0.0335, 0.8446, 0.1984, 0.7097, - 0.7277, 0.6649, 0.9046, 0.0203, 0.5170, 0.0550, 0.6834, - 0.5615, 0.5645, 0.5249, 0.9453, 0.7207, 0.2810, 0.7493, - 0.2947, 0.9369, 0.6771, 0.7460, 0.1467, 0.5385, 0.9790, - 0.7366, 0.3428, 0.7251, 0.2418, 0.1147, 0.6978, 0.6208, - 0.9677, 0.2034, 0.6015, 0.7335, 0.4291, 0.0051, 0.0440, - 0.2729, 0.3203, 0.0797, 0.2242, 0.1081, 0.2293, 0.8883, - 0.6170, 0.7219, 0.1224, 0.4373, 0.3181, 0.8085, 0.7996, - 0.6508, 0.7263, 0.9121, 0.5064, 0.0502, 0.9917, 0.9834, - 0.3031, 0.8748, 0.6856, 0.5292, 0.6315, 0.8787, 0.4765, - 0.8102, 0.6878, 0.1287, 0.8970, 0.7946, 0.8583, 0.9049, - 0.0389, 0.0623, 0.5568, 0.6879, 0.2721, 0.1463, 0.8727, - 0.9772, 0.1081, 0.1611, 0.2843, 0.0447, 0.6310, 0.1932, - 0.2621, 0.8047, 0.8238, 0.9649, 0.2488, 0.4610, 0.2602, - 0.8419, 0.7266, 0.3002, 0.7494, 0.6113, 0.1487, 0.3589, - 0.4282, 0.7405, 0.9373, 0.2091, 0.9161, 0.1005, 0.8514, - 0.7238, 0.1078, 0.6520, 0.6311, 0.1891, 0.8272, 0.1963, - 0.8590, 0.8189, 0.0257, 0.2833, 0.3513, 0.3634, 0.3576, - 0.8904, 0.2484, 0.4918, 0.2908, 0.1508, 0.5806, 0.9406, - 0.7294, 0.0950, 0.5064, 0.0349, 0.6437, 0.4083, 0.5026, - 0.6560, 0.0148, 0.4276, 0.3401, 0.2383, 0.0904, 0.7044, - 0.9844, 0.8310, 0.9388, 0.3867, 0.2827, 0.1664, 0.3305, - 0.1891, 0.7274, 0.3593, 0.5517, 0.2275, 0.0974, 0.5619, - 0.4200, 0.3096, 0.0034, 0.0516, 0.4134, 0.2690, 0.2115, - 0.6454, 0.2865, 0.6179, 0.9913, 0.0141, 0.5942, 0.3621, - 0.2100, 0.3445, 0.3904, 0.9909, 0.6731, 0.4570, 0.4820, - 0.5848, 0.2093, 0.4957, 0.8327, 0.2632, 0.6657, 0.2324, - 0.0994, 0.6140, 0.4480, 0.9800, 0.2896, 0.1520, 0.2608, - 0.3853, 0.2094, 0.0521, 0.1625, 0.2360, 0.0706, 0.0520, - 0.9422, 0.5291, 0.5359, 0.8697, 0.5739, 0.8183, 0.5758, - 0.4869, 0.9594, 0.7777, 0.8218, 
0.1785, 0.0020, 0.3345, - 0.9444, 0.5328, 0.4805, 0.9232, 0.6862, 0.9030, 0.9146, - 0.6421, 0.5457, 0.0643, 0.6817, 0.9655, 0.1845, 0.7938, - 0.4551, 0.4198, 0.9446, 0.9759, 0.6259, 0.5381, 0.3308, - 0.3232, 0.5498, 0.8269, 0.1628, 0.3341, 0.1440, 0.8802, - 0.7811, 0.5460, 0.9304, 0.6129, 0.5889, 0.0395, 0.1012, - 0.5055, 0.2108, 0.0596, 0.8797, 0.0924, 0.0840, 0.9035, - 0.4740, 0.3740, 0.2634, 0.4030, 0.2026, 0.3835, 0.9804, - 0.3664, 0.2344, 0.2632, 0.9989, 0.1712, 0.0177, 0.7018, - 0.1117, 0.0373, 0.6412, 0.6706, 0.9767, 0.2936, 0.5259, - 0.5797, 0.5149, 0.1616, 0.5650, 0.5356, 0.0065, 0.9212, - 0.1415, 0.5431, 0.0097, 0.9858, 0.4122, 0.1358, 0.1911, - 0.7329, 0.8335, 0.4353, 0.3789, 0.9018, 0.6853, 0.2444, - 0.9519, 0.7522, 0.6207, 0.8681, 0.9259, 0.8747, 0.9193, - 0.4093, 0.6975, 0.2182, 0.9647, 0.1252, 0.2843, 0.9046, - 0.3990, 0.1488, 0.1784, 0.6775, 0.5082, 0.9612, 0.5896, - 0.0701, 0.8792, 0.5361, 0.2316, 0.0926, 0.9316, 0.9904, - 0.4062, 0.1734, 0.2068, 0.7329, 0.0588, 0.2116, 0.2498, - 0.2860, 0.6263, 0.2277, 0.3363, 0.0558, 0.6334, 0.9261, - 0.3782, 0.1726, 0.3657, 0.2001, 0.2643, 0.5676, 0.4875, - 0.2652, 0.7263, 0.4490, 0.1802, 0.8678, 0.5205, 0.9599, - 0.7949, 0.8153, 0.1755, 0.6089, 0.8090, 0.1965, 0.7501, - 0.8519, 0.9054, 0.1044, 0.7000, 0.2903, 0.5652, 0.3256, - 0.2532, 0.3074, 0.2884, 0.7156, 0.1406, 0.5555, 0.2160, - 0.4498, 0.0747, 0.9689, 0.5720, 0.9390, 0.9625, 0.3014, - 0.2693, 0.5947, 0.5986, 0.9539, 0.5617, 0.3646, 0.1056, - 0.6700, 0.6793, 0.1324, 0.7287, 0.1481, 0.3299, 0.7685, - 0.3760, 0.6705, 0.5083, 0.8665, 0.5329, 0.5057, 0.7230, - 0.2213, 0.3819, 0.8994, 0.9305, 0.0147, 0.6384, 0.9557, - 0.4297, 0.3363, 0.5610, 0.1485, 0.7226, 0.7840, 0.2842, - 0.6194, 0.3137, 0.3575, 0.5156, 0.8823, 0.6332, 0.6090, - 0.2125, 0.2540, 0.8502, 0.5188, 0.0320, 0.4363, 0.2213, - 0.1104, 0.5745, 0.3976, 0.8852, 0.6841, 0.1436, 0.8128, - 0.0664, 0.8122, 0.0275, 0.5139, 0.0401, 0.8774, 0.4004, - 0.6001, 0.5350, 0.9205, 0.6614, 0.3780, 0.2849, 0.3091, - 0.9873, 0.8828, 0.8702, 0.8818, 0.6909, 0.9257, 0.8168, - 0.3636, 0.3185, 0.8926, 0.4749, 0.0620, 0.4189, 0.6713, - 0.8811, 0.0223, 0.4110, 0.0053, 0.9365, 0.2006, 0.8273, - 0.4083, 0.1751, 0.9186, 0.3824, 0.9992, 0.8941, 0.4645, - 0.1171, 0.3220, 0.6118, 0.7887, 0.2217, 0.1825, 0.7475, - 0.9667, 0.5530, 0.3419, 0.9475, 0.0701, 0.5839, 0.3842, - 0.1233, 0.6971, 0.4857, 0.1287, 0.5722, 0.0250, 0.8936, - 0.8207, 0.3129, 0.2199, 0.6123, 0.1672, 0.5414, 0.7945, - 0.0311, 0.1705, 0.9516, 0.1397, 0.2348, 0.9404, 0.3368, - 0.1816, 0.7521, 0.1326, 0.1366, 0.8702, 0.3360, 0.7067, - 0.1910, 0.8902, 0.9378, 0.2610, 0.2939, 0.3489, 0.7916, - 0.4565, 0.4221, 0.7669, 0.2915, 0.7403, 0.1889, 0.8303, - 0.4423, 0.9543, 0.7012, 0.6502, 0.1369, 0.0177, 0.6082, - 0.2162, 0.2686, 0.0179, 0.4480, 0.6701, 0.3740, 0.8426, - 0.1687, 0.0416, 0.5328, 0.3704, 0.6536, 0.4452, 0.6981, - 0.5511, 0.1110, 0.1241, 0.1011, 0.5345, 0.6973, 0.0525, - 0.1417, 0.8094, 0.0874, 0.8494, 0.7134, 0.8426, 0.7518, - 0.9024, 0.4875, 0.9175, 0.0907, 0.4753, 0.1481, 0.7654, - 0.7412, 0.2930, 0.8077, 0.0519, 0.4826, 0.4705, 0.4910, - 0.0307, 0.1733, 0.7764, 0.7803, 0.3800, 0.6955, 0.7178, - 0.6864, 0.9434, 0.4690, 0.9390, 0.5393, 0.6128, 0.4020, - 0.0722, 0.0742, 0.1685, 0.1343, 0.7205, 0.3661, 0.0503, - 0.2629, 0.7318, 0.6532, 0.6350, 0.4666, 0.8599, 0.1969, - 0.7470, 0.5813, 0.2403, 0.3313, 0.8673, 0.0911, 0.1399, - 0.7504, 0.8548, 0.7052, 0.4678, 0.6190, 0.1905, 0.2523, - 0.4053, 0.0730, 0.1745, 0.6615, 0.1279, 0.0102, 0.5054, - 0.2090, 0.7368, 0.7129, 0.6557, 0.6802, 0.2242, 
0.6301, - 0.7717, 0.4921, 0.4267, 0.4852, 0.1080, 0.7094, 0.6823, - 0.5678, 0.2979, 0.7735, 0.4171, 0.3043, 0.4562, 0.9694, - 0.7066, 0.4892, 0.2640, 0.1546, 0.4359, 0.3991, 0.4200, - 0.9092, 0.0011, 0.6541, 0.1216, 0.2866, 0.3196, 0.1896, - 0.5043, 0.0425, 0.9999, 0.4499, 0.5582, 0.8404, 0.9463, - 0.7216, 0.6425, 0.3931, 0.8890, 0.7122, 0.3751, 0.8529, - 0.0063, 0.4366, 0.9377, 0.1575, 0.5576, 0.6903, 0.3497, - 0.5692, 0.9612, 0.7095, 0.9042, 0.6678, 0.8446, 0.3919, - 0.7942, 0.4563, 0.7095, 0.7390, 0.5213, 0.8669, 0.1933, - 0.8827, 0.3576, 0.3715, 0.3966, 0.7670, 0.8625, 0.0249, - 0.4165, 0.2028, 0.9277, 0.8840, 0.7235, 0.4226, 0.0014, - 0.6919, 0.7665, 0.1665, 0.5380, 0.1084, 0.7142]), + col_indices=tensor([7070, 4303, 30, 6140, 8165, 5716, 9546, 2361, 7728, + 2668, 7705, 7757, 2509, 3564, 4029, 5487, 1639, 4111, + 395, 2677, 6135, 9612, 3545, 5846, 3600, 3797, 9227, + 3781, 4132, 5318, 5302, 7796, 3424, 9591, 3065, 6496, + 4323, 1954, 5582, 1053, 6027, 2271, 5701, 9741, 3340, + 2569, 1319, 6485, 3661, 6322, 9764, 1669, 2498, 19, + 7179, 2831, 4106, 9538, 9369, 5015, 9563, 2500, 9760, + 1310, 3338, 4924, 5932, 8828, 9659, 4473, 6113, 5605, + 5394, 2615, 1643, 8398, 8838, 1682, 5685, 2120, 7183, + 9710, 6795, 8908, 7600, 4142, 8344, 6735, 7329, 2764, + 7022, 8916, 3580, 9595, 8020, 8250, 9916, 5616, 7958, + 7659, 4399, 2895, 4372, 8163, 7277, 9684, 8771, 2022, + 56, 5459, 8373, 2942, 141, 5141, 8710, 8011, 529, + 5034, 5883, 7465, 482, 3352, 8460, 9452, 6510, 8966, + 1400, 2714, 3546, 7358, 8733, 5624, 9158, 3980, 7415, + 2820, 5312, 6718, 721, 899, 6328, 8174, 182, 4468, + 111, 8683, 9157, 185, 195, 3788, 2103, 7117, 5739, + 8301, 1401, 1372, 74, 6252, 1003, 5583, 4533, 1353, + 769, 8876, 6684, 948, 6209, 4565, 8011, 2447, 719, + 8445, 2537, 9973, 7746, 4829, 83, 7239, 4681, 6810, + 282, 4623, 1990, 5455, 1003, 2287, 2030, 4927, 506, + 8579, 414, 6365, 286, 7424, 3108, 2033, 3885, 9603, + 1926, 4847, 1592, 7458, 4018, 7757, 2902, 7195, 6994, + 5793, 6437, 4713, 6557, 2930, 5551, 9220, 9302, 4105, + 3955, 2762, 8129, 2564, 7126, 8199, 2540, 4330, 7754, + 1592, 5346, 5424, 2787, 6308, 9050, 7815, 4320, 4652, + 4227, 6632, 4966, 5291, 2925, 9812, 149, 9589, 3393, + 3053, 3134, 4176, 228, 4666, 3256, 6061, 8688, 9027, + 3656, 8009, 8041, 1438, 2792, 4518, 8375, 7928, 7830, + 3184, 9542, 7511, 4063, 4109, 7722, 4162, 2415, 5306, + 7209, 4106, 723, 5365, 4601, 9934, 9033, 8905, 7650, + 9990, 5702, 6486, 2998, 6568, 6682, 9001, 9760, 292, + 4163, 4318, 2849, 3218, 6902, 656, 5306, 6246, 4787, + 5228, 4878, 3093, 7725, 4600, 903, 5027, 638, 3149, + 7102, 8711, 1044, 9183, 747, 4298, 9271, 139, 2843, + 5714, 4421, 672, 6039, 7702, 4473, 264, 9970, 1540, + 1319, 1930, 714, 4720, 4174, 1440, 6448, 3339, 7536, + 6528, 5349, 5157, 626, 51, 315, 8890, 9587, 4498, + 3832, 4974, 6886, 6273, 2151, 3051, 4568, 8325, 6133, + 3975, 6993, 6180, 9545, 224, 1170, 1390, 198, 2578, + 109, 1316, 8375, 437, 8265, 9821, 7958, 4423, 9510, + 435, 3442, 5267, 5488, 9594, 1137, 4663, 9715, 8920, + 6182, 479, 4361, 3297, 2879, 9217, 3878, 4474, 7791, + 9414, 3978, 418, 4182, 4350, 7592, 6955, 1340, 7652, + 6306, 1312, 1936, 8099, 1833, 6504, 6489, 9484, 9796, + 7733, 9771, 3649, 1854, 8574, 1459, 8599, 3960, 9274, + 6927, 8095, 3228, 8141, 6156, 8503, 5605, 4483, 7461, + 7121, 6578, 7077, 9607, 2739, 7309, 7918, 4831, 549, + 7195, 8956, 9425, 2734, 6604, 7513, 9671, 8886, 846, + 5021, 3807, 2441, 3363, 8265, 717, 8948, 4282, 5988, + 8850, 5375, 1856, 1136, 8589, 9424, 7516, 4384, 2764, + 4681, 5937, 9929, 7246, 9512, 
8515, 1815, 1836, 5733, + 4084, 1716, 7159, 6761, 371, 9403, 8477, 4420, 2882, + 5701, 3899, 5752, 8311, 967, 7653, 6907, 8898, 8103, + 2599, 8232, 2658, 7698, 9212, 6758, 2599, 3897, 1947, + 3195, 4670, 3024, 9931, 2035, 1232, 4456, 1993, 7910, + 4973, 2110, 5088, 6775, 4567, 4753, 8270, 2906, 352, + 2550, 3500, 8856, 5083, 2103, 1377, 5679, 9545, 8970, + 4402, 3435, 7943, 8162, 2586, 8015, 9237, 5831, 3233, + 8739, 5490, 6753, 4836, 5583, 1717, 1010, 3287, 67, + 7889, 101, 1455, 9717, 8992, 2170, 6597, 4197, 2931, + 4801, 2502, 18, 870, 3108, 1729, 7940, 3694, 3036, + 9492, 6610, 5217, 1526, 8755, 1450, 5869, 5827, 6528, + 5856, 5170, 2875, 4672, 5541, 1677, 5513, 1624, 2842, + 6653, 6890, 3301, 2648, 859, 2320, 7764, 4368, 433, + 3381, 2594, 4984, 2111, 6734, 5887, 6627, 9334, 5071, + 9632, 6431, 455, 7257, 7626, 471, 5581, 5381, 401, + 9525, 2927, 6327, 321, 1438, 831, 2485, 1903, 2610, + 1687, 9306, 3924, 947, 7162, 4648, 380, 6926, 6438, + 4621, 3250, 1403, 4669, 9465, 4064, 9408, 4565, 3741, + 6592, 7899, 1416, 3361, 920, 1779, 3076, 3866, 3858, + 4050, 428, 3812, 2868, 744, 6950, 9268, 878, 4911, + 3948, 5564, 2876, 5596, 189, 1270, 3048, 8376, 4367, + 5000, 1177, 4701, 7194, 1070, 16, 7215, 1221, 3436, + 5354, 606, 6824, 1271, 203, 4201, 7289, 6032, 7308, + 7670, 8671, 2332, 8806, 8584, 3506, 391, 6250, 9866, + 3870, 9836, 5340, 9453, 1263, 3940, 5653, 9499, 6319, + 1612, 1446, 6327, 9254, 3095, 7681, 224, 62, 2754, + 5696, 8834, 2165, 6120, 5838, 2684, 2231, 9309, 3576, + 9331, 256, 3508, 4527, 4131, 2811, 8590, 4699, 1787, + 562, 1768, 3656, 7614, 3020, 2783, 8185, 2615, 4735, + 1501, 1606, 9717, 6777, 878, 6462, 2975, 1877, 1524, + 1297, 7422, 7510, 2900, 2747, 3385, 4110, 8229, 1220, + 9839, 7552, 9119, 6715, 9805, 9991, 1260, 7843, 7914, + 6130, 2664, 9347, 8126, 459, 8622, 9475, 3120, 1198, + 1560, 9385, 3276, 802, 4984, 2935, 3108, 5014, 8231, + 1729, 9663, 721, 8079, 5397, 7927, 659, 2477, 2555, + 9155, 9052, 6211, 4375, 8688, 9173, 6249, 3439, 9060, + 6387, 6997, 3394, 5967, 8423, 230, 3313, 9765, 7617, + 5123, 1735, 3447, 6078, 7676, 7929, 1599, 1588, 4326, + 1794, 658, 1104, 2211, 6716, 2007, 2557, 3240, 341, + 6032, 4423, 1988, 9267, 4992, 3982, 6746, 2922, 8611, + 7561, 1619, 7226, 4881, 9112, 1455, 1303, 7041, 3806, + 2437, 1566, 4173, 4567, 6966, 2942, 6502, 9595, 3976, + 3845, 7093, 964, 9688, 6780, 3168, 2090, 6690, 6435, + 413, 1440, 8131, 4329, 3875, 2580, 7500, 2564, 3964, + 4467, 7560, 4000, 4748, 730, 4588, 293, 3366, 2628, + 9563, 526, 6462, 659, 5768, 3521, 3557, 1134, 4324, + 2351, 8191, 7674, 3791, 5256, 8793, 6216, 5452, 7303, + 8334, 4162, 6587, 8263, 7539, 5200, 970, 3987, 6056, + 5729, 7595, 6960, 8947, 4408, 2718, 3694, 8820, 4994, + 8080, 407, 9139, 8542, 4218, 3848, 9680, 3869, 3362, + 3570, 9356, 1324, 2307, 3654, 5529, 7174, 1640, 5790, + 3038, 1603, 6400, 7917, 2547, 4234, 9921, 6889, 9302, + 1052, 8255, 1727, 8424, 6829, 4302, 8679, 3688, 4766, + 49, 8636, 6227, 142, 4421, 9751, 3039, 741, 4706, + 7619, 5494, 5686, 5031, 6845, 8995, 1803, 274, 8208, + 2723, 6258, 876, 5760, 492, 3804, 5659, 4391, 7186, + 2416, 5360, 19, 3684, 3009, 3081, 4563, 568, 4546, + 31, 2381, 4556, 1472, 7586, 4770, 3982, 7846, 1162, + 9961, 8838, 5913, 54, 5314, 7843, 1460, 5788, 3687, + 6500]), + values=tensor([2.6323e-01, 5.9064e-01, 9.2113e-01, 3.5602e-01, + 7.5928e-01, 8.1196e-01, 4.5852e-01, 6.0951e-01, + 8.5070e-01, 7.4187e-01, 3.3409e-02, 5.6040e-02, + 2.5565e-01, 3.6527e-01, 9.4510e-01, 4.8110e-01, + 3.8117e-01, 5.5755e-01, 6.1278e-01, 5.6941e-01, + 9.8326e-04, 
8.7947e-01, 8.1587e-01, 6.0159e-01, + 7.6928e-01, 9.0856e-01, 1.9378e-01, 5.4678e-02, + 9.2935e-01, 2.4953e-01, 5.3792e-01, 1.7683e-01, + 2.8823e-01, 7.4264e-01, 9.0193e-01, 8.0007e-01, + 1.8837e-01, 7.3336e-01, 5.4176e-01, 6.6994e-01, + 6.8270e-01, 2.2645e-01, 5.9691e-01, 9.8463e-01, + 1.7675e-01, 5.0532e-01, 5.0585e-01, 7.2552e-01, + 1.0841e-01, 4.3572e-01, 7.7581e-02, 2.6427e-01, + 4.9499e-01, 8.9863e-01, 3.0780e-01, 6.1211e-01, + 6.1033e-01, 6.1885e-01, 5.4323e-01, 1.8041e-01, + 3.6728e-01, 9.3753e-01, 2.3198e-01, 3.1814e-01, + 8.2540e-01, 9.6570e-01, 7.2137e-01, 6.3102e-01, + 1.9107e-01, 7.0570e-01, 9.1832e-02, 9.2059e-02, + 4.8119e-02, 8.1947e-01, 1.1806e-01, 9.9271e-01, + 6.2592e-01, 8.4736e-01, 3.7694e-02, 9.1414e-02, + 8.2024e-01, 7.3359e-01, 9.1317e-01, 2.3271e-01, + 7.3303e-02, 2.9336e-01, 8.6365e-01, 8.8978e-02, + 1.0304e-01, 8.0234e-02, 7.1249e-01, 8.9492e-01, + 7.0308e-01, 9.2416e-01, 9.2024e-01, 7.5371e-01, + 4.7071e-01, 6.2809e-01, 4.2348e-01, 3.3357e-01, + 6.3653e-01, 2.2434e-01, 6.1182e-01, 4.2798e-01, + 7.2921e-01, 7.2499e-01, 3.8483e-01, 8.3103e-01, + 7.6389e-01, 8.1513e-01, 4.5590e-01, 8.7902e-01, + 8.2439e-01, 4.4479e-01, 1.4675e-01, 7.5621e-01, + 5.5063e-01, 9.6048e-01, 9.5017e-01, 6.6862e-01, + 5.2760e-01, 9.8118e-01, 2.9177e-01, 3.8624e-01, + 6.4213e-01, 1.9834e-01, 8.4162e-01, 2.6099e-01, + 8.0948e-01, 8.4536e-01, 3.1920e-01, 3.6707e-01, + 2.9478e-01, 2.3611e-01, 9.5343e-01, 2.9619e-01, + 9.2318e-01, 9.6788e-01, 8.4428e-01, 4.2461e-01, + 7.6322e-01, 8.1130e-01, 4.0988e-01, 8.8354e-01, + 4.1530e-01, 6.1901e-02, 8.6041e-01, 9.0191e-01, + 6.6342e-01, 4.0534e-02, 2.9481e-01, 8.6811e-01, + 5.2143e-02, 6.1739e-01, 5.9476e-01, 5.1674e-01, + 9.7473e-01, 1.8936e-01, 1.7965e-01, 9.9416e-01, + 1.6509e-01, 7.3065e-01, 2.1445e-01, 4.1067e-01, + 8.8808e-01, 6.2920e-02, 2.2626e-01, 6.6771e-01, + 6.6055e-01, 4.5807e-01, 5.8521e-01, 4.6117e-01, + 9.0407e-01, 1.8381e-01, 3.5627e-01, 5.4640e-02, + 6.3784e-01, 4.3416e-01, 6.8177e-01, 3.2768e-01, + 4.8324e-01, 2.4180e-01, 6.2273e-01, 9.4934e-01, + 7.2532e-01, 3.9712e-01, 1.5456e-02, 1.6711e-01, + 5.9428e-03, 9.0878e-01, 2.7084e-01, 9.4166e-01, + 6.4379e-01, 2.8593e-01, 7.5657e-01, 9.6872e-01, + 5.2819e-01, 7.2828e-01, 4.1676e-01, 6.5033e-03, + 9.0910e-01, 8.8705e-01, 8.9078e-01, 4.2741e-01, + 6.7433e-01, 6.2646e-01, 3.2398e-01, 2.0928e-01, + 7.1072e-01, 6.5532e-01, 9.1913e-01, 9.1844e-01, + 3.2147e-02, 5.5895e-02, 1.2570e-01, 8.5811e-01, + 8.3013e-01, 1.6590e-01, 6.2056e-01, 4.1354e-01, + 7.5978e-01, 4.2153e-01, 1.0821e-01, 9.9146e-01, + 3.4691e-02, 8.8363e-01, 5.0765e-01, 4.1402e-01, + 1.8928e-01, 1.9073e-01, 7.7011e-01, 3.2288e-01, + 9.5446e-01, 1.2415e-01, 2.5805e-01, 1.5897e-01, + 3.2138e-01, 3.8675e-01, 1.9095e-01, 9.5518e-01, + 3.2847e-02, 5.0929e-01, 9.3507e-01, 2.8246e-01, + 5.7539e-01, 4.6309e-02, 9.6261e-01, 3.9838e-01, + 3.7445e-01, 7.8893e-01, 8.3410e-01, 8.5709e-01, + 6.3958e-01, 1.9724e-02, 5.9069e-01, 8.1636e-01, + 3.9153e-01, 5.7215e-01, 6.1253e-01, 9.1220e-01, + 9.3251e-01, 9.9303e-01, 2.2965e-01, 2.6216e-01, + 7.4231e-01, 4.8424e-01, 1.4939e-01, 1.5502e-01, + 1.6663e-01, 6.3515e-01, 3.5129e-01, 9.5162e-01, + 7.1114e-02, 4.9915e-01, 6.9538e-01, 8.2941e-01, + 9.1149e-01, 4.1110e-01, 1.8934e-01, 8.9016e-01, + 7.4965e-01, 4.2804e-01, 8.5503e-01, 5.3179e-01, + 5.9156e-01, 4.0576e-01, 5.7152e-01, 2.7295e-01, + 6.0361e-01, 2.5381e-01, 7.2378e-01, 7.0891e-01, + 3.7451e-01, 2.6627e-01, 7.5262e-01, 5.5518e-01, + 1.8245e-01, 4.0222e-01, 6.7131e-01, 5.2015e-01, + 5.6811e-01, 9.4822e-01, 1.6267e-02, 5.9174e-01, + 2.0459e-01, 
3.0768e-01, 9.0677e-01, 1.5176e-01, + 5.7172e-01, 4.0649e-01, 8.1773e-01, 6.8339e-01, + 2.2663e-01, 1.9697e-01, 9.2934e-01, 3.9416e-02, + 3.3446e-01, 1.1562e-01, 2.0085e-01, 7.1360e-01, + 2.6140e-01, 3.9061e-01, 6.5556e-01, 9.9056e-01, + 9.9322e-02, 5.8889e-01, 1.3832e-01, 8.4681e-01, + 5.3057e-01, 8.8611e-01, 9.6888e-01, 1.5212e-01, + 5.9521e-01, 8.0598e-01, 9.1031e-01, 9.4253e-01, + 3.8863e-01, 7.9555e-01, 9.3360e-01, 5.3999e-01, + 6.2922e-01, 6.4757e-01, 7.3251e-01, 8.3641e-01, + 6.9207e-01, 6.5581e-01, 8.0745e-01, 5.0286e-01, + 2.3545e-01, 2.1849e-01, 5.1885e-01, 6.0518e-01, + 6.6713e-01, 7.8717e-01, 1.1748e-01, 9.9495e-01, + 3.7258e-01, 6.8234e-02, 8.0832e-01, 2.5174e-01, + 7.4324e-01, 9.6493e-01, 2.4794e-01, 1.9813e-01, + 3.8410e-01, 2.3972e-02, 8.3988e-01, 1.1378e-01, + 2.1035e-01, 7.8378e-01, 8.8918e-01, 1.9141e-01, + 3.9700e-01, 4.0336e-01, 9.0456e-01, 8.9094e-01, + 1.9383e-01, 2.2907e-01, 3.4917e-01, 6.3029e-01, + 3.6044e-01, 2.4280e-01, 7.3382e-01, 3.2984e-01, + 1.7169e-01, 9.2602e-01, 2.3531e-02, 7.8988e-01, + 4.0491e-01, 2.2814e-01, 3.6633e-02, 7.2812e-01, + 3.4004e-01, 2.9578e-01, 5.5942e-01, 6.5525e-01, + 1.0345e-01, 8.6647e-02, 5.1039e-01, 8.2310e-01, + 8.6722e-01, 3.6923e-01, 6.6100e-01, 8.0021e-01, + 3.5240e-01, 5.0724e-01, 3.7390e-01, 6.6971e-01, + 1.8502e-01, 8.7813e-01, 6.7314e-01, 4.0808e-03, + 4.6333e-01, 6.5945e-01, 2.7101e-01, 8.4879e-01, + 5.6339e-01, 2.5897e-01, 7.0593e-01, 1.7531e-01, + 7.5607e-01, 2.5736e-01, 3.7837e-01, 5.0295e-01, + 7.1970e-01, 5.1676e-01, 1.7540e-01, 4.9402e-01, + 7.7209e-01, 3.6090e-01, 1.3235e-01, 4.8928e-03, + 3.2360e-01, 2.6725e-01, 5.1441e-01, 2.9089e-01, + 8.8060e-01, 1.6678e-01, 9.9961e-02, 8.0587e-01, + 1.8330e-01, 8.1214e-01, 9.0511e-01, 7.6627e-01, + 7.5011e-01, 3.1488e-01, 2.7361e-01, 5.6185e-01, + 7.9176e-01, 7.1045e-01, 5.3838e-01, 9.0507e-01, + 2.2215e-01, 1.2614e-01, 7.2523e-01, 7.0649e-01, + 6.6148e-01, 3.3575e-01, 9.5532e-01, 4.6804e-01, + 3.4270e-01, 8.7847e-01, 6.5537e-01, 5.2932e-01, + 4.5854e-01, 2.6161e-01, 9.2422e-01, 8.6272e-01, + 8.8650e-01, 7.2689e-01, 8.9207e-02, 4.7839e-01, + 3.9453e-01, 5.7669e-01, 7.3027e-01, 3.8739e-01, + 4.2450e-01, 2.8804e-01, 4.5913e-02, 2.4710e-01, + 7.1872e-01, 5.6274e-01, 5.7938e-02, 2.9191e-01, + 2.0342e-01, 3.5117e-01, 9.9620e-01, 2.0829e-01, + 4.5984e-01, 3.5014e-01, 9.8624e-01, 3.1572e-01, + 5.1171e-01, 3.2401e-01, 1.2192e-01, 4.7155e-01, + 2.7406e-01, 7.8893e-01, 2.9044e-01, 9.3040e-01, + 2.8891e-01, 2.3648e-01, 8.4020e-01, 2.7099e-01, + 5.4270e-01, 2.2390e-01, 9.8458e-01, 8.3439e-02, + 6.2152e-01, 8.1786e-01, 9.8059e-01, 5.5461e-01, + 5.0686e-02, 3.1071e-01, 5.1924e-01, 6.0275e-01, + 2.1541e-01, 9.6955e-01, 6.5271e-01, 2.2737e-01, + 6.1307e-01, 5.1972e-01, 3.1222e-01, 1.2596e-01, + 1.4553e-01, 1.2526e-01, 5.0121e-01, 4.3297e-01, + 6.7800e-01, 3.3306e-01, 9.6370e-01, 2.3793e-01, + 5.9628e-01, 7.8668e-01, 5.3250e-01, 4.5035e-01, + 9.0206e-01, 9.6122e-02, 1.3170e-01, 1.5704e-01, + 7.3097e-01, 2.9697e-01, 6.1392e-02, 4.5957e-01, + 1.1589e-01, 1.3098e-01, 3.3304e-01, 1.7764e-01, + 1.1203e-01, 9.6752e-01, 5.5357e-01, 2.8261e-01, + 9.7572e-01, 6.5853e-01, 9.9550e-01, 4.2759e-01, + 6.8050e-01, 9.8751e-01, 7.1102e-01, 5.7009e-01, + 3.1431e-01, 3.8099e-01, 6.7363e-01, 1.1921e-01, + 3.4082e-02, 2.8258e-01, 1.6997e-01, 5.9200e-01, + 4.4348e-01, 7.0273e-01, 3.3517e-01, 6.1549e-01, + 6.2383e-01, 9.0674e-01, 1.8446e-01, 2.0850e-01, + 3.0759e-01, 1.2192e-01, 2.9579e-01, 4.7917e-01, + 4.4101e-01, 6.6565e-01, 2.0656e-01, 1.5511e-01, + 3.6218e-01, 3.6864e-01, 3.5122e-01, 4.6461e-01, + 9.3008e-01, 
7.3534e-01, 1.1979e-01, 3.2184e-01, + 1.4926e-01, 2.7902e-01, 7.4327e-01, 1.2136e-01, + 1.9447e-01, 2.4156e-01, 3.0273e-02, 5.6107e-01, + 4.1600e-01, 4.2695e-01, 6.6274e-01, 6.9013e-02, + 6.1025e-02, 1.4584e-01, 3.1653e-02, 7.7372e-01, + 2.6253e-01, 3.9092e-01, 4.0239e-01, 6.4574e-01, + 8.3957e-01, 5.5729e-01, 7.8307e-01, 7.3607e-01, + 9.5113e-01, 2.3078e-01, 3.7926e-01, 3.3216e-01, + 7.9429e-01, 5.6221e-01, 8.4628e-01, 3.4140e-01, + 3.0916e-01, 4.4401e-01, 4.6810e-01, 9.9337e-01, + 8.8369e-01, 5.2596e-01, 4.9813e-01, 4.8659e-01, + 7.4661e-01, 5.2145e-01, 9.8422e-01, 8.4564e-01, + 3.1054e-01, 6.7849e-01, 4.8086e-01, 6.9024e-01, + 4.9917e-01, 1.7504e-01, 2.1010e-01, 1.9632e-01, + 7.1448e-01, 5.8670e-02, 2.9293e-01, 9.4095e-01, + 7.9720e-01, 6.6783e-01, 8.3928e-01, 3.6817e-01, + 7.9069e-01, 7.1315e-01, 6.6186e-01, 8.0575e-01, + 3.2239e-01, 5.0355e-02, 8.3496e-01, 7.1868e-01, + 3.0609e-01, 2.1759e-01, 6.9798e-02, 8.2039e-01, + 1.9427e-01, 5.8426e-01, 7.7187e-01, 9.9720e-01, + 9.2211e-02, 5.1091e-01, 3.0468e-02, 1.7230e-01, + 6.9290e-01, 5.0320e-02, 9.2258e-01, 1.3032e-01, + 7.0976e-01, 4.8315e-01, 4.3214e-01, 8.4174e-01, + 5.2627e-01, 1.2074e-01, 6.1940e-01, 4.1804e-01, + 4.4268e-01, 8.3305e-01, 5.1543e-01, 2.5729e-01, + 8.9022e-01, 8.3204e-01, 9.7044e-01, 1.3198e-01, + 9.2212e-01, 6.4716e-01, 5.8442e-01, 2.8431e-01, + 3.4763e-01, 5.3179e-01, 3.9334e-01, 2.8458e-01, + 9.0914e-01, 6.9390e-01, 9.7107e-01, 5.6895e-01, + 3.6951e-01, 1.4276e-01, 4.9896e-01, 2.4794e-01, + 5.2121e-01, 8.7271e-01, 8.3730e-01, 8.1365e-01, + 4.1485e-01, 2.2784e-01, 5.3002e-01, 8.9232e-02, + 9.8488e-01, 8.0517e-01, 3.8821e-01, 7.9780e-03, + 4.5837e-01, 9.5778e-01, 4.4582e-01, 3.3423e-01, + 9.1066e-01, 4.2219e-01, 8.5438e-01, 5.5892e-01, + 8.4537e-01, 9.0358e-01, 3.0955e-02, 2.6853e-01, + 3.2134e-01, 7.8042e-01, 8.1099e-01, 3.6629e-01, + 8.9169e-01, 7.9463e-01, 8.6648e-01, 7.1473e-01, + 8.6021e-01, 2.1439e-01, 1.9996e-01, 8.6978e-01, + 8.1592e-01, 9.2105e-01, 6.5857e-01, 5.1987e-01, + 8.2482e-01, 9.9936e-02, 1.4519e-01, 4.4654e-02, + 8.6988e-01, 1.8572e-01, 9.4525e-01, 9.3763e-02, + 7.7876e-01, 8.4409e-01, 3.6276e-01, 6.9453e-01, + 3.7378e-01, 6.9578e-01, 1.3419e-01, 9.3736e-02, + 6.2823e-02, 9.9902e-01, 4.1299e-01, 9.6991e-01, + 4.1692e-01, 1.3083e-01, 9.6337e-01, 1.2444e-02, + 8.3134e-01, 8.8623e-01, 6.2998e-01, 3.1969e-01, + 3.7569e-01, 4.1289e-01, 2.2976e-01, 8.7989e-01, + 3.5329e-01, 6.5094e-01, 1.5678e-01, 6.6756e-01, + 4.4344e-01, 4.6699e-01, 2.4389e-01, 3.4120e-01, + 2.0559e-02, 5.3702e-01, 7.6654e-01, 7.4999e-01, + 5.9475e-01, 7.0257e-01, 9.2983e-01, 6.4260e-02, + 7.2505e-01, 1.3076e-01, 3.5189e-01, 6.8845e-02, + 6.1744e-01, 2.6189e-02, 6.8911e-01, 8.7981e-01, + 3.0401e-01, 2.2444e-01, 4.4883e-01, 2.7491e-01, + 3.8558e-01, 4.4656e-01, 6.7298e-01, 4.6599e-01, + 3.2320e-01, 1.3879e-01, 6.0868e-01, 2.2983e-01, + 5.3204e-01, 7.4311e-01, 4.1622e-01, 9.8782e-01, + 2.5777e-01, 4.8339e-01, 9.3845e-01, 8.5379e-01, + 2.8143e-01, 2.6152e-02, 3.1268e-01, 7.7890e-01, + 4.1663e-01, 7.9994e-01, 9.9649e-01, 2.2744e-02, + 3.5380e-01, 3.7860e-01, 7.6731e-01, 7.5947e-01, + 6.5951e-01, 5.7153e-01, 4.3049e-01, 5.1786e-01, + 2.9065e-01, 1.9808e-01, 2.7150e-01, 7.8316e-01, + 4.0717e-01, 3.9061e-01, 4.9553e-01, 6.7805e-01, + 6.1461e-01, 7.0809e-02, 1.0322e-02, 2.0232e-01, + 4.7916e-01, 7.8687e-01, 7.3566e-01, 6.4209e-01, + 1.6484e-01, 7.1247e-02, 5.3883e-01, 1.6025e-01, + 2.9154e-01, 4.7825e-01, 1.0995e-02, 8.7523e-01, + 1.2049e-01, 6.5344e-01, 8.1867e-01, 3.3407e-01, + 6.4309e-01, 2.4093e-01, 7.8101e-01, 5.3873e-01, + 8.9209e-01, 
9.2470e-01, 1.2978e-01, 6.8518e-01, + 8.1776e-01, 2.1991e-01, 1.1209e-01, 8.0981e-01, + 5.3198e-02, 5.1139e-01, 5.8401e-01, 7.5467e-01, + 5.6133e-01, 9.4854e-01, 4.7382e-01, 2.1597e-01, + 9.5084e-02, 9.1941e-01, 8.8481e-01, 6.4052e-01, + 4.6483e-01, 3.1883e-01, 4.1136e-01, 1.0527e-01, + 6.3713e-02, 4.3019e-02, 7.2539e-02, 8.1448e-01, + 7.1445e-01, 2.7669e-01, 9.5210e-01, 1.4970e-02, + 9.8966e-01, 1.4363e-01, 4.3587e-01, 7.1032e-01, + 7.4502e-01, 2.9392e-02, 6.9645e-01, 5.2560e-01, + 4.9391e-01, 9.7982e-01, 3.3418e-01, 4.6172e-01, + 5.3484e-01, 9.2299e-01, 2.6014e-01, 1.6831e-01, + 8.4794e-01, 7.2828e-01, 9.2751e-01, 6.4563e-01, + 3.1795e-01, 7.5344e-01, 1.6301e-01, 3.9164e-01, + 9.3245e-01, 2.0745e-01, 2.7880e-01, 7.1191e-01, + 9.8985e-01, 4.4614e-01, 6.1979e-01, 9.1036e-01, + 4.3082e-01, 1.9713e-01, 4.5188e-01, 7.1673e-01, + 7.3569e-01, 1.2598e-01, 6.0377e-01, 3.3133e-01, + 9.8086e-01, 9.2322e-01, 8.5819e-01, 3.9051e-01, + 9.4058e-01, 1.0513e-01, 8.0964e-01, 7.5587e-01, + 5.3408e-01, 8.2132e-01, 5.1620e-01, 1.4013e-01, + 7.4231e-01, 8.8837e-01, 8.7068e-01, 8.1306e-02, + 8.5143e-01, 6.3195e-02, 9.3111e-01, 9.6240e-01, + 1.0701e-01, 2.0564e-02, 1.8246e-01, 8.1303e-01, + 5.7808e-01, 7.1660e-01, 1.5880e-01, 7.1361e-01, + 8.3887e-01, 7.3088e-01, 4.4649e-01, 5.7605e-01, + 1.6507e-01, 4.8965e-01, 2.7443e-01, 7.1315e-01, + 1.2071e-02, 8.8012e-02, 6.5598e-01, 2.8757e-01, + 1.4734e-01, 2.2309e-01, 6.7840e-01, 9.5377e-01, + 5.9396e-01, 1.8746e-01, 4.6977e-01, 3.5594e-01, + 7.3797e-01, 7.8436e-01, 9.0798e-01, 6.7484e-01, + 1.9852e-01, 7.9404e-01, 2.0545e-02, 2.0999e-01]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.5397, 0.8345, 0.2583, ..., 0.2923, 0.3741, 0.4815]) +tensor([0.2091, 0.1216, 0.0010, ..., 0.3031, 0.8707, 0.7868]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -540,378 +754,271 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 1.9391217231750488 seconds +Time: 1.9843134880065918 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '284305', '-ss', '10000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.350545883178711} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '283407', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.425815343856812} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5455, 5592, 4109, 222, 9693, 3577, 9334, 9406, 2137, - 1799, 7682, 5442, 9556, 3008, 6623, 6748, 1968, 1806, - 8890, 8613, 4077, 4885, 7962, 302, 1013, 8955, 2558, - 3711, 9681, 9593, 3121, 9869, 3177, 5155, 4286, 6364, - 2440, 3983, 2281, 3730, 740, 3324, 1186, 4197, 4183, - 3787, 1237, 618, 3728, 4305, 7249, 7969, 8150, 5074, - 336, 432, 1883, 5788, 1998, 9882, 8337, 8410, 6941, - 4025, 3080, 9432, 7607, 2625, 8188, 4417, 71, 2070, - 1396, 3989, 9431, 872, 4755, 1177, 5104, 5, 1671, - 5203, 8385, 3215, 9836, 5654, 6123, 9005, 9573, 3860, - 3248, 2894, 9896, 9991, 4949, 9, 1784, 5266, 2022, - 6095, 1517, 7556, 4941, 521, 2910, 5575, 7555, 1842, - 2590, 9377, 4113, 4907, 8259, 6209, 7070, 152, 6147, - 6757, 41, 774, 7469, 8152, 8924, 5911, 4839, 3386, - 4880, 5093, 7504, 578, 6341, 2413, 9223, 2943, 8245, - 3762, 1885, 3988, 7760, 6659, 2327, 8319, 4889, 2086, - 6582, 2999, 7225, 5620, 5397, 6166, 1192, 9054, 1944, - 8586, 7428, 4653, 1578, 8066, 4063, 6909, 8387, 9373, - 1779, 2964, 5753, 4702, 6011, 649, 4878, 6031, 3620, - 4368, 8038, 9222, 6809, 4045, 7946, 9720, 2447, 8709, - 5211, 3070, 483, 7264, 6025, 5336, 6583, 193, 3061, - 7753, 6937, 7842, 2405, 5951, 5344, 4719, 6714, 6154, - 7382, 3202, 1630, 3013, 8617, 6912, 3313, 4097, 2564, - 7916, 1190, 6815, 3429, 4803, 8583, 6446, 9482, 7512, - 5802, 3394, 6637, 5002, 8800, 9545, 1806, 7825, 5873, - 6547, 42, 3341, 4336, 2945, 8309, 6317, 8694, 7310, - 861, 6529, 9657, 2013, 7583, 4059, 8819, 8573, 3062, - 4530, 3219, 6965, 7043, 4000, 1751, 3453, 7507, 9158, - 8456, 5641, 2323, 894, 6849, 2354, 8414, 9263, 340, - 7205, 5325, 7515, 2661, 4262, 8481, 6503, 8559, 631, - 4284, 892, 3568, 2478, 5313, 8074, 1189, 3638, 1570, - 2900, 1226, 7729, 8931, 3902, 2531, 9721, 4451, 7093, - 6369, 3584, 8858, 4594, 9711, 2895, 8279, 6635, 5468, - 1625, 2878, 183, 9364, 8049, 8688, 3754, 9500, 5156, - 8141, 1233, 6270, 9152, 9609, 8833, 9689, 6804, 6879, - 8482, 5351, 2008, 5339, 174, 2631, 2752, 9101, 4250, - 1959, 1601, 8705, 8321, 4156, 2212, 1758, 6719, 390, - 8271, 8873, 6936, 6913, 6183, 6416, 6740, 9148, 7993, - 6077, 7025, 1434, 5450, 4696, 8201, 4111, 5108, 3263, - 4376, 992, 3001, 3573, 2771, 1330, 3404, 1985, 1213, - 7105, 2969, 1853, 4254, 2574, 8578, 3726, 9824, 8756, - 9790, 4725, 6571, 1525, 4226, 397, 3148, 8143, 1081, - 9319, 954, 8043, 2951, 6714, 1546, 7498, 5572, 6847, - 373, 6338, 6704, 6136, 6912, 8385, 6773, 2897, 1857, - 7837, 5293, 883, 3465, 2333, 2706, 2793, 8339, 9030, - 170, 6025, 238, 5536, 326, 7382, 5214, 9691, 1226, - 6312, 9099, 7743, 9256, 7826, 3961, 7289, 2810, 2889, - 5530, 8793, 1729, 9330, 6285, 259, 2418, 2723, 7786, - 9499, 1444, 3444, 9403, 4786, 2961, 3253, 210, 3033, - 4287, 1941, 2567, 5596, 8952, 1045, 8409, 1209, 2289, - 4009, 2954, 8195, 2521, 9908, 6497, 2176, 8245, 1878, - 2833, 2015, 1191, 8989, 9949, 1659, 7998, 6449, 3287, - 4617, 3730, 4250, 2845, 7685, 2587, 570, 2759, 5601, - 1122, 4194, 6085, 3538, 2068, 3553, 7147, 4353, 8619, - 741, 5644, 3102, 8593, 5741, 717, 5287, 9540, 3750, - 9476, 1080, 2354, 3239, 9391, 2983, 3615, 7545, 9316, - 3217, 546, 2878, 3385, 4369, 7750, 7048, 7638, 5003, - 1954, 2932, 9977, 2825, 6294, 7412, 4947, 5551, 4606, - 6527, 8202, 9874, 3630, 9332, 3043, 9614, 4366, 4415, - 5789, 5831, 6056, 8765, 7112, 3900, 5169, 8287, 5622, - 6492, 3446, 6222, 3934, 6761, 6496, 7921, 3767, 4657, - 6468, 9740, 3922, 1388, 791, 996, 5882, 8931, 
4041, - 1804, 3408, 873, 8854, 8857, 5197, 3222, 7923, 1837, - 6637, 5267, 8748, 7386, 9946, 9090, 7284, 9796, 1916, - 1676, 6417, 478, 4042, 9158, 4562, 6428, 4901, 9841, - 4156, 3811, 6129, 8100, 2410, 9667, 1059, 6471, 9086, - 7783, 436, 8399, 2026, 7724, 9135, 6834, 2224, 8238, - 4091, 7721, 9973, 6278, 3251, 4004, 2804, 6760, 1348, - 3105, 8920, 3080, 8489, 6412, 1945, 5275, 9361, 5992, - 4252, 5985, 14, 8664, 6186, 8914, 4367, 8021, 3430, - 7586, 569, 9236, 221, 6330, 387, 7069, 4231, 8633, - 1760, 5743, 1734, 3474, 4921, 5437, 5342, 1188, 3716, - 7244, 3921, 3117, 1899, 6123, 1541, 2426, 3487, 1543, - 8966, 4322, 9621, 5499, 9634, 197, 2016, 1179, 1742, - 603, 4446, 2127, 5502, 5419, 1800, 8843, 3410, 9280, - 6183, 5336, 3557, 1301, 2924, 2685, 4789, 2681, 8075, - 9074, 779, 9722, 2242, 1422, 9799, 190, 8813, 4955, - 9269, 8432, 360, 3105, 1117, 2106, 6923, 340, 3913, - 6338, 2743, 3141, 5508, 8419, 4462, 3804, 4854, 8563, - 27, 56, 4099, 9429, 7128, 3170, 1423, 1480, 4108, - 9263, 248, 6315, 5394, 2111, 8815, 9468, 1653, 6525, - 7824, 7918, 7021, 7163, 2918, 4892, 3181, 2452, 2941, - 8090, 124, 6939, 1996, 9469, 8469, 3564, 8562, 416, - 7462, 4270, 2521, 6664, 1132, 2808, 1882, 6008, 4243, - 8264, 72, 188, 8612, 5177, 8716, 575, 8437, 8572, - 272, 3041, 762, 93, 8887, 453, 2740, 5642, 1483, - 387, 8650, 4556, 7071, 2833, 2163, 2519, 8518, 2921, - 1296, 6818, 2707, 3507, 8598, 8802, 112, 2742, 5974, - 6565, 5489, 7784, 469, 1046, 8118, 2916, 1384, 4596, - 8660, 1524, 2862, 8341, 9259, 3914, 7327, 3943, 1127, - 6398, 6612, 8113, 2461, 8714, 9729, 8226, 6354, 9494, - 9498, 6160, 330, 9056, 6769, 4637, 540, 5583, 6515, - 9235, 1832, 1756, 9622, 3128, 5815, 6161, 5166, 2180, - 2553, 9617, 5271, 540, 669, 8109, 2118, 7870, 9305, - 5197, 8512, 8704, 2565, 8570, 3358, 8597, 6817, 1442, - 7822, 9580, 2286, 877, 5934, 4989, 9155, 9087, 5891, - 4023, 8446, 2014, 2362, 990, 1376, 5099, 6917, 1513, - 4755, 9921, 9633, 586, 9793, 2424, 7385, 2711, 6971, - 8476, 3945, 2785, 4359, 7402, 6094, 3054, 5997, 6264, - 4973, 9403, 224, 8540, 1749, 1440, 9039, 4450, 6560, - 7985, 2950, 8212, 6558, 2305, 2992, 3067, 7181, 688, - 557, 3139, 1085, 2535, 8708, 4783, 1637, 2401, 5400, - 8152, 9595, 2332, 5036, 9866, 6137, 9544, 4606, 3463, - 5129, 4600, 2840, 4681, 5620, 3070, 9929, 3817, 6586, - 1810, 9677, 8838, 5997, 8061, 2182, 2092, 5426, 2012, - 4695, 4335, 5207, 18, 6447, 8196, 4896, 1724, 8190, - 6513, 7255, 7873, 632, 350, 9671, 2671, 6415, 9769, - 5192, 2244, 5805, 3331, 1110, 1188, 9979, 5220, 7760, - 7927, 860, 6526, 7297, 2539, 220, 4541, 1369, 5557, - 6832, 3456, 2993, 2123, 4095, 2625, 8888, 4611, 5854, - 494, 6448, 3694, 6940, 6717, 6857, 7774, 2832, 3690, - 2621]), - values=tensor([3.0392e-01, 3.2415e-01, 6.4353e-02, 7.3274e-01, - 3.2946e-01, 1.5879e-01, 9.2352e-02, 2.3222e-01, - 2.7476e-01, 5.5662e-01, 2.0841e-01, 4.3983e-01, - 4.6932e-01, 4.7844e-01, 6.0685e-01, 9.7693e-01, - 3.5238e-01, 4.6964e-01, 7.5140e-01, 3.1413e-01, - 6.6449e-02, 8.3856e-01, 5.5909e-01, 2.9668e-01, - 6.8550e-01, 8.4874e-01, 2.7284e-01, 6.4469e-01, - 5.5500e-01, 5.1334e-01, 1.1239e-01, 3.2908e-02, - 6.3958e-01, 9.5935e-01, 2.7353e-01, 6.6292e-01, - 3.1922e-01, 6.9750e-01, 5.5048e-01, 6.8061e-01, - 4.3532e-01, 7.7149e-01, 7.8764e-01, 6.0497e-01, - 9.6987e-02, 3.7830e-01, 8.7905e-01, 7.0427e-02, - 1.6845e-01, 8.8919e-01, 8.9750e-01, 1.9794e-01, - 5.5784e-01, 5.4874e-01, 9.3778e-02, 6.5393e-01, - 3.7119e-01, 2.3349e-01, 6.0309e-01, 4.8361e-01, - 5.1730e-01, 5.3303e-01, 8.8849e-01, 7.5067e-03, - 6.5848e-01, 7.7182e-01, 
2.5538e-01, 9.6187e-01, - 3.6024e-01, 5.1765e-01, 2.1626e-02, 5.8628e-01, - 6.4821e-01, 2.7907e-01, 5.4479e-01, 9.4676e-01, - 2.6434e-01, 4.1497e-01, 1.2576e-01, 7.4574e-01, - 6.0185e-01, 6.4194e-01, 2.8693e-01, 3.0484e-01, - 6.4746e-01, 8.6023e-01, 7.7437e-01, 8.2817e-01, - 8.2911e-01, 6.5601e-01, 3.6870e-01, 3.0474e-01, - 9.7824e-01, 7.0873e-01, 7.5584e-01, 4.7182e-01, - 3.3010e-01, 1.0185e-02, 8.5565e-01, 6.5803e-01, - 2.1163e-01, 9.6445e-01, 3.5526e-01, 9.0210e-01, - 6.1257e-01, 3.5304e-01, 3.5164e-01, 8.1901e-01, - 9.3322e-01, 2.9058e-01, 5.5850e-01, 4.1175e-01, - 7.5611e-01, 5.2276e-01, 8.1503e-01, 9.5294e-01, - 5.9539e-01, 9.9769e-01, 2.3382e-01, 5.8700e-01, - 3.9790e-01, 1.0685e-01, 1.3325e-01, 1.5247e-02, - 4.9237e-01, 5.8495e-01, 6.7974e-01, 6.5205e-01, - 2.4978e-01, 1.5540e-01, 6.9466e-01, 9.8909e-01, - 6.7400e-01, 4.4045e-01, 8.0887e-01, 8.7366e-01, - 6.1470e-01, 6.6878e-01, 2.0722e-01, 5.6730e-01, - 9.6699e-01, 2.1420e-01, 5.0036e-01, 4.3882e-02, - 2.4509e-01, 9.6699e-01, 6.2712e-02, 8.0118e-01, - 7.0259e-01, 4.9349e-01, 2.3668e-01, 6.4690e-01, - 3.3297e-01, 8.1392e-01, 3.3370e-01, 3.6099e-01, - 6.9785e-01, 5.8653e-01, 2.3494e-01, 4.2606e-01, - 5.3776e-02, 2.9098e-01, 3.5190e-01, 8.5533e-01, - 3.9164e-01, 4.5423e-01, 3.2810e-02, 7.6592e-01, - 5.1452e-01, 5.8263e-01, 3.0590e-01, 4.6225e-01, - 8.9127e-01, 3.8718e-04, 3.6956e-01, 1.0716e-01, - 8.9555e-01, 5.1526e-01, 2.6090e-01, 5.4827e-01, - 7.6613e-01, 9.6451e-02, 1.6855e-01, 2.7123e-01, - 7.9078e-01, 7.0227e-01, 9.2769e-01, 3.2768e-01, - 7.4133e-02, 8.0175e-01, 8.9212e-01, 1.4596e-01, - 5.2250e-02, 7.1920e-01, 2.8915e-01, 7.1399e-01, - 3.9989e-01, 8.4612e-01, 7.6692e-01, 7.6603e-01, - 9.7955e-01, 8.5926e-01, 4.7268e-01, 3.0567e-01, - 3.5521e-01, 4.5346e-01, 7.0907e-01, 1.0000e-01, - 3.8556e-01, 7.4063e-01, 1.2875e-01, 5.0308e-01, - 6.8759e-01, 5.4430e-01, 9.2335e-01, 5.9479e-01, - 3.5215e-01, 2.5979e-01, 3.6195e-01, 1.1209e-01, - 6.7558e-01, 3.6084e-01, 2.9372e-01, 7.9418e-02, - 8.0128e-01, 5.5807e-01, 6.3595e-01, 3.1372e-01, - 2.3848e-01, 1.4012e-02, 2.1033e-01, 5.1052e-01, - 6.6708e-01, 3.8104e-01, 6.2857e-01, 2.5671e-01, - 6.7301e-01, 6.4080e-01, 7.3818e-01, 6.5250e-01, - 7.2748e-01, 2.8088e-01, 4.3795e-01, 7.6139e-01, - 3.2002e-01, 1.0962e-01, 1.1736e-01, 1.1390e-01, - 8.8693e-01, 5.6804e-01, 5.4451e-01, 4.7759e-01, - 9.7875e-02, 8.1348e-02, 1.2472e-01, 6.8343e-01, - 7.6072e-01, 4.3782e-01, 9.4758e-01, 9.8629e-01, - 1.3619e-01, 8.9717e-01, 8.0717e-01, 5.1829e-01, - 6.6901e-01, 7.6695e-01, 3.5278e-01, 7.6203e-02, - 8.1739e-01, 2.6432e-02, 7.8358e-02, 7.6105e-01, - 6.0698e-01, 9.7534e-01, 1.0290e-01, 6.2350e-03, - 3.6916e-02, 9.6921e-01, 9.2309e-01, 3.6705e-01, - 4.1131e-01, 7.1992e-01, 4.8131e-01, 3.8551e-02, - 8.7653e-01, 4.2984e-01, 7.4999e-01, 5.9486e-01, - 2.0777e-01, 2.4797e-02, 7.2719e-01, 8.6476e-01, - 9.2557e-02, 6.6099e-01, 8.8421e-01, 9.9344e-01, - 5.9213e-01, 8.8296e-01, 4.4506e-01, 6.1979e-01, - 2.4620e-01, 6.4475e-01, 9.4222e-01, 4.3135e-01, - 6.9601e-01, 7.7456e-01, 9.3620e-01, 4.9096e-01, - 7.2207e-01, 6.4022e-01, 5.2574e-01, 8.2484e-01, - 5.7041e-01, 6.9043e-01, 2.4631e-02, 9.5777e-02, - 5.9238e-01, 3.0126e-01, 9.4882e-01, 3.7736e-01, - 4.4950e-01, 2.8003e-01, 1.1028e-01, 4.2071e-01, - 9.9009e-01, 5.0994e-01, 4.9474e-01, 7.2898e-01, - 4.3563e-01, 2.0331e-01, 6.0930e-01, 3.4882e-01, - 2.9900e-01, 6.1199e-01, 2.0308e-01, 1.3459e-01, - 5.6701e-01, 4.8437e-01, 6.0606e-01, 4.1922e-01, - 4.5665e-01, 4.1795e-02, 2.1442e-01, 8.5784e-03, - 1.2383e-01, 6.8451e-01, 8.2903e-01, 9.3818e-01, - 8.6183e-01, 9.2220e-01, 
1.2146e-02, 9.4702e-01, - 7.2689e-01, 7.0124e-01, 5.2058e-01, 7.6183e-01, - 2.7320e-01, 6.4457e-01, 1.3569e-01, 3.2953e-01, - 1.9373e-01, 1.1614e-01, 6.8419e-01, 1.1889e-01, - 1.5054e-01, 6.8449e-01, 2.2163e-02, 3.3239e-01, - 5.3542e-01, 4.6539e-01, 5.7549e-01, 6.0063e-01, - 7.3725e-01, 7.7272e-01, 7.1549e-01, 8.3333e-02, - 2.5724e-01, 4.8954e-01, 5.4990e-01, 5.5515e-01, - 6.6187e-01, 1.4302e-01, 2.6241e-01, 1.3082e-01, - 1.0201e-01, 4.7238e-01, 8.0345e-01, 3.5296e-01, - 4.3307e-02, 5.1890e-01, 8.2623e-01, 7.8766e-01, - 9.6443e-01, 5.7328e-01, 9.6623e-01, 5.1756e-01, - 8.4229e-01, 8.6955e-01, 5.2500e-01, 2.4364e-01, - 4.6531e-04, 7.7566e-01, 2.3278e-01, 5.0290e-01, - 1.2741e-01, 7.4793e-01, 6.6397e-01, 8.4389e-01, - 7.1603e-01, 1.3434e-01, 9.1897e-01, 1.6605e-01, - 3.1924e-01, 8.7903e-01, 1.4216e-02, 1.4696e-01, - 7.2418e-01, 1.2448e-01, 1.1574e-01, 8.0022e-01, - 3.2231e-01, 5.5328e-01, 2.4152e-01, 4.0399e-01, - 1.2053e-01, 1.5238e-01, 5.0061e-01, 8.8357e-01, - 2.6656e-01, 3.4203e-01, 5.0313e-01, 9.2105e-01, - 5.4412e-01, 7.6757e-01, 1.4392e-01, 9.2549e-01, - 4.4630e-02, 1.0189e-01, 2.1381e-01, 6.3179e-01, - 1.4210e-01, 6.4822e-01, 4.4733e-02, 7.0778e-02, - 5.3670e-01, 6.7468e-01, 9.6249e-02, 1.4701e-01, - 6.7904e-01, 6.2977e-01, 4.8222e-01, 5.6410e-01, - 6.6069e-01, 8.2291e-01, 2.7086e-01, 1.3385e-02, - 3.8370e-02, 7.6000e-01, 1.2836e-01, 4.2271e-01, - 3.7971e-01, 4.0221e-01, 1.9058e-01, 1.3246e-02, - 9.7472e-01, 8.1468e-01, 9.5465e-01, 5.0494e-01, - 4.8024e-01, 8.6375e-01, 2.1211e-01, 7.4747e-01, - 8.8496e-01, 2.3040e-01, 2.1539e-01, 6.6296e-01, - 4.6006e-01, 7.6222e-01, 6.9519e-01, 2.5685e-01, - 1.2762e-01, 4.8623e-01, 8.5541e-01, 1.9816e-01, - 6.4360e-01, 5.6243e-01, 6.0436e-01, 9.6360e-01, - 7.3027e-01, 8.0053e-01, 9.3960e-02, 8.9196e-01, - 6.5344e-01, 5.7618e-01, 3.8071e-02, 9.8561e-01, - 3.9902e-01, 2.0152e-02, 8.4945e-01, 1.0773e-01, - 5.1144e-01, 7.1844e-01, 8.5285e-02, 4.5100e-01, - 1.5098e-01, 2.6810e-01, 5.1885e-02, 6.6289e-01, - 5.9605e-01, 5.1952e-01, 3.2494e-01, 4.2823e-01, - 8.5842e-01, 6.0189e-01, 2.0347e-01, 9.8130e-01, - 1.8163e-01, 2.5564e-02, 1.8724e-02, 5.1201e-01, - 4.5720e-01, 7.9371e-01, 4.8374e-01, 4.3205e-01, - 7.7302e-01, 7.5530e-01, 9.7319e-01, 2.8166e-01, - 6.8553e-01, 9.0165e-01, 1.3726e-01, 6.1107e-01, - 9.6470e-01, 9.3457e-01, 1.6750e-01, 5.7026e-02, - 9.7853e-01, 4.8808e-01, 5.2986e-01, 4.4763e-01, - 2.1220e-01, 1.8968e-01, 6.6682e-01, 2.7978e-01, - 6.2518e-02, 1.7188e-01, 6.6203e-01, 9.5117e-01, - 2.6765e-01, 7.5161e-01, 4.2205e-01, 8.3213e-01, - 3.4401e-02, 5.5833e-01, 2.2728e-01, 8.6978e-01, - 5.0487e-01, 6.8315e-01, 8.8845e-01, 3.7450e-01, - 4.7156e-01, 2.0297e-01, 5.9919e-01, 8.7437e-01, - 1.8376e-01, 3.8162e-01, 3.8759e-01, 2.8332e-01, - 7.3703e-01, 8.2598e-01, 5.7822e-01, 8.1318e-01, - 6.0403e-01, 3.0036e-01, 8.6647e-01, 6.3278e-01, - 3.3144e-02, 8.3655e-02, 2.0403e-01, 6.3295e-01, - 5.5983e-01, 1.7687e-01, 7.9099e-01, 6.5995e-01, - 1.6364e-01, 8.0576e-01, 5.9218e-01, 3.7825e-01, - 7.3625e-01, 6.8133e-01, 1.5251e-01, 3.4541e-01, - 4.3200e-01, 4.7561e-01, 6.5115e-01, 4.9609e-01, - 4.9830e-01, 1.7116e-01, 8.1782e-01, 5.6982e-02, - 7.8582e-01, 4.1358e-01, 6.8337e-01, 8.8174e-01, - 2.7893e-02, 6.1252e-01, 6.8428e-01, 4.7886e-04, - 4.5579e-01, 2.6597e-01, 8.9291e-01, 4.7913e-01, - 5.1187e-02, 9.1252e-01, 5.2623e-01, 9.2323e-01, - 2.5712e-02, 7.4165e-01, 8.7147e-01, 1.1067e-01, - 3.3337e-01, 6.6053e-01, 8.1395e-02, 3.0634e-01, - 5.9633e-01, 6.1441e-01, 5.2337e-01, 9.4467e-01, - 1.6455e-01, 6.0027e-01, 9.9735e-01, 5.4964e-01, - 4.2544e-01, 5.2938e-01, 
2.7487e-01, 5.2740e-01, - 7.2458e-01, 2.1872e-01, 2.3165e-02, 8.5565e-01, - 3.4327e-02, 8.8372e-01, 3.6038e-01, 5.2099e-01, - 7.8544e-01, 8.5584e-01, 2.0450e-01, 7.0439e-01, - 1.8946e-01, 5.2352e-01, 1.3840e-01, 7.9757e-01, - 6.4771e-01, 4.2226e-01, 3.3701e-01, 5.7625e-01, - 7.5119e-01, 6.0090e-01, 5.5169e-01, 2.6335e-01, - 2.5175e-01, 1.3511e-01, 7.5301e-01, 3.6857e-01, - 2.4815e-01, 9.7900e-01, 5.3734e-01, 2.5181e-01, - 9.6212e-01, 3.0052e-01, 2.3817e-01, 5.4727e-01, - 7.4985e-01, 4.0814e-01, 2.3324e-01, 9.0258e-01, - 9.2453e-01, 5.1688e-01, 6.8008e-01, 8.4417e-01, - 2.9048e-01, 3.5275e-01, 9.1446e-01, 3.1166e-01, - 9.3812e-02, 3.2061e-01, 3.8420e-01, 8.3599e-01, - 2.2761e-01, 7.4261e-01, 2.4781e-01, 8.3249e-01, - 6.2992e-01, 3.7271e-01, 6.4205e-01, 8.0600e-01, - 5.2952e-01, 8.6497e-03, 2.0012e-01, 6.7808e-01, - 7.6509e-01, 3.6618e-01, 3.6418e-01, 7.0343e-01, - 6.0658e-01, 1.8231e-01, 4.0747e-02, 7.2457e-01, - 7.5662e-01, 3.1029e-02, 1.9408e-01, 5.8483e-03, - 2.5497e-01, 2.7861e-01, 6.7215e-01, 9.8377e-01, - 3.9461e-01, 5.7729e-01, 7.4282e-01, 7.8487e-01, - 6.6966e-01, 8.0111e-01, 2.7436e-01, 9.1071e-01, - 7.5479e-01, 9.6961e-02, 5.9253e-01, 4.3539e-01, - 8.1993e-02, 1.4436e-01, 5.4192e-02, 4.5414e-01, - 2.2083e-01, 6.8883e-01, 3.0813e-03, 5.7122e-01, - 6.4824e-01, 6.1088e-01, 9.9103e-01, 3.0128e-01, - 2.5519e-01, 5.5098e-01, 7.7501e-01, 8.2747e-01, - 2.7382e-01, 1.6131e-01, 3.8473e-01, 1.8858e-01, - 7.6480e-01, 5.7925e-01, 3.7285e-01, 4.1565e-01, - 6.6174e-01, 7.9534e-01, 3.9512e-01, 3.1463e-02, - 2.2917e-01, 3.0596e-01, 2.7861e-03, 2.0807e-01, - 3.7628e-01, 6.8980e-01, 2.7128e-02, 6.5713e-01, - 4.9102e-01, 7.0889e-01, 4.5564e-02, 9.8297e-01, - 6.8338e-01, 2.7678e-03, 5.2548e-01, 9.3723e-01, - 6.9210e-01, 8.4505e-01, 5.3302e-01, 5.5013e-01, - 2.2889e-02, 8.7608e-02, 1.4028e-01, 4.6686e-01, - 6.4919e-01, 9.9436e-01, 7.3888e-01, 4.1538e-01, - 9.4718e-01, 9.8412e-01, 4.2306e-01, 1.3232e-01, - 6.8812e-01, 2.0943e-01, 7.5932e-01, 1.2380e-01, - 6.2774e-01, 4.9894e-01, 3.0889e-01, 1.1004e-01, - 7.4553e-01, 8.5544e-01, 2.0482e-01, 7.2767e-01, - 2.0936e-02, 5.0479e-01, 5.7524e-01, 5.4774e-01, - 8.4315e-01, 5.2362e-01, 2.8163e-01, 5.1186e-01, - 8.0800e-01, 7.9337e-01, 3.6713e-01, 6.2638e-01, - 6.5095e-01, 4.0210e-01, 6.3034e-01, 5.3272e-01, - 3.4358e-01, 5.9924e-01, 1.9674e-01, 6.4351e-01, - 8.5855e-02, 2.3977e-01, 1.2607e-01, 7.8134e-01, - 9.6526e-02, 6.0975e-01, 4.6583e-01, 1.7783e-01, - 7.3707e-02, 8.4537e-01, 7.9093e-01, 4.1040e-02, - 2.5473e-01, 3.9781e-01, 1.3302e-01, 4.8382e-01, - 6.0215e-02, 7.9066e-01, 9.9902e-01, 9.0029e-01, - 5.6279e-01, 9.9788e-01, 7.8531e-01, 2.8619e-01, - 6.3489e-01, 1.5229e-01, 6.9891e-01, 7.3891e-01, - 9.9196e-01, 1.2942e-02, 1.0882e-01, 1.6395e-01, - 9.7799e-01, 9.8840e-01, 9.3287e-01, 9.7669e-01, - 9.3534e-01, 3.8769e-01, 5.8205e-01, 7.1933e-01, - 8.1893e-01, 2.3647e-02, 7.8599e-01, 3.5138e-01, - 8.8097e-01, 5.9339e-01, 8.1908e-01, 2.1259e-01, - 1.7916e-01, 8.3775e-01, 4.2342e-02, 1.7547e-01, - 4.9728e-01, 7.5815e-01, 6.1732e-01, 7.7818e-01, - 3.0544e-01, 7.8172e-01, 1.0312e-01, 9.2880e-01, - 4.3926e-01, 8.6426e-02, 2.2251e-01, 4.2217e-01, - 8.8595e-01, 8.2722e-02, 7.3624e-01, 5.9382e-02, - 4.9932e-01, 1.4760e-01, 4.7458e-02, 6.0080e-01, - 1.4431e-01, 2.9584e-01, 7.1655e-01, 8.9494e-01, - 7.0292e-01, 6.4015e-01, 3.6954e-01, 4.8047e-01, - 2.1881e-01, 1.9305e-01, 5.3317e-01, 1.5076e-01, - 4.3873e-01, 9.4135e-01, 1.0942e-01, 8.7236e-01, - 5.3086e-01, 1.7910e-01, 2.4322e-01, 2.2052e-02, - 4.1013e-01, 1.4191e-01, 3.4491e-01, 5.5417e-01, - 3.3288e-01, 3.1302e-01, 
2.3566e-01, 1.8459e-01, - 3.6159e-01, 4.8536e-01, 9.0908e-01, 6.3893e-02, - 7.7289e-02, 8.0569e-01, 8.2725e-01, 8.5490e-01, - 4.1791e-02, 4.0255e-01, 2.0672e-01, 9.7864e-01, - 3.6149e-01, 8.5159e-01, 9.6188e-01, 4.1123e-01, - 2.3421e-01, 6.1193e-01, 2.1097e-01, 2.7882e-01, - 5.5135e-01, 8.0230e-01, 3.1907e-01, 1.8198e-01, - 3.6314e-02, 6.2212e-01, 5.1175e-01, 6.7357e-01, - 3.7007e-02, 1.2024e-01, 9.1569e-01, 8.0422e-01, - 4.8728e-01, 3.3584e-01, 5.0960e-01, 3.6273e-01, - 6.1914e-01, 3.8996e-01, 5.9564e-01, 4.2090e-01, - 8.0087e-01, 6.1589e-01, 5.8173e-01, 6.2723e-01, - 6.9793e-01, 1.5997e-01, 1.5756e-01, 9.2695e-02, - 7.7600e-01, 5.7205e-01, 5.7808e-01, 5.9198e-02, - 8.2480e-01, 2.8009e-01, 8.4499e-02, 3.5912e-01, - 7.7211e-01, 1.0716e-01, 7.1486e-01, 5.0595e-01, - 6.5373e-02, 7.7498e-01, 9.1086e-03, 4.0288e-01, - 6.5420e-01, 2.4228e-01, 8.2790e-01, 1.6179e-02, - 4.5517e-01, 2.3154e-01, 4.0528e-01, 9.9680e-01, - 1.9648e-01, 8.9156e-01, 4.9073e-01, 6.1991e-01, - 5.2047e-01, 2.0881e-01, 9.4192e-01, 4.9260e-01]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([8945, 3396, 1179, 7845, 5353, 7539, 7036, 1743, 4590, + 9706, 9114, 1883, 3360, 9714, 6892, 5151, 3383, 1404, + 1831, 4276, 9165, 2466, 2237, 3707, 3545, 7291, 2878, + 2495, 9824, 2602, 3234, 6210, 803, 3676, 5976, 3272, + 3806, 5164, 6743, 7171, 162, 1405, 4701, 8601, 8019, + 4862, 3797, 1907, 8263, 3320, 9466, 335, 9390, 4209, + 5095, 2772, 7268, 8726, 7661, 94, 7630, 1233, 7006, + 2204, 4233, 3286, 900, 2884, 1201, 4167, 5378, 7309, + 5080, 5779, 9724, 2453, 3090, 5707, 7810, 9441, 3585, + 8544, 683, 5841, 5444, 3816, 3300, 548, 3220, 6182, + 4001, 3210, 2638, 41, 2488, 34, 5895, 1929, 6262, + 6745, 1441, 2060, 7662, 3205, 9914, 6565, 8901, 9926, + 7630, 6163, 8556, 4895, 7090, 748, 9427, 3078, 5873, + 2297, 1006, 7916, 2596, 536, 3213, 8594, 8061, 5619, + 1607, 8351, 4783, 649, 1089, 8992, 6973, 2739, 8357, + 9645, 6973, 9438, 3529, 4166, 2361, 6581, 9866, 3387, + 3080, 930, 9748, 678, 7194, 9819, 6374, 6338, 8539, + 9749, 2131, 8093, 4606, 6550, 3719, 7613, 1304, 5282, + 7044, 8559, 9384, 8257, 1763, 473, 1385, 7934, 5409, + 336, 7981, 2265, 3340, 2900, 8402, 9657, 9519, 3615, + 1302, 4040, 4426, 153, 4893, 7466, 737, 7283, 1137, + 3184, 1607, 2087, 2802, 8646, 7649, 7107, 3563, 7564, + 9504, 8510, 6434, 6665, 4775, 7488, 6418, 2058, 1819, + 4396, 1650, 5111, 2215, 6579, 838, 4639, 7271, 833, + 1898, 1581, 3894, 480, 2937, 2619, 6452, 3750, 8106, + 8345, 4363, 781, 3703, 8349, 2403, 9207, 7354, 662, + 5670, 9094, 7076, 7476, 9641, 4009, 628, 354, 4864, + 6408, 6881, 2199, 3780, 6648, 3256, 5259, 3400, 7677, + 9765, 3406, 8375, 4626, 7974, 2370, 3438, 5886, 6785, + 8785, 5325, 2444, 5731, 3261, 7251, 3966, 630, 4711, + 5083, 8173, 9533, 9893, 5512, 9795, 2261, 9003, 1575, + 7838, 5221, 3905, 3659, 4077, 115, 935, 6215, 8429, + 3917, 5204, 311, 2973, 3219, 1577, 9389, 2991, 5510, + 2461, 9770, 5136, 3343, 8917, 9218, 1311, 8986, 4405, + 8556, 3238, 2558, 5250, 618, 3389, 8858, 45, 7363, + 4818, 7081, 3737, 6906, 9478, 6432, 5077, 8225, 4404, + 7717, 5750, 7191, 8819, 8189, 5729, 5250, 7384, 6893, + 4600, 1582, 3815, 4897, 3942, 2070, 2436, 6640, 1420, + 3679, 9245, 958, 3679, 9640, 3274, 3102, 8666, 615, + 5650, 6557, 5747, 3484, 246, 8057, 2329, 6318, 6418, + 2071, 3817, 6138, 6747, 7407, 1450, 369, 5047, 5515, + 7855, 3161, 419, 3580, 8434, 3391, 1876, 8301, 7626, + 7469, 4086, 1576, 9404, 6029, 5790, 4643, 9099, 6641, + 269, 6145, 652, 313, 2335, 2792, 2914, 7328, 4203, + 7886, 8148, 3406, 
4180, 9650, 3634, 648, 9181, 62, + 5149, 5973, 9105, 5497, 8296, 5420, 4424, 9143, 1523, + 6941, 1142, 4493, 6351, 446, 577, 1629, 245, 7135, + 6160, 9525, 6084, 7212, 8425, 3424, 6520, 1322, 4124, + 5324, 2697, 5850, 3189, 2684, 2839, 3549, 3012, 7923, + 8589, 8612, 9075, 6590, 3499, 7946, 3141, 6391, 8953, + 6142, 2419, 1790, 7146, 6236, 9268, 2896, 1689, 6613, + 4532, 4584, 1325, 7394, 8284, 4463, 3642, 2282, 8414, + 9423, 138, 3947, 306, 7855, 9163, 6896, 8979, 1724, + 5170, 51, 4016, 8013, 3846, 2422, 3223, 4421, 2375, + 1717, 7745, 5235, 1263, 3985, 5971, 5104, 3589, 3293, + 4349, 393, 2092, 4442, 6577, 7922, 9368, 4648, 7995, + 5075, 4429, 7398, 5908, 2671, 2634, 8764, 4132, 6450, + 9438, 5182, 8656, 1204, 3267, 2173, 3521, 1424, 701, + 1084, 8974, 3663, 4399, 1519, 8751, 4361, 5549, 8815, + 5968, 3693, 9898, 8818, 4693, 9186, 3877, 5588, 6992, + 7704, 4079, 448, 3632, 9137, 7667, 4424, 6893, 2724, + 9315, 4019, 9920, 4465, 5349, 1515, 774, 8824, 1637, + 9230, 7482, 6454, 4758, 3926, 7862, 3487, 4252, 9245, + 7419, 8902, 3306, 9144, 3069, 8624, 7027, 9844, 390, + 3671, 1935, 5147, 6458, 2901, 4100, 1627, 8093, 7742, + 1139, 9677, 8468, 3740, 7956, 615, 7163, 5722, 8254, + 2208, 1056, 9928, 697, 1166, 2375, 5493, 122, 391, + 9045, 4689, 515, 1729, 4724, 5766, 9306, 1196, 1036, + 4607, 4064, 5704, 3279, 6458, 2414, 7135, 1248, 5592, + 3740, 6862, 3502, 7351, 7215, 8844, 7880, 9911, 8570, + 1484, 5691, 5680, 4882, 836, 9572, 4343, 7630, 2724, + 9110, 7775, 1871, 8035, 462, 3852, 942, 921, 6491, + 1116, 6752, 5778, 1413, 5915, 751, 1950, 9072, 5886, + 1001, 1725, 9574, 52, 9517, 5502, 8263, 483, 297, + 7916, 5377, 9460, 7305, 23, 7869, 8971, 3087, 4611, + 5504, 2405, 5239, 9022, 7221, 1354, 2985, 7954, 3823, + 4601, 4002, 2438, 4562, 3420, 6775, 6456, 7973, 5475, + 7634, 7316, 8831, 1826, 7786, 7607, 6474, 270, 296, + 184, 3850, 4251, 4668, 600, 8846, 529, 337, 2584, + 8140, 3615, 9419, 5670, 8148, 3912, 4023, 8122, 3647, + 4021, 7298, 6265, 8251, 3226, 400, 5247, 7606, 4710, + 6782, 4412, 2855, 9924, 885, 3817, 2006, 8730, 9904, + 4995, 1121, 6187, 130, 7650, 9511, 7319, 6961, 1773, + 6534, 817, 3967, 7690, 3866, 2599, 2767, 6660, 1106, + 7926, 4942, 6471, 9767, 3357, 7909, 3767, 8210, 6330, + 6343, 2348, 5182, 8276, 9594, 741, 8381, 8691, 7988, + 3403, 2290, 4713, 9889, 5866, 4290, 9536, 7655, 2908, + 3996, 2613, 2321, 2101, 5177, 5555, 4157, 6579, 8198, + 4437, 3123, 959, 2322, 3873, 5229, 8315, 2125, 1068, + 3339, 4442, 5920, 1106, 5605, 8949, 804, 1779, 4985, + 8824, 7006, 1504, 3648, 8602, 2160, 6342, 9535, 8002, + 4087, 8531, 3998, 4073, 2272, 9371, 1582, 6595, 2758, + 8018, 7889, 2888, 2724, 166, 1025, 5415, 5887, 5962, + 4693, 9500, 5479, 7164, 8949, 9300, 2490, 8900, 7784, + 5112, 9529, 2586, 8119, 1217, 5680, 7692, 1414, 6045, + 1897, 1410, 6232, 1224, 8075, 909, 8079, 6297, 5594, + 6563, 447, 7639, 92, 6240, 5816, 3932, 6779, 1422, + 5413, 5034, 8673, 6215, 3050, 7602, 4865, 8751, 2686, + 3397, 4590, 1441, 1218, 9330, 9843, 9750, 5252, 9559, + 8730, 9110, 635, 7887, 7096, 8550, 7459, 3334, 1297, + 8605, 5687, 2503, 6275, 4035, 8191, 7143, 7275, 6676, + 7971, 5893, 6753, 5613, 808, 8632, 1964, 673, 127, + 8324, 8653, 4841, 8589, 3857, 2582, 6059, 9639, 5226, + 6110, 3373, 9853, 6381, 5046, 6036, 7589, 5296, 5514, + 9737, 8932, 1243, 880, 1655, 7250, 3620, 3051, 4445, + 5601, 4526, 3361, 9690, 156, 6080, 280, 8730, 6431, + 7500, 5972, 7423, 7931, 5347, 1208, 9247, 8858, 7800, + 3207, 5794, 4373, 9912, 8788, 4783, 9650, 391, 8406, + 3597, 4853, 3749, 8966, 3381, 5873, 8659, 
4038, 5007, + 4925, 6124, 4241, 7328, 1463, 4456, 4979, 5642, 5628, + 2700, 4136, 1508, 6833, 9913, 4703, 1181, 9942, 1386, + 2626]), + values=tensor([0.6328, 0.5112, 0.4765, 0.0665, 0.1734, 0.0665, 0.6185, + 0.0181, 0.0725, 0.2532, 0.9599, 0.4060, 0.4487, 0.3971, + 0.5036, 0.0663, 0.7667, 0.4567, 0.6015, 0.0441, 0.0537, + 0.2971, 0.1276, 0.2201, 0.4696, 0.3107, 0.3630, 0.0915, + 0.5682, 0.1732, 0.1589, 0.6673, 0.7066, 0.3143, 0.9178, + 0.5172, 0.0354, 0.4886, 0.0057, 0.8793, 0.8024, 0.3323, + 0.7622, 0.8288, 0.9792, 0.6146, 0.0245, 0.4363, 0.1970, + 0.3869, 0.0584, 0.1673, 0.4043, 0.4978, 0.8504, 0.9699, + 0.0473, 0.4292, 0.5328, 0.9792, 0.7458, 0.9252, 0.8316, + 0.4866, 0.4309, 0.8744, 0.7165, 0.0379, 0.4454, 0.4735, + 0.2305, 0.7674, 0.9705, 0.3678, 0.7813, 0.7238, 0.6940, + 0.9317, 0.2240, 0.1941, 0.3116, 0.6138, 0.6246, 0.2994, + 0.3718, 0.8037, 0.4721, 0.2912, 0.7832, 0.7698, 0.1876, + 0.6541, 0.0121, 0.1183, 0.7660, 0.9302, 0.7307, 0.6241, + 0.0980, 0.2478, 0.9494, 0.2980, 0.4179, 0.5405, 0.3656, + 0.5253, 0.9366, 0.2741, 0.3689, 0.1650, 0.1150, 0.2357, + 0.2589, 0.8658, 0.3718, 0.4559, 0.0607, 0.6854, 0.5740, + 0.0099, 0.0842, 0.2290, 0.9778, 0.4741, 0.0309, 0.3407, + 0.9693, 0.8160, 0.6529, 0.3165, 0.7635, 0.6006, 0.3346, + 0.0092, 0.1731, 0.2562, 0.2849, 0.3168, 0.4420, 0.8098, + 0.5916, 0.5939, 0.1808, 0.0213, 0.0030, 0.3273, 0.2527, + 0.9427, 0.7138, 0.2956, 0.9898, 0.6605, 0.7670, 0.3577, + 0.3575, 0.6873, 0.0220, 0.4762, 0.1607, 0.6636, 0.2404, + 0.5640, 0.3825, 0.8669, 0.9936, 0.4437, 0.3622, 0.5991, + 0.6010, 0.1390, 0.5097, 0.6670, 0.8363, 0.4078, 0.4457, + 0.6784, 0.8335, 0.6551, 0.5699, 0.2070, 0.2892, 0.8107, + 0.3392, 0.7591, 0.1556, 0.7279, 0.6536, 0.0812, 0.1948, + 0.2981, 0.7056, 0.7521, 0.1516, 0.4228, 0.0576, 0.0476, + 0.3420, 0.2920, 0.6385, 0.3484, 0.2147, 0.0377, 0.0600, + 0.7111, 0.7770, 0.5438, 0.7238, 0.1643, 0.7321, 0.0557, + 0.8672, 0.0722, 0.6589, 0.3135, 0.3991, 0.7778, 0.9018, + 0.8278, 0.8837, 0.6604, 0.1719, 0.2527, 0.1772, 0.7083, + 0.6464, 0.6345, 0.4008, 0.8240, 0.4263, 0.1009, 0.1581, + 0.1175, 0.1210, 0.8371, 0.7669, 0.0035, 0.8185, 0.0095, + 0.8614, 0.7936, 0.8270, 0.5196, 0.0669, 0.9742, 0.3211, + 0.1091, 0.7255, 0.1908, 0.7702, 0.6533, 0.8657, 0.4928, + 0.2724, 0.3566, 0.6408, 0.7526, 0.7137, 0.9179, 0.0186, + 0.5034, 0.7474, 0.8220, 0.5436, 0.7154, 0.5585, 0.6583, + 0.7027, 0.4150, 0.8241, 0.7987, 0.4703, 0.5787, 0.2111, + 0.3140, 0.4002, 0.0314, 0.7234, 0.3993, 0.6999, 0.7431, + 0.3273, 0.5426, 0.8777, 0.5686, 0.1940, 0.5264, 0.0654, + 0.0803, 0.1801, 0.4317, 0.1449, 0.9288, 0.3382, 0.0782, + 0.2906, 0.1907, 0.5908, 0.2320, 0.7996, 0.3326, 0.9073, + 0.6262, 0.1248, 0.9796, 0.2581, 0.7680, 0.6286, 0.8856, + 0.3858, 0.7435, 0.3491, 0.1581, 0.2008, 0.6343, 0.9670, + 0.0951, 0.6572, 0.5074, 0.2256, 0.4076, 0.7261, 0.2860, + 0.2840, 0.8543, 0.5327, 0.0708, 0.8227, 0.7674, 0.1378, + 0.2275, 0.5249, 0.7149, 0.9475, 0.9988, 0.3974, 0.2904, + 0.4868, 0.8516, 0.6448, 0.3137, 0.0093, 0.8414, 0.6284, + 0.6688, 0.5045, 0.1113, 0.0319, 0.5483, 0.9009, 0.3181, + 0.6169, 0.7072, 0.6471, 0.8162, 0.4043, 0.9344, 0.3371, + 0.7476, 0.0758, 0.5203, 0.2836, 0.4781, 0.8972, 0.8509, + 0.8308, 0.0693, 0.1355, 0.0098, 0.2233, 0.8841, 0.4939, + 0.4893, 0.8577, 0.6130, 0.3307, 0.8263, 0.9332, 0.9152, + 0.2765, 0.3176, 0.4801, 0.5128, 0.1151, 0.2896, 0.8491, + 0.1399, 0.6841, 0.7791, 0.8495, 0.7517, 0.3322, 0.3399, + 0.2090, 0.4326, 0.2931, 0.3882, 0.0192, 0.5096, 0.7396, + 0.3615, 0.3835, 0.9249, 0.7264, 0.9818, 0.7046, 0.0718, + 0.0299, 0.1397, 0.4426, 0.7572, 0.0545, 
0.7970, 0.8139, + 0.9342, 0.3447, 0.9270, 0.8988, 0.4144, 0.1455, 0.6063, + 0.9656, 0.3461, 0.0662, 0.1588, 0.4889, 0.0435, 0.0505, + 0.2310, 0.2304, 0.6950, 0.8340, 0.2582, 0.1054, 0.5458, + 0.3408, 0.5412, 0.0913, 0.1004, 0.8661, 0.6202, 0.2592, + 0.6923, 0.3687, 0.1088, 0.2031, 0.7085, 0.3896, 0.9273, + 0.0631, 0.8597, 0.5085, 0.4210, 0.5198, 0.8408, 0.4871, + 0.1356, 0.4654, 0.5719, 0.7761, 0.8139, 0.3598, 0.6241, + 0.9513, 0.2844, 0.7243, 0.3186, 0.1385, 0.3457, 0.3843, + 0.4150, 0.0789, 0.9208, 0.8685, 0.3678, 0.4569, 0.9747, + 0.1423, 0.6800, 0.9024, 0.0169, 0.9981, 0.8256, 0.6974, + 0.5621, 0.1760, 0.7437, 0.7608, 0.1172, 0.5954, 0.7145, + 0.9756, 0.0790, 0.9835, 0.8334, 0.8632, 0.1285, 0.9257, + 0.9551, 0.0982, 0.4373, 0.2748, 0.7133, 0.3121, 0.6542, + 0.1952, 0.0849, 0.3599, 0.7623, 0.7153, 0.5821, 0.6074, + 0.8762, 0.1444, 0.5110, 0.9378, 0.0157, 0.2010, 0.4720, + 0.3976, 0.7842, 0.6920, 0.2721, 0.8751, 0.4100, 0.4580, + 0.2104, 0.9757, 0.8016, 0.9364, 0.5061, 0.4950, 0.4321, + 0.9744, 0.7509, 0.0778, 0.3566, 0.0305, 0.6194, 0.3360, + 0.8082, 0.3227, 0.1973, 0.6016, 0.6943, 0.3027, 0.7546, + 0.6573, 0.5343, 0.5497, 0.8833, 0.9913, 0.9660, 0.3089, + 0.1248, 0.2260, 0.4299, 0.8327, 0.6883, 0.2237, 0.0364, + 0.1167, 0.0909, 0.8445, 0.4475, 0.8801, 0.3579, 0.7639, + 0.8627, 0.6154, 0.9509, 0.9942, 0.2111, 0.7502, 0.0343, + 0.7545, 0.6893, 0.8096, 0.3698, 0.7535, 0.6656, 0.5620, + 0.8632, 0.6307, 0.4399, 0.3365, 0.1523, 0.7446, 0.3731, + 0.5685, 0.2299, 0.4967, 0.1696, 0.0488, 0.5444, 0.6499, + 0.5061, 0.7582, 0.4351, 0.0459, 0.5148, 0.7255, 0.7221, + 0.5147, 0.3346, 0.8222, 0.9402, 0.5147, 0.1994, 0.8894, + 0.6587, 0.4491, 0.8322, 0.4019, 0.7869, 0.3341, 0.9335, + 0.3526, 0.5094, 0.8154, 0.2532, 0.9317, 0.0792, 0.4247, + 0.7418, 0.7936, 0.8473, 0.6363, 0.2608, 0.9080, 0.2553, + 0.4912, 0.2318, 0.9993, 0.5667, 0.3724, 0.0952, 0.5794, + 0.3214, 0.9709, 0.5244, 0.8426, 0.9671, 0.9749, 0.9732, + 0.3637, 0.5072, 0.4081, 0.6560, 0.9479, 0.2192, 0.9666, + 0.2576, 0.6222, 0.8978, 0.5046, 0.8876, 0.4772, 0.2118, + 0.9053, 0.6963, 0.6889, 0.0405, 0.5808, 0.5507, 0.5861, + 0.0583, 0.2552, 0.0318, 0.7752, 0.2871, 0.6239, 0.9966, + 0.6469, 0.5840, 0.1134, 0.1594, 0.6911, 0.9820, 0.5833, + 0.6494, 0.1721, 0.2022, 0.8434, 0.0943, 0.6635, 0.2434, + 0.3363, 0.1825, 0.9443, 0.1938, 0.9771, 0.0202, 0.3630, + 0.2508, 0.6021, 0.5492, 0.0491, 0.3316, 0.7721, 0.3799, + 0.8237, 0.3240, 0.6506, 0.9443, 0.0260, 0.0910, 0.4223, + 0.8709, 0.5992, 0.2823, 0.7916, 0.5827, 0.5220, 0.8628, + 0.7689, 0.1050, 0.3488, 0.0494, 0.8068, 0.1642, 0.7790, + 0.5140, 0.1047, 0.7710, 0.7871, 0.8287, 0.4389, 0.1787, + 0.6815, 0.2231, 0.8519, 0.5556, 0.2774, 0.8382, 0.9451, + 0.8734, 0.8635, 0.2108, 0.7996, 0.6170, 0.0526, 0.7979, + 0.8662, 0.1730, 0.0691, 0.0668, 0.6049, 0.5908, 0.7047, + 0.8468, 0.4485, 0.3706, 0.9801, 0.1985, 0.1477, 0.2749, + 0.0702, 0.0328, 0.9500, 0.5657, 0.5124, 0.1613, 0.1996, + 0.3933, 0.4330, 0.3130, 0.1602, 0.5558, 0.9162, 0.2688, + 0.4727, 0.5825, 0.1557, 0.7294, 0.8714, 0.1321, 0.7130, + 0.9610, 0.6894, 0.5403, 0.7020, 0.3602, 0.1816, 0.0283, + 0.5886, 0.9803, 0.9744, 0.0303, 0.9827, 0.1918, 0.3763, + 0.2881, 0.7577, 0.5974, 0.7908, 0.1092, 0.3742, 0.1171, + 0.3723, 0.8509, 0.2978, 0.3616, 0.3397, 0.8046, 0.2599, + 0.2349, 0.9827, 0.0690, 0.3523, 0.7776, 0.0194, 0.2950, + 0.0301, 0.3824, 0.8581, 0.8230, 0.9448, 0.0054, 0.4019, + 0.3582, 0.8848, 0.6090, 0.0363, 0.0164, 0.9237, 0.2887, + 0.5411, 0.9583, 0.0473, 0.8185, 0.9223, 0.0525, 0.9777, + 0.5953, 0.2917, 0.8644, 0.6195, 0.3030, 0.7733, 0.0611, 
+ 0.4991, 0.9366, 0.6827, 0.5465, 0.5034, 0.0221, 0.6163, + 0.9029, 0.4801, 0.8324, 0.2157, 0.9754, 0.3283, 0.9381, + 0.2417, 0.6561, 0.5989, 0.8029, 0.1791, 0.7471, 0.1783, + 0.7994, 0.5807, 0.8763, 0.8464, 0.4109, 0.1088, 0.3839, + 0.2038, 0.0726, 0.8936, 0.1074, 0.0351, 0.9235, 0.7083, + 0.3558, 0.3932, 0.3260, 0.5944, 0.0072, 0.3316, 0.2196, + 0.8039, 0.4128, 0.9238, 0.2574, 0.2967, 0.0032, 0.4678, + 0.2924, 0.1058, 0.1821, 0.9271, 0.0700, 0.9897, 0.5525, + 0.2354, 0.6866, 0.4453, 0.5295, 0.1370, 0.8061, 0.3602, + 0.8010, 0.8126, 0.1953, 0.2307, 0.6779, 0.5612, 0.9952, + 0.6615, 0.0975, 0.1560, 0.5099, 0.8269, 0.5293, 0.5824, + 0.2968, 0.9192, 0.9748, 0.5507, 0.4204, 0.8749, 0.4482, + 0.9546, 0.2194, 0.3212, 0.8656, 0.2083, 0.5780, 0.8602, + 0.7862, 0.7419, 0.9902, 0.1332, 0.9222, 0.1391, 0.2169, + 0.4796, 0.5498, 0.5026, 0.9050, 0.8922, 0.3026, 0.7405, + 0.8729, 0.1021, 0.0894, 0.1148, 0.5970, 0.2647, 0.2807, + 0.6983, 0.4238, 0.5047, 0.1497, 0.9380, 0.6580, 0.5332, + 0.5670, 0.4759, 0.3379, 0.7770, 0.4989, 0.9628, 0.8008, + 0.0691, 0.1395, 0.8825, 0.8428, 0.2869, 0.5446, 0.7076, + 0.9507, 0.7554, 0.2652, 0.2555, 0.3540, 0.4539, 0.0023, + 0.9956, 0.2987, 0.1515, 0.8710, 0.6854, 0.8705, 0.2345, + 0.6768, 0.8055, 0.8135, 0.4333, 0.9453, 0.5409, 0.2969, + 0.4296, 0.7266, 0.0168, 0.7668, 0.4531, 0.9406]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4825, 0.6787, 0.3927, ..., 0.9418, 0.0965, 0.0788]) +tensor([0.4142, 0.5109, 0.3445, ..., 0.2930, 0.0409, 0.1153]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -919,375 +1026,268 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.350545883178711 seconds +Time: 10.425815343856812 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), - col_indices=tensor([5455, 5592, 4109, 222, 9693, 3577, 9334, 9406, 2137, - 1799, 7682, 5442, 9556, 3008, 6623, 6748, 1968, 1806, - 8890, 8613, 4077, 4885, 7962, 302, 1013, 8955, 2558, - 3711, 9681, 9593, 3121, 9869, 3177, 5155, 4286, 6364, - 2440, 3983, 2281, 3730, 740, 3324, 1186, 4197, 4183, - 3787, 1237, 618, 3728, 4305, 7249, 7969, 8150, 5074, - 336, 432, 1883, 5788, 1998, 9882, 8337, 8410, 6941, - 4025, 3080, 9432, 7607, 2625, 8188, 4417, 71, 2070, - 1396, 3989, 9431, 872, 4755, 1177, 5104, 5, 1671, - 5203, 8385, 3215, 9836, 5654, 6123, 9005, 9573, 3860, - 3248, 2894, 9896, 9991, 4949, 9, 1784, 5266, 2022, - 6095, 1517, 7556, 4941, 521, 2910, 5575, 7555, 1842, - 2590, 9377, 4113, 4907, 8259, 6209, 7070, 152, 6147, - 6757, 41, 774, 7469, 8152, 8924, 5911, 4839, 3386, - 4880, 5093, 7504, 578, 6341, 2413, 9223, 2943, 8245, - 3762, 1885, 3988, 7760, 6659, 2327, 8319, 4889, 2086, - 6582, 2999, 7225, 5620, 5397, 6166, 1192, 9054, 1944, - 8586, 7428, 4653, 1578, 8066, 4063, 6909, 8387, 9373, - 1779, 2964, 5753, 4702, 6011, 649, 4878, 6031, 3620, - 4368, 8038, 9222, 6809, 4045, 7946, 9720, 2447, 8709, - 5211, 3070, 483, 7264, 6025, 5336, 6583, 193, 3061, - 7753, 6937, 7842, 2405, 5951, 5344, 4719, 6714, 6154, - 7382, 3202, 1630, 3013, 8617, 6912, 3313, 4097, 2564, - 7916, 1190, 6815, 3429, 4803, 8583, 6446, 9482, 7512, - 5802, 3394, 6637, 5002, 8800, 9545, 1806, 7825, 5873, - 6547, 42, 3341, 4336, 2945, 8309, 6317, 8694, 7310, - 861, 6529, 9657, 2013, 7583, 4059, 8819, 8573, 3062, - 4530, 3219, 6965, 7043, 4000, 1751, 3453, 7507, 9158, - 8456, 5641, 2323, 894, 6849, 2354, 8414, 9263, 340, - 7205, 5325, 7515, 2661, 4262, 8481, 6503, 8559, 631, - 4284, 892, 3568, 2478, 5313, 8074, 1189, 3638, 1570, - 2900, 1226, 7729, 8931, 3902, 2531, 9721, 4451, 7093, - 6369, 3584, 8858, 4594, 9711, 2895, 8279, 6635, 5468, - 1625, 2878, 183, 9364, 8049, 8688, 3754, 9500, 5156, - 8141, 1233, 6270, 9152, 9609, 8833, 9689, 6804, 6879, - 8482, 5351, 2008, 5339, 174, 2631, 2752, 9101, 4250, - 1959, 1601, 8705, 8321, 4156, 2212, 1758, 6719, 390, - 8271, 8873, 6936, 6913, 6183, 6416, 6740, 9148, 7993, - 6077, 7025, 1434, 5450, 4696, 8201, 4111, 5108, 3263, - 4376, 992, 3001, 3573, 2771, 1330, 3404, 1985, 1213, - 7105, 2969, 1853, 4254, 2574, 8578, 3726, 9824, 8756, - 9790, 4725, 6571, 1525, 4226, 397, 3148, 8143, 1081, - 9319, 954, 8043, 2951, 6714, 1546, 7498, 5572, 6847, - 373, 6338, 6704, 6136, 6912, 8385, 6773, 2897, 1857, - 7837, 5293, 883, 3465, 2333, 2706, 2793, 8339, 9030, - 170, 6025, 238, 5536, 326, 7382, 5214, 9691, 1226, - 6312, 9099, 7743, 9256, 7826, 3961, 7289, 2810, 2889, - 5530, 8793, 1729, 9330, 6285, 259, 2418, 2723, 7786, - 9499, 1444, 3444, 9403, 4786, 2961, 3253, 210, 3033, - 4287, 1941, 2567, 5596, 8952, 1045, 8409, 1209, 2289, - 4009, 2954, 8195, 2521, 9908, 6497, 2176, 8245, 1878, - 2833, 2015, 1191, 8989, 9949, 1659, 7998, 6449, 3287, - 4617, 3730, 4250, 2845, 7685, 2587, 570, 2759, 5601, - 1122, 4194, 6085, 3538, 2068, 3553, 7147, 4353, 8619, - 741, 5644, 3102, 8593, 5741, 717, 5287, 9540, 3750, - 9476, 1080, 2354, 3239, 9391, 2983, 3615, 7545, 9316, - 3217, 546, 2878, 3385, 4369, 7750, 7048, 7638, 5003, - 1954, 2932, 9977, 2825, 6294, 7412, 4947, 5551, 4606, - 6527, 8202, 9874, 3630, 9332, 3043, 9614, 4366, 4415, - 5789, 5831, 6056, 8765, 7112, 3900, 5169, 8287, 5622, - 6492, 3446, 6222, 3934, 6761, 6496, 7921, 3767, 4657, - 6468, 9740, 3922, 1388, 791, 996, 5882, 8931, 
4041, - 1804, 3408, 873, 8854, 8857, 5197, 3222, 7923, 1837, - 6637, 5267, 8748, 7386, 9946, 9090, 7284, 9796, 1916, - 1676, 6417, 478, 4042, 9158, 4562, 6428, 4901, 9841, - 4156, 3811, 6129, 8100, 2410, 9667, 1059, 6471, 9086, - 7783, 436, 8399, 2026, 7724, 9135, 6834, 2224, 8238, - 4091, 7721, 9973, 6278, 3251, 4004, 2804, 6760, 1348, - 3105, 8920, 3080, 8489, 6412, 1945, 5275, 9361, 5992, - 4252, 5985, 14, 8664, 6186, 8914, 4367, 8021, 3430, - 7586, 569, 9236, 221, 6330, 387, 7069, 4231, 8633, - 1760, 5743, 1734, 3474, 4921, 5437, 5342, 1188, 3716, - 7244, 3921, 3117, 1899, 6123, 1541, 2426, 3487, 1543, - 8966, 4322, 9621, 5499, 9634, 197, 2016, 1179, 1742, - 603, 4446, 2127, 5502, 5419, 1800, 8843, 3410, 9280, - 6183, 5336, 3557, 1301, 2924, 2685, 4789, 2681, 8075, - 9074, 779, 9722, 2242, 1422, 9799, 190, 8813, 4955, - 9269, 8432, 360, 3105, 1117, 2106, 6923, 340, 3913, - 6338, 2743, 3141, 5508, 8419, 4462, 3804, 4854, 8563, - 27, 56, 4099, 9429, 7128, 3170, 1423, 1480, 4108, - 9263, 248, 6315, 5394, 2111, 8815, 9468, 1653, 6525, - 7824, 7918, 7021, 7163, 2918, 4892, 3181, 2452, 2941, - 8090, 124, 6939, 1996, 9469, 8469, 3564, 8562, 416, - 7462, 4270, 2521, 6664, 1132, 2808, 1882, 6008, 4243, - 8264, 72, 188, 8612, 5177, 8716, 575, 8437, 8572, - 272, 3041, 762, 93, 8887, 453, 2740, 5642, 1483, - 387, 8650, 4556, 7071, 2833, 2163, 2519, 8518, 2921, - 1296, 6818, 2707, 3507, 8598, 8802, 112, 2742, 5974, - 6565, 5489, 7784, 469, 1046, 8118, 2916, 1384, 4596, - 8660, 1524, 2862, 8341, 9259, 3914, 7327, 3943, 1127, - 6398, 6612, 8113, 2461, 8714, 9729, 8226, 6354, 9494, - 9498, 6160, 330, 9056, 6769, 4637, 540, 5583, 6515, - 9235, 1832, 1756, 9622, 3128, 5815, 6161, 5166, 2180, - 2553, 9617, 5271, 540, 669, 8109, 2118, 7870, 9305, - 5197, 8512, 8704, 2565, 8570, 3358, 8597, 6817, 1442, - 7822, 9580, 2286, 877, 5934, 4989, 9155, 9087, 5891, - 4023, 8446, 2014, 2362, 990, 1376, 5099, 6917, 1513, - 4755, 9921, 9633, 586, 9793, 2424, 7385, 2711, 6971, - 8476, 3945, 2785, 4359, 7402, 6094, 3054, 5997, 6264, - 4973, 9403, 224, 8540, 1749, 1440, 9039, 4450, 6560, - 7985, 2950, 8212, 6558, 2305, 2992, 3067, 7181, 688, - 557, 3139, 1085, 2535, 8708, 4783, 1637, 2401, 5400, - 8152, 9595, 2332, 5036, 9866, 6137, 9544, 4606, 3463, - 5129, 4600, 2840, 4681, 5620, 3070, 9929, 3817, 6586, - 1810, 9677, 8838, 5997, 8061, 2182, 2092, 5426, 2012, - 4695, 4335, 5207, 18, 6447, 8196, 4896, 1724, 8190, - 6513, 7255, 7873, 632, 350, 9671, 2671, 6415, 9769, - 5192, 2244, 5805, 3331, 1110, 1188, 9979, 5220, 7760, - 7927, 860, 6526, 7297, 2539, 220, 4541, 1369, 5557, - 6832, 3456, 2993, 2123, 4095, 2625, 8888, 4611, 5854, - 494, 6448, 3694, 6940, 6717, 6857, 7774, 2832, 3690, - 2621]), - values=tensor([3.0392e-01, 3.2415e-01, 6.4353e-02, 7.3274e-01, - 3.2946e-01, 1.5879e-01, 9.2352e-02, 2.3222e-01, - 2.7476e-01, 5.5662e-01, 2.0841e-01, 4.3983e-01, - 4.6932e-01, 4.7844e-01, 6.0685e-01, 9.7693e-01, - 3.5238e-01, 4.6964e-01, 7.5140e-01, 3.1413e-01, - 6.6449e-02, 8.3856e-01, 5.5909e-01, 2.9668e-01, - 6.8550e-01, 8.4874e-01, 2.7284e-01, 6.4469e-01, - 5.5500e-01, 5.1334e-01, 1.1239e-01, 3.2908e-02, - 6.3958e-01, 9.5935e-01, 2.7353e-01, 6.6292e-01, - 3.1922e-01, 6.9750e-01, 5.5048e-01, 6.8061e-01, - 4.3532e-01, 7.7149e-01, 7.8764e-01, 6.0497e-01, - 9.6987e-02, 3.7830e-01, 8.7905e-01, 7.0427e-02, - 1.6845e-01, 8.8919e-01, 8.9750e-01, 1.9794e-01, - 5.5784e-01, 5.4874e-01, 9.3778e-02, 6.5393e-01, - 3.7119e-01, 2.3349e-01, 6.0309e-01, 4.8361e-01, - 5.1730e-01, 5.3303e-01, 8.8849e-01, 7.5067e-03, - 6.5848e-01, 7.7182e-01, 
2.5538e-01, 9.6187e-01, - 3.6024e-01, 5.1765e-01, 2.1626e-02, 5.8628e-01, - 6.4821e-01, 2.7907e-01, 5.4479e-01, 9.4676e-01, - 2.6434e-01, 4.1497e-01, 1.2576e-01, 7.4574e-01, - 6.0185e-01, 6.4194e-01, 2.8693e-01, 3.0484e-01, - 6.4746e-01, 8.6023e-01, 7.7437e-01, 8.2817e-01, - 8.2911e-01, 6.5601e-01, 3.6870e-01, 3.0474e-01, - 9.7824e-01, 7.0873e-01, 7.5584e-01, 4.7182e-01, - 3.3010e-01, 1.0185e-02, 8.5565e-01, 6.5803e-01, - 2.1163e-01, 9.6445e-01, 3.5526e-01, 9.0210e-01, - 6.1257e-01, 3.5304e-01, 3.5164e-01, 8.1901e-01, - 9.3322e-01, 2.9058e-01, 5.5850e-01, 4.1175e-01, - 7.5611e-01, 5.2276e-01, 8.1503e-01, 9.5294e-01, - 5.9539e-01, 9.9769e-01, 2.3382e-01, 5.8700e-01, - 3.9790e-01, 1.0685e-01, 1.3325e-01, 1.5247e-02, - 4.9237e-01, 5.8495e-01, 6.7974e-01, 6.5205e-01, - 2.4978e-01, 1.5540e-01, 6.9466e-01, 9.8909e-01, - 6.7400e-01, 4.4045e-01, 8.0887e-01, 8.7366e-01, - 6.1470e-01, 6.6878e-01, 2.0722e-01, 5.6730e-01, - 9.6699e-01, 2.1420e-01, 5.0036e-01, 4.3882e-02, - 2.4509e-01, 9.6699e-01, 6.2712e-02, 8.0118e-01, - 7.0259e-01, 4.9349e-01, 2.3668e-01, 6.4690e-01, - 3.3297e-01, 8.1392e-01, 3.3370e-01, 3.6099e-01, - 6.9785e-01, 5.8653e-01, 2.3494e-01, 4.2606e-01, - 5.3776e-02, 2.9098e-01, 3.5190e-01, 8.5533e-01, - 3.9164e-01, 4.5423e-01, 3.2810e-02, 7.6592e-01, - 5.1452e-01, 5.8263e-01, 3.0590e-01, 4.6225e-01, - 8.9127e-01, 3.8718e-04, 3.6956e-01, 1.0716e-01, - 8.9555e-01, 5.1526e-01, 2.6090e-01, 5.4827e-01, - 7.6613e-01, 9.6451e-02, 1.6855e-01, 2.7123e-01, - 7.9078e-01, 7.0227e-01, 9.2769e-01, 3.2768e-01, - 7.4133e-02, 8.0175e-01, 8.9212e-01, 1.4596e-01, - 5.2250e-02, 7.1920e-01, 2.8915e-01, 7.1399e-01, - 3.9989e-01, 8.4612e-01, 7.6692e-01, 7.6603e-01, - 9.7955e-01, 8.5926e-01, 4.7268e-01, 3.0567e-01, - 3.5521e-01, 4.5346e-01, 7.0907e-01, 1.0000e-01, - 3.8556e-01, 7.4063e-01, 1.2875e-01, 5.0308e-01, - 6.8759e-01, 5.4430e-01, 9.2335e-01, 5.9479e-01, - 3.5215e-01, 2.5979e-01, 3.6195e-01, 1.1209e-01, - 6.7558e-01, 3.6084e-01, 2.9372e-01, 7.9418e-02, - 8.0128e-01, 5.5807e-01, 6.3595e-01, 3.1372e-01, - 2.3848e-01, 1.4012e-02, 2.1033e-01, 5.1052e-01, - 6.6708e-01, 3.8104e-01, 6.2857e-01, 2.5671e-01, - 6.7301e-01, 6.4080e-01, 7.3818e-01, 6.5250e-01, - 7.2748e-01, 2.8088e-01, 4.3795e-01, 7.6139e-01, - 3.2002e-01, 1.0962e-01, 1.1736e-01, 1.1390e-01, - 8.8693e-01, 5.6804e-01, 5.4451e-01, 4.7759e-01, - 9.7875e-02, 8.1348e-02, 1.2472e-01, 6.8343e-01, - 7.6072e-01, 4.3782e-01, 9.4758e-01, 9.8629e-01, - 1.3619e-01, 8.9717e-01, 8.0717e-01, 5.1829e-01, - 6.6901e-01, 7.6695e-01, 3.5278e-01, 7.6203e-02, - 8.1739e-01, 2.6432e-02, 7.8358e-02, 7.6105e-01, - 6.0698e-01, 9.7534e-01, 1.0290e-01, 6.2350e-03, - 3.6916e-02, 9.6921e-01, 9.2309e-01, 3.6705e-01, - 4.1131e-01, 7.1992e-01, 4.8131e-01, 3.8551e-02, - 8.7653e-01, 4.2984e-01, 7.4999e-01, 5.9486e-01, - 2.0777e-01, 2.4797e-02, 7.2719e-01, 8.6476e-01, - 9.2557e-02, 6.6099e-01, 8.8421e-01, 9.9344e-01, - 5.9213e-01, 8.8296e-01, 4.4506e-01, 6.1979e-01, - 2.4620e-01, 6.4475e-01, 9.4222e-01, 4.3135e-01, - 6.9601e-01, 7.7456e-01, 9.3620e-01, 4.9096e-01, - 7.2207e-01, 6.4022e-01, 5.2574e-01, 8.2484e-01, - 5.7041e-01, 6.9043e-01, 2.4631e-02, 9.5777e-02, - 5.9238e-01, 3.0126e-01, 9.4882e-01, 3.7736e-01, - 4.4950e-01, 2.8003e-01, 1.1028e-01, 4.2071e-01, - 9.9009e-01, 5.0994e-01, 4.9474e-01, 7.2898e-01, - 4.3563e-01, 2.0331e-01, 6.0930e-01, 3.4882e-01, - 2.9900e-01, 6.1199e-01, 2.0308e-01, 1.3459e-01, - 5.6701e-01, 4.8437e-01, 6.0606e-01, 4.1922e-01, - 4.5665e-01, 4.1795e-02, 2.1442e-01, 8.5784e-03, - 1.2383e-01, 6.8451e-01, 8.2903e-01, 9.3818e-01, - 8.6183e-01, 9.2220e-01, 
1.2146e-02, 9.4702e-01, - 7.2689e-01, 7.0124e-01, 5.2058e-01, 7.6183e-01, - 2.7320e-01, 6.4457e-01, 1.3569e-01, 3.2953e-01, - 1.9373e-01, 1.1614e-01, 6.8419e-01, 1.1889e-01, - 1.5054e-01, 6.8449e-01, 2.2163e-02, 3.3239e-01, - 5.3542e-01, 4.6539e-01, 5.7549e-01, 6.0063e-01, - 7.3725e-01, 7.7272e-01, 7.1549e-01, 8.3333e-02, - 2.5724e-01, 4.8954e-01, 5.4990e-01, 5.5515e-01, - 6.6187e-01, 1.4302e-01, 2.6241e-01, 1.3082e-01, - 1.0201e-01, 4.7238e-01, 8.0345e-01, 3.5296e-01, - 4.3307e-02, 5.1890e-01, 8.2623e-01, 7.8766e-01, - 9.6443e-01, 5.7328e-01, 9.6623e-01, 5.1756e-01, - 8.4229e-01, 8.6955e-01, 5.2500e-01, 2.4364e-01, - 4.6531e-04, 7.7566e-01, 2.3278e-01, 5.0290e-01, - 1.2741e-01, 7.4793e-01, 6.6397e-01, 8.4389e-01, - 7.1603e-01, 1.3434e-01, 9.1897e-01, 1.6605e-01, - 3.1924e-01, 8.7903e-01, 1.4216e-02, 1.4696e-01, - 7.2418e-01, 1.2448e-01, 1.1574e-01, 8.0022e-01, - 3.2231e-01, 5.5328e-01, 2.4152e-01, 4.0399e-01, - 1.2053e-01, 1.5238e-01, 5.0061e-01, 8.8357e-01, - 2.6656e-01, 3.4203e-01, 5.0313e-01, 9.2105e-01, - 5.4412e-01, 7.6757e-01, 1.4392e-01, 9.2549e-01, - 4.4630e-02, 1.0189e-01, 2.1381e-01, 6.3179e-01, - 1.4210e-01, 6.4822e-01, 4.4733e-02, 7.0778e-02, - 5.3670e-01, 6.7468e-01, 9.6249e-02, 1.4701e-01, - 6.7904e-01, 6.2977e-01, 4.8222e-01, 5.6410e-01, - 6.6069e-01, 8.2291e-01, 2.7086e-01, 1.3385e-02, - 3.8370e-02, 7.6000e-01, 1.2836e-01, 4.2271e-01, - 3.7971e-01, 4.0221e-01, 1.9058e-01, 1.3246e-02, - 9.7472e-01, 8.1468e-01, 9.5465e-01, 5.0494e-01, - 4.8024e-01, 8.6375e-01, 2.1211e-01, 7.4747e-01, - 8.8496e-01, 2.3040e-01, 2.1539e-01, 6.6296e-01, - 4.6006e-01, 7.6222e-01, 6.9519e-01, 2.5685e-01, - 1.2762e-01, 4.8623e-01, 8.5541e-01, 1.9816e-01, - 6.4360e-01, 5.6243e-01, 6.0436e-01, 9.6360e-01, - 7.3027e-01, 8.0053e-01, 9.3960e-02, 8.9196e-01, - 6.5344e-01, 5.7618e-01, 3.8071e-02, 9.8561e-01, - 3.9902e-01, 2.0152e-02, 8.4945e-01, 1.0773e-01, - 5.1144e-01, 7.1844e-01, 8.5285e-02, 4.5100e-01, - 1.5098e-01, 2.6810e-01, 5.1885e-02, 6.6289e-01, - 5.9605e-01, 5.1952e-01, 3.2494e-01, 4.2823e-01, - 8.5842e-01, 6.0189e-01, 2.0347e-01, 9.8130e-01, - 1.8163e-01, 2.5564e-02, 1.8724e-02, 5.1201e-01, - 4.5720e-01, 7.9371e-01, 4.8374e-01, 4.3205e-01, - 7.7302e-01, 7.5530e-01, 9.7319e-01, 2.8166e-01, - 6.8553e-01, 9.0165e-01, 1.3726e-01, 6.1107e-01, - 9.6470e-01, 9.3457e-01, 1.6750e-01, 5.7026e-02, - 9.7853e-01, 4.8808e-01, 5.2986e-01, 4.4763e-01, - 2.1220e-01, 1.8968e-01, 6.6682e-01, 2.7978e-01, - 6.2518e-02, 1.7188e-01, 6.6203e-01, 9.5117e-01, - 2.6765e-01, 7.5161e-01, 4.2205e-01, 8.3213e-01, - 3.4401e-02, 5.5833e-01, 2.2728e-01, 8.6978e-01, - 5.0487e-01, 6.8315e-01, 8.8845e-01, 3.7450e-01, - 4.7156e-01, 2.0297e-01, 5.9919e-01, 8.7437e-01, - 1.8376e-01, 3.8162e-01, 3.8759e-01, 2.8332e-01, - 7.3703e-01, 8.2598e-01, 5.7822e-01, 8.1318e-01, - 6.0403e-01, 3.0036e-01, 8.6647e-01, 6.3278e-01, - 3.3144e-02, 8.3655e-02, 2.0403e-01, 6.3295e-01, - 5.5983e-01, 1.7687e-01, 7.9099e-01, 6.5995e-01, - 1.6364e-01, 8.0576e-01, 5.9218e-01, 3.7825e-01, - 7.3625e-01, 6.8133e-01, 1.5251e-01, 3.4541e-01, - 4.3200e-01, 4.7561e-01, 6.5115e-01, 4.9609e-01, - 4.9830e-01, 1.7116e-01, 8.1782e-01, 5.6982e-02, - 7.8582e-01, 4.1358e-01, 6.8337e-01, 8.8174e-01, - 2.7893e-02, 6.1252e-01, 6.8428e-01, 4.7886e-04, - 4.5579e-01, 2.6597e-01, 8.9291e-01, 4.7913e-01, - 5.1187e-02, 9.1252e-01, 5.2623e-01, 9.2323e-01, - 2.5712e-02, 7.4165e-01, 8.7147e-01, 1.1067e-01, - 3.3337e-01, 6.6053e-01, 8.1395e-02, 3.0634e-01, - 5.9633e-01, 6.1441e-01, 5.2337e-01, 9.4467e-01, - 1.6455e-01, 6.0027e-01, 9.9735e-01, 5.4964e-01, - 4.2544e-01, 5.2938e-01, 
2.7487e-01, 5.2740e-01, - 7.2458e-01, 2.1872e-01, 2.3165e-02, 8.5565e-01, - 3.4327e-02, 8.8372e-01, 3.6038e-01, 5.2099e-01, - 7.8544e-01, 8.5584e-01, 2.0450e-01, 7.0439e-01, - 1.8946e-01, 5.2352e-01, 1.3840e-01, 7.9757e-01, - 6.4771e-01, 4.2226e-01, 3.3701e-01, 5.7625e-01, - 7.5119e-01, 6.0090e-01, 5.5169e-01, 2.6335e-01, - 2.5175e-01, 1.3511e-01, 7.5301e-01, 3.6857e-01, - 2.4815e-01, 9.7900e-01, 5.3734e-01, 2.5181e-01, - 9.6212e-01, 3.0052e-01, 2.3817e-01, 5.4727e-01, - 7.4985e-01, 4.0814e-01, 2.3324e-01, 9.0258e-01, - 9.2453e-01, 5.1688e-01, 6.8008e-01, 8.4417e-01, - 2.9048e-01, 3.5275e-01, 9.1446e-01, 3.1166e-01, - 9.3812e-02, 3.2061e-01, 3.8420e-01, 8.3599e-01, - 2.2761e-01, 7.4261e-01, 2.4781e-01, 8.3249e-01, - 6.2992e-01, 3.7271e-01, 6.4205e-01, 8.0600e-01, - 5.2952e-01, 8.6497e-03, 2.0012e-01, 6.7808e-01, - 7.6509e-01, 3.6618e-01, 3.6418e-01, 7.0343e-01, - 6.0658e-01, 1.8231e-01, 4.0747e-02, 7.2457e-01, - 7.5662e-01, 3.1029e-02, 1.9408e-01, 5.8483e-03, - 2.5497e-01, 2.7861e-01, 6.7215e-01, 9.8377e-01, - 3.9461e-01, 5.7729e-01, 7.4282e-01, 7.8487e-01, - 6.6966e-01, 8.0111e-01, 2.7436e-01, 9.1071e-01, - 7.5479e-01, 9.6961e-02, 5.9253e-01, 4.3539e-01, - 8.1993e-02, 1.4436e-01, 5.4192e-02, 4.5414e-01, - 2.2083e-01, 6.8883e-01, 3.0813e-03, 5.7122e-01, - 6.4824e-01, 6.1088e-01, 9.9103e-01, 3.0128e-01, - 2.5519e-01, 5.5098e-01, 7.7501e-01, 8.2747e-01, - 2.7382e-01, 1.6131e-01, 3.8473e-01, 1.8858e-01, - 7.6480e-01, 5.7925e-01, 3.7285e-01, 4.1565e-01, - 6.6174e-01, 7.9534e-01, 3.9512e-01, 3.1463e-02, - 2.2917e-01, 3.0596e-01, 2.7861e-03, 2.0807e-01, - 3.7628e-01, 6.8980e-01, 2.7128e-02, 6.5713e-01, - 4.9102e-01, 7.0889e-01, 4.5564e-02, 9.8297e-01, - 6.8338e-01, 2.7678e-03, 5.2548e-01, 9.3723e-01, - 6.9210e-01, 8.4505e-01, 5.3302e-01, 5.5013e-01, - 2.2889e-02, 8.7608e-02, 1.4028e-01, 4.6686e-01, - 6.4919e-01, 9.9436e-01, 7.3888e-01, 4.1538e-01, - 9.4718e-01, 9.8412e-01, 4.2306e-01, 1.3232e-01, - 6.8812e-01, 2.0943e-01, 7.5932e-01, 1.2380e-01, - 6.2774e-01, 4.9894e-01, 3.0889e-01, 1.1004e-01, - 7.4553e-01, 8.5544e-01, 2.0482e-01, 7.2767e-01, - 2.0936e-02, 5.0479e-01, 5.7524e-01, 5.4774e-01, - 8.4315e-01, 5.2362e-01, 2.8163e-01, 5.1186e-01, - 8.0800e-01, 7.9337e-01, 3.6713e-01, 6.2638e-01, - 6.5095e-01, 4.0210e-01, 6.3034e-01, 5.3272e-01, - 3.4358e-01, 5.9924e-01, 1.9674e-01, 6.4351e-01, - 8.5855e-02, 2.3977e-01, 1.2607e-01, 7.8134e-01, - 9.6526e-02, 6.0975e-01, 4.6583e-01, 1.7783e-01, - 7.3707e-02, 8.4537e-01, 7.9093e-01, 4.1040e-02, - 2.5473e-01, 3.9781e-01, 1.3302e-01, 4.8382e-01, - 6.0215e-02, 7.9066e-01, 9.9902e-01, 9.0029e-01, - 5.6279e-01, 9.9788e-01, 7.8531e-01, 2.8619e-01, - 6.3489e-01, 1.5229e-01, 6.9891e-01, 7.3891e-01, - 9.9196e-01, 1.2942e-02, 1.0882e-01, 1.6395e-01, - 9.7799e-01, 9.8840e-01, 9.3287e-01, 9.7669e-01, - 9.3534e-01, 3.8769e-01, 5.8205e-01, 7.1933e-01, - 8.1893e-01, 2.3647e-02, 7.8599e-01, 3.5138e-01, - 8.8097e-01, 5.9339e-01, 8.1908e-01, 2.1259e-01, - 1.7916e-01, 8.3775e-01, 4.2342e-02, 1.7547e-01, - 4.9728e-01, 7.5815e-01, 6.1732e-01, 7.7818e-01, - 3.0544e-01, 7.8172e-01, 1.0312e-01, 9.2880e-01, - 4.3926e-01, 8.6426e-02, 2.2251e-01, 4.2217e-01, - 8.8595e-01, 8.2722e-02, 7.3624e-01, 5.9382e-02, - 4.9932e-01, 1.4760e-01, 4.7458e-02, 6.0080e-01, - 1.4431e-01, 2.9584e-01, 7.1655e-01, 8.9494e-01, - 7.0292e-01, 6.4015e-01, 3.6954e-01, 4.8047e-01, - 2.1881e-01, 1.9305e-01, 5.3317e-01, 1.5076e-01, - 4.3873e-01, 9.4135e-01, 1.0942e-01, 8.7236e-01, - 5.3086e-01, 1.7910e-01, 2.4322e-01, 2.2052e-02, - 4.1013e-01, 1.4191e-01, 3.4491e-01, 5.5417e-01, - 3.3288e-01, 3.1302e-01, 
2.3566e-01, 1.8459e-01, - 3.6159e-01, 4.8536e-01, 9.0908e-01, 6.3893e-02, - 7.7289e-02, 8.0569e-01, 8.2725e-01, 8.5490e-01, - 4.1791e-02, 4.0255e-01, 2.0672e-01, 9.7864e-01, - 3.6149e-01, 8.5159e-01, 9.6188e-01, 4.1123e-01, - 2.3421e-01, 6.1193e-01, 2.1097e-01, 2.7882e-01, - 5.5135e-01, 8.0230e-01, 3.1907e-01, 1.8198e-01, - 3.6314e-02, 6.2212e-01, 5.1175e-01, 6.7357e-01, - 3.7007e-02, 1.2024e-01, 9.1569e-01, 8.0422e-01, - 4.8728e-01, 3.3584e-01, 5.0960e-01, 3.6273e-01, - 6.1914e-01, 3.8996e-01, 5.9564e-01, 4.2090e-01, - 8.0087e-01, 6.1589e-01, 5.8173e-01, 6.2723e-01, - 6.9793e-01, 1.5997e-01, 1.5756e-01, 9.2695e-02, - 7.7600e-01, 5.7205e-01, 5.7808e-01, 5.9198e-02, - 8.2480e-01, 2.8009e-01, 8.4499e-02, 3.5912e-01, - 7.7211e-01, 1.0716e-01, 7.1486e-01, 5.0595e-01, - 6.5373e-02, 7.7498e-01, 9.1086e-03, 4.0288e-01, - 6.5420e-01, 2.4228e-01, 8.2790e-01, 1.6179e-02, - 4.5517e-01, 2.3154e-01, 4.0528e-01, 9.9680e-01, - 1.9648e-01, 8.9156e-01, 4.9073e-01, 6.1991e-01, - 5.2047e-01, 2.0881e-01, 9.4192e-01, 4.9260e-01]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([8945, 3396, 1179, 7845, 5353, 7539, 7036, 1743, 4590, + 9706, 9114, 1883, 3360, 9714, 6892, 5151, 3383, 1404, + 1831, 4276, 9165, 2466, 2237, 3707, 3545, 7291, 2878, + 2495, 9824, 2602, 3234, 6210, 803, 3676, 5976, 3272, + 3806, 5164, 6743, 7171, 162, 1405, 4701, 8601, 8019, + 4862, 3797, 1907, 8263, 3320, 9466, 335, 9390, 4209, + 5095, 2772, 7268, 8726, 7661, 94, 7630, 1233, 7006, + 2204, 4233, 3286, 900, 2884, 1201, 4167, 5378, 7309, + 5080, 5779, 9724, 2453, 3090, 5707, 7810, 9441, 3585, + 8544, 683, 5841, 5444, 3816, 3300, 548, 3220, 6182, + 4001, 3210, 2638, 41, 2488, 34, 5895, 1929, 6262, + 6745, 1441, 2060, 7662, 3205, 9914, 6565, 8901, 9926, + 7630, 6163, 8556, 4895, 7090, 748, 9427, 3078, 5873, + 2297, 1006, 7916, 2596, 536, 3213, 8594, 8061, 5619, + 1607, 8351, 4783, 649, 1089, 8992, 6973, 2739, 8357, + 9645, 6973, 9438, 3529, 4166, 2361, 6581, 9866, 3387, + 3080, 930, 9748, 678, 7194, 9819, 6374, 6338, 8539, + 9749, 2131, 8093, 4606, 6550, 3719, 7613, 1304, 5282, + 7044, 8559, 9384, 8257, 1763, 473, 1385, 7934, 5409, + 336, 7981, 2265, 3340, 2900, 8402, 9657, 9519, 3615, + 1302, 4040, 4426, 153, 4893, 7466, 737, 7283, 1137, + 3184, 1607, 2087, 2802, 8646, 7649, 7107, 3563, 7564, + 9504, 8510, 6434, 6665, 4775, 7488, 6418, 2058, 1819, + 4396, 1650, 5111, 2215, 6579, 838, 4639, 7271, 833, + 1898, 1581, 3894, 480, 2937, 2619, 6452, 3750, 8106, + 8345, 4363, 781, 3703, 8349, 2403, 9207, 7354, 662, + 5670, 9094, 7076, 7476, 9641, 4009, 628, 354, 4864, + 6408, 6881, 2199, 3780, 6648, 3256, 5259, 3400, 7677, + 9765, 3406, 8375, 4626, 7974, 2370, 3438, 5886, 6785, + 8785, 5325, 2444, 5731, 3261, 7251, 3966, 630, 4711, + 5083, 8173, 9533, 9893, 5512, 9795, 2261, 9003, 1575, + 7838, 5221, 3905, 3659, 4077, 115, 935, 6215, 8429, + 3917, 5204, 311, 2973, 3219, 1577, 9389, 2991, 5510, + 2461, 9770, 5136, 3343, 8917, 9218, 1311, 8986, 4405, + 8556, 3238, 2558, 5250, 618, 3389, 8858, 45, 7363, + 4818, 7081, 3737, 6906, 9478, 6432, 5077, 8225, 4404, + 7717, 5750, 7191, 8819, 8189, 5729, 5250, 7384, 6893, + 4600, 1582, 3815, 4897, 3942, 2070, 2436, 6640, 1420, + 3679, 9245, 958, 3679, 9640, 3274, 3102, 8666, 615, + 5650, 6557, 5747, 3484, 246, 8057, 2329, 6318, 6418, + 2071, 3817, 6138, 6747, 7407, 1450, 369, 5047, 5515, + 7855, 3161, 419, 3580, 8434, 3391, 1876, 8301, 7626, + 7469, 4086, 1576, 9404, 6029, 5790, 4643, 9099, 6641, + 269, 6145, 652, 313, 2335, 2792, 2914, 7328, 4203, + 7886, 8148, 3406, 
4180, 9650, 3634, 648, 9181, 62, + 5149, 5973, 9105, 5497, 8296, 5420, 4424, 9143, 1523, + 6941, 1142, 4493, 6351, 446, 577, 1629, 245, 7135, + 6160, 9525, 6084, 7212, 8425, 3424, 6520, 1322, 4124, + 5324, 2697, 5850, 3189, 2684, 2839, 3549, 3012, 7923, + 8589, 8612, 9075, 6590, 3499, 7946, 3141, 6391, 8953, + 6142, 2419, 1790, 7146, 6236, 9268, 2896, 1689, 6613, + 4532, 4584, 1325, 7394, 8284, 4463, 3642, 2282, 8414, + 9423, 138, 3947, 306, 7855, 9163, 6896, 8979, 1724, + 5170, 51, 4016, 8013, 3846, 2422, 3223, 4421, 2375, + 1717, 7745, 5235, 1263, 3985, 5971, 5104, 3589, 3293, + 4349, 393, 2092, 4442, 6577, 7922, 9368, 4648, 7995, + 5075, 4429, 7398, 5908, 2671, 2634, 8764, 4132, 6450, + 9438, 5182, 8656, 1204, 3267, 2173, 3521, 1424, 701, + 1084, 8974, 3663, 4399, 1519, 8751, 4361, 5549, 8815, + 5968, 3693, 9898, 8818, 4693, 9186, 3877, 5588, 6992, + 7704, 4079, 448, 3632, 9137, 7667, 4424, 6893, 2724, + 9315, 4019, 9920, 4465, 5349, 1515, 774, 8824, 1637, + 9230, 7482, 6454, 4758, 3926, 7862, 3487, 4252, 9245, + 7419, 8902, 3306, 9144, 3069, 8624, 7027, 9844, 390, + 3671, 1935, 5147, 6458, 2901, 4100, 1627, 8093, 7742, + 1139, 9677, 8468, 3740, 7956, 615, 7163, 5722, 8254, + 2208, 1056, 9928, 697, 1166, 2375, 5493, 122, 391, + 9045, 4689, 515, 1729, 4724, 5766, 9306, 1196, 1036, + 4607, 4064, 5704, 3279, 6458, 2414, 7135, 1248, 5592, + 3740, 6862, 3502, 7351, 7215, 8844, 7880, 9911, 8570, + 1484, 5691, 5680, 4882, 836, 9572, 4343, 7630, 2724, + 9110, 7775, 1871, 8035, 462, 3852, 942, 921, 6491, + 1116, 6752, 5778, 1413, 5915, 751, 1950, 9072, 5886, + 1001, 1725, 9574, 52, 9517, 5502, 8263, 483, 297, + 7916, 5377, 9460, 7305, 23, 7869, 8971, 3087, 4611, + 5504, 2405, 5239, 9022, 7221, 1354, 2985, 7954, 3823, + 4601, 4002, 2438, 4562, 3420, 6775, 6456, 7973, 5475, + 7634, 7316, 8831, 1826, 7786, 7607, 6474, 270, 296, + 184, 3850, 4251, 4668, 600, 8846, 529, 337, 2584, + 8140, 3615, 9419, 5670, 8148, 3912, 4023, 8122, 3647, + 4021, 7298, 6265, 8251, 3226, 400, 5247, 7606, 4710, + 6782, 4412, 2855, 9924, 885, 3817, 2006, 8730, 9904, + 4995, 1121, 6187, 130, 7650, 9511, 7319, 6961, 1773, + 6534, 817, 3967, 7690, 3866, 2599, 2767, 6660, 1106, + 7926, 4942, 6471, 9767, 3357, 7909, 3767, 8210, 6330, + 6343, 2348, 5182, 8276, 9594, 741, 8381, 8691, 7988, + 3403, 2290, 4713, 9889, 5866, 4290, 9536, 7655, 2908, + 3996, 2613, 2321, 2101, 5177, 5555, 4157, 6579, 8198, + 4437, 3123, 959, 2322, 3873, 5229, 8315, 2125, 1068, + 3339, 4442, 5920, 1106, 5605, 8949, 804, 1779, 4985, + 8824, 7006, 1504, 3648, 8602, 2160, 6342, 9535, 8002, + 4087, 8531, 3998, 4073, 2272, 9371, 1582, 6595, 2758, + 8018, 7889, 2888, 2724, 166, 1025, 5415, 5887, 5962, + 4693, 9500, 5479, 7164, 8949, 9300, 2490, 8900, 7784, + 5112, 9529, 2586, 8119, 1217, 5680, 7692, 1414, 6045, + 1897, 1410, 6232, 1224, 8075, 909, 8079, 6297, 5594, + 6563, 447, 7639, 92, 6240, 5816, 3932, 6779, 1422, + 5413, 5034, 8673, 6215, 3050, 7602, 4865, 8751, 2686, + 3397, 4590, 1441, 1218, 9330, 9843, 9750, 5252, 9559, + 8730, 9110, 635, 7887, 7096, 8550, 7459, 3334, 1297, + 8605, 5687, 2503, 6275, 4035, 8191, 7143, 7275, 6676, + 7971, 5893, 6753, 5613, 808, 8632, 1964, 673, 127, + 8324, 8653, 4841, 8589, 3857, 2582, 6059, 9639, 5226, + 6110, 3373, 9853, 6381, 5046, 6036, 7589, 5296, 5514, + 9737, 8932, 1243, 880, 1655, 7250, 3620, 3051, 4445, + 5601, 4526, 3361, 9690, 156, 6080, 280, 8730, 6431, + 7500, 5972, 7423, 7931, 5347, 1208, 9247, 8858, 7800, + 3207, 5794, 4373, 9912, 8788, 4783, 9650, 391, 8406, + 3597, 4853, 3749, 8966, 3381, 5873, 8659, 
4038, 5007, + 4925, 6124, 4241, 7328, 1463, 4456, 4979, 5642, 5628, + 2700, 4136, 1508, 6833, 9913, 4703, 1181, 9942, 1386, + 2626]), + values=tensor([0.6328, 0.5112, 0.4765, 0.0665, 0.1734, 0.0665, 0.6185, + 0.0181, 0.0725, 0.2532, 0.9599, 0.4060, 0.4487, 0.3971, + 0.5036, 0.0663, 0.7667, 0.4567, 0.6015, 0.0441, 0.0537, + 0.2971, 0.1276, 0.2201, 0.4696, 0.3107, 0.3630, 0.0915, + 0.5682, 0.1732, 0.1589, 0.6673, 0.7066, 0.3143, 0.9178, + 0.5172, 0.0354, 0.4886, 0.0057, 0.8793, 0.8024, 0.3323, + 0.7622, 0.8288, 0.9792, 0.6146, 0.0245, 0.4363, 0.1970, + 0.3869, 0.0584, 0.1673, 0.4043, 0.4978, 0.8504, 0.9699, + 0.0473, 0.4292, 0.5328, 0.9792, 0.7458, 0.9252, 0.8316, + 0.4866, 0.4309, 0.8744, 0.7165, 0.0379, 0.4454, 0.4735, + 0.2305, 0.7674, 0.9705, 0.3678, 0.7813, 0.7238, 0.6940, + 0.9317, 0.2240, 0.1941, 0.3116, 0.6138, 0.6246, 0.2994, + 0.3718, 0.8037, 0.4721, 0.2912, 0.7832, 0.7698, 0.1876, + 0.6541, 0.0121, 0.1183, 0.7660, 0.9302, 0.7307, 0.6241, + 0.0980, 0.2478, 0.9494, 0.2980, 0.4179, 0.5405, 0.3656, + 0.5253, 0.9366, 0.2741, 0.3689, 0.1650, 0.1150, 0.2357, + 0.2589, 0.8658, 0.3718, 0.4559, 0.0607, 0.6854, 0.5740, + 0.0099, 0.0842, 0.2290, 0.9778, 0.4741, 0.0309, 0.3407, + 0.9693, 0.8160, 0.6529, 0.3165, 0.7635, 0.6006, 0.3346, + 0.0092, 0.1731, 0.2562, 0.2849, 0.3168, 0.4420, 0.8098, + 0.5916, 0.5939, 0.1808, 0.0213, 0.0030, 0.3273, 0.2527, + 0.9427, 0.7138, 0.2956, 0.9898, 0.6605, 0.7670, 0.3577, + 0.3575, 0.6873, 0.0220, 0.4762, 0.1607, 0.6636, 0.2404, + 0.5640, 0.3825, 0.8669, 0.9936, 0.4437, 0.3622, 0.5991, + 0.6010, 0.1390, 0.5097, 0.6670, 0.8363, 0.4078, 0.4457, + 0.6784, 0.8335, 0.6551, 0.5699, 0.2070, 0.2892, 0.8107, + 0.3392, 0.7591, 0.1556, 0.7279, 0.6536, 0.0812, 0.1948, + 0.2981, 0.7056, 0.7521, 0.1516, 0.4228, 0.0576, 0.0476, + 0.3420, 0.2920, 0.6385, 0.3484, 0.2147, 0.0377, 0.0600, + 0.7111, 0.7770, 0.5438, 0.7238, 0.1643, 0.7321, 0.0557, + 0.8672, 0.0722, 0.6589, 0.3135, 0.3991, 0.7778, 0.9018, + 0.8278, 0.8837, 0.6604, 0.1719, 0.2527, 0.1772, 0.7083, + 0.6464, 0.6345, 0.4008, 0.8240, 0.4263, 0.1009, 0.1581, + 0.1175, 0.1210, 0.8371, 0.7669, 0.0035, 0.8185, 0.0095, + 0.8614, 0.7936, 0.8270, 0.5196, 0.0669, 0.9742, 0.3211, + 0.1091, 0.7255, 0.1908, 0.7702, 0.6533, 0.8657, 0.4928, + 0.2724, 0.3566, 0.6408, 0.7526, 0.7137, 0.9179, 0.0186, + 0.5034, 0.7474, 0.8220, 0.5436, 0.7154, 0.5585, 0.6583, + 0.7027, 0.4150, 0.8241, 0.7987, 0.4703, 0.5787, 0.2111, + 0.3140, 0.4002, 0.0314, 0.7234, 0.3993, 0.6999, 0.7431, + 0.3273, 0.5426, 0.8777, 0.5686, 0.1940, 0.5264, 0.0654, + 0.0803, 0.1801, 0.4317, 0.1449, 0.9288, 0.3382, 0.0782, + 0.2906, 0.1907, 0.5908, 0.2320, 0.7996, 0.3326, 0.9073, + 0.6262, 0.1248, 0.9796, 0.2581, 0.7680, 0.6286, 0.8856, + 0.3858, 0.7435, 0.3491, 0.1581, 0.2008, 0.6343, 0.9670, + 0.0951, 0.6572, 0.5074, 0.2256, 0.4076, 0.7261, 0.2860, + 0.2840, 0.8543, 0.5327, 0.0708, 0.8227, 0.7674, 0.1378, + 0.2275, 0.5249, 0.7149, 0.9475, 0.9988, 0.3974, 0.2904, + 0.4868, 0.8516, 0.6448, 0.3137, 0.0093, 0.8414, 0.6284, + 0.6688, 0.5045, 0.1113, 0.0319, 0.5483, 0.9009, 0.3181, + 0.6169, 0.7072, 0.6471, 0.8162, 0.4043, 0.9344, 0.3371, + 0.7476, 0.0758, 0.5203, 0.2836, 0.4781, 0.8972, 0.8509, + 0.8308, 0.0693, 0.1355, 0.0098, 0.2233, 0.8841, 0.4939, + 0.4893, 0.8577, 0.6130, 0.3307, 0.8263, 0.9332, 0.9152, + 0.2765, 0.3176, 0.4801, 0.5128, 0.1151, 0.2896, 0.8491, + 0.1399, 0.6841, 0.7791, 0.8495, 0.7517, 0.3322, 0.3399, + 0.2090, 0.4326, 0.2931, 0.3882, 0.0192, 0.5096, 0.7396, + 0.3615, 0.3835, 0.9249, 0.7264, 0.9818, 0.7046, 0.0718, + 0.0299, 0.1397, 0.4426, 0.7572, 0.0545, 
0.7970, 0.8139, + 0.9342, 0.3447, 0.9270, 0.8988, 0.4144, 0.1455, 0.6063, + 0.9656, 0.3461, 0.0662, 0.1588, 0.4889, 0.0435, 0.0505, + 0.2310, 0.2304, 0.6950, 0.8340, 0.2582, 0.1054, 0.5458, + 0.3408, 0.5412, 0.0913, 0.1004, 0.8661, 0.6202, 0.2592, + 0.6923, 0.3687, 0.1088, 0.2031, 0.7085, 0.3896, 0.9273, + 0.0631, 0.8597, 0.5085, 0.4210, 0.5198, 0.8408, 0.4871, + 0.1356, 0.4654, 0.5719, 0.7761, 0.8139, 0.3598, 0.6241, + 0.9513, 0.2844, 0.7243, 0.3186, 0.1385, 0.3457, 0.3843, + 0.4150, 0.0789, 0.9208, 0.8685, 0.3678, 0.4569, 0.9747, + 0.1423, 0.6800, 0.9024, 0.0169, 0.9981, 0.8256, 0.6974, + 0.5621, 0.1760, 0.7437, 0.7608, 0.1172, 0.5954, 0.7145, + 0.9756, 0.0790, 0.9835, 0.8334, 0.8632, 0.1285, 0.9257, + 0.9551, 0.0982, 0.4373, 0.2748, 0.7133, 0.3121, 0.6542, + 0.1952, 0.0849, 0.3599, 0.7623, 0.7153, 0.5821, 0.6074, + 0.8762, 0.1444, 0.5110, 0.9378, 0.0157, 0.2010, 0.4720, + 0.3976, 0.7842, 0.6920, 0.2721, 0.8751, 0.4100, 0.4580, + 0.2104, 0.9757, 0.8016, 0.9364, 0.5061, 0.4950, 0.4321, + 0.9744, 0.7509, 0.0778, 0.3566, 0.0305, 0.6194, 0.3360, + 0.8082, 0.3227, 0.1973, 0.6016, 0.6943, 0.3027, 0.7546, + 0.6573, 0.5343, 0.5497, 0.8833, 0.9913, 0.9660, 0.3089, + 0.1248, 0.2260, 0.4299, 0.8327, 0.6883, 0.2237, 0.0364, + 0.1167, 0.0909, 0.8445, 0.4475, 0.8801, 0.3579, 0.7639, + 0.8627, 0.6154, 0.9509, 0.9942, 0.2111, 0.7502, 0.0343, + 0.7545, 0.6893, 0.8096, 0.3698, 0.7535, 0.6656, 0.5620, + 0.8632, 0.6307, 0.4399, 0.3365, 0.1523, 0.7446, 0.3731, + 0.5685, 0.2299, 0.4967, 0.1696, 0.0488, 0.5444, 0.6499, + 0.5061, 0.7582, 0.4351, 0.0459, 0.5148, 0.7255, 0.7221, + 0.5147, 0.3346, 0.8222, 0.9402, 0.5147, 0.1994, 0.8894, + 0.6587, 0.4491, 0.8322, 0.4019, 0.7869, 0.3341, 0.9335, + 0.3526, 0.5094, 0.8154, 0.2532, 0.9317, 0.0792, 0.4247, + 0.7418, 0.7936, 0.8473, 0.6363, 0.2608, 0.9080, 0.2553, + 0.4912, 0.2318, 0.9993, 0.5667, 0.3724, 0.0952, 0.5794, + 0.3214, 0.9709, 0.5244, 0.8426, 0.9671, 0.9749, 0.9732, + 0.3637, 0.5072, 0.4081, 0.6560, 0.9479, 0.2192, 0.9666, + 0.2576, 0.6222, 0.8978, 0.5046, 0.8876, 0.4772, 0.2118, + 0.9053, 0.6963, 0.6889, 0.0405, 0.5808, 0.5507, 0.5861, + 0.0583, 0.2552, 0.0318, 0.7752, 0.2871, 0.6239, 0.9966, + 0.6469, 0.5840, 0.1134, 0.1594, 0.6911, 0.9820, 0.5833, + 0.6494, 0.1721, 0.2022, 0.8434, 0.0943, 0.6635, 0.2434, + 0.3363, 0.1825, 0.9443, 0.1938, 0.9771, 0.0202, 0.3630, + 0.2508, 0.6021, 0.5492, 0.0491, 0.3316, 0.7721, 0.3799, + 0.8237, 0.3240, 0.6506, 0.9443, 0.0260, 0.0910, 0.4223, + 0.8709, 0.5992, 0.2823, 0.7916, 0.5827, 0.5220, 0.8628, + 0.7689, 0.1050, 0.3488, 0.0494, 0.8068, 0.1642, 0.7790, + 0.5140, 0.1047, 0.7710, 0.7871, 0.8287, 0.4389, 0.1787, + 0.6815, 0.2231, 0.8519, 0.5556, 0.2774, 0.8382, 0.9451, + 0.8734, 0.8635, 0.2108, 0.7996, 0.6170, 0.0526, 0.7979, + 0.8662, 0.1730, 0.0691, 0.0668, 0.6049, 0.5908, 0.7047, + 0.8468, 0.4485, 0.3706, 0.9801, 0.1985, 0.1477, 0.2749, + 0.0702, 0.0328, 0.9500, 0.5657, 0.5124, 0.1613, 0.1996, + 0.3933, 0.4330, 0.3130, 0.1602, 0.5558, 0.9162, 0.2688, + 0.4727, 0.5825, 0.1557, 0.7294, 0.8714, 0.1321, 0.7130, + 0.9610, 0.6894, 0.5403, 0.7020, 0.3602, 0.1816, 0.0283, + 0.5886, 0.9803, 0.9744, 0.0303, 0.9827, 0.1918, 0.3763, + 0.2881, 0.7577, 0.5974, 0.7908, 0.1092, 0.3742, 0.1171, + 0.3723, 0.8509, 0.2978, 0.3616, 0.3397, 0.8046, 0.2599, + 0.2349, 0.9827, 0.0690, 0.3523, 0.7776, 0.0194, 0.2950, + 0.0301, 0.3824, 0.8581, 0.8230, 0.9448, 0.0054, 0.4019, + 0.3582, 0.8848, 0.6090, 0.0363, 0.0164, 0.9237, 0.2887, + 0.5411, 0.9583, 0.0473, 0.8185, 0.9223, 0.0525, 0.9777, + 0.5953, 0.2917, 0.8644, 0.6195, 0.3030, 0.7733, 0.0611, 
+ 0.4991, 0.9366, 0.6827, 0.5465, 0.5034, 0.0221, 0.6163, + 0.9029, 0.4801, 0.8324, 0.2157, 0.9754, 0.3283, 0.9381, + 0.2417, 0.6561, 0.5989, 0.8029, 0.1791, 0.7471, 0.1783, + 0.7994, 0.5807, 0.8763, 0.8464, 0.4109, 0.1088, 0.3839, + 0.2038, 0.0726, 0.8936, 0.1074, 0.0351, 0.9235, 0.7083, + 0.3558, 0.3932, 0.3260, 0.5944, 0.0072, 0.3316, 0.2196, + 0.8039, 0.4128, 0.9238, 0.2574, 0.2967, 0.0032, 0.4678, + 0.2924, 0.1058, 0.1821, 0.9271, 0.0700, 0.9897, 0.5525, + 0.2354, 0.6866, 0.4453, 0.5295, 0.1370, 0.8061, 0.3602, + 0.8010, 0.8126, 0.1953, 0.2307, 0.6779, 0.5612, 0.9952, + 0.6615, 0.0975, 0.1560, 0.5099, 0.8269, 0.5293, 0.5824, + 0.2968, 0.9192, 0.9748, 0.5507, 0.4204, 0.8749, 0.4482, + 0.9546, 0.2194, 0.3212, 0.8656, 0.2083, 0.5780, 0.8602, + 0.7862, 0.7419, 0.9902, 0.1332, 0.9222, 0.1391, 0.2169, + 0.4796, 0.5498, 0.5026, 0.9050, 0.8922, 0.3026, 0.7405, + 0.8729, 0.1021, 0.0894, 0.1148, 0.5970, 0.2647, 0.2807, + 0.6983, 0.4238, 0.5047, 0.1497, 0.9380, 0.6580, 0.5332, + 0.5670, 0.4759, 0.3379, 0.7770, 0.4989, 0.9628, 0.8008, + 0.0691, 0.1395, 0.8825, 0.8428, 0.2869, 0.5446, 0.7076, + 0.9507, 0.7554, 0.2652, 0.2555, 0.3540, 0.4539, 0.0023, + 0.9956, 0.2987, 0.1515, 0.8710, 0.6854, 0.8705, 0.2345, + 0.6768, 0.8055, 0.8135, 0.4333, 0.9453, 0.5409, 0.2969, + 0.4296, 0.7266, 0.0168, 0.7668, 0.4531, 0.9406]), size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) -tensor([0.4825, 0.6787, 0.3927, ..., 0.9418, 0.0965, 0.0788]) +tensor([0.4142, 0.5109, 0.3445, ..., 0.2930, 0.0409, 0.1153]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -1295,13 +1295,13 @@ Rows: 10000 Size: 100000000 NNZ: 1000 Density: 1e-05 -Time: 10.350545883178711 seconds +Time: 10.425815343856812 seconds -[18.41, 22.21, 18.1, 18.25, 17.88, 18.06, 18.18, 18.5, 18.02, 17.84] -[73.05] -13.74599313735962 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 284305, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.350545883178711, 'TIME_S_1KI': 0.03640648558125503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1004.1447986841201, 'W': 73.05} -[18.41, 22.21, 18.1, 18.25, 17.88, 18.06, 18.18, 18.5, 18.02, 17.84, 18.25, 17.97, 18.0, 17.86, 18.03, 17.9, 18.06, 17.99, 17.95, 18.62] -329.52 -16.476 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 284305, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.350545883178711, 'TIME_S_1KI': 0.03640648558125503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1004.1447986841201, 'W': 73.05, 'J_1KI': 3.5319280304043903, 'W_1KI': 0.2569423682313009, 'W_D': 56.574, 'J_D': 777.6658157529831, 'W_D_1KI': 0.19899052074356766, 'J_D_1KI': 0.0006999191739278861} +[19.9, 18.99, 18.67, 18.75, 18.75, 18.68, 18.82, 18.43, 18.71, 18.96] +[81.61] +13.671448469161987 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 283407, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.425815343856812, 'TIME_S_1KI': 0.03678743059930351, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1115.7269095683098, 'W': 81.61} +[19.9, 18.99, 18.67, 18.75, 18.75, 18.68, 18.82, 18.43, 18.71, 18.96, 19.65, 18.49, 18.9, 18.52, 19.02, 18.68, 19.05, 18.58, 18.82, 18.53] +338.38 +16.919 
+{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 283407, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.425815343856812, 'TIME_S_1KI': 0.03678743059930351, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1115.7269095683098, 'W': 81.61, 'J_1KI': 3.9368361034424333, 'W_1KI': 0.2879604244073012, 'W_D': 64.691, 'J_D': 884.4196729185581, 'W_D_1KI': 0.22826182839520548, 'J_D_1KI': 0.0008054205732222757} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.json index 3fe7967..b04c1f7 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 264429, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.455932378768921, "TIME_S_1KI": 0.03954154944718212, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1008.905144047737, "W": 73.49, "J_1KI": 3.8154103522977323, "W_1KI": 0.2779195927829398, "W_D": 57.09475, "J_D": 783.8234722155332, "W_D_1KI": 0.21591712709271677, "J_D_1KI": 0.0008165410264861901} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 270916, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.355923652648926, "TIME_S_1KI": 0.038225588937711046, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1135.3567011880875, "W": 82.26, "J_1KI": 4.190807118029528, "W_1KI": 0.3036365515510343, "W_D": 65.19800000000001, "J_D": 899.8661099448206, "W_D_1KI": 0.24065762081235517, "J_D_1KI": 0.0008883108447354721} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.output index 39a6146..b8a2d05 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01993107795715332} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.021791696548461914} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 4997, 4998, 5000]), - col_indices=tensor([9328, 8573, 6400, ..., 9443, 3853, 6322]), - values=tensor([0.9995, 0.8210, 0.4187, ..., 0.1342, 0.0596, 0.9033]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4997, 5000, 5000]), + col_indices=tensor([4472, 6601, 2920, ..., 1201, 3298, 9929]), + values=tensor([0.0277, 0.0355, 0.7076, ..., 0.9866, 0.1330, 0.6698]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.6106, 0.0696, 0.9188, ..., 0.7595, 0.3313, 0.0671]) +tensor([0.1564, 0.7861, 0.3014, ..., 0.2808, 0.5767, 0.0131]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -15,18 +15,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 0.01993107795715332 seconds +Time: 0.021791696548461914 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52681', '-ss', '10000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.0918641090393066} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '48183', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.8674421310424805} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 4998, 5000]), - col_indices=tensor([2599, 3812, 3885, ..., 206, 4492, 5501]), - values=tensor([0.4008, 0.6783, 0.6051, ..., 0.8606, 0.8114, 0.5557]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4998, 4999, 5000]), + col_indices=tensor([3289, 9092, 5142, ..., 3019, 5284, 7745]), + values=tensor([0.4883, 0.6873, 0.5973, ..., 0.5290, 0.7749, 0.6785]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.4634, 0.0164, 0.5073, ..., 0.3776, 0.2676, 0.0715]) +tensor([0.4543, 0.9740, 0.6974, ..., 0.6831, 0.0081, 0.6533]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -34,18 +34,18 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 2.0918641090393066 seconds +Time: 1.8674421310424805 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '264429', '-ss', '10000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.455932378768921} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '270916', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.355923652648926} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), - col_indices=tensor([3612, 1318, 1874, ..., 9308, 8111, 3978]), - values=tensor([0.0263, 0.8701, 0.1587, ..., 0.1521, 0.0294, 0.0367]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 4999, 5000]), + col_indices=tensor([7630, 8923, 2667, ..., 8010, 8482, 224]), + values=tensor([0.8705, 0.1590, 0.0738, ..., 0.5587, 0.1310, 0.4625]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.4791, 0.9785, 0.1725, ..., 0.5898, 0.0968, 0.4149]) +tensor([0.8091, 0.7111, 0.4284, ..., 0.5849, 0.8536, 0.5126]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -53,15 +53,15 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.455932378768921 seconds +Time: 10.355923652648926 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), - col_indices=tensor([3612, 1318, 1874, ..., 9308, 8111, 3978]), - values=tensor([0.0263, 0.8701, 0.1587, ..., 0.1521, 0.0294, 0.0367]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 4999, 4999, 5000]), + col_indices=tensor([7630, 8923, 2667, ..., 8010, 8482, 224]), + values=tensor([0.8705, 0.1590, 0.0738, ..., 0.5587, 0.1310, 0.4625]), size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) -tensor([0.4791, 0.9785, 0.1725, ..., 0.5898, 0.0968, 0.4149]) +tensor([0.8091, 0.7111, 0.4284, ..., 0.5849, 0.8536, 0.5126]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([10000, 10000]) @@ -69,13 +69,13 @@ Rows: 10000 Size: 100000000 NNZ: 5000 Density: 5e-05 -Time: 10.455932378768921 seconds +Time: 10.355923652648926 seconds -[19.02, 17.97, 18.1, 18.25, 17.93, 18.7, 18.16, 18.15, 18.22, 17.98] -[73.49] -13.728468418121338 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 264429, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.455932378768921, 'TIME_S_1KI': 0.03954154944718212, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1008.905144047737, 'W': 73.49} -[19.02, 17.97, 18.1, 18.25, 17.93, 18.7, 18.16, 18.15, 18.22, 17.98, 18.41, 18.7, 18.23, 17.8, 17.94, 18.94, 18.01, 18.04, 18.02, 18.08] -327.905 -16.395249999999997 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 264429, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.455932378768921, 'TIME_S_1KI': 0.03954154944718212, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1008.905144047737, 'W': 73.49, 'J_1KI': 3.8154103522977323, 'W_1KI': 0.2779195927829398, 'W_D': 57.09475, 'J_D': 783.8234722155332, 'W_D_1KI': 0.21591712709271677, 'J_D_1KI': 0.0008165410264861901} +[18.87, 18.62, 18.42, 18.47, 18.37, 18.72, 18.49, 18.34, 18.5, 18.86] +[82.26] +13.802050828933716 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 270916, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.355923652648926, 'TIME_S_1KI': 0.038225588937711046, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1135.3567011880875, 'W': 82.26} +[18.87, 18.62, 18.42, 18.47, 18.37, 18.72, 18.49, 18.34, 18.5, 18.86, 19.06, 19.43, 18.48, 18.51, 18.63, 18.3, 20.63, 18.66, 22.78, 18.99] +341.24 +17.062 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 270916, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.355923652648926, 'TIME_S_1KI': 0.038225588937711046, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1135.3567011880875, 'W': 82.26, 'J_1KI': 4.190807118029528, 'W_1KI': 0.3036365515510343, 'W_D': 65.19800000000001, 'J_D': 899.8661099448206, 'W_D_1KI': 0.24065762081235517, 'J_D_1KI': 0.0008883108447354721} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.json index 35c6334..86ca136 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 693, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.433570146560669, "TIME_S_1KI": 15.055656777143822, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2886.4447816610336, "W": 54.5, "J_1KI": 4165.1439850808565, "W_1KI": 78.64357864357864, "W_D": 38.16675, "J_D": 2021.3984655130505, "W_D_1KI": 55.074675324675326, "J_D_1KI": 79.47283596634246} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 746, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.84559440612793, "TIME_S_1KI": 14.53833030312055, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1694.9268332147599, "W": 78.11, "J_1KI": 2272.019883665898, "W_1KI": 104.70509383378015, "W_D": 61.11425, "J_D": 1326.1321497477293, "W_D_1KI": 81.92258713136728, "J_D_1KI": 109.81580044419206} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.output index f2efbf0..cb05528 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.output @@ -1,15 +1,15 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.8274271488189697} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.4070062637329102} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 57, 98, ..., 24999897, - 24999949, 25000000]), - col_indices=tensor([ 2496, 8518, 12544, ..., 449306, 467869, - 486714]), - values=tensor([0.2667, 0.8213, 0.8309, ..., 0.8074, 0.6926, 0.7796]), +tensor(crow_indices=tensor([ 0, 41, 84, ..., 24999913, + 24999952, 25000000]), + col_indices=tensor([ 61234, 85858, 98849, ..., 469430, 496166, + 499540]), + values=tensor([0.5443, 0.1922, 0.4452, ..., 0.7461, 0.7373, 0.7127]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.0179, 0.7693, 0.5874, ..., 0.4128, 0.7472, 0.6195]) +tensor([0.0687, 0.4582, 0.0033, ..., 0.9956, 0.4509, 0.1129]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 1.8274271488189697 seconds +Time: 1.4070062637329102 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '574', '-ss', '500000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.696394205093384} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '746', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.84559440612793} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 45, 88, ..., 24999889, - 24999948, 25000000]), - col_indices=tensor([ 21942, 37292, 56785, ..., 479111, 486535, - 489318]), - values=tensor([0.4753, 0.7614, 0.3285, ..., 0.3162, 0.0061, 0.9591]), +tensor(crow_indices=tensor([ 0, 51, 116, ..., 24999893, + 24999955, 25000000]), + col_indices=tensor([ 6279, 16149, 17234, ..., 473800, 479653, + 499129]), + values=tensor([0.3557, 0.5902, 0.8620, ..., 0.9429, 0.0947, 0.4761]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.2582, 0.0329, 0.5597, ..., 0.0495, 0.5298, 0.4237]) +tensor([0.0901, 0.6319, 0.4091, ..., 0.4625, 0.3509, 0.9371]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,20 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 8.696394205093384 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '693', '-ss', '500000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.433570146560669} +Time: 10.84559440612793 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 38, 84, ..., 24999899, - 24999948, 25000000]), - col_indices=tensor([ 30660, 43953, 94811, ..., 484319, 487924, - 499108]), - values=tensor([0.4696, 0.5982, 0.2681, ..., 0.1240, 0.7008, 0.8579]), +tensor(crow_indices=tensor([ 0, 51, 116, ..., 24999893, + 24999955, 25000000]), + col_indices=tensor([ 6279, 16149, 17234, ..., 473800, 479653, + 499129]), + values=tensor([0.3557, 0.5902, 0.8620, ..., 0.9429, 0.0947, 0.4761]), size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3176, 0.2592, 0.6200, ..., 0.0886, 0.2852, 0.1534]) +tensor([0.0901, 0.6319, 0.4091, ..., 0.4625, 0.3509, 0.9371]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -59,31 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 25000000 Density: 0.0001 -Time: 10.433570146560669 seconds +Time: 10.84559440612793 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 38, 84, ..., 24999899, - 24999948, 25000000]), - col_indices=tensor([ 30660, 43953, 94811, ..., 484319, 487924, - 499108]), - values=tensor([0.4696, 0.5982, 0.2681, ..., 0.1240, 0.7008, 0.8579]), - size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.3176, 0.2592, 0.6200, ..., 0.0886, 0.2852, 0.1534]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([500000, 500000]) -Rows: 500000 -Size: 250000000000 -NNZ: 25000000 -Density: 0.0001 -Time: 10.433570146560669 seconds - -[18.16, 18.09, 17.95, 18.13, 17.83, 18.01, 18.2, 17.99, 18.23, 17.93] -[54.5] -52.962289571762085 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.433570146560669, 'TIME_S_1KI': 15.055656777143822, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2886.4447816610336, 'W': 54.5} -[18.16, 18.09, 17.95, 18.13, 17.83, 18.01, 18.2, 17.99, 18.23, 17.93, 18.48, 17.91, 18.03, 18.08, 18.03, 18.09, 18.34, 19.26, 18.16, 18.1] -326.66499999999996 -16.33325 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.433570146560669, 'TIME_S_1KI': 15.055656777143822, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2886.4447816610336, 'W': 54.5, 'J_1KI': 4165.1439850808565, 'W_1KI': 78.64357864357864, 'W_D': 38.16675, 'J_D': 2021.3984655130505, 'W_D_1KI': 55.074675324675326, 'J_D_1KI': 79.47283596634246} +[19.36, 18.51, 18.89, 21.88, 19.02, 18.5, 18.86, 18.41, 18.63, 18.48] +[78.11] +21.69922971725464 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 746, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 
25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.84559440612793, 'TIME_S_1KI': 14.53833030312055, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1694.9268332147599, 'W': 78.11} +[19.36, 18.51, 18.89, 21.88, 19.02, 18.5, 18.86, 18.41, 18.63, 18.48, 19.29, 18.38, 18.34, 18.5, 19.12, 18.34, 19.58, 18.39, 18.81, 18.38] +339.915 +16.99575 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 746, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.84559440612793, 'TIME_S_1KI': 14.53833030312055, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1694.9268332147599, 'W': 78.11, 'J_1KI': 2272.019883665898, 'W_1KI': 104.70509383378015, 'W_D': 61.11425, 'J_D': 1326.1321497477293, 'W_D_1KI': 81.92258713136728, 'J_D_1KI': 109.81580044419206} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json index c977413..f553808 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8054, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.420526504516602, "TIME_S_1KI": 1.293832444067122, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1257.315396785736, "W": 87.34, "J_1KI": 156.11067752492377, "W_1KI": 10.844300968462875, "W_D": 70.58375000000001, "J_D": 1016.0984158217908, "W_D_1KI": 8.763813012167867, "J_D_1KI": 1.0881317372942474} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 7784, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.579402923583984, "TIME_S_1KI": 1.3591216499979424, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1294.7073955726623, "W": 88.97, "J_1KI": 166.32931597798847, "W_1KI": 11.429856115107913, "W_D": 72.1655, "J_D": 1050.1652979116438, "W_D_1KI": 9.27100462487153, "J_D_1KI": 1.1910334821263526} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output index 2324509..623dbdf 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1425309181213379} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.14815759658813477} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support 
is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 8, ..., 2499994, - 2499996, 2500000]), - col_indices=tensor([ 55676, 267462, 335220, ..., 59414, 387658, - 467981]), - values=tensor([0.3669, 0.3244, 0.1572, ..., 0.0615, 0.1330, 0.1317]), +tensor(crow_indices=tensor([ 0, 6, 16, ..., 2499993, + 2499995, 2500000]), + col_indices=tensor([ 3766, 237018, 253848, ..., 331484, 410452, + 411334]), + values=tensor([0.6777, 0.9195, 0.2043, ..., 0.7681, 0.5254, 0.8125]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.2232, 0.7011, 0.4607, ..., 0.0263, 0.5751, 0.5574]) +tensor([0.0134, 0.5178, 0.5077, ..., 0.9311, 0.1963, 0.6173]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 0.1425309181213379 seconds +Time: 0.14815759658813477 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7366', '-ss', '500000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.601978540420532} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7087', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.558655261993408} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 17, ..., 2499992, - 2499998, 2500000]), - col_indices=tensor([ 78904, 197792, 264056, ..., 387862, 102468, - 277870]), - values=tensor([0.6219, 0.1671, 0.9871, ..., 0.4185, 0.9305, 0.2778]), +tensor(crow_indices=tensor([ 0, 3, 5, ..., 2499987, + 2499995, 2500000]), + col_indices=tensor([448569, 459977, 493128, ..., 361590, 442935, + 473939]), + values=tensor([0.6275, 0.9076, 0.6066, ..., 0.7922, 0.8137, 0.6712]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0210, 0.3595, 0.6743, ..., 0.0036, 0.8741, 0.0347]) +tensor([0.4543, 0.7339, 0.9628, ..., 0.5995, 0.5397, 0.7491]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,20 +38,20 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 9.601978540420532 seconds +Time: 9.558655261993408 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8054', '-ss', '500000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.420526504516602} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7784', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.579402923583984} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 7, ..., 2499986, - 2499993, 2500000]), - col_indices=tensor([136514, 185390, 204506, ..., 365577, 371722, - 449843]), - values=tensor([0.2143, 0.0864, 0.9943, ..., 0.6371, 0.6570, 0.8441]), +tensor(crow_indices=tensor([ 0, 3, 9, ..., 2499990, + 2499994, 2500000]), + col_indices=tensor([ 5991, 210893, 321972, ..., 434208, 435414, + 497706]), + values=tensor([0.4625, 0.6458, 0.2981, ..., 0.9507, 0.2530, 0.6760]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0733, 0.2994, 0.4999, ..., 0.8006, 0.1699, 0.8850]) +tensor([0.6802, 0.7796, 0.9205, ..., 0.5425, 0.2003, 0.4676]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -59,17 +59,17 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.420526504516602 seconds +Time: 10.579402923583984 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 7, ..., 2499986, - 2499993, 2500000]), - col_indices=tensor([136514, 185390, 204506, ..., 365577, 371722, - 449843]), - values=tensor([0.2143, 0.0864, 0.9943, ..., 0.6371, 0.6570, 0.8441]), +tensor(crow_indices=tensor([ 0, 3, 9, ..., 2499990, + 2499994, 2500000]), + col_indices=tensor([ 5991, 210893, 321972, ..., 434208, 435414, + 497706]), + values=tensor([0.4625, 0.6458, 0.2981, ..., 0.9507, 0.2530, 0.6760]), size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0733, 0.2994, 0.4999, ..., 0.8006, 0.1699, 0.8850]) +tensor([0.6802, 0.7796, 0.9205, ..., 0.5425, 0.2003, 0.4676]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -77,13 +77,13 @@ Rows: 500000 Size: 250000000000 NNZ: 2500000 Density: 1e-05 -Time: 10.420526504516602 seconds +Time: 10.579402923583984 seconds -[18.47, 17.91, 18.18, 18.64, 18.09, 18.11, 18.17, 18.18, 22.06, 17.96] -[87.34] -14.395642280578613 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8054, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.420526504516602, 'TIME_S_1KI': 1.293832444067122, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1257.315396785736, 'W': 87.34} -[18.47, 17.91, 18.18, 18.64, 18.09, 18.11, 18.17, 18.18, 22.06, 17.96, 18.46, 18.6, 18.06, 19.85, 20.28, 18.18, 18.02, 17.82, 18.61, 17.84] -335.125 -16.75625 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8054, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.420526504516602, 'TIME_S_1KI': 1.293832444067122, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1257.315396785736, 'W': 87.34, 'J_1KI': 156.11067752492377, 'W_1KI': 10.844300968462875, 'W_D': 70.58375000000001, 'J_D': 1016.0984158217908, 'W_D_1KI': 8.763813012167867, 'J_D_1KI': 1.0881317372942474} +[19.1, 18.44, 18.85, 18.36, 18.5, 18.61, 18.93, 18.44, 18.66, 18.57] +[88.97] +14.552179336547852 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 7784, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.579402923583984, 'TIME_S_1KI': 1.3591216499979424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1294.7073955726623, 'W': 88.97} +[19.1, 18.44, 18.85, 18.36, 18.5, 18.61, 18.93, 18.44, 18.66, 18.57, 18.8, 18.43, 18.68, 18.91, 18.54, 19.38, 18.58, 18.68, 18.67, 18.39] +336.09000000000003 +16.8045 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 7784, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.579402923583984, 'TIME_S_1KI': 1.3591216499979424, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1294.7073955726623, 'W': 88.97, 'J_1KI': 166.32931597798847, 'W_1KI': 11.429856115107913, 'W_D': 72.1655, 'J_D': 1050.1652979116438, 'W_D_1KI': 9.27100462487153, 'J_D_1KI': 1.1910334821263526} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.json index 72c367a..af927a9 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1363, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.215147256851196, "TIME_S_1KI": 7.494605470910636, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1461.7694299888612, "W": 77.68, "J_1KI": 1072.4647322001917, "W_1KI": 56.99192956713133, "W_D": 61.057, "J_D": 1148.960557245493, "W_D_1KI": 44.796038151137196, "J_D_1KI": 32.865765334656786} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1296, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.415658712387085, "TIME_S_1KI": 8.036773697829542, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1467.0796593379973, "W": 82.82, "J_1KI": 1132.0059099830225, "W_1KI": 63.90432098765432, "W_D": 66.01575, "J_D": 1169.4079210449456, "W_D_1KI": 50.9380787037037, "J_D_1KI": 39.30407307384545} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.output index 097beff..1ac7539 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.output @@ -1,15 +1,15 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.7700090408325195} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.8099560737609863} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 24, 55, ..., 12499948, - 12499975, 12500000]), - col_indices=tensor([ 4417, 27723, 55822, ..., 442008, 448310, - 496598]), - values=tensor([0.6543, 0.3065, 0.8363, ..., 0.0104, 0.9892, 0.0257]), +tensor(crow_indices=tensor([ 0, 20, 43, ..., 12499954, + 12499972, 12500000]), + col_indices=tensor([ 15140, 122304, 132861, ..., 413745, 434718, + 449928]), + values=tensor([0.7713, 0.2886, 0.8294, ..., 0.6043, 0.3703, 0.8077]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.5546, 0.9568, 0.6697, ..., 0.4050, 0.9738, 0.6596]) +tensor([0.4264, 0.3783, 0.7665, ..., 0.2411, 0.5032, 0.4857]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -17,20 +17,20 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 0.7700090408325195 seconds +Time: 0.8099560737609863 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1363', '-ss', '500000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.215147256851196} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1296', '-ss', '500000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.415658712387085} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 32, 58, ..., 12499956, +tensor(crow_indices=tensor([ 0, 27, 50, ..., 12499957, 12499981, 12500000]), - col_indices=tensor([ 7736, 12237, 33305, ..., 443222, 470958, - 475326]), - values=tensor([0.9583, 0.2636, 0.0225, ..., 0.4084, 0.8296, 0.8114]), + col_indices=tensor([ 22700, 42704, 60062, ..., 407983, 461451, + 481435]), + values=tensor([0.7364, 0.7571, 0.4841, ..., 0.0707, 0.3766, 0.3990]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.4848, 0.9833, 0.6868, ..., 0.4430, 0.1817, 0.0586]) +tensor([0.2903, 0.4833, 0.2839, ..., 0.8559, 0.6564, 0.0108]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -38,17 +38,17 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.215147256851196 seconds +Time: 10.415658712387085 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 32, 58, ..., 12499956, +tensor(crow_indices=tensor([ 0, 27, 50, ..., 12499957, 12499981, 12500000]), - col_indices=tensor([ 7736, 12237, 33305, ..., 443222, 470958, - 475326]), - values=tensor([0.9583, 0.2636, 0.0225, ..., 0.4084, 0.8296, 0.8114]), + col_indices=tensor([ 22700, 42704, 60062, ..., 407983, 461451, + 481435]), + values=tensor([0.7364, 0.7571, 0.4841, ..., 0.0707, 0.3766, 0.3990]), size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.4848, 0.9833, 0.6868, ..., 0.4430, 0.1817, 0.0586]) +tensor([0.2903, 0.4833, 0.2839, ..., 0.8559, 0.6564, 0.0108]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([500000, 500000]) @@ -56,13 +56,13 @@ Rows: 500000 Size: 250000000000 NNZ: 12500000 Density: 5e-05 -Time: 10.215147256851196 seconds +Time: 10.415658712387085 seconds -[18.64, 18.68, 18.32, 18.11, 18.55, 17.92, 17.92, 19.0, 18.5, 17.99] -[77.68] -18.817835092544556 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.215147256851196, 'TIME_S_1KI': 7.494605470910636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1461.7694299888612, 'W': 77.68} -[18.64, 18.68, 18.32, 18.11, 18.55, 17.92, 17.92, 19.0, 18.5, 17.99, 18.42, 18.14, 18.35, 18.41, 18.1, 18.07, 18.11, 21.75, 17.97, 18.07] -332.46000000000004 -16.623 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.215147256851196, 'TIME_S_1KI': 7.494605470910636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1461.7694299888612, 'W': 77.68, 'J_1KI': 1072.4647322001917, 'W_1KI': 56.99192956713133, 'W_D': 61.057, 'J_D': 1148.960557245493, 'W_D_1KI': 44.796038151137196, 'J_D_1KI': 32.865765334656786} +[19.7, 18.76, 18.79, 18.44, 19.07, 18.28, 18.71, 18.42, 18.71, 18.44] +[82.82] +17.71407461166382 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1296, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.415658712387085, 'TIME_S_1KI': 8.036773697829542, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1467.0796593379973, 'W': 82.82} +[19.7, 18.76, 18.79, 18.44, 19.07, 18.28, 18.71, 18.42, 18.71, 18.44, 19.25, 18.74, 18.64, 18.32, 18.52, 18.38, 18.43, 18.41, 19.07, 19.4] +336.08500000000004 +16.804250000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1296, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.415658712387085, 'TIME_S_1KI': 8.036773697829542, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1467.0796593379973, 'W': 82.82, 'J_1KI': 1132.0059099830225, 'W_1KI': 63.90432098765432, 'W_D': 66.01575, 'J_D': 1169.4079210449456, 'W_D_1KI': 50.9380787037037, 'J_D_1KI': 39.30407307384545} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json index 393ceed..8c451f8 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 80365, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.785846710205078, "TIME_S_1KI": 0.1342107473428119, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1142.0150223541261, "W": 83.68, "J_1KI": 14.210353043664854, "W_1KI": 1.041249300068438, "W_D": 67.33675000000001, "J_D": 918.9720370041133, "W_D_1KI": 0.8378865177627077, "J_D_1KI": 0.01042601278868547} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 78692, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.352098941802979, "TIME_S_1KI": 0.13155211383371854, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1251.0309797000884, "W": 89.96, "J_1KI": 15.897816546791143, "W_1KI": 1.1431911757230722, "W_D": 73.1585, "J_D": 1017.380501649499, "W_D_1KI": 0.9296815432318406, "J_D_1KI": 0.011814181152237084} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output index ace565c..4d0d66d 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.031140804290771484} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.03153657913208008} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 10, ..., 249988, 249996, +tensor(crow_indices=tensor([ 0, 7, 15, ..., 249984, 249990, 250000]), - col_indices=tensor([ 3938, 11827, 18410, ..., 25331, 39292, 43613]), - values=tensor([0.0696, 0.4480, 0.5528, ..., 0.5051, 0.2445, 0.3952]), + col_indices=tensor([ 2492, 9440, 17114, ..., 45352, 46897, 49157]), + values=tensor([0.2702, 0.2914, 0.3123, ..., 0.0040, 0.1038, 0.9095]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.1610, 0.2872, 0.6159, ..., 0.3941, 0.7048, 0.2210]) +tensor([0.7068, 0.5554, 0.2496, ..., 0.6351, 0.9308, 0.5843]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 0.031140804290771484 seconds +Time: 0.03153657913208008 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33717', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.405243873596191} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33294', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.442471742630005} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 8, 16, ..., 249992, 249993, +tensor(crow_indices=tensor([ 0, 9, 14, ..., 249990, 249992, 250000]), - col_indices=tensor([ 4798, 5191, 8748, ..., 31389, 41207, 45142]), - values=tensor([0.4595, 0.5274, 0.3770, ..., 0.1787, 0.1620, 0.3761]), + col_indices=tensor([11099, 16101, 18653, ..., 16723, 26003, 26144]), + values=tensor([0.7576, 0.9790, 0.7742, ..., 0.4957, 0.9421, 0.1630]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.7848, 0.4939, 0.9259, ..., 0.5597, 0.9743, 0.2454]) +tensor([0.5654, 0.1751, 0.3696, ..., 0.4101, 0.6979, 0.6769]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 4.405243873596191 seconds +Time: 4.442471742630005 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '80365', '-ss', '50000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.785846710205078} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '78692', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.352098941802979} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 6, ..., 249987, 249993, +tensor(crow_indices=tensor([ 0, 13, 18, ..., 249987, 249992, 250000]), - col_indices=tensor([ 3036, 18920, 36950, ..., 33008, 35825, 42083]), - values=tensor([0.1216, 0.1218, 0.6996, ..., 0.2614, 0.2596, 0.0475]), + col_indices=tensor([ 2685, 9199, 16962, ..., 29672, 31794, 49403]), + values=tensor([0.9434, 0.9131, 0.3740, ..., 0.2777, 0.1729, 0.4549]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0022, 0.5368, 0.2941, ..., 0.2951, 0.8515, 0.2587]) +tensor([0.8203, 0.0251, 0.4654, ..., 0.7560, 0.1695, 0.4972]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.785846710205078 seconds +Time: 10.352098941802979 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 6, ..., 249987, 249993, +tensor(crow_indices=tensor([ 0, 13, 18, ..., 249987, 249992, 250000]), - col_indices=tensor([ 3036, 18920, 36950, ..., 33008, 35825, 42083]), - values=tensor([0.1216, 0.1218, 0.6996, ..., 0.2614, 0.2596, 0.0475]), + col_indices=tensor([ 2685, 9199, 16962, ..., 29672, 31794, 49403]), + values=tensor([0.9434, 0.9131, 0.3740, ..., 0.2777, 0.1729, 0.4549]), size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) -tensor([0.0022, 0.5368, 0.2941, ..., 0.2951, 0.8515, 0.2587]) +tensor([0.8203, 0.0251, 0.4654, ..., 0.7560, 0.1695, 0.4972]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 250000 Density: 0.0001 -Time: 10.785846710205078 seconds +Time: 10.352098941802979 seconds -[18.57, 17.87, 18.3, 17.99, 18.1, 18.03, 18.33, 18.16, 18.09, 17.89] -[83.68] -13.647407054901123 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 80365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.785846710205078, 'TIME_S_1KI': 0.1342107473428119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1142.0150223541261, 'W': 83.68} -[18.57, 17.87, 18.3, 17.99, 18.1, 18.03, 18.33, 18.16, 18.09, 17.89, 18.23, 17.91, 18.25, 19.1, 18.2, 17.88, 18.14, 18.11, 18.11, 17.9] -326.865 -16.34325 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 80365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.785846710205078, 'TIME_S_1KI': 0.1342107473428119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1142.0150223541261, 'W': 83.68, 'J_1KI': 14.210353043664854, 'W_1KI': 1.041249300068438, 'W_D': 67.33675000000001, 'J_D': 918.9720370041133, 'W_D_1KI': 0.8378865177627077, 'J_D_1KI': 0.01042601278868547} +[19.65, 18.44, 19.1, 18.35, 19.07, 18.63, 18.43, 18.35, 18.65, 18.3] +[89.96] +13.906524896621704 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 78692, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.352098941802979, 'TIME_S_1KI': 0.13155211383371854, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1251.0309797000884, 'W': 89.96} +[19.65, 18.44, 19.1, 18.35, 19.07, 18.63, 18.43, 18.35, 18.65, 18.3, 19.16, 18.59, 18.84, 18.4, 18.48, 18.5, 19.04, 18.48, 18.87, 18.51] +336.03 +16.801499999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 78692, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.352098941802979, 'TIME_S_1KI': 0.13155211383371854, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1251.0309797000884, 'W': 89.96, 'J_1KI': 15.897816546791143, 'W_1KI': 1.1431911757230722, 'W_D': 73.1585, 'J_D': 1017.380501649499, 'W_D_1KI': 0.9296815432318406, 'J_D_1KI': 0.011814181152237084} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json index 3d88fb8..ca43d5e 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 17258, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.52219271659851, "TIME_S_1KI": 0.6096994273147822, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1290.1128985500336, "W": 87.73, "J_1KI": 74.75448479256191, "W_1KI": 5.083439564260054, "W_D": 71.3875, "J_D": 1049.7883796334268, "W_D_1KI": 4.136487426121219, "J_D_1KI": 0.23968521416857222} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 16563, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.481314182281494, "TIME_S_1KI": 0.6328149599880151, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1300.5569376850128, "W": 89.09, "J_1KI": 78.52182199390285, "W_1KI": 5.378856487351325, "W_D": 71.86, "J_D": 1049.0293135261536, "W_D_1KI": 4.338586004950795, "J_D_1KI": 0.26194445480594064} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output index c067392..40e4261 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output @@ -1,34 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07522106170654297} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.0770113468170166} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 54, 95, ..., 2499899, - 2499951, 2500000]), - col_indices=tensor([ 245, 316, 650, ..., 46425, 47933, 49262]), - values=tensor([0.3598, 0.8732, 0.2112, ..., 0.2076, 0.2855, 0.8514]), - size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6227, 0.9680, 0.6855, ..., 0.8971, 0.9917, 0.2060]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 2500000 -Density: 0.001 -Time: 0.07522106170654297 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '13958', '-ss', '50000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.492224931716919} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 52, 97, ..., 2499905, +tensor(crow_indices=tensor([ 0, 57, 97, ..., 2499901, 2499950, 2500000]), - col_indices=tensor([ 3061, 5035, 6476, ..., 48999, 49661, 49813]), - values=tensor([0.5243, 0.0379, 0.3507, ..., 0.2954, 0.9764, 0.8519]), + col_indices=tensor([ 124, 175, 906, ..., 48953, 49099, 49775]), + values=tensor([0.5729, 0.6603, 0.4257, ..., 0.7714, 0.6486, 0.1048]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.8088, 0.8530, 0.3688, ..., 0.5327, 0.1148, 0.7333]) +tensor([0.1490, 0.9796, 0.6323, ..., 0.8258, 0.0173, 0.9887]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 8.492224931716919 seconds +Time: 0.0770113468170166 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '17258', '-ss', '50000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.52219271659851} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '13634', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.642810106277466} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 98, ..., 2499898, - 2499955, 2500000]), - col_indices=tensor([ 336, 813, 2467, ..., 44805, 45101, 46338]), - values=tensor([0.1989, 0.3364, 0.1097, ..., 0.3897, 0.4637, 0.0665]), +tensor(crow_indices=tensor([ 0, 44, 96, ..., 2499886, + 2499947, 2500000]), + col_indices=tensor([ 2781, 2792, 2952, ..., 47835, 48355, 49738]), + values=tensor([0.6657, 0.8068, 0.2679, ..., 0.1283, 0.7173, 0.1870]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6060, 0.7426, 0.0681, ..., 0.1484, 0.6293, 0.7345]) +tensor([0.0925, 0.3234, 0.5984, ..., 0.6757, 0.5158, 0.8086]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.52219271659851 seconds +Time: 8.642810106277466 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '16563', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.481314182281494} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 60, 98, ..., 2499898, - 2499955, 2500000]), - col_indices=tensor([ 336, 813, 2467, ..., 44805, 45101, 46338]), - values=tensor([0.1989, 0.3364, 0.1097, ..., 0.3897, 0.4637, 0.0665]), +tensor(crow_indices=tensor([ 0, 49, 98, ..., 2499924, + 2499965, 2500000]), + col_indices=tensor([ 2415, 5474, 6582, ..., 47823, 47849, 49372]), + values=tensor([0.1268, 0.5761, 0.4335, ..., 0.2798, 0.1362, 0.5670]), size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.6060, 0.7426, 0.0681, ..., 0.1484, 0.6293, 0.7345]) +tensor([0.4866, 0.7985, 0.0883, ..., 0.5992, 0.0522, 0.8201]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +56,30 @@ Rows: 50000 Size: 2500000000 NNZ: 2500000 Density: 0.001 -Time: 10.52219271659851 seconds +Time: 10.481314182281494 seconds -[18.24, 17.95, 18.03, 17.9, 18.31, 18.2, 18.96, 17.99, 17.74, 18.24] -[87.73] -14.705492973327637 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17258, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.52219271659851, 'TIME_S_1KI': 0.6096994273147822, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1290.1128985500336, 'W': 87.73} -[18.24, 17.95, 18.03, 17.9, 18.31, 18.2, 18.96, 17.99, 17.74, 18.24, 18.79, 18.06, 18.38, 18.13, 18.12, 18.05, 18.08, 18.09, 18.27, 17.91] -326.85 -16.3425 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17258, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.52219271659851, 'TIME_S_1KI': 0.6096994273147822, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1290.1128985500336, 'W': 87.73, 'J_1KI': 74.75448479256191, 'W_1KI': 
5.083439564260054, 'W_D': 71.3875, 'J_D': 1049.7883796334268, 'W_D_1KI': 4.136487426121219, 'J_D_1KI': 0.23968521416857222} +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 49, 98, ..., 2499924, + 2499965, 2500000]), + col_indices=tensor([ 2415, 5474, 6582, ..., 47823, 47849, 49372]), + values=tensor([0.1268, 0.5761, 0.4335, ..., 0.2798, 0.1362, 0.5670]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4866, 0.7985, 0.0883, ..., 0.5992, 0.0522, 0.8201]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.481314182281494 seconds + +[18.81, 19.03, 18.91, 18.88, 21.0, 18.65, 19.63, 19.12, 18.72, 18.6] +[89.09] +14.598237037658691 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 16563, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.481314182281494, 'TIME_S_1KI': 0.6328149599880151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.5569376850128, 'W': 89.09} +[18.81, 19.03, 18.91, 18.88, 21.0, 18.65, 19.63, 19.12, 18.72, 18.6, 19.47, 18.46, 18.88, 18.42, 18.93, 18.66, 18.63, 21.61, 19.39, 18.48] +344.6 +17.23 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 16563, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.481314182281494, 'TIME_S_1KI': 0.6328149599880151, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1300.5569376850128, 'W': 89.09, 'J_1KI': 78.52182199390285, 'W_1KI': 5.378856487351325, 'W_D': 71.86, 'J_D': 1049.0293135261536, 'W_D_1KI': 4.338586004950795, 'J_D_1KI': 0.26194445480594064} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.json index 00dd221..babdca8 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1116, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.347081184387207, "TIME_S_1KI": 9.27157812221076, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2825.247347984314, "W": 53.89, "J_1KI": 2531.5836451472346, "W_1KI": 48.288530465949826, "W_D": 37.37625, "J_D": 1959.4943624067305, "W_D_1KI": 33.491263440860216, "J_D_1KI": 30.010092688942844} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1167, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.299102067947388, "TIME_S_1KI": 8.82528026387951, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1704.976960287094, "W": 77.39, "J_1KI": 
1460.9913969897977, "W_1KI": 66.31533847472151, "W_D": 60.3825, "J_D": 1330.2851958203316, "W_D_1KI": 51.741645244215945, "J_D_1KI": 44.33731383394682} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.output index 1b20dcd..744df53 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.output @@ -1,15 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.2946875095367432} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.8992819786071777} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 488, 979, ..., 24999008, - 24999504, 25000000]), - col_indices=tensor([ 111, 301, 401, ..., 49766, 49891, 49958]), - values=tensor([2.3418e-01, 9.8131e-01, 8.1298e-03, ..., - 6.0106e-01, 4.2789e-04, 8.6966e-01]), +tensor(crow_indices=tensor([ 0, 475, 1011, ..., 24998974, + 24999527, 25000000]), + col_indices=tensor([ 168, 172, 198, ..., 49907, 49922, 49927]), + values=tensor([0.0665, 0.3170, 0.5936, ..., 0.6846, 0.2811, 0.3676]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.1063, 0.9255, 0.6653, ..., 0.3278, 0.7920, 0.4701]) +tensor([0.7181, 0.4244, 0.4435, ..., 0.9840, 0.0888, 0.6399]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -17,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 1.2946875095367432 seconds +Time: 0.8992819786071777 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '811', '-ss', '50000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.6258299350738525} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1167', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.299102067947388} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 521, 1028, ..., 24999005, - 24999496, 25000000]), - col_indices=tensor([ 169, 333, 382, ..., 49620, 49646, 49746]), - values=tensor([0.1336, 0.2367, 0.6093, ..., 0.7411, 0.2218, 0.9154]), +tensor(crow_indices=tensor([ 0, 466, 988, ..., 24999020, + 24999475, 25000000]), + col_indices=tensor([ 26, 43, 133, ..., 49742, 49774, 49815]), + values=tensor([0.5412, 0.2221, 0.8751, ..., 0.6785, 0.2543, 0.9299]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.7321, 0.9416, 0.5259, ..., 0.9099, 0.7583, 0.2580]) +tensor([0.6108, 0.5492, 0.3892, ..., 0.2620, 0.5593, 0.4517]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -37,19 +36,16 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 7.6258299350738525 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1116', '-ss', '50000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.347081184387207} +Time: 10.299102067947388 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 467, 956, ..., 24998994, - 24999500, 25000000]), - col_indices=tensor([ 21, 163, 165, ..., 49855, 49860, 49938]), - values=tensor([0.3050, 0.9077, 0.0930, ..., 0.0680, 0.0415, 0.3010]), +tensor(crow_indices=tensor([ 0, 466, 988, ..., 24999020, + 24999475, 25000000]), + col_indices=tensor([ 26, 43, 133, ..., 49742, 49774, 49815]), + values=tensor([0.5412, 0.2221, 0.8751, ..., 0.6785, 0.2543, 0.9299]), size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.5141, 0.6637, 0.6626, ..., 0.2332, 0.8993, 0.6899]) +tensor([0.6108, 0.5492, 0.3892, ..., 0.2620, 0.5593, 0.4517]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -57,30 +53,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000000 Density: 0.01 -Time: 10.347081184387207 seconds +Time: 10.299102067947388 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 467, 956, ..., 24998994, - 24999500, 25000000]), - col_indices=tensor([ 21, 163, 165, ..., 49855, 49860, 49938]), - values=tensor([0.3050, 0.9077, 0.0930, ..., 0.0680, 0.0415, 0.3010]), - size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) -tensor([0.5141, 0.6637, 0.6626, ..., 0.2332, 0.8993, 0.6899]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000000 -Density: 0.01 -Time: 10.347081184387207 seconds - -[18.41, 17.99, 18.33, 18.02, 17.96, 17.91, 21.56, 18.23, 18.08, 17.76] -[53.89] -52.42618942260742 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.347081184387207, 'TIME_S_1KI': 9.27157812221076, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2825.247347984314, 'W': 53.89} -[18.41, 17.99, 18.33, 18.02, 17.96, 17.91, 21.56, 18.23, 18.08, 17.76, 18.54, 18.29, 18.11, 17.98, 18.09, 18.71, 18.38, 18.01, 18.26, 18.02] -330.27500000000003 -16.51375 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.347081184387207, 'TIME_S_1KI': 9.27157812221076, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2825.247347984314, 'W': 53.89, 'J_1KI': 2531.5836451472346, 'W_1KI': 48.288530465949826, 'W_D': 37.37625, 'J_D': 1959.4943624067305, 'W_D_1KI': 33.491263440860216, 'J_D_1KI': 30.010092688942844} +[18.97, 18.46, 18.59, 18.54, 18.42, 18.73, 22.71, 18.61, 18.6, 18.87] +[77.39] +22.030972480773926 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1167, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.299102067947388, 'TIME_S_1KI': 8.82528026387951, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1704.976960287094, 'W': 77.39} +[18.97, 18.46, 18.59, 18.54, 18.42, 18.73, 22.71, 18.61, 18.6, 18.87, 18.92, 18.63, 18.51, 18.39, 18.7, 19.25, 18.58, 18.87, 18.56, 19.24] +340.15 +17.0075 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1167, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.299102067947388, 'TIME_S_1KI': 8.82528026387951, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1704.976960287094, 'W': 77.39, 'J_1KI': 1460.9913969897977, 'W_1KI': 66.31533847472151, 'W_D': 60.3825, 'J_D': 1330.2851958203316, 'W_D_1KI': 51.741645244215945, 'J_D_1KI': 44.33731383394682} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.05.json new file mode 100644 index 0000000..a21684b --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 245, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 
10.408657312393188, "TIME_S_1KI": 42.484315560788524, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3208.5461443829536, "W": 63.89, "J_1KI": 13096.106711767157, "W_1KI": 260.7755102040816, "W_D": 47.0305, "J_D": 2361.8646023384335, "W_D_1KI": 191.96122448979594, "J_D_1KI": 783.5152019991672} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.05.output new file mode 100644 index 0000000..36c63f1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_0.05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 4.282553195953369} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2509, 5000, ..., + 124994889, 124997381, 125000000]), + col_indices=tensor([ 39, 67, 120, ..., 49971, 49987, 49999]), + values=tensor([0.1239, 0.6091, 0.1309, ..., 0.2312, 0.6997, 0.4588]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.7734, 0.0486, 0.5881, ..., 0.1213, 0.2314, 0.3704]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 4.282553195953369 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '245', '-ss', '50000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.408657312393188} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2469, 4879, ..., + 124994934, 124997542, 125000000]), + col_indices=tensor([ 3, 19, 65, ..., 49948, 49961, 49973]), + values=tensor([0.8915, 0.8778, 0.4113, ..., 0.0094, 0.1568, 0.0935]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.4183, 0.8255, 0.8618, ..., 0.9842, 0.9207, 0.5380]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 10.408657312393188 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2469, 4879, ..., + 124994934, 124997542, 125000000]), + col_indices=tensor([ 3, 19, 65, ..., 49948, 49961, 49973]), + values=tensor([0.8915, 0.8778, 0.4113, ..., 0.0094, 0.1568, 0.0935]), + size=(50000, 50000), nnz=125000000, layout=torch.sparse_csr) +tensor([0.4183, 0.8255, 0.8618, ..., 0.9842, 0.9207, 0.5380]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000000 +Density: 0.05 +Time: 10.408657312393188 seconds + +[19.87, 19.45, 19.2, 18.52, 18.89, 18.52, 19.03, 18.42, 18.52, 18.49] +[63.89] +50.21984887123108 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 245, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.408657312393188, 'TIME_S_1KI': 42.484315560788524, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3208.5461443829536, 'W': 63.89} +[19.87, 19.45, 19.2, 18.52, 18.89, 18.52, 19.03, 18.42, 18.52, 18.49, 19.18, 18.4, 18.51, 18.38, 18.95, 18.47, 18.59, 18.61, 18.77, 18.38] +337.18999999999994 +16.859499999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 245, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.408657312393188, 'TIME_S_1KI': 42.484315560788524, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3208.5461443829536, 'W': 63.89, 'J_1KI': 13096.106711767157, 'W_1KI': 260.7755102040816, 'W_D': 47.0305, 'J_D': 2361.8646023384335, 'W_D_1KI': 191.96122448979594, 'J_D_1KI': 783.5152019991672} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json index 3827350..ffdee89 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 114513, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.690773725509644, "TIME_S_1KI": 0.09335860317614283, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1104.0986349916457, "W": 76.13, "J_1KI": 9.641688148870832, "W_1KI": 0.6648153484757189, "W_D": 59.82149999999999, "J_D": 867.5796202962398, "W_D_1KI": 0.5223992035838725, "J_D_1KI": 0.004561920511940762} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 111596, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.293413400650024, "TIME_S_1KI": 0.09223819313102642, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1166.8953761601447, "W": 84.57, "J_1KI": 10.456426540020653, "W_1KI": 0.7578228610344456, "W_D": 67.21674999999999, "J_D": 927.4555371350644, "W_D_1KI": 0.6023222158500304, "J_D_1KI": 0.005397345925033428} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output index 1524e8c..0d924d9 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.025936126708984375} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.02682971954345703} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), - col_indices=tensor([20193, 9953, 42880, ..., 12429, 7497, 42914]), - values=tensor([0.2197, 0.8269, 0.1857, ..., 0.3505, 0.1509, 0.9771]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([25652, 38257, 1169, ..., 2722, 11737, 19460]), + values=tensor([0.9159, 0.4620, 0.8343, ..., 0.9065, 0.0560, 0.0364]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.1918, 0.1249, 0.1152, ..., 0.8034, 0.5794, 0.7689]) +tensor([0.9019, 0.0029, 0.3566, ..., 0.1355, 0.7017, 0.7582]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -15,18 +15,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 0.025936126708984375 seconds +Time: 0.02682971954345703 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '40484', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.938389301300049} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '39135', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.6821634769439697} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 2, ..., 24997, 24998, 25000]), - col_indices=tensor([10460, 10153, 3528, ..., 24271, 7757, 10191]), - values=tensor([0.9506, 0.4415, 0.5851, ..., 0.4538, 0.6997, 0.3353]), +tensor(crow_indices=tensor([ 0, 1, 4, ..., 24999, 25000, 25000]), + col_indices=tensor([ 1978, 22305, 34616, ..., 21626, 43006, 7893]), + values=tensor([0.7658, 0.0431, 0.6752, ..., 0.0137, 0.8831, 0.2221]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8879, 0.8298, 0.7702, ..., 0.5204, 0.0041, 0.9281]) +tensor([0.4558, 0.0497, 0.4534, ..., 0.8709, 0.2398, 0.0989]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -34,18 +34,18 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 3.938389301300049 seconds +Time: 3.6821634769439697 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '107932', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.896521091461182} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '111596', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.293413400650024} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), - col_indices=tensor([28383, 18616, 44948, ..., 18982, 36427, 31817]), - values=tensor([0.7119, 0.6692, 0.1695, ..., 0.9276, 0.2360, 0.8672]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([38836, 19073, 33792, ..., 47433, 48865, 18364]), + values=tensor([0.8428, 0.5263, 0.8912, ..., 0.2987, 0.0859, 0.8646]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.3408, 0.5698, 0.4224, ..., 0.7704, 0.6104, 0.7559]) +tensor([0.6948, 0.0466, 0.9593, ..., 0.0633, 0.1230, 0.1300]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -53,18 +53,15 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 9.896521091461182 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '114513', '-ss', '50000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.690773725509644} +Time: 10.293413400650024 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([ 6936, 24119, 42629, ..., 33437, 2193, 4338]), - values=tensor([0.2031, 0.4334, 0.3517, ..., 0.1037, 0.1982, 0.4022]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([38836, 19073, 33792, ..., 47433, 48865, 18364]), + values=tensor([0.8428, 0.5263, 0.8912, ..., 0.2987, 0.0859, 0.8646]), size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0121, 0.3083, 0.6972, ..., 0.1499, 0.8195, 0.8871]) +tensor([0.6948, 0.0466, 0.9593, ..., 0.0633, 0.1230, 0.1300]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -72,29 +69,13 @@ Rows: 50000 Size: 2500000000 NNZ: 25000 Density: 1e-05 -Time: 10.690773725509644 seconds +Time: 10.293413400650024 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), - col_indices=tensor([ 6936, 24119, 42629, ..., 33437, 2193, 4338]), - values=tensor([0.2031, 0.4334, 0.3517, ..., 0.1037, 0.1982, 0.4022]), - size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) -tensor([0.0121, 0.3083, 0.6972, ..., 0.1499, 0.8195, 0.8871]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([50000, 50000]) -Rows: 50000 -Size: 2500000000 -NNZ: 25000 -Density: 1e-05 -Time: 10.690773725509644 seconds - -[18.43, 18.05, 18.18, 17.94, 17.98, 18.17, 18.18, 17.96, 18.08, 18.28] -[76.13] -14.502806186676025 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 114513, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.690773725509644, 'TIME_S_1KI': 0.09335860317614283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1104.0986349916457, 'W': 76.13} -[18.43, 18.05, 18.18, 17.94, 17.98, 18.17, 18.18, 17.96, 18.08, 18.28, 18.27, 17.98, 18.37, 18.47, 18.2, 17.9, 18.1, 18.12, 18.02, 17.96] -326.17 -16.308500000000002 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 114513, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.690773725509644, 'TIME_S_1KI': 0.09335860317614283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1104.0986349916457, 'W': 76.13, 'J_1KI': 9.641688148870832, 'W_1KI': 0.6648153484757189, 'W_D': 59.82149999999999, 'J_D': 867.5796202962398, 'W_D_1KI': 0.5223992035838725, 'J_D_1KI': 0.004561920511940762} +[20.33, 18.49, 18.87, 18.45, 18.55, 18.56, 18.9, 23.02, 19.39, 19.55] +[84.57] +13.797982454299927 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 111596, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.293413400650024, 'TIME_S_1KI': 0.09223819313102642, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1166.8953761601447, 'W': 84.57} +[20.33, 18.49, 18.87, 18.45, 18.55, 18.56, 18.9, 23.02, 19.39, 19.55, 19.17, 18.55, 18.35, 22.92, 19.21, 18.36, 
19.07, 18.44, 19.11, 18.6] +347.065 +17.35325 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 111596, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.293413400650024, 'TIME_S_1KI': 0.09223819313102642, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1166.8953761601447, 'W': 84.57, 'J_1KI': 10.456426540020653, 'W_1KI': 0.7578228610344456, 'W_D': 67.21674999999999, 'J_D': 927.4555371350644, 'W_D_1KI': 0.6023222158500304, 'J_D_1KI': 0.005397345925033428} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.json index 7635079..19bec8b 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 87123, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.505861759185791, "TIME_S_1KI": 0.12058654728585783, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1083.1826461410521, "W": 80.24, "J_1KI": 12.43279783915903, "W_1KI": 0.9209967517188342, "W_D": 63.674249999999994, "J_D": 859.5568619896172, "W_D_1KI": 0.7308546537653662, "J_D_1KI": 0.00838876822154157} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 87046, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.124921798706055, "TIME_S_1KI": 0.11631691058412856, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1227.6771955490112, "W": 88.8, "J_1KI": 14.103774964375287, "W_1KI": 1.020150265376927, "W_D": 72.0095, "J_D": 995.5452816766501, "W_D_1KI": 0.8272580015164396, "J_D_1KI": 0.009503687722772323} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.output index 089ad96..a720845 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.029273509979248047} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.029453277587890625} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 5, 7, ..., 124995, 124998, +tensor(crow_indices=tensor([ 0, 3, 6, ..., 124996, 124999, 125000]), - col_indices=tensor([ 551, 18742, 22548, ..., 32794, 16422, 37041]), - values=tensor([0.7421, 0.1633, 0.4685, ..., 0.4955, 0.1690, 0.9373]), + col_indices=tensor([11626, 18674, 38151, ..., 18667, 46031, 40236]), + values=tensor([0.7612, 0.8098, 0.0566, ..., 0.5099, 0.8438, 0.7248]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.5671, 0.1718, 0.7006, ..., 0.0603, 0.4651, 0.4190]) +tensor([0.8787, 0.0380, 0.9789, ..., 0.5227, 0.6173, 0.3832]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -16,19 +16,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 0.029273509979248047 seconds +Time: 0.029453277587890625 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '35868', '-ss', '50000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.322763681411743} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '35649', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.3001580238342285} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 6, 7, ..., 124993, 124996, +tensor(crow_indices=tensor([ 0, 1, 4, ..., 124996, 124999, 125000]), - col_indices=tensor([10292, 17103, 19384, ..., 21480, 22459, 30474]), - values=tensor([0.3992, 0.1013, 0.2691, ..., 0.1460, 0.2288, 0.5075]), + col_indices=tensor([ 8274, 53, 3255, ..., 22982, 41193, 1491]), + values=tensor([0.6827, 0.3956, 0.5638, ..., 0.4920, 0.3870, 0.5841]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.9479, 0.9531, 0.4260, ..., 0.3198, 0.2541, 0.8697]) +tensor([0.8552, 0.7816, 0.1886, ..., 0.7134, 0.7031, 0.0804]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -36,19 +36,19 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 4.322763681411743 seconds +Time: 4.3001580238342285 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '87123', '-ss', '50000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.505861759185791} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '87046', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.124921798706055} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 124993, 124998, +tensor(crow_indices=tensor([ 0, 2, 8, ..., 124997, 124998, 125000]), - col_indices=tensor([ 2935, 11122, 10966, ..., 49613, 4331, 15007]), - values=tensor([0.3574, 0.4392, 0.6710, ..., 0.1033, 0.1015, 0.5102]), + col_indices=tensor([39599, 48767, 2471, ..., 11144, 1728, 14804]), + values=tensor([0.9256, 0.8305, 0.6433, ..., 0.8812, 0.1393, 0.1023]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.0826, 0.0461, 0.6873, ..., 0.1063, 0.7746, 0.5118]) +tensor([0.5907, 0.4322, 0.8713, ..., 0.0184, 0.2881, 0.6574]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -56,16 +56,16 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.505861759185791 seconds +Time: 10.124921798706055 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 2, ..., 124993, 124998, +tensor(crow_indices=tensor([ 0, 2, 8, ..., 124997, 124998, 125000]), - col_indices=tensor([ 2935, 11122, 10966, ..., 49613, 4331, 15007]), - values=tensor([0.3574, 0.4392, 0.6710, ..., 0.1033, 0.1015, 0.5102]), + col_indices=tensor([39599, 48767, 2471, ..., 11144, 1728, 14804]), + values=tensor([0.9256, 0.8305, 0.6433, ..., 0.8812, 0.1393, 0.1023]), size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) -tensor([0.0826, 0.0461, 0.6873, ..., 0.1063, 0.7746, 0.5118]) +tensor([0.5907, 0.4322, 0.8713, ..., 0.0184, 0.2881, 0.6574]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([50000, 50000]) @@ -73,13 +73,13 @@ Rows: 50000 Size: 2500000000 NNZ: 125000 Density: 5e-05 -Time: 10.505861759185791 seconds +Time: 10.124921798706055 seconds -[18.58, 18.04, 18.0, 17.91, 17.9, 20.59, 18.05, 18.07, 17.88, 18.18] -[80.24] -13.499285221099854 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 87123, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.505861759185791, 'TIME_S_1KI': 0.12058654728585783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1083.1826461410521, 'W': 80.24} -[18.58, 18.04, 18.0, 17.91, 17.9, 20.59, 18.05, 18.07, 17.88, 18.18, 18.74, 21.37, 18.26, 18.13, 18.12, 17.84, 18.06, 17.91, 18.35, 18.17] -331.315 -16.56575 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 87123, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.505861759185791, 'TIME_S_1KI': 0.12058654728585783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1083.1826461410521, 'W': 80.24, 'J_1KI': 12.43279783915903, 'W_1KI': 0.9209967517188342, 'W_D': 63.674249999999994, 'J_D': 859.5568619896172, 'W_D_1KI': 0.7308546537653662, 'J_D_1KI': 0.00838876822154157} +[18.89, 18.63, 18.4, 18.48, 18.59, 18.44, 18.53, 18.56, 18.51, 18.56] +[88.8] +13.825193643569946 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 87046, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.124921798706055, 'TIME_S_1KI': 0.11631691058412856, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1227.6771955490112, 'W': 88.8} +[18.89, 18.63, 18.4, 18.48, 18.59, 18.44, 18.53, 18.56, 18.51, 18.56, 18.7, 18.82, 18.63, 18.49, 18.69, 18.87, 18.78, 19.66, 18.44, 18.43] +335.81000000000006 +16.7905 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 87046, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.124921798706055, 'TIME_S_1KI': 0.11631691058412856, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1227.6771955490112, 'W': 88.8, 'J_1KI': 14.103774964375287, 'W_1KI': 1.020150265376927, 'W_D': 72.0095, 'J_D': 995.5452816766501, 'W_D_1KI': 0.8272580015164396, 'J_D_1KI': 0.009503687722772323} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json index c76a143..97f2f8f 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 334626, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.109118700027466, "TIME_S_1KI": 0.030210200940833844, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1025.1804789018631, "W": 73.29, "J_1KI": 3.063660561049838, "W_1KI": 0.219020637965968, "W_D": 56.847750000000005, "J_D": 795.1862951220274, "W_D_1KI": 0.16988443814885876, "J_D_1KI": 0.0005076845139016656} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 338825, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.060312271118164, "TIME_S_1KI": 0.029691764985222944, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1132.0499188041686, "W": 81.52, "J_1KI": 3.341105050702187, "W_1KI": 0.24059617796797755, "W_D": 64.5615, "J_D": 896.5510406388044, "W_D_1KI": 0.19054526673061312, "J_D_1KI": 0.000562370742213866} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output index 81b5928..2440778 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.020055532455444336} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.0212860107421875} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), - col_indices=tensor([2122, 3396, 4900, ..., 3006, 1251, 2017]), - values=tensor([0.1868, 0.0259, 0.3673, ..., 0.0909, 0.5358, 0.3608]), +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2500, 2500, 2500]), + col_indices=tensor([ 862, 3282, 1685, ..., 886, 3494, 391]), + values=tensor([0.7655, 0.9778, 0.6082, ..., 0.3892, 0.0884, 0.1248]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.5194, 0.1996, 0.4802, ..., 0.7962, 0.4168, 0.5561]) +tensor([0.5013, 0.5983, 0.1757, ..., 0.3100, 0.7614, 0.7422]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 0.020055532455444336 seconds +Time: 0.0212860107421875 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52354', '-ss', '5000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.6427783966064453} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '49328', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.528644323348999} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 2498, 2499, 2500]), - col_indices=tensor([2755, 4785, 642, ..., 761, 1671, 4009]), - values=tensor([0.6216, 0.3711, 0.5927, ..., 0.4412, 0.8122, 0.2675]), + col_indices=tensor([1829, 588, 1447, ..., 3492, 17, 289]), + values=tensor([0.9440, 0.4060, 0.2996, ..., 0.0921, 0.1221, 0.2711]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.4766, 0.5997, 0.2696, ..., 0.3490, 0.2681, 0.0383]) +tensor([0.8335, 0.5679, 0.9472, ..., 0.1841, 0.8163, 0.9061]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 1.6427783966064453 seconds +Time: 1.528644323348999 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '334626', '-ss', '5000', '-sd', '0.0001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.109118700027466} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '338825', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.060312271118164} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), - col_indices=tensor([3166, 2984, 4242, ..., 1801, 191, 2968]), - values=tensor([0.8061, 0.6429, 0.9344, ..., 0.0259, 0.3545, 0.7535]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), + col_indices=tensor([2907, 4652, 3947, ..., 4388, 1313, 2255]), + values=tensor([0.3379, 0.0847, 0.0414, ..., 0.5545, 0.2311, 0.7207]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.9083, 0.2521, 0.7196, ..., 0.0784, 0.7825, 0.2390]) +tensor([0.3290, 0.4647, 0.9549, ..., 0.7359, 0.9189, 0.2327]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.109118700027466 seconds +Time: 10.060312271118164 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), - col_indices=tensor([3166, 2984, 4242, ..., 1801, 191, 2968]), - values=tensor([0.8061, 0.6429, 0.9344, ..., 0.0259, 0.3545, 0.7535]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), + col_indices=tensor([2907, 4652, 3947, ..., 4388, 1313, 2255]), + values=tensor([0.3379, 0.0847, 0.0414, ..., 0.5545, 0.2311, 0.7207]), size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) -tensor([0.9083, 0.2521, 0.7196, ..., 0.0784, 0.7825, 0.2390]) +tensor([0.3290, 0.4647, 0.9549, ..., 0.7359, 0.9189, 0.2327]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500 Density: 0.0001 -Time: 10.109118700027466 seconds +Time: 10.060312271118164 seconds -[20.29, 18.13, 18.23, 18.27, 18.36, 17.99, 18.37, 18.01, 18.07, 17.87] -[73.29] -13.987999439239502 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 334626, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.109118700027466, 'TIME_S_1KI': 0.030210200940833844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1025.1804789018631, 'W': 73.29} -[20.29, 18.13, 18.23, 18.27, 18.36, 17.99, 18.37, 18.01, 18.07, 17.87, 18.97, 18.06, 18.34, 18.0, 18.27, 18.07, 18.01, 18.54, 18.55, 18.02] -328.845 -16.44225 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 334626, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.109118700027466, 'TIME_S_1KI': 0.030210200940833844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1025.1804789018631, 'W': 73.29, 'J_1KI': 3.063660561049838, 'W_1KI': 0.219020637965968, 'W_D': 56.847750000000005, 'J_D': 795.1862951220274, 'W_D_1KI': 0.16988443814885876, 'J_D_1KI': 0.0005076845139016656} +[18.81, 19.85, 21.12, 18.64, 18.63, 18.55, 18.61, 18.66, 18.48, 18.77] +[81.52] +13.886775255203247 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 338825, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.060312271118164, 'TIME_S_1KI': 0.029691764985222944, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.0499188041686, 'W': 81.52} +[18.81, 19.85, 21.12, 18.64, 18.63, 18.55, 18.61, 18.66, 18.48, 18.77, 18.91, 18.57, 18.97, 18.52, 18.57, 18.47, 18.75, 18.68, 18.38, 18.95] +339.16999999999996 +16.958499999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 338825, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.060312271118164, 'TIME_S_1KI': 0.029691764985222944, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1132.0499188041686, 'W': 81.52, 'J_1KI': 3.341105050702187, 'W_1KI': 0.24059617796797755, 'W_D': 64.5615, 'J_D': 896.5510406388044, 'W_D_1KI': 0.19054526673061312, 'J_D_1KI': 0.000562370742213866} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json index 417052f..0ffe450 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 248882, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.432960033416748, "TIME_S_1KI": 0.041919303257836035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1049.6097160863876, "W": 75.02, "J_1KI": 4.2172986237911445, "W_1KI": 0.3014279859531826, "W_D": 58.4725, "J_D": 818.0925636345148, "W_D_1KI": 0.2349406546074043, "J_D_1KI": 0.0009439841153936576} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 240786, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.539137125015259, "TIME_S_1KI": 0.04376972550320724, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1149.7912251281737, "W": 82.49, "J_1KI": 4.775158128496564, "W_1KI": 0.3425863629945262, "W_D": 65.42224999999999, "J_D": 911.8914896125792, "W_D_1KI": 0.2717028813967589, "J_D_1KI": 0.001128399829710859} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output index 121e30b..b4f6567 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.018964052200317383} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.019769668579101562} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 14, ..., 24990, 24995, 25000]), - col_indices=tensor([ 484, 2538, 3016, ..., 2694, 4380, 4909]), - values=tensor([0.3483, 0.1743, 0.1939, ..., 0.2265, 0.8602, 0.7977]), +tensor(crow_indices=tensor([ 0, 12, 15, ..., 24990, 24996, 25000]), + col_indices=tensor([ 63, 354, 650, ..., 3532, 4153, 4665]), + values=tensor([0.4182, 0.3317, 0.3643, ..., 0.9464, 0.1095, 0.7227]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.6164, 0.4905, 0.7241, ..., 0.3672, 0.4239, 0.9077]) +tensor([0.5240, 0.7741, 0.0525, ..., 0.8468, 0.5656, 0.2849]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 0.018964052200317383 seconds +Time: 0.019769668579101562 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '55367', '-ss', '5000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.335857391357422} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53111', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.316013813018799} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 4, 9, ..., 24986, 24994, 25000]), - col_indices=tensor([1163, 1240, 1422, ..., 4522, 4571, 4830]), - values=tensor([0.2729, 0.7937, 0.6768, ..., 0.2019, 0.8649, 0.0759]), +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24986, 24993, 25000]), + col_indices=tensor([1400, 1599, 2589, ..., 2553, 2849, 3160]), + values=tensor([0.3612, 0.8374, 0.3348, ..., 0.4551, 0.5749, 0.2985]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.9266, 0.1779, 0.1408, ..., 0.8566, 0.7762, 0.0695]) +tensor([0.7196, 0.9731, 0.9838, ..., 0.2317, 0.3300, 0.5980]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 2.335857391357422 seconds +Time: 2.316013813018799 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '248882', '-ss', '5000', '-sd', '0.001'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.432960033416748} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '240786', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.539137125015259} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 15, ..., 24986, 24991, 25000]), - col_indices=tensor([ 131, 267, 600, ..., 3068, 3643, 3839]), - values=tensor([0.7605, 0.7816, 0.2401, ..., 0.7557, 0.2099, 0.5290]), +tensor(crow_indices=tensor([ 0, 7, 12, ..., 24989, 24993, 25000]), + col_indices=tensor([ 225, 485, 1185, ..., 1376, 2470, 3978]), + values=tensor([0.3572, 0.7250, 0.4986, ..., 0.6618, 0.0645, 0.8832]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8976, 0.4401, 0.6879, ..., 0.0741, 0.3573, 0.5052]) +tensor([0.0319, 0.1498, 0.5778, ..., 0.9700, 0.0104, 0.1348]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.432960033416748 seconds +Time: 10.539137125015259 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 10, 15, ..., 24986, 24991, 25000]), - col_indices=tensor([ 131, 267, 600, ..., 3068, 3643, 3839]), - values=tensor([0.7605, 0.7816, 0.2401, ..., 0.7557, 0.2099, 0.5290]), +tensor(crow_indices=tensor([ 0, 7, 12, ..., 24989, 24993, 25000]), + col_indices=tensor([ 225, 485, 1185, ..., 1376, 2470, 3978]), + values=tensor([0.3572, 0.7250, 0.4986, ..., 0.6618, 0.0645, 0.8832]), size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) -tensor([0.8976, 0.4401, 0.6879, ..., 0.0741, 0.3573, 0.5052]) +tensor([0.0319, 0.1498, 0.5778, ..., 0.9700, 0.0104, 0.1348]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 25000 Density: 0.001 -Time: 10.432960033416748 seconds +Time: 10.539137125015259 seconds -[18.55, 18.1, 18.04, 18.77, 18.18, 18.14, 18.08, 18.33, 18.19, 18.07] -[75.02] -13.991065263748169 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 248882, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.432960033416748, 'TIME_S_1KI': 0.041919303257836035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.6097160863876, 'W': 75.02} -[18.55, 18.1, 18.04, 18.77, 18.18, 18.14, 18.08, 18.33, 18.19, 18.07, 18.74, 17.95, 18.05, 18.34, 18.14, 19.14, 18.15, 18.14, 20.44, 18.18] -330.95 -16.5475 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 248882, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.432960033416748, 'TIME_S_1KI': 0.041919303257836035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.6097160863876, 'W': 75.02, 'J_1KI': 4.2172986237911445, 'W_1KI': 0.3014279859531826, 'W_D': 58.4725, 'J_D': 818.0925636345148, 'W_D_1KI': 0.2349406546074043, 'J_D_1KI': 0.0009439841153936576} +[18.8, 18.42, 18.61, 20.73, 19.73, 18.52, 19.41, 18.51, 18.9, 18.66] +[82.49] +13.938552856445312 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 240786, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.539137125015259, 'TIME_S_1KI': 0.04376972550320724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1149.7912251281737, 'W': 82.49} +[18.8, 18.42, 18.61, 20.73, 19.73, 18.52, 19.41, 18.51, 18.9, 18.66, 19.9, 18.56, 19.41, 18.77, 19.35, 18.71, 18.58, 18.48, 18.71, 18.55] +341.355 +17.06775 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 240786, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.539137125015259, 'TIME_S_1KI': 0.04376972550320724, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1149.7912251281737, 'W': 82.49, 'J_1KI': 4.775158128496564, 'W_1KI': 0.3425863629945262, 'W_D': 65.42224999999999, 'J_D': 911.8914896125792, 'W_D_1KI': 0.2717028813967589, 'J_D_1KI': 0.001128399829710859} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json index e80d2db..3d2093d 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json +++ 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 163647, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.69955587387085, "TIME_S_1KI": 0.06538192495964393, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1158.7786533069611, "W": 84.04, "J_1KI": 7.080964840827887, "W_1KI": 0.5135443973919473, "W_D": 67.71475000000001, "J_D": 933.6792814614178, "W_D_1KI": 0.41378546505588254, "J_D_1KI": 0.002528524599020346} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 164896, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.690629959106445, "TIME_S_1KI": 0.06483256088144312, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1271.159963607788, "W": 90.0, "J_1KI": 7.708858696437683, "W_1KI": 0.545798563943334, "W_D": 73.195, "J_D": 1033.8061504030227, "W_D_1KI": 0.44388584319813695, "J_D_1KI": 0.0026919139530257674} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output index 483c400..f4564a4 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.021794557571411133} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.023497343063354492} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 52, 109, ..., 249903, 249954, +tensor(crow_indices=tensor([ 0, 52, 106, ..., 249919, 249958, 250000]), - col_indices=tensor([ 37, 94, 126, ..., 4726, 4735, 4938]), - values=tensor([0.6014, 0.9404, 0.8499, ..., 0.7854, 0.8553, 0.6608]), + col_indices=tensor([ 86, 197, 474, ..., 4720, 4843, 4964]), + values=tensor([0.1328, 0.0055, 0.0846, ..., 0.6221, 0.9046, 0.0540]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.8748, 0.2409, 0.9137, ..., 0.2396, 0.5569, 0.4924]) +tensor([0.1198, 0.0635, 0.3662, ..., 0.5079, 0.7170, 0.6953]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 0.021794557571411133 seconds +Time: 0.023497343063354492 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '48177', '-ss', '5000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.0911431312561035} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '44685', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.845383405685425} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 42, 103, ..., 249895, 249947, +tensor(crow_indices=tensor([ 0, 41, 90, ..., 249910, 249957, 250000]), - col_indices=tensor([ 128, 146, 225, ..., 4883, 4900, 4933]), - values=tensor([0.9316, 0.8049, 0.8923, ..., 0.5892, 0.9266, 0.8295]), + col_indices=tensor([ 107, 286, 590, ..., 4505, 4711, 4778]), + values=tensor([0.0122, 0.5794, 0.5888, ..., 0.5900, 0.1134, 0.8897]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.8118, 0.4201, 0.8184, ..., 0.0833, 0.7283, 0.6165]) +tensor([0.3990, 0.8037, 0.0608, ..., 0.0444, 0.2711, 0.1109]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 3.0911431312561035 seconds +Time: 2.845383405685425 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '163647', '-ss', '5000', '-sd', '0.01'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.69955587387085} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '164896', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.690629959106445} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 65, 110, ..., 249906, 249948, +tensor(crow_indices=tensor([ 0, 63, 113, ..., 249898, 249954, 250000]), - col_indices=tensor([ 32, 165, 212, ..., 4365, 4391, 4539]), - values=tensor([0.5399, 0.4522, 0.1183, ..., 0.3103, 0.6929, 0.7632]), + col_indices=tensor([ 50, 132, 187, ..., 4892, 4896, 4898]), + values=tensor([0.7316, 0.7125, 0.7015, ..., 0.2059, 0.7394, 0.1021]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.3241, 0.6966, 0.4101, ..., 0.4425, 0.6108, 0.0322]) +tensor([0.9239, 0.8999, 0.9114, ..., 0.0588, 0.7389, 0.7470]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.69955587387085 seconds +Time: 10.690629959106445 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 65, 110, ..., 249906, 249948, +tensor(crow_indices=tensor([ 0, 63, 113, ..., 249898, 249954, 250000]), - col_indices=tensor([ 32, 165, 212, ..., 4365, 4391, 4539]), - values=tensor([0.5399, 0.4522, 0.1183, ..., 0.3103, 0.6929, 0.7632]), + col_indices=tensor([ 50, 132, 187, ..., 4892, 4896, 4898]), + values=tensor([0.7316, 0.7125, 0.7015, ..., 0.2059, 0.7394, 0.1021]), size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) -tensor([0.3241, 0.6966, 0.4101, ..., 0.4425, 0.6108, 0.0322]) +tensor([0.9239, 0.8999, 0.9114, ..., 0.0588, 0.7389, 0.7470]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 250000 Density: 0.01 -Time: 10.69955587387085 seconds +Time: 10.690629959106445 seconds -[18.19, 18.02, 18.18, 18.28, 17.94, 18.08, 18.33, 18.44, 17.82, 18.2] -[84.04] -13.788418054580688 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 163647, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.69955587387085, 'TIME_S_1KI': 0.06538192495964393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1158.7786533069611, 'W': 84.04} -[18.19, 18.02, 18.18, 18.28, 17.94, 18.08, 18.33, 18.44, 17.82, 18.2, 18.17, 18.08, 18.2, 17.87, 17.92, 18.23, 18.75, 17.8, 18.09, 18.39] -326.505 -16.32525 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 163647, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.69955587387085, 'TIME_S_1KI': 0.06538192495964393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1158.7786533069611, 'W': 84.04, 'J_1KI': 7.080964840827887, 'W_1KI': 0.5135443973919473, 'W_D': 67.71475000000001, 'J_D': 933.6792814614178, 'W_D_1KI': 0.41378546505588254, 'J_D_1KI': 0.002528524599020346} +[18.92, 18.71, 18.61, 18.55, 18.75, 18.47, 18.65, 18.47, 18.78, 18.74] +[90.0] +14.12399959564209 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 164896, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.690629959106445, 'TIME_S_1KI': 0.06483256088144312, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1271.159963607788, 'W': 90.0} +[18.92, 18.71, 18.61, 18.55, 18.75, 18.47, 18.65, 18.47, 18.78, 18.74, 18.9, 18.39, 18.87, 18.47, 19.4, 18.46, 18.89, 18.43, 18.68, 18.48] +336.1 +16.805 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 164896, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.690629959106445, 'TIME_S_1KI': 0.06483256088144312, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1271.159963607788, 'W': 90.0, 'J_1KI': 7.708858696437683, 'W_1KI': 0.545798563943334, 'W_D': 73.195, 'J_D': 1033.8061504030227, 'W_D_1KI': 0.44388584319813695, 'J_D_1KI': 0.0026919139530257674} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json index 868cce2..e091508 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json +++ 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 46354, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.460933923721313, "TIME_S_1KI": 0.22567489156753062, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1249.411584854126, "W": 87.62, "J_1KI": 26.95369514721763, "W_1KI": 1.8902360098373387, "W_D": 71.2095, "J_D": 1015.4071473598481, "W_D_1KI": 1.5362104672735903, "J_D_1KI": 0.03314083935094254} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 44184, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.627149820327759, "TIME_S_1KI": 0.24052032003276658, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1296.8053016662598, "W": 90.25, "J_1KI": 29.350110937585097, "W_1KI": 2.0425946043816765, "W_D": 73.50450000000001, "J_D": 1056.188645942688, "W_D_1KI": 1.6635999456816948, "J_D_1KI": 0.03765163737284299} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output index 056b89d..02790e9 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03854489326477051} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03829145431518555} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 268, 547, ..., 1249522, - 1249752, 1250000]), - col_indices=tensor([ 55, 88, 92, ..., 4943, 4993, 4995]), - values=tensor([0.0205, 0.9162, 0.9855, ..., 0.3784, 0.4892, 0.4396]), +tensor(crow_indices=tensor([ 0, 264, 533, ..., 1249509, + 1249753, 1250000]), + col_indices=tensor([ 6, 18, 20, ..., 4982, 4996, 4998]), + values=tensor([0.1975, 0.6349, 0.2442, ..., 0.9605, 0.8086, 0.6932]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.2348, 0.0514, 0.2847, ..., 0.2791, 0.0660, 0.1214]) +tensor([0.7749, 0.3521, 0.9557, ..., 0.7060, 0.2617, 0.4155]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 0.03854489326477051 seconds +Time: 0.03829145431518555 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27240', '-ss', '5000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 6.170231342315674} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27421', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 6.516348361968994} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 248, 482, ..., 1249549, - 1249769, 1250000]), - col_indices=tensor([ 15, 21, 37, ..., 4863, 4919, 4974]), - values=tensor([0.5137, 0.1549, 0.4159, ..., 0.2935, 0.1856, 0.8128]), +tensor(crow_indices=tensor([ 0, 264, 511, ..., 1249491, + 1249738, 1250000]), + col_indices=tensor([ 39, 40, 44, ..., 4968, 4977, 4997]), + values=tensor([0.1995, 0.5311, 0.6633, ..., 0.9739, 0.8364, 0.4535]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.7661, 0.7592, 0.8422, ..., 0.1260, 0.8520, 0.6179]) +tensor([0.6752, 0.8408, 0.9129, ..., 0.7977, 0.9220, 0.0568]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 6.170231342315674 seconds +Time: 6.516348361968994 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46354', '-ss', '5000', '-sd', '0.05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.460933923721313} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '44184', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.627149820327759} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 262, 517, ..., 1249525, - 1249760, 1250000]), - col_indices=tensor([ 1, 93, 112, ..., 4957, 4968, 4975]), - values=tensor([0.1837, 0.8744, 0.5620, ..., 0.5820, 0.1645, 0.1343]), +tensor(crow_indices=tensor([ 0, 251, 483, ..., 1249489, + 1249737, 1250000]), + col_indices=tensor([ 4, 6, 44, ..., 4966, 4973, 4995]), + values=tensor([0.2877, 0.3585, 0.1690, ..., 0.3030, 0.5649, 0.3335]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.3700, 0.8319, 0.5287, ..., 0.8208, 0.8249, 0.9051]) +tensor([0.1025, 0.5335, 0.0276, ..., 0.7210, 0.1987, 0.0972]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.460933923721313 seconds +Time: 10.627149820327759 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 262, 517, ..., 1249525, - 1249760, 1250000]), - col_indices=tensor([ 1, 93, 112, ..., 4957, 4968, 4975]), - values=tensor([0.1837, 0.8744, 0.5620, ..., 0.5820, 0.1645, 0.1343]), +tensor(crow_indices=tensor([ 0, 251, 483, ..., 1249489, + 1249737, 1250000]), + col_indices=tensor([ 4, 6, 44, ..., 4966, 4973, 4995]), + values=tensor([0.2877, 0.3585, 0.1690, ..., 0.3030, 0.5649, 0.3335]), size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) -tensor([0.3700, 0.8319, 0.5287, ..., 0.8208, 0.8249, 0.9051]) +tensor([0.1025, 0.5335, 0.0276, ..., 0.7210, 0.1987, 0.0972]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250000 Density: 0.05 -Time: 10.460933923721313 seconds +Time: 10.627149820327759 seconds -[18.2, 17.97, 18.75, 20.89, 18.05, 18.04, 17.92, 18.15, 18.09, 18.27] -[87.62] -14.25943374633789 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46354, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.460933923721313, 'TIME_S_1KI': 0.22567489156753062, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1249.411584854126, 'W': 87.62} -[18.2, 17.97, 18.75, 20.89, 18.05, 18.04, 17.92, 18.15, 18.09, 18.27, 18.35, 17.98, 17.96, 18.31, 17.83, 17.89, 18.02, 18.18, 17.85, 17.84] -328.21000000000004 -16.410500000000003 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46354, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.460933923721313, 'TIME_S_1KI': 0.22567489156753062, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1249.411584854126, 'W': 87.62, 'J_1KI': 26.95369514721763, 'W_1KI': 1.8902360098373387, 'W_D': 71.2095, 'J_D': 1015.4071473598481, 'W_D_1KI': 1.5362104672735903, 'J_D_1KI': 0.03314083935094254} +[18.92, 18.52, 18.65, 18.58, 18.51, 18.51, 19.18, 18.35, 18.69, 18.45] +[90.25] +14.369033813476562 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 44184, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.627149820327759, 'TIME_S_1KI': 0.24052032003276658, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1296.8053016662598, 'W': 90.25} +[18.92, 18.52, 18.65, 18.58, 18.51, 18.51, 19.18, 18.35, 18.69, 18.45, 18.88, 18.64, 18.5, 18.52, 18.57, 18.39, 18.74, 18.57, 18.42, 18.89] +334.90999999999997 +16.7455 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 44184, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.627149820327759, 'TIME_S_1KI': 0.24052032003276658, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1296.8053016662598, 'W': 90.25, 'J_1KI': 29.350110937585097, 'W_1KI': 2.0425946043816765, 'W_D': 73.50450000000001, 'J_D': 1056.188645942688, 'W_D_1KI': 1.6635999456816948, 'J_D_1KI': 0.03765163737284299} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json index 30ee0bd..f04e653 100644 --- 
a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 19580, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.499455213546753, "TIME_S_1KI": 0.5362336676990169, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1272.879629366398, "W": 87.53, "J_1KI": 65.00917412494371, "W_1KI": 4.470377936670071, "W_D": 70.8715, "J_D": 1030.6282263525725, "W_D_1KI": 3.6195863125638406, "J_D_1KI": 0.18486140513604907} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 18964, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.52883791923523, "TIME_S_1KI": 0.5552013245747326, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1289.2344594287874, "W": 89.18, "J_1KI": 67.98325561214867, "W_1KI": 4.702594389369332, "W_D": 72.16050000000001, "J_D": 1043.1913344876768, "W_D_1KI": 3.8051307740982923, "J_D_1KI": 0.20065022010642755} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output index e7e4d14..84960d6 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.06696105003356934} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.06770515441894531} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 497, 967, ..., 2499018, - 2499482, 2500000]), - col_indices=tensor([ 1, 5, 13, ..., 4954, 4967, 4978]), - values=tensor([0.9405, 0.9423, 0.1495, ..., 0.5665, 0.9976, 0.7425]), +tensor(crow_indices=tensor([ 0, 479, 947, ..., 2499022, + 2499522, 2500000]), + col_indices=tensor([ 24, 25, 38, ..., 4963, 4965, 4978]), + values=tensor([0.7367, 0.1744, 0.5199, ..., 0.8158, 0.8777, 0.0978]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.7464, 0.9379, 0.0503, ..., 0.7608, 0.2384, 0.8928]) +tensor([0.8006, 0.3041, 0.8106, ..., 0.5156, 0.6299, 0.1996]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 0.06696105003356934 seconds +Time: 0.06770515441894531 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15680', '-ss', '5000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.408252954483032} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15508', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.586194038391113} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 497, 993, ..., 2499009, - 2499527, 2500000]), - col_indices=tensor([ 20, 30, 45, ..., 4969, 4978, 4985]), - values=tensor([0.7997, 0.6173, 0.7759, ..., 0.4558, 0.1544, 0.5880]), +tensor(crow_indices=tensor([ 0, 486, 1021, ..., 2499001, + 2499503, 2500000]), + col_indices=tensor([ 6, 22, 32, ..., 4948, 4967, 4987]), + values=tensor([0.7770, 0.4847, 0.4860, ..., 0.5070, 0.8737, 0.9662]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.0537, 0.7107, 0.3297, ..., 0.5168, 0.3739, 0.3386]) +tensor([0.4938, 0.2706, 0.5992, ..., 0.2629, 0.2006, 0.7946]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 8.408252954483032 seconds +Time: 8.586194038391113 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '19580', '-ss', '5000', '-sd', '0.1'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.499455213546753} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '18964', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.52883791923523} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 479, 1002, ..., 2498942, - 2499462, 2500000]), - col_indices=tensor([ 2, 9, 12, ..., 4986, 4993, 4996]), - values=tensor([0.9703, 0.1385, 0.8259, ..., 0.8503, 0.3399, 0.6161]), +tensor(crow_indices=tensor([ 0, 476, 998, ..., 2498989, + 2499506, 2500000]), + col_indices=tensor([ 0, 10, 15, ..., 4984, 4997, 4999]), + values=tensor([0.5072, 0.7041, 0.1877, ..., 0.5081, 0.5481, 0.3350]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4198, 0.0425, 0.0591, ..., 0.3064, 0.5860, 0.9864]) +tensor([0.9938, 0.7251, 0.4236, ..., 0.4094, 0.3045, 0.8450]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.499455213546753 seconds +Time: 10.52883791923523 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 479, 1002, ..., 2498942, - 2499462, 2500000]), - col_indices=tensor([ 2, 9, 12, ..., 4986, 4993, 4996]), - values=tensor([0.9703, 0.1385, 0.8259, ..., 0.8503, 0.3399, 0.6161]), +tensor(crow_indices=tensor([ 0, 476, 998, ..., 2498989, + 2499506, 2500000]), + col_indices=tensor([ 0, 10, 15, ..., 4984, 4997, 4999]), + values=tensor([0.5072, 0.7041, 0.1877, ..., 0.5081, 0.5481, 0.3350]), size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) -tensor([0.4198, 0.0425, 0.0591, ..., 0.3064, 0.5860, 0.9864]) +tensor([0.9938, 0.7251, 0.4236, ..., 0.4094, 0.3045, 0.8450]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 2500000 Density: 0.1 -Time: 10.499455213546753 seconds +Time: 10.52883791923523 seconds -[18.41, 17.85, 18.5, 18.03, 18.16, 17.87, 17.95, 21.49, 18.34, 18.02] -[87.53] -14.54220986366272 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19580, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.499455213546753, 'TIME_S_1KI': 0.5362336676990169, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1272.879629366398, 'W': 87.53} -[18.41, 17.85, 18.5, 18.03, 18.16, 17.87, 17.95, 21.49, 18.34, 18.02, 18.22, 18.11, 19.31, 20.46, 18.5, 18.28, 17.98, 18.24, 17.73, 18.09] -333.17 -16.6585 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19580, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.499455213546753, 'TIME_S_1KI': 0.5362336676990169, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1272.879629366398, 'W': 87.53, 'J_1KI': 65.00917412494371, 'W_1KI': 4.470377936670071, 'W_D': 70.8715, 'J_D': 1030.6282263525725, 'W_D_1KI': 3.6195863125638406, 'J_D_1KI': 0.18486140513604907} +[20.33, 18.52, 18.78, 18.57, 18.89, 19.21, 18.64, 18.34, 22.74, 18.79] +[89.18] +14.456542491912842 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 18964, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.52883791923523, 'TIME_S_1KI': 0.5552013245747326, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.2344594287874, 'W': 89.18} +[20.33, 18.52, 18.78, 18.57, 18.89, 19.21, 18.64, 18.34, 22.74, 18.79, 18.95, 18.6, 18.77, 18.62, 18.46, 18.58, 18.64, 18.33, 18.48, 18.37] +340.39 +17.0195 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 18964, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.52883791923523, 'TIME_S_1KI': 0.5552013245747326, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1289.2344594287874, 'W': 89.18, 'J_1KI': 67.98325561214867, 'W_1KI': 4.702594389369332, 'W_D': 72.16050000000001, 'J_D': 1043.1913344876768, 'W_D_1KI': 3.8051307740982923, 'J_D_1KI': 0.20065022010642755} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.json index e35fdc8..35375b5 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.json +++ 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 9170, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.682870388031006, "TIME_S_1KI": 1.1649804130895316, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1294.0732465410233, "W": 85.83, "J_1KI": 141.1203104188684, "W_1KI": 9.359869138495092, "W_D": 69.25649999999999, "J_D": 1044.1918187005517, "W_D_1KI": 7.552508178844056, "J_D_1KI": 0.8236104884235611} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 9069, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.808780908584595, "TIME_S_1KI": 1.1918382300787953, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1335.8631198358535, "W": 87.82, "J_1KI": 147.2999360277708, "W_1KI": 9.683537324953136, "W_D": 71.07149999999999, "J_D": 1081.09537373507, "W_D_1KI": 7.8367515712868006, "J_D_1KI": 0.8641252146087551} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.output index 5537faf..9105ea4 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.13022398948669434} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.13054227828979492} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1019, 1995, ..., 4997994, - 4998986, 5000000]), - col_indices=tensor([ 3, 6, 16, ..., 4985, 4988, 4992]), - values=tensor([0.7011, 0.4191, 0.0262, ..., 0.8760, 0.6600, 0.8030]), +tensor(crow_indices=tensor([ 0, 1027, 2015, ..., 4998071, + 4999049, 5000000]), + col_indices=tensor([ 1, 6, 7, ..., 4987, 4992, 4995]), + values=tensor([0.1450, 0.0999, 0.1753, ..., 0.4732, 0.7755, 0.6556]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.6407, 0.2177, 0.8923, ..., 0.2240, 0.7772, 0.6410]) +tensor([0.1160, 0.7355, 0.8244, ..., 0.8238, 0.4173, 0.8898]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 0.13022398948669434 seconds +Time: 0.13054227828979492 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8063', '-ss', '5000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 9.23195219039917} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8043', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 9.311188459396362} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 967, 1963, ..., 4997972, - 4999023, 5000000]), - col_indices=tensor([ 1, 8, 12, ..., 4992, 4997, 4998]), - values=tensor([0.9256, 0.0587, 0.0398, ..., 0.7563, 0.3156, 0.8023]), +tensor(crow_indices=tensor([ 0, 1012, 2014, ..., 4998016, + 4999028, 5000000]), + col_indices=tensor([ 0, 2, 9, ..., 4995, 4998, 4999]), + values=tensor([0.5092, 0.6011, 0.2048, ..., 0.5892, 0.0267, 0.9458]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.2454, 0.6913, 0.0165, ..., 0.7228, 0.0396, 0.9796]) +tensor([0.6588, 0.2413, 0.0767, ..., 0.4428, 0.1929, 0.0330]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 9.23195219039917 seconds +Time: 9.311188459396362 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9170', '-ss', '5000', '-sd', '0.2'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.682870388031006} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9069', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.808780908584595} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1039, 2045, ..., 4997951, - 4999001, 5000000]), - col_indices=tensor([ 1, 3, 21, ..., 4981, 4993, 4997]), - values=tensor([0.7269, 0.5102, 0.5857, ..., 0.9126, 0.4820, 0.1281]), +tensor(crow_indices=tensor([ 0, 1030, 2056, ..., 4997951, + 4998993, 5000000]), + col_indices=tensor([ 3, 5, 7, ..., 4980, 4988, 4989]), + values=tensor([0.7265, 0.8226, 0.3089, ..., 0.6002, 0.0899, 0.9267]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7504, 0.3556, 0.6036, ..., 0.9776, 0.8116, 0.9002]) +tensor([0.7524, 0.0232, 0.5140, ..., 0.7543, 0.4328, 0.8193]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.682870388031006 seconds +Time: 10.808780908584595 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1039, 2045, ..., 4997951, - 4999001, 5000000]), - col_indices=tensor([ 1, 3, 21, ..., 4981, 4993, 4997]), - values=tensor([0.7269, 0.5102, 0.5857, ..., 0.9126, 0.4820, 0.1281]), +tensor(crow_indices=tensor([ 0, 1030, 2056, ..., 4997951, + 4998993, 5000000]), + col_indices=tensor([ 3, 5, 7, ..., 4980, 4988, 4989]), + values=tensor([0.7265, 0.8226, 0.3089, ..., 0.6002, 0.0899, 0.9267]), size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) -tensor([0.7504, 0.3556, 0.6036, ..., 0.9776, 0.8116, 0.9002]) +tensor([0.7524, 0.0232, 0.5140, ..., 0.7543, 0.4328, 0.8193]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 5000000 Density: 0.2 -Time: 10.682870388031006 seconds +Time: 10.808780908584595 seconds -[18.71, 17.74, 17.83, 18.37, 18.26, 18.61, 17.98, 17.99, 18.09, 22.02] -[85.83] -15.07716703414917 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.682870388031006, 'TIME_S_1KI': 1.1649804130895316, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1294.0732465410233, 'W': 85.83} -[18.71, 17.74, 17.83, 18.37, 18.26, 18.61, 17.98, 17.99, 18.09, 22.02, 18.37, 17.98, 18.13, 19.94, 19.96, 18.01, 18.17, 17.86, 18.08, 17.84] -331.47 -16.573500000000003 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.682870388031006, 'TIME_S_1KI': 1.1649804130895316, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1294.0732465410233, 'W': 85.83, 'J_1KI': 141.1203104188684, 'W_1KI': 9.359869138495092, 'W_D': 69.25649999999999, 'J_D': 1044.1918187005517, 'W_D_1KI': 7.552508178844056, 'J_D_1KI': 0.8236104884235611} +[18.91, 19.03, 18.53, 18.92, 18.62, 18.25, 18.44, 18.59, 18.48, 18.72] +[87.82] +15.211376905441284 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9069, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.808780908584595, 'TIME_S_1KI': 1.1918382300787953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1335.8631198358535, 'W': 87.82} +[18.91, 19.03, 18.53, 18.92, 18.62, 18.25, 18.44, 18.59, 18.48, 18.72, 19.14, 18.53, 18.56, 18.45, 19.02, 18.38, 18.48, 18.35, 18.59, 18.73] +334.97 +16.7485 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9069, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.808780908584595, 'TIME_S_1KI': 1.1918382300787953, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1335.8631198358535, 'W': 87.82, 'J_1KI': 147.2999360277708, 'W_1KI': 9.683537324953136, 'W_D': 71.07149999999999, 'J_D': 1081.09537373507, 'W_D_1KI': 7.8367515712868006, 'J_D_1KI': 0.8641252146087551} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.json index b09a9ac..f9757bf 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.json +++ 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 5638, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.486673831939697, "TIME_S_1KI": 1.859998905984338, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1324.2176235437394, "W": 84.45, "J_1KI": 234.87364731176646, "W_1KI": 14.978715856686769, "W_D": 67.81275, "J_D": 1063.3373434098958, "W_D_1KI": 12.027802412202908, "J_D_1KI": 2.1333455857046664} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 5531, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.627587795257568, "TIME_S_1KI": 1.9214586503810465, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1356.078349585533, "W": 86.22, "J_1KI": 245.17778875167838, "W_1KI": 15.588501175194358, "W_D": 69.2895, "J_D": 1089.7934447182417, "W_D_1KI": 12.527481468088952, "J_D_1KI": 2.2649577776331498} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.output index 933e0af..a4de7be 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.19828367233276367} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.20510458946228027} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1476, 3005, ..., 7497107, - 7498540, 7500000]), - col_indices=tensor([ 1, 2, 5, ..., 4997, 4998, 4999]), - values=tensor([0.6696, 0.7406, 0.6295, ..., 0.1006, 0.8443, 0.3530]), +tensor(crow_indices=tensor([ 0, 1517, 3037, ..., 7496968, + 7498491, 7500000]), + col_indices=tensor([ 0, 1, 3, ..., 4994, 4996, 4998]), + values=tensor([0.6481, 0.0124, 0.5642, ..., 0.1896, 0.9400, 0.5644]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.2303, 0.2102, 0.7986, ..., 0.7152, 0.6703, 0.2250]) +tensor([0.4075, 0.2598, 0.1028, ..., 0.3258, 0.6684, 0.5241]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 0.19828367233276367 seconds +Time: 0.20510458946228027 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5295', '-ss', '5000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.860368490219116} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5119', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.716318368911743} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1522, 3011, ..., 7496960, - 7498495, 7500000]), - col_indices=tensor([ 0, 6, 7, ..., 4993, 4995, 4996]), - values=tensor([0.9810, 0.8874, 0.6033, ..., 0.0373, 0.4053, 0.1135]), +tensor(crow_indices=tensor([ 0, 1465, 2956, ..., 7496982, + 7498498, 7500000]), + col_indices=tensor([ 2, 4, 9, ..., 4994, 4995, 4998]), + values=tensor([0.9623, 0.7171, 0.2788, ..., 0.7182, 0.2393, 0.4893]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.3541, 0.8542, 0.0162, ..., 0.1880, 0.1632, 0.2816]) +tensor([0.2907, 0.3907, 0.7457, ..., 0.2444, 0.9787, 0.2284]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,19 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 9.860368490219116 seconds +Time: 9.716318368911743 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5638', '-ss', '5000', '-sd', '0.3'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.486673831939697} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5531', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.627587795257568} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1590, 3134, ..., 7496893, - 7498424, 7500000]), - col_indices=tensor([ 1, 9, 13, ..., 4990, 4995, 4996]), - values=tensor([0.2379, 0.8284, 0.2265, ..., 0.1431, 0.4495, 0.9988]), +tensor(crow_indices=tensor([ 0, 1504, 3000, ..., 7497018, + 7498539, 7500000]), + col_indices=tensor([ 2, 10, 11, ..., 4989, 4993, 4999]), + values=tensor([0.2263, 0.8222, 0.1853, ..., 0.0907, 0.1187, 0.3742]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.8528, 0.9292, 0.5816, ..., 0.4785, 0.5785, 0.0165]) +tensor([0.2919, 0.0893, 0.8411, ..., 0.3428, 0.6296, 0.9732]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,16 +56,16 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.486673831939697 seconds +Time: 10.627587795257568 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 1590, 3134, ..., 7496893, - 7498424, 7500000]), - col_indices=tensor([ 1, 9, 13, ..., 4990, 4995, 4996]), - values=tensor([0.2379, 0.8284, 0.2265, ..., 0.1431, 0.4495, 0.9988]), +tensor(crow_indices=tensor([ 0, 1504, 3000, ..., 7497018, + 7498539, 7500000]), + col_indices=tensor([ 2, 10, 11, ..., 4989, 4993, 4999]), + values=tensor([0.2263, 0.8222, 0.1853, ..., 0.0907, 0.1187, 0.3742]), size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) -tensor([0.8528, 0.9292, 0.5816, ..., 0.4785, 0.5785, 0.0165]) +tensor([0.2919, 0.0893, 0.8411, ..., 0.3428, 0.6296, 0.9732]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -73,13 +73,13 @@ Rows: 5000 Size: 25000000 NNZ: 7500000 Density: 0.3 -Time: 10.486673831939697 seconds +Time: 10.627587795257568 seconds -[18.49, 17.91, 18.44, 17.9, 18.58, 17.81, 17.92, 21.43, 18.39, 17.95] -[84.45] -15.680492877960205 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5638, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.486673831939697, 'TIME_S_1KI': 1.859998905984338, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1324.2176235437394, 'W': 84.45} -[18.49, 17.91, 18.44, 17.9, 18.58, 17.81, 17.92, 21.43, 18.39, 17.95, 18.43, 20.84, 19.1, 18.06, 18.27, 17.75, 18.15, 17.72, 18.16, 17.76] -332.745 -16.63725 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5638, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.486673831939697, 'TIME_S_1KI': 1.859998905984338, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1324.2176235437394, 'W': 84.45, 'J_1KI': 234.87364731176646, 'W_1KI': 14.978715856686769, 'W_D': 67.81275, 'J_D': 1063.3373434098958, 'W_D_1KI': 12.027802412202908, 'J_D_1KI': 2.1333455857046664} +[20.89, 18.65, 18.69, 18.66, 18.83, 18.51, 18.64, 18.62, 19.09, 18.57] +[86.22] +15.728118181228638 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5531, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.627587795257568, 'TIME_S_1KI': 1.9214586503810465, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1356.078349585533, 'W': 86.22} +[20.89, 18.65, 18.69, 18.66, 18.83, 18.51, 18.64, 18.62, 19.09, 18.57, 19.24, 18.63, 19.54, 18.41, 18.76, 18.52, 18.91, 18.5, 19.09, 18.42] +338.60999999999996 +16.9305 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5531, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.627587795257568, 'TIME_S_1KI': 1.9214586503810465, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1356.078349585533, 'W': 86.22, 'J_1KI': 245.17778875167838, 'W_1KI': 15.588501175194358, 'W_D': 69.2895, 'J_D': 1089.7934447182417, 'W_D_1KI': 12.527481468088952, 'J_D_1KI': 2.2649577776331498} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.json index 6bb4210..b1cc6e6 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.json +++ 
b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2997, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.852682113647461, "TIME_S_1KI": 3.6211818864355894, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1423.3116033363342, "W": 83.38, "J_1KI": 474.9121132253367, "W_1KI": 27.82115448782115, "W_D": 66.859, "J_D": 1141.295160559654, "W_D_1KI": 22.30864197530864, "J_D_1KI": 7.443657649418965} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2769, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.38504934310913, "TIME_S_1KI": 3.7504692463377145, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1383.2232024383545, "W": 84.59, "J_1KI": 499.5388957884993, "W_1KI": 30.54893463344168, "W_D": 67.334, "J_D": 1101.0515558929444, "W_D_1KI": 24.31708197905381, "J_D_1KI": 8.781900317462553} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.output index 7229c6c..af7b6f9 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.output @@ -1,14 +1,34 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.37653517723083496} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.37917041778564453} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2029, 4093, ..., 9995991, +tensor(crow_indices=tensor([ 0, 1948, 3957, ..., 9995933, + 9997945, 10000000]), + col_indices=tensor([ 8, 10, 11, ..., 4990, 4993, 4994]), + values=tensor([0.8896, 0.7869, 0.6720, ..., 0.3221, 0.4513, 0.5276]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8022, 0.1843, 0.7407, ..., 0.1218, 0.5519, 0.4188]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 0.37917041778564453 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2769', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.38504934310913} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1983, 3967, ..., 9995969, 9998004, 10000000]), - col_indices=tensor([ 2, 3, 6, ..., 4991, 4993, 4995]), - values=tensor([0.7344, 0.2463, 0.3008, ..., 0.7679, 0.9534, 0.0176]), + col_indices=tensor([ 0, 1, 3, ..., 4996, 4997, 4999]), + values=tensor([0.0447, 0.2201, 0.1579, ..., 0.6673, 0.7600, 0.5875]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.9829, 0.9205, 0.8754, ..., 0.7887, 0.3539, 0.3362]) +tensor([0.3136, 0.6559, 0.5006, ..., 0.2088, 0.2304, 0.3448]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 0.37653517723083496 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2788', '-ss', '5000', '-sd', '0.4'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 9.764836072921753} +Time: 10.38504934310913 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2035, 4051, ..., 9995975, - 9998000, 10000000]), - col_indices=tensor([ 1, 7, 13, ..., 4993, 4995, 4999]), - values=tensor([0.5646, 0.0016, 0.0540, ..., 0.1360, 0.4218, 0.1397]), +tensor(crow_indices=tensor([ 0, 1983, 3967, ..., 9995969, + 9998004, 10000000]), + col_indices=tensor([ 0, 1, 3, ..., 4996, 4997, 4999]), + values=tensor([0.0447, 0.2201, 0.1579, ..., 0.6673, 0.7600, 0.5875]), size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2261, 0.1429, 0.2182, ..., 0.9969, 0.6483, 0.4023]) +tensor([0.3136, 0.6559, 0.5006, ..., 0.2088, 0.2304, 0.3448]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,50 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 10000000 Density: 0.4 -Time: 9.764836072921753 seconds +Time: 10.38504934310913 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2997', '-ss', '5000', '-sd', '0.4'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.852682113647461} - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2037, 4021, ..., 9995904, - 9997943, 10000000]), - col_indices=tensor([ 4, 9, 10, ..., 4993, 4994, 4995]), - values=tensor([0.7234, 0.7434, 0.9107, ..., 0.4914, 0.1939, 0.0446]), - size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2935, 0.8834, 0.9590, ..., 0.1952, 0.7236, 0.3428]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 10000000 -Density: 0.4 -Time: 10.852682113647461 seconds - -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
- matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2037, 4021, ..., 9995904, - 9997943, 10000000]), - col_indices=tensor([ 4, 9, 10, ..., 4993, 4994, 4995]), - values=tensor([0.7234, 0.7434, 0.9107, ..., 0.4914, 0.1939, 0.0446]), - size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) -tensor([0.2935, 0.8834, 0.9590, ..., 0.1952, 0.7236, 0.3428]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 10000000 -Density: 0.4 -Time: 10.852682113647461 seconds - -[18.3, 18.02, 18.13, 22.02, 18.21, 18.26, 17.95, 17.94, 17.94, 18.03] -[83.38] -17.07017993927002 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2997, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.852682113647461, 'TIME_S_1KI': 3.6211818864355894, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1423.3116033363342, 'W': 83.38} -[18.3, 18.02, 18.13, 22.02, 18.21, 18.26, 17.95, 17.94, 17.94, 18.03, 18.77, 18.01, 18.52, 18.59, 18.17, 17.81, 18.04, 18.12, 18.22, 17.84] -330.41999999999996 -16.520999999999997 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2997, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.852682113647461, 'TIME_S_1KI': 3.6211818864355894, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1423.3116033363342, 'W': 83.38, 'J_1KI': 474.9121132253367, 'W_1KI': 27.82115448782115, 'W_D': 66.859, 'J_D': 1141.295160559654, 'W_D_1KI': 22.30864197530864, 'J_D_1KI': 7.443657649418965} +[18.83, 18.68, 18.58, 18.59, 18.43, 18.65, 18.69, 18.22, 22.98, 18.9] +[84.59] +16.352088928222656 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2769, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.38504934310913, 'TIME_S_1KI': 3.7504692463377145, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1383.2232024383545, 'W': 84.59} +[18.83, 18.68, 18.58, 18.59, 18.43, 18.65, 18.69, 18.22, 22.98, 18.9, 19.07, 18.48, 23.27, 19.52, 18.46, 19.14, 18.96, 18.38, 18.4, 18.58] +345.12 +17.256 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2769, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.38504934310913, 'TIME_S_1KI': 3.7504692463377145, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1383.2232024383545, 'W': 84.59, 'J_1KI': 499.5388957884993, 'W_1KI': 30.54893463344168, 'W_D': 67.334, 'J_D': 1101.0515558929444, 'W_D_1KI': 24.31708197905381, 'J_D_1KI': 8.781900317462553} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.json index 65b89f9..2a70fc1 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2368, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, 
"TIME_S": 10.176312685012817, "TIME_S_1KI": 4.297429343333116, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1437.3391367673873, "W": 78.7, "J_1KI": 606.984432756498, "W_1KI": 33.2347972972973, "W_D": 62.306000000000004, "J_D": 1137.9269663968087, "W_D_1KI": 26.311655405405407, "J_D_1KI": 11.111340965120526} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2343, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.741075277328491, "TIME_S_1KI": 4.584325769239646, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1401.9371602535248, "W": 84.04, "J_1KI": 598.3513274662931, "W_1KI": 35.86854460093897, "W_D": 67.28325000000001, "J_D": 1122.4046696528794, "W_D_1KI": 28.71670934699104, "J_D_1KI": 12.256384697819477} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.output index 4c78e30..ec349ea 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.output @@ -1,14 +1,14 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.48082637786865234} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.4479796886444092} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2493, 4988, ..., 12494973, - 12497489, 12500000]), - col_indices=tensor([ 0, 3, 4, ..., 4995, 4996, 4997]), - values=tensor([0.7019, 0.9323, 0.5533, ..., 0.8475, 0.3948, 0.0670]), +tensor(crow_indices=tensor([ 0, 2552, 5046, ..., 12495015, + 12497490, 12500000]), + col_indices=tensor([ 0, 1, 2, ..., 4995, 4997, 4998]), + values=tensor([0.4993, 0.8383, 0.4900, ..., 0.5406, 0.0306, 0.9186]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.2037, 0.7210, 0.2412, ..., 0.0549, 0.3207, 0.0757]) +tensor([0.9628, 0.9871, 0.1356, ..., 0.1551, 0.9107, 0.4718]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -16,19 +16,19 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 0.48082637786865234 seconds +Time: 0.4479796886444092 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2183', '-ss', '5000', '-sd', '0.5'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.679495573043823} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2343', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.741075277328491} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2494, 4974, ..., 12494974, - 12497479, 12500000]), - col_indices=tensor([ 2, 3, 5, ..., 4990, 4992, 4993]), - values=tensor([0.3550, 0.2209, 0.9878, ..., 0.1100, 0.7010, 0.8735]), +tensor(crow_indices=tensor([ 0, 2523, 4985, ..., 12495058, + 12497538, 12500000]), + col_indices=tensor([ 0, 2, 3, ..., 4994, 4996, 4997]), + values=tensor([0.7346, 0.1923, 0.5467, ..., 0.0039, 0.3545, 0.5505]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.0897, 0.5505, 0.7451, ..., 0.8823, 0.0649, 0.5912]) +tensor([0.4147, 0.5149, 0.6904, ..., 0.2521, 0.9944, 0.6875]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -36,19 +36,16 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 9.679495573043823 seconds - -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2368', '-ss', '5000', '-sd', '0.5'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.176312685012817} +Time: 10.741075277328491 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2500, 5025, ..., 12495060, - 12497549, 12500000]), - col_indices=tensor([ 3, 7, 9, ..., 4994, 4997, 4998]), - values=tensor([0.3762, 0.0915, 0.3071, ..., 0.1948, 0.2052, 0.2572]), +tensor(crow_indices=tensor([ 0, 2523, 4985, ..., 12495058, + 12497538, 12500000]), + col_indices=tensor([ 0, 2, 3, ..., 4994, 4996, 4997]), + values=tensor([0.7346, 0.1923, 0.5467, ..., 0.0039, 0.3545, 0.5505]), size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.2717, 0.6125, 0.0994, ..., 0.4315, 0.1911, 0.5863]) +tensor([0.4147, 0.5149, 0.6904, ..., 0.2521, 0.9944, 0.6875]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -56,30 +53,13 @@ Rows: 5000 Size: 25000000 NNZ: 12500000 Density: 0.5 -Time: 10.176312685012817 seconds +Time: 10.741075277328491 seconds -/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) - matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 2500, 5025, ..., 12495060, - 12497549, 12500000]), - col_indices=tensor([ 3, 7, 9, ..., 4994, 4997, 4998]), - values=tensor([0.3762, 0.0915, 0.3071, ..., 0.1948, 0.2052, 0.2572]), - size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) -tensor([0.2717, 0.6125, 0.0994, ..., 0.4315, 0.1911, 0.5863]) -Matrix Type: synthetic -Matrix Format: csr -Shape: torch.Size([5000, 5000]) -Rows: 5000 -Size: 25000000 -NNZ: 12500000 -Density: 0.5 -Time: 10.176312685012817 seconds - -[18.38, 18.14, 18.13, 18.09, 18.53, 18.05, 18.22, 17.96, 18.23, 17.96] -[78.7] -18.263521432876587 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2368, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.176312685012817, 'TIME_S_1KI': 4.297429343333116, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1437.3391367673873, 'W': 78.7} -[18.38, 18.14, 18.13, 18.09, 18.53, 18.05, 18.22, 17.96, 18.23, 17.96, 18.81, 17.96, 18.41, 17.98, 18.95, 18.19, 18.23, 17.96, 18.21, 18.13] -327.88 -16.394 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2368, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.176312685012817, 'TIME_S_1KI': 4.297429343333116, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1437.3391367673873, 'W': 78.7, 'J_1KI': 606.984432756498, 'W_1KI': 33.2347972972973, 'W_D': 62.306000000000004, 'J_D': 1137.9269663968087, 'W_D_1KI': 26.311655405405407, 'J_D_1KI': 11.111340965120526} +[19.07, 18.64, 18.41, 18.39, 18.48, 18.8, 18.45, 18.44, 18.41, 18.74] +[84.04] +16.6817843914032 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2343, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.741075277328491, 'TIME_S_1KI': 4.584325769239646, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1401.9371602535248, 'W': 84.04} +[19.07, 18.64, 18.41, 18.39, 18.48, 18.8, 18.45, 18.44, 18.41, 18.74, 19.14, 18.63, 18.64, 18.44, 19.12, 18.55, 18.58, 18.45, 18.8, 18.86] 
+335.135 +16.75675 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2343, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.741075277328491, 'TIME_S_1KI': 4.584325769239646, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1401.9371602535248, 'W': 84.04, 'J_1KI': 598.3513274662931, 'W_1KI': 35.86854460093897, 'W_D': 67.28325000000001, 'J_D': 1122.4046696528794, 'W_D_1KI': 28.71670934699104, 'J_D_1KI': 12.256384697819477} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json index 2ac6398..3169aff 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 357325, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.499107599258423, "TIME_S_1KI": 0.029382516194664303, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1011.0901748657226, "W": 72.8, "J_1KI": 2.829609388835717, "W_1KI": 0.20373609459175818, "W_D": 46.4985, "J_D": 645.7991276922226, "W_D_1KI": 0.1301294339886658, "J_D_1KI": 0.0003641766850588842} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 362766, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.475499391555786, "TIME_S_1KI": 0.028876739803498083, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1139.0763648700713, "W": 81.49, "J_1KI": 3.1399755348353247, "W_1KI": 0.22463516426566987, "W_D": 64.764, "J_D": 905.27845986557, "W_D_1KI": 0.17852830750401083, "J_D_1KI": 0.0004921307606115536} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output index 109e8d2..6c9e726 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -1,75 +1,75 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.020415544509887695} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019411802291870117} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
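For reference, the derived fields in these summary dicts can be checked directly against the raw numbers logged with them. The sketch below redoes that arithmetic for the xeon_4216 csr 5000x5000, density 0.5 run above; the values are copied from the log, and the relationships are inferred from the numbers themselves rather than taken from the benchmark's own post-processing code.

# Hedged reconstruction of the derived metrics for the run above (Python).
iterations = 2343
time_s     = 10.741075277328491   # TIME_S: total spmv time over `iterations` runs
elapsed    = 16.6817843914032     # wall time of the power-measured run
w          = 84.04                # W: average power (watts) during that run
idle_avg   = 16.75675             # the bare number printed before the dict (335.135 / 20)
per_1k     = iterations / 1000    # fields suffixed _1KI are per 1000 iterations

j   = w * elapsed                 # J    -> 1401.937... joules
w_d = w - idle_avg                # W_D  -> 67.28325 (power above the idle estimate)
j_d = w_d * elapsed               # J_D  -> 1122.404...
print(time_s / per_1k)            # TIME_S_1KI -> 4.5843...
print(j / per_1k, w / per_1k)     # J_1KI -> 598.35..., W_1KI -> 35.868...
print(w_d / per_1k)               # W_D_1KI -> 28.716...
print(w_d / per_1k / per_1k)      # 12.256..., which is what the log stores as J_D_1KI

Note that in these logs J_D_1KI matches W_D_1KI divided once more by iterations/1000, not J_D/(iterations/1000) as the J_1KI pattern would suggest; the same relationship holds for the other records in this patch.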
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([1333, 4323, 980, 4513, 212, 4800, 425, 2650, 4181, - 969, 725, 4708, 2978, 4829, 2835, 2475, 4298, 490, - 871, 3121, 4762, 4479, 218, 2461, 4825, 2919, 512, - 1363, 1944, 2772, 493, 2448, 4663, 4533, 3227, 2251, - 3206, 1036, 4160, 3108, 4112, 520, 4884, 1695, 3846, - 3433, 4341, 314, 4963, 4557, 1460, 2013, 3131, 1087, - 1051, 4820, 1214, 2275, 33, 4231, 755, 1677, 4993, - 6, 698, 1317, 4059, 4178, 3988, 3341, 1529, 1327, - 4807, 1459, 3985, 2360, 1563, 1141, 2987, 4820, 3117, - 3988, 665, 4532, 3163, 3252, 4621, 4938, 2411, 4883, - 2036, 4582, 143, 3182, 722, 2609, 1076, 3554, 197, - 34, 2097, 1008, 2860, 1655, 2265, 3241, 2350, 1493, - 3432, 135, 3780, 2490, 3232, 1376, 4188, 3874, 3039, - 211, 1650, 4645, 4026, 3400, 1898, 591, 1094, 2347, - 3740, 1237, 925, 2787, 2157, 2054, 2757, 4014, 2493, - 4673, 4093, 396, 3012, 1306, 4796, 91, 1743, 543, - 4293, 1561, 1048, 3859, 1955, 3884, 1858, 722, 4225, - 3, 2890, 2664, 3170, 385, 1272, 188, 920, 1323, - 3875, 304, 4410, 2754, 3377, 4890, 4523, 4463, 2061, - 4108, 1554, 1408, 436, 1757, 483, 3973, 1483, 4992, - 3373, 401, 3150, 4902, 982, 3824, 3035, 683, 1843, - 1258, 996, 3656, 3918, 3943, 3598, 214, 724, 3021, - 312, 2986, 2034, 3929, 4868, 4093, 391, 1333, 1344, - 94, 1127, 616, 21, 1697, 646, 658, 1404, 4972, - 1106, 2493, 822, 2442, 3750, 3893, 297, 3607, 4990, - 1137, 2122, 3346, 36, 515, 2447, 1921, 2437, 4525, - 918, 1225, 2348, 1665, 3413, 132, 2821, 111, 225, - 4735, 1677, 1746, 1399, 3492, 1994, 2191]), - values=tensor([0.3602, 0.2302, 0.1896, 0.7717, 0.8956, 0.2078, 0.9487, - 0.0390, 0.3051, 0.4281, 0.9325, 0.8375, 0.6844, 0.7644, - 0.9804, 0.7332, 0.5227, 0.6093, 0.2260, 0.2957, 0.5954, - 0.5378, 0.5320, 0.4643, 0.3270, 0.8112, 0.8555, 0.5438, - 0.7934, 0.2222, 0.8906, 0.2729, 0.9780, 0.8597, 0.4400, - 0.4858, 0.2262, 0.4282, 0.8462, 0.2562, 0.8815, 0.7446, - 0.8391, 0.9365, 0.8781, 0.6986, 0.6943, 0.7579, 0.7636, - 0.9590, 0.7739, 0.5121, 0.8335, 0.2962, 0.5344, 0.4995, - 0.3316, 0.3046, 0.9720, 0.3429, 0.6108, 0.9040, 0.4735, - 0.4252, 0.9794, 0.6492, 0.0484, 0.2038, 0.5032, 0.1575, - 0.2125, 0.6202, 0.9042, 0.6689, 0.5066, 0.9113, 0.9195, - 0.2614, 0.9604, 0.9164, 0.9297, 0.6647, 0.8507, 0.2096, - 0.8186, 0.7548, 0.5297, 0.5479, 0.9328, 0.6830, 0.1651, - 0.9770, 0.1478, 0.1631, 0.6486, 0.7340, 0.9314, 0.8298, - 0.0625, 0.5779, 0.9267, 0.8341, 0.6247, 0.5959, 0.2706, - 0.5015, 0.0222, 0.8356, 0.2739, 0.1607, 0.6428, 0.2275, - 0.4531, 0.9656, 0.4285, 0.6913, 0.6388, 0.2488, 0.5417, - 0.6641, 0.6693, 0.7008, 0.2798, 0.3263, 0.3780, 0.6106, - 0.1001, 0.9342, 0.1338, 0.9089, 0.1127, 0.8287, 0.1281, - 0.0152, 0.2533, 0.8228, 0.6184, 0.5915, 0.5316, 0.8294, - 0.0163, 0.4275, 0.8868, 0.8268, 0.7667, 0.0040, 0.2374, - 0.5011, 0.5032, 0.5731, 0.6046, 0.1518, 0.5693, 0.3308, - 0.3440, 0.5026, 0.8207, 0.2844, 0.6651, 0.1261, 0.7301, - 0.3788, 0.3102, 0.2854, 0.7833, 0.8403, 0.4733, 0.1726, - 0.2895, 0.2693, 0.2450, 0.4577, 0.3046, 0.8813, 0.0902, - 0.8416, 0.8712, 0.6652, 0.0828, 0.2244, 0.6326, 0.9806, - 0.2561, 0.7608, 0.6760, 0.2091, 0.8004, 0.6240, 0.1978, - 0.2246, 0.8102, 0.6207, 0.2204, 0.2354, 0.0013, 0.2440, - 0.6407, 0.4005, 0.5265, 0.2470, 0.8146, 0.4334, 0.7311, - 0.4576, 0.4028, 0.8969, 0.8062, 0.4521, 0.2212, 0.0569, - 0.5145, 0.3716, 0.8367, 0.2637, 0.4458, 0.4981, 0.9509, - 0.3003, 0.2789, 0.8010, 0.0297, 0.6055, 0.0149, 0.5383, - 0.2695, 0.1287, 0.0767, 0.7664, 0.3198, 0.2386, 0.2384, - 
0.1616, 0.1390, 0.9600, 0.6032, 0.7446, 0.5942, 0.1408, - 0.5912, 0.3909, 0.1909, 0.4245, 0.3596, 0.3316, 0.0552, - 0.1715, 0.2483, 0.6015, 0.5276, 0.6350]), + col_indices=tensor([4960, 3833, 633, 4471, 4202, 1676, 1787, 1199, 2306, + 3124, 846, 3215, 476, 334, 4807, 915, 3105, 1045, + 4972, 3623, 3217, 1182, 3202, 2311, 396, 4144, 3334, + 478, 2328, 2786, 1856, 2738, 3225, 2480, 2441, 3299, + 707, 3319, 4597, 2911, 2174, 4484, 2800, 2744, 1794, + 1177, 477, 1901, 3376, 4387, 4560, 2107, 1920, 4480, + 4363, 2713, 919, 3204, 102, 3845, 4256, 2003, 1753, + 1119, 544, 1896, 4387, 1466, 40, 1038, 3721, 2751, + 1788, 2058, 4113, 411, 4069, 3487, 4071, 2090, 2328, + 3038, 1148, 3078, 3958, 4883, 4751, 2760, 590, 3468, + 3471, 192, 3227, 3746, 703, 2066, 576, 1090, 1325, + 194, 4246, 1921, 2956, 4418, 3795, 4442, 1902, 1941, + 216, 4485, 1996, 3049, 2979, 2254, 3655, 4978, 1651, + 4172, 2264, 3550, 4572, 945, 681, 753, 1031, 2829, + 423, 3340, 3363, 4164, 4378, 2110, 2976, 4350, 1911, + 3044, 468, 3955, 4798, 3868, 957, 3308, 1409, 3218, + 4780, 4596, 1312, 1778, 3990, 2963, 1331, 2398, 1381, + 563, 4773, 727, 4840, 1806, 194, 2625, 69, 1112, + 4951, 4747, 4526, 2806, 2806, 1095, 2373, 3616, 1305, + 1464, 4717, 2068, 106, 4757, 2842, 1199, 345, 2205, + 387, 1076, 2021, 4307, 1228, 1241, 4156, 1699, 3047, + 1911, 3014, 637, 4455, 39, 1827, 1661, 4609, 3691, + 4939, 3838, 3075, 4679, 4661, 3875, 1060, 4265, 3315, + 810, 588, 1453, 2725, 976, 1877, 3250, 3600, 3995, + 3450, 208, 2636, 3638, 2035, 1179, 4628, 4864, 1687, + 1039, 4608, 1218, 1280, 1836, 3016, 4035, 1429, 346, + 694, 3134, 1495, 3762, 2936, 4102, 132, 4792, 229, + 2506, 1809, 3129, 290, 1615, 770, 1789]), + values=tensor([0.2869, 0.8959, 0.1230, 0.9181, 0.4916, 0.0949, 0.3853, + 0.8792, 0.8143, 0.3269, 0.3167, 0.3556, 0.3787, 0.8797, + 0.9707, 0.4820, 0.8280, 0.5877, 0.3345, 0.8018, 0.1103, + 0.7156, 0.3120, 0.1165, 0.0076, 0.7338, 0.7551, 0.5541, + 0.5517, 0.9035, 0.1057, 0.5153, 0.1656, 0.8088, 0.5748, + 0.4422, 0.0352, 0.7848, 0.9643, 0.0040, 0.7585, 0.1270, + 0.7414, 0.3673, 0.5320, 0.1373, 0.7158, 0.4388, 0.1792, + 0.4447, 0.8554, 0.4865, 0.9493, 0.3981, 0.8849, 0.3396, + 0.6665, 0.2592, 0.4128, 0.4580, 0.7337, 0.9051, 0.6118, + 0.7618, 0.0594, 0.0261, 0.9894, 0.4200, 0.0044, 0.1480, + 0.6465, 0.3760, 0.8682, 0.6482, 0.8167, 0.4415, 0.0028, + 0.0746, 0.8077, 0.0515, 0.7025, 0.6510, 0.8573, 0.9618, + 0.7529, 0.2294, 0.4577, 0.9398, 0.0496, 0.2194, 0.0358, + 0.9176, 0.7583, 0.0142, 0.3173, 0.7359, 0.8555, 0.0100, + 0.3764, 0.2891, 0.7313, 0.3183, 0.3390, 0.4969, 0.8263, + 0.8676, 0.3966, 0.9944, 0.5408, 0.6388, 0.8646, 0.0834, + 0.9032, 0.6474, 0.5820, 0.7360, 0.0260, 0.7377, 0.3886, + 0.9575, 0.0924, 0.2274, 0.1122, 0.6076, 0.7589, 0.5711, + 0.8863, 0.8848, 0.8179, 0.6628, 0.0821, 0.5477, 0.2648, + 0.4391, 0.8445, 0.6724, 0.5323, 0.0027, 0.0583, 0.7321, + 0.8548, 0.0074, 0.3957, 0.6311, 0.2517, 0.7528, 0.1816, + 0.3071, 0.0713, 0.5735, 0.3349, 0.3818, 0.0042, 0.6241, + 0.9667, 0.4610, 0.2527, 0.0211, 0.9795, 0.0027, 0.2551, + 0.9960, 0.0288, 0.1389, 0.6797, 0.7264, 0.7649, 0.6387, + 0.3351, 0.2456, 0.3344, 0.3816, 0.2839, 0.4175, 0.6365, + 0.3504, 0.9113, 0.8124, 0.7638, 0.9220, 0.3762, 0.0562, + 0.4237, 0.6986, 0.8893, 0.4006, 0.4298, 0.5397, 0.7499, + 0.6551, 0.8851, 0.7450, 0.5531, 0.2759, 0.8211, 0.1282, + 0.1044, 0.9759, 0.0575, 0.3280, 0.1045, 0.1087, 0.6754, + 0.1435, 0.0998, 0.4988, 0.8681, 0.0752, 0.4069, 0.8414, + 0.3281, 0.8089, 0.9851, 0.4099, 0.4565, 0.6462, 0.9468, + 0.9400, 0.7595, 0.9734, 0.7270, 0.7066, 0.1320, 
0.7342, + 0.7324, 0.9768, 0.2893, 0.1781, 0.1344, 0.5459, 0.0035, + 0.5325, 0.6974, 0.3569, 0.7178, 0.6552, 0.2834, 0.8334, + 0.2236, 0.3532, 0.7476, 0.4233, 0.3118, 0.7910, 0.8860, + 0.7759, 0.9035, 0.9230, 0.9359, 0.3876]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.0146, 0.8199, 0.1846, ..., 0.5715, 0.8048, 0.4041]) +tensor([0.4538, 0.4285, 0.5081, ..., 0.4471, 0.6341, 0.2594]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -77,80 +77,107 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 0.020415544509887695 seconds +Time: 0.019411802291870117 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '51431', '-ss', '5000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.5112977027893066} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '54090', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.56559419631958} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([4110, 4882, 2528, 560, 2850, 1135, 2189, 267, 591, - 4248, 392, 4389, 952, 4426, 675, 791, 4134, 2003, - 330, 4266, 1795, 3038, 2537, 1414, 4513, 1875, 1420, - 3040, 4172, 4236, 4029, 37, 2565, 1038, 1280, 832, - 4901, 4729, 13, 26, 702, 4420, 4734, 1556, 899, - 2280, 4293, 3965, 1974, 4811, 4611, 1009, 692, 1950, - 707, 1172, 1767, 2120, 1996, 634, 3311, 4113, 1903, - 2733, 3685, 3363, 3449, 2814, 1438, 4520, 218, 740, - 498, 655, 1313, 1745, 4116, 95, 357, 4580, 2628, - 4483, 1655, 4732, 165, 2596, 205, 1356, 2811, 2330, - 4604, 2547, 3455, 1624, 3457, 1604, 4751, 1859, 1095, - 4469, 2397, 2809, 2539, 2371, 2342, 4452, 4342, 4591, - 2097, 1276, 1250, 2060, 1578, 4677, 4803, 4831, 2724, - 2018, 2284, 2775, 4691, 199, 4210, 2309, 943, 2881, - 1649, 3410, 3567, 3333, 2952, 4866, 2146, 2388, 3555, - 3049, 2189, 2470, 1262, 1368, 686, 4746, 2521, 3366, - 3586, 4367, 3371, 2520, 3654, 86, 810, 3504, 2051, - 4871, 4860, 841, 544, 3974, 2005, 981, 3244, 488, - 3372, 1091, 2635, 1589, 2299, 733, 4674, 2817, 2103, - 1209, 3577, 3623, 4300, 2443, 1295, 4756, 2336, 3550, - 2095, 4292, 2842, 1971, 4691, 4042, 3354, 592, 2063, - 1832, 1378, 3596, 4385, 745, 2532, 1883, 1739, 1247, - 2943, 1826, 3219, 4178, 3486, 2469, 1068, 4370, 1539, - 326, 2628, 3947, 4717, 3828, 4255, 4155, 4188, 3551, - 3390, 3231, 2307, 2529, 2288, 367, 4198, 4896, 743, - 1579, 1013, 645, 2200, 4435, 4882, 1640, 288, 3098, - 3761, 2384, 847, 509, 4067, 1126, 4356, 2690, 2684, - 3064, 181, 819, 409, 3928, 4195, 3862]), - values=tensor([0.6256, 0.4971, 0.1060, 0.7139, 0.1823, 0.2509, 0.2673, - 0.8663, 0.0835, 0.0808, 0.2049, 0.4420, 0.4395, 0.8635, - 0.0169, 0.4133, 0.6820, 0.9890, 0.7762, 0.6220, 0.1534, - 0.5506, 0.6723, 0.8443, 0.3974, 
0.8265, 0.0110, 0.1617, - 0.9872, 0.3380, 0.0737, 0.5011, 0.8812, 0.5991, 0.0522, - 0.5730, 0.8120, 0.7506, 0.2397, 0.8742, 0.8477, 0.3557, - 0.0116, 0.0076, 0.2125, 0.8492, 0.7414, 0.8939, 0.9161, - 0.6369, 0.7960, 0.8359, 0.4748, 0.7205, 0.4515, 0.6595, - 0.2492, 0.2025, 0.4674, 0.4437, 0.5543, 0.2380, 0.4617, - 0.5720, 0.4895, 0.6114, 0.1214, 0.9049, 0.4914, 0.2397, - 0.6242, 0.3103, 0.8200, 0.7931, 0.9387, 0.1219, 0.7863, - 0.3172, 0.8665, 0.4111, 0.3328, 0.3737, 0.2933, 0.4526, - 0.3424, 0.8586, 0.4153, 0.8629, 0.3744, 0.5686, 0.0746, - 0.0221, 0.4632, 0.2182, 0.7406, 0.2698, 0.1778, 0.5649, - 0.4080, 0.6883, 0.2942, 0.9953, 0.6518, 0.2163, 0.7535, - 0.6975, 0.2221, 0.2275, 0.5405, 0.0798, 0.5573, 0.6302, - 0.4985, 0.1104, 0.5174, 0.9401, 0.1389, 0.1581, 0.0424, - 0.6895, 0.3613, 0.0928, 0.0065, 0.9081, 0.0044, 0.4960, - 0.9823, 0.5854, 0.2610, 0.0038, 0.7425, 0.4802, 0.1516, - 0.8368, 0.1838, 0.8782, 0.0715, 0.4223, 0.7005, 0.4430, - 0.9462, 0.7502, 0.6645, 0.2891, 0.9383, 0.3987, 0.8215, - 0.5606, 0.2261, 0.8011, 0.4375, 0.3961, 0.7279, 0.2806, - 0.0713, 0.4871, 0.3038, 0.5371, 0.2216, 0.6841, 0.2878, - 0.3646, 0.4532, 0.8491, 0.7578, 0.6510, 0.7523, 0.4340, - 0.7007, 0.4708, 0.9965, 0.8203, 0.2246, 0.8996, 0.3303, - 0.0148, 0.3760, 0.7093, 0.5329, 0.2574, 0.3450, 0.1580, - 0.7186, 0.2033, 0.7798, 0.4593, 0.7075, 0.5720, 0.1499, - 0.1156, 0.4260, 0.0288, 0.5145, 0.3668, 0.5589, 0.8589, - 0.3216, 0.4607, 0.7258, 0.8841, 0.3750, 0.0423, 0.5771, - 0.4191, 0.6488, 0.7104, 0.5144, 0.6011, 0.7757, 0.0524, - 0.4180, 0.1533, 0.5397, 0.3671, 0.1765, 0.9044, 0.7096, - 0.5951, 0.8654, 0.2226, 0.3541, 0.2302, 0.8815, 0.7479, - 0.9847, 0.5335, 0.2697, 0.6788, 0.5998, 0.9345, 0.5600, - 0.8733, 0.1996, 0.0591, 0.2713, 0.9857, 0.2291, 0.3974, - 0.8274, 0.9419, 0.8414, 0.0360, 0.8374, 0.9999, 0.3537, - 0.3299, 0.5865, 0.7469, 0.2686, 0.3364]), - size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.4917, 0.3630, 0.9281, ..., 0.8752, 0.5291, 0.1405]) + col_indices=tensor([3789, 1243, 4776, 4880, 1902, 1389, 4775, 2798, 986, + 2992, 160, 1178, 2753, 1276, 2143, 2125, 2082, 796, + 4287, 2891, 4008, 2584, 357, 1545, 3975, 1985, 1893, + 641, 4457, 2674, 730, 2389, 3978, 503, 3524, 3968, + 2639, 4331, 4159, 2851, 2113, 2296, 4897, 1954, 2905, + 1988, 349, 4285, 4761, 113, 1156, 1476, 1164, 4493, + 3741, 3481, 306, 3538, 3291, 4259, 1912, 1480, 269, + 941, 1836, 896, 2280, 4835, 2305, 1424, 1027, 3018, + 3322, 3011, 4696, 4785, 4442, 1382, 1843, 3478, 3220, + 1605, 1347, 4249, 3840, 558, 961, 698, 2214, 131, + 2160, 998, 507, 1911, 4411, 3479, 2466, 66, 3262, + 1008, 4898, 954, 1753, 4846, 1589, 2531, 3747, 2414, + 4530, 1537, 4864, 3346, 387, 953, 3822, 2504, 1783, + 3528, 4709, 950, 2663, 3783, 3407, 1045, 836, 713, + 4150, 853, 918, 4114, 27, 733, 1398, 1923, 2992, + 1794, 2504, 1422, 2624, 2518, 2231, 1402, 1836, 2065, + 4473, 2983, 1053, 704, 3419, 364, 2236, 2201, 4632, + 2096, 1171, 1271, 2334, 1004, 4644, 3622, 544, 4456, + 4631, 502, 303, 4281, 3904, 3473, 1037, 2430, 4629, + 4377, 4956, 2464, 404, 2137, 1087, 4351, 828, 4839, + 4041, 938, 3503, 905, 4497, 376, 2116, 3228, 312, + 1285, 4031, 2321, 4639, 2358, 1958, 1117, 3624, 2395, + 2366, 1581, 3243, 4341, 2140, 3211, 3854, 1952, 367, + 3254, 4276, 2270, 4621, 1017, 607, 3657, 2568, 3087, + 1073, 1367, 474, 161, 2883, 1942, 3821, 2295, 2309, + 3169, 4731, 3971, 4764, 2695, 1812, 545, 1937, 4542, + 322, 4280, 923, 2988, 839, 4434, 2752, 2209, 1669, + 1684, 2769, 4993, 405, 1503, 4102, 4270]), + values=tensor([5.8193e-01, 1.6967e-01, 
7.6265e-01, 8.3999e-01, + 3.5067e-01, 8.4136e-01, 5.9135e-02, 9.4997e-01, + 4.5598e-01, 5.0948e-01, 6.3977e-01, 6.0317e-01, + 7.0442e-01, 3.5449e-01, 8.0328e-01, 9.9043e-01, + 8.7762e-01, 2.7569e-01, 9.3437e-01, 2.5581e-01, + 7.8133e-01, 9.1962e-01, 5.0363e-01, 7.7144e-01, + 7.6702e-01, 9.5569e-01, 8.2036e-01, 9.4378e-01, + 3.4370e-02, 1.5833e-01, 3.2839e-01, 7.9453e-01, + 1.2917e-01, 4.0631e-01, 6.9895e-01, 5.7160e-01, + 4.3608e-01, 9.1033e-01, 6.8047e-01, 5.6095e-01, + 2.7380e-01, 2.7289e-01, 3.4314e-02, 1.4483e-01, + 8.8443e-02, 9.6977e-01, 1.6710e-02, 6.5568e-01, + 1.6041e-01, 4.8166e-01, 3.9569e-01, 2.0296e-01, + 4.0497e-02, 8.5598e-01, 8.4482e-01, 9.8098e-02, + 3.1198e-01, 7.9009e-01, 7.1343e-01, 7.4770e-01, + 4.0178e-01, 8.6152e-01, 8.5676e-01, 7.5249e-01, + 3.9595e-01, 5.2862e-01, 2.7242e-01, 5.2190e-02, + 9.7129e-01, 9.2298e-01, 8.7805e-01, 9.7186e-01, + 8.1384e-01, 6.2763e-01, 3.4871e-01, 3.7315e-01, + 5.7017e-01, 4.2774e-01, 6.8346e-01, 7.1163e-01, + 9.9781e-02, 7.4433e-01, 9.8423e-01, 5.2672e-01, + 9.3192e-01, 4.9745e-01, 8.7323e-01, 7.8411e-01, + 8.2508e-01, 3.0079e-01, 4.3750e-01, 8.1666e-01, + 1.9204e-01, 4.8170e-01, 7.9401e-01, 9.0845e-01, + 3.8320e-01, 1.4469e-01, 6.5551e-01, 1.9421e-01, + 5.6315e-02, 8.3221e-01, 7.2817e-01, 8.0065e-01, + 7.7589e-01, 9.1337e-01, 8.9020e-01, 4.3957e-01, + 3.8686e-01, 9.8707e-01, 8.4008e-01, 5.4984e-02, + 2.4305e-01, 8.5829e-01, 4.3504e-02, 1.7416e-01, + 6.7981e-01, 7.3005e-01, 1.3866e-01, 4.6051e-01, + 2.2246e-01, 4.3171e-01, 3.1707e-01, 3.1477e-01, + 9.7685e-01, 1.3937e-01, 9.9960e-01, 5.4033e-01, + 9.8566e-01, 9.7025e-01, 8.6825e-01, 8.0279e-01, + 4.9174e-02, 7.7746e-01, 2.7360e-02, 8.8997e-01, + 3.6341e-01, 5.7061e-02, 9.8737e-01, 5.4324e-01, + 8.5528e-02, 9.5339e-01, 7.9510e-01, 3.4118e-01, + 6.1227e-01, 9.8590e-01, 1.2360e-01, 3.7274e-01, + 4.5975e-02, 1.1156e-01, 1.3423e-01, 4.4049e-01, + 5.2826e-01, 6.1652e-01, 3.8540e-01, 2.5875e-01, + 8.4165e-01, 4.1927e-01, 3.3346e-01, 3.7819e-01, + 4.3162e-01, 9.4273e-01, 4.1969e-04, 1.9136e-01, + 5.5210e-01, 5.9275e-01, 5.2183e-01, 4.5867e-01, + 9.1634e-01, 6.3957e-03, 6.1488e-01, 3.0151e-01, + 1.5067e-01, 3.1261e-02, 3.3052e-01, 4.3902e-01, + 3.4783e-01, 8.5534e-01, 3.3467e-01, 9.8019e-03, + 5.1740e-01, 1.0430e-01, 7.0916e-01, 8.8095e-01, + 9.2562e-01, 5.6818e-01, 9.8123e-01, 3.1989e-01, + 9.3706e-01, 9.8660e-01, 9.3024e-01, 6.1657e-01, + 4.3348e-01, 5.8678e-01, 5.5304e-01, 8.1264e-01, + 5.4898e-01, 8.0422e-01, 2.4219e-02, 5.6420e-01, + 8.4187e-01, 9.0527e-01, 9.7111e-02, 1.2976e-01, + 6.3760e-01, 2.6217e-02, 1.9330e-01, 1.5936e-01, + 6.7589e-02, 3.1436e-01, 8.6443e-01, 9.6293e-01, + 1.3153e-01, 7.5441e-01, 4.5200e-01, 9.4461e-01, + 4.0104e-01, 8.9185e-01, 7.2415e-02, 1.0189e-01, + 8.6886e-01, 6.7460e-01, 1.5530e-01, 2.4369e-01, + 6.7486e-01, 9.4765e-01, 8.7610e-01, 2.4842e-01, + 1.8471e-01, 6.2321e-01, 4.9411e-01, 5.3125e-02, + 4.7713e-01, 4.1927e-01, 4.1643e-01, 7.7590e-01, + 3.2378e-02, 3.2515e-01, 4.8623e-01, 3.3402e-01, + 4.7807e-01, 8.5016e-01, 8.4547e-01, 5.1588e-01, + 3.4987e-01, 8.9127e-01, 4.9290e-01, 7.0623e-01, + 3.9491e-01, 8.3220e-01]), size=(5000, 5000), nnz=250, + layout=torch.sparse_csr) +tensor([0.8994, 0.1119, 0.9711, ..., 0.6840, 0.7927, 0.1169]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -158,80 +185,80 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 1.5112977027893066 seconds +Time: 1.56559419631958 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 
'synthetic', 'csr', '357325', '-ss', '5000', '-sd', '1e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.499107599258423} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '362766', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.475499391555786} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([2104, 1861, 7, 1520, 3728, 1941, 2107, 138, 2161, - 3365, 332, 4735, 2493, 4284, 393, 1314, 1302, 4705, - 1583, 2354, 365, 1361, 3891, 149, 1170, 2523, 1316, - 504, 3112, 2441, 3025, 3794, 4286, 3194, 1606, 1584, - 3408, 2741, 1246, 4491, 4352, 753, 1486, 3301, 2391, - 2673, 3251, 2341, 1657, 2899, 1405, 28, 3720, 4641, - 155, 2571, 1960, 1838, 3742, 1460, 3050, 1966, 3313, - 1854, 4564, 1529, 1889, 4664, 3289, 4098, 3070, 1858, - 1104, 4802, 1430, 2787, 4743, 1421, 1813, 2073, 2691, - 3256, 821, 4666, 4791, 494, 2847, 2089, 295, 92, - 3053, 2874, 4675, 1142, 2097, 3430, 3192, 3228, 4790, - 4424, 4658, 1164, 1384, 2389, 731, 3926, 526, 3782, - 4373, 3966, 3264, 2145, 1214, 2000, 245, 4102, 2011, - 66, 3256, 4976, 3641, 1843, 2314, 3228, 1928, 847, - 3368, 1129, 1702, 2867, 4161, 4680, 2563, 195, 417, - 4789, 399, 2588, 3130, 324, 4572, 3283, 4937, 216, - 3937, 29, 3425, 1846, 776, 2604, 3452, 1647, 2368, - 423, 57, 1474, 4006, 1987, 4359, 1194, 867, 4968, - 2616, 1438, 4125, 4484, 4543, 3714, 1301, 2231, 4571, - 1275, 4305, 3280, 4945, 2663, 1185, 1842, 622, 3361, - 2569, 1402, 2348, 1829, 182, 3336, 570, 3323, 562, - 1935, 4764, 3304, 2399, 1649, 1406, 4405, 2794, 3186, - 4464, 4180, 1762, 4406, 310, 3727, 661, 2438, 4818, - 1167, 2299, 3410, 2102, 990, 2923, 1131, 3164, 4442, - 2744, 348, 2673, 3077, 2766, 827, 2340, 2888, 4315, - 1191, 4533, 1425, 1473, 1269, 2758, 4677, 1018, 4467, - 4891, 1237, 820, 1622, 44, 4185, 1279, 3969, 4975, - 4333, 1750, 1936, 243, 4652, 1623, 3975]), - values=tensor([0.9387, 0.4700, 0.2447, 0.6228, 0.4503, 0.1997, 0.0053, - 0.5157, 0.4796, 0.4250, 0.8220, 0.3582, 0.4251, 0.4646, - 0.9478, 0.0325, 0.0848, 0.5917, 0.2071, 0.4572, 0.9167, - 0.3871, 0.6967, 0.0513, 0.7695, 0.7027, 0.3866, 0.0150, - 0.1037, 0.9121, 0.5340, 0.5642, 0.8146, 0.9553, 0.8607, - 0.1413, 0.3229, 0.0119, 0.0552, 0.9205, 0.4848, 0.8673, - 0.6922, 0.3249, 0.7973, 0.5503, 0.3256, 0.5167, 0.2558, - 0.6122, 0.0546, 0.7903, 0.3759, 0.7968, 0.2510, 0.1679, - 0.4338, 0.6740, 0.2626, 0.4105, 0.4029, 0.3869, 0.1119, - 0.9939, 0.6154, 0.1891, 0.7886, 0.8106, 0.2588, 0.8530, - 0.1960, 0.5522, 0.9502, 0.2036, 0.8479, 0.5893, 0.2336, - 0.5492, 0.4318, 0.6485, 0.5562, 0.9481, 0.9451, 0.8583, - 0.7556, 0.1458, 0.7503, 0.1231, 0.2746, 0.0172, 0.6993, - 0.4164, 0.7854, 0.6995, 0.8084, 0.5901, 0.9482, 0.9912, - 0.8222, 0.8265, 0.6019, 0.5462, 0.6062, 0.4916, 0.3704, - 0.9894, 0.1946, 0.9052, 0.1356, 0.8292, 0.4460, 0.3066, - 0.5424, 
0.9972, 0.4829, 0.2518, 0.5680, 0.4922, 0.3950, - 0.5510, 0.1881, 0.6119, 0.3857, 0.5521, 0.8319, 0.6787, - 0.8327, 0.0041, 0.6533, 0.9035, 0.3529, 0.2045, 0.8781, - 0.7715, 0.7148, 0.2012, 0.4429, 0.6908, 0.0217, 0.9409, - 0.2398, 0.9282, 0.8675, 0.2060, 0.8887, 0.5702, 0.9940, - 0.9652, 0.4202, 0.5062, 0.1515, 0.6839, 0.1289, 0.7773, - 0.8039, 0.1706, 0.5382, 0.6842, 0.4665, 0.8705, 0.9740, - 0.3731, 0.3796, 0.0945, 0.8938, 0.9546, 0.7375, 0.4347, - 0.0317, 0.0560, 0.1482, 0.2781, 0.1532, 0.0170, 0.3948, - 0.1619, 0.3596, 0.1826, 0.6650, 0.4684, 0.2191, 0.9123, - 0.0088, 0.2608, 0.7519, 0.3594, 0.8499, 0.1793, 0.6810, - 0.4007, 0.7171, 0.6382, 0.5741, 0.6774, 0.9314, 0.4111, - 0.3073, 0.3304, 0.1048, 0.8381, 0.8387, 0.6626, 0.9350, - 0.3559, 0.0898, 0.9007, 0.8371, 0.0933, 0.7505, 0.1760, - 0.8200, 0.6000, 0.8535, 0.8461, 0.8576, 0.1788, 0.4860, - 0.8224, 0.8204, 0.6502, 0.3745, 0.9926, 0.3104, 0.0651, - 0.2962, 0.9752, 0.3565, 0.2747, 0.0907, 0.8216, 0.2197, - 0.9709, 0.0139, 0.1557, 0.9115, 0.1512, 0.0977, 0.5834, - 0.2007, 0.9483, 0.8228, 0.2172, 0.6709, 0.2237, 0.7140, - 0.3427, 0.4328, 0.7857, 0.0871, 0.8851]), + col_indices=tensor([1956, 3431, 4238, 4052, 786, 1266, 4001, 4666, 1092, + 3296, 1173, 3822, 1879, 2941, 234, 3811, 4402, 613, + 1443, 3709, 2594, 4608, 2613, 4283, 2501, 2445, 1427, + 1885, 2181, 4860, 4243, 1214, 4813, 213, 317, 2056, + 1188, 2407, 2876, 3155, 1101, 928, 2235, 3824, 2607, + 1838, 1357, 2674, 2550, 2106, 3424, 1994, 546, 3354, + 2675, 2714, 2261, 610, 3755, 671, 2095, 3697, 1347, + 360, 4317, 4158, 117, 2249, 4990, 9, 681, 1881, + 3923, 690, 4091, 4107, 2892, 3886, 738, 1353, 208, + 2168, 895, 3568, 3595, 4527, 3909, 2258, 3141, 4180, + 3592, 2057, 988, 1280, 2222, 2461, 1230, 4436, 4278, + 242, 2898, 3677, 486, 2915, 2700, 3313, 1582, 3947, + 2304, 3998, 4209, 1784, 1143, 1298, 4100, 1668, 4807, + 1129, 194, 547, 3477, 3466, 2653, 743, 1451, 4033, + 4459, 4677, 861, 209, 3635, 4103, 4596, 643, 4673, + 918, 2069, 3861, 438, 73, 4960, 2624, 85, 282, + 1088, 4531, 2175, 2789, 2455, 4369, 79, 3997, 2161, + 4382, 981, 4359, 3035, 4336, 2903, 3375, 3402, 3351, + 1467, 4310, 1833, 4757, 656, 4630, 2068, 4311, 2912, + 3879, 2330, 4316, 2205, 1238, 2693, 24, 3949, 2312, + 3596, 1298, 4258, 443, 1549, 4970, 757, 2174, 4522, + 1179, 2988, 3936, 197, 3432, 3114, 1592, 3091, 2331, + 4463, 471, 4489, 3432, 1070, 2015, 3140, 985, 1046, + 1718, 2928, 3116, 850, 3114, 4187, 476, 1975, 2045, + 614, 4508, 49, 4308, 3320, 2902, 2360, 679, 1323, + 174, 1683, 333, 3655, 3484, 4282, 1715, 4161, 2251, + 4716, 3026, 1067, 4926, 819, 4915, 1642, 2499, 4667, + 1179, 724, 2553, 2235, 2183, 4346, 882]), + values=tensor([0.9579, 0.0390, 0.9157, 0.4104, 0.8526, 0.8581, 0.0404, + 0.7977, 0.1127, 0.6398, 0.6320, 0.5466, 0.3249, 0.5353, + 0.2779, 0.1888, 0.8800, 0.3646, 0.5797, 0.6102, 0.4524, + 0.6672, 0.1933, 0.3172, 0.9142, 0.2116, 0.8694, 0.7671, + 0.3860, 0.8538, 0.6306, 0.0812, 0.2634, 0.0747, 0.1165, + 0.9209, 0.5169, 0.3874, 0.9909, 0.1687, 0.6244, 0.9682, + 0.8180, 0.6958, 0.5743, 0.6762, 0.4274, 0.8734, 0.8720, + 0.7096, 0.0314, 0.6237, 0.5408, 0.2599, 0.6993, 0.8694, + 0.9325, 0.6629, 0.2586, 0.5189, 0.1311, 0.0244, 0.5487, + 0.2381, 0.3764, 0.6005, 0.3517, 0.2902, 0.4481, 0.7095, + 0.8595, 0.4167, 0.1455, 0.6941, 0.7561, 0.5053, 0.8598, + 0.7230, 0.0345, 0.2736, 0.2353, 0.0978, 0.0877, 0.4407, + 0.2045, 0.2192, 0.5832, 0.6619, 0.6681, 0.4330, 0.3404, + 0.7309, 0.2497, 0.1260, 0.4309, 0.3463, 0.5670, 0.2970, + 0.0325, 0.3660, 0.0477, 0.7916, 0.4884, 0.4378, 0.2655, + 0.3694, 
0.2809, 0.0571, 0.7346, 0.1636, 0.4691, 0.0250, + 0.7714, 0.9482, 0.5488, 0.4704, 0.4720, 0.5671, 0.0525, + 0.3009, 0.2823, 0.4234, 0.6898, 0.6608, 0.4215, 0.1505, + 0.7591, 0.3435, 0.3440, 0.1149, 0.8423, 0.9694, 0.2894, + 0.1485, 0.3692, 0.7112, 0.1061, 0.2705, 0.6923, 0.0732, + 0.3921, 0.4155, 0.1813, 0.6651, 0.6460, 0.6574, 0.6629, + 0.9666, 0.4550, 0.0643, 0.0699, 0.5834, 0.9262, 0.5997, + 0.8477, 0.8876, 0.3470, 0.5717, 0.3027, 0.1708, 0.3344, + 0.6564, 0.5221, 0.9971, 0.5670, 0.6595, 0.4442, 0.6979, + 0.0346, 0.7421, 0.0167, 0.0427, 0.0476, 0.9750, 0.8359, + 0.0377, 0.5836, 0.0387, 0.6069, 0.4111, 0.5689, 0.8082, + 0.2356, 0.9052, 0.6351, 0.6643, 0.1830, 0.8091, 0.9222, + 0.0255, 0.1085, 0.2291, 0.7019, 0.5629, 0.9653, 0.1272, + 0.0502, 0.4026, 0.3717, 0.7964, 0.3440, 0.2227, 0.9211, + 0.0353, 0.8462, 0.1378, 0.9728, 0.9579, 0.9282, 0.2986, + 0.0902, 0.8570, 0.3740, 0.3544, 0.0310, 0.9061, 0.7041, + 0.8388, 0.2022, 0.8961, 0.9929, 0.7981, 0.4087, 0.5336, + 0.1071, 0.4185, 0.4326, 0.1289, 0.6766, 0.0293, 0.3061, + 0.0095, 0.2011, 0.1571, 0.7405, 0.5313, 0.7308, 0.3716, + 0.4635, 0.9587, 0.8976, 0.2622, 0.9705, 0.8033, 0.2844, + 0.0305, 0.3661, 0.4222, 0.5149, 0.3785]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.5944, 0.7034, 0.9760, ..., 0.8304, 0.5842, 0.4500]) +tensor([0.4029, 0.7352, 0.6155, ..., 0.7243, 0.5046, 0.3375]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -239,77 +266,77 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.499107599258423 seconds +Time: 10.475499391555786 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), - col_indices=tensor([2104, 1861, 7, 1520, 3728, 1941, 2107, 138, 2161, - 3365, 332, 4735, 2493, 4284, 393, 1314, 1302, 4705, - 1583, 2354, 365, 1361, 3891, 149, 1170, 2523, 1316, - 504, 3112, 2441, 3025, 3794, 4286, 3194, 1606, 1584, - 3408, 2741, 1246, 4491, 4352, 753, 1486, 3301, 2391, - 2673, 3251, 2341, 1657, 2899, 1405, 28, 3720, 4641, - 155, 2571, 1960, 1838, 3742, 1460, 3050, 1966, 3313, - 1854, 4564, 1529, 1889, 4664, 3289, 4098, 3070, 1858, - 1104, 4802, 1430, 2787, 4743, 1421, 1813, 2073, 2691, - 3256, 821, 4666, 4791, 494, 2847, 2089, 295, 92, - 3053, 2874, 4675, 1142, 2097, 3430, 3192, 3228, 4790, - 4424, 4658, 1164, 1384, 2389, 731, 3926, 526, 3782, - 4373, 3966, 3264, 2145, 1214, 2000, 245, 4102, 2011, - 66, 3256, 4976, 3641, 1843, 2314, 3228, 1928, 847, - 3368, 1129, 1702, 2867, 4161, 4680, 2563, 195, 417, - 4789, 399, 2588, 3130, 324, 4572, 3283, 4937, 216, - 3937, 29, 3425, 1846, 776, 2604, 3452, 1647, 2368, - 423, 57, 1474, 4006, 1987, 4359, 1194, 867, 4968, - 2616, 1438, 4125, 4484, 4543, 3714, 1301, 2231, 4571, - 1275, 4305, 3280, 4945, 2663, 1185, 1842, 622, 3361, - 2569, 1402, 2348, 1829, 182, 3336, 570, 3323, 562, - 1935, 4764, 3304, 2399, 1649, 1406, 4405, 2794, 3186, - 4464, 4180, 1762, 4406, 310, 3727, 661, 2438, 4818, - 1167, 2299, 3410, 2102, 990, 2923, 1131, 3164, 4442, - 2744, 348, 2673, 3077, 2766, 827, 2340, 2888, 4315, - 1191, 4533, 1425, 1473, 1269, 2758, 4677, 1018, 4467, - 4891, 1237, 820, 1622, 44, 4185, 1279, 3969, 4975, - 4333, 1750, 1936, 243, 4652, 1623, 3975]), - values=tensor([0.9387, 0.4700, 0.2447, 0.6228, 0.4503, 0.1997, 0.0053, - 0.5157, 0.4796, 0.4250, 0.8220, 0.3582, 0.4251, 0.4646, - 0.9478, 0.0325, 0.0848, 0.5917, 0.2071, 0.4572, 0.9167, - 0.3871, 0.6967, 0.0513, 0.7695, 0.7027, 0.3866, 0.0150, - 0.1037, 0.9121, 0.5340, 0.5642, 0.8146, 0.9553, 0.8607, - 0.1413, 0.3229, 0.0119, 0.0552, 0.9205, 0.4848, 0.8673, - 0.6922, 0.3249, 0.7973, 0.5503, 0.3256, 0.5167, 0.2558, - 0.6122, 0.0546, 0.7903, 0.3759, 0.7968, 0.2510, 0.1679, - 0.4338, 0.6740, 0.2626, 0.4105, 0.4029, 0.3869, 0.1119, - 0.9939, 0.6154, 0.1891, 0.7886, 0.8106, 0.2588, 0.8530, - 0.1960, 0.5522, 0.9502, 0.2036, 0.8479, 0.5893, 0.2336, - 0.5492, 0.4318, 0.6485, 0.5562, 0.9481, 0.9451, 0.8583, - 0.7556, 0.1458, 0.7503, 0.1231, 0.2746, 0.0172, 0.6993, - 0.4164, 0.7854, 0.6995, 0.8084, 0.5901, 0.9482, 0.9912, - 0.8222, 0.8265, 0.6019, 0.5462, 0.6062, 0.4916, 0.3704, - 0.9894, 0.1946, 0.9052, 0.1356, 0.8292, 0.4460, 0.3066, - 0.5424, 0.9972, 0.4829, 0.2518, 0.5680, 0.4922, 0.3950, - 0.5510, 0.1881, 0.6119, 0.3857, 0.5521, 0.8319, 0.6787, - 0.8327, 0.0041, 0.6533, 0.9035, 0.3529, 0.2045, 0.8781, - 0.7715, 0.7148, 0.2012, 0.4429, 0.6908, 0.0217, 0.9409, - 0.2398, 0.9282, 0.8675, 0.2060, 0.8887, 0.5702, 0.9940, - 0.9652, 0.4202, 0.5062, 0.1515, 0.6839, 0.1289, 0.7773, - 0.8039, 0.1706, 0.5382, 0.6842, 0.4665, 0.8705, 0.9740, - 0.3731, 0.3796, 0.0945, 0.8938, 0.9546, 0.7375, 0.4347, - 0.0317, 0.0560, 0.1482, 0.2781, 0.1532, 0.0170, 0.3948, - 0.1619, 0.3596, 0.1826, 0.6650, 0.4684, 0.2191, 0.9123, - 0.0088, 0.2608, 0.7519, 0.3594, 0.8499, 0.1793, 0.6810, - 0.4007, 0.7171, 0.6382, 0.5741, 0.6774, 0.9314, 0.4111, - 0.3073, 0.3304, 0.1048, 0.8381, 0.8387, 0.6626, 0.9350, - 0.3559, 0.0898, 0.9007, 0.8371, 0.0933, 0.7505, 0.1760, - 0.8200, 0.6000, 0.8535, 0.8461, 0.8576, 0.1788, 0.4860, - 0.8224, 0.8204, 0.6502, 0.3745, 0.9926, 0.3104, 0.0651, - 0.2962, 0.9752, 0.3565, 0.2747, 0.0907, 
0.8216, 0.2197, - 0.9709, 0.0139, 0.1557, 0.9115, 0.1512, 0.0977, 0.5834, - 0.2007, 0.9483, 0.8228, 0.2172, 0.6709, 0.2237, 0.7140, - 0.3427, 0.4328, 0.7857, 0.0871, 0.8851]), + col_indices=tensor([1956, 3431, 4238, 4052, 786, 1266, 4001, 4666, 1092, + 3296, 1173, 3822, 1879, 2941, 234, 3811, 4402, 613, + 1443, 3709, 2594, 4608, 2613, 4283, 2501, 2445, 1427, + 1885, 2181, 4860, 4243, 1214, 4813, 213, 317, 2056, + 1188, 2407, 2876, 3155, 1101, 928, 2235, 3824, 2607, + 1838, 1357, 2674, 2550, 2106, 3424, 1994, 546, 3354, + 2675, 2714, 2261, 610, 3755, 671, 2095, 3697, 1347, + 360, 4317, 4158, 117, 2249, 4990, 9, 681, 1881, + 3923, 690, 4091, 4107, 2892, 3886, 738, 1353, 208, + 2168, 895, 3568, 3595, 4527, 3909, 2258, 3141, 4180, + 3592, 2057, 988, 1280, 2222, 2461, 1230, 4436, 4278, + 242, 2898, 3677, 486, 2915, 2700, 3313, 1582, 3947, + 2304, 3998, 4209, 1784, 1143, 1298, 4100, 1668, 4807, + 1129, 194, 547, 3477, 3466, 2653, 743, 1451, 4033, + 4459, 4677, 861, 209, 3635, 4103, 4596, 643, 4673, + 918, 2069, 3861, 438, 73, 4960, 2624, 85, 282, + 1088, 4531, 2175, 2789, 2455, 4369, 79, 3997, 2161, + 4382, 981, 4359, 3035, 4336, 2903, 3375, 3402, 3351, + 1467, 4310, 1833, 4757, 656, 4630, 2068, 4311, 2912, + 3879, 2330, 4316, 2205, 1238, 2693, 24, 3949, 2312, + 3596, 1298, 4258, 443, 1549, 4970, 757, 2174, 4522, + 1179, 2988, 3936, 197, 3432, 3114, 1592, 3091, 2331, + 4463, 471, 4489, 3432, 1070, 2015, 3140, 985, 1046, + 1718, 2928, 3116, 850, 3114, 4187, 476, 1975, 2045, + 614, 4508, 49, 4308, 3320, 2902, 2360, 679, 1323, + 174, 1683, 333, 3655, 3484, 4282, 1715, 4161, 2251, + 4716, 3026, 1067, 4926, 819, 4915, 1642, 2499, 4667, + 1179, 724, 2553, 2235, 2183, 4346, 882]), + values=tensor([0.9579, 0.0390, 0.9157, 0.4104, 0.8526, 0.8581, 0.0404, + 0.7977, 0.1127, 0.6398, 0.6320, 0.5466, 0.3249, 0.5353, + 0.2779, 0.1888, 0.8800, 0.3646, 0.5797, 0.6102, 0.4524, + 0.6672, 0.1933, 0.3172, 0.9142, 0.2116, 0.8694, 0.7671, + 0.3860, 0.8538, 0.6306, 0.0812, 0.2634, 0.0747, 0.1165, + 0.9209, 0.5169, 0.3874, 0.9909, 0.1687, 0.6244, 0.9682, + 0.8180, 0.6958, 0.5743, 0.6762, 0.4274, 0.8734, 0.8720, + 0.7096, 0.0314, 0.6237, 0.5408, 0.2599, 0.6993, 0.8694, + 0.9325, 0.6629, 0.2586, 0.5189, 0.1311, 0.0244, 0.5487, + 0.2381, 0.3764, 0.6005, 0.3517, 0.2902, 0.4481, 0.7095, + 0.8595, 0.4167, 0.1455, 0.6941, 0.7561, 0.5053, 0.8598, + 0.7230, 0.0345, 0.2736, 0.2353, 0.0978, 0.0877, 0.4407, + 0.2045, 0.2192, 0.5832, 0.6619, 0.6681, 0.4330, 0.3404, + 0.7309, 0.2497, 0.1260, 0.4309, 0.3463, 0.5670, 0.2970, + 0.0325, 0.3660, 0.0477, 0.7916, 0.4884, 0.4378, 0.2655, + 0.3694, 0.2809, 0.0571, 0.7346, 0.1636, 0.4691, 0.0250, + 0.7714, 0.9482, 0.5488, 0.4704, 0.4720, 0.5671, 0.0525, + 0.3009, 0.2823, 0.4234, 0.6898, 0.6608, 0.4215, 0.1505, + 0.7591, 0.3435, 0.3440, 0.1149, 0.8423, 0.9694, 0.2894, + 0.1485, 0.3692, 0.7112, 0.1061, 0.2705, 0.6923, 0.0732, + 0.3921, 0.4155, 0.1813, 0.6651, 0.6460, 0.6574, 0.6629, + 0.9666, 0.4550, 0.0643, 0.0699, 0.5834, 0.9262, 0.5997, + 0.8477, 0.8876, 0.3470, 0.5717, 0.3027, 0.1708, 0.3344, + 0.6564, 0.5221, 0.9971, 0.5670, 0.6595, 0.4442, 0.6979, + 0.0346, 0.7421, 0.0167, 0.0427, 0.0476, 0.9750, 0.8359, + 0.0377, 0.5836, 0.0387, 0.6069, 0.4111, 0.5689, 0.8082, + 0.2356, 0.9052, 0.6351, 0.6643, 0.1830, 0.8091, 0.9222, + 0.0255, 0.1085, 0.2291, 0.7019, 0.5629, 0.9653, 0.1272, + 0.0502, 0.4026, 0.3717, 0.7964, 0.3440, 0.2227, 0.9211, + 0.0353, 0.8462, 0.1378, 0.9728, 0.9579, 0.9282, 0.2986, + 0.0902, 0.8570, 0.3740, 0.3544, 0.0310, 0.9061, 0.7041, + 0.8388, 0.2022, 0.8961, 0.9929, 0.7981, 
0.4087, 0.5336, + 0.1071, 0.4185, 0.4326, 0.1289, 0.6766, 0.0293, 0.3061, + 0.0095, 0.2011, 0.1571, 0.7405, 0.5313, 0.7308, 0.3716, + 0.4635, 0.9587, 0.8976, 0.2622, 0.9705, 0.8033, 0.2844, + 0.0305, 0.3661, 0.4222, 0.5149, 0.3785]), size=(5000, 5000), nnz=250, layout=torch.sparse_csr) -tensor([0.5944, 0.7034, 0.9760, ..., 0.8304, 0.5842, 0.4500]) +tensor([0.4029, 0.7352, 0.6155, ..., 0.7243, 0.5046, 0.3375]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -317,13 +344,13 @@ Rows: 5000 Size: 25000000 NNZ: 250 Density: 1e-05 -Time: 10.499107599258423 seconds +Time: 10.475499391555786 seconds -[18.49, 21.77, 18.79, 18.12, 18.06, 18.03, 18.09, 17.74, 18.25, 17.89] -[72.8] -13.888601303100586 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 357325, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.499107599258423, 'TIME_S_1KI': 0.029382516194664303, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1011.0901748657226, 'W': 72.8} -[18.49, 21.77, 18.79, 18.12, 18.06, 18.03, 18.09, 17.74, 18.25, 17.89, 37.95, 41.51, 40.96, 43.46, 47.23, 47.25, 47.57, 33.36, 24.72, 27.91] -526.03 -26.301499999999997 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 357325, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.499107599258423, 'TIME_S_1KI': 0.029382516194664303, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1011.0901748657226, 'W': 72.8, 'J_1KI': 2.829609388835717, 'W_1KI': 0.20373609459175818, 'W_D': 46.4985, 'J_D': 645.7991276922226, 'W_D_1KI': 0.1301294339886658, 'J_D_1KI': 0.0003641766850588842} +[18.92, 18.43, 18.51, 18.83, 18.51, 18.54, 18.52, 18.62, 18.72, 18.39] +[81.49] +13.97811222076416 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 362766, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.475499391555786, 'TIME_S_1KI': 0.028876739803498083, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1139.0763648700713, 'W': 81.49} +[18.92, 18.43, 18.51, 18.83, 18.51, 18.54, 18.52, 18.62, 18.72, 18.39, 18.92, 18.79, 18.57, 18.36, 18.52, 18.48, 18.7, 18.31, 18.45, 19.09] +334.52 +16.726 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 362766, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.475499391555786, 'TIME_S_1KI': 0.028876739803498083, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1139.0763648700713, 'W': 81.49, 'J_1KI': 3.1399755348353247, 'W_1KI': 0.22463516426566987, 'W_D': 64.764, 'J_D': 905.27845986557, 'W_D_1KI': 0.17852830750401083, 'J_D_1KI': 0.0004921307606115536} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.json index e1d6586..f0d7169 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.json +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.json @@ -1 +1 @@ -{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 344337, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], 
"MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.209988117218018, "TIME_S_1KI": 0.029651150231366417, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 996.8449161434174, "W": 73.42, "J_1KI": 2.894968929111357, "W_1KI": 0.21322135001466586, "W_D": 47.158, "J_D": 640.2780244550705, "W_D_1KI": 0.13695304309441042, "J_D_1KI": 0.0003977296749823877} +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 345167, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.38567328453064, "TIME_S_1KI": 0.030088836083781587, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1109.5468463230131, "W": 81.24, "J_1KI": 3.2145218005284777, "W_1KI": 0.23536433088910583, "W_D": 63.900749999999995, "J_D": 872.7335750883221, "W_D_1KI": 0.18512995158865128, "J_D_1KI": 0.000536348931353957} diff --git a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.output index 395a17f..2c66565 100644 --- a/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.output +++ b/pytorch/output_synthetic_maxcore/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.output @@ -1,13 +1,13 @@ ['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.019530296325683594} +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.018830299377441406} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1250, 1250]), - col_indices=tensor([2184, 3902, 2442, ..., 1965, 3430, 1316]), - values=tensor([0.1298, 0.8618, 0.5661, ..., 0.6807, 0.5041, 0.2985]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([3576, 214, 2738, ..., 1757, 4161, 1906]), + values=tensor([0.7403, 0.6577, 0.1056, ..., 0.5544, 0.8169, 0.7972]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.7563, 0.3154, 0.3739, ..., 0.6493, 0.1597, 0.7226]) +tensor([0.7748, 0.7223, 0.4756, ..., 0.9335, 0.0436, 0.9545]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -15,18 +15,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 0.019530296325683594 seconds +Time: 0.018830299377441406 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53762', '-ss', '5000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.6393845081329346} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '55761', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.696251392364502} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1248, 1249, 1250]), - col_indices=tensor([ 543, 3603, 176, ..., 1860, 3976, 394]), - values=tensor([0.3999, 0.3952, 0.5467, ..., 0.7650, 0.4806, 0.1331]), +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([2614, 1565, 3194, ..., 1351, 1971, 4664]), + values=tensor([0.6203, 0.6869, 0.7061, ..., 0.5399, 0.9629, 0.8026]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.3582, 0.6649, 0.9739, ..., 0.1792, 0.8232, 0.7194]) +tensor([0.2679, 0.2756, 0.7180, ..., 0.6222, 0.5676, 0.0842]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -34,18 +34,18 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 1.6393845081329346 seconds +Time: 1.696251392364502 seconds -['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '344337', '-ss', '5000', '-sd', '5e-05'] -{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.209988117218018} +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '345167', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.38567328453064} /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1250, 1250]), - col_indices=tensor([2469, 1957, 4805, ..., 986, 4084, 3397]), - values=tensor([0.1203, 0.9340, 0.5005, ..., 0.1600, 0.9840, 0.6585]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1249, 1250]), + col_indices=tensor([2243, 2702, 4050, ..., 3862, 1966, 3967]), + values=tensor([0.8868, 0.5024, 0.1535, ..., 0.5829, 0.5937, 0.1171]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.7772, 0.5114, 0.7654, ..., 0.7246, 0.7942, 0.6121]) +tensor([0.0483, 0.4917, 0.7447, ..., 0.0369, 0.7186, 0.5491]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -53,15 +53,15 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.209988117218018 seconds +Time: 10.38567328453064 seconds /nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
matrix = matrix.to_sparse_csr().type(torch.float32) -tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1250, 1250]), - col_indices=tensor([2469, 1957, 4805, ..., 986, 4084, 3397]), - values=tensor([0.1203, 0.9340, 0.5005, ..., 0.1600, 0.9840, 0.6585]), +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1249, 1250]), + col_indices=tensor([2243, 2702, 4050, ..., 3862, 1966, 3967]), + values=tensor([0.8868, 0.5024, 0.1535, ..., 0.5829, 0.5937, 0.1171]), size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) -tensor([0.7772, 0.5114, 0.7654, ..., 0.7246, 0.7942, 0.6121]) +tensor([0.0483, 0.4917, 0.7447, ..., 0.0369, 0.7186, 0.5491]) Matrix Type: synthetic Matrix Format: csr Shape: torch.Size([5000, 5000]) @@ -69,13 +69,13 @@ Rows: 5000 Size: 25000000 NNZ: 1250 Density: 5e-05 -Time: 10.209988117218018 seconds +Time: 10.38567328453064 seconds -[40.04, 39.64, 39.87, 39.84, 41.87, 42.36, 39.49, 37.87, 39.91, 39.97] -[73.42] -13.577293872833252 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 344337, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.209988117218018, 'TIME_S_1KI': 0.029651150231366417, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 996.8449161434174, 'W': 73.42} -[40.04, 39.64, 39.87, 39.84, 41.87, 42.36, 39.49, 37.87, 39.91, 39.97, 18.46, 17.91, 18.96, 18.18, 18.6, 18.04, 18.09, 18.2, 18.22, 17.91] -525.24 -26.262 -{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 344337, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.209988117218018, 'TIME_S_1KI': 0.029651150231366417, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 996.8449161434174, 'W': 73.42, 'J_1KI': 2.894968929111357, 'W_1KI': 0.21322135001466586, 'W_D': 47.158, 'J_D': 640.2780244550705, 'W_D_1KI': 0.13695304309441042, 'J_D_1KI': 0.0003977296749823877} +[18.8, 18.54, 18.53, 18.71, 18.61, 18.52, 22.53, 18.9, 18.79, 18.82] +[81.24] +13.657642126083374 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 345167, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.38567328453064, 'TIME_S_1KI': 0.030088836083781587, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1109.5468463230131, 'W': 81.24} +[18.8, 18.54, 18.53, 18.71, 18.61, 18.52, 22.53, 18.9, 18.79, 18.82, 19.04, 18.76, 22.81, 20.34, 18.63, 19.47, 18.63, 18.72, 18.67, 18.59] +346.78499999999997 +17.33925 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 345167, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.38567328453064, 'TIME_S_1KI': 0.030088836083781587, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1109.5468463230131, 'W': 81.24, 'J_1KI': 3.2145218005284777, 'W_1KI': 0.23536433088910583, 'W_D': 63.900749999999995, 'J_D': 872.7335750883221, 'W_D_1KI': 0.18512995158865128, 'J_D_1KI': 0.000536348931353957} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..b76a936 --- /dev/null +++ 
b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 690, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 15.003246307373047, "TIME_S_1KI": 21.74383522807688, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 736.2026811027528, "W": 51.180846035126734, "J_1KI": 1066.960407395294, "W_1KI": 74.1751391813431, "W_D": 35.27984603512673, "J_D": 507.4772937932015, "W_D_1KI": 51.1302116451112, "J_D_1KI": 74.10175600740754} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..175c45d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.520033359527588} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 21, ..., 999972, + 999987, 1000000]), + col_indices=tensor([ 5448, 9227, 13530, ..., 69235, 78462, 82074]), + values=tensor([0.3723, 0.4802, 0.5484, ..., 0.8279, 0.7827, 0.0235]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.9343, 0.5585, 0.5996, ..., 0.1233, 0.3217, 0.3044]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 1.520033359527588 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 690 -ss 100000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 15.003246307373047} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 20, ..., 999982, + 999992, 1000000]), + col_indices=tensor([ 122, 3109, 10697, ..., 57820, 90383, 91253]), + values=tensor([0.9003, 0.9806, 0.3302, ..., 0.9034, 0.0249, 0.7877]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.6863, 0.9695, 0.8316, ..., 0.5738, 0.3295, 0.1413]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 15.003246307373047 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 20, ..., 999982, + 999992, 1000000]), + col_indices=tensor([ 122, 3109, 10697, ..., 57820, 90383, 91253]), + values=tensor([0.9003, 0.9806, 0.3302, ..., 0.9034, 0.0249, 0.7877]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.6863, 0.9695, 0.8316, ..., 0.5738, 0.3295, 0.1413]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 15.003246307373047 seconds + +[17.56, 17.56, 17.72, 17.72, 17.72, 17.92, 17.92, 17.64, 17.48, 17.56] +[17.24, 17.28, 18.12, 18.12, 20.28, 29.36, 44.04, 63.04, 74.68, 91.04, 91.28, 91.4, 88.68, 90.76] +14.384339809417725 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 690, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 15.003246307373047, 'TIME_S_1KI': 21.74383522807688, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 736.2026811027528, 'W': 51.180846035126734} +[17.56, 17.56, 17.72, 17.72, 17.72, 17.92, 17.92, 17.64, 17.48, 17.56, 17.84, 17.84, 17.6, 17.92, 17.8, 17.8, 17.68, 17.52, 17.16, 17.08] +318.02000000000004 +15.901000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 690, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 15.003246307373047, 'TIME_S_1KI': 21.74383522807688, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 736.2026811027528, 'W': 51.180846035126734, 'J_1KI': 1066.960407395294, 'W_1KI': 74.1751391813431, 'W_D': 35.27984603512673, 'J_D': 507.4772937932015, 'W_D_1KI': 51.1302116451112, 'J_D_1KI': 74.10175600740754} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..a46b44b --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 14.453001976013184, "TIME_S_1KI": 144.53001976013184, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 
10, "J": 1049.84748960495, "W": 50.98554654857658, "J_1KI": 10498.4748960495, "W_1KI": 509.85546548576576, "W_D": 35.17954654857658, "J_D": 724.3848723731041, "W_D_1KI": 351.7954654857658, "J_D_1KI": 3517.954654857658} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..f3d22b6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 14.453001976013184} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 101, 216, ..., 9999796, + 9999906, 10000000]), + col_indices=tensor([ 2079, 2370, 2404, ..., 91560, 92604, 94393]), + values=tensor([0.9041, 0.3243, 0.4250, ..., 0.8376, 0.1046, 0.5896]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3984, 0.6112, 0.9728, ..., 0.2358, 0.4123, 0.2200]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 14.453001976013184 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 101, 216, ..., 9999796, + 9999906, 10000000]), + col_indices=tensor([ 2079, 2370, 2404, ..., 91560, 92604, 94393]), + values=tensor([0.9041, 0.3243, 0.4250, ..., 0.8376, 0.1046, 0.5896]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.3984, 0.6112, 0.9728, ..., 0.2358, 0.4123, 0.2200]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 14.453001976013184 seconds + +[17.32, 17.28, 17.28, 17.36, 17.64, 17.72, 17.68, 17.92, 17.88, 17.72] +[17.76, 17.96, 18.52, 20.36, 21.52, 25.36, 25.36, 31.2, 31.76, 45.88, 58.28, 65.88, 77.32, 84.48, 83.76, 84.4, 84.16, 84.96, 85.8, 86.16] +20.5910804271698 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 14.453001976013184, 'TIME_S_1KI': 144.53001976013184, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.84748960495, 'W': 50.98554654857658} +[17.32, 17.28, 17.28, 17.36, 17.64, 17.72, 17.68, 17.92, 17.88, 17.72, 17.16, 17.24, 17.32, 17.32, 17.44, 17.8, 17.76, 17.76, 17.72, 17.8] +316.12 +15.806000000000001 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 14.453001976013184, 'TIME_S_1KI': 144.53001976013184, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.84748960495, 'W': 50.98554654857658, 'J_1KI': 10498.4748960495, 'W_1KI': 509.85546548576576, 'W_D': 35.17954654857658, 'J_D': 724.3848723731041, 'W_D_1KI': 351.7954654857658, 'J_D_1KI': 3517.954654857658} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..c8834ef --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4862, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 16.123133897781372, "TIME_S_1KI": 3.316152591069801, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1103.8810626983643, "W": 59.41987873604025, "J_1KI": 227.04258796757802, "W_1KI": 12.221283162492854, "W_D": 43.51287873604025, "J_D": 808.3665574879647, "W_D_1KI": 8.949584273146904, "J_D_1KI": 1.8407207472535796} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..7c14ca6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 
1e-05, "TIME_S": 0.3613910675048828} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 99999, 100000, + 100000]), + col_indices=tensor([27177, 91140, 35351, ..., 36842, 63353, 40213]), + values=tensor([0.8085, 0.0220, 0.0238, ..., 0.9528, 0.8072, 0.8356]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.5173, 0.6337, 0.4770, ..., 0.8095, 0.3804, 0.7829]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.3613910675048828 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2905 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.272582054138184} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99997, + 100000]), + col_indices=tensor([57428, 26674, 73957, ..., 55311, 85675, 87326]), + values=tensor([0.1133, 0.1214, 0.0575, ..., 0.3604, 0.8021, 0.3274]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.7862, 0.8717, 0.9240, ..., 0.8758, 0.8236, 0.0748]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 6.272582054138184 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4862 -ss 100000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 16.123133897781372} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 99999, 100000, + 100000]), + col_indices=tensor([36017, 48507, 97216, ..., 44545, 10809, 6488]), + values=tensor([0.2196, 0.1379, 0.1607, ..., 0.1720, 0.9833, 0.1649]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6606, 0.9945, 0.6366, ..., 0.7352, 0.9827, 0.2989]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 16.123133897781372 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 99999, 100000, + 100000]), + col_indices=tensor([36017, 48507, 97216, ..., 44545, 10809, 6488]), + values=tensor([0.2196, 0.1379, 0.1607, ..., 0.1720, 0.9833, 0.1649]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6606, 0.9945, 0.6366, ..., 0.7352, 0.9827, 0.2989]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 16.123133897781372 seconds + +[17.72, 17.76, 17.68, 17.88, 17.72, 17.52, 17.6, 17.48, 17.6, 17.6] +[17.72, 17.72, 17.8, 21.08, 22.76, 35.32, 49.56, 66.44, 76.68, 89.56, 88.0, 87.44, 86.52, 85.84, 85.76, 86.6, 86.04, 85.28] +18.57763910293579 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4862, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 16.123133897781372, 'TIME_S_1KI': 3.316152591069801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1103.8810626983643, 'W': 59.41987873604025} +[17.72, 17.76, 17.68, 17.88, 17.72, 17.52, 17.6, 17.48, 17.6, 17.6, 17.8, 17.8, 17.76, 17.68, 17.76, 17.88, 17.72, 17.48, 17.52, 17.48] +318.14 +15.907 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4862, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 16.123133897781372, 'TIME_S_1KI': 3.316152591069801, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1103.8810626983643, 'W': 59.41987873604025, 'J_1KI': 227.04258796757802, 'W_1KI': 12.221283162492854, 'W_D': 43.51287873604025, 'J_D': 808.3665574879647, 'W_D_1KI': 8.949584273146904, 'J_D_1KI': 1.8407207472535796} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..0831cb8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1295, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 13.363306522369385, "TIME_S_1KI": 10.319155615729255, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 
1148.1477067947387, "W": 58.62479671787461, "J_1KI": 886.600545787443, "W_1KI": 45.27011329565607, "W_D": 42.857796717874606, "J_D": 839.3561048357486, "W_D_1KI": 33.09482372036649, "J_D_1KI": 25.55584843271544} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..fe6c43d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 100000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.8107287883758545} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 12, ..., 499988, 499996, + 500000]), + col_indices=tensor([ 2924, 5581, 32898, ..., 25573, 35176, 44980]), + values=tensor([0.4338, 0.2090, 0.8667, ..., 0.7968, 0.4161, 0.0285]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.7311, 0.8750, 0.4611, ..., 0.2473, 0.3600, 0.7684]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.8107287883758545 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1295 -ss 100000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 13.363306522369385} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 499992, 499997, + 500000]), + col_indices=tensor([20312, 45798, 57469, ..., 9915, 72511, 98823]), + values=tensor([0.2450, 0.9842, 0.2161, ..., 0.8948, 0.9103, 0.6478]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.2448, 0.6389, 0.3272, ..., 0.3843, 0.8480, 0.1017]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 13.363306522369385 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 499992, 499997, + 500000]), + col_indices=tensor([20312, 45798, 57469, ..., 9915, 72511, 98823]), + values=tensor([0.2450, 0.9842, 0.2161, ..., 0.8948, 0.9103, 0.6478]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.2448, 0.6389, 0.3272, ..., 0.3843, 0.8480, 0.1017]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 13.363306522369385 seconds + +[17.48, 17.6, 17.56, 17.72, 17.52, 17.48, 17.56, 17.6, 17.6, 17.8] +[18.0, 17.84, 17.76, 18.88, 19.6, 35.4, 49.36, 64.12, 77.28, 86.8, 84.84, 85.28, 83.64, 83.64, 82.96, 82.4, 81.96, 80.44, 80.16] +19.58467698097229 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1295, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 13.363306522369385, 'TIME_S_1KI': 10.319155615729255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1148.1477067947387, 'W': 58.62479671787461} +[17.48, 17.6, 17.56, 17.72, 17.52, 17.48, 17.56, 17.6, 17.6, 17.8, 17.64, 17.44, 17.36, 17.52, 17.36, 17.2, 17.52, 17.44, 17.6, 17.6] +315.34 +15.767 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1295, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 13.363306522369385, 'TIME_S_1KI': 10.319155615729255, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1148.1477067947387, 'W': 58.62479671787461, 'J_1KI': 886.600545787443, 'W_1KI': 45.27011329565607, 'W_D': 42.857796717874606, 'J_D': 839.3561048357486, 'W_D_1KI': 33.09482372036649, 'J_D_1KI': 25.55584843271544} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..30708d9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 33188, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.848255395889282, "TIME_S_1KI": 0.3268728274041606, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 283.2797647285461, "W": 19.966696619139725, "J_1KI": 8.535608193580394, "W_1KI": 0.6016239791231688, "W_D": 4.899696619139723, "J_D": 69.51499950075144, "W_D_1KI": 0.14763458536638915, "J_D_1KI": 0.0044484327276843785} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..ce5f5e4 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], 
"MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.039971351623535156} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9999, 9999, 10000]), + col_indices=tensor([5709, 5957, 5382, ..., 8260, 3428, 9778]), + values=tensor([0.1410, 0.8327, 0.5618, ..., 0.6127, 0.7950, 0.7693]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.9182, 0.7141, 0.2679, ..., 0.7671, 0.9231, 0.7252]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.039971351623535156 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 26268 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.310471773147583} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 9999, 9999, 10000]), + col_indices=tensor([6011, 6260, 4075, ..., 5576, 1824, 8975]), + values=tensor([0.9641, 0.0766, 0.9967, ..., 0.9539, 0.3769, 0.8002]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.0022, 0.8912, 0.7670, ..., 0.3905, 0.8453, 0.1961]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 8.310471773147583 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 33188 -ss 10000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.848255395889282} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 4, ..., 9995, 9997, 10000]), + col_indices=tensor([1948, 7195, 8876, ..., 111, 6612, 9607]), + values=tensor([0.9860, 0.8888, 0.2852, ..., 0.2300, 0.3266, 0.6773]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.8303, 0.1186, 0.7718, ..., 0.9103, 0.1807, 0.5186]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.848255395889282 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 4, ..., 9995, 9997, 10000]), + col_indices=tensor([1948, 7195, 8876, ..., 111, 6612, 9607]), + values=tensor([0.9860, 0.8888, 0.2852, ..., 0.2300, 0.3266, 0.6773]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.8303, 0.1186, 0.7718, ..., 0.9103, 0.1807, 0.5186]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.848255395889282 seconds + +[17.16, 17.24, 17.28, 17.28, 17.12, 16.8, 16.4, 16.48, 16.32, 16.6] +[16.76, 16.84, 20.04, 22.12, 23.96, 24.76, 24.76, 25.32, 22.2, 20.84, 19.88, 20.16, 20.24, 20.04] +14.187613010406494 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 33188, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.848255395889282, 'TIME_S_1KI': 0.3268728274041606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 283.2797647285461, 'W': 19.966696619139725} +[17.16, 17.24, 17.28, 17.28, 17.12, 16.8, 16.4, 16.48, 16.32, 16.6, 16.72, 16.48, 16.8, 16.64, 16.64, 16.68, 16.68, 16.56, 16.44, 16.52] +301.34000000000003 +15.067000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 33188, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.848255395889282, 'TIME_S_1KI': 0.3268728274041606, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 283.2797647285461, 'W': 19.966696619139725, 'J_1KI': 8.535608193580394, 'W_1KI': 0.6016239791231688, 'W_D': 4.899696619139723, 'J_D': 69.51499950075144, 'W_D_1KI': 0.14763458536638915, 'J_D_1KI': 0.0044484327276843785} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..9b3c069 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 4682, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.500732183456421, "TIME_S_1KI": 2.2427877367484883, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 298.49395805358887, "W": 21.02945948615313, "J_1KI": 
63.753515175905356, "W_1KI": 4.491554781322753, "W_D": 5.974459486153128, "J_D": 84.80199219703674, "W_D_1KI": 1.2760485873885365, "J_D_1KI": 0.27254348299627007} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..faf5980 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.2965726852416992} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 21, ..., 99978, 99991, + 100000]), + col_indices=tensor([ 670, 2215, 2340, ..., 5626, 6766, 7426]), + values=tensor([0.8454, 0.9971, 0.8602, ..., 0.7286, 0.6853, 0.9650]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8368, 0.0517, 0.8880, ..., 0.1583, 0.3761, 0.3465]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.2965726852416992 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3540 -ss 10000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 7.9380598068237305} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 19, ..., 99981, 99990, + 100000]), + col_indices=tensor([ 218, 1968, 1988, ..., 6580, 8417, 9626]), + values=tensor([0.6495, 0.9774, 0.8878, ..., 0.7618, 0.8151, 0.2290]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.1753, 0.6932, 0.8031, ..., 0.2090, 0.4929, 0.1708]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 7.9380598068237305 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4682 -ss 10000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.500732183456421} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 20, ..., 99977, 99983, + 100000]), + col_indices=tensor([ 32, 224, 1507, ..., 8865, 9626, 9660]), + values=tensor([0.3062, 0.4385, 0.1947, ..., 0.8116, 0.9937, 0.3510]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.0637, 0.2734, 0.9054, ..., 0.1369, 0.8787, 0.2539]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.500732183456421 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 20, ..., 99977, 99983, + 100000]), + col_indices=tensor([ 32, 224, 1507, ..., 8865, 9626, 9660]), + values=tensor([0.3062, 0.4385, 0.1947, ..., 0.8116, 0.9937, 0.3510]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.0637, 0.2734, 0.9054, ..., 0.1369, 0.8787, 0.2539]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.500732183456421 seconds + +[16.56, 16.28, 16.36, 16.32, 16.4, 16.32, 16.36, 16.44, 16.36, 16.28] +[16.24, 15.96, 19.32, 19.32, 21.08, 28.0, 29.28, 30.04, 27.12, 25.96, 20.44, 20.04, 19.96, 19.96] +14.194086074829102 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.500732183456421, 'TIME_S_1KI': 2.2427877367484883, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.49395805358887, 'W': 21.02945948615313} +[16.56, 16.28, 16.36, 16.32, 16.4, 16.32, 16.36, 16.44, 16.36, 16.28, 16.52, 16.8, 17.04, 17.04, 17.16, 17.48, 17.4, 17.4, 16.84, 16.84] +301.1 +15.055000000000001 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 4682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.500732183456421, 'TIME_S_1KI': 2.2427877367484883, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 298.49395805358887, 'W': 21.02945948615313, 'J_1KI': 63.753515175905356, 'W_1KI': 4.491554781322753, 'W_D': 5.974459486153128, 'J_D': 84.80199219703674, 'W_D_1KI': 1.2760485873885365, 'J_D_1KI': 0.27254348299627007} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..b135810 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 481, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.463550806045532, "TIME_S_1KI": 21.75374387951254, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 332.4283616256714, "W": 23.410421194984455, "J_1KI": 691.1192549390258, "W_1KI": 48.67031433468701, "W_D": 6.792421194984453, "J_D": 96.45249141454698, "W_D_1KI": 14.121457785830463, "J_D_1KI": 29.35854009528163} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..bf59b23 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.1813583374023438} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 110, 205, ..., 999812, + 999909, 1000000]), + col_indices=tensor([ 238, 361, 384, ..., 9612, 9634, 9698]), + values=tensor([0.5750, 0.8151, 0.3516, ..., 0.9302, 0.8544, 0.3311]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.6330, 0.0586, 0.5281, ..., 0.1634, 0.9727, 0.9265]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 2.1813583374023438 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 481 -ss 10000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.463550806045532} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 92, 212, ..., 999818, + 999919, 1000000]), + col_indices=tensor([ 159, 190, 205, ..., 9628, 9649, 9961]), + values=tensor([0.6162, 0.4289, 0.4486, ..., 0.9461, 0.6549, 0.6632]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8230, 0.2220, 0.9348, ..., 0.2779, 0.8915, 0.3439]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.463550806045532 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 92, 212, ..., 999818, + 999919, 1000000]), + col_indices=tensor([ 159, 190, 205, ..., 9628, 9649, 9961]), + values=tensor([0.6162, 0.4289, 0.4486, ..., 0.9461, 0.6549, 0.6632]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8230, 0.2220, 0.9348, ..., 0.2779, 0.8915, 0.3439]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.463550806045532 seconds + +[20.24, 19.08, 17.84, 18.08, 19.12, 19.88, 20.84, 22.4, 22.4, 22.88] +[22.68, 22.68, 21.44, 22.2, 23.24, 31.92, 32.08, 31.76, 30.52, 23.92, 21.88, 21.88, 22.0, 22.24] +14.200016260147095 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 481, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.463550806045532, 'TIME_S_1KI': 21.75374387951254, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.4283616256714, 'W': 23.410421194984455} +[20.24, 19.08, 17.84, 18.08, 19.12, 19.88, 20.84, 22.4, 22.4, 22.88, 16.72, 17.0, 17.08, 16.92, 17.04, 16.84, 16.64, 16.56, 16.56, 16.32] +332.36 +16.618000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 481, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.463550806045532, 'TIME_S_1KI': 21.75374387951254, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 332.4283616256714, 'W': 23.410421194984455, 'J_1KI': 691.1192549390258, 'W_1KI': 48.67031433468701, 'W_D': 6.792421194984453, 'J_D': 96.45249141454698, 'W_D_1KI': 14.121457785830463, 'J_D_1KI': 29.35854009528163} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..18a5242 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 11.54654312133789, "TIME_S_1KI": 115.4654312133789, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 353.263671875, "W": 21.71035974754772, "J_1KI": 3532.63671875, "W_1KI": 217.1035974754772, "W_D": 6.712359747547719, "J_D": 109.22126021575927, "W_D_1KI": 67.1235974754772, "J_D_1KI": 671.235974754772} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..d49136b --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 11.54654312133789} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR 
tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 487, 979, ..., 4998949, + 4999474, 5000000]), + col_indices=tensor([ 0, 2, 9, ..., 9960, 9969, 9987]), + values=tensor([0.4699, 0.5377, 0.9444, ..., 0.8781, 0.2092, 0.8180]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7212, 0.1994, 0.8974, ..., 0.2164, 0.6888, 0.0461]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 11.54654312133789 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 487, 979, ..., 4998949, + 4999474, 5000000]), + col_indices=tensor([ 0, 2, 9, ..., 9960, 9969, 9987]), + values=tensor([0.4699, 0.5377, 0.9444, ..., 0.8781, 0.2092, 0.8180]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7212, 0.1994, 0.8974, ..., 0.2164, 0.6888, 0.0461]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 11.54654312133789 seconds + +[16.56, 16.48, 16.72, 16.72, 16.8, 16.8, 16.64, 16.52, 16.52, 16.48] +[16.44, 16.64, 17.04, 18.24, 19.64, 27.0, 30.2, 30.0, 30.0, 30.08, 26.08, 22.8, 20.4, 20.48, 20.64, 20.48] +16.271663665771484 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 11.54654312133789, 'TIME_S_1KI': 115.4654312133789, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.263671875, 'W': 21.71035974754772} +[16.56, 16.48, 16.72, 16.72, 16.8, 16.8, 16.64, 16.52, 16.52, 16.48, 16.52, 16.36, 16.6, 16.84, 16.68, 16.68, 16.68, 16.96, 16.76, 16.84] +299.96000000000004 +14.998000000000001 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 11.54654312133789, 'TIME_S_1KI': 115.4654312133789, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 353.263671875, 'W': 21.71035974754772, 'J_1KI': 3532.63671875, 'W_1KI': 217.1035974754772, 'W_D': 6.712359747547719, 'J_D': 109.22126021575927, 'W_D_1KI': 67.1235974754772, 'J_D_1KI': 671.235974754772} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..94990e6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.129345178604126, "TIME_S_1KI": 211.29345178604126, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 619.1563564968109, "W": 21.836067076186225, "J_1KI": 6191.563564968109, "W_1KI": 218.36067076186225, "W_D": 6.581067076186224, "J_D": 186.6045519340038, "W_D_1KI": 65.81067076186224, "J_D_1KI": 658.1067076186224} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..12caf81 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 21.129345178604126} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 944, 1951, ..., 9997982, + 9998990, 10000000]), + col_indices=tensor([ 12, 17, 18, ..., 9973, 9979, 9988]), + values=tensor([0.0454, 0.8748, 0.1892, ..., 0.1417, 0.8974, 0.6226]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7518, 0.8666, 0.4972, ..., 0.4338, 0.0856, 0.8852]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 21.129345178604126 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 944, 1951, ..., 9997982, + 9998990, 10000000]), + col_indices=tensor([ 12, 17, 18, ..., 9973, 9979, 9988]), + values=tensor([0.0454, 0.8748, 0.1892, ..., 0.1417, 0.8974, 0.6226]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.7518, 0.8666, 0.4972, ..., 0.4338, 0.0856, 0.8852]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 21.129345178604126 seconds + +[16.8, 16.76, 16.76, 16.76, 16.88, 16.92, 16.92, 17.0, 17.04, 17.04] +[16.88, 17.04, 20.52, 21.6, 23.68, 23.68, 29.08, 30.56, 30.32, 30.08, 28.48, 23.96, 23.32, 20.92, 20.92, 21.08, 21.44, 21.44, 20.92, 20.72, 20.64, 20.4, 20.4, 20.56, 20.44, 20.36, 20.48, 20.68] +28.354756116867065 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.129345178604126, 'TIME_S_1KI': 211.29345178604126, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 619.1563564968109, 'W': 21.836067076186225} +[16.8, 16.76, 16.76, 16.76, 16.88, 16.92, 16.92, 17.0, 17.04, 17.04, 17.12, 17.28, 17.24, 17.24, 16.84, 16.64, 16.96, 16.72, 17.12, 17.08] +305.1 +15.255 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 21.129345178604126, 'TIME_S_1KI': 211.29345178604126, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 619.1563564968109, 'W': 21.836067076186225, 'J_1KI': 6191.563564968109, 'W_1KI': 218.36067076186225, 'W_D': 6.581067076186224, 'J_D': 186.6045519340038, 'W_D_1KI': 65.81067076186224, 'J_D_1KI': 658.1067076186224} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..364c8ea --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 42.021928787231445, "TIME_S_1KI": 420.21928787231445, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1136.679587249756, "W": 21.603080734566593, "J_1KI": 11366.79587249756, "W_1KI": 216.03080734566592, "W_D": 6.324080734566593, "J_D": 332.751312992096, "W_D_1KI": 63.24080734566592, "J_D_1KI": 632.4080734566592} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..b23f94c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 
42.021928787231445} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1977, 4005, ..., 19996092, + 19998061, 20000000]), + col_indices=tensor([ 0, 21, 29, ..., 9987, 9990, 9994]), + values=tensor([0.7851, 0.1514, 0.2063, ..., 0.8497, 0.4761, 0.4899]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.7703, 0.2799, 0.3874, ..., 0.1897, 0.4899, 0.7472]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 42.021928787231445 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1977, 4005, ..., 19996092, + 19998061, 20000000]), + col_indices=tensor([ 0, 21, 29, ..., 9987, 9990, 9994]), + values=tensor([0.7851, 0.1514, 0.2063, ..., 0.8497, 0.4761, 0.4899]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.7703, 0.2799, 0.3874, ..., 0.1897, 0.4899, 0.7472]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 42.021928787231445 seconds + +[16.84, 16.68, 16.68, 16.88, 16.96, 17.08, 17.12, 17.24, 17.0, 17.0] +[16.76, 16.8, 19.84, 20.68, 22.88, 22.88, 24.36, 33.76, 31.92, 32.48, 31.68, 31.64, 25.4, 24.92, 23.64, 20.2, 20.04, 20.04, 20.04, 19.84, 20.12, 20.36, 20.72, 20.76, 20.48, 20.32, 20.4, 20.28, 20.76, 20.84, 20.84, 20.92, 21.0, 20.96, 20.8, 20.8, 20.84, 20.6, 20.56, 20.56, 20.84, 20.76, 20.76, 20.8, 21.0, 21.08, 20.8, 20.96, 20.72, 20.72, 20.72, 20.76] +52.61655044555664 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.021928787231445, 'TIME_S_1KI': 420.21928787231445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1136.679587249756, 'W': 21.603080734566593} +[16.84, 16.68, 16.68, 16.88, 16.96, 17.08, 17.12, 17.24, 17.0, 17.0, 17.2, 17.16, 17.08, 16.84, 16.84, 16.8, 17.0, 17.04, 17.16, 17.0] +305.58 +15.279 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 42.021928787231445, 'TIME_S_1KI': 420.21928787231445, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1136.679587249756, 'W': 21.603080734566593, 'J_1KI': 11366.79587249756, 'W_1KI': 216.03080734566592, 'W_D': 6.324080734566593, 'J_D': 332.751312992096, 'W_D_1KI': 63.24080734566592, 'J_D_1KI': 632.4080734566592} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.3.json 
b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..f2ba0ed --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 63.62427043914795, "TIME_S_1KI": 636.2427043914795, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1670.8393529319762, "W": 21.725499941161722, "J_1KI": 16708.393529319765, "W_1KI": 217.2549994116172, "W_D": 6.518499941161723, "J_D": 501.3171735184193, "W_D_1KI": 65.18499941161723, "J_D_1KI": 651.8499941161723} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..6258d91 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 63.62427043914795} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2940, 5882, ..., 29993962, + 29997042, 30000000]), + col_indices=tensor([ 8, 14, 15, ..., 9977, 9993, 9996]), + values=tensor([0.8256, 0.2654, 0.0882, ..., 0.9659, 0.5243, 0.2720]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.4780, 0.6736, 0.3887, ..., 0.7294, 0.7408, 0.5543]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 63.62427043914795 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2940, 5882, ..., 29993962, + 29997042, 30000000]), + col_indices=tensor([ 8, 14, 15, ..., 9977, 9993, 9996]), + values=tensor([0.8256, 0.2654, 0.0882, ..., 0.9659, 0.5243, 0.2720]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.4780, 0.6736, 0.3887, ..., 0.7294, 0.7408, 0.5543]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 63.62427043914795 seconds + +[16.52, 16.44, 16.52, 16.76, 16.76, 16.68, 17.12, 16.6, 16.6, 16.64] +[16.76, 16.68, 16.6, 20.16, 21.48, 23.48, 24.68, 31.48, 34.68, 34.64, 35.08, 35.08, 34.68, 24.92, 25.84, 25.44, 24.52, 23.52, 20.92, 20.76, 20.76, 20.88, 20.6, 20.8, 20.8, 20.96, 20.88, 20.6, 20.8, 20.56, 20.44, 20.52, 20.48, 20.8, 20.56, 20.6, 20.6, 20.68, 20.72, 20.68, 20.88, 20.92, 20.8, 20.8, 20.52, 20.56, 20.68, 20.96, 20.8, 20.8, 20.92, 20.8, 20.56, 20.68, 20.8, 20.72, 20.8, 21.12, 21.24, 21.16, 21.12, 21.12, 21.04, 20.6, 20.36, 20.2, 20.4, 20.48, 20.52, 20.88, 20.64, 20.32, 20.4, 20.4, 20.4, 20.4] +76.90683102607727 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 63.62427043914795, 'TIME_S_1KI': 636.2427043914795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1670.8393529319762, 'W': 21.725499941161722} +[16.52, 16.44, 16.52, 16.76, 16.76, 16.68, 17.12, 16.6, 16.6, 16.64, 16.32, 16.72, 16.92, 17.2, 17.32, 17.52, 17.16, 17.24, 17.28, 17.12] +304.14 +15.206999999999999 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 63.62427043914795, 'TIME_S_1KI': 636.2427043914795, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1670.8393529319762, 'W': 21.725499941161722, 'J_1KI': 16708.393529319765, 'W_1KI': 217.2549994116172, 'W_D': 6.518499941161723, 'J_D': 501.3171735184193, 'W_D_1KI': 65.18499941161723, 'J_D_1KI': 651.8499941161723} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..66821f5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 141552, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.798607587814331, "TIME_S_1KI": 0.07628721309352274, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 269.76709407806396, "W": 18.985490730093364, "J_1KI": 1.9057808725985077, "W_1KI": 0.13412379005661076, "W_D": 4.025490730093365, "J_D": 57.198676185607916, "W_D_1KI": 0.028438246934648505, "J_D_1KI": 0.0002009031799949736} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..2de78b8 --- /dev/null +++ 
b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1200 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.015316009521484375} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([9112, 5292, 3575, 5961, 5092, 9328, 2019, 8963, 6545, + 7422, 818, 4088, 407, 2015, 64, 4328, 8365, 6206, + 9094, 5899, 6513, 5954, 4347, 8726, 6057, 3857, 2262, + 5262, 3242, 3739, 9546, 2710, 9191, 261, 5342, 5031, + 4219, 1193, 8424, 3055, 1118, 7444, 3421, 4719, 884, + 9797, 6413, 7746, 2915, 8061, 1921, 7039, 8615, 6298, + 8962, 4075, 2597, 5310, 5194, 9913, 649, 821, 1630, + 6166, 2303, 666, 4248, 9703, 9421, 933, 8076, 2414, + 2921, 3906, 2279, 4858, 3629, 8129, 746, 1260, 8400, + 3559, 6490, 9630, 9771, 2054, 6368, 4294, 59, 1873, + 8913, 6245, 1629, 8022, 7105, 7514, 7136, 705, 7667, + 598, 5268, 8736, 8255, 3780, 9168, 855, 891, 5701, + 1088, 5566, 9882, 6656, 6138, 1874, 1409, 2224, 9088, + 5917, 2460, 1920, 8927, 3689, 9209, 2413, 7383, 5435, + 1164, 4656, 1145, 9858, 1276, 1566, 4567, 6670, 8019, + 77, 4463, 9904, 1378, 6274, 6811, 7718, 6698, 6133, + 1817, 5750, 9902, 8179, 7778, 9310, 3186, 2889, 6830, + 3732, 9331, 80, 4061, 9725, 2461, 365, 5801, 7394, + 1364, 7916, 4458, 9726, 1384, 8245, 4422, 1021, 8441, + 8635, 9830, 6237, 8296, 9417, 1755, 5150, 8755, 573, + 6482, 6077, 4649, 5929, 3346, 2879, 2567, 9845, 9617, + 7984, 1423, 20, 9389, 7465, 1857, 6041, 81, 3754, + 3443, 6292, 8270, 777, 2884, 6004, 5456, 7516, 4457, + 2299, 9549, 3283, 8815, 7872, 6555, 9764, 3765, 3453, + 3743, 966, 1219, 6368, 7586, 7598, 502, 5829, 8794, + 426, 1814, 9137, 7261, 9402, 2755, 22, 4007, 5976, + 44, 8313, 1900, 8548, 3115, 7074, 1012, 8434, 6097, + 7768, 5045, 3244, 2886, 1954, 541, 5982, 5878, 5999, + 4618, 9582, 4847, 7320, 9835, 5610, 1305, 8884, 6363, + 6478, 839, 5162, 9728, 5499, 1528, 5689, 3183, 7129, + 2488, 4352, 928, 8220, 1382, 3689, 8735, 2195, 9727, + 771, 6448, 8679, 1573, 5638, 2952, 6446, 4389, 8540, + 4711, 8193, 7117, 2236, 5186, 1639, 2122, 9434, 4600, + 6263, 5259, 9546, 979, 6158, 4405, 3909, 3341, 4623, + 4859, 8302, 4199, 6104, 2336, 9661, 8228, 7721, 9724, + 9263, 6964, 9466, 2887, 6905, 9477, 5133, 4559, 533, + 2189, 788, 3859, 2643, 7749, 7409, 8072, 2895, 4935, + 3245, 8702, 7464, 5201, 7428, 20, 9781, 4720, 1259, + 9047, 5360, 2447, 5616, 727, 910, 8863, 9370, 6403, + 6786, 6714, 5315, 5973, 2498, 2170, 4132, 8638, 1133, + 5562, 8468, 1057, 3841, 5439, 5548, 364, 3661, 4774, + 5311, 8396, 2293, 3242, 6666, 9193, 5519, 7639, 9475, + 3599, 3244, 2790, 1000, 3648, 8996, 802, 8274, 5880, + 3912, 7828, 7630, 88, 1120, 4545, 662, 8080, 8593, + 2010, 2001, 5509, 1694, 1693, 9968, 188, 5356, 2456, + 5653, 1100, 9884, 8606, 5679, 1447, 3651, 3652, 4270, + 4855, 4081, 6277, 7485, 8206, 943, 9917, 8822, 8258, + 
9850, 3678, 6542, 8398, 7438, 4466, 4288, 3429, 1080, + 3756, 619, 8368, 2943, 5279, 4660, 7902, 7718, 2077, + 4700, 6225, 6141, 3048, 5583, 5672, 4237, 9220, 6789, + 8111, 8353, 6234, 7938, 6008, 8305, 6276, 297, 9555, + 5707, 1837, 2743, 2829, 3042, 4370, 7388, 411, 7128, + 8906, 2135, 2863, 8798, 9039, 1464, 3844, 5839, 7296, + 2402, 5778, 6693, 1937, 753, 3696, 9554, 7527, 2292, + 1677, 6534, 2459, 4599, 1635, 4597, 5136, 8224, 1999, + 8607, 1884, 7826, 4374, 308, 5413, 6530, 4467, 4307, + 1520, 5060, 5277, 5551, 2836, 8168, 4583, 6885, 7397, + 5086, 2166, 9687, 6985, 782, 3027, 3733, 5636, 3222, + 7058, 3585, 5104, 9529, 2646, 8519, 9774, 6453, 1089, + 4686, 8416, 5947, 3116, 5175, 8420, 7199, 8094, 9363, + 5867, 977, 7349, 1138, 8895, 8226, 6801, 1193, 7834, + 270, 5919, 3616, 9787, 5305, 1621, 9604, 7543, 6013, + 1208, 3164, 2863, 3384, 9301, 2260, 8403, 6920, 6068, + 5765, 6466, 4, 435, 2987, 5160, 1159, 5240, 7665, + 7666, 3496, 5890, 4087, 2953, 5050, 4444, 9104, 2002, + 2471, 8127, 5828, 6674, 2055, 338, 861, 9115, 7320, + 5987, 2695, 7595, 1847, 5469, 5364, 131, 9179, 7641, + 950, 6916, 2403, 4215, 3306, 372, 8899, 6595, 9275, + 9627, 6167, 7426, 8582, 6983, 176, 5262, 6497, 2411, + 5764, 3749, 2701, 4911, 6555, 8022, 3195, 6448, 88, + 3416, 512, 1476, 3734, 9571, 5879, 8312, 9489, 4113, + 9717, 371, 1290, 2779, 4391, 6234, 2856, 3490, 1940, + 9905, 1525, 994, 3707, 929, 5995, 5360, 7010, 5650, + 8880, 951, 6174, 5544, 4925, 6103, 5678, 4768, 3960, + 2589, 2623, 2714, 910, 2437, 5839, 6147, 8012, 6910, + 3761, 374, 344, 2233, 8593, 1259, 791, 3808, 9483, + 3657, 9667, 1835, 262, 6735, 8158, 1317, 3345, 593, + 9900, 4517, 43, 7724, 7756, 8051, 2464, 2382, 9551, + 482, 4411, 9983, 8638, 3604, 5980, 6568, 498, 884, + 9577, 1204, 8829, 2143, 8709, 5032, 8066, 4401, 2060, + 5903, 5267, 8698, 5847, 5943, 7454, 1160, 8740, 9725, + 1839, 5588, 4536, 704, 5542, 6133, 6713, 1191, 4737, + 9399, 4710, 6386, 7107, 8125, 104, 1397, 1398, 5125, + 3131, 2162, 8266, 675, 2153, 3931, 9784, 7854, 3433, + 1129, 6921, 4688, 2676, 7539, 2908, 159, 4716, 1238, + 3207, 1609, 6019, 8773, 1910, 6472, 7137, 9086, 8335, + 696, 7521, 729, 751, 3579, 5449, 8063, 6228, 2390, + 7376, 4836, 8275, 7154, 3050, 2576, 1473, 8343, 2610, + 7357, 3457, 410, 3334, 1324, 5461, 8837, 3534, 9245, + 7741, 3589, 1706, 4804, 9563, 1793, 2579, 2082, 8159, + 5577, 6674, 1427, 6129, 9640, 4057, 5553, 3970, 7282, + 9122, 3611, 9436, 8975, 6478, 8150, 1984, 2504, 4171, + 7462, 3550, 3994, 7069, 7248, 4676, 4904, 254, 8661, + 438, 7287, 9117, 2771, 7521, 2100, 6071, 7282, 9158, + 6042, 7770, 8275, 8513, 4678, 3085, 5802, 2613, 9576, + 3338, 5128, 2956, 544, 9210, 477, 521, 1403, 2461, + 9392, 8604, 1695, 5759, 8186, 8157, 9519, 6561, 9941, + 3767, 3185, 2012, 3106, 9408, 3943, 6742, 6790, 3649, + 2308, 3780, 5471, 9861, 2028, 220, 5009, 5916, 1418, + 7499, 7134, 5634, 2982, 2159, 5007, 6379, 1644, 1585, + 655, 2063, 509, 4928, 4419, 5224, 6728, 1096, 5532, + 7183, 8187, 9649, 6985, 850, 2224, 2125, 5250, 2441, + 2746, 5010, 9796, 7532, 4370, 5895, 816, 964, 9747, + 142, 9638, 2350, 593, 1466, 2605, 5324, 3252, 9029, + 8442, 930, 4343, 9939, 6148, 3972, 3871, 2071, 3696, + 8203, 3524, 2625, 1316, 1646, 6666, 529, 6459, 989, + 5600, 8399, 1285, 8481, 7590, 29, 8642, 6338, 8582, + 762, 2654, 667, 5523, 3773, 4019, 1164, 6494, 6234, + 2145, 3360, 6578, 3672, 4448, 9661, 2981, 1353, 5538, + 5490, 6854, 5331, 4417, 8287, 9963, 1542, 9640, 5716, + 2268, 5783, 3828, 2802, 3884, 6132, 9548, 5307, 8501, + 2200]), + values=tensor([0.9419, 
0.1524, 0.6218, 0.8687, 0.7562, 0.7677, 0.3979, + 0.9838, 0.6947, 0.4933, 0.1301, 0.2967, 0.8344, 0.5735, + 0.9834, 0.9958, 0.7271, 0.2308, 0.3268, 0.5366, 0.3581, + 0.1114, 0.7784, 0.4911, 0.1357, 0.4900, 0.8647, 0.2578, + 0.0862, 0.4816, 0.6605, 0.6509, 0.5868, 0.1831, 0.6063, + 0.9861, 0.3291, 0.5581, 0.6974, 0.9991, 0.9943, 0.2495, + 0.7625, 0.5401, 0.7095, 0.7088, 0.7284, 0.6546, 0.9421, + 0.9648, 0.1593, 0.7038, 0.5084, 0.3162, 0.5113, 0.2822, + 0.9359, 0.1984, 0.8329, 0.6721, 0.0232, 0.0056, 0.2146, + 0.4983, 0.4969, 0.9219, 0.1003, 0.5479, 0.3558, 0.7477, + 0.6115, 0.2532, 0.6522, 0.9992, 0.4170, 0.8240, 0.1605, + 0.8999, 0.9924, 0.9449, 0.9921, 0.5871, 0.5119, 0.1034, + 0.9235, 0.5163, 0.6439, 0.4713, 0.2523, 0.6757, 0.9199, + 0.4059, 0.3534, 0.1535, 0.6795, 0.7794, 0.2738, 0.5412, + 0.6588, 0.5045, 0.2670, 0.8080, 0.1721, 0.4191, 0.2213, + 0.1215, 0.5896, 0.0276, 0.4051, 0.0606, 0.7105, 0.3300, + 0.4106, 0.7694, 0.3908, 0.3300, 0.1863, 0.9625, 0.9792, + 0.7697, 0.4510, 0.7286, 0.3380, 0.0833, 0.2386, 0.2422, + 0.2105, 0.7153, 0.0126, 0.8818, 0.6452, 0.7286, 0.3076, + 0.4200, 0.4807, 0.0386, 0.4801, 0.6112, 0.2188, 0.3858, + 0.6821, 0.3694, 0.1392, 0.6327, 0.5410, 0.3909, 0.7069, + 0.1312, 0.6250, 0.0480, 0.8549, 0.5135, 0.3555, 0.7340, + 0.5811, 0.9088, 0.1032, 0.1489, 0.2774, 0.2296, 0.4495, + 0.1487, 0.9200, 0.5402, 0.0202, 0.1078, 0.2516, 0.0530, + 0.5956, 0.3365, 0.3965, 0.5546, 0.6031, 0.2539, 0.5550, + 0.2459, 0.4351, 0.8734, 0.9592, 0.8627, 0.9645, 0.9888, + 0.0276, 0.4813, 0.8493, 0.9115, 0.1586, 0.0765, 0.6361, + 0.9602, 0.6837, 0.3314, 0.7764, 0.3676, 0.5147, 0.2879, + 0.7930, 0.3046, 0.9136, 0.3619, 0.8794, 0.7331, 0.7243, + 0.5387, 0.8338, 0.0246, 0.0508, 0.2668, 0.8956, 0.4222, + 0.5078, 0.8587, 0.1868, 0.2753, 0.0682, 0.7738, 0.7703, + 0.8842, 0.9299, 0.7459, 0.0157, 0.4309, 0.8926, 0.2625, + 0.8518, 0.5178, 0.5814, 0.1994, 0.2824, 0.5835, 0.0235, + 0.5291, 0.9061, 0.9534, 0.0623, 0.2911, 0.7065, 0.7912, + 0.3245, 0.5227, 0.8463, 0.5932, 0.9274, 0.1290, 0.1611, + 0.8205, 0.7713, 0.8694, 0.8998, 0.4222, 0.2481, 0.8594, + 0.5966, 0.3578, 0.7561, 0.5612, 0.9178, 0.8585, 0.6036, + 0.8103, 0.3002, 0.8454, 0.2318, 0.3982, 0.7981, 0.3046, + 0.4387, 0.6304, 0.9387, 0.6685, 0.0773, 0.6743, 0.7880, + 0.2724, 0.2127, 0.2579, 0.6638, 0.3307, 0.4078, 0.4611, + 0.2895, 0.5460, 0.4074, 0.2836, 0.0658, 0.5132, 0.7184, + 0.0086, 0.2446, 0.0312, 0.3818, 0.3470, 0.9987, 0.8897, + 0.4455, 0.4200, 0.2110, 0.0170, 0.2269, 0.7015, 0.4058, + 0.9687, 0.0766, 0.1367, 0.6704, 0.0153, 0.3212, 0.0176, + 0.4644, 0.2227, 0.7908, 0.0782, 0.2919, 0.8451, 0.5976, + 0.2473, 0.4640, 0.5505, 0.5809, 0.2989, 0.1468, 0.9500, + 0.3546, 0.0940, 0.8787, 0.7992, 0.7177, 0.5259, 0.8114, + 0.0774, 0.0167, 0.2662, 0.3178, 0.3026, 0.5005, 0.5937, + 0.5755, 0.4120, 0.1624, 0.4236, 0.9572, 0.1195, 0.8325, + 0.5589, 0.0781, 0.5692, 0.2217, 0.7537, 0.5700, 0.7103, + 0.7619, 0.2773, 0.0054, 0.4076, 0.0417, 0.9429, 0.6666, + 0.3130, 0.3752, 0.5850, 0.5828, 0.3646, 0.6606, 0.8812, + 0.8699, 0.6326, 0.1294, 0.3652, 0.1063, 0.3989, 0.2296, + 0.2561, 0.2038, 0.4856, 0.2115, 0.2677, 0.5171, 0.1168, + 0.0241, 0.6735, 0.7119, 0.7784, 0.2445, 0.7568, 0.4921, + 0.7068, 0.3231, 0.3789, 0.2044, 0.4127, 0.4285, 0.4637, + 0.9276, 0.2871, 0.0196, 0.6856, 0.2086, 0.8049, 0.5030, + 0.1551, 0.8769, 0.7722, 0.6771, 0.2796, 0.5061, 0.0514, + 0.2069, 0.4519, 0.6146, 0.0401, 0.7884, 0.8928, 0.1753, + 0.7534, 0.6318, 0.5825, 0.1058, 0.3381, 0.6609, 0.8592, + 0.1686, 0.2129, 0.2167, 0.4535, 0.8999, 0.2380, 0.4550, + 0.3194, 0.8715, 0.6805, 
0.2977, 0.1180, 0.4624, 0.3632, + 0.0919, 0.3659, 0.3251, 0.3164, 0.3733, 0.2507, 0.2841, + 0.6267, 0.7667, 0.1658, 0.2106, 0.0127, 0.6038, 0.9089, + 0.4336, 0.4538, 0.2798, 0.7512, 0.9252, 0.8121, 0.4723, + 0.7860, 0.4615, 0.2088, 0.9765, 0.2109, 0.9559, 0.5039, + 0.9927, 0.1420, 0.6840, 0.8661, 0.4242, 0.4038, 0.3666, + 0.7210, 0.8886, 0.9968, 0.3170, 0.9198, 0.6665, 0.7540, + 0.6145, 0.7453, 0.6836, 0.0079, 0.6941, 0.0866, 0.0031, + 0.2017, 0.0065, 0.0384, 0.9053, 0.7335, 0.9115, 0.2172, + 0.2567, 0.1205, 0.4361, 0.8961, 0.5130, 0.3240, 0.8733, + 0.7390, 0.2479, 0.0175, 0.9269, 0.1145, 0.4122, 0.4735, + 0.1742, 0.2389, 0.1684, 0.3168, 0.1924, 0.1236, 0.4187, + 0.4623, 0.9975, 0.8678, 0.2889, 0.3517, 0.7091, 0.3332, + 0.9999, 0.5389, 0.2389, 0.1080, 0.5787, 0.9800, 0.8558, + 0.0630, 0.0153, 0.1451, 0.4288, 0.5911, 0.4383, 0.7038, + 0.9016, 0.9862, 0.5629, 0.0913, 0.5216, 0.0805, 0.8630, + 0.5475, 0.9317, 0.0118, 0.3983, 0.7472, 0.6659, 0.5661, + 0.8338, 0.0422, 0.5743, 0.1600, 0.5976, 0.2809, 0.1125, + 0.4399, 0.7717, 0.4561, 0.7256, 0.1621, 0.0820, 0.8140, + 0.0876, 0.8240, 0.8347, 0.2352, 0.3461, 0.5639, 0.8348, + 0.2411, 0.2358, 0.1766, 0.8525, 0.3548, 0.9754, 0.6037, + 0.4568, 0.0645, 0.2813, 0.1774, 0.7397, 0.4609, 0.8191, + 0.3010, 0.4836, 0.9086, 0.3092, 0.8842, 0.1688, 0.5204, + 0.4125, 0.8982, 0.5637, 0.8321, 0.1566, 0.9723, 0.6668, + 0.9762, 0.3312, 0.8818, 0.1159, 0.9962, 0.9987, 0.3133, + 0.5044, 0.1359, 0.4905, 0.8045, 0.5800, 0.3357, 0.6064, + 0.3912, 0.1467, 0.8994, 0.9625, 0.4036, 0.9552, 0.3418, + 0.0859, 0.0108, 0.4910, 0.3283, 0.5233, 0.5467, 0.3156, + 0.8281, 0.7394, 0.2690, 0.9744, 0.6414, 0.6867, 0.8652, + 0.1889, 0.1663, 0.1405, 0.4948, 0.0840, 0.6008, 0.5443, + 0.4155, 0.4281, 0.2116, 0.4298, 0.8508, 0.1255, 0.4732, + 0.9480, 0.7508, 0.4759, 0.8987, 0.4490, 0.0439, 0.9165, + 0.1613, 0.1714, 0.5982, 0.2093, 0.4536, 0.6336, 0.0431, + 0.5964, 0.6823, 0.9029, 0.0699, 0.3806, 0.0497, 0.7357, + 0.1694, 0.0286, 0.6138, 0.3334, 0.6829, 0.9751, 0.2773, + 0.8377, 0.5984, 0.0468, 0.8367, 0.4483, 0.8092, 0.3294, + 0.4584, 0.5056, 0.4772, 0.6300, 0.2983, 0.0246, 0.2509, + 0.0554, 0.8305, 0.0779, 0.2888, 0.3687, 0.6734, 0.4892, + 0.6403, 0.8594, 0.3164, 0.2151, 0.6638, 0.4833, 0.2775, + 0.8670, 0.3530, 0.3362, 0.0667, 0.0039, 0.2958, 0.4611, + 0.8969, 0.4913, 0.5028, 0.2409, 0.4242, 0.4639, 0.6091, + 0.4342, 0.7204, 0.4175, 0.9777, 0.0265, 0.9729, 0.7386, + 0.0448, 0.2047, 0.9187, 0.1396, 0.6324, 0.1331, 0.4202, + 0.8775, 0.7537, 0.5040, 0.3992, 0.6867, 0.4340, 0.9415, + 0.1384, 0.3816, 0.3686, 0.8722, 0.6098, 0.9188, 0.7850, + 0.8822, 0.4949, 0.0392, 0.6331, 0.0948, 0.8931, 0.8620, + 0.1994, 0.6761, 0.0973, 0.2537, 0.4015, 0.3324, 0.4775, + 0.8446, 0.8239, 0.8515, 0.2371, 0.2909, 0.6105, 0.7363, + 0.1806, 0.2458, 0.4418, 0.9172, 0.1027, 0.3665, 0.7822, + 0.8082, 0.3933, 0.0369, 0.7168, 0.8492, 0.4962, 0.1940, + 0.6763, 0.0429, 0.4543, 0.6495, 0.8034, 0.5999, 0.2784, + 0.9127, 0.6404, 0.0464, 0.5851, 0.6435, 0.9494, 0.8839, + 0.8651, 0.1417, 0.8417, 0.9900, 0.9699, 0.7335, 0.6386, + 0.9418, 0.5679, 0.2693, 0.6588, 0.1472, 0.9922, 0.3796, + 0.8868, 0.1143, 0.8328, 0.3848, 0.1063, 0.7051, 0.4142, + 0.7126, 0.4323, 0.6031, 0.5715, 0.2782, 0.6345, 0.3381, + 0.8477, 0.7664, 0.2106, 0.5225, 0.1520, 0.3002, 0.7031, + 0.9238, 0.4058, 0.1139, 0.1480, 0.9764, 0.4270, 0.2600, + 0.9333, 0.2794, 0.6468, 0.6423, 0.1334, 0.0294, 0.5212, + 0.8696, 0.2381, 0.9706, 0.6807, 0.1174, 0.1191, 0.9581, + 0.2531, 0.5576, 0.1977, 0.3556, 0.0861, 0.6770, 0.3195, + 0.5369, 0.7386, 0.0730, 0.9206, 0.7720, 
0.6605, 0.4471, + 0.0248, 0.9663, 0.1581, 0.8167, 0.5993, 0.8833, 0.0649, + 0.0611, 0.8784, 0.2448, 0.4650, 0.9544, 0.9479, 0.1518, + 0.0437, 0.9857, 0.3792, 0.0676, 0.3550, 0.6595, 0.0586, + 0.4581, 0.2105, 0.0507, 0.6273, 0.4066, 0.6590, 0.4389, + 0.7618, 0.7600, 0.5286, 0.2619, 0.2052, 0.1600, 0.6812, + 0.2262, 0.1949, 0.2984, 0.1847, 0.7597, 0.8327, 0.3651, + 0.6843, 0.6989, 0.5748, 0.8175, 0.9017, 0.5440, 0.9000, + 0.8242, 0.3536, 0.6788, 0.4515, 0.6560, 0.7385, 0.1301, + 0.6121, 0.9098, 0.6224, 0.9725, 0.4414, 0.2350, 0.6079, + 0.7587, 0.7617, 0.6482, 0.2990, 0.3517, 0.1562, 0.3369, + 0.3247, 0.5464, 0.7908, 0.7880, 0.4408, 0.8916, 0.2765, + 0.4959, 0.0729, 0.2794, 0.6614, 0.8756, 0.3920, 0.6706, + 0.6288, 0.4740, 0.9397, 0.6608, 0.2196, 0.0473, 0.5088, + 0.0610, 0.9391, 0.7491, 0.7894, 0.7759, 0.1647, 0.6719, + 0.4100, 0.2124, 0.9261, 0.1591, 0.3181, 0.7087, 0.7252, + 0.2191, 0.3099, 0.6307, 0.3026, 0.4511, 0.2841, 0.7426, + 0.5396, 0.7791, 0.6038, 0.1869, 0.7513, 0.3030, 0.5002, + 0.3280, 0.7800, 0.6094, 0.9292, 0.6542, 0.8481, 0.9804, + 0.8992, 0.6104, 0.2123, 0.5077, 0.0578, 0.7525, 0.6613, + 0.4603, 0.7024, 0.4804, 0.7634, 0.3067, 0.6368]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8762, 0.1751, 0.9607, ..., 0.4579, 0.7370, 0.8447]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.015316009521484375 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 68555 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.085230827331543} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5549, 4311, 3241, 5542, 7620, 1988, 9532, 4491, 7500, + 4898, 3159, 1258, 6868, 335, 9155, 7115, 9938, 3068, + 4095, 7787, 2876, 5241, 3578, 3454, 7989, 7487, 5119, + 9460, 589, 1959, 8998, 1257, 4995, 3427, 1603, 8939, + 5925, 8459, 1032, 7868, 2079, 7633, 5392, 1940, 403, + 1541, 5211, 2249, 6169, 1536, 600, 3741, 1855, 2078, + 9467, 4376, 9886, 140, 7626, 2658, 4941, 2404, 1127, + 9326, 3998, 7203, 3548, 3845, 7541, 6009, 5880, 6410, + 8529, 8533, 3424, 4363, 2126, 3596, 3335, 3666, 9687, + 545, 5692, 9731, 4506, 9205, 961, 2972, 1083, 2410, + 9819, 5938, 1114, 738, 303, 4876, 608, 9920, 1043, + 2303, 3667, 1648, 2528, 6777, 9350, 5247, 2380, 2637, + 2051, 2983, 6090, 8443, 1485, 8407, 3852, 1200, 3501, + 2069, 8613, 311, 7821, 1110, 1230, 771, 2825, 2694, + 2831, 4517, 1613, 165, 4836, 9488, 5628, 1028, 8908, + 4232, 8787, 7402, 313, 3216, 3478, 8545, 1053, 6740, + 4075, 1942, 9361, 3819, 870, 2654, 7876, 1915, 6408, + 4819, 6068, 6165, 3715, 4592, 7086, 5068, 2369, 932, + 4895, 6064, 8025, 9284, 5199, 6337, 8148, 1045, 5028, + 501, 6243, 2043, 2137, 8791, 7547, 7501, 4276, 8489, + 8133, 1778, 2390, 2799, 8745, 7560, 5221, 552, 9809, + 1029, 9837, 3177, 7973, 1442, 4595, 6945, 371, 2047, + 2944, 2906, 6498, 4469, 5406, 5052, 7367, 933, 4447, + 2055, 2312, 5395, 3952, 712, 7636, 4991, 7542, 5071, + 4454, 4646, 9187, 2398, 7029, 242, 1303, 2080, 397, + 3296, 3588, 6026, 6025, 7104, 6897, 6113, 7634, 6734, + 4805, 8677, 4803, 2213, 4955, 4012, 3203, 2270, 8248, + 7769, 3668, 3425, 2655, 6439, 8554, 9308, 0, 7354, + 7253, 2174, 4104, 4444, 2290, 9428, 6233, 1866, 7587, + 3521, 719, 6920, 4896, 9532, 6571, 2230, 8694, 7233, + 1233, 636, 3293, 9047, 165, 7097, 9322, 6128, 7810, + 2149, 2341, 6666, 5136, 9684, 2136, 4547, 1610, 9629, + 4739, 7954, 7857, 5262, 7124, 5253, 140, 5788, 9593, + 808, 975, 2558, 2499, 1436, 2400, 9017, 178, 43, + 8816, 5056, 9697, 5824, 9857, 5803, 7232, 1952, 6200, + 4884, 4917, 8256, 7548, 7004, 880, 6839, 5048, 1085, + 6600, 2059, 1674, 219, 2299, 6249, 353, 8171, 7355, + 8683, 7342, 8967, 524, 9396, 4980, 9008, 5424, 845, + 8258, 3895, 3766, 7658, 6607, 7066, 4890, 6141, 7787, + 112, 7248, 7565, 9127, 1557, 5497, 8834, 1027, 911, + 5589, 8373, 3154, 5794, 7004, 4726, 2964, 5607, 7111, + 2769, 9679, 355, 4618, 1775, 9626, 5328, 1984, 2250, + 2090, 8585, 4747, 7137, 4998, 6039, 3795, 740, 1205, + 1619, 4997, 8185, 8214, 4583, 4093, 9636, 7799, 1874, + 7419, 1200, 8029, 7740, 3308, 5274, 3619, 2357, 899, + 1736, 4072, 415, 4070, 2248, 4814, 2946, 8794, 9445, + 6337, 9432, 4000, 7916, 9175, 9859, 7482, 7810, 6689, + 2981, 9118, 2494, 5004, 7307, 6762, 3643, 6961, 6980, + 2197, 3909, 3079, 438, 6429, 795, 73, 1026, 979, + 8384, 3755, 7922, 411, 4115, 3082, 528, 3768, 7563, + 6437, 4847, 8646, 6464, 6868, 4170, 7703, 1806, 6256, + 5263, 3894, 4106, 4889, 7034, 8952, 1641, 6916, 6302, + 5046, 1726, 68, 4468, 19, 2245, 4083, 3913, 7807, + 2460, 7156, 5834, 7509, 1278, 2636, 9991, 6660, 356, + 7389, 3994, 2360, 5328, 3156, 3549, 1186, 4409, 3178, + 6057, 4081, 3933, 3196, 3467, 6356, 957, 7072, 6056, + 1517, 9932, 8236, 3059, 3518, 7725, 5374, 253, 6103, + 9053, 5451, 3843, 8745, 3424, 2609, 5589, 2776, 2372, + 7125, 6217, 61, 5880, 7087, 9463, 3614, 9996, 51, + 4653, 539, 4104, 147, 7735, 4532, 9631, 6016, 4264, + 7189, 2124, 5360, 5338, 3217, 2096, 2341, 5867, 8239, + 2211, 6275, 6944, 5486, 3694, 8641, 2576, 6016, 3141, + 1, 2692, 
9626, 549, 6556, 2800, 9006, 8047, 8733, + 8001, 8352, 8195, 6963, 4005, 7067, 3512, 4209, 9954, + 4445, 3646, 8905, 4713, 3526, 9003, 7451, 3437, 219, + 1503, 8839, 7882, 345, 6970, 4236, 2208, 5512, 8927, + 9951, 3787, 1901, 5577, 1088, 9910, 3298, 9382, 5591, + 9483, 7472, 9189, 60, 7374, 1511, 1324, 9001, 4608, + 5517, 5004, 5129, 7089, 1404, 7860, 699, 3528, 2226, + 1012, 1401, 8016, 4790, 2726, 6318, 8383, 1848, 6756, + 4556, 3436, 1808, 8982, 5, 3514, 278, 3993, 4715, + 213, 7894, 2785, 3705, 6527, 5420, 6147, 9936, 913, + 5219, 3050, 7066, 9881, 9308, 7445, 5575, 7043, 4840, + 1951, 3401, 1568, 3685, 4270, 197, 386, 2602, 7271, + 2689, 4939, 7521, 9720, 7213, 4350, 8459, 8461, 1245, + 7252, 516, 6211, 441, 2127, 7899, 3164, 1347, 8168, + 1675, 6484, 2975, 6772, 4995, 6275, 7968, 3036, 4636, + 8825, 8994, 4389, 3513, 2409, 7963, 8754, 1654, 9887, + 8968, 1585, 6186, 7899, 7431, 811, 7807, 9296, 9932, + 5090, 5043, 7917, 3680, 8993, 116, 1344, 2602, 6020, + 4409, 3996, 63, 8531, 2822, 1480, 6085, 4912, 9387, + 6576, 1534, 4978, 9762, 1093, 8697, 8777, 5374, 5581, + 7441, 5246, 5968, 1885, 7897, 2975, 4762, 3125, 2488, + 4768, 9657, 6999, 6361, 7556, 908, 266, 4115, 2029, + 6944, 9578, 5384, 6233, 3049, 7347, 2202, 2797, 6742, + 7052, 1750, 5660, 4425, 8800, 5195, 122, 475, 666, + 2091, 3067, 1595, 3773, 1980, 7656, 6000, 4626, 3490, + 8811, 2139, 7508, 1457, 7313, 1673, 2130, 5629, 3544, + 2197, 3987, 2008, 3661, 1996, 5074, 9955, 6834, 9013, + 2610, 61, 6725, 217, 1994, 3874, 7317, 2288, 7046, + 2138, 5148, 3006, 483, 4607, 9598, 1030, 6362, 5693, + 2535, 138, 933, 5602, 5934, 6478, 1869, 48, 1045, + 9153, 7656, 3285, 6621, 9725, 8888, 3411, 4535, 631, + 3055, 1164, 1333, 7342, 831, 3411, 7166, 1880, 6529, + 2636, 9466, 1237, 4517, 8045, 7254, 7616, 5855, 9583, + 196, 4085, 2628, 4971, 2455, 2757, 942, 9895, 7185, + 5273, 4956, 6187, 2588, 7001, 6614, 8140, 5545, 5915, + 3587, 7135, 877, 154, 7304, 9965, 8199, 8045, 2731, + 8307, 7114, 1128, 5242, 3191, 3118, 320, 7211, 7380, + 157, 3611, 5108, 657, 6019, 7596, 6799, 3299, 1717, + 9758, 7744, 1030, 3550, 7635, 8504, 4578, 9913, 3711, + 5056, 5815, 3389, 1558, 3687, 6256, 8891, 9548, 1985, + 544, 1687, 4379, 3053, 2465, 2164, 3167, 3551, 215, + 8811, 373, 6584, 1223, 6182, 3783, 6774, 4105, 4491, + 1691, 2126, 6272, 8878, 5848, 4533, 6683, 2810, 9188, + 1364, 9342, 4748, 6153, 8042, 3464, 8775, 4628, 1962, + 8094, 9114, 1543, 1701, 94, 2356, 9228, 5976, 4299, + 8430, 2075, 4774, 4949, 6412, 5320, 4378, 5355, 8428, + 7672, 9514, 6847, 4732, 9295, 1375, 9467, 4035, 4088, + 8389, 2352, 882, 7081, 3147, 6746, 9986, 2742, 7502, + 309, 8743, 4501, 8545, 4706, 1495, 254, 3125, 2344, + 6817]), + values=tensor([2.8840e-01, 4.4183e-01, 1.3721e-01, 8.1096e-01, + 4.1966e-01, 2.5994e-01, 3.7630e-01, 9.8367e-01, + 4.1568e-01, 2.1859e-01, 3.0153e-01, 3.2703e-01, + 7.8175e-01, 6.1920e-01, 9.9573e-01, 4.3919e-01, + 3.2517e-01, 5.0981e-01, 3.5351e-03, 9.0989e-01, + 6.7808e-01, 3.7911e-02, 4.0401e-01, 9.3286e-01, + 9.7730e-01, 9.8442e-01, 7.7077e-01, 7.3182e-01, + 4.8449e-01, 5.2800e-01, 8.8383e-02, 3.0085e-01, + 4.8370e-01, 1.7831e-01, 1.6859e-01, 4.7312e-01, + 1.7065e-01, 3.5337e-01, 9.7315e-01, 2.8214e-02, + 8.7500e-01, 7.8286e-01, 1.8664e-01, 2.4130e-01, + 1.7065e-01, 8.6065e-01, 6.3363e-01, 9.7779e-01, + 8.0725e-01, 9.9678e-01, 9.1147e-02, 1.8360e-01, + 4.8954e-01, 8.8423e-01, 3.4003e-01, 9.0388e-01, + 4.6861e-01, 1.7894e-01, 1.1389e-01, 4.6669e-02, + 7.7478e-01, 5.4584e-01, 8.6114e-01, 7.9349e-01, + 1.6974e-01, 4.3423e-01, 5.6248e-02, 4.5439e-01, 
+ 5.0219e-01, 9.2351e-01, 2.6826e-01, 3.8333e-01, + 9.0307e-01, 7.2118e-01, 7.7457e-01, 1.6562e-01, + 5.4879e-01, 3.3296e-01, 1.4218e-01, 4.3012e-01, + 7.4206e-01, 6.6912e-01, 1.5511e-03, 8.2296e-01, + 8.0557e-01, 1.0555e-01, 2.0850e-01, 4.0289e-01, + 2.4296e-02, 1.4722e-01, 5.2017e-01, 9.0282e-01, + 8.1596e-01, 4.9496e-01, 8.8061e-01, 5.2639e-01, + 7.3326e-01, 5.2719e-01, 2.9275e-02, 2.2815e-01, + 8.3470e-01, 9.7604e-01, 3.8310e-01, 8.6305e-01, + 8.2162e-01, 3.7334e-01, 1.6936e-01, 9.0413e-01, + 2.3517e-01, 8.0772e-01, 6.7720e-01, 7.1841e-01, + 9.7242e-01, 5.7008e-01, 5.2057e-01, 3.3845e-01, + 2.9399e-01, 1.0539e-01, 7.1592e-01, 6.1387e-01, + 8.5398e-01, 9.5324e-01, 1.1651e-01, 2.6991e-01, + 3.6423e-01, 4.1949e-01, 7.9844e-01, 6.9822e-01, + 4.6081e-01, 2.8289e-01, 9.0768e-01, 6.5808e-01, + 9.1438e-01, 7.0070e-01, 4.9658e-01, 5.4039e-01, + 5.5886e-01, 9.2690e-01, 3.3048e-02, 5.1747e-01, + 2.0464e-01, 5.6253e-01, 2.9711e-01, 1.8511e-01, + 8.2642e-01, 8.8128e-01, 4.4636e-01, 4.8270e-01, + 9.8079e-01, 2.7729e-01, 6.7355e-01, 2.7171e-01, + 8.3580e-01, 6.7101e-02, 7.8897e-01, 1.8105e-01, + 8.3328e-01, 9.0933e-01, 2.5938e-02, 8.9679e-01, + 6.6176e-01, 8.6587e-01, 3.0818e-01, 5.4161e-01, + 3.3177e-01, 8.1727e-01, 7.3641e-01, 4.8683e-01, + 6.6534e-01, 2.4498e-01, 1.0098e-01, 5.1743e-01, + 6.1353e-01, 5.1134e-02, 1.3179e-01, 2.8674e-01, + 5.6531e-02, 9.1340e-01, 9.4780e-02, 7.7755e-01, + 9.7366e-01, 9.4401e-01, 9.1712e-01, 7.3659e-02, + 4.5041e-01, 6.0334e-01, 1.7813e-01, 5.8013e-01, + 4.4415e-01, 9.2524e-02, 5.5202e-01, 2.1924e-01, + 8.9768e-02, 8.7639e-01, 3.4106e-01, 8.9521e-01, + 4.3736e-01, 9.6161e-01, 3.0577e-01, 7.2673e-01, + 5.6922e-01, 9.1136e-01, 1.6725e-01, 4.6346e-01, + 5.0976e-01, 5.1232e-01, 7.7783e-01, 9.9038e-01, + 6.3570e-02, 7.5502e-01, 4.3562e-01, 1.6386e-01, + 4.4897e-01, 5.7603e-01, 9.5300e-01, 8.4625e-01, + 7.8549e-01, 7.0669e-01, 8.8842e-01, 4.6161e-01, + 1.0886e-01, 9.9797e-01, 7.7704e-01, 4.4745e-01, + 5.5348e-01, 7.9816e-01, 7.5833e-01, 9.0172e-01, + 6.5109e-01, 3.3934e-01, 9.6855e-01, 3.9876e-01, + 2.7865e-01, 2.3499e-01, 8.3360e-01, 1.2160e-01, + 4.8235e-01, 5.5299e-01, 3.3846e-01, 2.5896e-01, + 1.1429e-01, 5.2068e-01, 4.0576e-01, 1.0800e-01, + 1.1673e-01, 1.9148e-01, 5.3974e-01, 6.1500e-02, + 7.3862e-01, 4.0671e-02, 1.4820e-01, 3.2336e-01, + 5.6085e-01, 2.9156e-01, 2.7142e-01, 5.1415e-01, + 7.3523e-01, 9.0248e-01, 7.5671e-01, 4.9535e-01, + 7.9571e-01, 7.4643e-01, 7.8957e-01, 4.1524e-01, + 4.5184e-01, 6.5274e-02, 3.1191e-01, 7.9157e-01, + 2.7545e-02, 9.2301e-01, 9.9453e-01, 2.4729e-01, + 4.2412e-01, 3.6611e-01, 9.3792e-01, 9.9284e-01, + 5.1807e-01, 7.8474e-01, 1.1153e-01, 2.7679e-01, + 5.1358e-01, 4.8805e-02, 8.5891e-01, 2.0175e-01, + 1.4589e-01, 9.3637e-02, 5.3722e-01, 7.6807e-01, + 6.8017e-01, 5.2222e-01, 5.5103e-01, 3.4505e-01, + 9.7557e-01, 9.3842e-02, 6.2360e-01, 7.1001e-01, + 5.3117e-01, 1.7962e-01, 3.6912e-01, 6.7579e-01, + 1.0135e-01, 6.7780e-01, 1.8499e-01, 9.2320e-01, + 2.0157e-01, 8.5156e-01, 6.2098e-01, 8.7820e-01, + 5.7171e-01, 5.8453e-01, 9.9270e-01, 2.8080e-01, + 1.8687e-02, 8.7414e-01, 1.7689e-01, 2.9132e-01, + 1.9790e-01, 3.0467e-01, 3.5501e-01, 5.7991e-01, + 5.4925e-01, 7.5190e-01, 8.8675e-01, 1.4479e-01, + 8.1647e-01, 8.5290e-01, 9.0125e-01, 1.6470e-01, + 9.9914e-01, 9.4545e-01, 1.9357e-02, 1.8278e-01, + 5.5496e-02, 8.5773e-01, 6.8462e-01, 2.8210e-01, + 9.6506e-01, 7.4561e-01, 9.4014e-01, 6.6528e-02, + 3.4719e-01, 5.7601e-02, 4.6877e-01, 8.0231e-01, + 8.7718e-01, 7.6647e-01, 5.4947e-01, 6.9274e-02, + 2.6517e-01, 7.9298e-01, 7.4926e-01, 3.8426e-01, + 
8.0665e-01, 8.1102e-01, 3.9734e-02, 4.8270e-01, + 2.7288e-01, 8.5640e-01, 9.0925e-01, 9.8226e-01, + 9.3178e-01, 1.3909e-01, 4.3400e-01, 3.4806e-01, + 5.9163e-02, 4.1550e-01, 5.7548e-01, 9.3041e-01, + 5.5564e-01, 8.0414e-01, 4.8781e-01, 8.2555e-01, + 9.5966e-01, 1.1761e-01, 5.4737e-01, 4.5517e-01, + 2.3339e-01, 3.3553e-01, 5.0046e-01, 9.1915e-01, + 2.3385e-01, 8.0373e-01, 8.4412e-01, 5.9031e-01, + 3.0581e-02, 3.1007e-01, 7.9126e-01, 8.8832e-01, + 1.8686e-01, 6.4693e-01, 7.3473e-01, 3.9851e-01, + 2.5276e-01, 7.5542e-01, 5.4021e-01, 7.7912e-01, + 8.0820e-01, 8.3534e-01, 3.3797e-01, 3.1584e-01, + 3.1064e-01, 4.4400e-01, 3.2848e-01, 7.7776e-01, + 8.5737e-01, 3.0283e-01, 5.6505e-01, 1.0186e-01, + 5.9803e-02, 5.5969e-01, 3.5781e-01, 6.9147e-01, + 8.0119e-01, 9.8082e-01, 8.5171e-01, 7.8022e-01, + 3.9640e-01, 9.0353e-01, 9.0919e-01, 9.3838e-01, + 9.7094e-01, 4.5839e-01, 4.4371e-01, 7.0686e-01, + 2.4518e-01, 1.3004e-01, 6.9358e-01, 4.2710e-01, + 6.7932e-01, 4.6308e-01, 8.1491e-01, 1.7000e-01, + 3.0062e-01, 7.7898e-01, 6.1484e-01, 4.0302e-01, + 9.5065e-01, 8.1030e-01, 9.3582e-03, 2.2491e-01, + 6.8609e-03, 1.2118e-01, 2.9171e-03, 9.3268e-01, + 7.7786e-01, 7.5091e-01, 9.5908e-01, 6.1182e-03, + 1.1122e-01, 2.2413e-01, 2.2139e-01, 7.5067e-01, + 9.7482e-01, 6.9779e-01, 3.9209e-02, 4.0097e-01, + 4.2371e-01, 6.4973e-01, 9.5894e-01, 8.5062e-02, + 5.8204e-01, 6.8671e-01, 5.1221e-01, 4.0473e-01, + 7.2026e-01, 1.2949e-01, 2.9672e-01, 9.2554e-01, + 2.0556e-01, 9.6803e-01, 3.5591e-01, 9.3032e-01, + 5.6400e-01, 1.5262e-01, 4.7225e-01, 9.9073e-01, + 8.9405e-01, 9.6195e-01, 9.9314e-01, 2.3625e-01, + 5.1743e-01, 1.1974e-01, 9.0363e-01, 8.6600e-01, + 8.0337e-01, 9.6559e-01, 2.6426e-01, 3.4869e-01, + 2.5160e-01, 9.6810e-02, 9.6466e-02, 4.3477e-01, + 5.1657e-01, 2.9493e-04, 6.7096e-01, 6.4018e-01, + 3.3778e-01, 9.4351e-02, 1.1026e-01, 5.3561e-01, + 3.6227e-01, 3.7598e-01, 8.5462e-01, 5.9498e-02, + 9.2387e-01, 7.9899e-02, 3.4728e-01, 1.0305e-01, + 4.6347e-01, 2.2397e-01, 1.3323e-01, 5.8151e-01, + 7.8188e-01, 7.5203e-01, 2.3923e-01, 8.7475e-01, + 3.0265e-01, 5.6830e-01, 1.2483e-02, 8.4118e-01, + 3.6248e-01, 7.1321e-02, 3.8929e-01, 7.3302e-01, + 2.9683e-02, 3.3447e-04, 3.4344e-01, 7.5402e-01, + 3.3043e-01, 3.5817e-01, 1.0547e-01, 5.2731e-01, + 3.5698e-01, 5.2197e-01, 2.1774e-02, 8.4981e-01, + 3.1434e-01, 4.3985e-01, 3.3751e-01, 8.2447e-01, + 9.0353e-01, 3.5193e-01, 9.9614e-01, 5.6655e-01, + 4.4749e-01, 1.7903e-02, 5.5607e-01, 5.5539e-01, + 4.6695e-01, 1.7420e-01, 6.8828e-01, 5.0139e-01, + 2.8580e-02, 2.6173e-01, 9.4751e-01, 1.9893e-01, + 8.5899e-01, 5.1191e-01, 8.7673e-01, 9.1152e-01, + 2.6874e-01, 4.6410e-01, 8.2734e-01, 6.1037e-01, + 1.6229e-01, 2.8812e-01, 8.8686e-01, 7.5397e-01, + 5.7413e-01, 7.9610e-01, 2.7033e-02, 9.3147e-01, + 1.6480e-01, 8.9252e-01, 6.1944e-01, 2.5837e-01, + 6.7630e-01, 4.4219e-02, 7.2525e-01, 7.9502e-01, + 4.1810e-01, 2.8494e-01, 5.8932e-02, 5.1582e-01, + 1.8311e-01, 4.1472e-01, 4.7158e-02, 4.5326e-01, + 3.3751e-01, 9.6958e-01, 8.9434e-01, 1.4553e-01, + 3.6062e-01, 5.6479e-01, 3.4382e-01, 5.0365e-01, + 6.0594e-01, 6.7806e-02, 1.6118e-01, 8.6570e-01, + 4.0710e-01, 2.4099e-01, 6.6341e-01, 7.4617e-01, + 6.2105e-01, 9.9820e-01, 2.9075e-01, 7.0006e-01, + 2.9853e-01, 2.3004e-02, 1.5685e-01, 3.4050e-01, + 2.7630e-01, 7.4879e-01, 3.9249e-01, 8.5814e-01, + 2.9174e-01, 7.5799e-01, 7.4573e-01, 1.4019e-01, + 6.8477e-01, 3.3049e-01, 5.9636e-01, 3.2202e-01, + 1.4347e-02, 5.9175e-01, 1.7777e-01, 8.4410e-01, + 7.7933e-01, 4.2032e-01, 5.5168e-01, 9.9130e-03, + 7.4954e-02, 1.4053e-01, 6.4507e-01, 8.5374e-01, + 
1.0934e-01, 8.0720e-01, 6.0686e-01, 5.7376e-01, + 4.4972e-01, 7.2877e-01, 8.2845e-01, 5.5441e-01, + 1.8664e-03, 6.9860e-01, 2.3945e-01, 4.9831e-01, + 8.3538e-01, 2.4266e-01, 8.0127e-02, 3.2858e-01, + 7.2302e-02, 3.0432e-01, 3.2243e-01, 2.3603e-02, + 1.9193e-01, 7.7673e-01, 5.5788e-01, 6.9296e-01, + 1.6739e-01, 6.4205e-01, 4.9813e-01, 5.3317e-01, + 4.5772e-01, 1.0799e-01, 8.3504e-01, 7.2689e-01, + 6.4582e-01, 3.5702e-01, 2.6752e-01, 5.0306e-01, + 2.5481e-01, 4.2631e-01, 7.2228e-01, 6.4601e-01, + 4.7649e-01, 7.0767e-02, 5.7134e-01, 9.5651e-01, + 4.8798e-01, 3.5081e-01, 8.1035e-01, 2.2577e-01, + 8.9774e-01, 9.8753e-01, 3.2962e-01, 5.9843e-01, + 4.4286e-01, 8.9385e-01, 8.4978e-01, 9.8700e-01, + 6.9001e-01, 1.0624e-01, 5.2676e-01, 2.1199e-01, + 7.7498e-01, 9.6944e-01, 8.7014e-01, 2.3069e-01, + 4.9641e-01, 3.0380e-01, 2.2540e-01, 6.8408e-01, + 1.6711e-01, 8.1494e-01, 5.3474e-01, 4.7828e-01, + 1.1990e-01, 8.3419e-01, 5.3938e-01, 9.8791e-01, + 6.0981e-01, 2.8847e-01, 2.3305e-01, 2.4698e-01, + 4.4778e-01, 2.1873e-02, 1.3042e-03, 3.7369e-01, + 9.7526e-01, 1.0262e-01, 7.6608e-01, 9.2036e-01, + 8.3530e-01, 6.3795e-01, 4.3448e-01, 2.4097e-01, + 3.5225e-01, 9.3811e-02, 5.2934e-01, 4.5612e-01, + 7.4354e-01, 8.7226e-01, 8.6587e-01, 8.9845e-01, + 3.8513e-01, 3.8296e-02, 9.1128e-01, 8.1981e-01, + 1.4811e-01, 9.0155e-01, 6.4841e-01, 4.4068e-01, + 9.8688e-01, 9.9709e-01, 4.2270e-01, 6.9585e-01, + 2.7717e-01, 5.9451e-02, 9.5034e-01, 6.1396e-01, + 5.8699e-01, 5.6065e-01, 9.7675e-01, 7.6908e-01, + 4.3459e-01, 2.6185e-01, 3.3918e-01, 8.8475e-01, + 9.9469e-01, 5.1719e-01, 1.2536e-02, 7.9953e-01, + 5.8694e-02, 6.8508e-02, 1.3971e-02, 9.1346e-01, + 5.7906e-01, 8.7957e-01, 8.7184e-01, 7.9957e-01, + 6.6740e-01, 8.8910e-01, 2.3765e-01, 1.5346e-01, + 8.2845e-01, 6.1739e-02, 5.9085e-01, 9.9421e-01, + 1.1093e-01, 8.7460e-01, 7.9463e-01, 3.9043e-01, + 4.2107e-01, 9.3567e-01, 5.7116e-01, 9.5562e-01, + 1.4046e-01, 9.5840e-01, 7.2657e-01, 8.9020e-01, + 3.1909e-02, 6.2548e-01, 7.6863e-01, 2.7453e-01, + 5.2984e-01, 2.9711e-01, 4.6701e-01, 5.1813e-02, + 3.6967e-01, 6.3356e-01, 6.8065e-01, 7.5516e-01, + 4.5039e-01, 3.1351e-01, 6.9119e-01, 6.1214e-01, + 5.8392e-01, 8.1954e-02, 5.3556e-02, 6.7371e-01, + 4.6413e-01, 7.7358e-01, 3.2483e-01, 3.2597e-01, + 6.2412e-01, 9.0713e-02, 1.2413e-01, 5.7210e-01, + 8.7935e-01, 2.8617e-01, 3.7725e-01, 2.9843e-01, + 9.4261e-01, 3.6302e-01, 4.7908e-02, 6.4665e-01, + 9.3962e-01, 8.7430e-02, 9.9035e-01, 8.1030e-01, + 9.0691e-02, 7.7773e-01, 5.4666e-02, 4.8836e-01, + 5.3026e-01, 5.7899e-01, 8.0673e-01, 2.0044e-01, + 1.9934e-01, 2.8598e-01, 7.4524e-01, 1.2927e-01, + 9.2808e-01, 9.2045e-01, 3.8985e-01, 5.5247e-01, + 5.7464e-01, 5.7580e-02, 1.4284e-01, 4.7990e-02, + 4.3020e-01, 8.2560e-01, 8.1836e-01, 5.9019e-01, + 8.9344e-01, 8.7895e-01, 3.3983e-01, 5.4129e-01, + 9.0138e-01, 6.8974e-01, 8.1632e-01, 3.1266e-01, + 9.1417e-01, 6.4220e-01, 9.6898e-01, 4.5624e-01, + 7.5150e-01, 8.2712e-01, 3.1443e-01, 3.8511e-01, + 3.0069e-01, 5.4418e-01, 7.7085e-01, 7.1004e-01, + 9.0850e-01, 2.8569e-01, 2.8229e-01, 8.2281e-01, + 3.9838e-01, 5.0278e-01, 8.3763e-02, 9.3758e-01, + 9.9311e-01, 1.9914e-01, 5.0422e-01, 1.3989e-01, + 6.7582e-01, 9.3789e-01, 8.2716e-03, 8.7877e-01, + 4.9822e-01, 2.0486e-01, 2.8594e-01, 6.3313e-01, + 3.0356e-01, 4.3013e-01, 6.1970e-01, 3.6020e-01, + 9.6547e-01, 6.1613e-01, 6.4540e-01, 7.4650e-01, + 6.1805e-01, 8.3533e-01, 8.5825e-01, 1.7147e-02, + 1.2759e-01, 3.6667e-01, 3.3350e-01, 6.2218e-01, + 6.5278e-01, 2.5488e-01, 6.1015e-01, 9.2496e-01, + 8.2075e-02, 6.5632e-01, 7.0234e-01, 7.5650e-01, + 
2.0221e-01, 1.0267e-01, 1.1805e-03, 2.4251e-01, + 9.2195e-01, 7.5362e-01, 9.2722e-01, 8.3398e-01, + 2.6393e-01, 4.1074e-01, 5.8470e-01, 4.7939e-01, + 9.9882e-01, 1.5441e-01, 2.9312e-01, 6.3439e-01, + 5.9746e-01, 4.5249e-01, 7.6343e-01, 6.6838e-01, + 2.6124e-01, 6.3346e-01, 8.9367e-01, 5.0087e-01, + 7.9238e-01, 3.7238e-01, 3.5710e-01, 3.5431e-01, + 4.2045e-01, 2.7507e-01, 6.1473e-01, 9.4034e-01, + 4.8498e-01, 5.8045e-01, 7.1439e-01, 2.6115e-01, + 8.6137e-01, 8.0278e-01, 3.6885e-01, 6.1547e-01, + 3.4485e-01, 8.5126e-01, 9.5304e-01, 2.8787e-01, + 4.7971e-01, 1.8011e-01, 9.1167e-01, 4.1315e-01, + 8.2359e-02, 1.3913e-01, 4.7291e-01, 9.7543e-01, + 4.5087e-01, 4.6225e-01, 8.0787e-01, 7.3710e-02, + 2.2523e-01, 2.4144e-01, 2.3303e-01, 3.6756e-01, + 6.4454e-01, 6.0330e-01, 3.3924e-01, 8.9650e-01, + 3.5891e-01, 7.6267e-02, 8.8689e-01, 1.9321e-01, + 9.8928e-01, 1.6353e-01, 9.4096e-01, 1.6797e-01, + 5.8028e-01, 9.4390e-01, 1.7118e-01, 7.7669e-01, + 1.5969e-01, 1.8190e-01, 7.3046e-01, 7.7660e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4288, 0.6749, 0.2273, ..., 0.2100, 0.8485, 0.9690]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 5.085230827331543 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 141552 -ss 10000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.798607587814331} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([3726, 7095, 4684, 2020, 836, 1255, 7107, 2910, 7765, + 3618, 8004, 8576, 4555, 2122, 3631, 3042, 6509, 854, + 5117, 7246, 7468, 4038, 6190, 4788, 6872, 1487, 1270, + 5894, 3327, 4557, 2868, 4373, 8003, 1239, 2993, 4213, + 4878, 8600, 9710, 4771, 8192, 5937, 3800, 5340, 2757, + 7379, 410, 167, 7636, 1048, 2438, 5415, 3035, 8972, + 8984, 1069, 8135, 9320, 5730, 3547, 7645, 6319, 8708, + 4794, 9059, 642, 937, 2015, 2851, 401, 5086, 3408, + 3671, 8044, 1220, 336, 7855, 7629, 930, 2750, 1987, + 9765, 3121, 9993, 829, 9850, 1435, 2979, 5489, 3796, + 4432, 9879, 9991, 2308, 6643, 3959, 9751, 638, 1357, + 4879, 6697, 7016, 2005, 3140, 2355, 1476, 7190, 9157, + 586, 8925, 8359, 7415, 6315, 5275, 7818, 5569, 2433, + 2192, 6764, 8455, 1408, 5314, 6201, 4055, 2442, 1110, + 8696, 5368, 8279, 443, 7835, 3502, 7362, 1330, 775, + 9823, 1517, 4273, 5726, 452, 5679, 1189, 861, 4804, + 3949, 9279, 5272, 3517, 1449, 1605, 1487, 9465, 4077, + 1506, 7136, 8000, 3536, 5423, 6093, 3644, 4106, 578, + 4413, 8078, 3736, 1232, 8907, 611, 6360, 4917, 5521, + 8890, 8861, 4942, 8337, 4548, 4613, 1027, 8700, 6909, + 958, 3790, 2989, 8565, 911, 5630, 6072, 1354, 9245, + 1994, 8552, 1111, 4146, 7268, 4683, 8105, 4378, 9360, + 6653, 8808, 5006, 4328, 5559, 3943, 4417, 7458, 2312, + 116, 5416, 6906, 1303, 5319, 7254, 8596, 3357, 6527, + 8398, 8704, 8210, 823, 4023, 1928, 9697, 2295, 5245, + 8970, 1573, 3796, 4232, 3566, 6537, 4098, 575, 7372, + 366, 6301, 8492, 2459, 7103, 5364, 5367, 4901, 9480, + 278, 5400, 7252, 9075, 5325, 1810, 7648, 324, 2329, + 8, 3227, 8008, 8213, 741, 1909, 4083, 9465, 4927, + 4913, 2356, 2450, 8390, 5767, 5848, 2748, 9467, 3514, + 4253, 2231, 1205, 4867, 6627, 4336, 5974, 7755, 3790, + 221, 6230, 4840, 1706, 8664, 7040, 1728, 4302, 8186, + 7438, 2847, 378, 6914, 377, 2565, 6264, 2119, 4808, + 8605, 8197, 5489, 4175, 5779, 6350, 3342, 6552, 3810, + 3380, 2149, 6531, 4018, 9838, 7109, 6420, 2314, 8276, + 8549, 3270, 8204, 1510, 99, 9666, 881, 9634, 6348, + 5206, 5717, 6256, 9140, 2894, 55, 7420, 6909, 8907, + 9934, 7455, 256, 6416, 3264, 139, 9653, 9104, 120, + 4401, 5358, 2182, 3677, 5160, 5409, 8367, 6563, 4808, + 6260, 2681, 8624, 9491, 9352, 8644, 896, 2705, 7108, + 2245, 6339, 6089, 2552, 2272, 1181, 6435, 563, 8137, + 1459, 7439, 2939, 7035, 867, 6590, 8094, 5026, 4423, + 4770, 7804, 7887, 761, 2953, 9927, 2073, 9826, 6475, + 9197, 874, 7559, 607, 1174, 7682, 3265, 6020, 6386, + 5306, 8737, 3204, 8378, 33, 4395, 8617, 1260, 266, + 2349, 7503, 7582, 3161, 3035, 9262, 8314, 2986, 5564, + 2126, 6464, 7360, 5226, 3325, 1629, 8069, 7692, 7794, + 4182, 2653, 7141, 4784, 5328, 9808, 7261, 1363, 8658, + 9123, 3810, 2620, 9643, 1362, 3134, 7384, 7296, 5209, + 6036, 7751, 7232, 736, 6172, 4771, 6452, 9215, 8373, + 5885, 7654, 3268, 8433, 3826, 1191, 4084, 6339, 3322, + 2278, 9573, 752, 1314, 9267, 6354, 6143, 35, 7253, + 1096, 5121, 7153, 7760, 1966, 6253, 3182, 9955, 1848, + 5514, 2387, 9249, 933, 7029, 186, 2702, 6241, 8466, + 1489, 3322, 5487, 5027, 3772, 1949, 4952, 2162, 9526, + 1901, 4241, 3525, 396, 3698, 5708, 9938, 9685, 4339, + 2948, 209, 6838, 1145, 5442, 7328, 5423, 6485, 1415, + 7421, 1724, 9251, 933, 6123, 4946, 5577, 7971, 9846, + 5274, 2640, 4773, 3064, 9692, 9463, 1802, 2781, 6650, + 4562, 991, 4382, 6448, 7016, 6327, 1852, 5951, 1025, + 7839, 4680, 3410, 4015, 1478, 3864, 5981, 6067, 988, + 2131, 7857, 2239, 375, 7733, 1730, 8801, 1787, 6496, + 
1921, 8121, 6115, 2130, 7431, 2124, 1900, 3400, 3093, + 2034, 8191, 8469, 3659, 2287, 1005, 9444, 6813, 6960, + 3470, 8573, 1717, 3039, 2558, 4596, 4880, 4472, 7262, + 5686, 6336, 2458, 2937, 9212, 6313, 6778, 8810, 9277, + 928, 1704, 6201, 8834, 7081, 7414, 5902, 2525, 6986, + 4360, 144, 652, 1234, 5719, 484, 2482, 5766, 4021, + 4006, 6231, 1726, 4378, 3881, 6930, 8631, 3608, 4246, + 75, 8644, 7232, 8461, 8874, 3157, 9402, 5888, 3476, + 3276, 9018, 5589, 8991, 2990, 7947, 2115, 8859, 6426, + 1953, 3665, 3323, 3802, 5811, 8129, 4154, 4205, 5918, + 2014, 5679, 7180, 635, 5363, 4653, 1897, 3249, 6982, + 678, 2068, 9858, 2498, 9298, 382, 644, 7370, 2418, + 8329, 5775, 8507, 7061, 9961, 4182, 287, 2359, 5919, + 8303, 3664, 2342, 4924, 2424, 8480, 1042, 4823, 3665, + 6553, 4427, 9234, 7736, 4966, 935, 6525, 8224, 4229, + 311, 9329, 7173, 6315, 1432, 9741, 5248, 4687, 1042, + 3524, 3804, 6875, 7125, 7161, 3553, 195, 2516, 9006, + 3709, 8580, 57, 2078, 1413, 9823, 6439, 5603, 3945, + 4348, 2406, 4610, 6846, 5704, 6321, 9398, 4952, 1295, + 738, 1206, 9447, 1290, 7633, 6389, 2334, 2897, 916, + 3131, 3092, 1954, 7671, 2004, 8145, 1483, 4726, 1441, + 9782, 9165, 5535, 6096, 1612, 3414, 7082, 3936, 7625, + 116, 6121, 9652, 4539, 5938, 5687, 6276, 1101, 1633, + 6510, 9508, 5331, 5376, 9358, 2109, 2723, 7391, 4555, + 2365, 6412, 3706, 6937, 5899, 2966, 8411, 6231, 9569, + 2849, 6339, 4931, 6281, 2950, 4951, 973, 1306, 6662, + 8109, 5095, 9232, 9872, 443, 8647, 7506, 8104, 8756, + 9936, 686, 4823, 2682, 3740, 3137, 923, 9193, 8324, + 4020, 1627, 6261, 9840, 304, 4142, 1317, 5620, 8274, + 3594, 572, 272, 1611, 5193, 347, 416, 442, 6874, + 8019, 1732, 1301, 6279, 8538, 2543, 4932, 6155, 3059, + 3059, 4499, 9216, 1298, 4595, 9507, 1178, 62, 1121, + 2558, 3614, 1585, 9579, 2799, 9710, 2223, 2927, 6219, + 1930, 1468, 3790, 7212, 2706, 1924, 107, 5637, 3603, + 8882, 6287, 8286, 8516, 3947, 4130, 4658, 4839, 7052, + 5285, 9433, 2234, 8542, 478, 1484, 8586, 9538, 899, + 6365, 3422, 1846, 4728, 2876, 7973, 1396, 5267, 8176, + 201, 8298, 5151, 9727, 4317, 6828, 3372, 2733, 3465, + 1632, 1856, 2211, 4987, 6790, 3737, 9878, 8936, 3754, + 4531, 7583, 4001, 4220, 5379, 7676, 9403, 5140, 5873, + 7088, 8903, 2296, 2171, 9394, 1645, 9743, 4507, 9272, + 2477, 8320, 7982, 3456, 5868, 9002, 5854, 753, 6254, + 29, 7699, 1129, 8991, 4514, 3222, 4361, 7378, 3047, + 9627, 6076, 4663, 3561, 5068, 9330, 6752, 8500, 1957, + 9072, 5138, 8731, 1067, 4146, 1100, 4646, 1538, 8687, + 8992, 6143, 9364, 7127, 9769, 2434, 7246, 2305, 7691, + 4140, 198, 608, 1773, 6904, 7875, 2110, 7719, 282, + 4734, 4064, 2354, 1595, 7483, 992, 7146, 9086, 2639, + 12, 4356, 2592, 9295, 444, 3493, 9301, 5028, 8522, + 1719]), + values=tensor([0.8600, 0.5616, 0.7072, 0.3175, 0.5762, 0.8898, 0.7613, + 0.6444, 0.1843, 0.4299, 0.9701, 0.9160, 0.9531, 0.4047, + 0.1402, 0.4728, 0.0686, 0.5538, 0.1157, 0.4298, 0.8825, + 0.1354, 0.8143, 0.5913, 0.0480, 0.3359, 0.5316, 0.6111, + 0.8620, 0.8841, 0.0457, 0.2228, 0.0040, 0.0383, 0.6512, + 0.9426, 0.9603, 0.6779, 0.3036, 0.2358, 0.8460, 0.4662, + 0.3348, 0.0817, 0.8411, 0.0542, 0.1380, 0.3042, 0.1998, + 0.2999, 0.4133, 0.8056, 0.7595, 0.1627, 0.9131, 0.7276, + 0.6625, 0.1656, 0.8899, 0.5354, 0.8460, 0.1291, 0.5991, + 0.0805, 0.4357, 0.4936, 0.4524, 0.3191, 0.7007, 0.0375, + 0.6902, 0.4198, 0.3498, 0.7962, 0.5312, 0.3669, 0.5804, + 0.4017, 0.9393, 0.3637, 0.5425, 0.4731, 0.0877, 0.9094, + 0.7214, 0.9414, 0.9950, 0.2474, 0.5511, 0.8001, 0.0442, + 0.9555, 0.3790, 0.0597, 0.0816, 0.0893, 0.0727, 0.0855, + 0.8418, 0.0900, 
0.4889, 0.1537, 0.1078, 0.3076, 0.1132, + 0.3499, 0.2464, 0.3119, 0.7397, 0.5203, 0.7296, 0.3076, + 0.0908, 0.5180, 0.8820, 0.2955, 0.7721, 0.2443, 0.4412, + 0.0643, 0.1279, 0.8697, 0.6469, 0.3773, 0.3600, 0.5487, + 0.3518, 0.6432, 0.2385, 0.7556, 0.7224, 0.2014, 0.8943, + 0.1852, 0.9558, 0.6299, 0.3671, 0.9896, 0.6391, 0.1768, + 0.0298, 0.7972, 0.7124, 0.7051, 0.9680, 0.5269, 0.0532, + 0.0232, 0.6522, 0.9276, 0.9199, 0.4637, 0.4411, 0.9074, + 0.0961, 0.7283, 0.8054, 0.0583, 0.2110, 0.5960, 0.2328, + 0.9165, 0.5817, 0.5985, 0.0950, 0.4392, 0.8056, 0.8382, + 0.3675, 0.5339, 0.7866, 0.5051, 0.9951, 0.7743, 0.5281, + 0.2536, 0.4500, 0.8310, 0.7085, 0.2637, 0.3306, 0.5037, + 0.3989, 0.2457, 0.9774, 0.4426, 0.8629, 0.9169, 0.4122, + 0.5182, 0.4459, 0.2392, 0.3915, 0.8545, 0.1335, 0.2459, + 0.0435, 0.8607, 0.2092, 0.1152, 0.2264, 0.8530, 0.0610, + 0.5909, 0.8510, 0.9060, 0.1898, 0.9730, 0.5210, 0.5104, + 0.4607, 0.9295, 0.4482, 0.7724, 0.0968, 0.9352, 0.3537, + 0.8985, 0.5128, 0.0067, 0.2582, 0.7387, 0.7979, 0.7165, + 0.3443, 0.7560, 0.5700, 0.8397, 0.1771, 0.8402, 0.3091, + 0.9997, 0.1542, 0.0685, 0.8646, 0.3574, 0.0226, 0.7519, + 0.6524, 0.0072, 0.1165, 0.5114, 0.0917, 0.6314, 0.9212, + 0.5712, 0.7090, 0.0393, 0.1201, 0.8493, 0.2898, 0.8259, + 0.4117, 0.6401, 0.8024, 0.9462, 0.0945, 0.6655, 0.6495, + 0.4999, 0.2014, 0.1871, 0.0303, 0.7432, 0.6428, 0.9133, + 0.2792, 0.0556, 0.1388, 0.0015, 0.6331, 0.2758, 0.8367, + 0.8769, 0.0407, 0.6240, 0.7762, 0.1332, 0.5250, 0.1198, + 0.5519, 0.3094, 0.1346, 0.0647, 0.4742, 0.5731, 0.8318, + 0.2796, 0.5726, 0.3119, 0.9865, 0.9941, 0.5736, 0.5420, + 0.2171, 0.5354, 0.9426, 0.6173, 0.1128, 0.0283, 0.7768, + 0.9444, 0.6839, 0.6100, 0.7668, 0.0394, 0.8433, 0.0499, + 0.7761, 0.2587, 0.7427, 0.8822, 0.5529, 0.7552, 0.8520, + 0.8179, 0.9600, 0.0466, 0.5899, 0.0884, 0.9116, 0.1822, + 0.4571, 0.9885, 0.1750, 0.1838, 0.7352, 0.3448, 0.7858, + 0.2306, 0.7624, 0.8788, 0.3283, 0.6481, 0.6658, 0.3363, + 0.3090, 0.0330, 0.1595, 0.9700, 0.3235, 0.4224, 0.0628, + 0.0681, 0.2514, 0.1198, 0.5855, 0.6518, 0.4456, 0.5212, + 0.5722, 0.1297, 0.2828, 0.7370, 0.0325, 0.1233, 0.8781, + 0.1529, 0.3239, 0.4868, 0.4461, 0.0181, 0.9715, 0.6327, + 0.0701, 0.9788, 0.7771, 0.3935, 0.2992, 0.4526, 0.2252, + 0.9195, 0.1472, 0.6317, 0.3246, 0.5631, 0.7653, 0.3490, + 0.2479, 0.8149, 0.9333, 0.5535, 0.7152, 0.7420, 0.8502, + 0.5933, 0.4264, 0.3490, 0.4412, 0.9800, 0.1953, 0.5818, + 0.4513, 0.1650, 0.3902, 0.4948, 0.7564, 0.9652, 0.1048, + 0.4943, 0.8495, 0.3903, 0.7487, 0.4664, 0.0427, 0.1690, + 0.7279, 0.3082, 0.6150, 0.2912, 0.2094, 0.9971, 0.3723, + 0.7357, 0.3658, 0.4009, 0.9999, 0.6043, 0.9117, 0.7334, + 0.5824, 0.0506, 0.7330, 0.0230, 0.4406, 0.0807, 0.6942, + 0.2245, 0.2485, 0.5748, 0.8330, 0.7840, 0.7060, 0.2348, + 0.0035, 0.5364, 0.0187, 0.6599, 0.9337, 0.6053, 0.1831, + 0.4356, 0.0679, 0.0908, 0.3894, 0.8661, 0.9286, 0.5274, + 0.2198, 0.1337, 0.5968, 0.2616, 0.9505, 0.9894, 0.6963, + 0.4705, 0.2413, 0.0355, 0.0514, 0.8694, 0.1783, 0.6758, + 0.2101, 0.3192, 0.4910, 0.5302, 0.4899, 0.5359, 0.1925, + 0.2343, 0.0188, 0.3525, 0.6944, 0.1704, 0.1291, 0.7572, + 0.5869, 0.4575, 0.5402, 0.3083, 0.7036, 0.7430, 0.3675, + 0.5465, 0.0686, 0.3712, 0.7453, 0.8458, 0.5033, 0.3961, + 0.8154, 0.6781, 0.6892, 0.5106, 0.8864, 0.2284, 0.3130, + 0.1335, 0.9835, 0.3056, 0.3963, 0.3066, 0.8333, 0.1173, + 0.1717, 0.1906, 0.5577, 0.8909, 0.5702, 0.9202, 0.0098, + 0.9034, 0.1638, 0.7871, 0.0602, 0.1046, 0.7108, 0.1792, + 0.5907, 0.9144, 0.7471, 0.9590, 0.3509, 0.9697, 0.2362, + 0.7800, 0.2561, 0.6928, 0.6592, 
0.6715, 0.3954, 0.9109, + 0.8543, 0.2309, 0.9931, 0.3675, 0.9653, 0.9348, 0.8702, + 0.8780, 0.0545, 0.0095, 0.1323, 0.2713, 0.1104, 0.0360, + 0.7644, 0.3254, 0.2794, 0.7010, 0.6247, 0.7531, 0.2093, + 0.4077, 0.7399, 0.9364, 0.9070, 0.3612, 0.1375, 0.2106, + 0.6888, 0.0838, 0.3848, 0.1242, 0.2979, 0.7983, 0.3405, + 0.6686, 0.9463, 0.7854, 0.8071, 0.4226, 0.2682, 0.4134, + 0.9403, 0.2631, 0.8116, 0.3477, 0.7616, 0.0356, 0.3800, + 0.4796, 0.9664, 0.9730, 0.3146, 0.3811, 0.3444, 0.8364, + 0.4593, 0.2380, 0.3529, 0.8631, 0.8304, 0.7458, 0.2703, + 0.8323, 0.2629, 0.9141, 0.3840, 0.6975, 0.0508, 0.9536, + 0.3777, 0.3904, 0.8044, 0.1828, 0.1908, 0.1226, 0.5426, + 0.6712, 0.1985, 0.0131, 0.9193, 0.7070, 0.7883, 0.2011, + 0.5491, 0.4025, 0.3145, 0.0253, 0.2556, 0.4276, 0.9673, + 0.4027, 0.7313, 0.3000, 0.8244, 0.6521, 0.0329, 0.9128, + 0.1532, 0.7920, 0.6837, 0.1740, 0.6020, 0.4749, 0.1878, + 0.6918, 0.3430, 0.8863, 0.0954, 0.2221, 0.0196, 0.9170, + 0.2529, 0.8027, 0.9234, 0.7784, 0.4154, 0.7635, 0.3669, + 0.0588, 0.6705, 0.6605, 0.4654, 0.5060, 0.1257, 0.2869, + 0.1042, 0.7987, 0.7289, 0.1239, 0.7381, 0.3130, 0.2067, + 0.2775, 0.8959, 0.6030, 0.6860, 0.7855, 0.9485, 0.1748, + 0.7731, 0.6068, 0.3676, 0.3474, 0.3768, 0.8882, 0.3443, + 0.2956, 0.9448, 0.4045, 0.2381, 0.2525, 0.2118, 0.1224, + 0.6778, 0.2811, 0.1046, 0.4058, 0.6707, 0.0635, 0.5605, + 0.3192, 0.2209, 0.6866, 0.2181, 0.4822, 0.4068, 0.9604, + 0.5810, 0.0649, 0.5097, 0.1731, 0.2222, 0.3090, 0.8573, + 0.2342, 0.2229, 0.3303, 0.5365, 0.1241, 0.0313, 0.7432, + 0.3762, 0.5850, 0.5199, 0.0453, 0.7767, 0.0550, 0.5852, + 0.6872, 0.3713, 0.6670, 0.1341, 0.8513, 0.2874, 0.5203, + 0.7337, 0.0145, 0.5286, 0.0318, 0.0106, 0.8079, 0.5923, + 0.1886, 0.8415, 0.1474, 0.3036, 0.7165, 0.9739, 0.2050, + 0.0209, 0.3028, 0.8424, 0.4722, 0.9670, 0.8856, 0.7059, + 0.8881, 0.4477, 0.3433, 0.8833, 0.6854, 0.9979, 0.4134, + 0.6004, 0.9236, 0.5985, 0.8387, 0.3869, 0.9226, 0.8476, + 0.2815, 0.5644, 0.8983, 0.8519, 0.4415, 0.5941, 0.4933, + 0.3941, 0.0630, 0.8087, 0.5422, 0.7823, 0.0794, 0.7566, + 0.8197, 0.3825, 0.5699, 0.9195, 0.6894, 0.8052, 0.9060, + 0.8181, 0.6439, 0.3874, 0.8570, 0.7838, 0.5526, 0.0770, + 0.5884, 0.8769, 0.0709, 0.5994, 0.2015, 0.1734, 0.9822, + 0.8660, 0.0374, 0.8196, 0.5480, 0.3620, 0.6184, 0.3047, + 0.1654, 0.6492, 0.4554, 0.0477, 0.9808, 0.2615, 0.3836, + 0.1987, 0.6319, 0.9715, 0.7191, 0.5039, 0.7992, 0.8664, + 0.0815, 0.4937, 0.8132, 0.9455, 0.3586, 0.8318, 0.8930, + 0.1422, 0.4286, 0.9237, 0.7140, 0.1449, 0.5233, 0.9544, + 0.6412, 0.0940, 0.5388, 0.3539, 0.9951, 0.5860, 0.5837, + 0.0540, 0.2181, 0.6021, 0.1546, 0.1818, 0.6251, 0.6991, + 0.5441, 0.4257, 0.3883, 0.4987, 0.7504, 0.8178, 0.0667, + 0.3423, 0.4631, 0.0636, 0.6995, 0.6845, 0.1335, 0.5769, + 0.3366, 0.8741, 0.7448, 0.5158, 0.4520, 0.7964, 0.5429, + 0.3853, 0.3489, 0.7282, 0.3002, 0.4892, 0.4084, 0.7785, + 0.6224, 0.6482, 0.1360, 0.3743, 0.3204, 0.3293, 0.5975, + 0.8635, 0.1468, 0.1420, 0.1950, 0.5502, 0.3099, 0.4665, + 0.2473, 0.9781, 0.4882, 0.8615, 0.4379, 0.3304, 0.9224, + 0.6375, 0.4835, 0.7192, 0.6721, 0.7342, 0.5743, 0.9239, + 0.9773, 0.2213, 0.3478, 0.7032, 0.0574, 0.8041, 0.3853, + 0.1147, 0.0390, 0.9320, 0.5858, 0.0975, 0.5982, 0.1467, + 0.9118, 0.4835, 0.9183, 0.3489, 0.0389, 0.2553, 0.5860, + 0.2665, 0.6450, 0.3179, 0.5337, 0.7581, 0.4409, 0.1177, + 0.0512, 0.8850, 0.2142, 0.1547, 0.5876, 0.8678, 0.5430, + 0.4686, 0.4656, 0.5329, 0.4015, 0.3146, 0.2257, 0.1820, + 0.9287, 0.0585, 0.6678, 0.0868, 0.7648, 0.2970, 0.6893, + 0.7312, 0.6106, 0.1958, 0.8679, 0.9976, 0.5849, 
0.7869, + 0.3363, 0.5231, 0.9619, 0.1567, 0.1143, 0.9307, 0.2825, + 0.3303, 0.5892, 0.7606, 0.7858, 0.0785, 0.3935, 0.0941, + 0.7542, 0.7552, 0.7909, 0.6337, 0.4503, 0.8151, 0.1544, + 0.0385, 0.1762, 0.7871, 0.9429, 0.7065, 0.2556, 0.7752, + 0.3810, 0.5819, 0.5096, 0.6816, 0.5826, 0.0960, 0.1244, + 0.3464, 0.1206, 0.8110, 0.0102, 0.2242, 0.3161]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.0219, 0.9207, 0.3669, ..., 0.7955, 0.2670, 0.3543]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.798607587814331 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([3726, 7095, 4684, 2020, 836, 1255, 7107, 2910, 7765, + 3618, 8004, 8576, 4555, 2122, 3631, 3042, 6509, 854, + 5117, 7246, 7468, 4038, 6190, 4788, 6872, 1487, 1270, + 5894, 3327, 4557, 2868, 4373, 8003, 1239, 2993, 4213, + 4878, 8600, 9710, 4771, 8192, 5937, 3800, 5340, 2757, + 7379, 410, 167, 7636, 1048, 2438, 5415, 3035, 8972, + 8984, 1069, 8135, 9320, 5730, 3547, 7645, 6319, 8708, + 4794, 9059, 642, 937, 2015, 2851, 401, 5086, 3408, + 3671, 8044, 1220, 336, 7855, 7629, 930, 2750, 1987, + 9765, 3121, 9993, 829, 9850, 1435, 2979, 5489, 3796, + 4432, 9879, 9991, 2308, 6643, 3959, 9751, 638, 1357, + 4879, 6697, 7016, 2005, 3140, 2355, 1476, 7190, 9157, + 586, 8925, 8359, 7415, 6315, 5275, 7818, 5569, 2433, + 2192, 6764, 8455, 1408, 5314, 6201, 4055, 2442, 1110, + 8696, 5368, 8279, 443, 7835, 3502, 7362, 1330, 775, + 9823, 1517, 4273, 5726, 452, 5679, 1189, 861, 4804, + 3949, 9279, 5272, 3517, 1449, 1605, 1487, 9465, 4077, + 1506, 7136, 8000, 3536, 5423, 6093, 3644, 4106, 578, + 4413, 8078, 3736, 1232, 8907, 611, 6360, 4917, 5521, + 8890, 8861, 4942, 8337, 4548, 4613, 1027, 8700, 6909, + 958, 3790, 2989, 8565, 911, 5630, 6072, 1354, 9245, + 1994, 8552, 1111, 4146, 7268, 4683, 8105, 4378, 9360, + 6653, 8808, 5006, 4328, 5559, 3943, 4417, 7458, 2312, + 116, 5416, 6906, 1303, 5319, 7254, 8596, 3357, 6527, + 8398, 8704, 8210, 823, 4023, 1928, 9697, 2295, 5245, + 8970, 1573, 3796, 4232, 3566, 6537, 4098, 575, 7372, + 366, 6301, 8492, 2459, 7103, 5364, 5367, 4901, 9480, + 278, 5400, 7252, 9075, 5325, 1810, 7648, 324, 2329, + 8, 3227, 8008, 8213, 741, 1909, 4083, 9465, 4927, + 4913, 2356, 2450, 8390, 5767, 5848, 2748, 9467, 3514, + 4253, 2231, 1205, 4867, 6627, 4336, 5974, 7755, 3790, + 221, 6230, 4840, 1706, 8664, 7040, 1728, 4302, 8186, + 7438, 2847, 378, 6914, 377, 2565, 6264, 2119, 4808, + 8605, 8197, 5489, 4175, 5779, 6350, 3342, 6552, 3810, + 3380, 2149, 6531, 4018, 9838, 7109, 6420, 2314, 8276, + 8549, 3270, 8204, 1510, 99, 9666, 881, 9634, 6348, + 5206, 5717, 6256, 9140, 2894, 55, 7420, 6909, 8907, + 9934, 7455, 256, 6416, 3264, 139, 9653, 9104, 120, + 4401, 5358, 2182, 3677, 5160, 5409, 8367, 6563, 4808, + 6260, 2681, 8624, 9491, 9352, 8644, 896, 2705, 7108, + 2245, 6339, 6089, 2552, 2272, 1181, 6435, 563, 8137, + 1459, 7439, 2939, 7035, 867, 6590, 8094, 5026, 4423, + 4770, 7804, 7887, 761, 2953, 9927, 2073, 9826, 6475, + 9197, 874, 7559, 607, 1174, 7682, 
3265, 6020, 6386, + 5306, 8737, 3204, 8378, 33, 4395, 8617, 1260, 266, + 2349, 7503, 7582, 3161, 3035, 9262, 8314, 2986, 5564, + 2126, 6464, 7360, 5226, 3325, 1629, 8069, 7692, 7794, + 4182, 2653, 7141, 4784, 5328, 9808, 7261, 1363, 8658, + 9123, 3810, 2620, 9643, 1362, 3134, 7384, 7296, 5209, + 6036, 7751, 7232, 736, 6172, 4771, 6452, 9215, 8373, + 5885, 7654, 3268, 8433, 3826, 1191, 4084, 6339, 3322, + 2278, 9573, 752, 1314, 9267, 6354, 6143, 35, 7253, + 1096, 5121, 7153, 7760, 1966, 6253, 3182, 9955, 1848, + 5514, 2387, 9249, 933, 7029, 186, 2702, 6241, 8466, + 1489, 3322, 5487, 5027, 3772, 1949, 4952, 2162, 9526, + 1901, 4241, 3525, 396, 3698, 5708, 9938, 9685, 4339, + 2948, 209, 6838, 1145, 5442, 7328, 5423, 6485, 1415, + 7421, 1724, 9251, 933, 6123, 4946, 5577, 7971, 9846, + 5274, 2640, 4773, 3064, 9692, 9463, 1802, 2781, 6650, + 4562, 991, 4382, 6448, 7016, 6327, 1852, 5951, 1025, + 7839, 4680, 3410, 4015, 1478, 3864, 5981, 6067, 988, + 2131, 7857, 2239, 375, 7733, 1730, 8801, 1787, 6496, + 1921, 8121, 6115, 2130, 7431, 2124, 1900, 3400, 3093, + 2034, 8191, 8469, 3659, 2287, 1005, 9444, 6813, 6960, + 3470, 8573, 1717, 3039, 2558, 4596, 4880, 4472, 7262, + 5686, 6336, 2458, 2937, 9212, 6313, 6778, 8810, 9277, + 928, 1704, 6201, 8834, 7081, 7414, 5902, 2525, 6986, + 4360, 144, 652, 1234, 5719, 484, 2482, 5766, 4021, + 4006, 6231, 1726, 4378, 3881, 6930, 8631, 3608, 4246, + 75, 8644, 7232, 8461, 8874, 3157, 9402, 5888, 3476, + 3276, 9018, 5589, 8991, 2990, 7947, 2115, 8859, 6426, + 1953, 3665, 3323, 3802, 5811, 8129, 4154, 4205, 5918, + 2014, 5679, 7180, 635, 5363, 4653, 1897, 3249, 6982, + 678, 2068, 9858, 2498, 9298, 382, 644, 7370, 2418, + 8329, 5775, 8507, 7061, 9961, 4182, 287, 2359, 5919, + 8303, 3664, 2342, 4924, 2424, 8480, 1042, 4823, 3665, + 6553, 4427, 9234, 7736, 4966, 935, 6525, 8224, 4229, + 311, 9329, 7173, 6315, 1432, 9741, 5248, 4687, 1042, + 3524, 3804, 6875, 7125, 7161, 3553, 195, 2516, 9006, + 3709, 8580, 57, 2078, 1413, 9823, 6439, 5603, 3945, + 4348, 2406, 4610, 6846, 5704, 6321, 9398, 4952, 1295, + 738, 1206, 9447, 1290, 7633, 6389, 2334, 2897, 916, + 3131, 3092, 1954, 7671, 2004, 8145, 1483, 4726, 1441, + 9782, 9165, 5535, 6096, 1612, 3414, 7082, 3936, 7625, + 116, 6121, 9652, 4539, 5938, 5687, 6276, 1101, 1633, + 6510, 9508, 5331, 5376, 9358, 2109, 2723, 7391, 4555, + 2365, 6412, 3706, 6937, 5899, 2966, 8411, 6231, 9569, + 2849, 6339, 4931, 6281, 2950, 4951, 973, 1306, 6662, + 8109, 5095, 9232, 9872, 443, 8647, 7506, 8104, 8756, + 9936, 686, 4823, 2682, 3740, 3137, 923, 9193, 8324, + 4020, 1627, 6261, 9840, 304, 4142, 1317, 5620, 8274, + 3594, 572, 272, 1611, 5193, 347, 416, 442, 6874, + 8019, 1732, 1301, 6279, 8538, 2543, 4932, 6155, 3059, + 3059, 4499, 9216, 1298, 4595, 9507, 1178, 62, 1121, + 2558, 3614, 1585, 9579, 2799, 9710, 2223, 2927, 6219, + 1930, 1468, 3790, 7212, 2706, 1924, 107, 5637, 3603, + 8882, 6287, 8286, 8516, 3947, 4130, 4658, 4839, 7052, + 5285, 9433, 2234, 8542, 478, 1484, 8586, 9538, 899, + 6365, 3422, 1846, 4728, 2876, 7973, 1396, 5267, 8176, + 201, 8298, 5151, 9727, 4317, 6828, 3372, 2733, 3465, + 1632, 1856, 2211, 4987, 6790, 3737, 9878, 8936, 3754, + 4531, 7583, 4001, 4220, 5379, 7676, 9403, 5140, 5873, + 7088, 8903, 2296, 2171, 9394, 1645, 9743, 4507, 9272, + 2477, 8320, 7982, 3456, 5868, 9002, 5854, 753, 6254, + 29, 7699, 1129, 8991, 4514, 3222, 4361, 7378, 3047, + 9627, 6076, 4663, 3561, 5068, 9330, 6752, 8500, 1957, + 9072, 5138, 8731, 1067, 4146, 1100, 4646, 1538, 8687, + 8992, 6143, 9364, 7127, 9769, 2434, 7246, 2305, 7691, + 
4140, 198, 608, 1773, 6904, 7875, 2110, 7719, 282, + 4734, 4064, 2354, 1595, 7483, 992, 7146, 9086, 2639, + 12, 4356, 2592, 9295, 444, 3493, 9301, 5028, 8522, + 1719]), + values=tensor([0.8600, 0.5616, 0.7072, 0.3175, 0.5762, 0.8898, 0.7613, + 0.6444, 0.1843, 0.4299, 0.9701, 0.9160, 0.9531, 0.4047, + 0.1402, 0.4728, 0.0686, 0.5538, 0.1157, 0.4298, 0.8825, + 0.1354, 0.8143, 0.5913, 0.0480, 0.3359, 0.5316, 0.6111, + 0.8620, 0.8841, 0.0457, 0.2228, 0.0040, 0.0383, 0.6512, + 0.9426, 0.9603, 0.6779, 0.3036, 0.2358, 0.8460, 0.4662, + 0.3348, 0.0817, 0.8411, 0.0542, 0.1380, 0.3042, 0.1998, + 0.2999, 0.4133, 0.8056, 0.7595, 0.1627, 0.9131, 0.7276, + 0.6625, 0.1656, 0.8899, 0.5354, 0.8460, 0.1291, 0.5991, + 0.0805, 0.4357, 0.4936, 0.4524, 0.3191, 0.7007, 0.0375, + 0.6902, 0.4198, 0.3498, 0.7962, 0.5312, 0.3669, 0.5804, + 0.4017, 0.9393, 0.3637, 0.5425, 0.4731, 0.0877, 0.9094, + 0.7214, 0.9414, 0.9950, 0.2474, 0.5511, 0.8001, 0.0442, + 0.9555, 0.3790, 0.0597, 0.0816, 0.0893, 0.0727, 0.0855, + 0.8418, 0.0900, 0.4889, 0.1537, 0.1078, 0.3076, 0.1132, + 0.3499, 0.2464, 0.3119, 0.7397, 0.5203, 0.7296, 0.3076, + 0.0908, 0.5180, 0.8820, 0.2955, 0.7721, 0.2443, 0.4412, + 0.0643, 0.1279, 0.8697, 0.6469, 0.3773, 0.3600, 0.5487, + 0.3518, 0.6432, 0.2385, 0.7556, 0.7224, 0.2014, 0.8943, + 0.1852, 0.9558, 0.6299, 0.3671, 0.9896, 0.6391, 0.1768, + 0.0298, 0.7972, 0.7124, 0.7051, 0.9680, 0.5269, 0.0532, + 0.0232, 0.6522, 0.9276, 0.9199, 0.4637, 0.4411, 0.9074, + 0.0961, 0.7283, 0.8054, 0.0583, 0.2110, 0.5960, 0.2328, + 0.9165, 0.5817, 0.5985, 0.0950, 0.4392, 0.8056, 0.8382, + 0.3675, 0.5339, 0.7866, 0.5051, 0.9951, 0.7743, 0.5281, + 0.2536, 0.4500, 0.8310, 0.7085, 0.2637, 0.3306, 0.5037, + 0.3989, 0.2457, 0.9774, 0.4426, 0.8629, 0.9169, 0.4122, + 0.5182, 0.4459, 0.2392, 0.3915, 0.8545, 0.1335, 0.2459, + 0.0435, 0.8607, 0.2092, 0.1152, 0.2264, 0.8530, 0.0610, + 0.5909, 0.8510, 0.9060, 0.1898, 0.9730, 0.5210, 0.5104, + 0.4607, 0.9295, 0.4482, 0.7724, 0.0968, 0.9352, 0.3537, + 0.8985, 0.5128, 0.0067, 0.2582, 0.7387, 0.7979, 0.7165, + 0.3443, 0.7560, 0.5700, 0.8397, 0.1771, 0.8402, 0.3091, + 0.9997, 0.1542, 0.0685, 0.8646, 0.3574, 0.0226, 0.7519, + 0.6524, 0.0072, 0.1165, 0.5114, 0.0917, 0.6314, 0.9212, + 0.5712, 0.7090, 0.0393, 0.1201, 0.8493, 0.2898, 0.8259, + 0.4117, 0.6401, 0.8024, 0.9462, 0.0945, 0.6655, 0.6495, + 0.4999, 0.2014, 0.1871, 0.0303, 0.7432, 0.6428, 0.9133, + 0.2792, 0.0556, 0.1388, 0.0015, 0.6331, 0.2758, 0.8367, + 0.8769, 0.0407, 0.6240, 0.7762, 0.1332, 0.5250, 0.1198, + 0.5519, 0.3094, 0.1346, 0.0647, 0.4742, 0.5731, 0.8318, + 0.2796, 0.5726, 0.3119, 0.9865, 0.9941, 0.5736, 0.5420, + 0.2171, 0.5354, 0.9426, 0.6173, 0.1128, 0.0283, 0.7768, + 0.9444, 0.6839, 0.6100, 0.7668, 0.0394, 0.8433, 0.0499, + 0.7761, 0.2587, 0.7427, 0.8822, 0.5529, 0.7552, 0.8520, + 0.8179, 0.9600, 0.0466, 0.5899, 0.0884, 0.9116, 0.1822, + 0.4571, 0.9885, 0.1750, 0.1838, 0.7352, 0.3448, 0.7858, + 0.2306, 0.7624, 0.8788, 0.3283, 0.6481, 0.6658, 0.3363, + 0.3090, 0.0330, 0.1595, 0.9700, 0.3235, 0.4224, 0.0628, + 0.0681, 0.2514, 0.1198, 0.5855, 0.6518, 0.4456, 0.5212, + 0.5722, 0.1297, 0.2828, 0.7370, 0.0325, 0.1233, 0.8781, + 0.1529, 0.3239, 0.4868, 0.4461, 0.0181, 0.9715, 0.6327, + 0.0701, 0.9788, 0.7771, 0.3935, 0.2992, 0.4526, 0.2252, + 0.9195, 0.1472, 0.6317, 0.3246, 0.5631, 0.7653, 0.3490, + 0.2479, 0.8149, 0.9333, 0.5535, 0.7152, 0.7420, 0.8502, + 0.5933, 0.4264, 0.3490, 0.4412, 0.9800, 0.1953, 0.5818, + 0.4513, 0.1650, 0.3902, 0.4948, 0.7564, 0.9652, 0.1048, + 0.4943, 0.8495, 0.3903, 0.7487, 0.4664, 0.0427, 0.1690, + 
0.7279, 0.3082, 0.6150, 0.2912, 0.2094, 0.9971, 0.3723, + 0.7357, 0.3658, 0.4009, 0.9999, 0.6043, 0.9117, 0.7334, + 0.5824, 0.0506, 0.7330, 0.0230, 0.4406, 0.0807, 0.6942, + 0.2245, 0.2485, 0.5748, 0.8330, 0.7840, 0.7060, 0.2348, + 0.0035, 0.5364, 0.0187, 0.6599, 0.9337, 0.6053, 0.1831, + 0.4356, 0.0679, 0.0908, 0.3894, 0.8661, 0.9286, 0.5274, + 0.2198, 0.1337, 0.5968, 0.2616, 0.9505, 0.9894, 0.6963, + 0.4705, 0.2413, 0.0355, 0.0514, 0.8694, 0.1783, 0.6758, + 0.2101, 0.3192, 0.4910, 0.5302, 0.4899, 0.5359, 0.1925, + 0.2343, 0.0188, 0.3525, 0.6944, 0.1704, 0.1291, 0.7572, + 0.5869, 0.4575, 0.5402, 0.3083, 0.7036, 0.7430, 0.3675, + 0.5465, 0.0686, 0.3712, 0.7453, 0.8458, 0.5033, 0.3961, + 0.8154, 0.6781, 0.6892, 0.5106, 0.8864, 0.2284, 0.3130, + 0.1335, 0.9835, 0.3056, 0.3963, 0.3066, 0.8333, 0.1173, + 0.1717, 0.1906, 0.5577, 0.8909, 0.5702, 0.9202, 0.0098, + 0.9034, 0.1638, 0.7871, 0.0602, 0.1046, 0.7108, 0.1792, + 0.5907, 0.9144, 0.7471, 0.9590, 0.3509, 0.9697, 0.2362, + 0.7800, 0.2561, 0.6928, 0.6592, 0.6715, 0.3954, 0.9109, + 0.8543, 0.2309, 0.9931, 0.3675, 0.9653, 0.9348, 0.8702, + 0.8780, 0.0545, 0.0095, 0.1323, 0.2713, 0.1104, 0.0360, + 0.7644, 0.3254, 0.2794, 0.7010, 0.6247, 0.7531, 0.2093, + 0.4077, 0.7399, 0.9364, 0.9070, 0.3612, 0.1375, 0.2106, + 0.6888, 0.0838, 0.3848, 0.1242, 0.2979, 0.7983, 0.3405, + 0.6686, 0.9463, 0.7854, 0.8071, 0.4226, 0.2682, 0.4134, + 0.9403, 0.2631, 0.8116, 0.3477, 0.7616, 0.0356, 0.3800, + 0.4796, 0.9664, 0.9730, 0.3146, 0.3811, 0.3444, 0.8364, + 0.4593, 0.2380, 0.3529, 0.8631, 0.8304, 0.7458, 0.2703, + 0.8323, 0.2629, 0.9141, 0.3840, 0.6975, 0.0508, 0.9536, + 0.3777, 0.3904, 0.8044, 0.1828, 0.1908, 0.1226, 0.5426, + 0.6712, 0.1985, 0.0131, 0.9193, 0.7070, 0.7883, 0.2011, + 0.5491, 0.4025, 0.3145, 0.0253, 0.2556, 0.4276, 0.9673, + 0.4027, 0.7313, 0.3000, 0.8244, 0.6521, 0.0329, 0.9128, + 0.1532, 0.7920, 0.6837, 0.1740, 0.6020, 0.4749, 0.1878, + 0.6918, 0.3430, 0.8863, 0.0954, 0.2221, 0.0196, 0.9170, + 0.2529, 0.8027, 0.9234, 0.7784, 0.4154, 0.7635, 0.3669, + 0.0588, 0.6705, 0.6605, 0.4654, 0.5060, 0.1257, 0.2869, + 0.1042, 0.7987, 0.7289, 0.1239, 0.7381, 0.3130, 0.2067, + 0.2775, 0.8959, 0.6030, 0.6860, 0.7855, 0.9485, 0.1748, + 0.7731, 0.6068, 0.3676, 0.3474, 0.3768, 0.8882, 0.3443, + 0.2956, 0.9448, 0.4045, 0.2381, 0.2525, 0.2118, 0.1224, + 0.6778, 0.2811, 0.1046, 0.4058, 0.6707, 0.0635, 0.5605, + 0.3192, 0.2209, 0.6866, 0.2181, 0.4822, 0.4068, 0.9604, + 0.5810, 0.0649, 0.5097, 0.1731, 0.2222, 0.3090, 0.8573, + 0.2342, 0.2229, 0.3303, 0.5365, 0.1241, 0.0313, 0.7432, + 0.3762, 0.5850, 0.5199, 0.0453, 0.7767, 0.0550, 0.5852, + 0.6872, 0.3713, 0.6670, 0.1341, 0.8513, 0.2874, 0.5203, + 0.7337, 0.0145, 0.5286, 0.0318, 0.0106, 0.8079, 0.5923, + 0.1886, 0.8415, 0.1474, 0.3036, 0.7165, 0.9739, 0.2050, + 0.0209, 0.3028, 0.8424, 0.4722, 0.9670, 0.8856, 0.7059, + 0.8881, 0.4477, 0.3433, 0.8833, 0.6854, 0.9979, 0.4134, + 0.6004, 0.9236, 0.5985, 0.8387, 0.3869, 0.9226, 0.8476, + 0.2815, 0.5644, 0.8983, 0.8519, 0.4415, 0.5941, 0.4933, + 0.3941, 0.0630, 0.8087, 0.5422, 0.7823, 0.0794, 0.7566, + 0.8197, 0.3825, 0.5699, 0.9195, 0.6894, 0.8052, 0.9060, + 0.8181, 0.6439, 0.3874, 0.8570, 0.7838, 0.5526, 0.0770, + 0.5884, 0.8769, 0.0709, 0.5994, 0.2015, 0.1734, 0.9822, + 0.8660, 0.0374, 0.8196, 0.5480, 0.3620, 0.6184, 0.3047, + 0.1654, 0.6492, 0.4554, 0.0477, 0.9808, 0.2615, 0.3836, + 0.1987, 0.6319, 0.9715, 0.7191, 0.5039, 0.7992, 0.8664, + 0.0815, 0.4937, 0.8132, 0.9455, 0.3586, 0.8318, 0.8930, + 0.1422, 0.4286, 0.9237, 0.7140, 0.1449, 0.5233, 0.9544, + 0.6412, 0.0940, 
0.5388, 0.3539, 0.9951, 0.5860, 0.5837, + 0.0540, 0.2181, 0.6021, 0.1546, 0.1818, 0.6251, 0.6991, + 0.5441, 0.4257, 0.3883, 0.4987, 0.7504, 0.8178, 0.0667, + 0.3423, 0.4631, 0.0636, 0.6995, 0.6845, 0.1335, 0.5769, + 0.3366, 0.8741, 0.7448, 0.5158, 0.4520, 0.7964, 0.5429, + 0.3853, 0.3489, 0.7282, 0.3002, 0.4892, 0.4084, 0.7785, + 0.6224, 0.6482, 0.1360, 0.3743, 0.3204, 0.3293, 0.5975, + 0.8635, 0.1468, 0.1420, 0.1950, 0.5502, 0.3099, 0.4665, + 0.2473, 0.9781, 0.4882, 0.8615, 0.4379, 0.3304, 0.9224, + 0.6375, 0.4835, 0.7192, 0.6721, 0.7342, 0.5743, 0.9239, + 0.9773, 0.2213, 0.3478, 0.7032, 0.0574, 0.8041, 0.3853, + 0.1147, 0.0390, 0.9320, 0.5858, 0.0975, 0.5982, 0.1467, + 0.9118, 0.4835, 0.9183, 0.3489, 0.0389, 0.2553, 0.5860, + 0.2665, 0.6450, 0.3179, 0.5337, 0.7581, 0.4409, 0.1177, + 0.0512, 0.8850, 0.2142, 0.1547, 0.5876, 0.8678, 0.5430, + 0.4686, 0.4656, 0.5329, 0.4015, 0.3146, 0.2257, 0.1820, + 0.9287, 0.0585, 0.6678, 0.0868, 0.7648, 0.2970, 0.6893, + 0.7312, 0.6106, 0.1958, 0.8679, 0.9976, 0.5849, 0.7869, + 0.3363, 0.5231, 0.9619, 0.1567, 0.1143, 0.9307, 0.2825, + 0.3303, 0.5892, 0.7606, 0.7858, 0.0785, 0.3935, 0.0941, + 0.7542, 0.7552, 0.7909, 0.6337, 0.4503, 0.8151, 0.1544, + 0.0385, 0.1762, 0.7871, 0.9429, 0.7065, 0.2556, 0.7752, + 0.3810, 0.5819, 0.5096, 0.6816, 0.5826, 0.0960, 0.1244, + 0.3464, 0.1206, 0.8110, 0.0102, 0.2242, 0.3161]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.0219, 0.9207, 0.3669, ..., 0.7955, 0.2670, 0.3543]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.798607587814331 seconds + +[16.32, 16.4, 16.24, 16.76, 16.72, 17.12, 17.44, 17.24, 17.36, 17.12] +[16.8, 16.56, 16.56, 19.68, 21.72, 23.52, 24.24, 24.8, 20.88, 20.0, 19.92, 19.92, 19.64, 19.64] +14.209118843078613 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 141552, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.798607587814331, 'TIME_S_1KI': 0.07628721309352274, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 269.76709407806396, 'W': 18.985490730093364} +[16.32, 16.4, 16.24, 16.76, 16.72, 17.12, 17.44, 17.24, 17.36, 17.12, 16.28, 16.24, 16.24, 16.36, 16.6, 16.4, 16.24, 16.4, 16.44, 16.28] +299.2 +14.959999999999999 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 141552, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.798607587814331, 'TIME_S_1KI': 0.07628721309352274, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 269.76709407806396, 'W': 18.985490730093364, 'J_1KI': 1.9057808725985077, 'W_1KI': 0.13412379005661076, 'W_D': 4.025490730093365, 'J_D': 57.198676185607916, 'W_D_1KI': 0.028438246934648505, 'J_D_1KI': 0.0002009031799949736} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..ccc8254 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 52454, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, 
"MATRIX_DENSITY": 5e-05, "TIME_S": 10.6396803855896, "TIME_S_1KI": 0.20283830376309908, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 254.19103302001952, "W": 17.900584259593533, "J_1KI": 4.845979963778158, "W_1KI": 0.3412625206770415, "W_D": 2.811584259593534, "J_D": 39.92492630434038, "W_D_1KI": 0.05360095053939707, "J_D_1KI": 0.0010218658355777839} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..6f0e024 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 10000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.02812814712524414} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 4998, 4999, 5000]), + col_indices=tensor([4541, 9541, 3383, ..., 9920, 3344, 2731]), + values=tensor([0.3320, 0.1825, 0.5042, ..., 0.6612, 0.1900, 0.5121]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.0144, 0.2553, 0.6494, ..., 0.0787, 0.9201, 0.6475]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.02812814712524414 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 37329 -ss 10000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 7.472296953201294} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 4999, 5000]), + col_indices=tensor([6333, 2190, 7526, ..., 2226, 3084, 9881]), + values=tensor([0.4590, 0.1089, 0.5094, ..., 0.8341, 0.9457, 0.0387]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.3900, 0.6084, 0.4843, ..., 0.7689, 0.5332, 0.9837]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 7.472296953201294 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 52454 -ss 10000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.6396803855896} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([2165, 7276, 636, ..., 1970, 2680, 2527]), + values=tensor([0.0210, 0.6731, 0.7347, ..., 0.2518, 0.3264, 0.9787]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.5715, 0.5141, 0.9224, ..., 0.0062, 0.8236, 0.9187]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.6396803855896 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 5000, 5000, 5000]), + col_indices=tensor([2165, 7276, 636, ..., 1970, 2680, 2527]), + values=tensor([0.0210, 0.6731, 0.7347, ..., 0.2518, 0.3264, 0.9787]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.5715, 0.5141, 0.9224, ..., 0.0062, 0.8236, 0.9187]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.6396803855896 seconds + +[16.64, 16.48, 16.64, 16.84, 17.0, 16.8, 16.72, 16.4, 16.4, 16.2] +[16.28, 16.44, 16.8, 17.88, 19.0, 19.88, 20.6, 20.92, 20.48, 20.0, 20.12, 20.12, 19.92, 19.84] +14.20015287399292 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 52454, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.6396803855896, 'TIME_S_1KI': 0.20283830376309908, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 254.19103302001952, 'W': 17.900584259593533} +[16.64, 16.48, 16.64, 16.84, 17.0, 16.8, 16.72, 16.4, 16.4, 16.2, 16.72, 16.92, 16.8, 16.88, 17.04, 17.16, 16.96, 16.8, 16.84, 16.64] +301.78 +15.088999999999999 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 52454, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.6396803855896, 'TIME_S_1KI': 0.20283830376309908, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 254.19103302001952, 'W': 17.900584259593533, 'J_1KI': 4.845979963778158, 'W_1KI': 0.3412625206770415, 'W_D': 2.811584259593534, 'J_D': 39.92492630434038, 'W_D_1KI': 0.05360095053939707, 'J_D_1KI': 0.0010218658355777839} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..2f5c509 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.72194480895996, "TIME_S_1KI": 227.2194480895996, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2080.890781612396, "W": 61.453409935621025, "J_1KI": 20808.90781612396, "W_1KI": 614.5340993562103, "W_D": 45.112409935621024, "J_D": 1527.5636953213213, "W_D_1KI": 451.12409935621025, "J_D_1KI": 4511.240993562103} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..4a89b40 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,47 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 22.72194480895996} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 56, 116, ..., 24999900, + 24999953, 25000000]), + col_indices=tensor([ 647, 4700, 33413, ..., 445020, 463377, + 482076]), + values=tensor([0.2494, 0.9199, 0.9974, ..., 0.2647, 0.3316, 0.8056]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.1920, 0.0118, 0.3050, ..., 0.0609, 0.1776, 0.7503]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 22.72194480895996 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 56, 116, ..., 24999900, + 24999953, 25000000]), + col_indices=tensor([ 647, 4700, 33413, ..., 445020, 463377, + 482076]), + values=tensor([0.2494, 0.9199, 0.9974, ..., 0.2647, 0.3316, 0.8056]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.1920, 0.0118, 0.3050, ..., 0.0609, 0.1776, 0.7503]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 22.72194480895996 seconds + +[17.96, 18.12, 17.96, 17.96, 17.88, 17.96, 17.88, 17.92, 17.76, 17.8] +[17.8, 17.6, 18.48, 20.36, 21.4, 22.72, 22.72, 24.08, 25.12, 34.04, 34.44, 33.88, 34.28, 32.56, 45.4, 59.36, 77.8, 92.4, 98.2, 98.2, 95.48, 95.96, 96.12, 95.0, 93.72, 96.32, 96.24, 93.28, 93.88, 93.4, 91.28, 91.4, 93.2] +33.8612744808197 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.72194480895996, 'TIME_S_1KI': 227.2194480895996, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2080.890781612396, 'W': 61.453409935621025} +[17.96, 18.12, 17.96, 17.96, 17.88, 17.96, 17.88, 17.92, 17.76, 17.8, 17.72, 18.0, 18.44, 18.6, 18.6, 18.64, 18.76, 18.48, 18.16, 17.92] +326.82000000000005 +16.341 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 22.72194480895996, 'TIME_S_1KI': 227.2194480895996, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2080.890781612396, 'W': 61.453409935621025, 'J_1KI': 20808.90781612396, 'W_1KI': 614.5340993562103, 'W_D': 45.112409935621024, 'J_D': 1527.5636953213213, 'W_D_1KI': 451.12409935621025, 'J_D_1KI': 4511.240993562103} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_1e-05.json 
new file mode 100644 index 0000000..208d33f --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 279, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.429174900054932, "TIME_S_1KI": 44.549013978691505, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 925.7997315406801, "W": 55.99377793769148, "J_1KI": 3318.2786076726884, "W_1KI": 200.69454457953935, "W_D": 40.002777937691484, "J_D": 661.4049353270533, "W_D_1KI": 143.37913239315944, "J_D_1KI": 513.9037003339048} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..2fab080 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,110 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 4.175914764404297} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 9, ..., 2499991, + 2499995, 2500000]), + col_indices=tensor([236491, 268930, 282894, ..., 290854, 362096, + 428990]), + values=tensor([0.4942, 0.2006, 0.6461, ..., 0.6339, 0.7923, 0.0061]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6272, 0.4782, 0.6613, ..., 0.5722, 0.7323, 0.6099]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 4.175914764404297 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 251 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.986790895462036} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 4, ..., 2499995, + 2499998, 2500000]), + col_indices=tensor([168226, 184311, 332682, ..., 175948, 40749, + 152556]), + values=tensor([0.8367, 0.0584, 0.1423, ..., 0.1509, 0.1566, 0.6036]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8910, 0.6965, 0.0939, ..., 0.1566, 0.5700, 0.8005]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 9.986790895462036 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 263 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.86619520187378} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 4, ..., 2499987, + 2499997, 2500000]), + col_indices=tensor([120869, 339930, 358219, ..., 35981, 71933, + 400518]), + values=tensor([0.0243, 0.7300, 0.4495, ..., 0.8433, 0.9453, 0.9296]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0560, 0.8217, 0.0541, ..., 0.5269, 0.7792, 0.2112]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 9.86619520187378 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 279 -ss 500000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 12.429174900054932} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 2499986, + 2499991, 2500000]), + col_indices=tensor([ 4708, 62252, 239037, ..., 346193, 443276, + 467019]), + values=tensor([0.7542, 0.0207, 0.5398, ..., 0.0649, 0.4673, 0.8331]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8471, 0.4897, 0.4001, ..., 0.3407, 0.6143, 0.4869]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 12.429174900054932 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 4, ..., 2499986, + 2499991, 2500000]), + col_indices=tensor([ 4708, 62252, 239037, ..., 346193, 443276, + 467019]), + values=tensor([0.7542, 0.0207, 0.5398, ..., 0.0649, 0.4673, 0.8331]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8471, 0.4897, 0.4001, ..., 0.3407, 0.6143, 0.4869]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 12.429174900054932 seconds + +[17.72, 17.72, 17.84, 17.8, 17.8, 18.04, 17.92, 17.8, 17.72, 17.32] +[17.36, 17.64, 18.04, 20.2, 21.44, 35.64, 48.92, 66.32, 77.96, 77.96, 91.04, 90.76, 89.6, 87.56, 86.4, 84.76] +16.53397512435913 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 279, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.429174900054932, 'TIME_S_1KI': 44.549013978691505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 925.7997315406801, 'W': 55.99377793769148} +[17.72, 17.72, 17.84, 17.8, 17.8, 18.04, 17.92, 17.8, 17.72, 17.32, 17.84, 17.72, 17.8, 17.92, 17.8, 17.72, 17.6, 17.72, 17.72, 17.48] +319.82 +15.991 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 279, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 12.429174900054932, 'TIME_S_1KI': 44.549013978691505, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 925.7997315406801, 'W': 55.99377793769148, 'J_1KI': 3318.2786076726884, 'W_1KI': 200.69454457953935, 'W_D': 40.002777937691484, 'J_D': 661.4049353270533, 'W_D_1KI': 143.37913239315944, 'J_D_1KI': 513.9037003339048} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..fd07ea0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 16.55699610710144, "TIME_S_1KI": 165.5699610710144, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1413.1975269508362, "W": 57.485721407839506, "J_1KI": 14131.97526950836, "W_1KI": 574.857214078395, "W_D": 41.59072140783951, "J_D": 1022.44354246974, "W_D_1KI": 415.9072140783951, "J_D_1KI": 4159.072140783951} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..33e7f40 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,47 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 500000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], 
"MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 16.55699610710144} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 27, 44, ..., 12499945, + 12499971, 12500000]), + col_indices=tensor([ 7086, 20899, 31000, ..., 441979, 480995, + 482795]), + values=tensor([0.3494, 0.1791, 0.5321, ..., 0.0256, 0.8127, 0.6614]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9181, 0.2128, 0.6316, ..., 0.6360, 0.7946, 0.3835]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 16.55699610710144 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 27, 44, ..., 12499945, + 12499971, 12500000]), + col_indices=tensor([ 7086, 20899, 31000, ..., 441979, 480995, + 482795]), + values=tensor([0.3494, 0.1791, 0.5321, ..., 0.0256, 0.8127, 0.6614]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9181, 0.2128, 0.6316, ..., 0.6360, 0.7946, 0.3835]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 16.55699610710144 seconds + +[17.52, 17.52, 17.88, 17.84, 17.84, 17.64, 17.48, 17.72, 17.44, 17.28] +[17.4, 17.16, 20.64, 21.36, 23.44, 25.32, 33.76, 33.76, 31.56, 35.12, 47.32, 60.6, 67.04, 80.92, 86.0, 85.72, 87.24, 88.0, 90.8, 92.64, 93.72, 92.0, 92.0, 90.8] +24.583452939987183 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 16.55699610710144, 'TIME_S_1KI': 165.5699610710144, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1413.1975269508362, 'W': 57.485721407839506} +[17.52, 17.52, 17.88, 17.84, 17.84, 17.64, 17.48, 17.72, 17.44, 17.28, 18.0, 17.76, 17.6, 17.64, 17.44, 17.4, 17.8, 17.88, 17.76, 17.72] +317.9 +15.895 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 16.55699610710144, 'TIME_S_1KI': 165.5699610710144, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1413.1975269508362, 'W': 57.485721407839506, 'J_1KI': 14131.97526950836, 'W_1KI': 574.857214078395, 'W_D': 41.59072140783951, 'J_D': 1022.44354246974, 'W_D_1KI': 415.9072140783951, 'J_D_1KI': 4159.072140783951} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.0001.json 
b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..9346872 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1658, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 14.466236591339111, "TIME_S_1KI": 8.725112540011526, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1691.1909151077273, "W": 65.36355506616682, "J_1KI": 1020.0186460239609, "W_1KI": 39.423133333031856, "W_D": 49.293555066166824, "J_D": 1275.40205573082, "W_D_1KI": 29.730732850522816, "J_D_1KI": 17.93168446955538} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..686a0dc --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.8603906631469727} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 249990, 249996, + 250000]), + col_indices=tensor([ 116, 2220, 2597, ..., 31423, 34504, 36695]), + values=tensor([0.0356, 0.7526, 0.0114, ..., 0.2051, 0.2717, 0.4326]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.6822, 0.2973, 0.4245, ..., 0.4266, 0.4462, 0.3842]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.8603906631469727 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1220 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.725424766540527} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 12, ..., 249990, 249995, + 250000]), + col_indices=tensor([ 1339, 17035, 23748, ..., 19329, 30492, 33219]), + values=tensor([0.1487, 0.4152, 0.3651, ..., 0.6580, 0.7478, 0.7026]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4336, 0.1383, 0.7901, ..., 0.8031, 0.7630, 0.0295]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 7.725424766540527 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1658 -ss 50000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 14.466236591339111} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 249989, 249992, + 250000]), + col_indices=tensor([14878, 23641, 26434, ..., 39221, 43609, 44125]), + values=tensor([0.9348, 0.1734, 0.7472, ..., 0.0129, 0.0523, 0.4218]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4659, 0.1302, 0.1589, ..., 0.1214, 0.1279, 0.7413]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 14.466236591339111 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 7, ..., 249989, 249992, + 250000]), + col_indices=tensor([14878, 23641, 26434, ..., 39221, 43609, 44125]), + values=tensor([0.9348, 0.1734, 0.7472, ..., 0.0129, 0.0523, 0.4218]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4659, 0.1302, 0.1589, ..., 0.1214, 0.1279, 0.7413]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 14.466236591339111 seconds + +[18.12, 18.08, 18.12, 18.12, 18.08, 18.0, 17.76, 17.64, 17.48, 17.48] +[17.28, 17.4, 17.28, 19.64, 20.44, 35.08, 49.0, 64.92, 77.28, 86.8, 86.8, 86.52, 85.52, 84.64, 84.32, 84.0, 84.16, 84.16, 84.24, 84.24, 84.04, 84.28, 82.92, 82.4, 81.88] +25.87360668182373 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1658, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 14.466236591339111, 'TIME_S_1KI': 8.725112540011526, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1691.1909151077273, 'W': 65.36355506616682} +[18.12, 18.08, 18.12, 18.12, 18.08, 18.0, 17.76, 17.64, 17.48, 17.48, 18.04, 17.84, 17.8, 17.64, 17.76, 17.92, 17.8, 17.8, 17.72, 18.04] +321.4 +16.07 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1658, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 14.466236591339111, 'TIME_S_1KI': 8.725112540011526, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1691.1909151077273, 'W': 65.36355506616682, 'J_1KI': 1020.0186460239609, 'W_1KI': 39.423133333031856, 'W_D': 49.293555066166824, 'J_D': 1275.40205573082, 'W_D_1KI': 29.730732850522816, 'J_D_1KI': 17.93168446955538} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..c314337 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 135, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.963478088378906, "TIME_S_1KI": 81.21094880280671, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 582.8568821525573, "W": 43.83509814272925, "J_1KI": 4317.458386315239, "W_1KI": 324.7044306868833, "W_D": 28.06209814272925, "J_D": 373.12992837095254, "W_D_1KI": 207.8673936498463, "J_D_1KI": 1539.758471480343} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..514e8a4 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 
8.26869797706604} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 44, 102, ..., 2499900, + 2499955, 2500000]), + col_indices=tensor([ 878, 3105, 3271, ..., 44510, 45389, 45985]), + values=tensor([0.2421, 0.0051, 0.2486, ..., 0.1294, 0.9249, 0.4412]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3218, 0.5110, 0.5510, ..., 0.0407, 0.3623, 0.3415]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 8.26869797706604 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 126 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.729291677474976} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 44, 101, ..., 2499906, + 2499957, 2500000]), + col_indices=tensor([ 430, 2871, 2934, ..., 46471, 47392, 47877]), + values=tensor([0.4189, 0.2667, 0.2640, ..., 0.7329, 0.4126, 0.0437]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6895, 0.8779, 0.8508, ..., 0.5330, 0.3990, 0.7739]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 9.729291677474976 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 135 -ss 50000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.963478088378906} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 51, 106, ..., 2499884, + 2499945, 2500000]), + col_indices=tensor([ 1316, 2608, 2921, ..., 47281, 49169, 49691]), + values=tensor([0.1237, 0.8262, 0.6046, ..., 0.7531, 0.6389, 0.8086]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.9516, 0.0943, 0.1293, ..., 0.9488, 0.5626, 0.5458]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.963478088378906 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 51, 106, ..., 2499884, + 2499945, 2500000]), + col_indices=tensor([ 1316, 2608, 2921, ..., 47281, 49169, 49691]), + values=tensor([0.1237, 0.8262, 0.6046, ..., 0.7531, 0.6389, 0.8086]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.9516, 0.0943, 0.1293, ..., 0.9488, 0.5626, 0.5458]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.963478088378906 seconds + +[17.96, 17.88, 17.56, 17.44, 17.36, 17.24, 17.44, 17.68, 17.68, 17.68] +[17.88, 17.88, 17.88, 22.04, 23.04, 29.76, 42.2, 54.16, 64.96, 76.44, 81.04, 80.44, 80.44] +13.296579837799072 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 135, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.963478088378906, 'TIME_S_1KI': 81.21094880280671, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 582.8568821525573, 'W': 43.83509814272925} +[17.96, 17.88, 17.56, 17.44, 17.36, 17.24, 17.44, 17.68, 17.68, 17.68, 17.84, 17.6, 17.48, 17.48, 17.52, 17.32, 17.4, 17.36, 17.44, 17.68] +315.46 +15.773 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 135, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.963478088378906, 'TIME_S_1KI': 81.21094880280671, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 582.8568821525573, 'W': 43.83509814272925, 'J_1KI': 4317.458386315239, 'W_1KI': 324.7044306868833, 'W_D': 28.06209814272925, 'J_D': 373.12992837095254, 'W_D_1KI': 207.8673936498463, 'J_D_1KI': 1539.758471480343} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..de9236a --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 30.498249053955078, "TIME_S_1KI": 304.9824905395508, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2007.0959778976435, "W": 47.944381683758614, 
"J_1KI": 20070.959778976434, "W_1KI": 479.44381683758616, "W_D": 32.291381683758615, "J_D": 1351.8143319842811, "W_D_1KI": 322.9138168375861, "J_D_1KI": 3229.138168375861} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..9e427ef --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 30.498249053955078} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 519, 1009, ..., 24999042, + 24999488, 25000000]), + col_indices=tensor([ 129, 342, 437, ..., 49566, 49630, 49865]), + values=tensor([0.8700, 0.4704, 0.0527, ..., 0.4978, 0.7115, 0.5319]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3352, 0.1102, 0.4216, ..., 0.1668, 0.6074, 0.9924]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 30.498249053955078 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 519, 1009, ..., 24999042, + 24999488, 25000000]), + col_indices=tensor([ 129, 342, 437, ..., 49566, 49630, 49865]), + values=tensor([0.8700, 0.4704, 0.0527, ..., 0.4978, 0.7115, 0.5319]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3352, 0.1102, 0.4216, ..., 0.1668, 0.6074, 0.9924]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 30.498249053955078 seconds + +[17.68, 17.6, 17.56, 17.16, 17.16, 17.0, 17.0, 17.08, 17.36, 17.4] +[17.52, 17.72, 18.04, 18.88, 20.68, 21.64, 23.68, 24.04, 31.04, 33.36, 33.36, 32.8, 32.72, 32.52, 28.16, 34.64, 41.2, 49.36, 57.8, 58.4, 58.92, 62.72, 62.72, 61.04, 61.04, 59.8, 60.88, 61.56, 64.04, 67.12, 70.76, 68.32, 68.92, 65.8, 66.72, 65.88, 65.84, 68.88, 68.88, 67.08, 67.04] +41.863006830215454 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 30.498249053955078, 'TIME_S_1KI': 304.9824905395508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2007.0959778976435, 'W': 47.944381683758614} +[17.68, 17.6, 17.56, 17.16, 17.16, 17.0, 17.0, 17.08, 17.36, 17.4, 17.56, 17.4, 17.44, 17.44, 17.4, 17.6, 17.56, 17.52, 17.6, 17.72] +313.05999999999995 +15.652999999999997 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 30.498249053955078, 'TIME_S_1KI': 304.9824905395508, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2007.0959778976435, 'W': 47.944381683758614, 'J_1KI': 20070.959778976434, 'W_1KI': 479.44381683758616, 'W_D': 32.291381683758615, 'J_D': 1351.8143319842811, 'W_D_1KI': 322.9138168375861, 'J_D_1KI': 3229.138168375861} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..5f4cac5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 8811, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 14.461963891983032, "TIME_S_1KI": 1.6413532961052129, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1601.2496216583254, "W": 63.889106877708535, "J_1KI": 181.7330180068466, "W_1KI": 7.251061954115144, "W_D": 47.822106877708535, "J_D": 1198.5631712055208, "W_D_1KI": 5.4275458946440285, "J_D_1KI": 0.6159965832078116} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..6d049d2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,120 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": 
"csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.2421858310699463} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 24999, 25000, 25000]), + col_indices=tensor([11801, 48673, 42443, ..., 35599, 34008, 22453]), + values=tensor([0.3951, 0.6998, 0.6224, ..., 0.4352, 0.5927, 0.1013]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6391, 0.6971, 0.7049, ..., 0.7658, 0.2053, 0.9702]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.2421858310699463 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 4335 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.2435102462768555} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 25000, 25000, 25000]), + col_indices=tensor([48470, 32812, 24371, ..., 21811, 5693, 27792]), + values=tensor([7.9337e-01, 8.6969e-01, 2.1228e-02, ..., + 3.1628e-01, 5.2154e-01, 8.3659e-04]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5123, 0.3871, 0.5639, ..., 0.2434, 0.7885, 0.9337]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 6.2435102462768555 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 7290 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.1812264919281} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), + col_indices=tensor([ 1561, 34915, 11685, ..., 9985, 24943, 27218]), + values=tensor([0.3207, 0.5476, 0.7721, ..., 0.5221, 0.2072, 0.7139]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7737, 0.6011, 0.2764, ..., 0.4575, 0.9058, 0.1946]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.1812264919281 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 8337 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.935004234313965} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24999, 25000, 25000]), + col_indices=tensor([ 4660, 37796, 36819, ..., 14791, 855, 165]), + values=tensor([0.6560, 0.1119, 0.0106, ..., 0.5425, 0.3178, 0.8843]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7672, 0.9593, 0.0238, ..., 0.4054, 0.5730, 0.7422]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.935004234313965 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 8811 -ss 50000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 14.461963891983032} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24997, 24998, 25000]), + col_indices=tensor([ 8691, 13268, 31788, ..., 20611, 3075, 9688]), + values=tensor([0.8778, 0.6640, 0.4350, ..., 0.9614, 0.5782, 0.7592]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9817, 0.3893, 0.5308, ..., 0.4402, 0.5461, 0.2337]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 14.461963891983032 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24997, 24998, 25000]), + col_indices=tensor([ 8691, 13268, 31788, ..., 20611, 3075, 9688]), + values=tensor([0.8778, 0.6640, 0.4350, ..., 0.9614, 0.5782, 0.7592]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9817, 0.3893, 0.5308, ..., 0.4402, 0.5461, 0.2337]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 14.461963891983032 seconds + +[18.04, 17.96, 17.84, 17.68, 17.72, 17.8, 17.84, 17.92, 17.8, 17.8] +[17.8, 17.64, 17.64, 18.72, 19.96, 31.88, 49.36, 63.92, 79.84, 91.32, 91.4, 91.0, 90.8, 90.72, 90.68, 90.12, 89.48, 88.76, 88.76, 87.32, 86.72, 85.68, 84.4, 82.12] +25.062952041625977 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 8811, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 14.461963891983032, 'TIME_S_1KI': 1.6413532961052129, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1601.2496216583254, 'W': 63.889106877708535} +[18.04, 17.96, 17.84, 17.68, 17.72, 17.8, 17.84, 17.92, 17.8, 17.8, 18.0, 17.84, 17.72, 17.92, 17.96, 17.88, 18.12, 17.92, 17.68, 17.64] +321.34000000000003 +16.067 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 8811, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 14.461963891983032, 'TIME_S_1KI': 1.6413532961052129, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1601.2496216583254, 'W': 63.889106877708535, 'J_1KI': 181.7330180068466, 'W_1KI': 7.251061954115144, 'W_D': 47.822106877708535, 'J_D': 1198.5631712055208, 'W_D_1KI': 5.4275458946440285, 'J_D_1KI': 0.6159965832078116} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..a856634 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 3016, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 12.238471984863281, "TIME_S_1KI": 4.057848801347242, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1021.0734484481814, "W": 58.16446532539802, "J_1KI": 338.5522043926331, "W_1KI": 19.285300174203588, "W_D": 42.31446532539802, "J_D": 742.827717702389, "W_D_1KI": 14.02999513441579, "J_D_1KI": 4.651855150668366} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..0ab6a55 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 
0.37006235122680664} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 5, ..., 124994, 124997, + 125000]), + col_indices=tensor([ 3192, 33329, 36206, ..., 17521, 36763, 39198]), + values=tensor([0.7954, 0.1728, 0.6419, ..., 0.6370, 0.0715, 0.4891]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.4965, 0.4773, 0.1313, ..., 0.0503, 0.1495, 0.6552]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.37006235122680664 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 2837 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 9.87511658668518} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 4, ..., 124994, 124997, + 125000]), + col_indices=tensor([18930, 21812, 35293, ..., 949, 2935, 28377]), + values=tensor([0.1354, 0.7141, 0.1182, ..., 0.8833, 0.5348, 0.3796]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.0699, 0.1291, 0.1295, ..., 0.2384, 0.2084, 0.3934]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 9.87511658668518 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 3016 -ss 50000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 12.238471984863281} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 8, ..., 124995, 124998, + 125000]), + col_indices=tensor([ 1947, 25944, 43942, ..., 29833, 871, 28509]), + values=tensor([0.4807, 0.5814, 0.8403, ..., 0.1650, 0.5150, 0.8001]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.7842, 0.6693, 0.1792, ..., 0.7791, 0.2472, 0.4723]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 12.238471984863281 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 8, ..., 124995, 124998, + 125000]), + col_indices=tensor([ 1947, 25944, 43942, ..., 29833, 871, 28509]), + values=tensor([0.4807, 0.5814, 0.8403, ..., 0.1650, 0.5150, 0.8001]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.7842, 0.6693, 0.1792, ..., 0.7791, 0.2472, 0.4723]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 12.238471984863281 seconds + +[17.56, 17.48, 17.4, 17.56, 17.72, 17.76, 17.48, 17.56, 17.56, 17.52] +[17.52, 17.68, 17.76, 20.16, 20.96, 34.48, 49.64, 65.8, 77.8, 88.2, 87.72, 86.68, 86.76, 87.64, 87.64, 89.04, 89.44] +17.554935693740845 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 3016, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 12.238471984863281, 'TIME_S_1KI': 4.057848801347242, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1021.0734484481814, 'W': 58.16446532539802} +[17.56, 17.48, 17.4, 17.56, 17.72, 17.76, 17.48, 17.56, 17.56, 17.52, 17.72, 17.84, 17.8, 17.68, 17.68, 17.52, 17.56, 17.6, 17.48, 17.84] +317.0 +15.85 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 3016, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 12.238471984863281, 'TIME_S_1KI': 4.057848801347242, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1021.0734484481814, 'W': 58.16446532539802, 'J_1KI': 338.5522043926331, 'W_1KI': 19.285300174203588, 'W_D': 42.31446532539802, 'J_D': 742.827717702389, 'W_D_1KI': 14.02999513441579, 'J_D_1KI': 4.651855150668366} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..fa80299 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 93664, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.030004501342773, "TIME_S_1KI": 0.107084947272621, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 253.85587940216067, "W": 17.876929919933424, 
"J_1KI": 2.7102822792338643, "W_1KI": 0.19086233686297213, "W_D": 2.859929919933423, "J_D": 40.61156071567538, "W_D_1KI": 0.03053392893676784, "J_D_1KI": 0.00032599428741851555} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..c2bcc94 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.018899202346801758} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 2499, 2500, 2500]), + col_indices=tensor([1031, 4368, 4092, ..., 190, 4399, 1962]), + values=tensor([0.5248, 0.7492, 0.4749, ..., 0.9820, 0.4892, 0.6710]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7298, 0.5753, 0.5464, ..., 0.2530, 0.5594, 0.0214]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.018899202346801758 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 55557 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 6.228086948394775} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 2499, 2500, 2500]), + col_indices=tensor([ 532, 4399, 2173, ..., 2637, 3554, 2146]), + values=tensor([0.0459, 0.9731, 0.3457, ..., 0.8215, 0.1549, 0.6550]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2796, 0.3928, 0.8109, ..., 0.2089, 0.4148, 0.7694]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 6.228086948394775 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 93664 -ss 5000 -sd 0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.030004501342773} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2498, 2498, 2500]), + col_indices=tensor([1824, 1160, 4169, ..., 4733, 1262, 4559]), + values=tensor([0.9986, 0.6087, 0.2000, ..., 0.7208, 0.5140, 0.1151]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.6500, 0.1776, 0.6063, ..., 0.2915, 0.3213, 0.4804]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.030004501342773 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2498, 2498, 2500]), + col_indices=tensor([1824, 1160, 4169, ..., 4733, 1262, 4559]), + values=tensor([0.9986, 0.6087, 0.2000, ..., 0.7208, 0.5140, 0.1151]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.6500, 0.1776, 0.6063, ..., 0.2915, 0.3213, 0.4804]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.030004501342773 seconds + +[16.8, 16.8, 17.16, 17.24, 17.04, 16.96, 16.96, 17.0, 16.6, 17.0] +[16.8, 16.84, 17.08, 18.24, 19.2, 19.72, 20.36, 20.2, 20.2, 20.04, 19.8, 19.8, 20.0, 19.96] +14.200194358825684 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 93664, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.030004501342773, 'TIME_S_1KI': 0.107084947272621, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 253.85587940216067, 'W': 17.876929919933424} +[16.8, 16.8, 17.16, 17.24, 17.04, 16.96, 16.96, 17.0, 16.6, 17.0, 16.72, 16.48, 16.44, 16.48, 16.32, 16.48, 16.4, 16.28, 16.28, 16.32] +300.34000000000003 +15.017000000000001 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 93664, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.030004501342773, 'TIME_S_1KI': 0.107084947272621, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 253.85587940216067, 'W': 17.876929919933424, 'J_1KI': 2.7102822792338643, 'W_1KI': 0.19086233686297213, 'W_D': 2.859929919933423, 'J_D': 40.61156071567538, 'W_D_1KI': 0.03053392893676784, 'J_D_1KI': 0.00032599428741851555} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..37907ba --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 17878, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 
25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.553908824920654, "TIME_S_1KI": 0.5903293894686572, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 275.88499256134037, "W": 19.43945882245899, "J_1KI": 15.431535549912763, "W_1KI": 1.0873396813099334, "W_D": 4.574458822458993, "J_D": 64.92076501369485, "W_D_1KI": 0.2558708369201808, "J_D_1KI": 0.014312050392671485} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..e1fffa0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06726479530334473} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 16, ..., 24990, 24996, 25000]), + col_indices=tensor([ 219, 546, 972, ..., 2610, 3216, 3318]), + values=tensor([0.2561, 0.1283, 0.3219, ..., 0.1859, 0.9829, 0.7598]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7133, 0.2668, 0.2369, ..., 0.2811, 0.0980, 0.8981]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.06726479530334473 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 15609 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.167168855667114} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 17, ..., 24994, 24998, 25000]), + col_indices=tensor([ 299, 1244, 1941, ..., 4267, 280, 4025]), + values=tensor([0.2861, 0.6940, 0.5528, ..., 0.3063, 0.0705, 0.3058]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.7288, 0.9023, 0.1050, ..., 0.1276, 0.6415, 0.2460]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 9.167168855667114 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 17878 -ss 5000 -sd 0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.553908824920654} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 13, ..., 24995, 24998, 25000]), + col_indices=tensor([1009, 1198, 2341, ..., 3808, 999, 4327]), + values=tensor([0.4127, 0.3151, 0.8058, ..., 0.6562, 0.4614, 0.0831]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8967, 0.5216, 0.2765, ..., 0.9189, 0.4761, 0.3303]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.553908824920654 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 13, ..., 24995, 24998, 25000]), + col_indices=tensor([1009, 1198, 2341, ..., 3808, 999, 4327]), + values=tensor([0.4127, 0.3151, 0.8058, ..., 0.6562, 0.4614, 0.0831]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8967, 0.5216, 0.2765, ..., 0.9189, 0.4761, 0.3303]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.553908824920654 seconds + +[16.32, 16.36, 16.36, 16.56, 16.68, 16.68, 16.64, 16.56, 16.68, 16.52] +[16.44, 16.88, 16.88, 17.92, 19.12, 25.32, 25.8, 25.92, 25.32, 22.16, 19.68, 19.6, 19.64, 19.4] +14.192009925842285 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17878, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.553908824920654, 'TIME_S_1KI': 0.5903293894686572, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 275.88499256134037, 'W': 19.43945882245899} +[16.32, 16.36, 16.36, 16.56, 16.68, 16.68, 16.64, 16.56, 16.68, 16.52, 16.52, 16.4, 16.4, 16.2, 16.28, 16.36, 16.32, 16.64, 17.04, 16.92] +297.29999999999995 +14.864999999999998 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 17878, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.553908824920654, 'TIME_S_1KI': 0.5903293894686572, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 275.88499256134037, 'W': 19.43945882245899, 'J_1KI': 15.431535549912763, 'W_1KI': 1.0873396813099334, 'W_D': 4.574458822458993, 'J_D': 64.92076501369485, 'W_D_1KI': 0.2558708369201808, 'J_D_1KI': 0.014312050392671485} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..b2ee0da --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 1934, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.419332265853882, "TIME_S_1KI": 5.38745205059663, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 296.42124423980715, "W": 20.84298720811279, "J_1KI": 153.26848202678755, "W_1KI": 10.777139197576416, "W_D": 5.978987208112789, "J_D": 85.03094157409672, "W_D_1KI": 3.0915135512475644, "J_D_1KI": 1.598507523912908} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..048fad0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.6361579895019531} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR 
tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 88, ..., 249912, 249956, + 250000]), + col_indices=tensor([ 150, 155, 160, ..., 4906, 4918, 4974]), + values=tensor([0.5565, 0.2611, 0.6011, ..., 0.5545, 0.3341, 0.9118]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3996, 0.8652, 0.4997, ..., 0.5638, 0.5133, 0.5074]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.6361579895019531 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1650 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 8.95723843574524} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 48, 105, ..., 249902, 249944, + 250000]), + col_indices=tensor([ 142, 192, 269, ..., 4444, 4647, 4854]), + values=tensor([0.2391, 0.6427, 0.3721, ..., 0.7376, 0.6381, 0.7309]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0904, 0.1755, 0.9906, ..., 0.2784, 0.3906, 0.1798]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 8.95723843574524 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 1934 -ss 5000 -sd 0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.419332265853882} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 67, 123, ..., 249901, 249950, + 250000]), + col_indices=tensor([ 77, 297, 304, ..., 4744, 4962, 4980]), + values=tensor([0.1458, 0.7428, 0.5307, ..., 0.5713, 0.0836, 0.9823]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8374, 0.5393, 0.4548, ..., 0.1972, 0.5711, 0.3877]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.419332265853882 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 67, 123, ..., 249901, 249950, + 250000]), + col_indices=tensor([ 77, 297, 304, ..., 4744, 4962, 4980]), + values=tensor([0.1458, 0.7428, 0.5307, ..., 0.5713, 0.0836, 0.9823]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8374, 0.5393, 0.4548, ..., 0.1972, 0.5711, 0.3877]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.419332265853882 seconds + +[16.44, 16.56, 16.48, 16.6, 16.72, 16.56, 16.72, 16.6, 16.72, 16.64] +[16.6, 16.56, 16.56, 20.2, 20.72, 28.04, 28.92, 29.0, 26.44, 26.36, 20.12, 20.28, 20.2, 20.4] +14.221629619598389 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1934, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.419332265853882, 'TIME_S_1KI': 5.38745205059663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.42124423980715, 'W': 20.84298720811279} +[16.44, 16.56, 16.48, 16.6, 16.72, 16.56, 16.72, 16.6, 16.72, 16.64, 16.68, 16.44, 16.44, 16.2, 16.24, 16.32, 16.56, 16.56, 16.52, 16.32] +297.28 +14.863999999999999 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 1934, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.419332265853882, 'TIME_S_1KI': 5.38745205059663, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 296.42124423980715, 'W': 20.84298720811279, 'J_1KI': 153.26848202678755, 'W_1KI': 10.777139197576416, 'W_D': 5.978987208112789, 'J_D': 85.03094157409672, 'W_D_1KI': 3.0915135512475644, 'J_D_1KI': 1.598507523912908} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..e125259 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 394, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.532436847686768, "TIME_S_1KI": 26.732073217479105, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 305.06099658966065, "W": 21.37670924510136, "J_1KI": 774.2664888062453, "W_1KI": 54.25560722106944, "W_D": 6.54270924510136, "J_D": 93.36916079187395, "W_D_1KI": 16.60586102817604, "J_D_1KI": 42.146855401462034} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..49ea4fe --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": 
"csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 2.876847505569458} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 270, 507, ..., 1249492, + 1249752, 1250000]), + col_indices=tensor([ 22, 47, 49, ..., 4884, 4921, 4983]), + values=tensor([0.5298, 0.6030, 0.6480, ..., 0.1911, 0.5303, 0.8187]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2222, 0.3805, 0.6044, ..., 0.3111, 0.2543, 0.4407]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 2.876847505569458 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 364 -ss 5000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.686374425888062} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 236, 499, ..., 1249504, + 1249745, 1250000]), + col_indices=tensor([ 18, 50, 76, ..., 4926, 4932, 4975]), + values=tensor([0.1676, 0.6835, 0.4526, ..., 0.3904, 0.9402, 0.3969]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.5247, 0.4082, 0.5074, ..., 0.5246, 0.7808, 0.6822]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 9.686374425888062 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 394 -ss 5000 -sd 0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.532436847686768} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 285, 546, ..., 1249527, + 1249741, 1250000]), + col_indices=tensor([ 14, 79, 87, ..., 4936, 4956, 4998]), + values=tensor([0.7519, 0.7358, 0.0306, ..., 0.8162, 0.7664, 0.2246]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.9515, 0.5215, 0.2042, ..., 0.7715, 0.5250, 0.3133]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.532436847686768 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 285, 546, ..., 1249527, + 1249741, 1250000]), + col_indices=tensor([ 14, 79, 87, ..., 4936, 4956, 4998]), + values=tensor([0.7519, 0.7358, 0.0306, ..., 0.8162, 0.7664, 0.2246]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.9515, 0.5215, 0.2042, ..., 0.7715, 0.5250, 0.3133]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.532436847686768 seconds + +[16.32, 16.28, 16.04, 16.2, 16.32, 16.36, 16.36, 16.36, 16.64, 16.6] +[16.72, 16.84, 17.04, 21.76, 24.68, 30.6, 31.68, 29.24, 27.56, 20.44, 20.44, 20.32, 20.44, 20.32] +14.270718336105347 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 394, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.532436847686768, 'TIME_S_1KI': 26.732073217479105, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 305.06099658966065, 'W': 21.37670924510136} +[16.32, 16.28, 16.04, 16.2, 16.32, 16.36, 16.36, 16.36, 16.64, 16.6, 16.32, 16.36, 16.44, 16.6, 16.76, 16.8, 16.64, 16.76, 16.76, 16.76] +296.68 +14.834 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 394, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.532436847686768, 'TIME_S_1KI': 26.732073217479105, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 305.06099658966065, 'W': 21.37670924510136, 'J_1KI': 774.2664888062453, 'W_1KI': 54.25560722106944, 'W_D': 6.54270924510136, 'J_D': 93.36916079187395, 'W_D_1KI': 16.60586102817604, 'J_D_1KI': 42.146855401462034} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..5879ae9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 197, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.453055620193481, "TIME_S_1KI": 53.06119604159128, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 328.8498130035401, "W": 21.582740338257725, "J_1KI": 1669.2883908809142, "W_1KI": 109.55705755460775, 
"W_D": 6.5647403382577245, "J_D": 100.02500140476232, "W_D_1KI": 33.32355501653667, "J_D_1KI": 169.15510160678514} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..45da6ca --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 5.324424982070923} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 475, 961, ..., 2499006, + 2499520, 2500000]), + col_indices=tensor([ 15, 18, 19, ..., 4987, 4990, 4996]), + values=tensor([0.8221, 0.3138, 0.3999, ..., 0.4846, 0.5872, 0.2809]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6625, 0.6086, 0.7821, ..., 0.8108, 0.2752, 0.8534]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 5.324424982070923 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 197 -ss 5000 -sd 0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.453055620193481} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 504, 990, ..., 2498981, + 2499511, 2500000]), + col_indices=tensor([ 3, 15, 19, ..., 4959, 4975, 4978]), + values=tensor([0.1275, 0.4769, 0.3626, ..., 0.0765, 0.7881, 0.0735]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.2182, 0.1034, 0.8832, ..., 0.0679, 0.0105, 0.3546]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.453055620193481 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 504, 990, ..., 2498981, + 2499511, 2500000]), + col_indices=tensor([ 3, 15, 19, ..., 4959, 4975, 4978]), + values=tensor([0.1275, 0.4769, 0.3626, ..., 0.0765, 0.7881, 0.0735]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.2182, 0.1034, 0.8832, ..., 0.0679, 0.0105, 0.3546]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.453055620193481 seconds + +[16.48, 16.56, 16.52, 16.6, 16.92, 17.2, 17.24, 17.32, 17.32, 17.2] +[17.2, 17.08, 16.8, 20.08, 22.0, 28.44, 31.8, 32.0, 28.2, 27.28, 20.16, 20.32, 20.56, 20.56, 20.32] +15.236703395843506 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.453055620193481, 'TIME_S_1KI': 53.06119604159128, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 328.8498130035401, 'W': 21.582740338257725} +[16.48, 16.56, 16.52, 16.6, 16.92, 17.2, 17.24, 17.32, 17.32, 17.2, 16.8, 16.52, 16.44, 16.56, 16.44, 16.36, 16.4, 16.48, 16.24, 16.0] +300.36 +15.018 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.453055620193481, 'TIME_S_1KI': 53.06119604159128, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 328.8498130035401, 'W': 21.582740338257725, 'J_1KI': 1669.2883908809142, 'W_1KI': 109.55705755460775, 'W_D': 6.5647403382577245, 'J_D': 100.02500140476232, 'W_D_1KI': 33.32355501653667, 'J_D_1KI': 169.15510160678514} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..22c3547 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.520986080169678, "TIME_S_1KI": 105.20986080169678, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 343.0604390716553, "W": 22.467293864380782, "J_1KI": 3430.604390716553, "W_1KI": 224.67293864380784, "W_D": 7.879293864380783, "J_D": 120.3115083198548, "W_D_1KI": 78.79293864380782, "J_D_1KI": 787.9293864380782} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..46cc854 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.520986080169678} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1010, 1992, ..., 4998032, + 4999035, 5000000]), + col_indices=tensor([ 7, 20, 27, ..., 4987, 4995, 4999]), + values=tensor([0.6859, 0.1805, 0.1498, ..., 0.8538, 0.5250, 0.8232]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.6117, 0.2257, 0.9695, ..., 0.2383, 0.1591, 0.7207]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.520986080169678 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1010, 1992, ..., 4998032, + 4999035, 5000000]), + col_indices=tensor([ 7, 20, 27, ..., 4987, 4995, 4999]), + values=tensor([0.6859, 0.1805, 0.1498, ..., 0.8538, 0.5250, 0.8232]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.6117, 0.2257, 0.9695, ..., 0.2383, 0.1591, 0.7207]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.520986080169678 seconds + +[16.36, 16.04, 16.16, 15.96, 15.88, 15.88, 16.04, 16.04, 16.28, 16.32] +[16.4, 16.48, 16.8, 20.24, 21.88, 29.72, 32.92, 30.52, 30.52, 30.68, 27.2, 20.4, 20.72, 20.84, 20.72] +15.269326210021973 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.520986080169678, 'TIME_S_1KI': 105.20986080169678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 343.0604390716553, 'W': 22.467293864380782} +[16.36, 16.04, 16.16, 15.96, 15.88, 15.88, 16.04, 16.04, 16.28, 16.32, 16.84, 17.08, 16.68, 16.36, 16.32, 16.0, 16.08, 16.08, 16.04, 16.16] +291.76 +14.588 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.520986080169678, 'TIME_S_1KI': 105.20986080169678, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 343.0604390716553, 'W': 22.467293864380782, 'J_1KI': 3430.604390716553, 'W_1KI': 224.67293864380784, 'W_D': 7.879293864380783, 'J_D': 120.3115083198548, 'W_D_1KI': 78.79293864380782, 'J_D_1KI': 787.9293864380782} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..1e920c1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 
25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 16.019237279891968, "TIME_S_1KI": 160.19237279891968, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 445.7399830436707, "W": 20.90802632853554, "J_1KI": 4457.399830436707, "W_1KI": 209.08026328535541, "W_D": 5.958026328535542, "J_D": 127.0196676111222, "W_D_1KI": 59.58026328535542, "J_D_1KI": 595.8026328535542} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..039e7ba --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 16.019237279891968} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1526, 3031, ..., 7497025, + 7498559, 7500000]), + col_indices=tensor([ 0, 3, 7, ..., 4995, 4996, 4999]), + values=tensor([0.4727, 0.1556, 0.1081, ..., 0.9285, 0.0937, 0.3872]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.9962, 0.8284, 0.0086, ..., 0.9887, 0.5066, 0.4274]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 16.019237279891968 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1526, 3031, ..., 7497025, + 7498559, 7500000]), + col_indices=tensor([ 0, 3, 7, ..., 4995, 4996, 4999]), + values=tensor([0.4727, 0.1556, 0.1081, ..., 0.9285, 0.0937, 0.3872]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.9962, 0.8284, 0.0086, ..., 0.9887, 0.5066, 0.4274]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 16.019237279891968 seconds + +[16.44, 16.68, 16.72, 16.56, 16.6, 16.76, 16.84, 16.84, 16.68, 16.52] +[16.44, 16.48, 16.8, 17.84, 18.92, 25.84, 27.76, 29.92, 29.96, 29.48, 23.32, 23.32, 20.24, 20.12, 20.24, 20.04, 20.04, 20.24, 20.28, 20.2, 20.12] +21.319084644317627 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 16.019237279891968, 'TIME_S_1KI': 160.19237279891968, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 445.7399830436707, 'W': 20.90802632853554} +[16.44, 16.68, 16.72, 16.56, 16.6, 16.76, 16.84, 16.84, 16.68, 16.52, 16.44, 16.44, 16.56, 16.56, 16.72, 16.64, 16.56, 16.52, 16.4, 16.44] +299.0 +14.95 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 16.019237279891968, 'TIME_S_1KI': 160.19237279891968, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 445.7399830436707, 'W': 20.90802632853554, 'J_1KI': 4457.399830436707, 'W_1KI': 209.08026328535541, 'W_D': 5.958026328535542, 'J_D': 127.0196676111222, 'W_D_1KI': 59.58026328535542, 'J_D_1KI': 595.8026328535542} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..d6dabaf --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.045433044433594, "TIME_S_1KI": 210.45433044433594, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 590.3603710937499, "W": 21.581829265499348, "J_1KI": 5903.603710937499, "W_1KI": 215.81829265499348, "W_D": 6.281829265499347, "J_D": 171.83636339187612, "W_D_1KI": 62.81829265499347, "J_D_1KI": 628.1829265499347} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..bcbbf0c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 21.045433044433594} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2076, 4151, ..., 9996002, + 9998013, 10000000]), + col_indices=tensor([ 1, 14, 15, ..., 4994, 4998, 4999]), + values=tensor([0.5831, 0.7619, 0.5912, ..., 0.7349, 0.2932, 0.8119]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6981, 0.1886, 0.6279, ..., 0.1836, 0.5536, 0.9370]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 21.045433044433594 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2076, 4151, ..., 9996002, + 9998013, 10000000]), + col_indices=tensor([ 1, 14, 15, ..., 4994, 4998, 4999]), + values=tensor([0.5831, 0.7619, 0.5912, ..., 0.7349, 0.2932, 0.8119]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6981, 0.1886, 0.6279, ..., 0.1836, 0.5536, 0.9370]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 21.045433044433594 seconds + +[16.72, 16.72, 16.28, 16.36, 16.28, 17.72, 18.32, 19.12, 19.12, 19.28] +[18.6, 17.76, 17.4, 20.72, 22.2, 26.08, 32.2, 30.12, 32.12, 31.16, 23.52, 23.52, 22.96, 20.28, 20.12, 20.24, 20.28, 20.28, 20.32, 20.32, 20.2, 20.12, 20.28, 20.28, 20.48, 20.6, 20.48] +27.35451030731201 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.045433044433594, 'TIME_S_1KI': 210.45433044433594, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 590.3603710937499, 'W': 21.581829265499348} +[16.72, 16.72, 16.28, 16.36, 16.28, 17.72, 18.32, 19.12, 19.12, 19.28, 16.44, 16.48, 16.36, 16.32, 16.32, 16.52, 16.72, 16.56, 16.4, 16.36] +306.0 +15.3 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 21.045433044433594, 'TIME_S_1KI': 210.45433044433594, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 590.3603710937499, 'W': 21.581829265499348, 'J_1KI': 5903.603710937499, 'W_1KI': 215.81829265499348, 'W_D': 6.281829265499347, 'J_D': 171.83636339187612, 'W_D_1KI': 62.81829265499347, 'J_D_1KI': 628.1829265499347} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..edd5f52 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, 
"ITERATIONS": 100, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.53720736503601, "TIME_S_1KI": 265.3720736503601, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 691.3309565734863, "W": 20.656596648884427, "J_1KI": 6913.309565734863, "W_1KI": 206.56596648884428, "W_D": 5.692596648884429, "J_D": 190.5187167835236, "W_D_1KI": 56.92596648884429, "J_D_1KI": 569.2596648884429} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..7bfb817 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,45 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 26.53720736503601} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2531, 5099, ..., 12494952, + 12497469, 12500000]), + col_indices=tensor([ 0, 7, 9, ..., 4997, 4998, 4999]), + values=tensor([0.6564, 0.0127, 0.9586, ..., 0.9277, 0.7224, 0.6295]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9909, 0.2884, 0.1156, ..., 0.4898, 0.4767, 0.4308]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 26.53720736503601 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2531, 5099, ..., 12494952, + 12497469, 12500000]), + col_indices=tensor([ 0, 7, 9, ..., 4997, 4998, 4999]), + values=tensor([0.6564, 0.0127, 0.9586, ..., 0.9277, 0.7224, 0.6295]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9909, 0.2884, 0.1156, ..., 0.4898, 0.4767, 0.4308]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 26.53720736503601 seconds + +[16.56, 16.48, 16.36, 16.32, 16.28, 16.28, 16.52, 16.48, 16.8, 16.84] +[16.84, 16.88, 16.84, 18.36, 18.96, 19.92, 26.16, 27.2, 30.16, 30.2, 27.92, 24.32, 23.48, 20.0, 20.0, 20.32, 20.36, 20.32, 20.48, 20.28, 20.2, 20.28, 20.2, 20.12, 20.08, 19.92, 19.92, 19.92, 20.04, 20.04, 20.2, 20.32, 20.16] +33.4678053855896 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.53720736503601, 'TIME_S_1KI': 265.3720736503601, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 691.3309565734863, 'W': 20.656596648884427} +[16.56, 16.48, 16.36, 16.32, 16.28, 16.28, 16.52, 16.48, 16.8, 16.84, 16.6, 16.96, 16.88, 17.08, 17.04, 16.88, 16.6, 16.6, 16.52, 16.4] +299.28 +14.963999999999999 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 100, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 26.53720736503601, 'TIME_S_1KI': 265.3720736503601, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 691.3309565734863, 'W': 20.656596648884427, 'J_1KI': 6913.309565734863, 'W_1KI': 206.56596648884428, 'W_D': 5.692596648884429, 'J_D': 190.5187167835236, 'W_D_1KI': 56.92596648884429, 'J_D_1KI': 569.2596648884429} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..50a61b1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 277466, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.306395292282104, "TIME_S_1KI": 0.037144714279522914, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 274.4401516246796, "W": 19.314621204373413, "J_1KI": 0.9890947057465764, "W_1KI": 0.06961076746114267, "W_D": 4.503621204373411, "J_D": 63.99165032076835, "W_D_1KI": 0.016231254295565625, "J_D_1KI": 5.849817381432545e-05} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..19817c6 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, 
"MATRIX_DENSITY": 1e-05, "TIME_S": 0.012943029403686523} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 249, 250]), + col_indices=tensor([1969, 3660, 2508, 3210, 4469, 2580, 1634, 4852, 2880, + 4171, 145, 1923, 1759, 292, 976, 2609, 4097, 2004, + 2150, 921, 866, 680, 1106, 3111, 2786, 1102, 3351, + 842, 4401, 1712, 443, 1843, 4964, 3710, 2157, 2831, + 4456, 4953, 180, 4915, 1729, 1065, 1121, 4037, 4202, + 790, 3058, 490, 2402, 821, 2529, 697, 3606, 4981, + 1841, 2976, 2610, 4155, 4902, 3543, 1747, 1961, 3342, + 3548, 4564, 2128, 4095, 930, 3054, 2352, 4784, 3523, + 1341, 4422, 1218, 1510, 1390, 865, 1724, 166, 3901, + 1272, 2149, 1488, 1813, 2931, 985, 697, 3723, 958, + 3733, 65, 4612, 4936, 1010, 4190, 432, 2031, 1484, + 3237, 225, 3100, 3677, 30, 1750, 2122, 2814, 1486, + 4879, 4720, 3646, 3648, 1716, 344, 4485, 818, 3342, + 1481, 4585, 2091, 1043, 1961, 466, 2604, 1267, 2219, + 379, 4743, 1233, 4480, 2900, 1400, 1058, 3165, 3216, + 1136, 4573, 4401, 106, 3885, 340, 2591, 62, 4321, + 4518, 4971, 529, 1453, 2522, 1821, 3359, 1768, 3615, + 560, 3497, 2950, 4004, 3758, 1175, 749, 2979, 552, + 596, 1500, 318, 4113, 4216, 349, 3504, 1704, 2177, + 110, 1213, 2730, 623, 979, 4943, 5, 3100, 4274, + 1196, 1212, 1292, 4837, 2749, 2657, 2151, 116, 3452, + 1048, 4726, 3050, 2235, 3572, 3103, 2468, 336, 256, + 3689, 3259, 4778, 4872, 4322, 4966, 992, 1906, 4507, + 3887, 3908, 4661, 4040, 2063, 4632, 2722, 1787, 3381, + 2148, 4178, 2261, 3851, 1401, 2459, 2100, 1771, 2949, + 4688, 3574, 4745, 4027, 705, 4556, 616, 3800, 1985, + 763, 4632, 1608, 1959, 3621, 896, 2004, 4509, 4069, + 2954, 1079, 553, 3501, 741, 537, 251]), + values=tensor([0.4598, 0.9685, 0.1022, 0.6570, 0.2626, 0.0213, 0.4956, + 0.5380, 0.8437, 0.5470, 0.0848, 0.9620, 0.0264, 0.2693, + 0.6692, 0.6401, 0.9283, 0.8706, 0.6275, 0.5943, 0.3989, + 0.0766, 0.9946, 0.3522, 0.5912, 0.5773, 0.4771, 0.7005, + 0.3650, 0.1154, 0.5630, 0.9184, 0.7700, 0.6786, 0.7164, + 0.8177, 0.4007, 0.8722, 0.7773, 0.2355, 0.0635, 0.3183, + 0.2674, 0.1684, 0.4826, 0.2298, 0.6975, 0.5790, 0.7160, + 0.7090, 0.3192, 0.5709, 0.6972, 0.7933, 0.5030, 0.7005, + 0.6711, 0.9473, 0.3130, 0.7328, 0.0053, 0.6263, 0.3400, + 0.8317, 0.2127, 0.6912, 0.5279, 0.3351, 0.9454, 0.0502, + 0.5418, 0.3942, 0.6647, 0.1585, 0.1151, 0.4637, 0.2766, + 0.6645, 0.2561, 0.3586, 0.9007, 0.5641, 0.9024, 0.0924, + 0.7442, 0.4202, 0.9456, 0.3672, 0.6816, 0.6385, 0.3813, + 0.9352, 0.7740, 0.6736, 0.3753, 0.4691, 0.5509, 0.9780, + 0.7039, 0.8897, 0.5298, 0.1267, 0.2404, 0.1878, 0.8542, + 0.5178, 0.8945, 0.4190, 0.8436, 0.0710, 0.7443, 0.1508, + 0.8032, 0.7493, 0.7469, 0.6805, 0.5330, 0.9512, 0.6693, + 0.2875, 0.6060, 0.0101, 0.6329, 0.2104, 0.2244, 0.8216, + 0.9850, 0.4320, 0.6288, 0.3139, 0.5255, 0.8128, 0.7760, + 0.1620, 0.7643, 0.1907, 0.2993, 0.8513, 0.3012, 0.8852, + 0.2619, 0.0229, 0.3957, 0.9602, 0.2258, 0.9232, 0.3917, + 0.9188, 0.1363, 0.2426, 0.1136, 0.6949, 0.1461, 0.1346, + 0.9068, 0.3140, 0.4405, 0.5547, 0.7829, 0.5939, 0.7712, + 0.2027, 0.1672, 0.0139, 0.3950, 0.5428, 0.8675, 0.7414, + 0.0223, 0.4081, 0.0576, 0.0795, 0.5466, 0.8051, 0.7481, + 0.9772, 0.6723, 0.3420, 
0.3703, 0.9258, 0.7375, 0.0725, + 0.3308, 0.3117, 0.7279, 0.7861, 0.0210, 0.0730, 0.7490, + 0.2970, 0.3351, 0.7560, 0.6932, 0.1496, 0.7733, 0.0425, + 0.6190, 0.6584, 0.3857, 0.2533, 0.8724, 0.8049, 0.1764, + 0.5947, 0.7425, 0.4336, 0.4669, 0.5320, 0.4576, 0.7077, + 0.9652, 0.4590, 0.0046, 0.8577, 0.7045, 0.2574, 0.6998, + 0.6151, 0.3438, 0.0111, 0.9822, 0.5390, 0.2832, 0.0368, + 0.2525, 0.0950, 0.9720, 0.8354, 0.5121, 0.4167, 0.7493, + 0.0233, 0.0628, 0.4681, 0.4028, 0.7881, 0.6370, 0.0526, + 0.9211, 0.1381, 0.0723, 0.8459, 0.6295, 0.9210, 0.7735, + 0.6887, 0.7409, 0.5606, 0.9654, 0.1055]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.8735, 0.3833, 0.2694, ..., 0.9142, 0.8243, 0.1368]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.012943029403686523 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 81124 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.069929599761963} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 249, 250, 250]), + col_indices=tensor([4942, 4831, 3020, 4398, 15, 273, 1128, 112, 3723, + 3585, 170, 3865, 4631, 2789, 948, 1973, 2152, 1168, + 2755, 4895, 2445, 616, 942, 1070, 15, 1656, 1361, + 381, 4008, 517, 700, 1825, 1420, 3779, 3711, 3163, + 2300, 1341, 3184, 1297, 1550, 2711, 522, 3966, 1184, + 705, 4226, 2826, 2546, 3410, 4394, 1120, 1279, 3317, + 623, 3259, 183, 314, 3816, 2320, 512, 2393, 3294, + 3842, 3425, 455, 1652, 4397, 4874, 478, 513, 654, + 1631, 4536, 2192, 2601, 483, 4710, 64, 1715, 545, + 2242, 2044, 2755, 1543, 1597, 1787, 451, 1992, 2258, + 3437, 2378, 4190, 1069, 538, 3841, 4601, 4983, 2955, + 805, 2644, 3782, 688, 4775, 1057, 1887, 2840, 250, + 1640, 4588, 2355, 3842, 4628, 1450, 1276, 2776, 1876, + 703, 2888, 1093, 3936, 2831, 3151, 3865, 1871, 2237, + 1038, 3246, 1430, 323, 1681, 4674, 4158, 2884, 296, + 1248, 3609, 4395, 2571, 3253, 2353, 1909, 2894, 3695, + 4937, 191, 309, 1354, 2926, 2944, 4425, 3127, 3554, + 4775, 2727, 2655, 3557, 3449, 1163, 2318, 4832, 772, + 2251, 4810, 985, 1751, 1979, 3306, 2880, 4074, 740, + 443, 4107, 3879, 2043, 2031, 1254, 2409, 1790, 4884, + 4795, 4046, 3848, 914, 1554, 1268, 549, 3310, 1243, + 1703, 3704, 1174, 859, 2408, 4434, 1686, 3699, 3911, + 241, 4764, 2817, 4123, 1459, 2878, 3106, 16, 1449, + 1804, 3917, 2039, 916, 3993, 4637, 4103, 646, 344, + 4563, 2694, 3833, 3678, 2981, 1194, 2210, 1306, 1590, + 4934, 1620, 3680, 1815, 2507, 2898, 4255, 91, 4315, + 1006, 2747, 1763, 4043, 3117, 1987, 1941, 903, 4871, + 2123, 2041, 2574, 168, 2922, 2931, 3435]), + values=tensor([0.2833, 0.5136, 0.0459, 0.1040, 0.7712, 0.7813, 0.1004, + 0.0062, 0.4357, 0.2247, 0.8578, 0.6295, 0.0947, 0.0842, + 0.8159, 0.8756, 0.7754, 0.3890, 0.9475, 0.7902, 0.1690, + 0.2878, 0.8893, 0.3483, 0.2502, 0.4294, 0.6224, 0.2795, + 0.6981, 0.6980, 0.8634, 0.5843, 0.9074, 0.4490, 0.0617, + 
0.7705, 0.8034, 0.4257, 0.7807, 0.9320, 0.2211, 0.0095, + 0.0758, 0.1987, 0.1954, 0.2907, 0.0185, 0.5826, 0.8663, + 0.7792, 0.8892, 0.0709, 0.3218, 0.2257, 0.4393, 0.5281, + 0.5105, 0.4052, 0.7907, 0.7483, 0.7892, 0.7607, 0.5190, + 0.1815, 0.1591, 0.5976, 0.6474, 0.6874, 0.8319, 0.0260, + 0.0388, 0.4193, 0.6536, 0.3715, 0.0266, 0.9481, 0.0870, + 0.8892, 0.7519, 0.8104, 0.4144, 0.7611, 0.0463, 0.1582, + 0.3558, 0.3084, 0.5278, 0.9900, 0.2019, 0.3355, 0.5727, + 0.0312, 0.7009, 0.2438, 0.6987, 0.0688, 0.4630, 0.8762, + 0.0429, 0.9174, 0.3364, 0.0108, 0.6176, 0.8302, 0.3550, + 0.6954, 0.9373, 0.8688, 0.2691, 0.2429, 0.5154, 0.3210, + 0.8363, 0.5592, 0.6375, 0.9608, 0.3593, 0.4214, 0.9371, + 0.5875, 0.2839, 0.6313, 0.8389, 0.0214, 0.7557, 0.6381, + 0.6212, 0.9792, 0.4905, 0.7606, 0.5632, 0.9431, 0.6739, + 0.1004, 0.5870, 0.3454, 0.2936, 0.8579, 0.0211, 0.1297, + 0.0434, 0.1458, 0.3630, 0.6936, 0.4422, 0.1285, 0.0197, + 0.5356, 0.2039, 0.0330, 0.8242, 0.3233, 0.8126, 0.8089, + 0.1323, 0.4931, 0.0051, 0.9759, 0.3736, 0.9694, 0.3810, + 0.6330, 0.9848, 0.5658, 0.7909, 0.5722, 0.8562, 0.9056, + 0.3408, 0.6105, 0.9888, 0.2522, 0.9582, 0.6931, 0.8565, + 0.8791, 0.4252, 0.0752, 0.4302, 0.2072, 0.9998, 0.0920, + 0.9465, 0.9645, 0.2478, 0.6900, 0.8499, 0.9862, 0.6104, + 0.7144, 0.8192, 0.7493, 0.2478, 0.5926, 0.6255, 0.9983, + 0.1475, 0.4227, 0.7128, 0.3703, 0.4025, 0.7491, 0.2392, + 0.8266, 0.0100, 0.6364, 0.4916, 0.8482, 0.7480, 0.7567, + 0.6271, 0.0847, 0.4248, 0.2642, 0.0890, 0.5453, 0.8654, + 0.6751, 0.0013, 0.9619, 0.9277, 0.1302, 0.1956, 0.7206, + 0.7741, 0.7104, 0.3550, 0.2532, 0.0939, 0.7434, 0.5649, + 0.0455, 0.1135, 0.6381, 0.8138, 0.5254, 0.5858, 0.1065, + 0.1493, 0.3104, 0.8119, 0.6904, 0.9596, 0.5459, 0.5380, + 0.4871, 0.4126, 0.3848, 0.8347, 0.6321]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.3638, 0.9432, 0.7166, ..., 0.3961, 0.0448, 0.0792]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 3.069929599761963 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 277466 -ss 5000 -sd 1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.306395292282104} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([2530, 1671, 1710, 529, 4147, 2671, 774, 3521, 3142, + 4079, 2231, 3137, 198, 1214, 3263, 994, 2667, 2294, + 182, 3631, 1055, 2979, 71, 3078, 4821, 3439, 3949, + 2018, 1636, 1734, 4146, 1819, 670, 2737, 4839, 929, + 652, 4064, 1709, 1446, 637, 485, 3208, 2342, 4556, + 3470, 1760, 3837, 1164, 826, 2500, 338, 4213, 1539, + 2699, 7, 3593, 628, 2634, 1851, 2277, 2906, 1873, + 3675, 109, 933, 39, 1888, 3153, 1802, 2749, 4653, + 10, 1407, 3436, 4501, 1652, 4732, 4648, 3990, 3869, + 1528, 3105, 3115, 2926, 448, 1508, 3766, 414, 0, + 99, 1356, 732, 2391, 4307, 374, 3096, 3847, 1168, + 2149, 2270, 3071, 2538, 4038, 1887, 3751, 3671, 1345, + 271, 1144, 828, 1558, 3741, 642, 1130, 26, 2512, + 1351, 4437, 62, 3040, 3132, 4639, 4608, 3358, 1316, + 2346, 4344, 2385, 4204, 358, 3346, 4011, 297, 728, + 1635, 4143, 3858, 4661, 2365, 4156, 4923, 3921, 4212, + 419, 1025, 1912, 1997, 3589, 965, 4863, 581, 2400, + 2128, 2335, 3936, 4843, 1018, 2088, 3052, 1843, 3652, + 3264, 2342, 212, 2423, 2603, 526, 4357, 2538, 2326, + 4304, 4490, 19, 2158, 4734, 2481, 4574, 1764, 4922, + 1924, 1668, 665, 3628, 4416, 779, 3359, 3281, 668, + 2259, 684, 1693, 262, 2335, 2116, 2444, 285, 472, + 1695, 1989, 2831, 2933, 4834, 3892, 2679, 43, 338, + 1143, 3133, 3290, 2874, 3505, 1654, 2420, 3323, 4487, + 4528, 2876, 3002, 3959, 635, 1503, 2493, 4974, 3994, + 3304, 3215, 2609, 4509, 2631, 2777, 683, 3623, 3596, + 2685, 115, 2166, 1456, 3440, 4502, 1541, 136, 4160, + 2313, 2928, 4917, 3863, 3827, 2109, 4794]), + values=tensor([0.1255, 0.3198, 0.8133, 0.3742, 0.0163, 0.1439, 0.7607, + 0.6784, 0.8830, 0.0545, 0.8528, 0.7242, 0.8352, 0.4737, + 0.9256, 0.7090, 0.7451, 0.5297, 0.6794, 0.7283, 0.9067, + 0.0313, 0.2449, 0.1565, 0.6919, 0.4035, 0.9905, 0.2192, + 0.0562, 0.4841, 0.8665, 0.8712, 0.9887, 0.8805, 0.4264, + 0.9291, 0.7188, 0.3153, 0.5767, 0.0112, 0.8354, 0.4919, + 0.1313, 0.2676, 0.8495, 0.9700, 0.8615, 0.6450, 0.0071, + 0.4545, 0.8713, 0.2228, 0.4878, 0.1926, 0.0886, 0.8092, + 0.4330, 0.1067, 0.1112, 0.2683, 0.4340, 0.7229, 0.1649, + 0.0932, 0.0193, 0.5783, 0.2193, 0.3091, 0.4364, 0.5673, + 0.8010, 0.5772, 0.0521, 0.5829, 0.4101, 0.3786, 0.0283, + 0.4786, 0.3304, 0.3446, 0.7315, 0.6206, 0.8294, 0.4404, + 0.4676, 0.0871, 0.3497, 0.0069, 0.9043, 0.8947, 0.1952, + 0.6809, 0.4255, 0.1696, 0.7442, 0.9124, 0.8603, 0.9907, + 0.1133, 0.2677, 0.6551, 0.8223, 0.8137, 0.2411, 0.5924, + 0.7002, 0.4248, 0.2041, 0.8601, 0.8179, 0.6180, 0.7986, + 0.0067, 0.6255, 0.0265, 0.4455, 0.0788, 0.2798, 0.3073, + 0.9253, 0.6087, 0.7948, 0.9058, 0.2527, 0.3922, 0.9638, + 0.1626, 0.4231, 0.5916, 0.0663, 0.3747, 0.8133, 0.1672, + 0.4958, 0.1234, 0.2670, 0.0752, 0.3763, 0.6411, 0.3294, + 0.4132, 0.2682, 0.3319, 0.1004, 0.6692, 0.2485, 0.0663, + 0.1318, 0.4180, 0.2011, 0.4748, 0.2487, 0.0200, 0.3002, + 0.6475, 0.2552, 0.7456, 0.9304, 0.8959, 0.8069, 0.8309, + 0.8055, 0.5114, 0.9547, 0.4277, 0.3391, 0.6653, 0.7441, + 0.9317, 0.2522, 0.9794, 0.9450, 0.7609, 0.7552, 0.3464, + 0.2683, 0.6131, 0.7507, 0.3858, 0.2947, 0.5291, 0.7914, + 0.4452, 0.6309, 0.6569, 0.3974, 0.5452, 0.9065, 0.8000, + 0.7314, 0.8661, 0.0826, 0.0659, 0.8406, 0.3397, 0.6235, + 0.6886, 0.4334, 0.9899, 0.6808, 0.0386, 0.9324, 0.6160, + 0.2724, 0.3632, 0.4386, 0.4733, 0.7494, 0.2806, 0.7238, + 0.0116, 0.8061, 0.3580, 0.8134, 0.7511, 0.4690, 0.9418, + 0.0495, 0.8282, 0.9024, 0.7411, 0.2424, 0.5263, 0.6983, + 0.9412, 0.6025, 0.1977, 0.9907, 0.4170, 0.2685, 
0.9711, + 0.6755, 0.6817, 0.5130, 0.8481, 0.9901, 0.9980, 0.3527, + 0.5949, 0.5533, 0.2777, 0.4754, 0.0948, 0.6148, 0.7233, + 0.0545, 0.7637, 0.1155, 0.4005, 0.7155]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.1431, 0.4969, 0.0611, ..., 0.8896, 0.3924, 0.7446]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.306395292282104 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([2530, 1671, 1710, 529, 4147, 2671, 774, 3521, 3142, + 4079, 2231, 3137, 198, 1214, 3263, 994, 2667, 2294, + 182, 3631, 1055, 2979, 71, 3078, 4821, 3439, 3949, + 2018, 1636, 1734, 4146, 1819, 670, 2737, 4839, 929, + 652, 4064, 1709, 1446, 637, 485, 3208, 2342, 4556, + 3470, 1760, 3837, 1164, 826, 2500, 338, 4213, 1539, + 2699, 7, 3593, 628, 2634, 1851, 2277, 2906, 1873, + 3675, 109, 933, 39, 1888, 3153, 1802, 2749, 4653, + 10, 1407, 3436, 4501, 1652, 4732, 4648, 3990, 3869, + 1528, 3105, 3115, 2926, 448, 1508, 3766, 414, 0, + 99, 1356, 732, 2391, 4307, 374, 3096, 3847, 1168, + 2149, 2270, 3071, 2538, 4038, 1887, 3751, 3671, 1345, + 271, 1144, 828, 1558, 3741, 642, 1130, 26, 2512, + 1351, 4437, 62, 3040, 3132, 4639, 4608, 3358, 1316, + 2346, 4344, 2385, 4204, 358, 3346, 4011, 297, 728, + 1635, 4143, 3858, 4661, 2365, 4156, 4923, 3921, 4212, + 419, 1025, 1912, 1997, 3589, 965, 4863, 581, 2400, + 2128, 2335, 3936, 4843, 1018, 2088, 3052, 1843, 3652, + 3264, 2342, 212, 2423, 2603, 526, 4357, 2538, 2326, + 4304, 4490, 19, 2158, 4734, 2481, 4574, 1764, 4922, + 1924, 1668, 665, 3628, 4416, 779, 3359, 3281, 668, + 2259, 684, 1693, 262, 2335, 2116, 2444, 285, 472, + 1695, 1989, 2831, 2933, 4834, 3892, 2679, 43, 338, + 1143, 3133, 3290, 2874, 3505, 1654, 2420, 3323, 4487, + 4528, 2876, 3002, 3959, 635, 1503, 2493, 4974, 3994, + 3304, 3215, 2609, 4509, 2631, 2777, 683, 3623, 3596, + 2685, 115, 2166, 1456, 3440, 4502, 1541, 136, 4160, + 2313, 2928, 4917, 3863, 3827, 2109, 4794]), + values=tensor([0.1255, 0.3198, 0.8133, 0.3742, 0.0163, 0.1439, 0.7607, + 0.6784, 0.8830, 0.0545, 0.8528, 0.7242, 0.8352, 0.4737, + 0.9256, 0.7090, 0.7451, 0.5297, 0.6794, 0.7283, 0.9067, + 0.0313, 0.2449, 0.1565, 0.6919, 0.4035, 0.9905, 0.2192, + 0.0562, 0.4841, 0.8665, 0.8712, 0.9887, 0.8805, 0.4264, + 0.9291, 0.7188, 0.3153, 0.5767, 0.0112, 0.8354, 0.4919, + 0.1313, 0.2676, 0.8495, 0.9700, 0.8615, 0.6450, 0.0071, + 0.4545, 0.8713, 0.2228, 0.4878, 0.1926, 0.0886, 0.8092, + 0.4330, 0.1067, 0.1112, 0.2683, 0.4340, 0.7229, 0.1649, + 0.0932, 0.0193, 0.5783, 0.2193, 0.3091, 0.4364, 0.5673, + 0.8010, 0.5772, 0.0521, 0.5829, 0.4101, 0.3786, 0.0283, + 0.4786, 0.3304, 0.3446, 0.7315, 0.6206, 0.8294, 0.4404, + 0.4676, 0.0871, 0.3497, 0.0069, 0.9043, 0.8947, 0.1952, + 0.6809, 0.4255, 0.1696, 0.7442, 0.9124, 0.8603, 0.9907, + 0.1133, 0.2677, 0.6551, 0.8223, 0.8137, 0.2411, 0.5924, + 0.7002, 0.4248, 0.2041, 0.8601, 0.8179, 0.6180, 0.7986, + 0.0067, 0.6255, 0.0265, 0.4455, 0.0788, 0.2798, 0.3073, + 0.9253, 0.6087, 0.7948, 0.9058, 0.2527, 0.3922, 0.9638, + 0.1626, 0.4231, 0.5916, 0.0663, 0.3747, 
0.8133, 0.1672, + 0.4958, 0.1234, 0.2670, 0.0752, 0.3763, 0.6411, 0.3294, + 0.4132, 0.2682, 0.3319, 0.1004, 0.6692, 0.2485, 0.0663, + 0.1318, 0.4180, 0.2011, 0.4748, 0.2487, 0.0200, 0.3002, + 0.6475, 0.2552, 0.7456, 0.9304, 0.8959, 0.8069, 0.8309, + 0.8055, 0.5114, 0.9547, 0.4277, 0.3391, 0.6653, 0.7441, + 0.9317, 0.2522, 0.9794, 0.9450, 0.7609, 0.7552, 0.3464, + 0.2683, 0.6131, 0.7507, 0.3858, 0.2947, 0.5291, 0.7914, + 0.4452, 0.6309, 0.6569, 0.3974, 0.5452, 0.9065, 0.8000, + 0.7314, 0.8661, 0.0826, 0.0659, 0.8406, 0.3397, 0.6235, + 0.6886, 0.4334, 0.9899, 0.6808, 0.0386, 0.9324, 0.6160, + 0.2724, 0.3632, 0.4386, 0.4733, 0.7494, 0.2806, 0.7238, + 0.0116, 0.8061, 0.3580, 0.8134, 0.7511, 0.4690, 0.9418, + 0.0495, 0.8282, 0.9024, 0.7411, 0.2424, 0.5263, 0.6983, + 0.9412, 0.6025, 0.1977, 0.9907, 0.4170, 0.2685, 0.9711, + 0.6755, 0.6817, 0.5130, 0.8481, 0.9901, 0.9980, 0.3527, + 0.5949, 0.5533, 0.2777, 0.4754, 0.0948, 0.6148, 0.7233, + 0.0545, 0.7637, 0.1155, 0.4005, 0.7155]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.1431, 0.4969, 0.0611, ..., 0.8896, 0.3924, 0.7446]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.306395292282104 seconds + +[16.56, 16.4, 16.72, 16.72, 16.84, 16.92, 17.04, 16.64, 16.64, 16.64] +[16.68, 16.8, 19.52, 20.28, 22.4, 23.28, 23.28, 24.04, 21.56, 21.2, 20.08, 19.92, 19.76, 19.72] +14.208932638168335 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 277466, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.306395292282104, 'TIME_S_1KI': 0.037144714279522914, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 274.4401516246796, 'W': 19.314621204373413} +[16.56, 16.4, 16.72, 16.72, 16.84, 16.92, 17.04, 16.64, 16.64, 16.64, 16.24, 16.2, 16.2, 16.48, 16.12, 16.2, 16.2, 16.16, 16.0, 16.04] +296.22 +14.811000000000002 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 277466, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.306395292282104, 'TIME_S_1KI': 0.037144714279522914, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 274.4401516246796, 'W': 19.314621204373413, 'J_1KI': 0.9890947057465764, 'W_1KI': 0.06961076746114267, 'W_D': 4.503621204373411, 'J_D': 63.99165032076835, 'W_D_1KI': 0.016231254295565625, 'J_D_1KI': 5.849817381432545e-05} diff --git a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..ed92a63 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Altra", "CORES": 80, "ITERATIONS": 147819, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.450809717178345, "TIME_S_1KI": 0.07070004341240534, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 270.7087575721741, "W": 19.037186911173507, "J_1KI": 1.831352921966554, "W_1KI": 0.12878714448868891, "W_D": 4.033186911173505, "J_D": 57.35190933799741, "W_D_1KI": 0.027284631279967428, "J_D_1KI": 0.00018458135476472868} diff --git 
a/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..3732e5f --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/altra_max_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 100 -ss 5000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.015123844146728516} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([3446, 3211, 1459, ..., 3404, 1400, 4328]), + values=tensor([0.9299, 0.4025, 0.7514, ..., 0.4501, 0.7034, 0.4301]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.5652, 0.8868, 0.6802, ..., 0.3723, 0.2839, 0.7363]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.015123844146728516 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 69426 -ss 5000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.931497573852539} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1250, 1250, 1250]), + col_indices=tensor([2025, 3924, 898, ..., 1281, 3893, 4108]), + values=tensor([0.0304, 0.7639, 0.3864, ..., 0.2285, 0.7727, 0.3264]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.1740, 0.1105, 0.4664, ..., 0.6286, 0.2183, 0.7582]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 4.931497573852539 seconds + +['apptainer', 'run', 'pytorch-altra.sif', '-c', 'numactl --cpunodebind=0 --membind=0 python3 spmv.py synthetic csr 147819 -ss 5000 -sd 5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.450809717178345} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 1250, 1250, 1250]), + col_indices=tensor([4384, 3481, 3445, ..., 2544, 3259, 4103]), + values=tensor([0.0351, 0.6486, 0.3996, ..., 0.8336, 0.2691, 0.7388]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.9301, 0.0450, 0.3581, ..., 0.7152, 0.3980, 0.2103]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.450809717178345 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at /space/jenkins/workspace/Releases/pytorch-dls/pytorch-dls/aten/src/ATen/SparseCsrTensorImpl.cpp:55.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 1250, 1250, 1250]), + col_indices=tensor([4384, 3481, 3445, ..., 2544, 3259, 4103]), + values=tensor([0.0351, 0.6486, 0.3996, ..., 0.8336, 0.2691, 0.7388]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.9301, 0.0450, 0.3581, ..., 0.7152, 0.3980, 0.2103]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.450809717178345 seconds + +[16.36, 16.36, 16.6, 16.8, 16.76, 16.88, 16.88, 16.64, 16.44, 16.48] +[16.72, 16.76, 17.08, 20.92, 22.6, 23.2, 24.36, 21.56, 20.88, 20.88, 19.84, 19.92, 19.92, 20.04] +14.219997882843018 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 147819, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.450809717178345, 'TIME_S_1KI': 0.07070004341240534, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 270.7087575721741, 'W': 19.037186911173507} +[16.36, 16.36, 16.6, 16.8, 16.76, 16.88, 16.88, 16.64, 16.44, 16.48, 16.52, 16.72, 16.68, 16.76, 16.76, 16.76, 16.6, 16.68, 16.72, 16.72] +300.08000000000004 +15.004000000000001 +{'CPU': 'Altra', 'CORES': 80, 'ITERATIONS': 147819, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.450809717178345, 'TIME_S_1KI': 0.07070004341240534, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 270.7087575721741, 'W': 19.037186911173507, 'J_1KI': 1.831352921966554, 'W_1KI': 0.12878714448868891, 'W_D': 4.033186911173505, 'J_D': 57.35190933799741, 'W_D_1KI': 0.027284631279967428, 'J_D_1KI': 0.00018458135476472868} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..4ca7cf3 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 67064, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.610118389129639, 
"TIME_S_1KI": 0.15820885108448107, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1755.3175721168518, "W": 134.25, "J_1KI": 26.173767924920252, "W_1KI": 2.0018191578193965, "W_D": 98.104, "J_D": 1282.7089392547607, "W_D_1KI": 1.4628414648693786, "J_D_1KI": 0.021812618765200086} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..6ec2345 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.05684256553649902} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 13, 23, ..., 999979, + 999991, 1000000]), + col_indices=tensor([ 4525, 11074, 13753, ..., 80507, 85385, 86427]), + values=tensor([0.4106, 0.9983, 0.2404, ..., 0.2427, 0.2624, 0.7034]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.4963, 0.1898, 0.5953, ..., 0.4144, 0.1558, 0.0288]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.05684256553649902 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18472', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.8920857906341553} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 17, ..., 999981, + 999989, 1000000]), + col_indices=tensor([ 8653, 22699, 39303, ..., 86578, 89246, 90775]), + values=tensor([0.9948, 0.4799, 0.6025, ..., 0.7759, 0.3812, 0.6990]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.4256, 0.6072, 0.2987, ..., 0.6512, 0.1573, 0.7068]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 2.8920857906341553 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '67064', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.610118389129639} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 22, ..., 999983, + 999993, 1000000]), + col_indices=tensor([27755, 29395, 33124, ..., 74386, 97777, 99456]), + values=tensor([0.7148, 0.9361, 0.5875, ..., 0.1256, 0.4168, 0.2712]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0632, 0.1399, 0.4829, ..., 0.0512, 0.0510, 0.5050]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.610118389129639 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 9, 22, ..., 999983, + 999993, 1000000]), + col_indices=tensor([27755, 29395, 33124, ..., 74386, 97777, 99456]), + values=tensor([0.7148, 0.9361, 0.5875, ..., 0.1256, 0.4168, 0.2712]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0632, 0.1399, 0.4829, ..., 0.0512, 0.0510, 0.5050]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.610118389129639 seconds + +[42.15, 40.1, 39.72, 39.74, 39.84, 40.12, 39.64, 39.58, 39.89, 39.55] +[134.25] +13.074991226196289 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 67064, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.610118389129639, 'TIME_S_1KI': 0.15820885108448107, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1755.3175721168518, 'W': 134.25} +[42.15, 40.1, 39.72, 39.74, 39.84, 40.12, 39.64, 39.58, 39.89, 39.55, 42.7, 40.22, 40.9, 40.07, 39.72, 39.89, 39.69, 39.67, 39.56, 44.74] +722.9200000000001 +36.146 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 67064, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.610118389129639, 'TIME_S_1KI': 0.15820885108448107, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1755.3175721168518, 'W': 134.25, 'J_1KI': 26.173767924920252, 'W_1KI': 2.0018191578193965, 'W_D': 98.104, 'J_D': 1282.7089392547607, 'W_D_1KI': 1.4628414648693786, 'J_D_1KI': 0.021812618765200086} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..97a1aaa --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3865, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.6199471950531, "TIME_S_1KI": 2.7477224308028725, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1736.0991844940186, "W": 125.54, "J_1KI": 449.1847825340281, "W_1KI": 32.48124191461837, "W_D": 89.51725000000002, "J_D": 1237.938702589989, "W_D_1KI": 23.160996119016822, "J_D_1KI": 5.992495761711985} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..ab63507 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.29607152938842773} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support 
is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 108, 205, ..., 9999797, + 9999886, 10000000]), + col_indices=tensor([ 1353, 2200, 3779, ..., 96854, 97028, 97339]), + values=tensor([0.4346, 0.2367, 0.4770, ..., 0.1479, 0.4649, 0.9103]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.4412, 0.7177, 0.2059, ..., 0.3280, 0.0589, 0.7180]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 0.29607152938842773 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3546', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 9.63291883468628} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 92, 201, ..., 9999814, + 9999913, 10000000]), + col_indices=tensor([ 1097, 1389, 2328, ..., 96293, 96542, 99036]), + values=tensor([0.4476, 0.1977, 0.6820, ..., 0.8020, 0.1490, 0.2819]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6506, 0.9195, 0.4022, ..., 0.6497, 0.8706, 0.8621]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 9.63291883468628 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3865', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.6199471950531} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 98, 197, ..., 9999799, + 9999902, 10000000]), + col_indices=tensor([ 2919, 3313, 3728, ..., 97238, 97697, 98577]), + values=tensor([0.4198, 0.7828, 0.7567, ..., 0.6995, 0.0988, 0.1528]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.5695, 0.4144, 0.5638, ..., 0.3431, 0.2067, 0.0841]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.6199471950531 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 98, 197, ..., 9999799, + 9999902, 10000000]), + col_indices=tensor([ 2919, 3313, 3728, ..., 97238, 97697, 98577]), + values=tensor([0.4198, 0.7828, 0.7567, ..., 0.6995, 0.0988, 0.1528]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.5695, 0.4144, 0.5638, ..., 0.3431, 0.2067, 0.0841]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.6199471950531 seconds + +[41.63, 40.06, 40.05, 39.72, 39.88, 39.71, 40.32, 40.32, 40.26, 39.8] +[125.54] +13.829051971435547 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3865, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.6199471950531, 'TIME_S_1KI': 2.7477224308028725, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1736.0991844940186, 'W': 125.54} +[41.63, 40.06, 40.05, 39.72, 39.88, 39.71, 40.32, 40.32, 40.26, 39.8, 40.39, 39.7, 39.76, 39.75, 39.78, 40.5, 39.95, 39.84, 40.11, 39.67] +720.4549999999999 +36.022749999999995 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3865, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.6199471950531, 'TIME_S_1KI': 2.7477224308028725, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1736.0991844940186, 'W': 125.54, 'J_1KI': 449.1847825340281, 'W_1KI': 32.48124191461837, 'W_D': 89.51725000000002, 'J_D': 1237.938702589989, 'W_D_1KI': 23.160996119016822, 'J_D_1KI': 5.992495761711985} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..9be5a17 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 102064, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.524274349212646, "TIME_S_1KI": 0.10311446101674092, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1601.1415306377412, "W": 113.64000000000001, "J_1KI": 15.687622772355983, "W_1KI": 1.1134190311961123, "W_D": 77.54950000000002, "J_D": 1092.6410166331532, "W_D_1KI": 0.7598124706066784, "J_D_1KI": 0.007444470828173288} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..a8ac003 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 
0.051177263259887695} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 100000, 100000, + 100000]), + col_indices=tensor([ 5338, 33433, 17911, ..., 60039, 74427, 45774]), + values=tensor([0.9933, 0.3915, 0.2951, ..., 0.3503, 0.7922, 0.2614]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6274, 0.9288, 0.1155, ..., 0.7548, 0.5951, 0.2372]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.051177263259887695 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20516', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 2.11061429977417} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 2, ..., 99999, 100000, + 100000]), + col_indices=tensor([35938, 84023, 26382, ..., 80961, 25218, 78065]), + values=tensor([0.1771, 0.7263, 0.1955, ..., 0.1569, 0.5183, 0.0872]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.4261, 0.9316, 0.9486, ..., 0.4583, 0.3074, 0.5243]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 2.11061429977417 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '102064', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.524274349212646} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 3, ..., 100000, 100000, + 100000]), + col_indices=tensor([ 7923, 87583, 82060, ..., 33729, 87076, 106]), + values=tensor([0.1731, 0.5965, 0.6757, ..., 0.8844, 0.0621, 0.1000]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.3641, 0.2409, 0.5686, ..., 0.5557, 0.7015, 0.9398]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.524274349212646 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 3, ..., 100000, 100000, + 100000]), + col_indices=tensor([ 7923, 87583, 82060, ..., 33729, 87076, 106]), + values=tensor([0.1731, 0.5965, 0.6757, ..., 0.8844, 0.0621, 0.1000]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.3641, 0.2409, 0.5686, ..., 0.5557, 0.7015, 0.9398]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.524274349212646 seconds + +[40.73, 39.59, 39.74, 39.73, 39.63, 39.83, 40.08, 39.98, 40.09, 39.48] +[113.64] +14.089594602584839 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102064, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.524274349212646, 'TIME_S_1KI': 0.10311446101674092, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1601.1415306377412, 'W': 113.64000000000001} +[40.73, 39.59, 39.74, 39.73, 39.63, 39.83, 40.08, 39.98, 40.09, 39.48, 40.32, 39.82, 39.63, 39.81, 39.64, 39.48, 39.85, 39.99, 44.93, 39.45] +721.81 +36.0905 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102064, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.524274349212646, 'TIME_S_1KI': 0.10311446101674092, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1601.1415306377412, 'W': 113.64000000000001, 'J_1KI': 15.687622772355983, 'W_1KI': 1.1134190311961123, 'W_D': 77.54950000000002, 'J_D': 1092.6410166331532, 'W_D_1KI': 0.7598124706066784, 'J_D_1KI': 0.007444470828173288} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..24bf6da --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 83443, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.331598043441772, "TIME_S_1KI": 0.12381623435688761, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1753.1380189323427, "W": 133.96, "J_1KI": 21.010007057899916, "W_1KI": 1.6054072840142373, "W_D": 97.99700000000001, "J_D": 1282.489298606396, "W_D_1KI": 1.1744184652996656, "J_D_1KI": 0.014074499542198454} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..566b51a --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 
100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.04202604293823242} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 10, ..., 499989, 499993, + 500000]), + col_indices=tensor([44479, 4048, 15938, ..., 81904, 89204, 96058]), + values=tensor([0.8024, 0.2371, 0.1804, ..., 0.7304, 0.5867, 0.0881]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.7637, 0.3596, 0.9771, ..., 0.6123, 0.8042, 0.6339]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.04202604293823242 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '24984', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 3.1438426971435547} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 12, ..., 499992, 499996, + 500000]), + col_indices=tensor([25849, 26475, 42516, ..., 54532, 74351, 87242]), + values=tensor([0.9779, 0.7287, 0.9943, ..., 0.8976, 0.9175, 0.6342]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.3426, 0.5652, 0.2473, ..., 0.4439, 0.3784, 0.0403]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 3.1438426971435547 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '83443', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.331598043441772} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 9, ..., 499992, 499996, + 500000]), + col_indices=tensor([44464, 48782, 50602, ..., 44812, 48851, 96308]), + values=tensor([0.0768, 0.4231, 0.3229, ..., 0.7263, 0.8571, 0.9151]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.5899, 0.0490, 0.9717, ..., 0.2037, 0.9811, 0.9760]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.331598043441772 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 9, ..., 499992, 499996, + 500000]), + col_indices=tensor([44464, 48782, 50602, ..., 44812, 48851, 96308]), + values=tensor([0.0768, 0.4231, 0.3229, ..., 0.7263, 0.8571, 0.9151]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.5899, 0.0490, 0.9717, ..., 0.2037, 0.9811, 0.9760]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.331598043441772 seconds + +[41.17, 39.77, 40.36, 40.06, 41.66, 39.61, 39.74, 39.58, 39.55, 39.78] +[133.96] +13.087026119232178 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 83443, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.331598043441772, 'TIME_S_1KI': 0.12381623435688761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1753.1380189323427, 'W': 133.96} +[41.17, 39.77, 40.36, 40.06, 41.66, 39.61, 39.74, 39.58, 39.55, 39.78, 41.11, 39.76, 39.73, 39.7, 39.67, 40.13, 39.73, 39.66, 39.62, 39.8] +719.26 +35.963 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 83443, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.331598043441772, 'TIME_S_1KI': 0.12381623435688761, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1753.1380189323427, 'W': 133.96, 'J_1KI': 21.010007057899916, 'W_1KI': 1.6054072840142373, 'W_D': 97.99700000000001, 'J_D': 1282.489298606396, 'W_D_1KI': 1.1744184652996656, 'J_D_1KI': 0.014074499542198454} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..846867c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 288187, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.643323421478271, "TIME_S_1KI": 0.03693200394701451, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1243.6877346038818, "W": 98.76, "J_1KI": 4.315558073764194, "W_1KI": 0.34269415344897586, "W_D": 63.04600000000001, "J_D": 793.9402279853822, 
"W_D_1KI": 0.21876767515536788, "J_D_1KI": 0.0007591170842382477} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..c5c4dec --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02071547508239746} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9999, 10000]), + col_indices=tensor([ 662, 2214, 9373, ..., 9890, 1994, 4209]), + values=tensor([0.5116, 0.1051, 0.5373, ..., 0.4151, 0.7725, 0.9175]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.1401, 0.0502, 0.3458, ..., 0.2506, 0.9913, 0.9973]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.02071547508239746 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '50686', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.8467259407043457} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 5, ..., 9998, 10000, 10000]), + col_indices=tensor([2329, 7525, 8810, ..., 9177, 1519, 2359]), + values=tensor([0.1835, 0.9536, 0.7906, ..., 0.4035, 0.0564, 0.3832]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.5918, 0.9817, 0.1058, ..., 0.3816, 0.0120, 0.7112]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 1.8467259407043457 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '288187', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.643323421478271} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9998, 10000]), + col_indices=tensor([5118, 9103, 6912, ..., 6081, 2494, 7728]), + values=tensor([0.1845, 0.4117, 0.0579, ..., 0.8363, 0.9429, 0.5429]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.5505, 0.3516, 0.4265, ..., 0.6375, 0.7561, 0.2541]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.643323421478271 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 9998, 9998, 10000]), + col_indices=tensor([5118, 9103, 6912, ..., 6081, 2494, 7728]), + values=tensor([0.1845, 0.4117, 0.0579, ..., 0.8363, 0.9429, 0.5429]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.5505, 0.3516, 0.4265, ..., 0.6375, 0.7561, 0.2541]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.643323421478271 seconds + +[40.51, 40.09, 39.52, 39.33, 39.47, 39.48, 39.43, 39.77, 39.53, 39.82] +[98.76] +12.59303092956543 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 288187, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.643323421478271, 'TIME_S_1KI': 0.03693200394701451, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1243.6877346038818, 'W': 98.76} +[40.51, 40.09, 39.52, 39.33, 39.47, 39.48, 39.43, 39.77, 39.53, 39.82, 41.15, 40.14, 40.36, 39.64, 39.34, 39.28, 39.84, 39.18, 39.56, 39.16] +714.28 +35.714 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 288187, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.643323421478271, 'TIME_S_1KI': 0.03693200394701451, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1243.6877346038818, 'W': 98.76, 'J_1KI': 4.315558073764194, 'W_1KI': 0.34269415344897586, 'W_D': 63.04600000000001, 'J_D': 793.9402279853822, 'W_D_1KI': 0.21876767515536788, 'J_D_1KI': 0.0007591170842382477} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..d5b0fd0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 192082, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.38179612159729, "TIME_S_1KI": 0.054048771470503694, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1536.0875409460068, "W": 107.47, "J_1KI": 7.997040539696623, "W_1KI": 0.5595006299392968, "W_D": 71.52775, "J_D": 1022.3586638773679, "W_D_1KI": 0.3723813267250445, "J_D_1KI": 0.0019386581081259277} diff 
--git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..41b527b --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.02809286117553711} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 18, ..., 99983, 99993, + 100000]), + col_indices=tensor([2653, 3722, 5304, ..., 7707, 8674, 8869]), + values=tensor([0.5856, 0.9425, 0.9349, ..., 0.4089, 0.4268, 0.7151]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.5412, 0.5320, 0.4895, ..., 0.9332, 0.4774, 0.7844]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.02809286117553711 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '37376', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.043123722076416} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 18, 27, ..., 99987, 99996, + 100000]), + col_indices=tensor([ 496, 1705, 2513, ..., 6230, 8377, 9882]), + values=tensor([0.7106, 0.5928, 0.5041, ..., 0.9691, 0.8218, 0.7424]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6869, 0.6587, 0.3624, ..., 0.7168, 0.6886, 0.1198]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 2.043123722076416 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '192082', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.38179612159729} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 14, 28, ..., 99972, 99987, + 100000]), + col_indices=tensor([ 27, 2567, 2642, ..., 7209, 7267, 7735]), + values=tensor([0.8851, 0.6027, 0.9664, ..., 0.7310, 0.7426, 0.3698]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8025, 0.3022, 0.3457, ..., 0.3811, 0.1140, 0.9144]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.38179612159729 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 14, 28, ..., 99972, 99987, + 100000]), + col_indices=tensor([ 27, 2567, 2642, ..., 7209, 7267, 7735]), + values=tensor([0.8851, 0.6027, 0.9664, ..., 0.7310, 0.7426, 0.3698]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.8025, 0.3022, 0.3457, ..., 0.3811, 0.1140, 0.9144]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.38179612159729 seconds + +[40.11, 39.55, 39.33, 44.82, 39.9, 39.64, 39.98, 39.29, 39.99, 39.21] +[107.47] +14.293175220489502 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 192082, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.38179612159729, 'TIME_S_1KI': 0.054048771470503694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.0875409460068, 'W': 107.47} +[40.11, 39.55, 39.33, 44.82, 39.9, 39.64, 39.98, 39.29, 39.99, 39.21, 40.63, 39.47, 39.85, 39.28, 39.66, 39.35, 39.46, 39.87, 39.78, 39.3] +718.845 +35.94225 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 192082, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.38179612159729, 'TIME_S_1KI': 0.054048771470503694, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1536.0875409460068, 'W': 107.47, 'J_1KI': 7.997040539696623, 'W_1KI': 0.5595006299392968, 'W_D': 71.52775, 'J_D': 1022.3586638773679, 'W_D_1KI': 0.3723813267250445, 'J_D_1KI': 0.0019386581081259277} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..c0a8393 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 102052, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.32213807106018, "TIME_S_1KI": 0.10114586750931075, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1720.3125169992445, "W": 134.17, "J_1KI": 16.857215115815904, "W_1KI": 1.3147219064790499, "W_D": 98.29849999999999, "J_D": 1260.3722139990327, "W_D_1KI": 0.9632197311174694, "J_D_1KI": 
0.0094385189032794} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..3b630d1 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.04350447654724121} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 98, 200, ..., 999802, + 999895, 1000000]), + col_indices=tensor([ 47, 93, 107, ..., 9931, 9947, 9964]), + values=tensor([0.2387, 0.2735, 0.7135, ..., 0.1692, 0.6802, 0.4186]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2660, 0.3479, 0.7430, ..., 0.3350, 0.7379, 0.6869]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.04350447654724121 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '24135', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.483203411102295} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 121, 211, ..., 999813, + 999902, 1000000]), + col_indices=tensor([ 152, 193, 233, ..., 9824, 9889, 9990]), + values=tensor([0.9787, 0.2142, 0.0572, ..., 0.5889, 0.8836, 0.8390]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.2484, 0.0072, 0.5266, ..., 0.8378, 0.3257, 0.7895]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 2.483203411102295 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '102052', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.32213807106018} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 109, 197, ..., 999808, + 999909, 1000000]), + col_indices=tensor([ 12, 158, 312, ..., 9915, 9965, 9970]), + values=tensor([0.1097, 0.7996, 0.8802, ..., 0.2965, 0.2793, 0.1775]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.4138, 0.6115, 0.5428, ..., 0.5829, 0.0748, 0.9104]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.32213807106018 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 109, 197, ..., 999808, + 999909, 1000000]), + col_indices=tensor([ 12, 158, 312, ..., 9915, 9965, 9970]), + values=tensor([0.1097, 0.7996, 0.8802, ..., 0.2965, 0.2793, 0.1775]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.4138, 0.6115, 0.5428, ..., 0.5829, 0.0748, 0.9104]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.32213807106018 seconds + +[40.44, 39.51, 39.85, 40.81, 39.86, 41.16, 39.43, 39.73, 39.64, 39.53] +[134.17] +12.821886539459229 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102052, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.32213807106018, 'TIME_S_1KI': 0.10114586750931075, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1720.3125169992445, 'W': 134.17} +[40.44, 39.51, 39.85, 40.81, 39.86, 41.16, 39.43, 39.73, 39.64, 39.53, 40.32, 40.34, 39.64, 39.51, 39.49, 39.33, 39.71, 39.95, 39.67, 39.31] +717.43 +35.8715 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 102052, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.32213807106018, 'TIME_S_1KI': 0.10114586750931075, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1720.3125169992445, 'W': 134.17, 'J_1KI': 16.857215115815904, 'W_1KI': 1.3147219064790499, 'W_D': 98.29849999999999, 'J_D': 1260.3722139990327, 'W_D_1KI': 0.9632197311174694, 'J_D_1KI': 0.0094385189032794} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..0c8793e --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 27901, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.459260940551758, "TIME_S_1KI": 0.3748704684617669, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2101.511001520157, "W": 151.74, "J_1KI": 75.32027531343526, "W_1KI": 5.43851474857532, "W_D": 115.6735, "J_D": 1602.0108925421239, "W_D_1KI": 4.145854987276442, "J_D_1KI": 
0.14859162708420637} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..d46dec8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.07326960563659668} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 503, 989, ..., 4998955, + 4999450, 5000000]), + col_indices=tensor([ 38, 72, 81, ..., 9956, 9978, 9983]), + values=tensor([0.2927, 0.3163, 0.4567, ..., 0.1935, 0.9639, 0.3715]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.4283, 0.0472, 0.5653, ..., 0.2916, 0.5894, 0.9993]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 0.07326960563659668 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14330', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 5.392660140991211} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 524, 1025, ..., 4998972, + 4999489, 5000000]), + col_indices=tensor([ 6, 15, 16, ..., 9973, 9985, 9996]), + values=tensor([0.1466, 0.4320, 0.8734, ..., 0.2839, 0.7163, 0.2149]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.6711, 0.1737, 0.7087, ..., 0.1819, 0.7746, 0.6924]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 5.392660140991211 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '27901', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.459260940551758} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 478, 963, ..., 4998976, + 4999469, 5000000]), + col_indices=tensor([ 4, 6, 8, ..., 9977, 9981, 9998]), + values=tensor([0.4938, 0.7817, 0.2868, ..., 0.2355, 0.4075, 0.9137]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7828, 0.6669, 0.8649, ..., 0.0217, 0.0077, 0.7398]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.459260940551758 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 478, 963, ..., 4998976, + 4999469, 5000000]), + col_indices=tensor([ 4, 6, 8, ..., 9977, 9981, 9998]), + values=tensor([0.4938, 0.7817, 0.2868, ..., 0.2355, 0.4075, 0.9137]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7828, 0.6669, 0.8649, ..., 0.0217, 0.0077, 0.7398]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.459260940551758 seconds + +[40.28, 39.74, 40.09, 39.52, 39.81, 39.49, 40.09, 39.86, 39.96, 39.9] +[151.74] +13.849420070648193 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27901, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.459260940551758, 'TIME_S_1KI': 0.3748704684617669, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2101.511001520157, 'W': 151.74} +[40.28, 39.74, 40.09, 39.52, 39.81, 39.49, 40.09, 39.86, 39.96, 39.9, 41.6, 42.09, 41.06, 39.67, 39.75, 39.6, 39.57, 40.11, 40.26, 39.54] +721.33 +36.066500000000005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 27901, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.459260940551758, 'TIME_S_1KI': 0.3748704684617669, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2101.511001520157, 'W': 151.74, 'J_1KI': 75.32027531343526, 'W_1KI': 5.43851474857532, 'W_D': 115.6735, 'J_D': 1602.0108925421239, 'W_D_1KI': 4.145854987276442, 'J_D_1KI': 0.14859162708420637} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..2bdc411 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4895, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.465762615203857, "TIME_S_1KI": 2.3423416987137604, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1961.564714949131, "W": 125.13, "J_1KI": 400.72823594466416, "W_1KI": 25.562819203268642, "W_D": 88.61599999999999, "J_D": 1389.1634202823636, "W_D_1KI": 18.10337078651685, "J_D_1KI": 3.698339282230205} 
diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..c73c74f --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.26529860496520996} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 948, 1895, ..., 9998021, + 9999002, 10000000]), + col_indices=tensor([ 3, 4, 24, ..., 9958, 9984, 9986]), + values=tensor([0.2249, 0.5337, 0.8362, ..., 0.6636, 0.7975, 0.6242]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1264, 0.7394, 0.5519, ..., 0.0745, 0.0081, 0.2644]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 0.26529860496520996 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3957', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.487387418746948} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 978, 1933, ..., 9998004, + 9999014, 10000000]), + col_indices=tensor([ 3, 5, 6, ..., 9972, 9982, 9998]), + values=tensor([0.7080, 0.9187, 0.9413, ..., 0.1315, 0.2244, 0.9797]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.5306, 0.8726, 0.4027, ..., 0.7037, 0.0033, 0.8016]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 8.487387418746948 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4895', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 11.465762615203857} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 986, 1965, ..., 9997996, + 9999022, 10000000]), + col_indices=tensor([ 5, 25, 37, ..., 9984, 9993, 9998]), + values=tensor([0.8800, 0.4752, 0.0446, ..., 0.6391, 0.5084, 0.8692]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6122, 0.5951, 0.3953, ..., 0.4999, 0.2315, 0.6538]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 11.465762615203857 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 986, 1965, ..., 9997996, + 9999022, 10000000]), + col_indices=tensor([ 5, 25, 37, ..., 9984, 9993, 9998]), + values=tensor([0.8800, 0.4752, 0.0446, ..., 0.6391, 0.5084, 0.8692]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6122, 0.5951, 0.3953, ..., 0.4999, 0.2315, 0.6538]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 11.465762615203857 seconds + +[40.6, 45.25, 40.43, 39.72, 40.94, 40.35, 40.49, 39.74, 42.24, 39.59] +[125.13] +15.676214456558228 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4895, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.465762615203857, 'TIME_S_1KI': 2.3423416987137604, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1961.564714949131, 'W': 125.13} +[40.6, 45.25, 40.43, 39.72, 40.94, 40.35, 40.49, 39.74, 42.24, 39.59, 41.13, 40.62, 40.17, 40.53, 40.27, 40.01, 39.62, 39.57, 39.92, 39.5] +730.2800000000001 +36.514 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4895, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 11.465762615203857, 'TIME_S_1KI': 2.3423416987137604, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1961.564714949131, 'W': 125.13, 'J_1KI': 400.72823594466416, 'W_1KI': 25.562819203268642, 'W_D': 88.61599999999999, 'J_D': 1389.1634202823636, 'W_D_1KI': 18.10337078651685, 'J_D_1KI': 3.698339282230205} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..0c05e29 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2082, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.557694435119629, "TIME_S_1KI": 5.070938729644394, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1859.5419220256806, "W": 120.04, "J_1KI": 893.1517396857256, "W_1KI": 57.656099903938525, "W_D": 83.7395, "J_D": 1297.2101864334345, "W_D_1KI": 40.22070124879924, "J_D_1KI": 
19.31830031162307} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..68bbca5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.5041141510009766} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2037, 4062, ..., 19995956, + 19997954, 20000000]), + col_indices=tensor([ 0, 3, 5, ..., 9996, 9997, 9998]), + values=tensor([0.3088, 0.0777, 0.1762, ..., 0.6057, 0.6562, 0.8467]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.5661, 0.3739, 0.3594, ..., 0.8068, 0.7143, 0.9609]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 0.5041141510009766 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2082', '-ss', '10000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.557694435119629} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1900, 3860, ..., 19996021, + 19998015, 20000000]), + col_indices=tensor([ 0, 3, 11, ..., 9989, 9992, 9996]), + values=tensor([0.1071, 0.4523, 0.1080, ..., 0.2881, 0.4034, 0.8495]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.3748, 0.6654, 0.9133, ..., 0.7126, 0.6760, 0.9288]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.557694435119629 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1900, 3860, ..., 19996021, + 19998015, 20000000]), + col_indices=tensor([ 0, 3, 11, ..., 9989, 9992, 9996]), + values=tensor([0.1071, 0.4523, 0.1080, ..., 0.2881, 0.4034, 0.8495]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.3748, 0.6654, 0.9133, ..., 0.7126, 0.6760, 0.9288]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.557694435119629 seconds + +[40.83, 40.15, 39.72, 39.76, 39.84, 39.84, 40.49, 39.96, 40.13, 39.73] +[120.04] +15.491019010543823 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2082, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.557694435119629, 'TIME_S_1KI': 5.070938729644394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1859.5419220256806, 'W': 120.04} +[40.83, 40.15, 39.72, 39.76, 39.84, 39.84, 40.49, 39.96, 40.13, 39.73, 41.31, 40.05, 40.38, 40.21, 39.71, 39.65, 45.13, 39.78, 40.38, 39.79] +726.01 +36.3005 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2082, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.557694435119629, 'TIME_S_1KI': 5.070938729644394, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1859.5419220256806, 'W': 120.04, 'J_1KI': 893.1517396857256, 'W_1KI': 57.656099903938525, 'W_D': 83.7395, 'J_D': 1297.2101864334345, 'W_D_1KI': 40.22070124879924, 'J_D_1KI': 19.31830031162307} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..b94bf6c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1470, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.845632314682007, "TIME_S_1KI": 7.377981166450344, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2161.157882115841, "W": 117.03, "J_1KI": 1470.1754300107762, "W_1KI": 79.61224489795917, "W_D": 80.6345, "J_D": 1489.0531081386805, "W_D_1KI": 54.853401360544225, "J_D_1KI": 37.315239020778385} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..4e7bca2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.7518825531005859} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2974, 5950, ..., 29994087, + 29997108, 30000000]), + col_indices=tensor([ 6, 8, 10, ..., 9985, 9992, 9996]), + values=tensor([0.7151, 0.6737, 0.4043, ..., 0.5812, 0.5679, 0.6733]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.6135, 0.6008, 0.5882, ..., 0.6628, 0.8539, 0.9204]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 0.7518825531005859 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1396', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.968559503555298} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3001, 6027, ..., 29994039, + 29997052, 30000000]), + col_indices=tensor([ 7, 11, 16, ..., 9989, 9996, 9999]), + values=tensor([0.2908, 0.3192, 0.9662, ..., 0.5726, 0.8523, 0.1200]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.6009, 0.9845, 0.3791, ..., 0.1987, 0.1714, 0.4278]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 9.968559503555298 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1470', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.845632314682007} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2929, 5851, ..., 29993945, + 29997029, 30000000]), + col_indices=tensor([ 6, 7, 11, ..., 9986, 9997, 9998]), + values=tensor([0.6210, 0.5427, 0.8130, ..., 0.4194, 0.0441, 0.7442]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.0141, 0.0033, 0.5199, ..., 0.4699, 0.7276, 0.5761]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.845632314682007 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2929, 5851, ..., 29993945, + 29997029, 30000000]), + col_indices=tensor([ 6, 7, 11, ..., 9986, 9997, 9998]), + values=tensor([0.6210, 0.5427, 0.8130, ..., 0.4194, 0.0441, 0.7442]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.0141, 0.0033, 0.5199, ..., 0.4699, 0.7276, 0.5761]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.845632314682007 seconds + +[46.28, 39.97, 40.2, 40.69, 40.27, 40.47, 40.33, 41.03, 39.84, 40.37] +[117.03] +18.466699838638306 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1470, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.845632314682007, 'TIME_S_1KI': 7.377981166450344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2161.157882115841, 'W': 117.03} +[46.28, 39.97, 40.2, 40.69, 40.27, 40.47, 40.33, 41.03, 39.84, 40.37, 41.81, 40.41, 40.59, 39.82, 40.04, 39.71, 39.78, 40.7, 39.78, 40.1] +727.9100000000001 +36.395500000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1470, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.845632314682007, 'TIME_S_1KI': 7.377981166450344, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2161.157882115841, 'W': 117.03, 'J_1KI': 1470.1754300107762, 'W_1KI': 79.61224489795917, 'W_D': 80.6345, 'J_D': 1489.0531081386805, 'W_D_1KI': 54.853401360544225, 'J_D_1KI': 37.315239020778385} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..b4516b2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 366482, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.093939065933228, "TIME_S_1KI": 0.0302714432521467, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1264.2006668257713, "W": 98.79, "J_1KI": 3.4495573229402026, "W_1KI": 0.26956303447372587, "W_D": 62.66125, "J_D": 801.866525297463, "W_D_1KI": 0.17098043014390885, "J_D_1KI": 0.0004665452331735497} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..b43f8ba --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1793 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019162416458129883} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([2508, 9046, 543, 4312, 1967, 3077, 2314, 3402, 8716, + 3244, 7898, 5856, 5314, 5976, 2342, 2492, 8104, 2147, + 1979, 1751, 5860, 9783, 5705, 5222, 9824, 95, 1929, + 8803, 8548, 677, 1693, 5788, 5523, 9109, 5789, 1810, + 1126, 4820, 9808, 8419, 2599, 1229, 2963, 1212, 2144, + 3318, 3062, 2566, 7505, 6545, 1590, 1285, 717, 2005, + 5957, 3726, 9706, 857, 1431, 3655, 7849, 8981, 2848, + 7191, 9483, 8286, 6981, 629, 346, 7910, 8938, 1876, + 6475, 566, 6786, 4287, 8002, 5447, 9857, 7286, 3753, + 6095, 8322, 7342, 5732, 7297, 8559, 2193, 1470, 9896, + 4441, 3719, 1687, 3912, 4005, 7511, 4689, 9996, 5681, + 3721, 5011, 1299, 193, 7161, 1139, 8246, 6024, 9167, + 5912, 3347, 8644, 5448, 9677, 2358, 2275, 4573, 5804, + 6640, 677, 4332, 7242, 5152, 3155, 2691, 5318, 3888, + 7771, 2370, 9442, 9350, 2217, 3939, 1845, 8407, 8949, + 3238, 6161, 7398, 563, 6153, 7042, 5316, 6185, 8259, + 7444, 4523, 6325, 9519, 7963, 8735, 3571, 4162, 6945, + 7239, 9555, 8362, 6689, 825, 2005, 7392, 6469, 6550, + 1091, 7670, 3483, 3477, 6190, 5573, 9375, 3852, 897, + 8739, 4969, 3239, 4179, 9098, 2727, 7551, 1657, 3410, + 9763, 2836, 8969, 3741, 1805, 806, 7323, 1341, 9366, + 4238, 7055, 1855, 6557, 2199, 3076, 961, 56, 4684, + 8459, 6449, 916, 1241, 3555, 6490, 6076, 4659, 6608, + 1972, 7464, 3684, 7276, 202, 4934, 2937, 6629, 8676, + 2854, 9198, 3221, 1881, 9527, 3491, 900, 551, 3477, + 7557, 5144, 2172, 5733, 2720, 3420, 7237, 7166, 9810, + 146, 2108, 2851, 228, 520, 2516, 469, 991, 7850, + 1010, 9739, 3913, 4444, 2689, 1467, 901, 7088, 235, + 7464, 8041, 9413, 9292, 9837, 5114, 68, 289, 3415, + 4247, 7541, 9998, 3514, 9674, 2670, 3572, 2167, 8523, + 1517, 6861, 179, 4531, 7528, 4118, 3477, 1329, 9307, + 6276, 937, 7241, 4841, 2423, 4555, 3917, 1683, 3585, + 9720, 9352, 5341, 6902, 9150, 1840, 8548, 9758, 2673, + 665, 8946, 4675, 2273, 3021, 7214, 5707, 6328, 3736, + 1078, 7132, 2785, 1506, 6694, 7725, 9559, 241, 6974, + 365, 3095, 3437, 5609, 2248, 2372, 9786, 6037, 6108, + 7705, 1458, 6266, 400, 8324, 6775, 3201, 1369, 6843, + 7707, 9241, 1615, 9217, 1863, 2989, 2014, 2316, 4769, + 5768, 5885, 5440, 3265, 6889, 6583, 7730, 647, 9106, + 4087, 6087, 4403, 9274, 4471, 7401, 2762, 3931, 6965, + 3164, 938, 527, 5512, 7910, 4668, 5448, 6709, 6557, + 7876, 2822, 1072, 7234, 7990, 680, 6351, 365, 8734, + 3355, 2213, 5195, 7229, 9067, 2511, 3762, 8881, 3507, + 8462, 498, 1315, 6134, 3211, 3461, 4219, 3233, 6852, + 4989, 108, 8007, 9484, 5952, 2542, 8868, 678, 6135, + 2042, 6083, 8113, 8376, 7391, 510, 8516, 672, 6513, + 3857, 4529, 1142, 2241, 2953, 8752, 7337, 485, 9766, + 7738, 1630, 4671, 1054, 3709, 3629, 1425, 6145, 3112, + 1557, 5971, 4707, 2420, 7428, 3073, 7043, 4518, 3258, + 8324, 4576, 9820, 1738, 4288, 4568, 5120, 2665, 5670, + 1160, 3431, 2936, 6123, 3465, 6501, 3805, 5189, 2086, + 6398, 7063, 7290, 1186, 1870, 6160, 4041, 9566, 9903, + 8671, 3136, 161, 9574, 903, 9086, 6744, 9121, 576, + 5200, 5674, 3097, 5935, 1757, 3232, 3580, 6609, 4322, + 3945, 8444, 3852, 1569, 5504, 6603, 1547, 878, 7861, + 8802, 3561, 6575, 4591, 301, 5462, 5293, 1550, 7795, + 7430, 4734, 185, 8890, 5875, 1566, 7507, 6627, 9171, + 1332, 7105, 4247, 5478, 9026, 7544, 154, 3320, 6187, + 9024, 5315, 
5066, 2631, 6631, 4897, 3673, 328, 228, + 3166, 2519, 3745, 6596, 6669, 3088, 9711, 8165, 899, + 9589, 36, 3957, 496, 115, 6166, 3688, 1766, 2307, + 361, 8690, 3683, 5585, 8112, 1232, 4580, 3944, 1072, + 3051, 3242, 8587, 3766, 5601, 892, 958, 4286, 1119, + 8408, 8953, 1339, 9703, 8899, 4918, 8984, 90, 9279, + 2372, 4341, 1706, 6214, 6370, 5149, 5092, 1777, 9441, + 9154, 96, 8248, 9257, 324, 4153, 3647, 3530, 8044, + 6156, 5365, 6116, 6837, 5051, 5730, 9232, 9397, 1594, + 8693, 8301, 9952, 6466, 8737, 1882, 7563, 1802, 8052, + 5900, 5405, 9630, 5275, 397, 43, 1118, 3111, 7822, + 1550, 6255, 9674, 8999, 4005, 803, 8997, 3496, 9830, + 8865, 2267, 8584, 1911, 3802, 5884, 3569, 5747, 6116, + 2421, 6562, 9781, 1610, 9382, 4873, 3318, 7129, 1019, + 4920, 9241, 9316, 5277, 42, 7494, 521, 5458, 1264, + 2824, 4185, 5776, 9545, 1226, 5004, 3090, 5089, 5606, + 2635, 730, 1318, 7015, 5910, 1589, 2576, 5832, 8341, + 510, 1635, 9237, 9840, 9042, 2447, 2076, 1317, 6375, + 2349, 753, 5583, 461, 8308, 3246, 9965, 6828, 2813, + 5244, 8142, 3664, 9159, 2387, 8232, 3360, 3745, 9932, + 8242, 9581, 583, 7106, 1310, 8310, 9223, 9897, 891, + 3817, 669, 1332, 8246, 7894, 4436, 9573, 8485, 883, + 6017, 9583, 1812, 1526, 9934, 6848, 545, 1911, 9192, + 9042, 1264, 2751, 4412, 3662, 7224, 2779, 4696, 305, + 5292, 260, 4859, 3014, 8257, 6456, 630, 7298, 2761, + 6571, 1472, 237, 547, 1959, 9753, 241, 8232, 658, + 5617, 1683, 4236, 4952, 233, 3496, 5512, 1296, 2703, + 824, 6138, 8526, 4191, 8549, 1882, 3569, 880, 1859, + 3403, 3955, 3780, 9117, 9017, 4182, 8874, 6980, 6107, + 6696, 2541, 5309, 129, 1202, 6760, 6885, 2142, 3488, + 5352, 1138, 5951, 6874, 3225, 7748, 1683, 7358, 5735, + 6474, 9585, 2307, 1145, 6405, 344, 6696, 7602, 6515, + 6579, 4160, 476, 4879, 2338, 3519, 6083, 7770, 8803, + 874, 61, 5281, 3911, 7063, 496, 6813, 3096, 9178, + 9609, 9207, 4316, 298, 2152, 4674, 1655, 2709, 1060, + 1886, 4678, 5984, 8258, 3381, 1591, 2352, 5598, 2384, + 6485, 3129, 2612, 5258, 50, 7930, 121, 1164, 2557, + 7206, 332, 277, 8421, 8500, 3337, 425, 29, 4235, + 1351, 6801, 7552, 2348, 1388, 7634, 6330, 462, 92, + 7088, 6060, 755, 8641, 9588, 7108, 2787, 4729, 7562, + 1723, 9127, 2961, 5309, 3629, 9214, 609, 7748, 5877, + 8710, 813, 774, 4114, 8416, 9164, 6941, 8479, 9249, + 2209, 1935, 7138, 6867, 4080, 6512, 6310, 1493, 4434, + 2113, 1980, 842, 7628, 543, 6712, 7464, 8278, 9046, + 252, 3576, 5501, 7511, 8121, 9681, 2157, 2047, 8548, + 1713, 9404, 312, 6465, 1352, 745, 2058, 4123, 364, + 3279, 206, 6153, 6439, 2407, 2611, 8191, 5859, 4752, + 2250, 2606, 5278, 8499, 5121, 6146, 1399, 707, 9543, + 9512, 8626, 4838, 1640, 8865, 3626, 1378, 1243, 7236, + 2803, 749, 5378, 8175, 205, 2154, 4279, 8991, 3947, + 1049, 8403, 5710, 6926, 9939, 7498, 1591, 3403, 5973, + 3803, 6876, 4161, 9117, 6935, 2368, 4884, 1751, 2199, + 7506, 7285, 9043, 5992, 4277, 9161, 865, 8138, 3793, + 2124]), + values=tensor([9.5707e-01, 7.5111e-02, 5.2267e-01, 7.5134e-01, + 4.3950e-01, 1.0462e-01, 5.2766e-01, 8.0984e-01, + 3.6860e-01, 6.2700e-01, 2.7206e-01, 2.2086e-01, + 7.5800e-02, 5.2988e-01, 9.7607e-01, 6.6976e-01, + 1.0378e-01, 2.4656e-01, 7.2340e-02, 8.2021e-01, + 5.8758e-01, 3.6488e-01, 4.8457e-01, 9.5443e-01, + 8.5500e-01, 7.0430e-02, 4.0168e-01, 9.6994e-01, + 7.6007e-01, 5.2134e-01, 6.0309e-01, 2.0268e-02, + 6.4190e-01, 5.1957e-01, 1.7394e-02, 5.2587e-01, + 8.3287e-01, 5.4658e-01, 2.9352e-01, 5.8028e-01, + 2.3292e-01, 5.6525e-01, 3.0481e-01, 5.9132e-01, + 5.4885e-01, 4.3601e-01, 1.8604e-01, 7.6863e-01, + 4.0955e-01, 1.9418e-01, 8.1744e-01, 2.2905e-01, 
+ 6.2145e-01, 4.2411e-01, 9.6454e-01, 6.9999e-01, + 2.5102e-01, 7.2112e-01, 5.4737e-03, 2.1960e-01, + 2.7281e-01, 9.8861e-01, 2.3502e-01, 1.8402e-02, + 7.3555e-01, 1.7015e-01, 8.0917e-01, 3.6306e-01, + 9.5127e-01, 9.5612e-01, 6.5620e-01, 9.4079e-01, + 8.4929e-02, 1.9682e-01, 2.9972e-01, 7.5136e-01, + 9.5920e-01, 8.9681e-01, 9.3731e-01, 7.0940e-01, + 3.4216e-01, 5.2653e-01, 4.4805e-01, 3.5030e-01, + 4.8871e-01, 6.7711e-01, 2.1250e-01, 6.4504e-01, + 8.9757e-01, 2.0048e-01, 7.0915e-01, 3.4982e-01, + 3.7719e-01, 5.1148e-01, 5.1377e-01, 4.0306e-01, + 9.0690e-01, 9.4781e-01, 4.2681e-01, 8.2944e-01, + 3.6292e-02, 1.1525e-01, 2.1119e-01, 4.8411e-01, + 5.4486e-01, 6.4081e-01, 7.2189e-01, 7.4429e-01, + 5.2306e-01, 2.1619e-01, 9.8409e-01, 1.4917e-01, + 7.9852e-01, 7.8111e-01, 1.8563e-01, 1.2931e-01, + 6.3740e-01, 4.9304e-01, 1.5550e-01, 5.8621e-01, + 6.4899e-01, 7.7558e-01, 3.3227e-01, 2.4952e-01, + 8.7603e-01, 8.7279e-01, 2.4448e-01, 5.2727e-01, + 6.1779e-01, 6.0647e-01, 4.6651e-01, 1.5071e-01, + 2.6441e-01, 7.8739e-01, 7.4962e-01, 1.1566e-01, + 2.0523e-01, 6.1684e-02, 1.3500e-01, 3.5001e-01, + 4.2839e-01, 1.0545e-01, 3.1092e-01, 1.9727e-01, + 2.2556e-01, 8.2536e-01, 5.3360e-01, 5.6445e-02, + 2.8582e-01, 9.0706e-01, 3.2536e-01, 5.8741e-01, + 6.7535e-01, 2.1045e-01, 2.8517e-01, 8.8253e-01, + 4.3607e-01, 1.1168e-01, 1.0718e-01, 9.9590e-01, + 2.8048e-01, 7.4511e-01, 5.0053e-01, 6.2593e-01, + 5.9607e-01, 4.4446e-02, 9.8355e-01, 5.2331e-01, + 5.2200e-01, 8.5502e-01, 6.4442e-01, 9.0020e-01, + 8.6652e-01, 3.0542e-01, 5.0427e-01, 7.5490e-01, + 7.0590e-01, 4.5354e-01, 9.5344e-01, 7.8571e-01, + 8.1908e-01, 4.9790e-01, 5.0901e-01, 6.2748e-01, + 8.1486e-01, 2.4903e-01, 4.0531e-01, 5.0243e-01, + 7.1026e-01, 6.1145e-01, 9.9286e-01, 9.2958e-02, + 9.5266e-01, 9.8567e-02, 6.6328e-01, 7.4995e-01, + 1.6322e-01, 4.7215e-01, 2.8642e-01, 4.0280e-01, + 4.1682e-01, 8.7567e-01, 3.3333e-01, 6.2409e-01, + 5.1732e-01, 5.0235e-01, 5.9038e-01, 9.6075e-01, + 6.4711e-01, 1.9445e-01, 6.6739e-01, 1.7538e-01, + 4.7512e-01, 7.0938e-01, 7.6463e-01, 9.7284e-01, + 6.7293e-01, 7.1057e-01, 1.6317e-01, 5.3806e-01, + 2.3246e-01, 3.3440e-01, 6.4813e-01, 1.4413e-01, + 6.7799e-01, 6.9199e-01, 2.7005e-01, 8.5967e-01, + 9.5025e-01, 5.7371e-01, 5.3952e-01, 8.1204e-01, + 8.6498e-01, 1.0209e-01, 1.7641e-01, 9.0414e-01, + 9.8256e-01, 1.4095e-01, 1.1876e-02, 6.2645e-01, + 6.8777e-01, 3.0740e-01, 2.6030e-01, 8.3542e-01, + 5.9327e-01, 4.4948e-01, 3.7350e-01, 1.0986e-04, + 2.3860e-01, 9.1896e-02, 5.3594e-01, 9.6019e-01, + 1.6538e-01, 1.1255e-01, 3.6157e-01, 8.2704e-01, + 4.6706e-01, 9.2369e-01, 5.7754e-02, 8.8497e-01, + 4.0691e-01, 1.4109e-01, 8.9898e-01, 4.6571e-01, + 6.6921e-01, 1.3252e-01, 1.8881e-01, 9.8200e-01, + 1.0344e-01, 8.4811e-01, 1.5001e-01, 2.3035e-01, + 5.0531e-01, 5.4044e-01, 1.5758e-01, 2.7234e-01, + 2.9591e-02, 5.5223e-01, 5.2184e-01, 1.3019e-01, + 5.4173e-01, 3.8830e-02, 1.7101e-01, 4.2637e-01, + 1.9357e-01, 8.3596e-01, 2.2751e-01, 3.1702e-02, + 5.1308e-01, 8.9139e-01, 7.0073e-01, 1.9805e-02, + 7.1449e-01, 8.6626e-01, 6.7742e-01, 9.7350e-02, + 9.6125e-02, 1.0821e-02, 2.0755e-01, 8.5136e-01, + 9.6269e-01, 2.9214e-01, 7.5108e-01, 9.3964e-01, + 3.7916e-01, 4.1542e-01, 5.8698e-02, 6.6382e-02, + 6.7280e-01, 5.8834e-01, 6.9868e-01, 2.9465e-01, + 8.0107e-01, 9.9069e-01, 9.1914e-01, 8.1978e-01, + 2.3299e-01, 8.9070e-01, 4.4949e-01, 3.9548e-01, + 6.8903e-02, 5.1778e-01, 5.0884e-01, 9.6649e-01, + 7.8248e-01, 7.9469e-01, 8.4596e-01, 4.1456e-01, + 5.6304e-01, 2.4162e-01, 7.7596e-01, 3.7262e-01, + 1.9690e-01, 8.7898e-01, 5.7855e-01, 8.8490e-01, + 
7.9846e-01, 9.8537e-01, 2.5265e-01, 4.1278e-01, + 2.8090e-01, 7.7330e-01, 3.6348e-01, 1.2010e-01, + 4.8836e-02, 3.9874e-01, 3.6603e-01, 9.7665e-01, + 6.6776e-01, 8.6239e-01, 5.8508e-01, 6.2472e-01, + 4.6666e-01, 7.1718e-01, 5.8015e-01, 1.7355e-01, + 6.4883e-01, 6.9474e-01, 5.5885e-01, 8.0919e-01, + 2.1098e-02, 5.1721e-01, 3.2513e-01, 4.7559e-01, + 6.9479e-01, 5.7204e-02, 8.2071e-01, 7.9470e-01, + 1.5832e-01, 9.6615e-01, 8.8939e-01, 3.4476e-01, + 4.2795e-01, 2.4169e-01, 1.1676e-01, 8.6470e-01, + 8.2056e-01, 6.5573e-01, 9.8935e-01, 5.7272e-01, + 7.3601e-01, 7.9089e-02, 3.3822e-01, 4.0108e-01, + 9.6395e-01, 4.8080e-01, 5.9846e-01, 3.8560e-01, + 4.7398e-01, 5.5866e-01, 1.9426e-01, 6.3002e-01, + 3.6931e-01, 1.9150e-01, 9.5172e-01, 6.8593e-01, + 9.0949e-01, 7.7968e-01, 8.8743e-01, 7.2524e-01, + 5.8547e-01, 4.0587e-02, 7.9930e-01, 4.9416e-01, + 2.2101e-01, 3.5603e-01, 3.6054e-01, 8.1873e-01, + 4.9506e-01, 4.8170e-01, 1.9852e-02, 8.4708e-01, + 5.5999e-02, 9.3643e-01, 8.9081e-01, 5.2429e-01, + 3.4433e-01, 9.9034e-01, 7.5270e-01, 9.8192e-02, + 8.3430e-01, 4.0506e-01, 5.7351e-01, 5.8198e-01, + 7.8535e-01, 3.1474e-01, 3.7672e-01, 9.5581e-01, + 5.0140e-01, 9.4484e-01, 8.7129e-01, 7.1288e-01, + 5.5956e-01, 4.2823e-01, 5.7999e-01, 2.1299e-01, + 4.2089e-01, 2.2957e-01, 7.9354e-01, 7.6309e-02, + 3.4527e-01, 7.3564e-01, 3.1080e-02, 5.3529e-01, + 6.7180e-01, 5.7907e-01, 3.1932e-01, 7.5232e-01, + 9.5640e-01, 2.7941e-01, 3.0238e-01, 2.3890e-01, + 7.8927e-01, 3.9851e-01, 9.3470e-01, 5.2208e-01, + 2.6565e-01, 9.7156e-01, 5.3434e-02, 1.7433e-01, + 6.3462e-01, 4.0678e-01, 7.6080e-01, 7.5007e-01, + 8.8465e-01, 5.6181e-01, 1.8341e-01, 7.3839e-01, + 1.2657e-01, 4.0856e-02, 1.9742e-01, 5.7758e-01, + 9.1472e-01, 7.0792e-01, 2.2052e-01, 8.3225e-01, + 3.2311e-01, 3.3219e-01, 7.1413e-01, 9.1565e-01, + 3.5997e-01, 7.4105e-03, 6.0376e-01, 7.2324e-01, + 1.4783e-01, 6.7320e-01, 3.7075e-01, 2.5924e-02, + 9.4362e-01, 9.0266e-01, 1.6865e-01, 1.1705e-01, + 5.1386e-01, 8.8878e-01, 7.2176e-02, 4.6214e-01, + 5.6413e-01, 8.8627e-01, 1.4536e-01, 9.9284e-01, + 2.1084e-01, 2.8486e-01, 4.9650e-01, 2.9385e-02, + 6.2964e-01, 4.5897e-01, 1.2506e-01, 3.2973e-01, + 3.5741e-01, 1.1022e-01, 4.5569e-01, 6.9577e-01, + 5.5279e-01, 2.7969e-01, 8.5986e-01, 2.6368e-01, + 6.4340e-01, 9.2902e-01, 8.9041e-01, 3.8571e-01, + 5.3481e-01, 6.3246e-01, 2.9579e-01, 8.7994e-01, + 7.3501e-01, 1.8278e-01, 8.7146e-01, 6.8984e-01, + 9.7701e-01, 2.6879e-01, 4.7228e-01, 4.6858e-01, + 6.5356e-01, 3.4051e-01, 1.3083e-01, 5.5078e-01, + 9.9590e-01, 6.7336e-01, 5.2390e-01, 9.9667e-01, + 3.6021e-01, 8.2597e-01, 9.1583e-02, 8.8522e-01, + 6.2531e-01, 7.7626e-01, 2.4642e-01, 7.1187e-01, + 8.5251e-01, 3.2244e-01, 8.3217e-01, 5.6333e-01, + 9.0624e-01, 1.0129e-01, 1.7179e-01, 2.2377e-01, + 1.3904e-01, 1.0345e-01, 3.5301e-01, 9.2239e-01, + 1.7381e-01, 8.5612e-01, 4.7815e-02, 6.3335e-01, + 8.3647e-01, 9.1361e-01, 4.4021e-01, 9.0607e-01, + 9.6051e-01, 3.7103e-01, 7.5892e-01, 4.9115e-02, + 4.7167e-01, 2.2361e-01, 5.0125e-02, 5.7553e-01, + 3.8853e-01, 1.8018e-03, 2.1029e-01, 6.3036e-01, + 5.1485e-01, 2.6272e-01, 3.7676e-01, 4.0031e-01, + 5.5369e-01, 8.5186e-01, 9.5478e-01, 9.7874e-01, + 6.0960e-01, 5.7114e-01, 1.6926e-01, 9.7937e-01, + 2.8179e-01, 6.0239e-01, 1.5084e-01, 4.4762e-01, + 1.8913e-02, 7.4072e-02, 1.9277e-01, 9.7400e-01, + 6.4101e-01, 9.2459e-01, 9.2259e-01, 2.5328e-01, + 4.3386e-01, 7.5744e-01, 2.6380e-01, 1.8999e-01, + 2.9650e-01, 1.2204e-01, 6.3492e-01, 9.8427e-01, + 6.6209e-01, 2.3144e-01, 1.9369e-01, 6.9676e-01, + 7.7157e-01, 7.1536e-01, 1.9457e-01, 9.1369e-01, + 
3.7470e-01, 4.8812e-01, 2.0180e-01, 8.6489e-01, + 8.3156e-01, 4.2700e-01, 5.5408e-01, 1.2690e-01, + 6.1767e-01, 8.0178e-01, 6.5887e-01, 3.7992e-01, + 3.4861e-02, 7.4799e-01, 5.4430e-01, 2.7734e-01, + 7.8324e-01, 7.5679e-01, 3.7977e-01, 9.8697e-01, + 2.1210e-01, 5.3826e-01, 9.8583e-01, 5.6862e-01, + 1.2409e-01, 4.7137e-01, 3.8364e-02, 7.9660e-02, + 1.0900e-01, 5.8409e-01, 3.5038e-01, 4.7469e-01, + 3.2646e-01, 9.7963e-01, 4.0955e-02, 7.7282e-01, + 2.3795e-01, 9.1451e-01, 6.2383e-02, 8.7752e-01, + 4.7785e-01, 6.3873e-01, 8.5135e-01, 6.3066e-01, + 9.2024e-01, 9.7095e-01, 6.0603e-01, 1.6173e-01, + 1.3692e-01, 9.7430e-02, 4.6913e-01, 1.3027e-01, + 6.4550e-01, 3.7097e-01, 1.0917e-01, 2.2626e-01, + 2.8272e-01, 9.3467e-01, 4.9372e-01, 1.1127e-02, + 2.9856e-01, 3.5094e-01, 6.7278e-01, 8.7179e-01, + 6.1750e-01, 9.3689e-01, 4.8396e-01, 7.8289e-01, + 6.7812e-01, 5.5047e-01, 6.6028e-01, 3.8193e-01, + 4.4025e-01, 4.7649e-02, 7.8513e-02, 8.4372e-01, + 1.5700e-01, 6.6635e-01, 6.0490e-02, 1.8217e-02, + 4.2363e-01, 7.2664e-01, 8.3042e-01, 2.0481e-01, + 7.7507e-01, 9.7178e-01, 2.8840e-01, 2.2320e-02, + 2.4571e-01, 3.1488e-01, 5.5842e-01, 4.4275e-01, + 6.1188e-01, 9.9512e-01, 2.2123e-01, 7.4091e-01, + 3.2291e-01, 6.8756e-01, 9.3283e-01, 5.8119e-01, + 3.3596e-01, 7.2542e-01, 4.1198e-01, 6.8787e-02, + 8.8370e-01, 9.5542e-01, 2.2005e-01, 1.3328e-01, + 8.7725e-02, 1.6866e-01, 2.6482e-01, 2.0425e-01, + 9.5978e-01, 8.7192e-01, 8.3889e-01, 8.5559e-01, + 1.8737e-01, 6.5055e-01, 3.7960e-01, 9.6932e-01, + 8.8268e-02, 4.3796e-01, 2.4756e-01, 5.3141e-01, + 5.8335e-01, 1.5163e-01, 4.6407e-01, 2.2600e-01, + 7.3892e-02, 1.8333e-01, 3.7744e-01, 6.2963e-01, + 3.5785e-01, 6.6981e-01, 3.8519e-01, 9.8844e-02, + 8.4155e-01, 9.8768e-01, 2.6869e-01, 6.6979e-01, + 3.9017e-01, 7.5747e-01, 8.9913e-01, 9.6730e-01, + 8.6778e-01, 8.9518e-01, 1.3607e-02, 7.1872e-01, + 4.9862e-01, 9.7831e-01, 3.5029e-01, 6.3459e-01, + 7.2985e-01, 3.1857e-01, 3.9858e-01, 8.7370e-01, + 7.2291e-01, 6.5606e-01, 4.2614e-01, 9.7369e-01, + 4.8132e-01, 3.9773e-01, 4.9498e-01, 9.3882e-01, + 6.7264e-01, 1.4831e-01, 5.6335e-01, 1.3523e-01, + 8.0317e-01, 4.7686e-01, 7.9812e-01, 1.6696e-01, + 1.1332e-01, 9.1364e-02, 5.8808e-01, 9.8429e-01, + 1.5637e-01, 2.4472e-01, 8.7386e-01, 6.3707e-01, + 5.1617e-01, 4.9782e-02, 8.8919e-01, 8.8265e-01, + 1.0222e-02, 2.0402e-01, 9.7026e-01, 8.6356e-01, + 1.9645e-01, 5.4133e-01, 7.4024e-01, 3.8777e-01, + 7.1005e-02, 2.4800e-01, 8.5222e-02, 6.5517e-01, + 4.2196e-01, 9.1175e-02, 1.1579e-01, 3.4600e-01, + 2.4814e-01, 4.3545e-01, 4.5340e-02, 1.3988e-01, + 9.2241e-01, 8.9701e-01, 4.8420e-01, 1.0904e-01, + 5.2898e-02, 5.7708e-01, 2.3747e-01, 4.6528e-01, + 2.2433e-01, 9.1101e-01, 8.3910e-01, 2.2365e-01, + 4.4439e-02, 4.6479e-01, 2.9108e-01, 6.7486e-01, + 2.7214e-01, 3.8601e-01, 1.6734e-01, 8.3921e-01, + 1.9230e-01, 9.9649e-01, 9.2040e-01, 5.7493e-01, + 2.5505e-02, 4.8876e-01, 2.6393e-01, 2.7271e-01, + 5.4310e-01, 8.6927e-01, 5.3533e-02, 1.9992e-01, + 5.7080e-01, 4.0119e-01, 2.5005e-01, 2.9507e-01, + 7.8395e-01, 3.1151e-01, 8.1157e-01, 6.1728e-01, + 8.6581e-01, 8.8523e-01, 8.1788e-01, 7.3004e-01, + 1.6512e-01, 3.2868e-02, 3.0387e-02, 2.1487e-01, + 6.6144e-01, 2.1608e-01, 8.9192e-01, 2.3551e-01, + 8.1202e-01, 2.6274e-01, 9.1459e-01, 7.5205e-01, + 7.3392e-01, 5.9391e-01, 2.2391e-01, 9.4922e-02, + 3.3381e-03, 9.6530e-01, 7.1320e-01, 5.3214e-01, + 5.2087e-01, 2.4255e-01, 9.0107e-01, 9.7729e-01, + 3.0352e-01, 9.8847e-01, 1.6505e-01, 5.9415e-01, + 7.8564e-01, 3.1263e-01, 1.5197e-01, 2.2146e-01, + 8.6721e-02, 5.5948e-01, 3.9253e-01, 3.7220e-01, + 
5.3485e-01, 8.5741e-01, 8.0291e-01, 1.4914e-01, + 8.5698e-02, 4.0980e-01, 7.9854e-02, 3.0832e-01, + 3.0797e-01, 8.7825e-01, 6.1055e-01, 5.3491e-02, + 3.0791e-02, 3.7147e-01, 7.7878e-01, 3.4236e-01, + 4.5054e-01, 1.5377e-02, 5.9456e-02, 5.9733e-01, + 4.9498e-01, 1.1290e-01, 8.5009e-01, 8.0132e-01, + 6.7297e-01, 9.7824e-02, 9.2051e-01, 1.9335e-01, + 2.4769e-01, 1.6690e-01, 5.0092e-01, 5.4391e-01, + 7.2974e-01, 5.6050e-01, 3.7368e-01, 8.9801e-01, + 3.7280e-01, 1.0774e-01, 2.2172e-02, 3.1493e-01, + 5.8527e-01, 7.1807e-01, 7.0470e-01, 1.1035e-01, + 2.5102e-01, 7.6188e-01, 7.9622e-01, 9.4422e-01, + 1.5518e-01, 3.1240e-02, 3.5932e-01, 1.9391e-01, + 1.0883e-01, 8.0519e-01, 6.5322e-01, 2.7200e-01, + 3.6103e-01, 4.5282e-01, 9.1939e-01, 4.6131e-01, + 3.8575e-01, 8.8690e-01, 4.9604e-01, 9.2190e-01, + 8.9283e-01, 5.2838e-01, 1.4113e-01, 4.4667e-01, + 2.2835e-01, 8.8676e-01, 4.8873e-01, 9.5014e-01, + 5.1040e-01, 8.4321e-01, 5.9227e-01, 4.5634e-01, + 8.5767e-01, 8.2056e-01, 3.4810e-01, 1.9600e-01, + 2.8430e-01, 7.0722e-01, 6.5270e-01, 7.1032e-01, + 5.2650e-01, 9.3358e-01, 4.5651e-01, 1.3223e-01, + 6.9249e-01, 3.3404e-01, 3.4727e-01, 2.1805e-01, + 2.2592e-02, 4.9539e-01, 7.1062e-02, 1.8299e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4331, 0.6450, 0.9910, ..., 0.2595, 0.5081, 0.5389]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.019162416458129883 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '54794', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.6940038204193115} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([7911, 3161, 1262, 5192, 2357, 7680, 3052, 5022, 1293, + 7499, 7977, 6646, 8728, 6750, 3375, 7755, 8716, 989, + 2588, 5755, 4685, 6627, 6922, 2935, 5765, 5675, 6658, + 7409, 1352, 5956, 5147, 9211, 2687, 6131, 3712, 585, + 972, 5746, 1667, 2824, 532, 1593, 3811, 2678, 9253, + 6720, 7376, 2847, 3241, 3587, 6951, 8223, 340, 5643, + 5214, 8395, 1798, 7092, 3059, 6235, 7618, 486, 1778, + 3237, 6697, 2502, 70, 2828, 606, 6952, 9286, 5888, + 3027, 7384, 4383, 6428, 4570, 1783, 1294, 7026, 2076, + 4918, 1488, 770, 957, 9836, 1056, 2315, 474, 4971, + 3554, 7405, 3832, 8094, 428, 4395, 7438, 9704, 1633, + 6658, 2294, 942, 9262, 5660, 6854, 8366, 1078, 2854, + 2434, 3985, 3190, 6248, 4349, 7344, 8178, 4674, 4996, + 6996, 4763, 2253, 1593, 2769, 2167, 4085, 6424, 9420, + 1242, 4354, 6300, 5604, 1686, 343, 7940, 6599, 3332, + 608, 9023, 9655, 8459, 2336, 1618, 2580, 2819, 4817, + 7559, 8620, 7978, 3358, 9453, 7322, 6766, 7975, 3814, + 2608, 1766, 6541, 541, 4726, 3955, 8201, 8104, 2555, + 2228, 8697, 1378, 8871, 834, 5468, 3992, 9300, 86, + 2146, 9398, 545, 591, 5437, 3320, 3820, 8590, 4135, + 3007, 2522, 3922, 759, 5236, 2202, 5387, 7098, 8105, + 3515, 2202, 6617, 5113, 2562, 4701, 3458, 7765, 7192, + 5627, 6408, 9799, 6567, 37, 4616, 271, 7974, 5307, + 9377, 7332, 242, 7565, 8542, 4407, 5590, 7693, 2731, + 2564, 8507, 8892, 8486, 4127, 1297, 9765, 3824, 1756, + 2347, 1093, 902, 2490, 5112, 7578, 5964, 7096, 3492, + 4916, 3077, 6301, 2536, 5861, 1994, 5412, 4184, 9482, + 5468, 6176, 4074, 3736, 5024, 9965, 8881, 7464, 2623, + 7850, 5867, 9092, 7361, 2749, 846, 3270, 8221, 7657, + 5496, 1168, 6665, 6181, 6205, 2270, 71, 245, 7836, + 4101, 8588, 9378, 9346, 9253, 744, 9634, 1404, 1681, + 2033, 4795, 7187, 7704, 9971, 4828, 565, 2094, 6252, + 5611, 2984, 5542, 4429, 355, 2391, 7037, 3331, 7438, + 5605, 5312, 7733, 9542, 2583, 2261, 2389, 2273, 3645, + 3262, 6647, 1886, 8483, 8073, 2214, 4868, 6777, 8269, + 220, 9717, 5193, 6143, 4955, 3714, 2595, 1190, 5189, + 9733, 4412, 3274, 76, 3147, 2339, 1088, 8301, 86, + 524, 2350, 3099, 415, 3796, 2977, 9907, 1395, 6139, + 3508, 4520, 9604, 3340, 6041, 595, 5957, 587, 1195, + 3410, 8756, 8453, 6996, 1505, 8596, 2662, 1437, 6740, + 7061, 9248, 4638, 2125, 1555, 5142, 5829, 750, 446, + 1115, 4258, 132, 8639, 8024, 4417, 7514, 9888, 5171, + 3804, 9336, 7399, 7498, 3446, 517, 7466, 8729, 1786, + 5529, 3386, 5123, 5833, 8522, 7193, 4596, 3871, 9148, + 2138, 2608, 1676, 5641, 518, 8701, 9620, 1125, 9274, + 49, 13, 2714, 3823, 7010, 8129, 8631, 2865, 1750, + 395, 7820, 5688, 9017, 6767, 3565, 4202, 6678, 2244, + 4721, 9333, 8878, 5819, 3634, 8317, 592, 1447, 3012, + 6300, 8346, 1608, 1213, 80, 7267, 4298, 6409, 2292, + 1670, 6487, 5529, 8657, 2479, 6350, 6733, 7577, 2776, + 1458, 945, 8267, 2346, 8008, 3294, 2469, 7020, 2715, + 4869, 2481, 110, 4234, 4845, 890, 5463, 618, 8967, + 4348, 4756, 6306, 3882, 9183, 1947, 8107, 5369, 2472, + 2945, 5358, 9391, 1200, 4322, 8117, 5100, 4644, 6706, + 1082, 1513, 5226, 9231, 7175, 4584, 8366, 633, 1495, + 9566, 7257, 4456, 3124, 5334, 3920, 5125, 2467, 1786, + 6231, 6395, 2461, 1514, 7642, 3727, 7424, 7613, 140, + 1890, 4821, 1469, 8259, 8978, 1802, 5378, 6805, 3869, + 3783, 3061, 8658, 5541, 472, 3859, 5961, 4577, 2988, + 7578, 2133, 7348, 1104, 8479, 3907, 7659, 5190, 50, + 2069, 5632, 5929, 7650, 8492, 5399, 5679, 9805, 3063, + 5599, 7144, 9305, 7057, 6281, 1286, 2319, 3291, 9287, + 6547, 
67, 7805, 2707, 2756, 5914, 771, 1219, 7868, + 5501, 6882, 9791, 7785, 1336, 5478, 8053, 5806, 7905, + 3258, 9112, 5104, 8343, 355, 2229, 5367, 3909, 5464, + 6309, 3948, 8995, 9885, 961, 7409, 259, 5234, 1566, + 6742, 4902, 8034, 5441, 8932, 8602, 8641, 2237, 3033, + 9579, 7336, 5320, 4915, 4711, 249, 8378, 2695, 4635, + 92, 567, 6131, 6716, 4080, 593, 9208, 2994, 3780, + 2398, 5252, 9333, 6118, 8719, 1357, 5108, 235, 4942, + 1134, 1638, 6292, 3484, 3730, 5250, 2588, 5449, 8152, + 3481, 8309, 8874, 8518, 9128, 7674, 540, 7089, 6715, + 9035, 4981, 6442, 2568, 3698, 4804, 8943, 9401, 5794, + 9163, 6978, 9060, 5634, 2027, 2034, 1419, 2698, 183, + 7135, 9013, 6760, 5936, 2836, 9968, 4826, 125, 558, + 4114, 8419, 405, 4366, 2095, 5583, 4237, 5302, 1643, + 9648, 2160, 4098, 7817, 4906, 2647, 1861, 3658, 3239, + 7570, 9370, 1637, 5703, 5705, 4124, 7739, 3422, 7854, + 374, 4555, 5446, 8087, 1258, 7641, 3331, 206, 3865, + 5881, 2468, 7763, 6628, 5348, 3313, 1312, 6220, 7644, + 2313, 2844, 4936, 8483, 7091, 8320, 4877, 8564, 3187, + 7061, 8879, 9360, 4897, 4797, 5039, 1909, 3193, 7560, + 7693, 3282, 6661, 6942, 7250, 971, 5884, 8450, 9680, + 3181, 9619, 3815, 7862, 3788, 2853, 1779, 620, 2071, + 7700, 9810, 9051, 7192, 1172, 513, 1386, 4644, 9842, + 8082, 2156, 1625, 6587, 1021, 7260, 7201, 4689, 6301, + 3054, 353, 719, 3436, 2864, 3509, 2571, 8255, 3289, + 2857, 9690, 7954, 7304, 3426, 8079, 286, 5262, 2657, + 9448, 9159, 1770, 1621, 7884, 4261, 6666, 4278, 4311, + 7112, 8402, 1282, 8940, 4636, 3650, 8680, 7177, 7540, + 4412, 8615, 2593, 7393, 8896, 8921, 925, 9474, 1231, + 9773, 527, 5255, 560, 8846, 659, 7410, 8294, 942, + 381, 8693, 7884, 7087, 7532, 7167, 4092, 5794, 5787, + 1151, 2797, 6916, 9663, 9835, 638, 5822, 4133, 7949, + 2884, 3556, 5839, 4844, 9052, 2314, 7830, 3146, 2062, + 4109, 7795, 1041, 8997, 551, 976, 8822, 2074, 4611, + 9870, 9228, 3272, 764, 4377, 9210, 5300, 6377, 8413, + 8545, 7738, 2478, 981, 8075, 2541, 730, 3435, 6845, + 5767, 9726, 1931, 5221, 4111, 3176, 1229, 2817, 2841, + 4797, 7580, 1623, 3843, 1681, 230, 6130, 6583, 5659, + 1878, 8027, 707, 3549, 2451, 852, 509, 669, 881, + 8913, 9704, 6400, 4696, 2366, 2605, 200, 1715, 7757, + 2614, 8597, 2968, 9280, 1679, 6206, 4716, 535, 4893, + 9648, 755, 7542, 2709, 8355, 4740, 8184, 1893, 4039, + 8992, 4769, 9806, 5916, 3331, 2004, 6705, 5834, 6838, + 3763, 2856, 3248, 7450, 851, 7429, 3879, 5390, 1163, + 6404, 5606, 1150, 1948, 251, 8914, 5922, 4549, 8687, + 2810, 7589, 9366, 9847, 3758, 1716, 1962, 2678, 4578, + 3134, 7173, 219, 8841, 5097, 3735, 2863, 5504, 1245, + 4000, 1797, 2381, 29, 3878, 2914, 2744, 2104, 7360, + 1303, 8133, 533, 2444, 608, 8144, 3134, 7588, 4660, + 219]), + values=tensor([8.4162e-01, 2.9686e-01, 6.3714e-01, 1.6817e-01, + 1.6215e-01, 6.1437e-01, 1.6080e-01, 4.0610e-01, + 7.3645e-01, 9.7877e-01, 8.8529e-01, 8.5771e-01, + 2.8589e-02, 2.0612e-02, 9.9026e-01, 9.0065e-02, + 3.1218e-01, 2.7400e-01, 6.6157e-01, 3.2569e-01, + 1.7011e-01, 4.0876e-01, 6.9676e-01, 5.2962e-01, + 4.7625e-01, 5.2877e-02, 6.4254e-01, 8.4744e-01, + 5.7453e-01, 9.2040e-01, 9.7805e-01, 8.0590e-01, + 4.4096e-01, 1.8318e-01, 8.9755e-01, 2.8005e-01, + 2.3996e-01, 1.8286e-01, 1.7292e-01, 7.1175e-01, + 2.0714e-02, 4.9211e-01, 5.1628e-01, 5.1485e-01, + 8.2763e-01, 1.8358e-01, 8.4319e-01, 4.9073e-01, + 1.8916e-01, 5.6885e-01, 3.3605e-01, 6.3594e-01, + 5.1482e-01, 4.9282e-01, 5.9958e-01, 7.0978e-01, + 1.7567e-01, 1.1230e-02, 2.8903e-01, 9.9449e-01, + 1.0897e-01, 3.9189e-01, 5.9951e-01, 6.7777e-01, + 8.9047e-01, 6.3510e-01, 8.1965e-01, 
8.2343e-01, + 5.8432e-01, 2.8112e-01, 3.4195e-02, 8.1705e-01, + 2.9005e-01, 1.3897e-01, 6.9293e-01, 1.5391e-01, + 2.2869e-01, 8.3383e-01, 5.4870e-02, 5.6915e-01, + 3.8852e-01, 2.6519e-01, 4.5692e-01, 5.4502e-02, + 5.7453e-01, 7.9181e-01, 5.6057e-01, 7.9160e-01, + 4.4644e-01, 5.0880e-01, 7.4451e-01, 3.2819e-01, + 8.0628e-01, 8.6929e-01, 3.4416e-01, 7.6103e-01, + 3.6389e-01, 3.5701e-01, 8.2362e-01, 6.3586e-01, + 8.3509e-01, 8.9715e-01, 8.8398e-01, 8.3148e-01, + 8.4779e-01, 9.5284e-01, 8.3857e-01, 6.0527e-01, + 2.4645e-01, 8.9898e-01, 4.8666e-01, 8.8162e-01, + 6.1103e-01, 7.0425e-01, 6.8737e-01, 9.5292e-01, + 1.6806e-02, 4.9765e-01, 7.8396e-01, 8.0695e-01, + 7.4330e-01, 8.0275e-01, 4.5302e-01, 8.7655e-01, + 7.2798e-01, 7.4388e-01, 9.7066e-02, 1.6811e-01, + 1.8099e-01, 2.9537e-01, 2.8883e-01, 6.7815e-01, + 7.5316e-01, 1.1697e-01, 4.4732e-01, 7.4137e-01, + 6.2402e-01, 4.4193e-01, 6.8972e-01, 9.3106e-01, + 3.8489e-01, 6.1625e-01, 7.1358e-01, 1.2465e-01, + 8.8973e-01, 4.3619e-01, 3.8639e-01, 6.1804e-01, + 3.4197e-01, 5.8895e-02, 7.7694e-01, 9.2218e-03, + 4.0218e-01, 3.5958e-01, 5.1621e-01, 2.1586e-02, + 1.8157e-01, 7.4709e-01, 1.2399e-01, 5.7793e-02, + 8.7317e-01, 3.4726e-01, 5.8867e-01, 9.9642e-01, + 1.2702e-01, 3.3654e-01, 5.9711e-01, 9.5511e-01, + 7.8878e-02, 1.0158e-01, 8.9875e-01, 4.1339e-01, + 3.2155e-01, 9.3632e-01, 2.3121e-01, 5.1482e-01, + 3.4904e-01, 7.3977e-01, 5.6021e-01, 6.6210e-01, + 3.0267e-01, 5.9350e-03, 8.0584e-01, 1.7500e-01, + 4.5025e-01, 2.9967e-01, 6.1546e-01, 8.6159e-01, + 3.5830e-01, 6.4170e-01, 3.6363e-01, 7.0448e-01, + 8.1901e-01, 2.5682e-01, 1.0264e-01, 5.9217e-01, + 1.0416e-01, 3.9465e-01, 8.6154e-01, 2.6364e-01, + 9.8520e-01, 3.4529e-01, 6.1024e-01, 9.4500e-01, + 4.5989e-01, 1.2879e-02, 1.0827e-02, 6.9805e-01, + 9.4260e-01, 2.2079e-02, 1.8200e-01, 2.3832e-02, + 1.8420e-01, 3.2436e-01, 5.0197e-01, 5.8627e-01, + 6.7237e-01, 5.7570e-01, 5.1838e-01, 5.0143e-01, + 7.0347e-01, 5.7707e-01, 2.1966e-01, 5.2688e-01, + 9.8927e-01, 3.2737e-01, 3.0341e-01, 8.9357e-02, + 9.6648e-01, 6.1951e-01, 8.4411e-01, 5.1367e-02, + 9.2186e-01, 3.2165e-02, 5.2456e-02, 8.1840e-01, + 6.5262e-01, 1.4597e-01, 3.5673e-01, 6.4159e-01, + 3.6264e-01, 6.3084e-02, 8.3980e-01, 2.7736e-01, + 2.9819e-01, 8.1897e-01, 3.4132e-01, 4.9222e-01, + 3.9032e-01, 7.3209e-02, 6.3313e-01, 9.4217e-01, + 8.0046e-02, 8.8661e-01, 5.4139e-02, 9.1303e-01, + 6.2584e-01, 8.8930e-01, 6.4369e-01, 1.6622e-01, + 9.3592e-01, 7.0938e-01, 3.8339e-01, 6.4713e-01, + 2.2843e-01, 9.2113e-01, 6.4446e-03, 5.0513e-01, + 7.8366e-02, 4.3885e-01, 8.3500e-01, 2.3568e-01, + 2.3491e-01, 4.6610e-01, 4.7569e-01, 7.3570e-03, + 3.8697e-01, 3.0485e-02, 9.0027e-01, 3.9759e-01, + 3.0559e-01, 3.6608e-01, 8.3416e-01, 4.2041e-01, + 5.2523e-01, 4.0730e-02, 2.0952e-01, 2.7056e-01, + 7.0586e-01, 8.2052e-01, 2.9046e-01, 8.7390e-01, + 6.6215e-01, 9.9736e-01, 8.2920e-01, 9.5761e-01, + 4.7888e-01, 8.5421e-01, 1.9642e-01, 4.6196e-01, + 2.6211e-01, 4.3134e-01, 5.5166e-01, 3.0145e-02, + 2.4152e-01, 9.4430e-01, 8.6994e-01, 7.1148e-01, + 2.3961e-01, 3.3245e-01, 5.5968e-01, 1.9932e-01, + 3.6100e-02, 4.2814e-01, 9.4590e-01, 7.4276e-01, + 5.4453e-01, 1.6084e-01, 2.2574e-01, 3.1439e-01, + 3.7958e-02, 8.4663e-01, 2.3722e-01, 2.8379e-01, + 5.7939e-01, 6.0883e-01, 5.5850e-01, 3.0063e-01, + 4.9289e-01, 6.4727e-02, 3.2414e-01, 2.7767e-02, + 1.5239e-01, 4.5157e-01, 6.5935e-01, 8.5589e-01, + 8.2281e-01, 1.3098e-01, 5.7403e-01, 8.8951e-01, + 2.3362e-01, 4.4099e-01, 2.9584e-01, 2.1959e-01, + 9.8822e-01, 7.6077e-01, 3.2764e-02, 2.2886e-01, + 1.5180e-01, 2.4079e-02, 2.9072e-01, 
1.0037e-01, + 8.8988e-01, 7.2298e-01, 4.5100e-01, 7.2322e-01, + 5.2411e-02, 7.5577e-01, 6.5300e-01, 1.9748e-01, + 4.6204e-01, 7.1756e-02, 5.5358e-01, 6.3139e-01, + 1.1557e-01, 8.2697e-01, 8.2970e-01, 9.8441e-01, + 9.1614e-02, 3.1059e-03, 7.4810e-01, 9.8221e-01, + 1.2364e-01, 1.9347e-01, 1.9584e-02, 9.5603e-01, + 6.7176e-01, 6.8425e-01, 1.9972e-01, 5.4195e-01, + 6.3874e-01, 7.5708e-01, 7.9813e-01, 3.3289e-01, + 8.7933e-01, 7.3670e-01, 6.5968e-01, 2.5864e-01, + 6.8278e-01, 3.0164e-01, 9.2539e-01, 4.0284e-01, + 4.5093e-01, 8.7324e-01, 8.0143e-01, 4.8116e-01, + 3.4355e-01, 8.8919e-01, 1.5159e-01, 7.5020e-01, + 9.8429e-01, 9.9678e-01, 4.1278e-01, 7.5484e-01, + 4.7396e-01, 2.1042e-01, 7.2843e-02, 1.4297e-01, + 6.7845e-01, 8.5086e-02, 5.1551e-01, 4.9162e-01, + 9.7966e-01, 3.6049e-01, 4.7386e-01, 5.7859e-01, + 7.0943e-01, 9.1205e-02, 6.6770e-01, 5.3590e-01, + 3.9755e-01, 9.6597e-01, 1.8599e-01, 4.7860e-01, + 4.2112e-02, 1.8189e-01, 2.4269e-01, 7.7636e-01, + 9.7677e-01, 1.4310e-01, 6.1272e-02, 1.4161e-01, + 5.1371e-01, 6.6644e-01, 6.0793e-01, 5.3914e-01, + 2.2731e-01, 8.7603e-01, 2.9862e-01, 4.6113e-01, + 4.3524e-01, 9.1175e-01, 3.9624e-01, 1.0346e-01, + 6.1379e-01, 4.7208e-01, 2.6070e-01, 5.2191e-01, + 3.9442e-01, 1.4743e-01, 3.8716e-01, 3.7503e-01, + 1.3589e-01, 5.0473e-01, 6.7728e-01, 9.0319e-01, + 6.1190e-01, 1.9275e-01, 3.5936e-01, 6.4987e-01, + 3.1396e-01, 2.8159e-01, 8.3147e-02, 4.5551e-01, + 6.6356e-01, 7.0244e-02, 1.7106e-02, 6.4133e-02, + 6.5616e-02, 2.5462e-01, 7.3261e-01, 5.0907e-01, + 6.7076e-01, 4.0218e-01, 6.6139e-01, 1.8017e-01, + 2.0269e-01, 2.6760e-01, 6.2695e-01, 7.8797e-01, + 9.1126e-01, 8.1648e-01, 3.9421e-01, 4.6220e-01, + 6.8836e-01, 5.0359e-01, 6.4967e-01, 9.8028e-01, + 9.1023e-01, 1.1718e-01, 4.9634e-01, 5.6551e-01, + 6.8389e-01, 3.6489e-01, 3.9624e-01, 5.3191e-01, + 5.4360e-01, 8.7333e-01, 6.8269e-01, 7.5760e-01, + 1.1377e-01, 1.5015e-01, 4.3439e-02, 6.1503e-01, + 3.4363e-01, 4.3709e-01, 6.4079e-01, 7.8204e-01, + 1.1716e-01, 5.7515e-01, 6.4631e-02, 1.4533e-01, + 5.2120e-01, 8.7179e-01, 4.7879e-01, 9.3303e-02, + 2.1845e-01, 6.3517e-01, 6.8688e-01, 3.5430e-01, + 9.3739e-01, 7.0591e-01, 7.7055e-01, 7.5304e-01, + 3.9056e-02, 1.4006e-02, 2.2822e-01, 9.9645e-01, + 5.4400e-02, 8.0951e-01, 1.8179e-01, 1.9010e-01, + 3.0349e-01, 2.4530e-01, 3.4986e-01, 4.8871e-01, + 3.9206e-01, 5.5413e-01, 6.4310e-02, 9.0201e-01, + 2.0129e-01, 7.2874e-01, 7.4927e-01, 4.7834e-01, + 1.0777e-02, 2.5515e-01, 2.9574e-01, 6.6742e-01, + 5.9485e-01, 3.8711e-01, 1.8718e-01, 8.6283e-01, + 9.1934e-01, 8.9372e-01, 2.7499e-01, 3.4311e-02, + 5.7311e-01, 2.1089e-01, 4.2092e-01, 2.0315e-01, + 3.0962e-01, 1.3347e-01, 9.5185e-01, 4.1612e-01, + 4.3202e-01, 2.5416e-01, 7.7755e-01, 7.1075e-01, + 4.2297e-01, 2.0397e-01, 1.3687e-01, 9.6658e-01, + 3.0341e-01, 9.1344e-01, 5.3464e-01, 4.7718e-01, + 7.5356e-01, 8.8694e-01, 8.9264e-01, 8.1240e-02, + 7.6203e-02, 3.2592e-01, 5.3572e-02, 7.7634e-02, + 7.7625e-01, 1.5836e-02, 5.8242e-01, 2.1002e-01, + 8.9491e-02, 6.8441e-01, 6.4412e-01, 2.9177e-02, + 6.1618e-01, 1.2632e-01, 5.3563e-01, 5.0509e-01, + 5.5684e-02, 5.2367e-01, 2.2220e-01, 6.6590e-01, + 3.5274e-01, 2.7941e-01, 3.9525e-01, 1.3783e-01, + 9.9827e-01, 2.9496e-01, 3.3969e-02, 7.0416e-01, + 6.2724e-03, 3.5413e-01, 4.3579e-01, 2.8431e-01, + 6.6274e-01, 5.5869e-03, 4.6483e-01, 9.4265e-01, + 1.9390e-01, 2.6871e-01, 8.9957e-01, 7.6910e-01, + 9.2851e-01, 8.5764e-01, 2.8679e-01, 5.6221e-02, + 6.3789e-01, 4.9980e-01, 2.9834e-01, 1.6547e-01, + 7.2863e-01, 7.4335e-01, 1.7202e-01, 6.0711e-01, + 6.2744e-01, 2.3141e-01, 7.7128e-01, 
6.9575e-01, + 6.1477e-01, 3.6069e-01, 4.9022e-02, 9.4412e-01, + 3.5188e-02, 5.7942e-01, 2.9890e-02, 8.4625e-01, + 7.8233e-01, 8.3817e-01, 5.2931e-01, 7.8769e-01, + 5.6272e-01, 7.8863e-01, 1.4452e-03, 7.3161e-02, + 6.3288e-01, 7.0660e-01, 5.5303e-01, 6.9013e-02, + 9.2833e-01, 6.1609e-01, 2.7628e-01, 3.8301e-01, + 7.8596e-01, 8.8683e-01, 7.7771e-01, 2.8419e-01, + 6.9736e-02, 9.6704e-01, 8.8528e-01, 2.2352e-01, + 2.4524e-01, 8.5698e-01, 6.4514e-01, 6.2044e-01, + 8.3695e-01, 9.1677e-01, 1.1584e-01, 7.9561e-01, + 9.3166e-01, 2.7498e-01, 9.4050e-01, 3.5642e-01, + 3.1322e-01, 3.1113e-01, 9.8499e-01, 7.1392e-02, + 3.1345e-01, 7.0182e-01, 8.1882e-01, 4.9724e-01, + 5.4993e-01, 8.6382e-01, 6.3868e-01, 1.5000e-02, + 5.5265e-01, 2.4538e-01, 9.4592e-02, 7.1374e-01, + 7.2630e-01, 8.8515e-01, 8.8786e-01, 8.9234e-01, + 8.5142e-03, 2.3346e-01, 2.8292e-01, 2.9767e-01, + 8.2406e-01, 9.4692e-01, 2.3154e-01, 6.5589e-02, + 9.1864e-01, 6.0994e-01, 7.0920e-01, 9.5407e-01, + 6.2214e-01, 5.5778e-01, 6.0645e-01, 4.8067e-01, + 2.8270e-01, 1.7280e-01, 1.0979e-01, 9.5386e-01, + 6.6667e-01, 7.6987e-01, 9.1941e-01, 3.8360e-01, + 8.9910e-02, 8.7933e-01, 6.6930e-01, 4.3986e-01, + 5.4556e-01, 4.9679e-01, 4.7997e-01, 1.4883e-01, + 4.5139e-01, 1.8357e-01, 2.4832e-01, 3.7126e-01, + 9.7198e-01, 3.6657e-01, 2.2981e-01, 1.6650e-02, + 6.3243e-01, 1.5482e-01, 8.0779e-01, 1.6308e-01, + 4.8827e-01, 1.8842e-01, 9.6686e-01, 7.4347e-01, + 6.0157e-01, 4.7871e-01, 4.4855e-02, 8.6666e-01, + 9.0341e-01, 7.7832e-01, 1.2656e-01, 4.6982e-01, + 6.7927e-01, 2.0556e-01, 4.8391e-01, 1.0870e-01, + 6.0179e-01, 8.6866e-01, 5.9287e-02, 3.0958e-01, + 3.3432e-01, 5.5680e-01, 6.7654e-01, 5.0669e-01, + 3.4185e-01, 1.6502e-01, 2.0130e-01, 4.4286e-01, + 1.0996e-01, 7.5754e-01, 3.4793e-01, 2.8929e-01, + 7.9544e-01, 1.0781e-02, 9.4940e-01, 4.9267e-01, + 7.7169e-01, 5.1969e-01, 6.1145e-01, 8.8191e-01, + 3.8444e-01, 2.4200e-01, 1.7867e-01, 8.6492e-01, + 8.4970e-01, 5.0800e-01, 5.6133e-01, 6.4340e-01, + 5.8533e-01, 6.0481e-01, 2.3846e-01, 1.4102e-01, + 3.7119e-01, 5.6156e-01, 2.8298e-01, 7.1845e-01, + 1.4668e-01, 4.7663e-01, 4.2636e-01, 1.0275e-02, + 2.9743e-01, 1.1376e-02, 7.0160e-02, 4.7251e-01, + 9.5838e-02, 8.5482e-01, 7.9510e-01, 3.6924e-01, + 9.5391e-01, 4.9116e-01, 3.0690e-04, 8.6869e-01, + 9.4535e-01, 3.7650e-01, 9.4503e-01, 9.6097e-01, + 5.9794e-03, 4.1384e-01, 2.2876e-01, 5.0617e-01, + 2.3170e-02, 3.0538e-01, 4.0455e-01, 9.4603e-02, + 2.0357e-01, 2.1517e-01, 9.0257e-01, 3.8634e-01, + 2.4908e-01, 8.0986e-02, 3.9443e-01, 2.4807e-01, + 2.8439e-01, 4.2593e-01, 3.6809e-01, 1.9789e-01, + 9.8594e-01, 8.9386e-01, 6.2015e-01, 1.7708e-01, + 3.8974e-01, 7.4022e-01, 5.4677e-01, 4.3473e-01, + 3.6837e-01, 3.6400e-01, 9.9743e-01, 7.1220e-01, + 8.7479e-01, 2.2457e-01, 8.5053e-01, 4.2955e-01, + 7.5176e-01, 1.0062e-01, 7.3583e-01, 2.5060e-02, + 5.0283e-02, 8.5963e-01, 4.0599e-01, 9.0739e-01, + 2.8833e-01, 2.1531e-01, 1.5452e-02, 9.4690e-01, + 3.1250e-01, 5.6093e-01, 6.1900e-01, 7.3466e-01, + 7.2323e-01, 8.9401e-01, 7.7222e-01, 9.3134e-01, + 6.5260e-01, 8.7584e-01, 4.5350e-01, 7.4368e-01, + 5.8671e-02, 3.5188e-01, 8.3654e-02, 3.3309e-01, + 1.0706e-01, 4.1255e-01, 7.9140e-01, 2.8887e-01, + 7.8441e-02, 2.1380e-01, 9.3099e-01, 4.7350e-01, + 9.2422e-01, 1.4535e-01, 3.3640e-01, 9.2002e-01, + 5.7304e-01, 6.5916e-01, 8.6914e-01, 2.5138e-01, + 3.1603e-01, 3.5203e-01, 3.1131e-02, 6.5641e-01, + 5.7494e-01, 4.7371e-01, 5.0827e-01, 1.5062e-01, + 5.8503e-01, 8.7183e-01, 7.0288e-01, 9.2990e-01, + 1.5908e-01, 4.9748e-01, 9.3223e-01, 9.9643e-01, + 3.8223e-01, 7.7110e-02, 3.4388e-02, 
8.0052e-01, + 8.6565e-01, 2.9712e-01, 8.5471e-01, 8.6162e-01, + 4.4628e-01, 5.6741e-01, 4.0056e-02, 7.8839e-01, + 8.9395e-01, 6.8466e-01, 3.0041e-01, 2.2500e-02, + 1.8457e-01, 1.3861e-01, 1.0338e-01, 4.6879e-01, + 9.2662e-01, 7.4286e-01, 5.5725e-01, 5.3379e-01, + 6.8767e-01, 7.4719e-01, 9.0113e-01, 7.7449e-01, + 8.9973e-01, 7.3285e-01, 2.0062e-01, 1.0561e-01, + 4.3906e-01, 3.1029e-01, 6.7605e-01, 7.1638e-01, + 5.8611e-01, 9.9285e-01, 6.1233e-01, 8.2145e-01, + 3.1992e-01, 5.9686e-01, 6.7828e-01, 7.8322e-01, + 3.7277e-01, 3.7731e-01, 6.0362e-01, 1.4908e-01, + 5.2504e-01, 7.4365e-01, 2.2895e-01, 4.0076e-01, + 3.6715e-01, 8.1935e-01, 9.5313e-01, 5.8028e-01, + 4.6037e-01, 2.6326e-01, 5.8730e-01, 3.8793e-01, + 9.9606e-01, 9.6277e-01, 2.9002e-01, 4.2454e-01, + 2.8341e-01, 2.3389e-01, 3.9656e-01, 8.9761e-01, + 7.3352e-01, 1.8460e-01, 7.1116e-01, 2.3146e-01, + 5.8144e-01, 9.4575e-02, 8.3670e-01, 7.9219e-01, + 4.0643e-01, 1.8008e-01, 1.0599e-02, 4.8597e-01, + 2.9480e-01, 2.6967e-01, 8.8380e-03, 9.0770e-02]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8612, 0.4581, 0.7630, ..., 0.0492, 0.2811, 0.3451]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 1.6940038204193115 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '339631', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.730679035186768} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1000, 1000, 1000]), + col_indices=tensor([4972, 1405, 2738, 821, 7457, 7836, 7120, 5534, 695, + 6668, 7153, 1083, 1806, 3581, 349, 2928, 9019, 9318, + 9657, 5964, 9091, 9508, 6651, 348, 2668, 9801, 712, + 819, 3104, 2583, 7723, 85, 5857, 6849, 7363, 7282, + 890, 1906, 2514, 9264, 7871, 8221, 7845, 1927, 471, + 2360, 1083, 3678, 1270, 9654, 9373, 7266, 9755, 6534, + 9043, 6484, 9707, 3465, 997, 9966, 6876, 4035, 1407, + 8477, 4573, 4827, 5524, 7695, 5419, 327, 5516, 1549, + 7525, 9004, 8876, 6314, 2296, 6027, 9452, 72, 7459, + 3044, 8559, 628, 5871, 4084, 7963, 158, 9675, 7550, + 1796, 3225, 8892, 8104, 445, 2716, 8203, 4628, 6210, + 6301, 5576, 3974, 7025, 8928, 9625, 8171, 958, 6404, + 7807, 8154, 451, 3321, 6728, 5234, 6976, 1597, 9431, + 4125, 7315, 3074, 7163, 9117, 5604, 3258, 3491, 1302, + 780, 3021, 7140, 4452, 7130, 506, 742, 4904, 4779, + 9113, 8947, 7664, 7637, 7987, 302, 2532, 7421, 9325, + 6951, 8027, 5405, 1574, 4570, 9089, 7665, 528, 4791, + 1506, 5090, 7803, 281, 4795, 4297, 886, 3647, 8898, + 3120, 2601, 2574, 3655, 4214, 5707, 7550, 3370, 2324, + 3105, 7422, 3066, 3760, 2726, 2303, 8568, 327, 2589, + 4688, 3603, 3785, 9818, 9283, 469, 9286, 2273, 8934, + 9475, 2423, 1816, 6319, 9100, 1702, 4299, 4023, 4882, + 7771, 666, 7807, 7905, 397, 3796, 7251, 6833, 7018, + 1044, 9904, 4747, 3464, 5425, 3407, 175, 604, 2034, + 5882, 7224, 7439, 7369, 6262, 2750, 1254, 2862, 9223, + 4910, 6784, 6117, 9740, 1422, 8971, 7737, 1310, 9814, + 9075, 1744, 3303, 5610, 8739, 407, 7188, 9037, 2975, + 1568, 2983, 3434, 8327, 2274, 5215, 604, 2230, 2563, + 9134, 7881, 5093, 3453, 5598, 6471, 8742, 985, 2483, + 2627, 1373, 280, 8576, 3642, 7906, 7329, 9906, 6733, + 1745, 6623, 7734, 9790, 7276, 5528, 4362, 3188, 4638, + 4280, 2963, 1142, 372, 5049, 2909, 7165, 3193, 6148, + 2271, 1609, 1609, 3499, 5181, 9245, 9403, 2085, 5724, + 1181, 1771, 2960, 1899, 6350, 6779, 7732, 2210, 1463, + 9426, 5307, 9888, 664, 9851, 9122, 4726, 1985, 4843, + 7919, 8459, 1506, 1424, 6535, 3932, 1577, 8292, 9293, + 4141, 260, 3379, 4488, 4, 9670, 8903, 4311, 6111, + 7184, 4147, 9673, 2790, 579, 3619, 2848, 7168, 4487, + 6706, 3418, 9886, 3386, 9654, 7227, 6301, 8182, 2042, + 5889, 8620, 4715, 2314, 6102, 7812, 4452, 8306, 3245, + 7661, 4687, 9454, 372, 2859, 4089, 1590, 2543, 5916, + 5118, 5541, 2765, 9023, 1245, 8801, 911, 9153, 8331, + 2736, 8562, 6910, 738, 6503, 5375, 4609, 3990, 2407, + 9645, 8807, 6638, 1515, 7548, 7650, 8775, 7638, 2895, + 6146, 8851, 53, 3827, 901, 3926, 4196, 8539, 6422, + 4461, 7590, 8706, 9720, 3957, 975, 1113, 7177, 3493, + 2456, 3981, 2750, 1678, 6119, 1019, 1015, 1019, 2970, + 6212, 4435, 9544, 7136, 3621, 5132, 6730, 1344, 8173, + 9568, 9492, 3762, 8302, 9499, 2078, 7665, 8552, 646, + 4448, 2453, 4214, 4831, 3595, 6202, 6664, 5181, 1561, + 6495, 6356, 988, 6217, 6069, 4983, 5304, 5430, 6831, + 4249, 4868, 5418, 5675, 8390, 9467, 6739, 6570, 383, + 7997, 8462, 8831, 1856, 398, 1814, 9850, 683, 2765, + 7155, 6554, 5795, 7478, 9531, 9477, 5234, 8954, 6229, + 5571, 6549, 4977, 4582, 7552, 1900, 5253, 2076, 9585, + 1810, 4182, 1517, 8795, 4585, 8047, 1703, 8851, 7992, + 9840, 7872, 8668, 1604, 1226, 7542, 2036, 8321, 8280, + 6516, 7301, 5800, 6655, 556, 7240, 1925, 6371, 8117, + 4008, 1573, 297, 4617, 3500, 1096, 8348, 7795, 2166, + 9919, 7046, 907, 4373, 9942, 7959, 4612, 2191, 7471, + 1551, 1253, 9622, 5451, 6375, 2718, 9172, 1328, 2206, + 6610, 8512, 9620, 3166, 70, 7979, 8362, 4317, 9319, + 
7145, 9471, 1102, 6052, 3736, 7897, 3018, 9318, 4626, + 3035, 3845, 5367, 445, 1352, 6964, 3743, 6956, 6815, + 8871, 4156, 4269, 4098, 3309, 4397, 5465, 3229, 1823, + 6544, 3842, 8845, 3484, 822, 6074, 4146, 9045, 7757, + 2777, 66, 9385, 2615, 599, 3543, 2303, 1567, 9769, + 3202, 4399, 2891, 5597, 2945, 5620, 5806, 4689, 9005, + 4553, 137, 7443, 892, 7777, 7252, 8060, 4460, 732, + 8162, 7391, 2245, 7604, 9063, 1073, 7517, 4465, 394, + 9244, 8, 9498, 9116, 9485, 5995, 3554, 4940, 8129, + 1458, 4587, 2078, 1470, 3731, 5211, 8596, 5666, 6142, + 3359, 1472, 8209, 9872, 1611, 3338, 6557, 3513, 3864, + 4827, 2518, 9053, 530, 3073, 418, 1537, 6645, 7411, + 2104, 9688, 8234, 7188, 7937, 4386, 6521, 9854, 2965, + 8265, 9490, 2081, 612, 4080, 5175, 9752, 1944, 2111, + 791, 5228, 349, 166, 4411, 9072, 6256, 1725, 7501, + 2904, 3632, 5352, 2745, 3201, 3650, 4953, 3431, 9310, + 9213, 5480, 3794, 6746, 4307, 2422, 1271, 9553, 7121, + 8150, 7353, 1417, 870, 806, 3925, 4907, 2082, 5868, + 6597, 5497, 4547, 6014, 1707, 4907, 455, 5675, 355, + 6173, 5131, 1727, 6109, 8687, 7982, 736, 6797, 2166, + 1063, 4073, 7028, 1072, 3289, 5899, 2997, 9326, 5623, + 1788, 1089, 8209, 9870, 4781, 9452, 2450, 7131, 2788, + 2853, 5511, 9849, 2656, 7920, 674, 1300, 8932, 416, + 8093, 9874, 7324, 5225, 4621, 926, 980, 8985, 1462, + 7040, 9170, 9555, 8932, 9914, 219, 2964, 7753, 6648, + 2390, 6208, 5248, 5320, 4117, 7437, 430, 4484, 2611, + 5121, 8916, 1447, 5161, 1499, 9794, 1265, 4597, 8256, + 1496, 7641, 2433, 6962, 8984, 1170, 5695, 5461, 547, + 9880, 7152, 272, 80, 1601, 2649, 2962, 4267, 9755, + 1424, 3921, 9152, 9785, 972, 1812, 7994, 2298, 8551, + 708, 668, 3575, 9762, 8171, 2486, 2048, 3839, 1631, + 1897, 7990, 4894, 2240, 6177, 6171, 9539, 2465, 9722, + 9336, 2467, 6414, 2170, 3729, 1314, 3949, 8063, 6208, + 823, 5984, 6659, 6415, 2310, 7658, 6846, 2561, 8927, + 5336, 4676, 5025, 5266, 9954, 5759, 4786, 389, 1969, + 8561, 5438, 7925, 6769, 2925, 3548, 5821, 4979, 5495, + 4420, 2929, 4317, 8343, 6738, 1023, 1264, 816, 5153, + 6387, 9609, 7627, 9609, 8951, 8523, 9734, 7325, 692, + 9657, 595, 9347, 5128, 5970, 6833, 8478, 2254, 1877, + 386, 8236, 3505, 9182, 9403, 1371, 4925, 3513, 8183, + 6878, 3320, 8021, 3631, 1112, 3099, 3784, 132, 834, + 2343, 2612, 8494, 4918, 7861, 7283, 6749, 3142, 1632, + 4302, 8980, 7897, 515, 2749, 5212, 8365, 982, 7831, + 4787, 5300, 7623, 7109, 9010, 3076, 1284, 467, 7979, + 5644, 8907, 7522, 6202, 6272, 5531, 9124, 4043, 7300, + 250, 3433, 7921, 7720, 9874, 1712, 2699, 8196, 3473, + 3245, 9036, 8606, 942, 3651, 8212, 979, 3999, 790, + 7368, 4160, 7287, 156, 3177, 9487, 6150, 318, 6342, + 8617, 6278, 8278, 8892, 1288, 6740, 9226, 7056, 9217, + 1811]), + values=tensor([0.6196, 0.0114, 0.0780, 0.5906, 0.7505, 0.5993, 0.7945, + 0.6434, 0.3118, 0.3104, 0.2388, 0.2046, 0.7622, 0.0777, + 0.2242, 0.6515, 0.2665, 0.6905, 0.5118, 0.3093, 0.1972, + 0.8279, 0.7758, 0.5459, 0.1846, 0.6189, 0.2595, 0.9186, + 0.9405, 0.9879, 0.1042, 0.7520, 0.4405, 0.4499, 0.2446, + 0.5636, 0.5715, 0.4879, 0.0724, 0.4352, 0.0923, 0.1412, + 0.7009, 0.0672, 0.7384, 0.5888, 0.4764, 0.9840, 0.4351, + 0.7794, 0.0814, 0.2714, 0.4090, 0.8987, 0.3438, 0.2136, + 0.2617, 0.9509, 0.7670, 0.2193, 0.5040, 0.9778, 0.5290, + 0.4287, 0.4993, 0.6605, 0.4552, 0.9814, 0.6170, 0.0979, + 0.2177, 0.2630, 0.6411, 0.5989, 0.5365, 0.6080, 0.2088, + 0.6048, 0.4912, 0.3916, 0.1699, 0.3572, 0.0296, 0.1407, + 0.2305, 0.8274, 0.8609, 0.2424, 0.0171, 0.3855, 0.8338, + 0.0725, 0.1924, 0.3285, 0.2749, 0.2272, 0.8472, 0.4564, + 0.0152, 0.9213, 0.6620, 
0.1417, 0.5779, 0.7029, 0.8146, + 0.0682, 0.3470, 0.1203, 0.3985, 0.2526, 0.9231, 0.2354, + 0.8514, 0.4049, 0.6712, 0.6265, 0.6751, 0.7498, 0.8617, + 0.8223, 0.5316, 0.8207, 0.7825, 0.3233, 0.8320, 0.0205, + 0.8938, 0.9868, 0.4228, 0.0904, 0.1323, 0.2104, 0.5759, + 0.7486, 0.0044, 0.1971, 0.8234, 0.8820, 0.6865, 0.9321, + 0.1935, 0.5814, 0.3290, 0.8572, 0.7029, 0.1494, 0.4076, + 0.6910, 0.7343, 0.6929, 0.6184, 0.4509, 0.3354, 0.0941, + 0.8616, 0.1833, 0.4475, 0.8165, 0.7540, 0.0272, 0.8090, + 0.0075, 0.3321, 0.8506, 0.8823, 0.3041, 0.9698, 0.2208, + 0.0487, 0.0944, 0.5882, 0.4789, 0.2189, 0.7276, 0.4046, + 0.6510, 0.9386, 0.5425, 0.0569, 0.9029, 0.3170, 0.4560, + 0.5885, 0.3819, 0.2215, 0.3602, 0.5884, 0.5067, 0.6889, + 0.4126, 0.5286, 0.8708, 0.3178, 0.5978, 0.0567, 0.3482, + 0.7129, 0.2828, 0.9540, 0.1061, 0.9066, 0.0741, 0.4504, + 0.7678, 0.9215, 0.2588, 0.3298, 0.0216, 0.3903, 0.8528, + 0.9555, 0.5742, 0.2240, 0.8449, 0.7139, 0.5892, 0.2852, + 0.5518, 0.0411, 0.1811, 0.1596, 0.8581, 0.7313, 0.1657, + 0.9703, 0.4189, 0.2887, 0.6831, 0.2369, 0.7634, 0.0408, + 0.1000, 0.5756, 0.0151, 0.0558, 0.6882, 0.9572, 0.3370, + 0.9688, 0.4463, 0.3662, 0.7410, 0.8981, 0.8137, 0.7748, + 0.5107, 0.2607, 0.4339, 0.7448, 0.8445, 0.3417, 0.7647, + 0.9351, 0.4583, 0.4626, 0.8247, 0.0549, 0.7239, 0.1440, + 0.1775, 0.0044, 0.8569, 0.9508, 0.9056, 0.7959, 0.1560, + 0.6764, 0.7063, 0.5503, 0.9575, 0.2693, 0.8490, 0.4128, + 0.9714, 0.7818, 0.2400, 0.9716, 0.6862, 0.9725, 0.7942, + 0.6956, 0.0325, 0.8764, 0.6559, 0.7553, 0.5630, 0.4887, + 0.4200, 0.4990, 0.6213, 0.7425, 0.5404, 0.5604, 0.6518, + 0.0938, 0.0682, 0.2423, 0.9653, 0.1080, 0.3011, 0.1907, + 0.9731, 0.6522, 0.5311, 0.7260, 0.4884, 0.7901, 0.0902, + 0.8898, 0.7591, 0.1705, 0.1715, 0.3061, 0.0252, 0.3621, + 0.2712, 0.6191, 0.9504, 0.7431, 0.1438, 0.4641, 0.1864, + 0.8295, 0.2078, 0.4217, 0.6064, 0.8959, 0.2519, 0.3506, + 0.8294, 0.1176, 0.3434, 0.4118, 0.0391, 0.7786, 0.0788, + 0.3045, 0.2730, 0.0333, 0.9811, 0.5870, 0.5459, 0.4360, + 0.7475, 0.4862, 0.2886, 0.3649, 0.8278, 0.5869, 0.3199, + 0.6849, 0.0285, 0.4652, 0.5037, 0.2746, 0.7393, 0.6614, + 0.8444, 0.4003, 0.6111, 0.3264, 0.6698, 0.0865, 0.1122, + 0.8565, 0.1133, 0.1540, 0.9887, 0.7073, 0.1087, 0.5905, + 0.7220, 0.0133, 0.1397, 0.8300, 0.0324, 0.8468, 0.3611, + 0.2452, 0.3323, 0.2390, 0.8962, 0.8835, 0.5849, 0.8184, + 0.7935, 0.0660, 0.7531, 0.0935, 0.0932, 0.0297, 0.7930, + 0.4667, 0.6703, 0.2160, 0.8538, 0.6976, 0.7919, 0.6943, + 0.5213, 0.4328, 0.9720, 0.9873, 0.3954, 0.1633, 0.0324, + 0.1143, 0.9281, 0.5805, 0.4522, 0.1840, 0.3413, 0.7327, + 0.8227, 0.7055, 0.4474, 0.9122, 0.5135, 0.1786, 0.5499, + 0.6141, 0.0692, 0.4429, 0.4518, 0.1137, 0.3476, 0.6665, + 0.4712, 0.6495, 0.4523, 0.1555, 0.4635, 0.4607, 0.8030, + 0.8073, 0.9042, 0.2096, 0.7414, 0.3257, 0.5309, 0.6492, + 0.5166, 0.4222, 0.1800, 0.6811, 0.2543, 0.7807, 0.8292, + 0.0337, 0.3617, 0.3639, 0.6057, 0.9194, 0.7802, 0.0115, + 0.4737, 0.1007, 0.6828, 0.4037, 0.6724, 0.8920, 0.1067, + 0.2017, 0.4498, 0.1470, 0.6942, 0.3285, 0.6704, 0.5841, + 0.6335, 0.3846, 0.4546, 0.4434, 0.2040, 0.5921, 0.4102, + 0.9338, 0.0085, 0.3554, 0.7339, 0.9378, 0.6361, 0.4532, + 0.8577, 0.0919, 0.8382, 0.8771, 0.9330, 0.9355, 0.3061, + 0.1102, 0.2291, 0.2677, 0.0542, 0.0233, 0.9935, 0.2414, + 0.5357, 0.0318, 0.7366, 0.4385, 0.9397, 0.9292, 0.4194, + 0.0669, 0.0388, 0.1712, 0.0651, 0.0535, 0.4304, 0.4725, + 0.6864, 0.1406, 0.9443, 0.6677, 0.0116, 0.4935, 0.0069, + 0.5753, 0.7223, 0.1602, 0.1981, 0.8268, 0.1964, 0.1201, + 0.4187, 0.9315, 0.6910, 0.1607, 0.8515, 
0.6607, 0.1831, + 0.4073, 0.4556, 0.6316, 0.3967, 0.7742, 0.0299, 0.8444, + 0.4225, 0.4172, 0.0107, 0.7973, 0.1650, 0.5326, 0.3424, + 0.1339, 0.3026, 0.2960, 0.8852, 0.5235, 0.3847, 0.9486, + 0.9744, 0.7896, 0.0970, 0.8576, 0.9830, 0.8277, 0.6377, + 0.6529, 0.3114, 0.3164, 0.5753, 0.9730, 0.1173, 0.7698, + 0.0827, 0.2026, 0.6431, 0.6275, 0.9894, 0.9420, 0.4072, + 0.0354, 0.6950, 0.2008, 0.8919, 0.9516, 0.2605, 0.2712, + 0.3361, 0.8498, 0.9614, 0.7851, 0.9593, 0.8290, 0.5531, + 0.4367, 0.7236, 0.1173, 0.3492, 0.3041, 0.4874, 0.6287, + 0.4729, 0.9250, 0.4853, 0.7466, 0.5273, 0.7349, 0.8320, + 0.4933, 0.8099, 0.6428, 0.4210, 0.3452, 0.1825, 0.1989, + 0.0157, 0.5749, 0.0068, 0.8464, 0.1640, 0.6036, 0.8043, + 0.1153, 0.1076, 0.3058, 0.3746, 0.4799, 0.3056, 0.9610, + 0.9127, 0.1886, 0.6659, 0.9943, 0.2424, 0.3077, 0.7229, + 0.1714, 0.4319, 0.1211, 0.4333, 0.8060, 0.0129, 0.3734, + 0.2043, 0.8662, 0.0012, 0.7686, 0.9359, 0.7522, 0.5003, + 0.8250, 0.6805, 0.4069, 0.5015, 0.3243, 0.2291, 0.6806, + 0.3313, 0.5285, 0.2989, 0.5609, 0.2452, 0.4071, 0.5377, + 0.6963, 0.5657, 0.0299, 0.3653, 0.3895, 0.5235, 0.1440, + 0.6253, 0.9459, 0.1675, 0.0939, 0.6242, 0.6776, 0.4588, + 0.7446, 0.8041, 0.5422, 0.2562, 0.4694, 0.6440, 0.4271, + 0.2793, 0.9064, 0.9594, 0.0651, 0.7852, 0.2015, 0.1116, + 0.8944, 0.6879, 0.1163, 0.9220, 0.0269, 0.3468, 0.8496, + 0.0650, 0.6194, 0.8677, 0.0084, 0.7029, 0.1122, 0.9175, + 0.6419, 0.6504, 0.1891, 0.4529, 0.1905, 0.8371, 0.7381, + 0.0046, 0.0964, 0.3131, 0.0824, 0.0035, 0.8733, 0.0679, + 0.3019, 0.0364, 0.0796, 0.9762, 0.3044, 0.5891, 0.5660, + 0.3560, 0.7397, 0.3023, 0.7408, 0.9024, 0.2952, 0.3074, + 0.6006, 0.7129, 0.0891, 0.3833, 0.2026, 0.5861, 0.2465, + 0.9574, 0.5804, 0.7353, 0.6099, 0.7893, 0.5485, 0.6035, + 0.2349, 0.9017, 0.3856, 0.5056, 0.0559, 0.3784, 0.5241, + 0.2260, 0.3588, 0.0084, 0.9935, 0.5469, 0.0778, 0.5282, + 0.6435, 0.6276, 0.8416, 0.0584, 0.0156, 0.0848, 0.6834, + 0.9575, 0.2536, 0.5391, 0.2540, 0.8715, 0.2861, 0.5460, + 0.7418, 0.0582, 0.9596, 0.0844, 0.4428, 0.3021, 0.7742, + 0.6810, 0.4964, 0.7090, 0.4618, 0.1299, 0.8111, 0.3574, + 0.2849, 0.4554, 0.2968, 0.9678, 0.0744, 0.2541, 0.3812, + 0.5255, 0.2774, 0.7182, 0.6545, 0.3975, 0.7431, 0.2071, + 0.5491, 0.9683, 0.3905, 0.1637, 0.7412, 0.4003, 0.0465, + 0.7927, 0.1817, 0.9405, 0.6939, 0.2323, 0.2436, 0.5829, + 0.6947, 0.0693, 0.6028, 0.7684, 0.0952, 0.9277, 0.4091, + 0.9988, 0.6707, 0.1396, 0.1664, 0.2853, 0.2971, 0.9641, + 0.1686, 0.0860, 0.6361, 0.7716, 0.8737, 0.6248, 0.0015, + 0.3479, 0.1224, 0.1738, 0.6328, 0.2988, 0.9543, 0.5409, + 0.0047, 0.1434, 0.4069, 0.6226, 0.8539, 0.0827, 0.8015, + 0.2080, 0.8185, 0.6100, 0.2443, 0.4521, 0.2640, 0.8834, + 0.8696, 0.2577, 0.6036, 0.3935, 0.5480, 0.9590, 0.9883, + 0.7734, 0.5568, 0.6996, 0.0841, 0.9604, 0.4231, 0.9460, + 0.7411, 0.8125, 0.9801, 0.0252, 0.0568, 0.4796, 0.9708, + 0.3495, 0.6681, 0.3051, 0.6744, 0.6027, 0.3956, 0.2149, + 0.6839, 0.9513, 0.7049, 0.9582, 0.9253, 0.8298, 0.8000, + 0.7741, 0.1759, 0.5742, 0.7797, 0.5900, 0.6486, 0.5787, + 0.2751, 0.3526, 0.0315, 0.3035, 0.6247, 0.4841, 0.5887, + 0.1239, 0.9917, 0.3466, 0.9117, 0.5496, 0.3028, 0.9246, + 0.4866, 0.4419, 0.3541, 0.1189, 0.6739, 0.6026, 0.2252, + 0.8252, 0.4125, 0.4146, 0.0962, 0.2742, 0.6324, 0.9534, + 0.6412, 0.1798, 0.3442, 0.4388, 0.5642, 0.0475, 0.3987, + 0.9407, 0.2760, 0.9317, 0.9750, 0.2067, 0.0954, 0.3279, + 0.9463, 0.0269, 0.3547, 0.9989, 0.6845, 0.0502, 0.2075, + 0.9402, 0.9150, 0.2132, 0.4162, 0.9616, 0.9976, 0.1376, + 0.2488, 0.9598, 0.7374, 0.5987, 0.2084, 0.2543, 0.9909, 
+ 0.9516, 0.5972, 0.2731, 0.2157, 0.3846, 0.1856, 0.3478, + 0.8278, 0.9150, 0.8429, 0.1244, 0.6218, 0.9288, 0.3402, + 0.1837, 0.8875, 0.3500, 0.7672, 0.3250, 0.7786, 0.8135, + 0.0048, 0.4153, 0.0159, 0.0130, 0.8717, 0.9126, 0.3962, + 0.4043, 0.1810, 0.9416, 0.6720, 0.1907, 0.4396, 0.7264, + 0.0043, 0.4775, 0.5628, 0.5494, 0.7407, 0.3406]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4250, 0.1408, 0.2235, ..., 0.5768, 0.6841, 0.8384]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 9.730679035186768 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '366482', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 11.093939065933228} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5850, 8366, 5592, 2122, 6679, 1664, 2469, 7640, 8756, + 2915, 7211, 1515, 5525, 1719, 3439, 4040, 8857, 5364, + 6132, 895, 3852, 156, 8811, 9741, 357, 3518, 4399, + 7286, 180, 9528, 5416, 6953, 1721, 2595, 7283, 9795, + 7825, 5079, 6282, 136, 4669, 6702, 3303, 3010, 2380, + 2256, 7997, 5883, 9682, 9926, 8146, 9663, 3941, 6670, + 9041, 817, 4692, 3045, 7544, 9848, 5442, 871, 3702, + 7376, 94, 6327, 3751, 7760, 7748, 920, 3358, 4678, + 8037, 93, 1218, 9719, 1249, 634, 196, 2894, 7487, + 1159, 8188, 332, 5966, 4396, 4268, 8665, 6886, 7804, + 7445, 4388, 7265, 7541, 8403, 5348, 9890, 2631, 5205, + 8499, 4507, 2643, 7756, 857, 4020, 8538, 7850, 4986, + 7312, 3766, 1567, 2852, 7235, 6927, 653, 379, 1077, + 6334, 4614, 1987, 3571, 6355, 1814, 6184, 9414, 2153, + 8882, 3562, 6625, 5036, 2967, 1233, 9339, 4935, 5597, + 9689, 4093, 5663, 8161, 6587, 3112, 612, 6031, 2698, + 3403, 9263, 9319, 8735, 104, 2810, 3672, 5639, 8827, + 5048, 8805, 1031, 9837, 9524, 6912, 3766, 3659, 7073, + 737, 797, 5226, 7104, 3103, 6515, 4992, 1665, 5226, + 6199, 6712, 6310, 7443, 3988, 2676, 2322, 244, 3312, + 4295, 2693, 9044, 9455, 7558, 2076, 364, 3468, 4685, + 2690, 1818, 1795, 8073, 8740, 3982, 5591, 7437, 4434, + 6272, 8601, 5896, 7206, 452, 124, 6062, 3722, 2858, + 3166, 6119, 1841, 8789, 9596, 5567, 5894, 7970, 5360, + 9933, 6197, 3790, 8303, 9150, 3759, 378, 6338, 8692, + 3037, 225, 8540, 8302, 3869, 1980, 7102, 3526, 84, + 2428, 5319, 4508, 9845, 2414, 8840, 833, 2752, 1575, + 8598, 7280, 7141, 2098, 7332, 9471, 7172, 6064, 5586, + 6496, 4719, 7246, 9857, 461, 1971, 1577, 2883, 521, + 887, 7601, 1096, 7587, 8032, 9839, 9759, 3694, 4208, + 2775, 4747, 9442, 6136, 6253, 7707, 4233, 955, 7815, + 9711, 2360, 974, 4097, 9684, 7356, 2815, 7178, 3217, + 6396, 7951, 6798, 3086, 4009, 4560, 5258, 4636, 5136, + 484, 5572, 9213, 7528, 8274, 5548, 1629, 8245, 2276, + 9250, 7073, 612, 1264, 8789, 9413, 9556, 4024, 8035, + 4389, 2075, 2015, 6780, 9946, 8182, 3173, 4875, 4374, + 7146, 6566, 1212, 3788, 6737, 1525, 4118, 597, 1164, + 95, 8578, 5382, 7, 4863, 8244, 8410, 5238, 6413, + 1176, 8621, 5893, 5470, 5285, 4907, 
1576, 9176, 6701, + 5861, 5303, 5175, 2021, 3075, 7189, 9086, 6729, 1443, + 4021, 3184, 1282, 194, 3967, 741, 191, 6596, 1251, + 1613, 4577, 1550, 7988, 7486, 1702, 7074, 9193, 1036, + 2186, 5023, 2564, 6337, 940, 1519, 8948, 8449, 9328, + 479, 6279, 8562, 8923, 2685, 1574, 5617, 5722, 970, + 640, 8699, 1647, 1059, 8229, 1516, 892, 441, 4405, + 4899, 5037, 6072, 8409, 3457, 6678, 8778, 5168, 4725, + 1582, 2499, 3623, 3399, 2800, 6268, 2077, 192, 5047, + 1851, 3023, 9282, 7259, 5334, 5942, 1157, 8925, 7158, + 2680, 8224, 1863, 9792, 1845, 5045, 6307, 5364, 7590, + 3501, 3167, 5703, 4100, 6744, 3117, 6631, 2272, 3513, + 1946, 3024, 5825, 1185, 1348, 6559, 74, 4615, 3419, + 4395, 7611, 6167, 8539, 9714, 7761, 9431, 2318, 6641, + 1733, 4522, 5800, 6094, 9427, 1318, 4807, 6951, 6988, + 2813, 985, 7549, 9176, 9180, 4980, 2792, 4241, 7249, + 1163, 61, 7125, 2555, 6886, 8991, 196, 9270, 35, + 1529, 3941, 7793, 7718, 7033, 4030, 6810, 850, 9329, + 5979, 6544, 751, 4582, 8368, 565, 7754, 7052, 3290, + 8677, 983, 7257, 1909, 8475, 6667, 894, 6714, 9857, + 238, 8379, 8656, 2402, 83, 4439, 4472, 7734, 2074, + 3272, 618, 1715, 8742, 745, 2543, 7748, 5408, 6663, + 8178, 2886, 7594, 1196, 5498, 3894, 893, 8965, 6034, + 6443, 2520, 5737, 483, 2405, 4283, 6209, 1176, 448, + 606, 958, 8428, 3541, 8984, 1463, 4865, 173, 2388, + 4515, 610, 3428, 834, 1897, 3416, 3506, 6850, 7650, + 1567, 2088, 4588, 3271, 6086, 2429, 5007, 1641, 3405, + 5664, 2262, 1925, 5269, 9056, 8204, 5139, 609, 3768, + 1372, 8780, 3638, 7690, 2428, 8787, 4274, 9543, 7138, + 8745, 2025, 668, 6826, 8790, 6413, 9920, 6540, 348, + 4430, 4748, 8976, 1596, 914, 2089, 5266, 7048, 2343, + 5663, 5852, 3682, 5650, 2554, 2359, 5522, 968, 4538, + 556, 900, 332, 6036, 4210, 6798, 9668, 7214, 3249, + 934, 7260, 1264, 9553, 9573, 1341, 5076, 7088, 723, + 8670, 6474, 3175, 9943, 1202, 328, 6087, 194, 8768, + 258, 7554, 9741, 2069, 3477, 9790, 8686, 2875, 4984, + 7848, 4751, 5568, 9759, 8560, 7437, 4239, 7813, 4618, + 6633, 990, 5961, 926, 7923, 4657, 2745, 2212, 9361, + 5154, 9425, 4873, 6600, 5372, 8037, 7620, 2502, 3781, + 7433, 6170, 819, 1336, 484, 3513, 4395, 19, 8130, + 7693, 6153, 6352, 4202, 7488, 1192, 4764, 7301, 6821, + 6865, 6976, 937, 1234, 1710, 549, 5075, 6580, 8729, + 4805, 1375, 3711, 1305, 4371, 3503, 8608, 3833, 3074, + 1072, 7655, 4667, 966, 192, 6852, 7586, 1491, 9585, + 8090, 3373, 1438, 2850, 8318, 8429, 9764, 8245, 7536, + 1862, 6568, 9277, 3906, 4401, 671, 9709, 9370, 6046, + 9211, 9673, 4331, 921, 674, 7979, 374, 7224, 8804, + 5223, 9172, 2758, 3516, 3647, 8604, 8106, 9070, 2337, + 3498, 3861, 2253, 9022, 3709, 4466, 2031, 9776, 7382, + 6631, 6983, 229, 9213, 4744, 5134, 2773, 3818, 2045, + 3684, 5192, 2488, 8503, 6845, 9997, 5752, 1325, 8942, + 352, 1614, 4161, 7427, 7988, 1814, 8864, 5443, 6505, + 9390, 1752, 5714, 9392, 1996, 5576, 5760, 390, 6435, + 7898, 7417, 7401, 1217, 7128, 1412, 6756, 7275, 5202, + 2409, 1660, 2736, 5263, 8336, 8044, 3978, 3020, 5519, + 7853, 8427, 824, 1792, 4254, 4572, 5695, 1587, 1988, + 4588, 728, 1627, 3761, 5276, 8300, 992, 8389, 3069, + 3780, 9133, 145, 9462, 7403, 1537, 9647, 5057, 3362, + 9039, 6376, 8446, 4900, 5977, 7345, 4098, 447, 1820, + 252, 9481, 6404, 6592, 1295, 8043, 4753, 3605, 3518, + 2606, 2287, 9068, 9762, 699, 1959, 5988, 6313, 7817, + 5094, 8321, 2706, 7762, 8810, 5984, 698, 2889, 3562, + 3511, 7298, 3188, 9974, 5668, 6020, 8731, 9606, 2958, + 8385, 4702, 6639, 3111, 8106, 914, 952, 5956, 6944, + 6389, 6930, 4511, 693, 2408, 8878, 4458, 6005, 4687, + 5262, 264, 7556, 7239, 
6348, 319, 3127, 4870, 9830, + 6495, 9430, 82, 2845, 2158, 7553, 2485, 4718, 9487, + 9863, 2351, 9993, 9611, 7651, 6752, 9890, 4226, 7914, + 1090, 3994, 273, 9314, 2869, 8105, 8063, 8350, 5143, + 2771, 6914, 6289, 1069, 6603, 7055, 1101, 2035, 1775, + 4440, 1100, 9988, 6654, 8779, 3492, 65, 3068, 7601, + 6582, 5059, 9136, 6153, 4692, 6999, 662, 8797, 8102, + 5680, 9525, 2546, 5756, 4348, 7370, 557, 1546, 4320, + 5734]), + values=tensor([1.6341e-01, 5.4230e-01, 5.9412e-01, 6.2649e-01, + 6.1492e-01, 1.1463e-02, 8.8850e-01, 3.1827e-01, + 1.4850e-01, 1.7623e-01, 5.0631e-01, 8.7356e-01, + 2.6593e-01, 3.7335e-01, 6.7543e-01, 8.1424e-01, + 3.7460e-01, 6.9153e-01, 8.0907e-01, 8.3914e-01, + 9.7417e-02, 3.4410e-04, 1.3414e-01, 2.2155e-01, + 6.0112e-02, 8.4328e-01, 2.7848e-01, 1.4958e-01, + 7.9439e-01, 7.7172e-01, 2.7028e-01, 1.6852e-01, + 6.2794e-01, 3.7628e-01, 7.9275e-01, 2.0746e-01, + 6.5757e-02, 6.6970e-01, 1.3521e-02, 6.0790e-01, + 5.0451e-01, 9.2714e-01, 7.8707e-01, 4.1523e-01, + 5.5244e-01, 9.7376e-01, 6.7472e-01, 9.9321e-01, + 9.5176e-01, 6.2001e-01, 7.8968e-01, 3.0430e-01, + 8.2453e-01, 9.4042e-02, 5.1519e-01, 8.5738e-01, + 5.8535e-01, 8.7087e-01, 6.5326e-01, 1.7395e-01, + 3.0247e-01, 7.3945e-02, 4.1833e-01, 8.0312e-01, + 8.8320e-01, 5.1558e-01, 2.5666e-01, 5.6112e-01, + 5.5084e-01, 7.0649e-01, 3.5059e-01, 7.8879e-01, + 5.2108e-02, 9.8239e-01, 3.4744e-01, 5.8319e-01, + 9.1928e-02, 4.9802e-02, 8.4140e-01, 2.0825e-01, + 6.5549e-01, 3.3967e-02, 4.5861e-01, 3.1781e-01, + 7.2623e-01, 4.3890e-01, 9.1717e-01, 6.7930e-01, + 2.2711e-01, 5.3114e-01, 9.4610e-01, 1.2012e-01, + 3.8896e-01, 9.0657e-01, 8.7514e-01, 9.4601e-01, + 6.8092e-01, 4.2792e-01, 7.8983e-01, 7.0867e-01, + 6.8668e-01, 7.3584e-01, 4.8951e-01, 1.1601e-01, + 5.8369e-01, 8.1548e-02, 4.6998e-01, 8.2472e-01, + 8.1798e-01, 6.4792e-01, 7.9098e-01, 4.6883e-01, + 5.3531e-01, 9.9356e-01, 2.4036e-01, 4.8060e-01, + 2.8861e-01, 8.7768e-01, 7.6854e-01, 2.6962e-01, + 9.4503e-01, 7.5491e-01, 6.3333e-01, 3.9724e-01, + 6.1673e-01, 9.9300e-01, 8.4452e-02, 6.1927e-01, + 7.8274e-01, 9.4288e-01, 5.5703e-01, 9.9018e-01, + 8.7030e-01, 6.7458e-01, 7.1377e-01, 3.9155e-01, + 4.0923e-01, 1.9269e-01, 7.8647e-01, 1.7260e-01, + 1.9351e-01, 1.6644e-02, 6.3159e-01, 4.2569e-01, + 3.5414e-01, 1.5447e-01, 6.0382e-01, 1.1039e-01, + 7.4996e-01, 9.7809e-01, 4.9703e-01, 6.8052e-02, + 4.0995e-03, 1.5993e-01, 2.5589e-01, 6.1582e-01, + 4.1141e-01, 8.3175e-01, 9.2280e-02, 6.6768e-01, + 2.1373e-01, 7.4338e-01, 3.8856e-01, 4.0554e-01, + 2.4237e-01, 6.8970e-01, 5.1570e-01, 2.2133e-04, + 8.5322e-01, 4.7984e-01, 4.1539e-01, 8.1641e-01, + 5.1215e-01, 7.5280e-01, 4.9588e-04, 8.3219e-01, + 1.3510e-01, 5.6800e-01, 8.8518e-01, 9.6846e-01, + 2.4576e-01, 9.1717e-01, 4.6194e-01, 4.1655e-01, + 8.5447e-01, 1.2544e-01, 6.0240e-01, 5.7586e-01, + 2.2428e-01, 4.5450e-01, 2.4531e-01, 3.5914e-01, + 5.8131e-01, 4.4973e-01, 3.0343e-01, 8.7101e-01, + 5.4478e-01, 5.0386e-01, 6.8135e-01, 4.2381e-01, + 3.1555e-02, 3.0972e-01, 2.4608e-01, 1.9970e-01, + 4.6368e-01, 9.9803e-01, 8.2862e-01, 8.5141e-01, + 8.7867e-01, 4.8625e-01, 5.2020e-01, 4.1959e-01, + 1.7473e-01, 2.5225e-01, 7.2666e-01, 9.2040e-01, + 1.8559e-01, 6.3520e-01, 4.5196e-01, 9.4681e-01, + 9.5216e-01, 1.0019e-01, 9.7707e-01, 4.5094e-01, + 5.0805e-01, 5.1974e-01, 5.3486e-01, 1.8077e-01, + 8.5364e-01, 7.2740e-01, 4.8004e-01, 6.8966e-01, + 2.0804e-01, 7.7792e-01, 6.3289e-01, 1.2722e-01, + 9.0620e-01, 4.9687e-01, 5.6847e-01, 1.3671e-01, + 4.0281e-01, 1.7219e-01, 1.9050e-01, 6.2485e-01, + 1.0260e-01, 2.5271e-02, 9.4031e-01, 9.4275e-01, + 7.8410e-03, 
3.3465e-02, 3.6601e-01, 2.9329e-01, + 2.0289e-01, 8.1331e-01, 9.6038e-01, 7.8543e-01, + 8.1769e-01, 9.2929e-01, 5.1055e-01, 3.5358e-01, + 4.8515e-01, 3.5044e-01, 1.9924e-01, 8.1918e-01, + 2.9889e-01, 3.7500e-02, 2.3185e-01, 6.2795e-01, + 5.8909e-01, 1.2007e-01, 7.5640e-01, 8.0080e-01, + 8.4206e-01, 2.3327e-01, 8.8223e-01, 9.9512e-01, + 2.9164e-01, 4.3867e-01, 7.8315e-01, 9.6653e-01, + 4.0617e-01, 6.9039e-01, 4.3199e-01, 8.2813e-01, + 3.5563e-01, 9.1892e-01, 5.9521e-01, 8.9108e-01, + 3.7947e-01, 2.0591e-01, 6.7351e-01, 5.7368e-01, + 2.8254e-01, 6.7739e-02, 4.4276e-01, 9.7228e-01, + 4.6186e-01, 6.9010e-01, 8.4715e-01, 1.5150e-01, + 2.0227e-01, 3.6577e-01, 9.5994e-01, 2.5249e-01, + 6.5535e-02, 5.6708e-01, 7.2591e-01, 7.0050e-01, + 4.6753e-01, 9.1326e-02, 6.7496e-01, 5.8336e-01, + 3.1459e-01, 4.7441e-01, 6.5107e-01, 2.7592e-01, + 1.4742e-01, 4.5510e-01, 7.4301e-01, 4.8033e-01, + 1.4801e-01, 7.7726e-01, 9.3909e-01, 8.1948e-01, + 2.9724e-01, 8.1459e-01, 4.7430e-01, 3.8389e-01, + 5.8885e-02, 3.1119e-01, 5.3076e-01, 1.4421e-01, + 9.6859e-01, 9.4119e-01, 8.4274e-01, 8.1548e-02, + 4.2885e-02, 1.4390e-01, 9.9599e-01, 9.8519e-01, + 5.8954e-01, 4.9409e-01, 6.5081e-01, 4.7729e-01, + 7.1507e-01, 2.3377e-01, 3.5997e-01, 9.6579e-01, + 3.9773e-01, 2.1377e-02, 3.5134e-01, 6.4577e-01, + 9.9705e-01, 4.8704e-01, 4.2052e-01, 3.9684e-01, + 9.2759e-01, 3.0995e-01, 3.1089e-01, 7.6678e-01, + 5.4422e-04, 9.2904e-01, 1.6571e-01, 9.8144e-01, + 7.6337e-01, 8.8536e-01, 1.6786e-01, 9.4677e-01, + 3.5525e-01, 3.5127e-01, 1.3314e-01, 8.7067e-01, + 2.8725e-01, 6.7870e-01, 3.3831e-01, 4.6605e-01, + 7.4196e-01, 8.8785e-01, 9.6258e-01, 7.7168e-01, + 4.8538e-01, 2.0843e-01, 6.4007e-01, 1.9033e-01, + 3.4627e-01, 1.1059e-01, 7.1554e-01, 5.5574e-01, + 7.6910e-01, 7.3835e-01, 2.8739e-01, 6.9284e-01, + 7.5175e-02, 9.9640e-01, 9.1137e-01, 2.2022e-01, + 4.9205e-01, 9.0083e-01, 5.1433e-01, 4.3040e-01, + 3.8550e-01, 4.9743e-01, 2.3713e-01, 2.5804e-01, + 8.8147e-02, 9.0523e-02, 8.5804e-01, 1.4407e-01, + 3.2087e-01, 6.9830e-01, 5.0311e-02, 5.9795e-01, + 2.3588e-01, 9.7651e-01, 1.1260e-01, 6.7643e-01, + 7.7883e-01, 4.7514e-01, 7.6449e-01, 3.5045e-01, + 7.3614e-01, 8.8137e-01, 4.6469e-01, 4.9258e-01, + 8.1075e-01, 6.2703e-01, 1.2233e-01, 2.7440e-01, + 6.9122e-01, 7.3033e-01, 3.4342e-02, 1.2576e-01, + 8.0974e-01, 4.0072e-01, 6.0245e-01, 1.5077e-02, + 4.1940e-01, 2.0633e-01, 7.1347e-01, 8.3114e-01, + 3.1728e-01, 3.9989e-01, 6.0468e-01, 2.0407e-01, + 8.7073e-01, 4.3857e-01, 8.9954e-02, 6.5643e-01, + 5.8418e-01, 3.5790e-01, 6.7886e-01, 9.5451e-01, + 3.6580e-01, 3.0585e-01, 7.6034e-01, 6.5825e-02, + 2.9720e-01, 3.1525e-01, 3.2830e-01, 7.9663e-02, + 7.4797e-01, 4.5439e-01, 4.4041e-01, 6.1706e-01, + 1.4869e-01, 3.7743e-01, 6.0526e-01, 7.4958e-01, + 1.3105e-01, 3.9610e-01, 8.5301e-01, 8.1549e-01, + 8.5854e-02, 7.4522e-01, 7.4950e-01, 3.7042e-01, + 2.4261e-01, 1.7942e-01, 8.2764e-01, 1.6097e-01, + 2.2542e-01, 5.2571e-01, 1.0887e-01, 3.8000e-02, + 8.1497e-01, 8.0201e-01, 9.4936e-01, 2.6775e-01, + 9.4312e-01, 9.3715e-01, 6.7901e-02, 4.0906e-01, + 1.5251e-01, 2.4190e-01, 2.4089e-01, 2.6151e-01, + 6.2245e-01, 4.6082e-01, 5.0570e-01, 2.5937e-01, + 3.5881e-01, 2.2385e-01, 2.8770e-02, 1.7716e-01, + 5.4713e-02, 5.1027e-01, 3.1178e-02, 6.5533e-01, + 2.2465e-01, 6.9285e-01, 2.5103e-01, 3.0570e-01, + 3.7755e-03, 8.7853e-01, 6.5763e-01, 7.6528e-02, + 9.2357e-01, 9.3172e-01, 1.4133e-01, 9.0949e-01, + 3.7744e-01, 2.6587e-01, 8.7043e-01, 1.0112e-02, + 2.8448e-01, 7.2542e-02, 1.5147e-01, 2.2166e-01, + 9.7594e-01, 3.2101e-01, 3.2571e-01, 9.1645e-01, + 5.1688e-01, 
9.8729e-02, 3.8775e-01, 7.4951e-01, + 2.4091e-01, 2.2275e-01, 8.9300e-01, 4.1567e-01, + 8.1383e-01, 7.1688e-01, 7.5982e-01, 9.9028e-02, + 1.0199e-01, 1.7657e-01, 8.1854e-01, 9.5101e-01, + 8.6789e-01, 7.9254e-01, 9.6088e-01, 8.2911e-01, + 7.9367e-01, 5.3301e-01, 8.6699e-02, 6.5008e-01, + 2.4111e-01, 4.0949e-01, 7.9876e-01, 6.0784e-01, + 8.6621e-01, 7.1156e-01, 3.6004e-01, 1.5720e-02, + 6.4658e-01, 3.4960e-01, 8.6761e-01, 8.2966e-01, + 9.8307e-01, 1.8464e-01, 5.4622e-01, 2.8174e-01, + 4.2027e-01, 6.2481e-01, 2.5893e-01, 5.8887e-01, + 8.4609e-01, 2.6754e-01, 5.9884e-01, 1.1711e-01, + 6.7826e-01, 2.8852e-01, 6.4017e-01, 1.2806e-02, + 3.1902e-03, 4.7820e-01, 8.9185e-01, 5.7234e-02, + 6.7506e-01, 8.4710e-01, 6.0623e-01, 2.2402e-01, + 4.0295e-01, 6.4904e-01, 9.2718e-01, 3.1307e-01, + 6.5796e-01, 3.5345e-01, 1.9087e-01, 2.1269e-01, + 5.7823e-01, 3.9982e-01, 4.3154e-01, 6.7568e-01, + 6.8414e-01, 2.4388e-01, 8.7462e-01, 4.2036e-02, + 3.5337e-01, 5.3395e-01, 9.2982e-01, 8.6034e-01, + 9.3195e-01, 6.8440e-01, 2.5818e-01, 7.2423e-01, + 1.2544e-01, 2.7728e-01, 6.8784e-01, 6.0665e-01, + 1.3165e-01, 8.5017e-01, 4.3265e-03, 8.0981e-01, + 7.9983e-01, 3.2596e-01, 4.9482e-01, 1.1663e-01, + 4.6665e-01, 9.3435e-01, 7.9541e-01, 7.1414e-01, + 5.3506e-02, 4.0912e-01, 6.8238e-01, 3.4773e-01, + 5.6049e-01, 7.1487e-01, 2.1138e-01, 8.1397e-01, + 5.4932e-01, 2.0457e-02, 5.6340e-02, 5.7141e-02, + 4.6515e-01, 1.4683e-01, 7.4366e-01, 4.1761e-02, + 5.1584e-02, 1.2634e-01, 4.6336e-01, 5.9261e-01, + 5.6130e-01, 7.8972e-02, 7.9983e-01, 2.2448e-01, + 1.8797e-01, 2.5057e-01, 7.7180e-01, 4.0901e-01, + 3.7017e-01, 4.0182e-01, 8.7125e-01, 2.8655e-01, + 1.4119e-01, 4.3303e-01, 6.6902e-01, 1.5943e-01, + 2.5892e-01, 5.0928e-01, 6.7761e-01, 3.2212e-02, + 3.0824e-01, 9.2674e-02, 3.6193e-02, 4.4219e-01, + 1.6390e-01, 5.9400e-01, 8.2224e-01, 6.9900e-01, + 5.2481e-01, 9.5165e-01, 4.4474e-01, 2.4445e-01, + 1.0981e-01, 9.3663e-01, 8.4897e-01, 9.5652e-01, + 4.6435e-01, 1.7098e-01, 5.9974e-01, 4.3018e-01, + 2.1721e-01, 3.8205e-01, 2.6684e-01, 2.6286e-01, + 8.1812e-01, 5.7012e-01, 6.3330e-01, 6.6896e-02, + 8.6486e-02, 5.7371e-01, 2.1333e-01, 5.1711e-01, + 3.0138e-01, 2.1686e-01, 9.4270e-01, 3.0485e-01, + 4.4156e-01, 9.5896e-01, 6.0745e-01, 1.5570e-01, + 4.1088e-01, 3.7352e-01, 3.6528e-01, 1.2019e-01, + 7.4565e-02, 5.5689e-01, 5.1790e-01, 1.3767e-01, + 1.2340e-01, 9.6204e-01, 1.6892e-01, 2.9572e-01, + 2.1066e-01, 3.3179e-01, 3.3274e-01, 8.4143e-01, + 8.3193e-01, 7.2407e-01, 7.0281e-01, 6.8041e-01, + 8.2730e-01, 1.8872e-01, 2.6244e-01, 2.1234e-01, + 7.4230e-01, 2.6976e-01, 6.3711e-01, 4.3716e-01, + 6.9298e-01, 8.1356e-01, 6.0939e-01, 7.0288e-01, + 3.5558e-01, 9.2964e-01, 1.1833e-01, 4.6906e-01, + 8.0664e-01, 4.8526e-01, 9.6169e-01, 4.1867e-01, + 8.3224e-01, 9.6285e-01, 9.5262e-01, 8.6424e-01, + 4.9817e-01, 5.4699e-01, 6.1704e-01, 3.9186e-01, + 6.8935e-01, 5.9439e-01, 7.3227e-01, 4.1190e-01, + 9.0233e-01, 3.2790e-01, 4.3206e-01, 5.5893e-01, + 5.8605e-02, 9.4473e-01, 2.4772e-01, 3.1700e-01, + 7.7245e-01, 5.5508e-01, 7.4692e-01, 1.2111e-01, + 8.0345e-01, 4.4160e-01, 6.1610e-01, 4.0302e-01, + 7.2755e-01, 5.2552e-01, 5.2678e-01, 1.3783e-01, + 6.7301e-01, 8.4666e-02, 9.7543e-01, 5.6391e-01, + 6.8981e-01, 5.0252e-01, 3.1345e-01, 8.3631e-01, + 5.1942e-01, 4.1463e-01, 3.8551e-01, 5.6612e-01, + 9.0226e-02, 4.5281e-01, 1.3323e-01, 8.3660e-01, + 3.3939e-01, 2.9707e-01, 3.8862e-01, 7.2849e-01, + 8.2419e-01, 6.6920e-01, 4.7306e-01, 8.6893e-01, + 7.5703e-01, 5.7027e-01, 5.4140e-01, 8.8231e-01, + 3.8567e-02, 3.5538e-01, 6.1680e-01, 5.9111e-01, + 4.5875e-01, 
6.0458e-01, 7.5967e-02, 3.2091e-01, + 3.9756e-01, 5.3389e-01, 4.5029e-01, 1.5185e-01, + 6.9442e-01, 7.6304e-01, 4.6109e-01, 5.2312e-02, + 7.7110e-01, 4.8813e-01, 1.8485e-01, 9.4212e-01, + 2.0752e-01, 2.7050e-01, 8.0133e-01, 5.6501e-01, + 7.0480e-01, 3.7931e-01, 4.9377e-01, 7.7738e-01, + 6.5307e-01, 5.0050e-01, 1.0036e-01, 3.6651e-01, + 4.6488e-01, 6.5613e-01, 3.1970e-01, 5.0090e-01, + 2.3238e-01, 4.7361e-01, 4.2273e-01, 1.9837e-01, + 4.1443e-01, 2.8358e-01, 5.9909e-01, 4.6853e-01, + 1.5862e-01, 5.4860e-01, 9.4413e-01, 6.4800e-03, + 2.0553e-01, 6.7561e-01, 1.6860e-01, 5.9291e-01, + 2.2881e-01, 3.9992e-01, 6.9442e-01, 7.9259e-01, + 6.9638e-01, 7.6941e-01, 2.3646e-01, 7.2314e-01, + 1.4276e-01, 4.5362e-01, 5.5788e-01, 2.2118e-01, + 3.1804e-01, 6.9545e-01, 4.2483e-01, 3.3560e-01, + 3.3597e-01, 4.0398e-02, 1.9135e-01, 5.4384e-01, + 5.9982e-01, 9.7372e-01, 1.5970e-01, 6.0079e-01, + 5.9714e-01, 9.5044e-01, 5.0232e-01, 3.6790e-01, + 7.3455e-01, 8.0673e-01, 4.0026e-01, 6.3795e-02, + 8.2192e-01, 1.8676e-01, 1.9070e-01, 6.2954e-02, + 1.8591e-01, 1.6582e-01, 8.1810e-01, 1.1599e-02, + 4.7558e-01, 8.4737e-01, 7.5306e-01, 1.2992e-01, + 1.5127e-01, 2.0163e-01, 4.6257e-01, 3.4956e-01, + 9.0495e-01, 2.1175e-03, 9.4719e-01, 3.1681e-01, + 2.0418e-01, 5.3479e-02, 3.5683e-01, 5.8247e-02, + 9.3423e-01, 1.1090e-01, 4.3007e-01, 6.1117e-01, + 4.0593e-01, 1.1448e-02, 8.5302e-01, 2.5408e-01, + 9.1190e-01, 6.4738e-01, 2.7596e-01, 2.8235e-01, + 4.6522e-01, 8.7885e-01, 7.0320e-01, 4.2790e-01, + 2.9479e-02, 3.8500e-01, 3.9245e-01, 1.0004e-01, + 4.2397e-01, 7.1833e-01, 6.6614e-01, 4.6682e-02, + 5.8017e-01, 6.0782e-01, 4.8419e-01, 5.5802e-01, + 2.3916e-01, 1.4114e-01, 8.3739e-01, 1.0626e-01, + 5.1946e-01, 9.4847e-01, 4.1767e-01, 3.7856e-01, + 1.1090e-01, 1.5010e-01, 4.3945e-01, 7.5067e-02, + 9.8959e-01, 2.8002e-01, 5.0613e-01, 8.0707e-01, + 6.1595e-01, 8.2005e-01, 9.9749e-01, 1.1749e-01, + 6.5959e-01, 2.3371e-01, 8.3971e-01, 4.3270e-03, + 6.2581e-01, 8.0238e-01, 2.8393e-01, 7.0314e-01, + 2.0960e-01, 3.2954e-02, 6.5011e-01, 8.0206e-01, + 9.2215e-01, 8.1873e-01, 3.4350e-01, 2.8733e-01, + 1.9274e-01, 3.4014e-01, 3.0741e-01, 3.4144e-01, + 2.7448e-02, 7.6554e-01, 6.2323e-01, 3.0307e-01, + 4.5175e-01, 3.9421e-01, 8.5280e-01, 6.5476e-01, + 3.1057e-01, 3.6455e-01, 8.0890e-01, 2.7987e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.7740, 0.9703, 0.1840, ..., 0.7477, 0.1526, 0.5369]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 11.093939065933228 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5850, 8366, 5592, 2122, 6679, 1664, 2469, 7640, 8756, + 2915, 7211, 1515, 5525, 1719, 3439, 4040, 8857, 5364, + 6132, 895, 3852, 156, 8811, 9741, 357, 3518, 4399, + 7286, 180, 9528, 5416, 6953, 1721, 2595, 7283, 9795, + 7825, 5079, 6282, 136, 4669, 6702, 3303, 3010, 2380, + 2256, 7997, 5883, 9682, 9926, 8146, 9663, 3941, 6670, + 9041, 817, 4692, 3045, 7544, 9848, 5442, 871, 3702, + 7376, 94, 6327, 3751, 7760, 7748, 920, 3358, 4678, + 8037, 93, 1218, 9719, 1249, 634, 196, 2894, 7487, + 1159, 8188, 332, 5966, 4396, 4268, 8665, 6886, 7804, + 7445, 4388, 7265, 7541, 8403, 5348, 9890, 2631, 5205, + 8499, 4507, 2643, 7756, 857, 4020, 8538, 7850, 4986, + 7312, 3766, 1567, 2852, 7235, 6927, 653, 379, 1077, + 6334, 4614, 1987, 3571, 6355, 1814, 6184, 9414, 2153, + 8882, 3562, 6625, 5036, 2967, 1233, 9339, 4935, 5597, + 9689, 4093, 5663, 8161, 6587, 3112, 612, 6031, 2698, + 3403, 9263, 9319, 8735, 104, 2810, 3672, 5639, 8827, + 5048, 8805, 1031, 9837, 9524, 6912, 3766, 3659, 7073, + 737, 797, 5226, 7104, 3103, 6515, 4992, 1665, 5226, + 6199, 6712, 6310, 7443, 3988, 2676, 2322, 244, 3312, + 4295, 2693, 9044, 9455, 7558, 2076, 364, 3468, 4685, + 2690, 1818, 1795, 8073, 8740, 3982, 5591, 7437, 4434, + 6272, 8601, 5896, 7206, 452, 124, 6062, 3722, 2858, + 3166, 6119, 1841, 8789, 9596, 5567, 5894, 7970, 5360, + 9933, 6197, 3790, 8303, 9150, 3759, 378, 6338, 8692, + 3037, 225, 8540, 8302, 3869, 1980, 7102, 3526, 84, + 2428, 5319, 4508, 9845, 2414, 8840, 833, 2752, 1575, + 8598, 7280, 7141, 2098, 7332, 9471, 7172, 6064, 5586, + 6496, 4719, 7246, 9857, 461, 1971, 1577, 2883, 521, + 887, 7601, 1096, 7587, 8032, 9839, 9759, 3694, 4208, + 2775, 4747, 9442, 6136, 6253, 7707, 4233, 955, 7815, + 9711, 2360, 974, 4097, 9684, 7356, 2815, 7178, 3217, + 6396, 7951, 6798, 3086, 4009, 4560, 5258, 4636, 5136, + 484, 5572, 9213, 7528, 8274, 5548, 1629, 8245, 2276, + 9250, 7073, 612, 1264, 8789, 9413, 9556, 4024, 8035, + 4389, 2075, 2015, 6780, 9946, 8182, 3173, 4875, 4374, + 7146, 6566, 1212, 3788, 6737, 1525, 4118, 597, 1164, + 95, 8578, 5382, 7, 4863, 8244, 8410, 5238, 6413, + 1176, 8621, 5893, 5470, 5285, 4907, 1576, 9176, 6701, + 5861, 5303, 5175, 2021, 3075, 7189, 9086, 6729, 1443, + 4021, 3184, 1282, 194, 3967, 741, 191, 6596, 1251, + 1613, 4577, 1550, 7988, 7486, 1702, 7074, 9193, 1036, + 2186, 5023, 2564, 6337, 940, 1519, 8948, 8449, 9328, + 479, 6279, 8562, 8923, 2685, 1574, 5617, 5722, 970, + 640, 8699, 1647, 1059, 8229, 1516, 892, 441, 4405, + 4899, 5037, 6072, 8409, 3457, 6678, 8778, 5168, 4725, + 1582, 2499, 3623, 3399, 2800, 6268, 2077, 192, 5047, + 1851, 3023, 9282, 7259, 5334, 5942, 1157, 8925, 7158, + 2680, 8224, 1863, 9792, 1845, 5045, 6307, 5364, 7590, + 3501, 3167, 5703, 4100, 6744, 3117, 6631, 2272, 3513, + 1946, 3024, 5825, 1185, 1348, 6559, 74, 4615, 3419, + 4395, 7611, 6167, 8539, 9714, 7761, 9431, 2318, 6641, + 1733, 4522, 5800, 6094, 9427, 1318, 4807, 6951, 6988, + 2813, 985, 7549, 9176, 9180, 4980, 2792, 4241, 7249, + 1163, 61, 7125, 2555, 6886, 8991, 196, 9270, 35, + 1529, 3941, 7793, 7718, 7033, 4030, 6810, 850, 9329, + 5979, 6544, 751, 4582, 8368, 565, 7754, 7052, 3290, + 8677, 983, 7257, 1909, 8475, 6667, 894, 6714, 9857, + 238, 8379, 8656, 2402, 83, 4439, 4472, 7734, 2074, + 3272, 618, 1715, 8742, 745, 2543, 7748, 5408, 6663, + 8178, 2886, 7594, 1196, 5498, 3894, 893, 8965, 6034, + 6443, 2520, 5737, 483, 2405, 4283, 6209, 1176, 448, + 606, 958, 8428, 
3541, 8984, 1463, 4865, 173, 2388, + 4515, 610, 3428, 834, 1897, 3416, 3506, 6850, 7650, + 1567, 2088, 4588, 3271, 6086, 2429, 5007, 1641, 3405, + 5664, 2262, 1925, 5269, 9056, 8204, 5139, 609, 3768, + 1372, 8780, 3638, 7690, 2428, 8787, 4274, 9543, 7138, + 8745, 2025, 668, 6826, 8790, 6413, 9920, 6540, 348, + 4430, 4748, 8976, 1596, 914, 2089, 5266, 7048, 2343, + 5663, 5852, 3682, 5650, 2554, 2359, 5522, 968, 4538, + 556, 900, 332, 6036, 4210, 6798, 9668, 7214, 3249, + 934, 7260, 1264, 9553, 9573, 1341, 5076, 7088, 723, + 8670, 6474, 3175, 9943, 1202, 328, 6087, 194, 8768, + 258, 7554, 9741, 2069, 3477, 9790, 8686, 2875, 4984, + 7848, 4751, 5568, 9759, 8560, 7437, 4239, 7813, 4618, + 6633, 990, 5961, 926, 7923, 4657, 2745, 2212, 9361, + 5154, 9425, 4873, 6600, 5372, 8037, 7620, 2502, 3781, + 7433, 6170, 819, 1336, 484, 3513, 4395, 19, 8130, + 7693, 6153, 6352, 4202, 7488, 1192, 4764, 7301, 6821, + 6865, 6976, 937, 1234, 1710, 549, 5075, 6580, 8729, + 4805, 1375, 3711, 1305, 4371, 3503, 8608, 3833, 3074, + 1072, 7655, 4667, 966, 192, 6852, 7586, 1491, 9585, + 8090, 3373, 1438, 2850, 8318, 8429, 9764, 8245, 7536, + 1862, 6568, 9277, 3906, 4401, 671, 9709, 9370, 6046, + 9211, 9673, 4331, 921, 674, 7979, 374, 7224, 8804, + 5223, 9172, 2758, 3516, 3647, 8604, 8106, 9070, 2337, + 3498, 3861, 2253, 9022, 3709, 4466, 2031, 9776, 7382, + 6631, 6983, 229, 9213, 4744, 5134, 2773, 3818, 2045, + 3684, 5192, 2488, 8503, 6845, 9997, 5752, 1325, 8942, + 352, 1614, 4161, 7427, 7988, 1814, 8864, 5443, 6505, + 9390, 1752, 5714, 9392, 1996, 5576, 5760, 390, 6435, + 7898, 7417, 7401, 1217, 7128, 1412, 6756, 7275, 5202, + 2409, 1660, 2736, 5263, 8336, 8044, 3978, 3020, 5519, + 7853, 8427, 824, 1792, 4254, 4572, 5695, 1587, 1988, + 4588, 728, 1627, 3761, 5276, 8300, 992, 8389, 3069, + 3780, 9133, 145, 9462, 7403, 1537, 9647, 5057, 3362, + 9039, 6376, 8446, 4900, 5977, 7345, 4098, 447, 1820, + 252, 9481, 6404, 6592, 1295, 8043, 4753, 3605, 3518, + 2606, 2287, 9068, 9762, 699, 1959, 5988, 6313, 7817, + 5094, 8321, 2706, 7762, 8810, 5984, 698, 2889, 3562, + 3511, 7298, 3188, 9974, 5668, 6020, 8731, 9606, 2958, + 8385, 4702, 6639, 3111, 8106, 914, 952, 5956, 6944, + 6389, 6930, 4511, 693, 2408, 8878, 4458, 6005, 4687, + 5262, 264, 7556, 7239, 6348, 319, 3127, 4870, 9830, + 6495, 9430, 82, 2845, 2158, 7553, 2485, 4718, 9487, + 9863, 2351, 9993, 9611, 7651, 6752, 9890, 4226, 7914, + 1090, 3994, 273, 9314, 2869, 8105, 8063, 8350, 5143, + 2771, 6914, 6289, 1069, 6603, 7055, 1101, 2035, 1775, + 4440, 1100, 9988, 6654, 8779, 3492, 65, 3068, 7601, + 6582, 5059, 9136, 6153, 4692, 6999, 662, 8797, 8102, + 5680, 9525, 2546, 5756, 4348, 7370, 557, 1546, 4320, + 5734]), + values=tensor([1.6341e-01, 5.4230e-01, 5.9412e-01, 6.2649e-01, + 6.1492e-01, 1.1463e-02, 8.8850e-01, 3.1827e-01, + 1.4850e-01, 1.7623e-01, 5.0631e-01, 8.7356e-01, + 2.6593e-01, 3.7335e-01, 6.7543e-01, 8.1424e-01, + 3.7460e-01, 6.9153e-01, 8.0907e-01, 8.3914e-01, + 9.7417e-02, 3.4410e-04, 1.3414e-01, 2.2155e-01, + 6.0112e-02, 8.4328e-01, 2.7848e-01, 1.4958e-01, + 7.9439e-01, 7.7172e-01, 2.7028e-01, 1.6852e-01, + 6.2794e-01, 3.7628e-01, 7.9275e-01, 2.0746e-01, + 6.5757e-02, 6.6970e-01, 1.3521e-02, 6.0790e-01, + 5.0451e-01, 9.2714e-01, 7.8707e-01, 4.1523e-01, + 5.5244e-01, 9.7376e-01, 6.7472e-01, 9.9321e-01, + 9.5176e-01, 6.2001e-01, 7.8968e-01, 3.0430e-01, + 8.2453e-01, 9.4042e-02, 5.1519e-01, 8.5738e-01, + 5.8535e-01, 8.7087e-01, 6.5326e-01, 1.7395e-01, + 3.0247e-01, 7.3945e-02, 4.1833e-01, 8.0312e-01, + 8.8320e-01, 5.1558e-01, 2.5666e-01, 5.6112e-01, + 
5.5084e-01, 7.0649e-01, 3.5059e-01, 7.8879e-01, + 5.2108e-02, 9.8239e-01, 3.4744e-01, 5.8319e-01, + 9.1928e-02, 4.9802e-02, 8.4140e-01, 2.0825e-01, + 6.5549e-01, 3.3967e-02, 4.5861e-01, 3.1781e-01, + 7.2623e-01, 4.3890e-01, 9.1717e-01, 6.7930e-01, + 2.2711e-01, 5.3114e-01, 9.4610e-01, 1.2012e-01, + 3.8896e-01, 9.0657e-01, 8.7514e-01, 9.4601e-01, + 6.8092e-01, 4.2792e-01, 7.8983e-01, 7.0867e-01, + 6.8668e-01, 7.3584e-01, 4.8951e-01, 1.1601e-01, + 5.8369e-01, 8.1548e-02, 4.6998e-01, 8.2472e-01, + 8.1798e-01, 6.4792e-01, 7.9098e-01, 4.6883e-01, + 5.3531e-01, 9.9356e-01, 2.4036e-01, 4.8060e-01, + 2.8861e-01, 8.7768e-01, 7.6854e-01, 2.6962e-01, + 9.4503e-01, 7.5491e-01, 6.3333e-01, 3.9724e-01, + 6.1673e-01, 9.9300e-01, 8.4452e-02, 6.1927e-01, + 7.8274e-01, 9.4288e-01, 5.5703e-01, 9.9018e-01, + 8.7030e-01, 6.7458e-01, 7.1377e-01, 3.9155e-01, + 4.0923e-01, 1.9269e-01, 7.8647e-01, 1.7260e-01, + 1.9351e-01, 1.6644e-02, 6.3159e-01, 4.2569e-01, + 3.5414e-01, 1.5447e-01, 6.0382e-01, 1.1039e-01, + 7.4996e-01, 9.7809e-01, 4.9703e-01, 6.8052e-02, + 4.0995e-03, 1.5993e-01, 2.5589e-01, 6.1582e-01, + 4.1141e-01, 8.3175e-01, 9.2280e-02, 6.6768e-01, + 2.1373e-01, 7.4338e-01, 3.8856e-01, 4.0554e-01, + 2.4237e-01, 6.8970e-01, 5.1570e-01, 2.2133e-04, + 8.5322e-01, 4.7984e-01, 4.1539e-01, 8.1641e-01, + 5.1215e-01, 7.5280e-01, 4.9588e-04, 8.3219e-01, + 1.3510e-01, 5.6800e-01, 8.8518e-01, 9.6846e-01, + 2.4576e-01, 9.1717e-01, 4.6194e-01, 4.1655e-01, + 8.5447e-01, 1.2544e-01, 6.0240e-01, 5.7586e-01, + 2.2428e-01, 4.5450e-01, 2.4531e-01, 3.5914e-01, + 5.8131e-01, 4.4973e-01, 3.0343e-01, 8.7101e-01, + 5.4478e-01, 5.0386e-01, 6.8135e-01, 4.2381e-01, + 3.1555e-02, 3.0972e-01, 2.4608e-01, 1.9970e-01, + 4.6368e-01, 9.9803e-01, 8.2862e-01, 8.5141e-01, + 8.7867e-01, 4.8625e-01, 5.2020e-01, 4.1959e-01, + 1.7473e-01, 2.5225e-01, 7.2666e-01, 9.2040e-01, + 1.8559e-01, 6.3520e-01, 4.5196e-01, 9.4681e-01, + 9.5216e-01, 1.0019e-01, 9.7707e-01, 4.5094e-01, + 5.0805e-01, 5.1974e-01, 5.3486e-01, 1.8077e-01, + 8.5364e-01, 7.2740e-01, 4.8004e-01, 6.8966e-01, + 2.0804e-01, 7.7792e-01, 6.3289e-01, 1.2722e-01, + 9.0620e-01, 4.9687e-01, 5.6847e-01, 1.3671e-01, + 4.0281e-01, 1.7219e-01, 1.9050e-01, 6.2485e-01, + 1.0260e-01, 2.5271e-02, 9.4031e-01, 9.4275e-01, + 7.8410e-03, 3.3465e-02, 3.6601e-01, 2.9329e-01, + 2.0289e-01, 8.1331e-01, 9.6038e-01, 7.8543e-01, + 8.1769e-01, 9.2929e-01, 5.1055e-01, 3.5358e-01, + 4.8515e-01, 3.5044e-01, 1.9924e-01, 8.1918e-01, + 2.9889e-01, 3.7500e-02, 2.3185e-01, 6.2795e-01, + 5.8909e-01, 1.2007e-01, 7.5640e-01, 8.0080e-01, + 8.4206e-01, 2.3327e-01, 8.8223e-01, 9.9512e-01, + 2.9164e-01, 4.3867e-01, 7.8315e-01, 9.6653e-01, + 4.0617e-01, 6.9039e-01, 4.3199e-01, 8.2813e-01, + 3.5563e-01, 9.1892e-01, 5.9521e-01, 8.9108e-01, + 3.7947e-01, 2.0591e-01, 6.7351e-01, 5.7368e-01, + 2.8254e-01, 6.7739e-02, 4.4276e-01, 9.7228e-01, + 4.6186e-01, 6.9010e-01, 8.4715e-01, 1.5150e-01, + 2.0227e-01, 3.6577e-01, 9.5994e-01, 2.5249e-01, + 6.5535e-02, 5.6708e-01, 7.2591e-01, 7.0050e-01, + 4.6753e-01, 9.1326e-02, 6.7496e-01, 5.8336e-01, + 3.1459e-01, 4.7441e-01, 6.5107e-01, 2.7592e-01, + 1.4742e-01, 4.5510e-01, 7.4301e-01, 4.8033e-01, + 1.4801e-01, 7.7726e-01, 9.3909e-01, 8.1948e-01, + 2.9724e-01, 8.1459e-01, 4.7430e-01, 3.8389e-01, + 5.8885e-02, 3.1119e-01, 5.3076e-01, 1.4421e-01, + 9.6859e-01, 9.4119e-01, 8.4274e-01, 8.1548e-02, + 4.2885e-02, 1.4390e-01, 9.9599e-01, 9.8519e-01, + 5.8954e-01, 4.9409e-01, 6.5081e-01, 4.7729e-01, + 7.1507e-01, 2.3377e-01, 3.5997e-01, 9.6579e-01, + 3.9773e-01, 2.1377e-02, 3.5134e-01, 6.4577e-01, + 
9.9705e-01, 4.8704e-01, 4.2052e-01, 3.9684e-01, + 9.2759e-01, 3.0995e-01, 3.1089e-01, 7.6678e-01, + 5.4422e-04, 9.2904e-01, 1.6571e-01, 9.8144e-01, + 7.6337e-01, 8.8536e-01, 1.6786e-01, 9.4677e-01, + 3.5525e-01, 3.5127e-01, 1.3314e-01, 8.7067e-01, + 2.8725e-01, 6.7870e-01, 3.3831e-01, 4.6605e-01, + 7.4196e-01, 8.8785e-01, 9.6258e-01, 7.7168e-01, + 4.8538e-01, 2.0843e-01, 6.4007e-01, 1.9033e-01, + 3.4627e-01, 1.1059e-01, 7.1554e-01, 5.5574e-01, + 7.6910e-01, 7.3835e-01, 2.8739e-01, 6.9284e-01, + 7.5175e-02, 9.9640e-01, 9.1137e-01, 2.2022e-01, + 4.9205e-01, 9.0083e-01, 5.1433e-01, 4.3040e-01, + 3.8550e-01, 4.9743e-01, 2.3713e-01, 2.5804e-01, + 8.8147e-02, 9.0523e-02, 8.5804e-01, 1.4407e-01, + 3.2087e-01, 6.9830e-01, 5.0311e-02, 5.9795e-01, + 2.3588e-01, 9.7651e-01, 1.1260e-01, 6.7643e-01, + 7.7883e-01, 4.7514e-01, 7.6449e-01, 3.5045e-01, + 7.3614e-01, 8.8137e-01, 4.6469e-01, 4.9258e-01, + 8.1075e-01, 6.2703e-01, 1.2233e-01, 2.7440e-01, + 6.9122e-01, 7.3033e-01, 3.4342e-02, 1.2576e-01, + 8.0974e-01, 4.0072e-01, 6.0245e-01, 1.5077e-02, + 4.1940e-01, 2.0633e-01, 7.1347e-01, 8.3114e-01, + 3.1728e-01, 3.9989e-01, 6.0468e-01, 2.0407e-01, + 8.7073e-01, 4.3857e-01, 8.9954e-02, 6.5643e-01, + 5.8418e-01, 3.5790e-01, 6.7886e-01, 9.5451e-01, + 3.6580e-01, 3.0585e-01, 7.6034e-01, 6.5825e-02, + 2.9720e-01, 3.1525e-01, 3.2830e-01, 7.9663e-02, + 7.4797e-01, 4.5439e-01, 4.4041e-01, 6.1706e-01, + 1.4869e-01, 3.7743e-01, 6.0526e-01, 7.4958e-01, + 1.3105e-01, 3.9610e-01, 8.5301e-01, 8.1549e-01, + 8.5854e-02, 7.4522e-01, 7.4950e-01, 3.7042e-01, + 2.4261e-01, 1.7942e-01, 8.2764e-01, 1.6097e-01, + 2.2542e-01, 5.2571e-01, 1.0887e-01, 3.8000e-02, + 8.1497e-01, 8.0201e-01, 9.4936e-01, 2.6775e-01, + 9.4312e-01, 9.3715e-01, 6.7901e-02, 4.0906e-01, + 1.5251e-01, 2.4190e-01, 2.4089e-01, 2.6151e-01, + 6.2245e-01, 4.6082e-01, 5.0570e-01, 2.5937e-01, + 3.5881e-01, 2.2385e-01, 2.8770e-02, 1.7716e-01, + 5.4713e-02, 5.1027e-01, 3.1178e-02, 6.5533e-01, + 2.2465e-01, 6.9285e-01, 2.5103e-01, 3.0570e-01, + 3.7755e-03, 8.7853e-01, 6.5763e-01, 7.6528e-02, + 9.2357e-01, 9.3172e-01, 1.4133e-01, 9.0949e-01, + 3.7744e-01, 2.6587e-01, 8.7043e-01, 1.0112e-02, + 2.8448e-01, 7.2542e-02, 1.5147e-01, 2.2166e-01, + 9.7594e-01, 3.2101e-01, 3.2571e-01, 9.1645e-01, + 5.1688e-01, 9.8729e-02, 3.8775e-01, 7.4951e-01, + 2.4091e-01, 2.2275e-01, 8.9300e-01, 4.1567e-01, + 8.1383e-01, 7.1688e-01, 7.5982e-01, 9.9028e-02, + 1.0199e-01, 1.7657e-01, 8.1854e-01, 9.5101e-01, + 8.6789e-01, 7.9254e-01, 9.6088e-01, 8.2911e-01, + 7.9367e-01, 5.3301e-01, 8.6699e-02, 6.5008e-01, + 2.4111e-01, 4.0949e-01, 7.9876e-01, 6.0784e-01, + 8.6621e-01, 7.1156e-01, 3.6004e-01, 1.5720e-02, + 6.4658e-01, 3.4960e-01, 8.6761e-01, 8.2966e-01, + 9.8307e-01, 1.8464e-01, 5.4622e-01, 2.8174e-01, + 4.2027e-01, 6.2481e-01, 2.5893e-01, 5.8887e-01, + 8.4609e-01, 2.6754e-01, 5.9884e-01, 1.1711e-01, + 6.7826e-01, 2.8852e-01, 6.4017e-01, 1.2806e-02, + 3.1902e-03, 4.7820e-01, 8.9185e-01, 5.7234e-02, + 6.7506e-01, 8.4710e-01, 6.0623e-01, 2.2402e-01, + 4.0295e-01, 6.4904e-01, 9.2718e-01, 3.1307e-01, + 6.5796e-01, 3.5345e-01, 1.9087e-01, 2.1269e-01, + 5.7823e-01, 3.9982e-01, 4.3154e-01, 6.7568e-01, + 6.8414e-01, 2.4388e-01, 8.7462e-01, 4.2036e-02, + 3.5337e-01, 5.3395e-01, 9.2982e-01, 8.6034e-01, + 9.3195e-01, 6.8440e-01, 2.5818e-01, 7.2423e-01, + 1.2544e-01, 2.7728e-01, 6.8784e-01, 6.0665e-01, + 1.3165e-01, 8.5017e-01, 4.3265e-03, 8.0981e-01, + 7.9983e-01, 3.2596e-01, 4.9482e-01, 1.1663e-01, + 4.6665e-01, 9.3435e-01, 7.9541e-01, 7.1414e-01, + 5.3506e-02, 4.0912e-01, 6.8238e-01, 3.4773e-01, + 
5.6049e-01, 7.1487e-01, 2.1138e-01, 8.1397e-01, + 5.4932e-01, 2.0457e-02, 5.6340e-02, 5.7141e-02, + 4.6515e-01, 1.4683e-01, 7.4366e-01, 4.1761e-02, + 5.1584e-02, 1.2634e-01, 4.6336e-01, 5.9261e-01, + 5.6130e-01, 7.8972e-02, 7.9983e-01, 2.2448e-01, + 1.8797e-01, 2.5057e-01, 7.7180e-01, 4.0901e-01, + 3.7017e-01, 4.0182e-01, 8.7125e-01, 2.8655e-01, + 1.4119e-01, 4.3303e-01, 6.6902e-01, 1.5943e-01, + 2.5892e-01, 5.0928e-01, 6.7761e-01, 3.2212e-02, + 3.0824e-01, 9.2674e-02, 3.6193e-02, 4.4219e-01, + 1.6390e-01, 5.9400e-01, 8.2224e-01, 6.9900e-01, + 5.2481e-01, 9.5165e-01, 4.4474e-01, 2.4445e-01, + 1.0981e-01, 9.3663e-01, 8.4897e-01, 9.5652e-01, + 4.6435e-01, 1.7098e-01, 5.9974e-01, 4.3018e-01, + 2.1721e-01, 3.8205e-01, 2.6684e-01, 2.6286e-01, + 8.1812e-01, 5.7012e-01, 6.3330e-01, 6.6896e-02, + 8.6486e-02, 5.7371e-01, 2.1333e-01, 5.1711e-01, + 3.0138e-01, 2.1686e-01, 9.4270e-01, 3.0485e-01, + 4.4156e-01, 9.5896e-01, 6.0745e-01, 1.5570e-01, + 4.1088e-01, 3.7352e-01, 3.6528e-01, 1.2019e-01, + 7.4565e-02, 5.5689e-01, 5.1790e-01, 1.3767e-01, + 1.2340e-01, 9.6204e-01, 1.6892e-01, 2.9572e-01, + 2.1066e-01, 3.3179e-01, 3.3274e-01, 8.4143e-01, + 8.3193e-01, 7.2407e-01, 7.0281e-01, 6.8041e-01, + 8.2730e-01, 1.8872e-01, 2.6244e-01, 2.1234e-01, + 7.4230e-01, 2.6976e-01, 6.3711e-01, 4.3716e-01, + 6.9298e-01, 8.1356e-01, 6.0939e-01, 7.0288e-01, + 3.5558e-01, 9.2964e-01, 1.1833e-01, 4.6906e-01, + 8.0664e-01, 4.8526e-01, 9.6169e-01, 4.1867e-01, + 8.3224e-01, 9.6285e-01, 9.5262e-01, 8.6424e-01, + 4.9817e-01, 5.4699e-01, 6.1704e-01, 3.9186e-01, + 6.8935e-01, 5.9439e-01, 7.3227e-01, 4.1190e-01, + 9.0233e-01, 3.2790e-01, 4.3206e-01, 5.5893e-01, + 5.8605e-02, 9.4473e-01, 2.4772e-01, 3.1700e-01, + 7.7245e-01, 5.5508e-01, 7.4692e-01, 1.2111e-01, + 8.0345e-01, 4.4160e-01, 6.1610e-01, 4.0302e-01, + 7.2755e-01, 5.2552e-01, 5.2678e-01, 1.3783e-01, + 6.7301e-01, 8.4666e-02, 9.7543e-01, 5.6391e-01, + 6.8981e-01, 5.0252e-01, 3.1345e-01, 8.3631e-01, + 5.1942e-01, 4.1463e-01, 3.8551e-01, 5.6612e-01, + 9.0226e-02, 4.5281e-01, 1.3323e-01, 8.3660e-01, + 3.3939e-01, 2.9707e-01, 3.8862e-01, 7.2849e-01, + 8.2419e-01, 6.6920e-01, 4.7306e-01, 8.6893e-01, + 7.5703e-01, 5.7027e-01, 5.4140e-01, 8.8231e-01, + 3.8567e-02, 3.5538e-01, 6.1680e-01, 5.9111e-01, + 4.5875e-01, 6.0458e-01, 7.5967e-02, 3.2091e-01, + 3.9756e-01, 5.3389e-01, 4.5029e-01, 1.5185e-01, + 6.9442e-01, 7.6304e-01, 4.6109e-01, 5.2312e-02, + 7.7110e-01, 4.8813e-01, 1.8485e-01, 9.4212e-01, + 2.0752e-01, 2.7050e-01, 8.0133e-01, 5.6501e-01, + 7.0480e-01, 3.7931e-01, 4.9377e-01, 7.7738e-01, + 6.5307e-01, 5.0050e-01, 1.0036e-01, 3.6651e-01, + 4.6488e-01, 6.5613e-01, 3.1970e-01, 5.0090e-01, + 2.3238e-01, 4.7361e-01, 4.2273e-01, 1.9837e-01, + 4.1443e-01, 2.8358e-01, 5.9909e-01, 4.6853e-01, + 1.5862e-01, 5.4860e-01, 9.4413e-01, 6.4800e-03, + 2.0553e-01, 6.7561e-01, 1.6860e-01, 5.9291e-01, + 2.2881e-01, 3.9992e-01, 6.9442e-01, 7.9259e-01, + 6.9638e-01, 7.6941e-01, 2.3646e-01, 7.2314e-01, + 1.4276e-01, 4.5362e-01, 5.5788e-01, 2.2118e-01, + 3.1804e-01, 6.9545e-01, 4.2483e-01, 3.3560e-01, + 3.3597e-01, 4.0398e-02, 1.9135e-01, 5.4384e-01, + 5.9982e-01, 9.7372e-01, 1.5970e-01, 6.0079e-01, + 5.9714e-01, 9.5044e-01, 5.0232e-01, 3.6790e-01, + 7.3455e-01, 8.0673e-01, 4.0026e-01, 6.3795e-02, + 8.2192e-01, 1.8676e-01, 1.9070e-01, 6.2954e-02, + 1.8591e-01, 1.6582e-01, 8.1810e-01, 1.1599e-02, + 4.7558e-01, 8.4737e-01, 7.5306e-01, 1.2992e-01, + 1.5127e-01, 2.0163e-01, 4.6257e-01, 3.4956e-01, + 9.0495e-01, 2.1175e-03, 9.4719e-01, 3.1681e-01, + 2.0418e-01, 5.3479e-02, 3.5683e-01, 5.8247e-02, + 
9.3423e-01, 1.1090e-01, 4.3007e-01, 6.1117e-01, + 4.0593e-01, 1.1448e-02, 8.5302e-01, 2.5408e-01, + 9.1190e-01, 6.4738e-01, 2.7596e-01, 2.8235e-01, + 4.6522e-01, 8.7885e-01, 7.0320e-01, 4.2790e-01, + 2.9479e-02, 3.8500e-01, 3.9245e-01, 1.0004e-01, + 4.2397e-01, 7.1833e-01, 6.6614e-01, 4.6682e-02, + 5.8017e-01, 6.0782e-01, 4.8419e-01, 5.5802e-01, + 2.3916e-01, 1.4114e-01, 8.3739e-01, 1.0626e-01, + 5.1946e-01, 9.4847e-01, 4.1767e-01, 3.7856e-01, + 1.1090e-01, 1.5010e-01, 4.3945e-01, 7.5067e-02, + 9.8959e-01, 2.8002e-01, 5.0613e-01, 8.0707e-01, + 6.1595e-01, 8.2005e-01, 9.9749e-01, 1.1749e-01, + 6.5959e-01, 2.3371e-01, 8.3971e-01, 4.3270e-03, + 6.2581e-01, 8.0238e-01, 2.8393e-01, 7.0314e-01, + 2.0960e-01, 3.2954e-02, 6.5011e-01, 8.0206e-01, + 9.2215e-01, 8.1873e-01, 3.4350e-01, 2.8733e-01, + 1.9274e-01, 3.4014e-01, 3.0741e-01, 3.4144e-01, + 2.7448e-02, 7.6554e-01, 6.2323e-01, 3.0307e-01, + 4.5175e-01, 3.9421e-01, 8.5280e-01, 6.5476e-01, + 3.1057e-01, 3.6455e-01, 8.0890e-01, 2.7987e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.7740, 0.9703, 0.1840, ..., 0.7477, 0.1526, 0.5369]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 11.093939065933228 seconds + +[40.33, 40.0, 39.88, 40.14, 39.62, 39.53, 39.86, 39.53, 39.55, 39.82] +[98.79] +12.79684853553772 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 366482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.093939065933228, 'TIME_S_1KI': 0.0302714432521467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1264.2006668257713, 'W': 98.79} +[40.33, 40.0, 39.88, 40.14, 39.62, 39.53, 39.86, 39.53, 39.55, 39.82, 40.52, 39.54, 40.01, 39.91, 40.14, 39.87, 40.07, 39.9, 44.97, 39.44] +722.575 +36.128750000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 366482, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 11.093939065933228, 'TIME_S_1KI': 0.0302714432521467, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1264.2006668257713, 'W': 98.79, 'J_1KI': 3.4495573229402026, 'W_1KI': 0.26956303447372587, 'W_D': 62.66125, 'J_D': 801.866525297463, 'W_D_1KI': 0.17098043014390885, 'J_D_1KI': 0.0004665452331735497} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..c87bd7a --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 305580, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.080953121185303, "TIME_S_1KI": 0.032989571049104334, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1230.7398206591606, "W": 98.25, "J_1KI": 4.027553572416914, "W_1KI": 0.32151973296681724, "W_D": 62.38325, "J_D": 781.450889741838, "W_D_1KI": 0.20414703187381372, "J_D_1KI": 0.0006680641137306555} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.output 
b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..9e7563d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.018548250198364258} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 5000, 5000, 5000]), + col_indices=tensor([1572, 8127, 3303, ..., 3635, 8012, 8701]), + values=tensor([0.7029, 0.2681, 0.5472, ..., 0.1372, 0.6564, 0.9870]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.6228, 0.5154, 0.0077, ..., 0.6369, 0.2601, 0.0192]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.018548250198364258 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '56609', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9451327323913574} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4999, 4999, 5000]), + col_indices=tensor([3842, 387, 3686, ..., 4115, 6419, 2917]), + values=tensor([0.9231, 0.0215, 0.0697, ..., 0.2708, 0.2879, 0.7516]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.2403, 0.0214, 0.1380, ..., 0.6094, 0.2095, 0.9923]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 1.9451327323913574 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '305580', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.080953121185303} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 4999, 5000]), + col_indices=tensor([1481, 9557, 9045, ..., 186, 1024, 519]), + values=tensor([0.0681, 0.8562, 0.9064, ..., 0.7770, 0.2010, 0.9088]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.5005, 0.6529, 0.0782, ..., 0.6202, 0.1736, 0.9901]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.080953121185303 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 4999, 4999, 5000]), + col_indices=tensor([1481, 9557, 9045, ..., 186, 1024, 519]), + values=tensor([0.0681, 0.8562, 0.9064, ..., 0.7770, 0.2010, 0.9088]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.5005, 0.6529, 0.0782, ..., 0.6202, 0.1736, 0.9901]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.080953121185303 seconds + +[40.69, 39.86, 40.65, 39.74, 39.56, 39.52, 39.78, 39.47, 39.45, 39.45] +[98.25] +12.52661395072937 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 305580, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.080953121185303, 'TIME_S_1KI': 0.032989571049104334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.7398206591606, 'W': 98.25} +[40.69, 39.86, 40.65, 39.74, 39.56, 39.52, 39.78, 39.47, 39.45, 39.45, 40.13, 40.88, 39.46, 41.14, 39.99, 39.46, 39.46, 39.44, 39.59, 39.5] +717.335 +35.86675 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 305580, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.080953121185303, 'TIME_S_1KI': 0.032989571049104334, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1230.7398206591606, 'W': 98.25, 'J_1KI': 4.027553572416914, 'W_1KI': 0.32151973296681724, 'W_D': 62.38325, 'J_D': 781.450889741838, 'W_D_1KI': 0.20414703187381372, 'J_D_1KI': 0.0006680641137306555} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..0a007ba --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1273, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.290857076644897, "TIME_S_1KI": 8.083941144261507, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2248.191835269928, "W": 120.44000000000001, "J_1KI": 1766.0580009975868, "W_1KI": 94.61115475255303, "W_D": 84.17750000000001, "J_D": 1571.2983079826834, "W_D_1KI": 66.12529457973291, "J_D_1KI": 51.944457643152326} 
diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..1b9241c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,68 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.824350118637085} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 55, 100, ..., 24999899, + 24999953, 25000000]), + col_indices=tensor([ 1283, 31647, 40047, ..., 487577, 491974, + 492635]), + values=tensor([0.2687, 0.0076, 0.0743, ..., 0.5051, 0.4444, 0.1527]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2422, 0.7782, 0.2817, ..., 0.4809, 0.1219, 0.8722]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 0.824350118637085 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1273', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.290857076644897} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 101, ..., 24999883, + 24999939, 25000000]), + col_indices=tensor([ 9313, 23523, 43031, ..., 488537, 498363, + 498593]), + values=tensor([0.9134, 0.4019, 0.3601, ..., 0.2723, 0.3306, 0.0527]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.9993, 0.9766, 0.6194, ..., 0.0672, 0.4807, 0.1643]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.290857076644897 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 101, ..., 24999883, + 24999939, 25000000]), + col_indices=tensor([ 9313, 23523, 43031, ..., 488537, 498363, + 498593]), + values=tensor([0.9134, 0.4019, 0.3601, ..., 0.2723, 0.3306, 0.0527]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.9993, 0.9766, 0.6194, ..., 0.0672, 0.4807, 0.1643]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.290857076644897 seconds + +[40.99, 40.04, 40.78, 39.98, 40.28, 39.98, 39.98, 39.94, 40.19, 39.9] +[120.44] +18.66648817062378 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1273, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.290857076644897, 'TIME_S_1KI': 8.083941144261507, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2248.191835269928, 'W': 120.44000000000001} +[40.99, 40.04, 40.78, 39.98, 40.28, 39.98, 39.98, 39.94, 40.19, 39.9, 41.12, 40.02, 40.17, 39.62, 39.57, 39.49, 39.52, 45.4, 39.55, 39.47] +725.25 +36.2625 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1273, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.290857076644897, 'TIME_S_1KI': 8.083941144261507, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2248.191835269928, 'W': 120.44000000000001, 'J_1KI': 1766.0580009975868, 'W_1KI': 94.61115475255303, 'W_D': 84.17750000000001, 'J_D': 1571.2983079826834, 'W_D_1KI': 66.12529457973291, 'J_D_1KI': 51.944457643152326} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..22e8b8e --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 21531, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.381713628768921, "TIME_S_1KI": 0.48217517202029264, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2048.753864378929, "W": 153.79, "J_1KI": 95.15367908499043, "W_1KI": 7.142724443825182, "W_D": 117.79275, "J_D": 1569.2070470012427, "W_D_1KI": 5.4708443639403646, "J_D_1KI": 0.25409151288562376} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..da8bd5c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,89 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.08279204368591309} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 10, ..., 2499988, + 2499995, 2500000]), + col_indices=tensor([111354, 133493, 148601, ..., 214459, 291734, + 295580]), + values=tensor([0.7692, 0.5972, 0.6345, ..., 0.2595, 0.9828, 0.2512]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1544, 0.9362, 0.6152, ..., 0.8648, 0.4518, 0.0330]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 0.08279204368591309 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '12682', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 6.184589385986328} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 12, ..., 2499985, + 2499992, 2500000]), + col_indices=tensor([135614, 168986, 215859, ..., 402290, 443216, + 486549]), + values=tensor([0.4455, 0.2288, 0.5445, ..., 0.6029, 0.8332, 0.9959]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.5969, 0.6605, 0.1157, ..., 0.5750, 0.9019, 0.4949]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 6.184589385986328 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '21531', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.381713628768921} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 6, ..., 2499988, + 2499995, 2500000]), + col_indices=tensor([192037, 290494, 298239, ..., 203209, 269872, + 299833]), + values=tensor([0.2087, 0.5501, 0.3490, ..., 0.1907, 0.4204, 0.3032]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3717, 0.6256, 0.6803, ..., 0.1727, 0.9290, 0.7130]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.381713628768921 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 6, ..., 2499988, + 2499995, 2500000]), + col_indices=tensor([192037, 290494, 298239, ..., 203209, 269872, + 299833]), + values=tensor([0.2087, 0.5501, 0.3490, ..., 0.1907, 0.4204, 0.3032]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3717, 0.6256, 0.6803, ..., 0.1727, 0.9290, 0.7130]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.381713628768921 seconds + +[40.84, 39.87, 40.71, 40.09, 39.75, 39.79, 39.67, 39.6, 39.76, 40.21] +[153.79] +13.321762561798096 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21531, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.381713628768921, 'TIME_S_1KI': 0.48217517202029264, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2048.753864378929, 'W': 153.79} +[40.84, 39.87, 40.71, 40.09, 39.75, 39.79, 39.67, 39.6, 39.76, 40.21, 41.58, 40.07, 40.12, 40.08, 39.68, 39.76, 39.59, 39.74, 40.65, 39.4] +719.9449999999999 +35.997249999999994 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 21531, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.381713628768921, 'TIME_S_1KI': 0.48217517202029264, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2048.753864378929, 'W': 153.79, 'J_1KI': 95.15367908499043, 'W_1KI': 7.142724443825182, 'W_D': 117.79275, 'J_D': 1569.2070470012427, 'W_D_1KI': 5.4708443639403646, 'J_D_1KI': 0.25409151288562376} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..2972d33 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 2288, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.664037466049194, "TIME_S_1KI": 4.660855535860661, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2026.948439025879, "W": 124.48, "J_1KI": 885.9040380357864, "W_1KI": 54.40559440559441, "W_D": 87.53675000000001, "J_D": 1425.389450272322, "W_D_1KI": 38.25906905594406, "J_D_1KI": 16.721621090884643} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..16454d4 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 
500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.45888853073120117} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 46, ..., 12499954, + 12499981, 12500000]), + col_indices=tensor([ 49072, 112972, 116125, ..., 361100, 370525, + 412609]), + values=tensor([0.2354, 0.3643, 0.0075, ..., 0.8603, 0.9033, 0.4787]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.8534, 0.6179, 0.0838, ..., 0.4832, 0.1451, 0.6650]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 0.45888853073120117 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '2288', '-ss', '500000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.664037466049194} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 27, 48, ..., 12499952, + 12499972, 12500000]), + col_indices=tensor([ 3842, 7633, 8971, ..., 455163, 462741, + 476944]), + values=tensor([0.8075, 0.4724, 0.8976, ..., 0.5541, 0.2969, 0.9431]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0930, 0.1654, 0.5776, ..., 0.1397, 0.2168, 0.6873]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.664037466049194 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 27, 48, ..., 12499952, + 12499972, 12500000]), + col_indices=tensor([ 3842, 7633, 8971, ..., 455163, 462741, + 476944]), + values=tensor([0.8075, 0.4724, 0.8976, ..., 0.5541, 0.2969, 0.9431]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0930, 0.1654, 0.5776, ..., 0.1397, 0.2168, 0.6873]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.664037466049194 seconds + +[40.58, 39.64, 39.65, 39.68, 39.54, 40.08, 40.01, 55.98, 39.52, 39.5] +[124.48] +16.283326148986816 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2288, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.664037466049194, 'TIME_S_1KI': 4.660855535860661, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2026.948439025879, 'W': 124.48} +[40.58, 39.64, 39.65, 39.68, 39.54, 40.08, 40.01, 55.98, 39.52, 39.5, 40.22, 40.13, 39.62, 39.51, 44.89, 40.37, 40.29, 40.46, 39.51, 39.67] +738.865 +36.94325 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 2288, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.664037466049194, 'TIME_S_1KI': 4.660855535860661, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2026.948439025879, 'W': 124.48, 'J_1KI': 885.9040380357864, 'W_1KI': 54.40559440559441, 'W_D': 87.53675000000001, 'J_D': 1425.389450272322, 'W_D_1KI': 38.25906905594406, 'J_D_1KI': 16.721621090884643} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..f1d4670 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 94004, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.384105920791626, "TIME_S_1KI": 0.12110235650388947, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1783.8070621061327, "W": 114.34, "J_1KI": 18.975863389920992, "W_1KI": 1.2163312199480873, "W_D": 78.24925, "J_D": 1220.758831157148, "W_D_1KI": 0.8324034083656016, "J_D_1KI": 0.008854978600544674} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..0b0a270 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04173541069030762} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in 
beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 9, ..., 249994, 249996, + 250000]), + col_indices=tensor([ 2875, 11250, 41033, ..., 32140, 46339, 48534]), + values=tensor([0.9791, 0.8918, 0.3698, ..., 0.3708, 0.0646, 0.7857]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.2100, 0.1946, 0.2511, ..., 0.6374, 0.0985, 0.4430]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.04173541069030762 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '25158', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.8100624084472656} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2, 7, ..., 249990, 249993, + 250000]), + col_indices=tensor([41615, 42906, 15488, ..., 31340, 31947, 35417]), + values=tensor([0.0772, 0.0729, 0.2688, ..., 0.4463, 0.5032, 0.2162]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.5388, 0.5198, 0.1392, ..., 0.7254, 0.7688, 0.9922]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 2.8100624084472656 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '94004', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 11.384105920791626} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 13, ..., 249985, 249990, + 250000]), + col_indices=tensor([ 2939, 14473, 20084, ..., 45023, 47616, 49448]), + values=tensor([0.6894, 0.8051, 0.8240, ..., 0.7425, 0.1769, 0.4023]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4931, 0.5539, 0.3107, ..., 0.8523, 0.9706, 0.6879]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 11.384105920791626 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 13, ..., 249985, 249990, + 250000]), + col_indices=tensor([ 2939, 14473, 20084, ..., 45023, 47616, 49448]), + values=tensor([0.6894, 0.8051, 0.8240, ..., 0.7425, 0.1769, 0.4023]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4931, 0.5539, 0.3107, ..., 0.8523, 0.9706, 0.6879]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 11.384105920791626 seconds + +[40.36, 39.91, 39.67, 39.83, 39.58, 39.72, 40.16, 39.72, 39.91, 39.98] +[114.34] +15.600901365280151 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 94004, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.384105920791626, 'TIME_S_1KI': 0.12110235650388947, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1783.8070621061327, 'W': 114.34} +[40.36, 39.91, 39.67, 39.83, 39.58, 39.72, 40.16, 39.72, 39.91, 39.98, 40.89, 39.87, 39.99, 39.51, 39.5, 39.56, 39.75, 45.01, 39.69, 39.64] +721.815 +36.09075 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 94004, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 11.384105920791626, 'TIME_S_1KI': 0.12110235650388947, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1783.8070621061327, 'W': 114.34, 'J_1KI': 18.975863389920992, 'W_1KI': 1.2163312199480873, 'W_D': 78.24925, 'J_D': 1220.758831157148, 'W_D_1KI': 0.8324034083656016, 'J_D_1KI': 0.008854978600544674} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..ca129d7 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 46418, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.356630086898804, "TIME_S_1KI": 0.22311668074666732, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1954.5975055408476, "W": 147.82, "J_1KI": 42.10861100307742, "W_1KI": 3.1845404799862123, "W_D": 111.69725, "J_D": 1476.952822525859, "W_D_1KI": 2.4063348270067646, "J_D_1KI": 0.051840553815476} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..0dda590 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.06997370719909668} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: 
Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 102, ..., 2499883, + 2499945, 2500000]), + col_indices=tensor([ 266, 347, 3014, ..., 46062, 47055, 47354]), + values=tensor([0.8937, 0.7241, 0.1967, ..., 0.6923, 0.3348, 0.4624]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3743, 0.1265, 0.3703, ..., 0.6234, 0.9781, 0.6963]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 0.06997370719909668 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '15005', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 3.3941891193389893} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 54, 97, ..., 2499911, + 2499955, 2500000]), + col_indices=tensor([ 2370, 4930, 5051, ..., 41423, 44524, 44646]), + values=tensor([0.0412, 0.5807, 0.8088, ..., 0.8046, 0.7553, 0.5801]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6465, 0.4915, 0.1151, ..., 0.6682, 0.4745, 0.9594]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 3.3941891193389893 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '46418', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.356630086898804} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 46, 86, ..., 2499896, + 2499949, 2500000]), + col_indices=tensor([ 1254, 3268, 3363, ..., 48004, 48805, 49373]), + values=tensor([0.4618, 0.8696, 0.7740, ..., 0.9354, 0.3130, 0.0156]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3463, 0.0749, 0.0037, ..., 0.8223, 0.0446, 0.2738]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.356630086898804 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 46, 86, ..., 2499896, + 2499949, 2500000]), + col_indices=tensor([ 1254, 3268, 3363, ..., 48004, 48805, 49373]), + values=tensor([0.4618, 0.8696, 0.7740, ..., 0.9354, 0.3130, 0.0156]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.3463, 0.0749, 0.0037, ..., 0.8223, 0.0446, 0.2738]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.356630086898804 seconds + +[40.8, 41.43, 39.77, 40.05, 40.2, 40.01, 40.25, 39.8, 39.91, 39.83] +[147.82] +13.222821712493896 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.356630086898804, 'TIME_S_1KI': 0.22311668074666732, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1954.5975055408476, 'W': 147.82} +[40.8, 41.43, 39.77, 40.05, 40.2, 40.01, 40.25, 39.8, 39.91, 39.83, 40.83, 39.7, 40.25, 40.38, 40.19, 39.67, 41.03, 39.72, 39.6, 39.53] +722.4549999999999 +36.122749999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 46418, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.356630086898804, 'TIME_S_1KI': 0.22311668074666732, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1954.5975055408476, 'W': 147.82, 'J_1KI': 42.10861100307742, 'W_1KI': 3.1845404799862123, 'W_D': 111.69725, 'J_D': 1476.952822525859, 'W_D_1KI': 2.4063348270067646, 'J_D_1KI': 0.051840553815476} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..ce3aa00 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 1681, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.72816252708435, "TIME_S_1KI": 6.382012211233998, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2210.374014186859, "W": 116.29999999999998, "J_1KI": 1314.9161297958708, "W_1KI": 69.18500892325996, "W_D": 80.01049999999998, "J_D": 1520.6631991581912, "W_D_1KI": 47.596966091612124, "J_D_1KI": 28.314673463183894} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..fde54b7 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.6244547367095947} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 524, 1064, ..., 24999000, + 24999502, 25000000]), + col_indices=tensor([ 60, 76, 165, ..., 49872, 49944, 49977]), + values=tensor([0.3464, 0.5127, 0.2524, ..., 0.4585, 0.6152, 0.8409]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3169, 0.2467, 0.7317, ..., 0.4966, 0.9013, 0.2021]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 0.6244547367095947 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '1681', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.72816252708435} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 503, 984, ..., 24998995, + 24999487, 25000000]), + col_indices=tensor([ 80, 111, 167, ..., 49695, 49904, 49943]), + values=tensor([0.9741, 0.4832, 0.1000, ..., 0.9253, 0.4991, 0.7681]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.5734, 0.0323, 0.0030, ..., 0.5787, 0.7337, 0.7260]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.72816252708435 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 503, 984, ..., 24998995, + 24999487, 25000000]), + col_indices=tensor([ 80, 111, 167, ..., 49695, 49904, 49943]), + values=tensor([0.9741, 0.4832, 0.1000, ..., 0.9253, 0.4991, 0.7681]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.5734, 0.0323, 0.0030, ..., 0.5787, 0.7337, 0.7260]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.72816252708435 seconds + +[40.52, 40.33, 46.14, 39.67, 40.04, 40.05, 39.67, 39.99, 39.82, 39.93] +[116.3] +19.0057954788208 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1681, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.72816252708435, 'TIME_S_1KI': 6.382012211233998, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2210.374014186859, 'W': 116.29999999999998} +[40.52, 40.33, 46.14, 39.67, 40.04, 40.05, 39.67, 39.99, 39.82, 39.93, 42.61, 39.87, 39.87, 39.75, 39.67, 39.77, 39.75, 39.54, 40.39, 39.88] +725.7900000000001 +36.289500000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 1681, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.72816252708435, 'TIME_S_1KI': 6.382012211233998, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2210.374014186859, 'W': 116.29999999999998, 'J_1KI': 1314.9161297958708, 'W_1KI': 69.18500892325996, 'W_D': 80.01049999999998, 'J_D': 1520.6631991581912, 'W_D_1KI': 47.596966091612124, 'J_D_1KI': 28.314673463183894} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..73ac2d2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 128261, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.553161382675171, "TIME_S_1KI": 0.08227880168309283, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1360.8756747579575, "W": 104.26, "J_1KI": 10.610206335191194, "W_1KI": 0.8128737496199158, "W_D": 68.19500000000001, "J_D": 890.1296435844899, "W_D_1KI": 0.5316892898075019, "J_D_1KI": 0.004145369908292481} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..b70442e --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.0555570125579834} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor 
support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([42051, 34515, 37611, ..., 41473, 46289, 26191]), + values=tensor([0.0144, 0.4378, 0.1715, ..., 0.0832, 0.5030, 0.6687]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1124, 0.4102, 0.7912, ..., 0.3553, 0.2259, 0.3847]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.0555570125579834 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '18899', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.547147274017334} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 24999, 25000, 25000]), + col_indices=tensor([ 9684, 40954, 42907, ..., 26506, 37971, 35337]), + values=tensor([0.0766, 0.5354, 0.2778, ..., 0.4912, 0.6494, 0.7856]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.2325, 0.6288, 0.8060, ..., 0.4059, 0.0257, 0.5351]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 1.547147274017334 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '128261', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.553161382675171} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24999, 24999, 25000]), + col_indices=tensor([43490, 45422, 41208, ..., 48729, 34812, 29106]), + values=tensor([0.6729, 0.4582, 0.1719, ..., 0.9792, 0.1938, 0.4197]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3715, 0.8721, 0.7070, ..., 0.0207, 0.1985, 0.3006]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.553161382675171 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 24999, 24999, 25000]), + col_indices=tensor([43490, 45422, 41208, ..., 48729, 34812, 29106]), + values=tensor([0.6729, 0.4582, 0.1719, ..., 0.9792, 0.1938, 0.4197]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3715, 0.8721, 0.7070, ..., 0.0207, 0.1985, 0.3006]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.553161382675171 seconds + +[42.38, 39.84, 39.75, 39.86, 39.86, 40.05, 40.23, 39.88, 39.93, 39.73] +[104.26] +13.052711248397827 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 128261, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.553161382675171, 'TIME_S_1KI': 0.08227880168309283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1360.8756747579575, 'W': 104.26} +[42.38, 39.84, 39.75, 39.86, 39.86, 40.05, 40.23, 39.88, 39.93, 39.73, 40.22, 39.71, 39.72, 39.44, 39.86, 40.03, 39.57, 39.83, 40.02, 45.11] +721.3 +36.065 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 128261, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.553161382675171, 'TIME_S_1KI': 0.08227880168309283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1360.8756747579575, 'W': 104.26, 'J_1KI': 10.610206335191194, 'W_1KI': 0.8128737496199158, 'W_D': 68.19500000000001, 'J_D': 890.1296435844899, 'W_D_1KI': 0.5316892898075019, 'J_D_1KI': 0.004145369908292481} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..b465c9c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 110115, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.965436697006226, "TIME_S_1KI": 0.1086630949190049, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1446.9478559374809, "W": 111.69, "J_1KI": 13.14033379591773, "W_1KI": 1.0143032284429914, "W_D": 75.71975, "J_D": 980.9520092633368, "W_D_1KI": 0.6876424646959997, "J_D_1KI": 0.006244766514062568} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..b1f3c1f --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.05198168754577637} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 124998, 124999, + 125000]), + col_indices=tensor([ 927, 8914, 5646, ..., 41839, 2622, 37662]), + values=tensor([0.2093, 0.3505, 0.4434, ..., 0.7585, 0.2953, 0.8139]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.7158, 0.3261, 0.8838, ..., 0.1644, 0.9864, 0.1779]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.05198168754577637 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '20199', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.9260566234588623} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 9, ..., 124993, 124995, + 125000]), + col_indices=tensor([ 5280, 12669, 18309, ..., 32915, 33761, 44585]), + values=tensor([0.1104, 0.6442, 0.1166, ..., 0.0611, 0.5204, 0.6774]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.3981, 0.9235, 0.5295, ..., 0.9056, 0.3690, 0.2596]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 1.9260566234588623 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '110115', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 11.965436697006226} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 7, ..., 124990, 124994, + 125000]), + col_indices=tensor([ 332, 15911, 38702, ..., 27905, 36936, 47310]), + values=tensor([0.9967, 0.4995, 0.1475, ..., 0.0565, 0.7404, 0.0608]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.3103, 0.3240, 0.6987, ..., 0.1758, 0.7445, 0.7079]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 11.965436697006226 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 7, ..., 124990, 124994, + 125000]), + col_indices=tensor([ 332, 15911, 38702, ..., 27905, 36936, 47310]), + values=tensor([0.9967, 0.4995, 0.1475, ..., 0.0565, 0.7404, 0.0608]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.3103, 0.3240, 0.6987, ..., 0.1758, 0.7445, 0.7079]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 11.965436697006226 seconds + +[40.59, 40.33, 40.2, 40.19, 39.75, 39.57, 39.56, 39.52, 39.58, 39.6] +[111.69] +12.955034971237183 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 110115, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.965436697006226, 'TIME_S_1KI': 0.1086630949190049, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1446.9478559374809, 'W': 111.69} +[40.59, 40.33, 40.2, 40.19, 39.75, 39.57, 39.56, 39.52, 39.58, 39.6, 40.22, 41.59, 39.67, 40.02, 39.96, 39.85, 39.52, 40.16, 39.94, 39.58] +719.405 +35.97025 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 110115, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 11.965436697006226, 'TIME_S_1KI': 0.1086630949190049, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1446.9478559374809, 'W': 111.69, 'J_1KI': 13.14033379591773, 'W_1KI': 1.0143032284429914, 'W_D': 75.71975, 'J_D': 980.9520092633368, 'W_D_1KI': 0.6876424646959997, 'J_D_1KI': 0.006244766514062568} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..2c10823 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 447788, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.585598468780518, "TIME_S_1KI": 0.023639754680296294, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1210.6447257900238, "W": 96.76, "J_1KI": 2.7036113647306848, "W_1KI": 0.21608439708076144, "W_D": 60.779250000000005, "J_D": 760.4596780691744, "W_D_1KI": 0.13573219916567664, "J_D_1KI": 0.00030311709819306603} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..38edffa --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.01688361167907715} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 2499, 2499, 2500]), + col_indices=tensor([2619, 4724, 4043, ..., 721, 4005, 3452]), + values=tensor([0.3560, 0.4737, 0.9490, ..., 0.6650, 0.5511, 0.5102]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.9035, 0.6347, 0.7264, ..., 0.8885, 0.4271, 0.9746]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.01688361167907715 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '62190', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.4582650661468506} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2500, 2500]), + col_indices=tensor([ 785, 2078, 964, ..., 3093, 2409, 4914]), + values=tensor([0.2674, 0.7127, 0.0446, ..., 0.5887, 0.3242, 0.2984]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.2053, 0.2435, 0.6452, ..., 0.2463, 0.9693, 0.2980]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 1.4582650661468506 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '447788', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.585598468780518} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2500, 2500]), + col_indices=tensor([3170, 56, 953, ..., 2101, 4088, 4138]), + values=tensor([0.7441, 0.4324, 0.6982, ..., 0.2565, 0.3946, 0.1156]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7808, 0.5836, 0.6876, ..., 0.2450, 0.1275, 0.2911]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.585598468780518 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 2499, 2500, 2500]), + col_indices=tensor([3170, 56, 953, ..., 2101, 4088, 4138]), + values=tensor([0.7441, 0.4324, 0.6982, ..., 0.2565, 0.3946, 0.1156]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.7808, 0.5836, 0.6876, ..., 0.2450, 0.1275, 0.2911]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.585598468780518 seconds + +[40.68, 39.36, 44.27, 39.07, 39.18, 39.33, 39.3, 39.56, 39.47, 39.52] +[96.76] +12.511830568313599 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 447788, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.585598468780518, 'TIME_S_1KI': 0.023639754680296294, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1210.6447257900238, 'W': 96.76} +[40.68, 39.36, 44.27, 39.07, 39.18, 39.33, 39.3, 39.56, 39.47, 39.52, 45.98, 39.49, 39.82, 39.41, 39.22, 39.42, 41.09, 39.62, 39.26, 39.31] +719.615 +35.98075 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 447788, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.585598468780518, 'TIME_S_1KI': 0.023639754680296294, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1210.6447257900238, 'W': 96.76, 'J_1KI': 2.7036113647306848, 'W_1KI': 0.21608439708076144, 'W_D': 60.779250000000005, 'J_D': 760.4596780691744, 'W_D_1KI': 0.13573219916567664, 'J_D_1KI': 0.00030311709819306603} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..f972143 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 251242, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.495816707611084, "TIME_S_1KI": 0.041775725028502735, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1274.650104198456, "W": 98.84, "J_1KI": 5.073395786526361, "W_1KI": 0.393405561172097, "W_D": 63.200750000000006, "J_D": 815.0429236434699, "W_D_1KI": 0.25155328328862214, "J_D_1KI": 0.0010012389779122206} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..489fd07 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.020148515701293945} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 10, ..., 24991, 24995, 25000]), + col_indices=tensor([ 119, 931, 3406, ..., 3461, 3840, 3846]), + values=tensor([0.6773, 0.1678, 0.7190, ..., 0.1084, 0.6735, 0.7339]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0928, 0.4745, 0.9490, ..., 0.8279, 0.2614, 0.8062]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.020148515701293945 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52113', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.177922248840332} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 11, ..., 24987, 24992, 25000]), + col_indices=tensor([1164, 1818, 2007, ..., 3806, 4515, 4674]), + values=tensor([0.7293, 0.4677, 0.0557, ..., 0.9608, 0.8022, 0.8772]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.5594, 0.0674, 0.3236, ..., 0.9684, 0.1982, 0.3579]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 2.177922248840332 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '251242', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.495816707611084} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 8, ..., 24992, 24997, 25000]), + col_indices=tensor([1486, 3242, 3522, ..., 1754, 2627, 4146]), + values=tensor([0.1836, 0.4006, 0.2197, ..., 0.0536, 0.9699, 0.8761]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8727, 0.2072, 0.9866, ..., 0.7187, 0.7974, 0.6886]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.495816707611084 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 8, ..., 24992, 24997, 25000]), + col_indices=tensor([1486, 3242, 3522, ..., 1754, 2627, 4146]), + values=tensor([0.1836, 0.4006, 0.2197, ..., 0.0536, 0.9699, 0.8761]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8727, 0.2072, 0.9866, ..., 0.7187, 0.7974, 0.6886]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.495816707611084 seconds + +[40.75, 39.26, 39.42, 39.22, 39.47, 40.61, 39.23, 39.53, 39.63, 39.38] +[98.84] +12.896095752716064 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 251242, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.495816707611084, 'TIME_S_1KI': 0.041775725028502735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.650104198456, 'W': 98.84} +[40.75, 39.26, 39.42, 39.22, 39.47, 40.61, 39.23, 39.53, 39.63, 39.38, 40.47, 39.12, 40.14, 40.63, 39.59, 39.14, 39.11, 39.22, 39.39, 39.55] +712.785 +35.63925 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 251242, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.495816707611084, 'TIME_S_1KI': 0.041775725028502735, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1274.650104198456, 'W': 98.84, 'J_1KI': 5.073395786526361, 'W_1KI': 0.393405561172097, 'W_D': 63.200750000000006, 'J_D': 815.0429236434699, 'W_D_1KI': 0.25155328328862214, 'J_D_1KI': 0.0010012389779122206} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..7edf7f8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 151851, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.677409887313843, "TIME_S_1KI": 0.07031504492768466, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1443.7484180927277, "W": 117.47, "J_1KI": 9.507664869462351, "W_1KI": 0.7735872664651534, "W_D": 81.37549999999999, "J_D": 1000.1340716481208, "W_D_1KI": 0.5358904452390829, "J_D_1KI": 0.0035290544365139706} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..f6f6dab --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.027439117431640625} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 55, 107, ..., 249902, 249955, + 250000]), + col_indices=tensor([ 26, 155, 397, ..., 4652, 4756, 4760]), + values=tensor([0.9134, 0.8993, 0.8423, ..., 0.2444, 0.0288, 0.7023]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3636, 0.7288, 0.4529, ..., 0.6147, 0.2907, 0.3015]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.027439117431640625 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '38266', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 2.645953893661499} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 49, 92, ..., 249897, 249949, + 250000]), + col_indices=tensor([ 507, 568, 655, ..., 4839, 4844, 4959]), + values=tensor([0.4543, 0.3787, 0.6932, ..., 0.4487, 0.3087, 0.1431]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.4167, 0.3931, 0.0326, ..., 0.8288, 0.4472, 0.1506]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 2.645953893661499 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '151851', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.677409887313843} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 62, 123, ..., 249890, 249948, + 250000]), + col_indices=tensor([ 53, 60, 101, ..., 4781, 4787, 4941]), + values=tensor([0.8546, 0.9316, 0.6470, ..., 0.1212, 0.6179, 0.4318]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7208, 0.6556, 0.2590, ..., 0.8294, 0.6979, 0.2347]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.677409887313843 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 62, 123, ..., 249890, 249948, + 250000]), + col_indices=tensor([ 53, 60, 101, ..., 4781, 4787, 4941]), + values=tensor([0.8546, 0.9316, 0.6470, ..., 0.1212, 0.6179, 0.4318]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7208, 0.6556, 0.2590, ..., 0.8294, 0.6979, 0.2347]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.677409887313843 seconds + +[39.83, 39.62, 39.22, 39.2, 39.58, 39.98, 44.8, 39.52, 39.58, 39.44] +[117.47] +12.290358543395996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 151851, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.677409887313843, 'TIME_S_1KI': 0.07031504492768466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1443.7484180927277, 'W': 117.47} +[39.83, 39.62, 39.22, 39.2, 39.58, 39.98, 44.8, 39.52, 39.58, 39.44, 40.11, 39.32, 39.47, 44.63, 39.38, 39.24, 39.42, 39.88, 39.66, 39.4] +721.8900000000001 +36.094500000000004 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 151851, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.677409887313843, 'TIME_S_1KI': 0.07031504492768466, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1443.7484180927277, 'W': 117.47, 'J_1KI': 9.507664869462351, 'W_1KI': 0.7735872664651534, 'W_D': 81.37549999999999, 'J_D': 1000.1340716481208, 'W_D_1KI': 0.5358904452390829, 'J_D_1KI': 0.0035290544365139706} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..0dcc054 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 91742, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.460400104522705, "TIME_S_1KI": 0.11401975218027409, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1797.7089305996894, "W": 131.45, "J_1KI": 19.595266405786766, "W_1KI": 1.4328224804342613, "W_D": 95.54724999999999, "J_D": 1306.7032683091759, "W_D_1KI": 1.0414777310283185, "J_D_1KI": 0.011352245765607012} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..ee62fae --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.03591585159301758} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 282, 523, ..., 1249534, + 1249768, 1250000]), + col_indices=tensor([ 14, 20, 65, ..., 4981, 4988, 4994]), + values=tensor([0.5427, 0.7626, 0.3688, ..., 0.1462, 0.4395, 0.5084]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.3604, 0.6234, 0.1526, ..., 0.4035, 0.4868, 0.5530]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.03591585159301758 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '29235', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 3.345984697341919} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 247, 522, ..., 1249509, + 1249766, 1250000]), + col_indices=tensor([ 11, 41, 47, ..., 4983, 4993, 4996]), + values=tensor([0.4860, 0.1371, 0.2214, ..., 0.6634, 0.1469, 0.9637]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.5776, 0.4725, 0.0368, ..., 0.6036, 0.3775, 0.0011]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 3.345984697341919 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '91742', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.460400104522705} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 245, 499, ..., 1249503, + 1249746, 1250000]), + col_indices=tensor([ 11, 13, 56, ..., 4961, 4967, 4994]), + values=tensor([0.1643, 0.9353, 0.3976, ..., 0.1683, 0.6963, 0.8462]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6649, 0.4073, 0.4211, ..., 0.1078, 0.2188, 0.0388]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.460400104522705 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 245, 499, ..., 1249503, + 1249746, 1250000]), + col_indices=tensor([ 11, 13, 56, ..., 4961, 4967, 4994]), + values=tensor([0.1643, 0.9353, 0.3976, ..., 0.1683, 0.6963, 0.8462]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.6649, 0.4073, 0.4211, ..., 0.1078, 0.2188, 0.0388]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.460400104522705 seconds + +[40.58, 39.92, 39.93, 39.37, 39.59, 39.9, 39.71, 40.07, 40.76, 39.67] +[131.45] +13.675990343093872 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91742, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.460400104522705, 'TIME_S_1KI': 0.11401975218027409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1797.7089305996894, 'W': 131.45} +[40.58, 39.92, 39.93, 39.37, 39.59, 39.9, 39.71, 40.07, 40.76, 39.67, 40.34, 39.45, 39.48, 40.31, 39.86, 39.84, 39.9, 39.88, 39.9, 39.78] +718.055 +35.90275 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 91742, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.460400104522705, 'TIME_S_1KI': 0.11401975218027409, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1797.7089305996894, 'W': 131.45, 'J_1KI': 19.595266405786766, 'W_1KI': 1.4328224804342613, 'W_D': 95.54724999999999, 'J_D': 1306.7032683091759, 'W_D_1KI': 1.0414777310283185, 'J_D_1KI': 0.011352245765607012} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..44fa72d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 52297, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.236442565917969, "TIME_S_1KI": 0.195736706998833, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1815.1719595336915, "W": 138.62, "J_1KI": 34.70891178334688, "W_1KI": 2.650630055261296, "W_D": 102.202, "J_D": 1338.293208831787, "W_D_1KI": 1.9542612386943803, "J_D_1KI": 0.0373685151862321} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..4fe4648 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.053244590759277344} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 487, 976, ..., 2498986, + 2499481, 2500000]), + col_indices=tensor([ 13, 19, 40, ..., 4975, 4977, 4981]), + values=tensor([0.0276, 0.4992, 0.8339, ..., 0.1235, 0.3053, 0.8819]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6982, 0.6943, 0.5654, ..., 0.0343, 0.1924, 0.4615]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.053244590759277344 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19720', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 3.959235429763794} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 500, 997, ..., 2499001, + 2499525, 2500000]), + col_indices=tensor([ 22, 31, 36, ..., 4976, 4979, 4990]), + values=tensor([0.9139, 0.4529, 0.5623, ..., 0.7413, 0.5022, 0.1210]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.1909, 0.5057, 0.7269, ..., 0.6307, 0.9165, 0.6325]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 3.959235429763794 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '52297', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.236442565917969} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 495, 967, ..., 2499009, + 2499501, 2500000]), + col_indices=tensor([ 2, 3, 29, ..., 4974, 4984, 4998]), + values=tensor([0.7947, 0.6825, 0.2906, ..., 0.1208, 0.9049, 0.2265]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7067, 0.8426, 0.8818, ..., 0.1022, 0.5608, 0.1343]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.236442565917969 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 495, 967, ..., 2499009, + 2499501, 2500000]), + col_indices=tensor([ 2, 3, 29, ..., 4974, 4984, 4998]), + values=tensor([0.7947, 0.6825, 0.2906, ..., 0.1208, 0.9049, 0.2265]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7067, 0.8426, 0.8818, ..., 0.1022, 0.5608, 0.1343]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.236442565917969 seconds + +[40.97, 39.52, 39.62, 39.4, 39.87, 45.09, 39.68, 39.56, 39.77, 39.67] +[138.62] +13.094589233398438 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.236442565917969, 'TIME_S_1KI': 0.195736706998833, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1815.1719595336915, 'W': 138.62} +[40.97, 39.52, 39.62, 39.4, 39.87, 45.09, 39.68, 39.56, 39.77, 39.67, 41.69, 43.46, 41.5, 39.76, 39.97, 39.88, 39.62, 39.85, 40.84, 39.61] +728.3600000000001 +36.418000000000006 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 52297, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.236442565917969, 'TIME_S_1KI': 0.195736706998833, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1815.1719595336915, 'W': 138.62, 'J_1KI': 34.70891178334688, 'W_1KI': 2.650630055261296, 'W_D': 102.202, 'J_D': 1338.293208831787, 'W_D_1KI': 1.9542612386943803, 'J_D_1KI': 0.0373685151862321} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..c26577c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 28289, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.465468645095825, "TIME_S_1KI": 0.36994834193841514, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1925.348158209324, "W": 138.77, "J_1KI": 68.05995822437428, "W_1KI": 4.905440277139524, "W_D": 102.84225, "J_D": 1426.8727867954374, "W_D_1KI": 3.6354148255505674, "J_D_1KI": 0.1285098386493184} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..4cf3fef --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.07305312156677246} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1021, 1969, ..., 4997949, + 4999001, 5000000]), + col_indices=tensor([ 1, 7, 18, ..., 4990, 4995, 4998]), + values=tensor([0.6231, 0.9951, 0.8252, ..., 0.9756, 0.2344, 0.2983]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0355, 0.1933, 0.9788, ..., 0.6197, 0.2365, 0.0393]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 0.07305312156677246 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '14373', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 5.334789037704468} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1024, 2040, ..., 4998012, + 4999011, 5000000]), + col_indices=tensor([ 1, 7, 11, ..., 4993, 4994, 4998]), + values=tensor([0.0452, 0.7491, 0.1728, ..., 0.4616, 0.1426, 0.7347]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.8794, 0.6343, 0.4463, ..., 0.6355, 0.8597, 0.5087]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 5.334789037704468 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '28289', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.465468645095825} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 999, 2016, ..., 4997966, + 4998976, 5000000]), + col_indices=tensor([ 2, 9, 14, ..., 4993, 4994, 4995]), + values=tensor([0.1135, 0.5000, 0.4923, ..., 0.1880, 0.5290, 0.9229]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0610, 0.8156, 0.2755, ..., 0.7165, 0.9008, 0.9624]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.465468645095825 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 999, 2016, ..., 4997966, + 4998976, 5000000]), + col_indices=tensor([ 2, 9, 14, ..., 4993, 4994, 4995]), + values=tensor([0.1135, 0.5000, 0.4923, ..., 0.1880, 0.5290, 0.9229]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.0610, 0.8156, 0.2755, ..., 0.7165, 0.9008, 0.9624]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.465468645095825 seconds + +[40.8, 40.7, 39.71, 40.23, 39.76, 39.64, 39.79, 39.68, 39.73, 39.72] +[138.77] +13.874383211135864 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28289, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.465468645095825, 'TIME_S_1KI': 0.36994834193841514, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1925.348158209324, 'W': 138.77} +[40.8, 40.7, 39.71, 40.23, 39.76, 39.64, 39.79, 39.68, 39.73, 39.72, 40.37, 39.65, 40.3, 39.61, 40.3, 40.15, 39.64, 39.78, 39.65, 39.58] +718.555 +35.927749999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 28289, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.465468645095825, 'TIME_S_1KI': 0.36994834193841514, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1925.348158209324, 'W': 138.77, 'J_1KI': 68.05995822437428, 'W_1KI': 4.905440277139524, 'W_D': 102.84225, 'J_D': 1426.8727867954374, 'W_D_1KI': 3.6354148255505674, 'J_D_1KI': 0.1285098386493184} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..edb21c0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 19365, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.708929538726807, "TIME_S_1KI": 0.5530043655423086, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1994.3257465744018, "W": 137.56, "J_1KI": 102.98609587267761, "W_1KI": 7.103537309579138, "W_D": 101.59625, "J_D": 1472.9283013260365, "W_D_1KI": 5.246385231087013, "J_D_1KI": 0.270921003412704} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..4cf90e5 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 0.0932457447052002} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1472, 2953, ..., 7496947, + 7498464, 7500000]), + col_indices=tensor([ 7, 9, 10, ..., 4989, 4991, 4994]), + values=tensor([0.9868, 0.9731, 0.3711, ..., 0.9277, 0.6596, 0.4560]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.4150, 0.1407, 0.7534, ..., 0.5098, 0.0887, 0.6433]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 0.0932457447052002 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '11260', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.105090618133545} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1440, 2968, ..., 7497040, + 7498552, 7500000]), + col_indices=tensor([ 4, 15, 16, ..., 4993, 4997, 4998]), + values=tensor([0.0079, 0.6033, 0.5837, ..., 0.4070, 0.1537, 0.2862]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.9942, 0.8351, 0.2634, ..., 0.1203, 0.3761, 0.2393]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 6.105090618133545 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '19365', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.708929538726807} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1483, 3029, ..., 7497007, + 7498484, 7500000]), + col_indices=tensor([ 0, 1, 2, ..., 4991, 4992, 4999]), + values=tensor([0.2037, 0.3378, 0.5245, ..., 0.5597, 0.6700, 0.6684]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.7739, 0.8705, 0.9400, ..., 0.4166, 0.9328, 0.4141]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.708929538726807 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1483, 3029, ..., 7497007, + 7498484, 7500000]), + col_indices=tensor([ 0, 1, 2, ..., 4991, 4992, 4999]), + values=tensor([0.2037, 0.3378, 0.5245, ..., 0.5597, 0.6700, 0.6684]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.7739, 0.8705, 0.9400, ..., 0.4166, 0.9328, 0.4141]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.708929538726807 seconds + +[42.29, 39.85, 39.72, 39.53, 39.58, 40.0, 40.36, 39.5, 39.8, 40.06] +[137.56] +14.4978609085083 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.708929538726807, 'TIME_S_1KI': 0.5530043655423086, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1994.3257465744018, 'W': 137.56} +[42.29, 39.85, 39.72, 39.53, 39.58, 40.0, 40.36, 39.5, 39.8, 40.06, 40.39, 40.11, 40.04, 39.88, 41.07, 39.92, 39.57, 39.46, 39.73, 39.57] +719.275 +35.96375 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 19365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.708929538726807, 'TIME_S_1KI': 0.5530043655423086, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1994.3257465744018, 'W': 137.56, 'J_1KI': 102.98609587267761, 'W_1KI': 7.103537309579138, 'W_D': 101.59625, 'J_D': 1472.9283013260365, 'W_D_1KI': 5.246385231087013, 'J_D_1KI': 0.270921003412704} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..d0ddde8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 4054, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.937983989715576, "TIME_S_1KI": 2.6980720250901764, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1733.5753089761736, "W": 118.18, "J_1KI": 427.62094449338275, "W_1KI": 29.15145535273804, "W_D": 74.48075000000001, "J_D": 1092.5536401593092, "W_D_1KI": 18.37216329551061, "J_D_1KI": 4.531860704368675} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..fbc9c8c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 0.25896668434143066} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1967, 3942, ..., 9996007, + 9997987, 10000000]), + col_indices=tensor([ 0, 1, 3, ..., 4989, 4995, 4996]), + values=tensor([0.9037, 0.0824, 0.8127, ..., 0.2074, 0.6033, 0.5497]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6227, 0.6614, 0.9902, ..., 0.2660, 0.9614, 0.3260]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 0.25896668434143066 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '4054', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.937983989715576} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2032, 3978, ..., 9995983, + 9998000, 10000000]), + col_indices=tensor([ 1, 2, 3, ..., 4988, 4993, 4994]), + values=tensor([0.2457, 0.5907, 0.9941, ..., 0.3357, 0.2301, 0.2269]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2230, 0.0807, 0.5998, ..., 0.1430, 0.1498, 0.1360]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.937983989715576 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2032, 3978, ..., 9995983, + 9998000, 10000000]), + col_indices=tensor([ 1, 2, 3, ..., 4988, 4993, 4994]), + values=tensor([0.2457, 0.5907, 0.9941, ..., 0.3357, 0.2301, 0.2269]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2230, 0.0807, 0.5998, ..., 0.1430, 0.1498, 0.1360]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.937983989715576 seconds + +[40.89, 40.0, 39.72, 39.6, 39.91, 39.64, 39.68, 39.81, 39.88, 45.41] +[118.18] +14.66893982887268 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4054, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.937983989715576, 'TIME_S_1KI': 2.6980720250901764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1733.5753089761736, 'W': 118.18} +[40.89, 40.0, 39.72, 39.6, 39.91, 39.64, 39.68, 39.81, 39.88, 45.41, 40.78, 40.13, 44.65, 73.6, 81.22, 73.52, 67.52, 42.65, 48.49, 40.85] +873.9849999999999 +43.69924999999999 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 4054, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.937983989715576, 'TIME_S_1KI': 2.6980720250901764, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1733.5753089761736, 'W': 118.18, 'J_1KI': 427.62094449338275, 'W_1KI': 29.15145535273804, 'W_D': 74.48075000000001, 'J_D': 1092.5536401593092, 'W_D_1KI': 18.37216329551061, 'J_D_1KI': 4.531860704368675} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..a25c6eb --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 3758, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.1781005859375, "TIME_S_1KI": 2.708382274065327, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1956.303444838524, "W": 122.9, "J_1KI": 520.570368504131, "W_1KI": 32.70356572645024, "W_D": 86.40525000000001, "J_D": 1375.3855836219193, "W_D_1KI": 22.992349654071315, "J_D_1KI": 6.118240993632601} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..fd072be --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.29501914978027344} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2490, 5019, ..., 12495032, + 12497514, 12500000]), + col_indices=tensor([ 0, 1, 2, ..., 4992, 4993, 4994]), + values=tensor([0.4521, 0.6419, 0.1807, ..., 0.6429, 0.2936, 0.1963]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.6822, 0.2314, 0.5095, ..., 0.2635, 0.2792, 0.8048]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 0.29501914978027344 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3559', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.942713260650635} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2478, 4936, ..., 12495093, + 12497610, 12500000]), + col_indices=tensor([ 1, 2, 3, ..., 4992, 4995, 4998]), + values=tensor([0.6608, 0.6509, 0.8650, ..., 0.2551, 0.6130, 0.3679]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.9258, 0.3878, 0.0027, ..., 0.4707, 0.4169, 0.1792]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 9.942713260650635 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '3758', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.1781005859375} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2527, 4930, ..., 12495017, + 12497540, 12500000]), + col_indices=tensor([ 1, 2, 3, ..., 4995, 4997, 4999]), + values=tensor([0.2576, 0.5438, 0.7818, ..., 0.1593, 0.4265, 0.7530]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0335, 0.1835, 0.7330, ..., 0.5684, 0.8047, 0.4810]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.1781005859375 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2527, 4930, ..., 12495017, + 12497540, 12500000]), + col_indices=tensor([ 1, 2, 3, ..., 4995, 4997, 4999]), + values=tensor([0.2576, 0.5438, 0.7818, ..., 0.1593, 0.4265, 0.7530]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0335, 0.1835, 0.7330, ..., 0.5684, 0.8047, 0.4810]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.1781005859375 seconds + +[41.47, 40.36, 39.99, 40.33, 39.95, 40.28, 39.98, 39.88, 39.78, 45.28] +[122.9] +15.917847394943237 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3758, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.1781005859375, 'TIME_S_1KI': 2.708382274065327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1956.303444838524, 'W': 122.9} +[41.47, 40.36, 39.99, 40.33, 39.95, 40.28, 39.98, 39.88, 39.78, 45.28, 40.38, 39.63, 39.74, 44.87, 40.1, 40.15, 40.75, 40.1, 40.4, 40.08] +729.895 +36.494749999999996 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 3758, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.1781005859375, 'TIME_S_1KI': 2.708382274065327, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1956.303444838524, 'W': 122.9, 'J_1KI': 520.570368504131, 'W_1KI': 32.70356572645024, 'W_D': 86.40525000000001, 'J_D': 1375.3855836219193, 'W_D_1KI': 22.992349654071315, 'J_D_1KI': 6.118240993632601} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..c2948bd --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 505155, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3298921585083, "TIME_S_1KI": 0.020448955584935914, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1240.863141503334, "W": 95.58, "J_1KI": 2.4564007908529737, "W_1KI": 0.1892092526056359, "W_D": 59.942499999999995, "J_D": 778.2008669131993, "W_D_1KI": 0.11866159891518444, "J_D_1KI": 0.0002349013647596964} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..107ac32 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.013948440551757812} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3548, 3508, 4386, 3528, 2702, 3004, 3629, 4756, 1243, + 213, 2804, 1698, 689, 4639, 4580, 1578, 3327, 694, + 1408, 2610, 4665, 1701, 4464, 632, 2037, 2500, 1517, + 2177, 1389, 4628, 306, 1568, 3761, 3194, 3074, 2522, + 3705, 2681, 4246, 249, 1916, 3633, 4678, 1217, 107, + 2703, 1648, 2700, 2961, 4336, 1084, 4254, 396, 3740, + 3046, 2671, 2061, 1766, 3209, 4565, 1985, 2700, 4834, + 2805, 875, 2910, 2400, 2621, 4389, 955, 1399, 578, + 2242, 4964, 3239, 222, 1256, 3099, 3567, 2886, 3721, + 1671, 1246, 4445, 3748, 4434, 1765, 983, 1353, 3314, + 2249, 2525, 4314, 2896, 2171, 3775, 3320, 730, 2027, + 2731, 3976, 3825, 4171, 1978, 4468, 2371, 386, 1118, + 3263, 840, 3509, 4865, 3412, 2573, 1668, 4140, 1828, + 1203, 819, 4214, 2533, 3446, 643, 4924, 2902, 1393, + 4975, 841, 1924, 1159, 1396, 1327, 3531, 2008, 2330, + 3344, 0, 1785, 2268, 4522, 1792, 2828, 305, 4487, + 4986, 3210, 3476, 4418, 3986, 3188, 1206, 4837, 2877, + 2143, 1316, 3014, 3807, 339, 1928, 4332, 1721, 1955, + 1430, 1820, 1733, 132, 1124, 4910, 399, 4998, 3203, + 1066, 4770, 3787, 2390, 4240, 862, 2987, 1396, 4199, + 2140, 4278, 4725, 3767, 4419, 1019, 3708, 90, 2851, + 2610, 3655, 3402, 2040, 1712, 1375, 4589, 2905, 1572, + 545, 3985, 3399, 582, 4328, 3912, 2552, 83, 2255, + 1709, 772, 4299, 2146, 3329, 2442, 3295, 60, 173, + 543, 4997, 2966, 3912, 1602, 135, 2282, 3935, 2764, + 2342, 3756, 4573, 3705, 1470, 3025, 1498, 4276, 668, + 3561, 4033, 260, 3652, 775, 4020, 1031, 2617, 2294, + 2109, 2487, 3590, 1199, 2797, 1290, 3990]), + values=tensor([0.6994, 0.2438, 0.4802, 0.0829, 0.0677, 0.0178, 0.7638, + 0.1665, 0.8626, 0.8633, 0.8809, 0.3889, 0.5842, 0.4728, + 0.4918, 0.0860, 0.7324, 0.8491, 0.3798, 0.3500, 0.4975, + 0.0872, 0.8650, 0.3555, 0.4399, 0.2630, 0.0729, 0.3054, + 0.9674, 0.7941, 0.9749, 0.5236, 0.8844, 0.2916, 0.4218, + 0.0889, 0.1637, 0.0411, 0.1963, 0.8167, 0.6130, 0.2282, + 0.0754, 0.2471, 0.0778, 0.4752, 0.2737, 0.1262, 0.2451, + 0.2934, 0.3944, 0.0397, 0.3394, 0.7909, 0.5453, 0.0895, + 0.2329, 0.3870, 0.5830, 0.0888, 0.8460, 0.7742, 0.7374, + 0.8528, 0.2281, 0.9068, 0.0092, 0.0150, 0.9568, 0.4508, + 0.2063, 0.9542, 0.6049, 0.5147, 0.9346, 0.5104, 0.1196, + 0.8281, 0.2227, 0.7282, 0.2980, 0.7830, 0.6065, 0.2936, + 0.6589, 0.1956, 0.8884, 0.6244, 0.8765, 0.9279, 0.5777, + 0.8162, 0.0894, 0.3744, 0.1591, 0.3051, 0.9299, 0.1618, + 0.7383, 0.9907, 0.5121, 0.6397, 0.8338, 0.9391, 0.2607, + 0.4098, 0.6073, 0.2048, 0.8476, 0.1799, 0.1533, 0.5127, + 0.3612, 0.8614, 0.5878, 0.7167, 0.1917, 0.2581, 0.3381, + 0.5246, 0.2437, 0.9851, 0.9032, 0.6527, 0.5590, 0.5454, + 0.0253, 0.0710, 0.1587, 0.3574, 0.7354, 0.6182, 0.5365, + 0.0479, 0.8974, 0.6075, 0.5864, 0.7635, 0.4139, 0.6734, + 0.0016, 0.0763, 0.3633, 0.3792, 0.6630, 0.0919, 0.1222, + 0.5443, 0.8587, 0.0627, 0.1060, 0.4814, 0.8481, 0.2733, + 0.7553, 0.9339, 0.1865, 0.2260, 0.9547, 0.8541, 0.1158, + 0.0258, 0.5314, 0.6595, 0.5573, 0.7953, 0.3786, 0.1641, + 0.8997, 0.2507, 0.1855, 0.6951, 0.2863, 0.1627, 0.3079, + 0.5000, 0.3625, 0.8186, 0.3705, 0.2957, 0.1551, 0.0216, + 0.3714, 0.8284, 0.9522, 0.8937, 0.5141, 0.0703, 0.2182, + 0.9274, 0.7097, 0.4349, 0.6001, 0.7581, 0.1855, 0.1138, + 0.0069, 0.0143, 0.6779, 0.4223, 0.2934, 0.1234, 0.5974, 
+ 0.7303, 0.9182, 0.4432, 0.6166, 0.0534, 0.9601, 0.1664, + 0.7453, 0.2693, 0.7496, 0.1561, 0.1695, 0.4247, 0.5083, + 0.7464, 0.9108, 0.9708, 0.4346, 0.1849, 0.3357, 0.6306, + 0.3234, 0.0643, 0.0684, 0.2529, 0.3070, 0.5381, 0.4691, + 0.3912, 0.0111, 0.6019, 0.4700, 0.8282, 0.9967, 0.0138, + 0.0331, 0.4050, 0.1544, 0.2207, 0.6016, 0.9303, 0.9139, + 0.9840, 0.7431, 0.3482, 0.1124, 0.6413]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.3949, 0.9428, 0.0102, ..., 0.0310, 0.9492, 0.7070]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.013948440551757812 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '75277', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.5646839141845703} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([3746, 1654, 2453, 1836, 1687, 4126, 577, 989, 1161, + 2149, 3910, 3295, 4472, 133, 3358, 1352, 3096, 3601, + 3758, 2512, 1092, 4489, 1464, 1660, 3070, 3361, 4966, + 822, 3500, 236, 2632, 1344, 3148, 1004, 2075, 4538, + 1923, 4311, 3791, 3093, 1373, 470, 112, 1162, 2705, + 3514, 4485, 3748, 3597, 4486, 4629, 78, 32, 3433, + 1822, 3440, 1230, 93, 1755, 4162, 1309, 3789, 3501, + 2710, 1926, 2165, 381, 2357, 4887, 3442, 1756, 2858, + 2903, 4359, 3016, 2687, 1689, 4625, 1621, 3805, 3094, + 1702, 3528, 1035, 4698, 4982, 1451, 1771, 2089, 3195, + 4919, 4133, 1397, 4984, 2564, 4549, 4619, 2832, 4040, + 4237, 2079, 1796, 1577, 4625, 3108, 1608, 19, 3574, + 3985, 1287, 3355, 4562, 3138, 4018, 4235, 751, 3240, + 1452, 49, 2916, 1280, 2827, 2493, 4891, 2490, 4843, + 2541, 1858, 112, 4172, 3878, 2893, 375, 3701, 1061, + 2843, 3468, 53, 4322, 1606, 2648, 4201, 4904, 4969, + 3035, 4661, 1890, 3624, 2603, 426, 3014, 1375, 437, + 1036, 1237, 4055, 3154, 3403, 3642, 4899, 4262, 474, + 2778, 534, 2901, 1174, 2089, 4005, 2324, 163, 146, + 519, 4127, 1342, 708, 4532, 125, 4427, 212, 2077, + 4307, 3955, 3476, 203, 3487, 4805, 2720, 1985, 727, + 17, 534, 1652, 733, 3208, 1391, 4340, 3700, 1893, + 162, 3604, 4298, 2419, 2136, 802, 564, 129, 2585, + 1324, 2167, 2710, 4323, 116, 1401, 831, 2891, 2913, + 1398, 372, 4148, 3575, 2563, 4021, 40, 3400, 4783, + 824, 3068, 2795, 4664, 2245, 2717, 3120, 1588, 2273, + 2302, 3501, 1853, 4736, 2336, 2390, 3070, 2778, 2782, + 746, 4888, 427, 933, 3852, 4383, 1889]), + values=tensor([0.9927, 0.0340, 0.4488, 0.4635, 0.2230, 0.1466, 0.4603, + 0.2785, 0.5078, 0.0027, 0.7560, 0.4421, 0.3531, 0.7573, + 0.7663, 0.5287, 0.4866, 0.0160, 0.0811, 0.2667, 0.0905, + 0.2771, 0.9262, 0.8504, 0.9933, 0.1455, 0.8531, 0.9069, + 0.5790, 0.0929, 0.4671, 0.6608, 0.9664, 0.1896, 0.7953, + 0.9833, 0.4291, 0.7412, 0.1386, 0.5823, 0.9668, 0.6184, + 0.6067, 0.5096, 0.9072, 0.2000, 0.6029, 0.7709, 0.3337, + 0.1225, 0.3581, 0.4923, 0.5670, 0.5531, 0.2219, 0.3460, + 0.1648, 0.9635, 0.0634, 0.0066, 0.2847, 0.4656, 0.8836, + 0.9572, 0.7046, 0.5077, 0.6573, 0.4027, 
0.0704, 0.6249, + 0.7137, 0.0161, 0.2786, 0.6964, 0.9732, 0.0101, 0.9546, + 0.7324, 0.8008, 0.6620, 0.3443, 0.9296, 0.6939, 0.5327, + 0.2792, 0.5590, 0.4343, 0.4000, 0.5836, 0.6246, 0.3293, + 0.4244, 0.5279, 0.7005, 0.2154, 0.0350, 0.9262, 0.9148, + 0.3048, 0.0077, 0.5649, 0.4772, 0.8216, 0.6110, 0.2610, + 0.1616, 0.7469, 0.5265, 0.3053, 0.8103, 0.5940, 0.9742, + 0.5862, 0.5529, 0.3872, 0.3034, 0.6804, 0.6806, 0.6660, + 0.5771, 0.6250, 0.9114, 0.6649, 0.3603, 0.1633, 0.2146, + 0.1054, 0.1040, 0.0319, 0.4499, 0.9767, 0.8617, 0.7495, + 0.1652, 0.5616, 0.5919, 0.4704, 0.2766, 0.9966, 0.7157, + 0.1989, 0.6419, 0.8518, 0.4191, 0.4983, 0.6045, 0.7545, + 0.3583, 0.1657, 0.2221, 0.8782, 0.5595, 0.9120, 0.4869, + 0.2789, 0.3396, 0.9955, 0.5682, 0.9596, 0.9327, 0.6012, + 0.2415, 0.5228, 0.5492, 0.9986, 0.8668, 0.0412, 0.8168, + 0.4480, 0.6255, 0.6348, 0.6366, 0.4660, 0.8412, 0.9469, + 0.3182, 0.0180, 0.7704, 0.3602, 0.4075, 0.8083, 0.5267, + 0.5330, 0.4008, 0.8286, 0.6612, 0.5353, 0.6215, 0.4553, + 0.1920, 0.3166, 0.3250, 0.3744, 0.5410, 0.8495, 0.8267, + 0.2666, 0.2654, 0.6447, 0.8392, 0.9176, 0.4756, 0.9542, + 0.8318, 0.5561, 0.5761, 0.1449, 0.0902, 0.9651, 0.4745, + 0.1336, 0.4136, 0.1136, 0.8153, 0.3693, 0.4404, 0.2291, + 0.9951, 0.7922, 0.8470, 0.5195, 0.9072, 0.0501, 0.5628, + 0.6200, 0.3160, 0.6988, 0.7319, 0.9009, 0.3185, 0.5934, + 0.7917, 0.9332, 0.5038, 0.7465, 0.1646, 0.8555, 0.0988, + 0.4002, 0.8098, 0.8642, 0.7419, 0.3377, 0.6378, 0.4276, + 0.2050, 0.6970, 0.0429, 0.1896, 0.1443]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.0748, 0.8455, 0.5581, ..., 0.9449, 0.9600, 0.8816]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.5646839141845703 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '505155', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3298921585083} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1247, 4227, 2384, 1324, 2688, 777, 3611, 2096, 3777, + 4017, 3060, 3853, 275, 2455, 1734, 3565, 37, 2244, + 1733, 4350, 1234, 2896, 3213, 1561, 1980, 4472, 4663, + 506, 4951, 3982, 1179, 751, 4549, 4447, 2215, 404, + 243, 3834, 4527, 3698, 4091, 3198, 2812, 3614, 4523, + 1643, 1713, 1084, 2707, 4, 4057, 1938, 3151, 2591, + 736, 3345, 437, 2659, 175, 3499, 4582, 1472, 654, + 3080, 4209, 728, 2500, 4977, 4359, 4315, 4550, 1180, + 65, 1612, 4551, 4050, 2941, 4296, 4434, 1223, 1238, + 1086, 3880, 4530, 1316, 2102, 4761, 3908, 1050, 3705, + 3194, 4362, 3882, 122, 751, 3713, 3799, 1795, 2520, + 1510, 2238, 4244, 1885, 2083, 3076, 2704, 4833, 3679, + 4672, 3718, 2996, 4020, 4241, 4395, 853, 4475, 4071, + 1868, 2791, 21, 4190, 1530, 3067, 1932, 3665, 3853, + 1426, 1044, 3601, 1259, 2412, 133, 3883, 2099, 3331, + 2256, 2974, 3456, 1750, 1889, 4841, 2200, 4977, 1425, + 1536, 158, 2655, 3709, 3160, 4144, 3380, 1076, 1185, + 3818, 1983, 1852, 4777, 1300, 431, 3274, 3575, 4837, + 325, 4137, 4592, 671, 998, 3275, 4613, 992, 3846, + 2916, 2447, 2605, 2672, 2276, 4850, 4965, 1297, 694, + 259, 1199, 4640, 4880, 3240, 1438, 11, 2797, 2545, + 3917, 4254, 214, 1182, 3713, 4287, 4785, 2360, 1203, + 4398, 3290, 2436, 3411, 3739, 928, 1526, 663, 2059, + 3931, 404, 1876, 3217, 4800, 4377, 4206, 877, 1100, + 1979, 2229, 3016, 180, 945, 4382, 2217, 3064, 609, + 1772, 4045, 3022, 1213, 4864, 3162, 1697, 2645, 550, + 2485, 4784, 4550, 726, 4920, 1331, 357, 3709, 4153, + 3528, 2484, 3864, 1866, 4322, 727, 4703]), + values=tensor([0.4663, 0.4340, 0.1210, 0.0765, 0.6336, 0.1902, 0.3091, + 0.4203, 0.8220, 0.0850, 0.1025, 0.1361, 0.5908, 0.4685, + 0.3710, 0.6463, 0.1664, 0.6973, 0.9489, 0.6122, 0.1354, + 0.4027, 0.3554, 0.9045, 0.3464, 0.4691, 0.3320, 0.2792, + 0.0515, 0.5607, 0.4585, 0.0483, 0.1589, 0.9453, 0.6795, + 0.0575, 0.3527, 0.3951, 0.3470, 0.3333, 0.5355, 0.7562, + 0.1077, 0.3035, 0.4048, 0.6937, 0.2967, 0.6561, 0.1737, + 0.9035, 0.4547, 0.3603, 0.1408, 0.4830, 0.8065, 0.3582, + 0.5455, 0.5723, 0.4523, 0.5057, 0.6324, 0.9028, 0.0577, + 0.4956, 0.6657, 0.9848, 0.6391, 0.6395, 0.4769, 0.0056, + 0.1044, 0.2976, 0.6221, 0.9725, 0.0205, 0.3420, 0.9773, + 0.7515, 0.8006, 0.2336, 0.4394, 0.9756, 0.3057, 0.6186, + 0.7004, 0.6806, 0.0982, 0.4285, 0.8466, 0.0638, 0.7804, + 0.2686, 0.1423, 0.1658, 0.0440, 0.8558, 0.2543, 0.9629, + 0.0443, 0.2812, 0.7112, 0.2679, 0.0325, 0.1914, 0.4697, + 0.5212, 0.5036, 0.8278, 0.2212, 0.7637, 0.6411, 0.8324, + 0.1085, 0.7903, 0.3041, 0.7685, 0.7610, 0.1247, 0.0153, + 0.0898, 0.0347, 0.7987, 0.9119, 0.5255, 0.4670, 0.2413, + 0.8008, 0.0124, 0.0719, 0.3268, 0.5991, 0.1237, 0.5734, + 0.5860, 0.8964, 0.6851, 0.7980, 0.9940, 0.8001, 0.3866, + 0.4921, 0.3129, 0.6180, 0.1949, 0.6009, 0.4868, 0.5484, + 0.3765, 0.9472, 0.7499, 0.8386, 0.8281, 0.0445, 0.8521, + 0.1827, 0.1424, 0.9964, 0.2930, 0.6405, 0.8880, 0.8455, + 0.8730, 0.0256, 0.3303, 0.4997, 0.7068, 0.9986, 0.4515, + 0.8492, 0.9495, 0.7252, 0.1393, 0.3125, 0.0955, 0.0411, + 0.0855, 0.4594, 0.0571, 0.4760, 0.9753, 0.9680, 0.6765, + 0.6632, 0.7732, 0.2205, 0.7219, 0.1231, 0.5507, 0.9386, + 0.9853, 0.1484, 0.5471, 0.2415, 0.3412, 0.3970, 0.9721, + 0.4075, 0.7397, 0.6041, 0.4919, 0.1150, 0.1028, 0.3707, + 0.5907, 0.4305, 0.9162, 0.9956, 0.3282, 0.6112, 0.6540, + 0.0961, 0.8665, 0.2552, 0.6175, 0.4850, 0.4310, 0.1165, + 0.3274, 0.7923, 0.1515, 0.5293, 0.8418, 0.1450, 0.8268, + 0.9665, 0.7626, 0.7605, 0.9986, 
0.9489, 0.8011, 0.9290, + 0.5451, 0.8590, 0.5389, 0.0080, 0.8363, 0.8570, 0.5734, + 0.7613, 0.9018, 0.0697, 0.9293, 0.2543, 0.2531, 0.2854, + 0.3722, 0.6889, 0.4487, 0.3475, 0.2897]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.8985, 0.0813, 0.9894, ..., 0.1805, 0.5543, 0.3501]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.3298921585083 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1247, 4227, 2384, 1324, 2688, 777, 3611, 2096, 3777, + 4017, 3060, 3853, 275, 2455, 1734, 3565, 37, 2244, + 1733, 4350, 1234, 2896, 3213, 1561, 1980, 4472, 4663, + 506, 4951, 3982, 1179, 751, 4549, 4447, 2215, 404, + 243, 3834, 4527, 3698, 4091, 3198, 2812, 3614, 4523, + 1643, 1713, 1084, 2707, 4, 4057, 1938, 3151, 2591, + 736, 3345, 437, 2659, 175, 3499, 4582, 1472, 654, + 3080, 4209, 728, 2500, 4977, 4359, 4315, 4550, 1180, + 65, 1612, 4551, 4050, 2941, 4296, 4434, 1223, 1238, + 1086, 3880, 4530, 1316, 2102, 4761, 3908, 1050, 3705, + 3194, 4362, 3882, 122, 751, 3713, 3799, 1795, 2520, + 1510, 2238, 4244, 1885, 2083, 3076, 2704, 4833, 3679, + 4672, 3718, 2996, 4020, 4241, 4395, 853, 4475, 4071, + 1868, 2791, 21, 4190, 1530, 3067, 1932, 3665, 3853, + 1426, 1044, 3601, 1259, 2412, 133, 3883, 2099, 3331, + 2256, 2974, 3456, 1750, 1889, 4841, 2200, 4977, 1425, + 1536, 158, 2655, 3709, 3160, 4144, 3380, 1076, 1185, + 3818, 1983, 1852, 4777, 1300, 431, 3274, 3575, 4837, + 325, 4137, 4592, 671, 998, 3275, 4613, 992, 3846, + 2916, 2447, 2605, 2672, 2276, 4850, 4965, 1297, 694, + 259, 1199, 4640, 4880, 3240, 1438, 11, 2797, 2545, + 3917, 4254, 214, 1182, 3713, 4287, 4785, 2360, 1203, + 4398, 3290, 2436, 3411, 3739, 928, 1526, 663, 2059, + 3931, 404, 1876, 3217, 4800, 4377, 4206, 877, 1100, + 1979, 2229, 3016, 180, 945, 4382, 2217, 3064, 609, + 1772, 4045, 3022, 1213, 4864, 3162, 1697, 2645, 550, + 2485, 4784, 4550, 726, 4920, 1331, 357, 3709, 4153, + 3528, 2484, 3864, 1866, 4322, 727, 4703]), + values=tensor([0.4663, 0.4340, 0.1210, 0.0765, 0.6336, 0.1902, 0.3091, + 0.4203, 0.8220, 0.0850, 0.1025, 0.1361, 0.5908, 0.4685, + 0.3710, 0.6463, 0.1664, 0.6973, 0.9489, 0.6122, 0.1354, + 0.4027, 0.3554, 0.9045, 0.3464, 0.4691, 0.3320, 0.2792, + 0.0515, 0.5607, 0.4585, 0.0483, 0.1589, 0.9453, 0.6795, + 0.0575, 0.3527, 0.3951, 0.3470, 0.3333, 0.5355, 0.7562, + 0.1077, 0.3035, 0.4048, 0.6937, 0.2967, 0.6561, 0.1737, + 0.9035, 0.4547, 0.3603, 0.1408, 0.4830, 0.8065, 0.3582, + 0.5455, 0.5723, 0.4523, 0.5057, 0.6324, 0.9028, 0.0577, + 0.4956, 0.6657, 0.9848, 0.6391, 0.6395, 0.4769, 0.0056, + 0.1044, 0.2976, 0.6221, 0.9725, 0.0205, 0.3420, 0.9773, + 0.7515, 0.8006, 0.2336, 0.4394, 0.9756, 0.3057, 0.6186, + 0.7004, 0.6806, 0.0982, 0.4285, 0.8466, 0.0638, 0.7804, + 0.2686, 0.1423, 0.1658, 0.0440, 0.8558, 0.2543, 0.9629, + 0.0443, 0.2812, 0.7112, 0.2679, 0.0325, 0.1914, 0.4697, + 0.5212, 0.5036, 0.8278, 0.2212, 0.7637, 0.6411, 0.8324, + 0.1085, 0.7903, 0.3041, 0.7685, 0.7610, 0.1247, 0.0153, + 0.0898, 0.0347, 0.7987, 0.9119, 0.5255, 0.4670, 0.2413, + 0.8008, 0.0124, 0.0719, 0.3268, 0.5991, 0.1237, 0.5734, + 
0.5860, 0.8964, 0.6851, 0.7980, 0.9940, 0.8001, 0.3866, + 0.4921, 0.3129, 0.6180, 0.1949, 0.6009, 0.4868, 0.5484, + 0.3765, 0.9472, 0.7499, 0.8386, 0.8281, 0.0445, 0.8521, + 0.1827, 0.1424, 0.9964, 0.2930, 0.6405, 0.8880, 0.8455, + 0.8730, 0.0256, 0.3303, 0.4997, 0.7068, 0.9986, 0.4515, + 0.8492, 0.9495, 0.7252, 0.1393, 0.3125, 0.0955, 0.0411, + 0.0855, 0.4594, 0.0571, 0.4760, 0.9753, 0.9680, 0.6765, + 0.6632, 0.7732, 0.2205, 0.7219, 0.1231, 0.5507, 0.9386, + 0.9853, 0.1484, 0.5471, 0.2415, 0.3412, 0.3970, 0.9721, + 0.4075, 0.7397, 0.6041, 0.4919, 0.1150, 0.1028, 0.3707, + 0.5907, 0.4305, 0.9162, 0.9956, 0.3282, 0.6112, 0.6540, + 0.0961, 0.8665, 0.2552, 0.6175, 0.4850, 0.4310, 0.1165, + 0.3274, 0.7923, 0.1515, 0.5293, 0.8418, 0.1450, 0.8268, + 0.9665, 0.7626, 0.7605, 0.9986, 0.9489, 0.8011, 0.9290, + 0.5451, 0.8590, 0.5389, 0.0080, 0.8363, 0.8570, 0.5734, + 0.7613, 0.9018, 0.0697, 0.9293, 0.2543, 0.2531, 0.2854, + 0.3722, 0.6889, 0.4487, 0.3475, 0.2897]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.8985, 0.0813, 0.9894, ..., 0.1805, 0.5543, 0.3501]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.3298921585083 seconds + +[40.32, 39.94, 39.79, 39.67, 40.95, 39.71, 39.34, 39.43, 39.56, 39.61] +[95.58] +12.982455968856812 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 505155, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3298921585083, 'TIME_S_1KI': 0.020448955584935914, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1240.863141503334, 'W': 95.58} +[40.32, 39.94, 39.79, 39.67, 40.95, 39.71, 39.34, 39.43, 39.56, 39.61, 39.84, 39.42, 39.39, 39.45, 39.34, 39.21, 39.4, 39.18, 39.13, 39.91] +712.75 +35.6375 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 505155, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3298921585083, 'TIME_S_1KI': 0.020448955584935914, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1240.863141503334, 'W': 95.58, 'J_1KI': 2.4564007908529737, 'W_1KI': 0.1892092526056359, 'W_D': 59.942499999999995, 'J_D': 778.2008669131993, 'W_D_1KI': 0.11866159891518444, 'J_D_1KI': 0.0002349013647596964} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..1cf76cc --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Epyc 7313P", "CORES": 16, "ITERATIONS": 461197, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.877047538757324, "TIME_S_1KI": 0.023584384848030937, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1221.180625166893, "W": 95.74, "J_1KI": 2.6478503224585004, "W_1KI": 0.20759024885244265, "W_D": 60.15774999999999, "J_D": 767.3227360939383, "W_D_1KI": 0.13043829426470682, "J_D_1KI": 0.0002828255480081328} diff --git a/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.output 
b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..09b3a13 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/epyc_7313p_max_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.0320582389831543} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 1249, 1249, 1250]), + col_indices=tensor([2956, 558, 3504, ..., 1528, 4784, 1878]), + values=tensor([0.5224, 0.1438, 0.5941, ..., 0.0368, 0.6760, 0.3012]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.8976, 0.5094, 0.6995, ..., 0.0327, 0.1649, 0.7937]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.0320582389831543 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '32752', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.745659589767456} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([1743, 4461, 346, ..., 1137, 3893, 4349]), + values=tensor([0.7861, 0.9854, 0.5411, ..., 0.5282, 0.2898, 0.9587]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.5708, 0.3567, 0.0850, ..., 0.6472, 0.1624, 0.7150]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.745659589767456 seconds + +['apptainer', 'run', 'pytorch-epyc_7313p.sif', 'python3', 'spmv.py', 'synthetic', 'csr', '461197', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.877047538757324} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([4244, 4483, 4692, ..., 3607, 4429, 290]), + values=tensor([0.6080, 0.0136, 0.3918, ..., 0.5066, 0.3391, 0.6977]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.1101, 0.0872, 0.5048, ..., 0.5059, 0.1642, 0.4124]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.877047538757324 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1250, 1250, 1250]), + col_indices=tensor([4244, 4483, 4692, ..., 3607, 4429, 290]), + values=tensor([0.6080, 0.0136, 0.3918, ..., 0.5066, 0.3391, 0.6977]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.1101, 0.0872, 0.5048, ..., 0.5059, 0.1642, 0.4124]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.877047538757324 seconds + +[42.19, 40.35, 39.27, 40.36, 39.17, 39.06, 39.17, 39.21, 39.04, 39.45] +[95.74] +12.755176782608032 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 461197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.877047538757324, 'TIME_S_1KI': 0.023584384848030937, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1221.180625166893, 'W': 95.74} +[42.19, 40.35, 39.27, 40.36, 39.17, 39.06, 39.17, 39.21, 39.04, 39.45, 39.84, 39.7, 39.64, 39.58, 39.75, 39.24, 39.17, 39.36, 39.12, 39.43] +711.645 +35.58225 +{'CPU': 'Epyc 7313P', 'CORES': 16, 'ITERATIONS': 461197, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.877047538757324, 'TIME_S_1KI': 0.023584384848030937, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1221.180625166893, 'W': 95.74, 'J_1KI': 2.6478503224585004, 'W_1KI': 0.20759024885244265, 'W_D': 60.15774999999999, 'J_D': 767.3227360939383, 'W_D_1KI': 0.13043829426470682, 'J_D_1KI': 0.0002828255480081328} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json new file mode 100644 index 0000000..b0c2a19 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 33525, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.643972158432007, "TIME_S_1KI": 0.3174935766870099, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1221.4049115991593, "W": 88.39, "J_1KI": 36.43265955553048, "W_1KI": 2.6365398956002983, "W_D": 72.1155, "J_D": 996.5179986698627, "W_D_1KI": 2.151096196868009, "J_D_1KI": 0.06416394323245365} diff --git 
a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output new file mode 100644 index 0000000..dc5d053 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.04636812210083008} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 13, ..., 999983, + 999994, 1000000]), + col_indices=tensor([ 8176, 34026, 54478, ..., 84998, 92494, 98961]), + values=tensor([0.4351, 0.9999, 0.3437, ..., 0.3684, 0.7357, 0.5729]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8699, 0.2767, 0.3378, ..., 0.5349, 0.7243, 0.7857]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 0.04636812210083008 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '22644', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 7.091935634613037} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 23, ..., 999978, + 999991, 1000000]), + col_indices=tensor([26374, 42582, 44652, ..., 65952, 74293, 78884]), + values=tensor([0.4256, 0.1611, 0.6127, ..., 0.5242, 0.3400, 0.0348]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.0369, 0.6183, 0.8933, ..., 0.8293, 0.4628, 0.9829]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 7.091935634613037 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33525', '-ss', '100000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.643972158432007} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 17, ..., 999978, + 999994, 1000000]), + col_indices=tensor([ 9594, 11946, 25379, ..., 52892, 57506, 73818]), + values=tensor([0.4978, 0.8076, 0.6002, ..., 0.2925, 0.4675, 0.5122]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8765, 0.8954, 0.8874, ..., 0.8137, 0.3245, 0.8007]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.643972158432007 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 17, ..., 999978, + 999994, 1000000]), + col_indices=tensor([ 9594, 11946, 25379, ..., 52892, 57506, 73818]), + values=tensor([0.4978, 0.8076, 0.6002, ..., 0.2925, 0.4675, 0.5122]), + size=(100000, 100000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8765, 0.8954, 0.8874, ..., 0.8137, 0.3245, 0.8007]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 1000000 +Density: 0.0001 +Time: 10.643972158432007 seconds + +[18.37, 17.76, 17.76, 17.79, 18.13, 17.85, 18.03, 17.72, 18.33, 18.48] +[88.39] +13.818360805511475 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33525, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.643972158432007, 'TIME_S_1KI': 0.3174935766870099, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1221.4049115991593, 'W': 88.39} +[18.37, 17.76, 17.76, 17.79, 18.13, 17.85, 18.03, 17.72, 18.33, 18.48, 18.41, 18.08, 18.64, 18.55, 18.21, 18.02, 18.07, 18.01, 17.96, 17.9] +325.49 +16.2745 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 33525, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.643972158432007, 'TIME_S_1KI': 0.3174935766870099, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1221.4049115991593, 'W': 88.39, 'J_1KI': 36.43265955553048, 'W_1KI': 2.6365398956002983, 'W_D': 72.1155, 'J_D': 996.5179986698627, 'W_D_1KI': 2.151096196868009, 'J_D_1KI': 0.06416394323245365} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json new file mode 100644 index 0000000..7fe967e --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2660, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.18576693534851, "TIME_S_1KI": 3.829235689980643, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1328.5220910072328, "W": 81.2, "J_1KI": 499.44439511550104, "W_1KI": 30.526315789473685, "W_D": 64.453, "J_D": 1054.5225902917387, "W_D_1KI": 24.23045112781955, "J_D_1KI": 9.10919215331562} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output new file mode 100644 index 0000000..8cde86c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_0.001.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.39462947845458984} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 104, 208, ..., 9999814, + 9999906, 10000000]), + col_indices=tensor([ 1924, 2222, 4663, ..., 98435, 98556, 99127]), + values=tensor([0.9193, 0.2961, 0.8826, ..., 0.2999, 0.4100, 0.0457]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8186, 0.3714, 0.9798, ..., 0.9009, 0.9275, 0.2252]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 0.39462947845458984 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2660', '-ss', '100000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.18576693534851} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 111, 209, ..., 9999785, + 9999908, 10000000]), + col_indices=tensor([ 4849, 5332, 5597, ..., 99100, 99293, 99777]), + values=tensor([0.3984, 0.3126, 0.3684, ..., 0.2469, 0.5703, 0.8605]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6012, 0.7247, 0.1820, ..., 0.8515, 0.6518, 0.6577]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.18576693534851 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 111, 209, ..., 9999785, + 9999908, 10000000]), + col_indices=tensor([ 4849, 5332, 5597, ..., 99100, 99293, 99777]), + values=tensor([0.3984, 0.3126, 0.3684, ..., 0.2469, 0.5703, 0.8605]), + size=(100000, 100000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.6012, 0.7247, 0.1820, ..., 0.8515, 0.6518, 0.6577]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 10000000 +Density: 0.001 +Time: 10.18576693534851 seconds + +[18.33, 18.01, 18.45, 18.12, 17.88, 17.91, 18.33, 22.42, 18.21, 17.9] +[81.2] +16.361109495162964 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2660, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.18576693534851, 'TIME_S_1KI': 3.829235689980643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1328.5220910072328, 'W': 81.2} +[18.33, 18.01, 18.45, 18.12, 17.88, 17.91, 18.33, 22.42, 18.21, 17.9, 22.63, 20.65, 18.09, 18.03, 18.02, 18.26, 18.0, 18.17, 17.93, 18.06] +334.94000000000005 +16.747000000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2660, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.18576693534851, 'TIME_S_1KI': 3.829235689980643, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1328.5220910072328, 'W': 81.2, 'J_1KI': 499.44439511550104, 'W_1KI': 30.526315789473685, 'W_D': 64.453, 'J_D': 1054.5225902917387, 'W_D_1KI': 24.23045112781955, 'J_D_1KI': 9.10919215331562} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json new file mode 100644 index 0000000..fdd737d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 64522, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3600492477417, "TIME_S_1KI": 0.1605661518201807, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1143.5515343093873, "W": 83.36000000000001, "J_1KI": 17.723435949124134, "W_1KI": 1.291962431418741, "W_D": 66.67275000000001, "J_D": 914.6320244616867, "W_D_1KI": 1.0333335916431607, "J_D_1KI": 0.01601521328605996} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output new file mode 100644 index 0000000..8218685 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_1e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.03250718116760254} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 99998, 99998, + 100000]), + col_indices=tensor([12882, 21465, 63858, ..., 96153, 4715, 69382]), + values=tensor([0.1495, 0.9028, 0.7353, ..., 0.9651, 0.0553, 0.0388]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6296, 0.2324, 0.7696, ..., 0.0819, 0.5051, 0.6795]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 0.03250718116760254 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '32300', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 5.256327390670776} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 3, ..., 99998, 99999, + 100000]), + col_indices=tensor([24100, 22524, 41698, ..., 71518, 54296, 46275]), + values=tensor([0.6729, 0.0195, 0.1396, ..., 0.6516, 0.4177, 0.7883]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.6330, 0.0132, 0.4522, ..., 0.1249, 0.8426, 0.7168]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 5.256327390670776 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '64522', '-ss', '100000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.3600492477417} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99999, + 100000]), + col_indices=tensor([63372, 90175, 43637, ..., 48404, 84175, 41742]), + values=tensor([0.0143, 0.7083, 0.4138, ..., 0.7171, 0.1589, 0.0907]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2672, 0.1313, 0.6231, ..., 0.4829, 0.5251, 0.7815]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.3600492477417 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 99996, 99999, + 100000]), + col_indices=tensor([63372, 90175, 43637, ..., 48404, 84175, 41742]), + values=tensor([0.0143, 0.7083, 0.4138, ..., 0.7171, 0.1589, 0.0907]), + size=(100000, 100000), nnz=100000, layout=torch.sparse_csr) +tensor([0.2672, 0.1313, 0.6231, ..., 0.4829, 0.5251, 0.7815]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 100000 +Density: 1e-05 +Time: 10.3600492477417 seconds + +[18.53, 17.8, 18.06, 18.37, 18.25, 17.96, 18.14, 21.36, 18.43, 17.88] +[83.36] +13.718228578567505 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 64522, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3600492477417, 'TIME_S_1KI': 0.1605661518201807, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1143.5515343093873, 'W': 83.36000000000001} +[18.53, 17.8, 18.06, 18.37, 18.25, 17.96, 18.14, 21.36, 18.43, 17.88, 18.55, 17.91, 18.58, 19.12, 20.8, 18.14, 18.33, 17.85, 18.25, 17.83] +333.745 +16.68725 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 64522, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.3600492477417, 'TIME_S_1KI': 0.1605661518201807, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1143.5515343093873, 'W': 83.36000000000001, 'J_1KI': 17.723435949124134, 'W_1KI': 1.291962431418741, 'W_D': 66.67275000000001, 'J_D': 914.6320244616867, 'W_D_1KI': 1.0333335916431607, 'J_D_1KI': 0.01601521328605996} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.json new file mode 100644 index 0000000..07c28cc --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 46682, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.460205316543579, "TIME_S_1KI": 0.22407363258951157, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1228.6304648017883, "W": 87.08, "J_1KI": 26.319147954281917, "W_1KI": 1.865387087099953, "W_D": 70.695, "J_D": 997.4509727740286, "W_D_1KI": 1.51439527012553, "J_D_1KI": 0.03244066814030097} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.output new file mode 100644 index 0000000..0398e28 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_100000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": 
[100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.03882646560668945} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 12, ..., 499993, 499995, + 500000]), + col_indices=tensor([20130, 29829, 49027, ..., 32515, 51857, 99803]), + values=tensor([0.4194, 0.2208, 0.8236, ..., 0.3620, 0.7637, 0.5129]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.7254, 0.4636, 0.7256, ..., 0.4819, 0.1264, 0.5273]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 0.03882646560668945 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27043', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 6.0826029777526855} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 14, ..., 499987, 499992, + 500000]), + col_indices=tensor([19007, 19428, 24486, ..., 87536, 92504, 96559]), + values=tensor([0.8398, 0.0370, 0.5128, ..., 0.3625, 0.4907, 0.6853]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.6134, 0.6519, 0.5356, ..., 0.0589, 0.6530, 0.3358]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 6.0826029777526855 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46682', '-ss', '100000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [100000, 100000], "MATRIX_ROWS": 100000, "MATRIX_SIZE": 10000000000, "MATRIX_NNZ": 500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.460205316543579} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 19, ..., 499992, 499995, + 500000]), + col_indices=tensor([ 9218, 31473, 33160, ..., 57052, 72094, 94375]), + values=tensor([0.1819, 0.5310, 0.0116, ..., 0.3541, 0.3048, 0.3110]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.6007, 0.7506, 0.6846, ..., 0.1657, 0.4869, 0.7821]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.460205316543579 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 19, ..., 499992, 499995, + 500000]), + col_indices=tensor([ 9218, 31473, 33160, ..., 57052, 72094, 94375]), + values=tensor([0.1819, 0.5310, 0.0116, ..., 0.3541, 0.3048, 0.3110]), + size=(100000, 100000), nnz=500000, layout=torch.sparse_csr) +tensor([0.6007, 0.7506, 0.6846, ..., 0.1657, 0.4869, 0.7821]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([100000, 100000]) +Rows: 100000 +Size: 10000000000 +NNZ: 500000 +Density: 5e-05 +Time: 10.460205316543579 seconds + +[18.32, 17.95, 18.46, 18.2, 18.15, 17.94, 18.28, 18.06, 18.31, 18.04] +[87.08] +14.109215259552002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.460205316543579, 'TIME_S_1KI': 0.22407363258951157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1228.6304648017883, 'W': 87.08} +[18.32, 17.95, 18.46, 18.2, 18.15, 17.94, 18.28, 18.06, 18.31, 18.04, 18.26, 18.28, 17.89, 18.48, 18.48, 18.03, 17.99, 18.35, 18.44, 18.2] +327.69999999999993 +16.384999999999998 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46682, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [100000, 100000], 'MATRIX_ROWS': 100000, 'MATRIX_SIZE': 10000000000, 'MATRIX_NNZ': 500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.460205316543579, 'TIME_S_1KI': 0.22407363258951157, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1228.6304648017883, 'W': 87.08, 'J_1KI': 26.319147954281917, 'W_1KI': 1.865387087099953, 'W_D': 70.695, 'J_D': 997.4509727740286, 'W_D_1KI': 1.51439527012553, 'J_D_1KI': 0.03244066814030097} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json new file mode 100644 index 0000000..1b64231 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 253108, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.677898168563843, "TIME_S_1KI": 0.0421871223689644, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1070.6826658773423, "W": 74.61, "J_1KI": 4.230141543836395, "W_1KI": 0.2947753528138186, "W_D": 58.27775, "J_D": 836.3084939194918, "W_D_1KI": 
0.23024855002607583, "J_D_1KI": 0.0009096849962311576} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output new file mode 100644 index 0000000..1ec4855 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.02065730094909668} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3, 4, ..., 9998, 9998, 10000]), + col_indices=tensor([6728, 7614, 8179, ..., 1004, 2058, 8025]), + values=tensor([0.0279, 0.0803, 0.4096, ..., 0.0871, 0.5549, 0.2943]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.5482, 0.4991, 0.5547, ..., 0.6547, 0.2547, 0.5094]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 0.02065730094909668 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50829', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 2.10860276222229} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 10000, 10000, 10000]), + col_indices=tensor([5153, 2587, 4463, ..., 5061, 9520, 1424]), + values=tensor([0.2204, 0.6183, 0.5613, ..., 0.3086, 0.3306, 0.5938]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.9842, 0.7142, 0.2121, ..., 0.3434, 0.1561, 0.6145]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 2.10860276222229 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '253108', '-ss', '10000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.677898168563843} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 4, ..., 9999, 9999, 10000]), + col_indices=tensor([7291, 527, 4481, ..., 6785, 7922, 1484]), + values=tensor([0.3434, 0.3822, 0.2401, ..., 0.8298, 0.4309, 0.4668]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.2882, 0.0253, 0.9805, ..., 0.1323, 0.6315, 0.0794]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.677898168563843 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 4, ..., 9999, 9999, 10000]), + col_indices=tensor([7291, 527, 4481, ..., 6785, 7922, 1484]), + values=tensor([0.3434, 0.3822, 0.2401, ..., 0.8298, 0.4309, 0.4668]), + size=(10000, 10000), nnz=10000, layout=torch.sparse_csr) +tensor([0.2882, 0.0253, 0.9805, ..., 0.1323, 0.6315, 0.0794]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000 +Density: 0.0001 +Time: 10.677898168563843 seconds + +[18.38, 17.89, 18.34, 17.77, 18.06, 17.77, 18.13, 17.86, 17.98, 18.23] +[74.61] +14.350390911102295 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253108, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.677898168563843, 'TIME_S_1KI': 0.0421871223689644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1070.6826658773423, 'W': 74.61} +[18.38, 17.89, 18.34, 17.77, 18.06, 17.77, 18.13, 17.86, 17.98, 18.23, 18.53, 18.0, 18.04, 18.11, 18.16, 18.08, 18.2, 18.27, 19.29, 18.25] +326.645 +16.33225 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 253108, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.677898168563843, 'TIME_S_1KI': 0.0421871223689644, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1070.6826658773423, 'W': 74.61, 'J_1KI': 4.230141543836395, 'W_1KI': 0.2947753528138186, 'W_D': 58.27775, 'J_D': 836.3084939194918, 'W_D_1KI': 0.23024855002607583, 'J_D_1KI': 0.0009096849962311576} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json new file mode 100644 index 0000000..090450b --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 194593, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.473386764526367, "TIME_S_1KI": 0.053822011914747024, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 
1083.1964892935753, "W": 79.61, "J_1KI": 5.566472017459905, "W_1KI": 0.40911029687604383, "W_D": 62.8515, "J_D": 855.175532556653, "W_D_1KI": 0.3229895217196919, "J_D_1KI": 0.001659820865702733} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output new file mode 100644 index 0000000..8858416 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.020806312561035156} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 7, 14, ..., 99985, 99993, + 100000]), + col_indices=tensor([5438, 7119, 8479, ..., 6797, 6979, 8109]), + values=tensor([0.2056, 0.5255, 0.6332, ..., 0.1682, 0.9365, 0.4633]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.0316, 0.8438, 0.1562, ..., 0.6730, 0.8332, 0.9126]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 0.020806312561035156 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '50465', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.723015785217285} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 18, 34, ..., 99978, 99989, + 100000]), + col_indices=tensor([ 818, 1321, 1616, ..., 5603, 7366, 9704]), + values=tensor([0.8713, 0.7316, 0.2331, ..., 0.6687, 0.3725, 0.7818]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.5742, 0.6233, 0.0987, ..., 0.1452, 0.2067, 0.1195]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 2.723015785217285 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '194593', '-ss', '10000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 100000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.473386764526367} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 16, ..., 99982, 99989, + 100000]), + col_indices=tensor([1034, 3380, 4243, ..., 7428, 9116, 9600]), + values=tensor([0.4227, 0.1092, 0.7794, ..., 0.2113, 0.3090, 0.9237]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.7442, 0.3504, 0.6358, ..., 0.6138, 0.7536, 0.9226]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.473386764526367 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 11, 16, ..., 99982, 99989, + 100000]), + col_indices=tensor([1034, 3380, 4243, ..., 7428, 9116, 9600]), + values=tensor([0.4227, 0.1092, 0.7794, ..., 0.2113, 0.3090, 0.9237]), + size=(10000, 10000), nnz=100000, layout=torch.sparse_csr) +tensor([0.7442, 0.3504, 0.6358, ..., 0.6138, 0.7536, 0.9226]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 100000 +Density: 0.001 +Time: 10.473386764526367 seconds + +[19.08, 18.28, 17.95, 17.9, 19.17, 18.22, 19.03, 18.14, 18.71, 20.45] +[79.61] +13.606286764144897 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 194593, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.473386764526367, 'TIME_S_1KI': 0.053822011914747024, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1083.1964892935753, 'W': 79.61} +[19.08, 18.28, 17.95, 17.9, 19.17, 18.22, 19.03, 18.14, 18.71, 20.45, 18.56, 18.36, 18.2, 17.96, 18.09, 18.17, 22.32, 18.37, 18.26, 17.99] +335.17 +16.7585 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 194593, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 100000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.473386764526367, 'TIME_S_1KI': 0.053822011914747024, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1083.1964892935753, 'W': 79.61, 'J_1KI': 5.566472017459905, 'W_1KI': 0.40911029687604383, 'W_D': 62.8515, 'J_D': 855.175532556653, 'W_D_1KI': 0.3229895217196919, 'J_D_1KI': 0.001659820865702733} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json new file mode 100644 index 0000000..8c6811d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 57740, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.343472957611084, "TIME_S_1KI": 0.17913877654331634, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1219.6624715328217, "W": 87.4, "J_1KI": 21.123354200429883, "W_1KI": 1.5136820228611017, "W_D": 71.01700000000001, "J_D": 991.0385553872587, "W_D_1KI": 1.2299445791479044, "J_D_1KI": 0.021301430189606934} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output new file mode 100644 index 0000000..ff97c3a --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 0.03377032279968262} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 110, 219, ..., 999820, + 999911, 1000000]), + col_indices=tensor([ 0, 38, 265, ..., 9703, 9904, 9960]), + values=tensor([0.3683, 0.9828, 0.4174, ..., 0.4331, 0.2376, 0.7467]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8519, 0.1445, 0.2230, ..., 0.9278, 0.7204, 0.3614]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 0.03377032279968262 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '31092', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 5.654046535491943} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 102, 199, ..., 999795, + 999900, 1000000]), + col_indices=tensor([ 113, 165, 189, ..., 9912, 9940, 9996]), + values=tensor([0.2048, 0.9236, 0.8269, ..., 0.2195, 0.4387, 0.6731]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.8364, 0.2950, 0.2365, ..., 0.2102, 0.7661, 0.3156]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 5.654046535491943 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '57740', '-ss', '10000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.343472957611084} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 106, 216, ..., 999804, + 999899, 1000000]), + col_indices=tensor([ 61, 88, 117, ..., 9666, 9676, 9799]), + values=tensor([0.7050, 0.8533, 0.9508, ..., 0.3667, 0.6991, 0.8071]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.9939, 0.1893, 0.9694, ..., 0.0779, 0.3428, 0.9229]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.343472957611084 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. 
(Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 106, 216, ..., 999804, + 999899, 1000000]), + col_indices=tensor([ 61, 88, 117, ..., 9666, 9676, 9799]), + values=tensor([0.7050, 0.8533, 0.9508, ..., 0.3667, 0.6991, 0.8071]), + size=(10000, 10000), nnz=1000000, layout=torch.sparse_csr) +tensor([0.9939, 0.1893, 0.9694, ..., 0.0779, 0.3428, 0.9229]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000000 +Density: 0.01 +Time: 10.343472957611084 seconds + +[18.73, 18.07, 18.16, 18.31, 18.98, 17.81, 18.18, 18.17, 18.06, 18.83] +[87.4] +13.95494818687439 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57740, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.343472957611084, 'TIME_S_1KI': 0.17913877654331634, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1219.6624715328217, 'W': 87.4} +[18.73, 18.07, 18.16, 18.31, 18.98, 17.81, 18.18, 18.17, 18.06, 18.83, 18.31, 18.07, 18.19, 17.94, 18.17, 18.13, 18.04, 18.09, 18.22, 18.27] +327.65999999999997 +16.383 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 57740, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.343472957611084, 'TIME_S_1KI': 0.17913877654331634, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1219.6624715328217, 'W': 87.4, 'J_1KI': 21.123354200429883, 'W_1KI': 1.5136820228611017, 'W_D': 71.01700000000001, 'J_D': 991.0385553872587, 'W_D_1KI': 1.2299445791479044, 'J_D_1KI': 0.021301430189606934} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json new file mode 100644 index 0000000..9882866 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8902, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.860300064086914, "TIME_S_1KI": 1.2199842803961933, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1340.0520877552033, "W": 84.52, "J_1KI": 150.53382248429602, "W_1KI": 9.49449561896203, "W_D": 68.1745, "J_D": 1080.896605024457, "W_D_1KI": 7.658335205571781, "J_D_1KI": 0.8602937773053} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output new file mode 100644 index 0000000..266a604 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 0.13720321655273438} + 
+/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 497, 987, ..., 4998984, + 4999490, 5000000]), + col_indices=tensor([ 31, 32, 48, ..., 9945, 9978, 9990]), + values=tensor([0.2379, 0.1839, 0.8156, ..., 0.3545, 0.1897, 0.0490]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7954, 0.2457, 0.2337, ..., 0.1008, 0.2602, 0.7172]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 0.13720321655273438 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7652', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 9.025035858154297} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 488, 990, ..., 4999027, + 4999510, 5000000]), + col_indices=tensor([ 13, 24, 30, ..., 9939, 9983, 9997]), + values=tensor([0.8521, 0.2131, 0.0790, ..., 0.0763, 0.7991, 0.5452]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7324, 0.5581, 0.6877, ..., 0.3893, 0.7172, 0.4223]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 9.025035858154297 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8902', '-ss', '10000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.860300064086914} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 512, 999, ..., 4999039, + 4999553, 5000000]), + col_indices=tensor([ 40, 52, 78, ..., 9943, 9982, 9986]), + values=tensor([0.6271, 0.3251, 0.6536, ..., 0.8006, 0.2414, 0.7322]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7511, 0.5744, 0.2092, ..., 0.0810, 0.1870, 0.5605]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.860300064086914 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 512, 999, ..., 4999039, + 4999553, 5000000]), + col_indices=tensor([ 40, 52, 78, ..., 9943, 9982, 9986]), + values=tensor([0.6271, 0.3251, 0.6536, ..., 0.8006, 0.2414, 0.7322]), + size=(10000, 10000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7511, 0.5744, 0.2092, ..., 0.0810, 0.1870, 0.5605]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000000 +Density: 0.05 +Time: 10.860300064086914 seconds + +[18.66, 18.42, 18.15, 17.87, 18.05, 18.01, 18.31, 17.87, 17.97, 18.0] +[84.52] +15.854851961135864 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8902, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.860300064086914, 'TIME_S_1KI': 1.2199842803961933, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1340.0520877552033, 'W': 84.52} +[18.66, 18.42, 18.15, 17.87, 18.05, 18.01, 18.31, 17.87, 17.97, 18.0, 18.28, 17.95, 18.41, 17.97, 19.13, 18.12, 18.3, 17.86, 18.09, 17.92] +326.91 +16.3455 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8902, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.860300064086914, 'TIME_S_1KI': 1.2199842803961933, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1340.0520877552033, 'W': 84.52, 'J_1KI': 150.53382248429602, 'W_1KI': 9.49449561896203, 'W_D': 68.1745, 'J_D': 1080.896605024457, 'W_D_1KI': 7.658335205571781, 'J_D_1KI': 0.8602937773053} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json new file mode 100644 index 0000000..4142b7d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2952, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.514222383499146, "TIME_S_1KI": 3.5617284496948325, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1409.0968302583694, "W": 80.82, "J_1KI": 477.3363246132688, "W_1KI": 27.378048780487802, "W_D": 64.16999999999999, "J_D": 1118.8040534234044, "W_D_1KI": 21.737804878048774, "J_D_1KI": 7.363755039989422} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output new file mode 100644 index 0000000..f33aa65 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 
10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.3909003734588623} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 946, 1929, ..., 9998013, + 9999030, 10000000]), + col_indices=tensor([ 1, 29, 66, ..., 9951, 9961, 9963]), + values=tensor([0.1920, 0.8019, 0.0618, ..., 0.8349, 0.9652, 0.3956]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.5191, 0.4835, 0.4753, ..., 0.1633, 0.4541, 0.9422]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 0.3909003734588623 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2686', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 9.552263259887695} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1009, 2066, ..., 9998034, + 9999022, 10000000]), + col_indices=tensor([ 1, 6, 11, ..., 9982, 9996, 9999]), + values=tensor([0.5825, 0.5025, 0.2695, ..., 0.8369, 0.3596, 0.5616]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.8229, 0.3714, 0.9138, ..., 0.5827, 0.9903, 0.1706]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 9.552263259887695 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2952', '-ss', '10000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.514222383499146} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1012, 2027, ..., 9998023, + 9999029, 10000000]), + col_indices=tensor([ 20, 21, 65, ..., 9939, 9966, 9982]), + values=tensor([0.6149, 0.2165, 0.5741, ..., 0.9222, 0.5603, 0.9724]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1720, 0.3458, 0.5186, ..., 0.7493, 0.6588, 0.1643]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.514222383499146 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1012, 2027, ..., 9998023, + 9999029, 10000000]), + col_indices=tensor([ 20, 21, 65, ..., 9939, 9966, 9982]), + values=tensor([0.6149, 0.2165, 0.5741, ..., 0.9222, 0.5603, 0.9724]), + size=(10000, 10000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.1720, 0.3458, 0.5186, ..., 0.7493, 0.6588, 0.1643]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 10000000 +Density: 0.1 +Time: 10.514222383499146 seconds + +[18.22, 18.02, 18.22, 18.83, 18.28, 18.32, 18.36, 18.07, 18.03, 17.97] +[80.82] +17.435001611709595 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2952, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.514222383499146, 'TIME_S_1KI': 3.5617284496948325, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1409.0968302583694, 'W': 80.82} +[18.22, 18.02, 18.22, 18.83, 18.28, 18.32, 18.36, 18.07, 18.03, 17.97, 19.01, 19.01, 17.78, 18.5, 20.32, 20.23, 18.29, 18.21, 17.96, 17.94] +333.0 +16.65 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2952, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.514222383499146, 'TIME_S_1KI': 3.5617284496948325, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1409.0968302583694, 'W': 80.82, 'J_1KI': 477.3363246132688, 'W_1KI': 27.378048780487802, 'W_D': 64.16999999999999, 'J_D': 1118.8040534234044, 'W_D_1KI': 21.737804878048774, 'J_D_1KI': 7.363755039989422} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.json new file mode 100644 index 0000000..047807c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1497, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.375812530517578, "TIME_S_1KI": 6.931070494667721, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2016.6524360942842, "W": 63.56, "J_1KI": 1347.1292158278452, "W_1KI": 42.458249832999336, "W_D": 46.965500000000006, "J_D": 1490.1367210098506, "W_D_1KI": 31.373079492317977, "J_D_1KI": 
20.95730093007213} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.output new file mode 100644 index 0000000..e1c6993 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.2.output @@ -0,0 +1,65 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 0.7011280059814453} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1997, 4011, ..., 19995978, + 19998029, 20000000]), + col_indices=tensor([ 0, 6, 10, ..., 9977, 9981, 9997]), + values=tensor([0.1409, 0.7742, 0.3684, ..., 0.6455, 0.2528, 0.1779]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.4126, 0.0545, 0.9867, ..., 0.1672, 0.9147, 0.2153]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 0.7011280059814453 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1497', '-ss', '10000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 20000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.375812530517578} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2004, 3927, ..., 19995948, + 19997998, 20000000]), + col_indices=tensor([ 0, 7, 8, ..., 9981, 9983, 9993]), + values=tensor([0.6769, 0.3987, 0.0257, ..., 0.1977, 0.2040, 0.8027]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.3084, 0.6863, 0.1716, ..., 0.2861, 0.5214, 0.4353]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.375812530517578 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2004, 3927, ..., 19995948, + 19997998, 20000000]), + col_indices=tensor([ 0, 7, 8, ..., 9981, 9983, 9993]), + values=tensor([0.6769, 0.3987, 0.0257, ..., 0.1977, 0.2040, 0.8027]), + size=(10000, 10000), nnz=20000000, layout=torch.sparse_csr) +tensor([0.3084, 0.6863, 0.1716, ..., 0.2861, 0.5214, 0.4353]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 20000000 +Density: 0.2 +Time: 10.375812530517578 seconds + +[18.42, 17.88, 18.24, 17.8, 17.91, 17.84, 18.25, 18.2, 17.93, 17.84] +[63.56] +31.728326559066772 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.375812530517578, 'TIME_S_1KI': 6.931070494667721, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2016.6524360942842, 'W': 63.56} +[18.42, 17.88, 18.24, 17.8, 17.91, 17.84, 18.25, 18.2, 17.93, 17.84, 18.76, 17.93, 18.32, 21.74, 19.38, 18.29, 18.56, 18.06, 18.67, 18.76] +331.89 +16.5945 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1497, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 20000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.375812530517578, 'TIME_S_1KI': 6.931070494667721, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2016.6524360942842, 'W': 63.56, 'J_1KI': 1347.1292158278452, 'W_1KI': 42.458249832999336, 'W_D': 46.965500000000006, 'J_D': 1490.1367210098506, 'W_D_1KI': 31.373079492317977, 'J_D_1KI': 20.95730093007213} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.json new file mode 100644 index 0000000..b205393 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 949, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.941918134689331, "TIME_S_1KI": 11.529945347407093, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 3564.162018098831, "W": 52.14, "J_1KI": 3755.7028641715815, "W_1KI": 54.94204425711275, "W_D": 35.83725, "J_D": 2449.7461696032283, "W_D_1KI": 37.763171759747095, "J_D_1KI": 39.79259405663551} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.output new file mode 100644 index 0000000..d986f3d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_0.3.output @@ -0,0 +1,105 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 1.9010562896728516} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2954, 5989, ..., 29993992, + 29996937, 30000000]), + col_indices=tensor([ 2, 3, 4, ..., 9991, 9993, 9999]), + values=tensor([0.8641, 0.5764, 0.3491, ..., 0.5822, 0.6256, 0.4859]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.5324, 0.9227, 0.4901, ..., 0.4747, 0.1770, 0.2536]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 1.9010562896728516 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '552', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 6.5163209438323975} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3039, 6048, ..., 29994088, + 29997031, 30000000]), + col_indices=tensor([ 5, 17, 19, ..., 9983, 9988, 9994]), + values=tensor([0.5897, 0.1355, 0.4586, ..., 0.2138, 0.9666, 0.8141]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.3973, 0.5491, 0.6398, ..., 0.0595, 0.1069, 0.4910]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 6.5163209438323975 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '889', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.831693172454834} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3022, 6006, ..., 29994154, + 29996992, 30000000]), + col_indices=tensor([ 2, 5, 11, ..., 9993, 9994, 9997]), + values=tensor([0.4549, 0.7646, 0.3501, ..., 0.7301, 0.5346, 0.2783]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.0238, 0.3406, 0.5500, ..., 0.0227, 0.0108, 0.7785]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 9.831693172454834 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '949', '-ss', '10000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 30000000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.941918134689331} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3007, 5955, ..., 29993893, + 29996918, 30000000]), + col_indices=tensor([ 0, 1, 2, ..., 9996, 9997, 9998]), + values=tensor([0.2348, 0.5341, 0.6896, ..., 0.7208, 0.8300, 0.7790]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.7258, 0.9160, 0.6029, ..., 0.0530, 0.7513, 0.2296]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.941918134689331 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 3007, 5955, ..., 29993893, + 29996918, 30000000]), + col_indices=tensor([ 0, 1, 2, ..., 9996, 9997, 9998]), + values=tensor([0.2348, 0.5341, 0.6896, ..., 0.7208, 0.8300, 0.7790]), + size=(10000, 10000), nnz=30000000, layout=torch.sparse_csr) +tensor([0.7258, 0.9160, 0.6029, ..., 0.0530, 0.7513, 0.2296]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 30000000 +Density: 0.3 +Time: 10.941918134689331 seconds + +[18.62, 18.03, 18.0, 17.99, 18.26, 17.85, 18.27, 17.94, 18.3, 18.09] +[52.14] +68.35753774642944 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 949, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.941918134689331, 'TIME_S_1KI': 11.529945347407093, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3564.162018098831, 'W': 52.14} +[18.62, 18.03, 18.0, 17.99, 18.26, 17.85, 18.27, 17.94, 18.3, 18.09, 18.58, 17.83, 17.98, 17.87, 18.18, 18.09, 18.02, 17.83, 18.92, 18.1] +326.055 +16.30275 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 949, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 30000000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.941918134689331, 'TIME_S_1KI': 11.529945347407093, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 3564.162018098831, 'W': 52.14, 'J_1KI': 3755.7028641715815, 'W_1KI': 54.94204425711275, 'W_D': 35.83725, 'J_D': 2449.7461696032283, 'W_D_1KI': 37.763171759747095, 'J_D_1KI': 39.79259405663551} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json new file mode 100644 index 0000000..00c8b98 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 284305, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.350545883178711, "TIME_S_1KI": 0.03640648558125503, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1004.1447986841201, "W": 73.05, "J_1KI": 3.5319280304043903, "W_1KI": 0.2569423682313009, "W_D": 56.574, "J_D": 777.6658157529831, "W_D_1KI": 0.19899052074356766, "J_D_1KI": 0.0006999191739278861} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output new file mode 100644 index 0000000..cbcf9b7 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_1e-05.output @@ -0,0 +1,1307 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.019997835159301758} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 999, 1000, 1000]), + col_indices=tensor([7109, 2385, 2417, 5435, 3828, 4759, 3881, 3125, 6359, + 8100, 8295, 5616, 9929, 4079, 7787, 359, 8328, 6707, + 7136, 309, 5171, 1453, 2735, 9306, 3785, 4541, 8081, + 2235, 61, 4688, 6276, 2644, 5471, 837, 3908, 9598, + 9530, 9506, 8707, 10, 4532, 534, 4561, 3707, 1054, + 2351, 8898, 694, 8085, 1481, 2176, 6847, 2046, 5123, + 1350, 2400, 2135, 4037, 9549, 4749, 612, 1214, 9582, + 9826, 1033, 8057, 3787, 6733, 5484, 5013, 513, 3997, + 9224, 768, 6259, 8198, 4081, 8587, 7654, 6997, 3629, + 5266, 8440, 8098, 4353, 7259, 6119, 7369, 9095, 5623, + 7383, 8433, 7910, 8608, 4139, 6690, 2987, 5688, 7896, + 7911, 6242, 6429, 722, 2362, 2947, 7072, 2748, 1401, + 7063, 9804, 2718, 4440, 4754, 9463, 9666, 7272, 8681, + 5892, 7815, 4523, 800, 9689, 6398, 686, 9595, 7223, + 252, 5697, 861, 6369, 1019, 2052, 8774, 3827, 3582, + 5597, 1734, 6986, 7447, 6272, 3623, 3565, 4417, 277, + 3843, 3333, 5620, 6442, 4003, 7391, 9554, 8526, 2101, + 9854, 8955, 4012, 5691, 3570, 9292, 3509, 3148, 5405, + 2599, 5760, 6655, 1397, 5002, 378, 9560, 8896, 7138, + 8297, 8567, 8541, 8058, 9553, 2217, 475, 973, 6966, + 8382, 9130, 3653, 6730, 5404, 1171, 6230, 4421, 8787, + 1013, 7148, 6496, 6029, 92, 1710, 3227, 4417, 1415, + 6028, 7960, 1123, 8741, 7693, 8010, 7518, 4472, 6141, + 7642, 3828, 9726, 5089, 1835, 1514, 2431, 8797, 5042, + 676, 3698, 5937, 3628, 6068, 1887, 5909, 6973, 5973, + 477, 5895, 1885, 7449, 3691, 1472, 679, 7389, 1532, + 192, 1918, 9427, 2430, 3048, 5494, 2607, 9231, 1796, + 5499, 9054, 3543, 8995, 5396, 1535, 682, 2719, 1631, + 5250, 5969, 556, 4778, 6509, 839, 3254, 5655, 3541, + 9791, 8791, 8465, 9659, 643, 5662, 6522, 1226, 6110, + 8738, 2729, 9937, 1594, 5216, 1576, 322, 138, 1174, + 8026, 182, 2386, 8814, 3055, 5706, 1632, 3053, 1250, + 5293, 4479, 2534, 9457, 8636, 4963, 8810, 6513, 3303, + 2068, 7365, 4824, 6966, 3195, 1321, 1454, 3116, 1891, + 4602, 103, 1064, 1824, 4653, 5493, 2856, 7214, 4796, + 8079, 259, 3544, 8673, 5026, 1149, 8318, 1462, 4443, + 1549, 5100, 9230, 1260, 6291, 510, 2869, 7765, 852, + 2488, 6706, 4128, 5493, 7619, 4287, 6605, 7113, 7503, + 9635, 3819, 3422, 9417, 6701, 4678, 7292, 2009, 2745, + 5180, 902, 7517, 8196, 9257, 3699, 5950, 411, 2498, + 9110, 1182, 7464, 8023, 3683, 9203, 4592, 2000, 9730, + 3403, 953, 7193, 6806, 597, 9712, 7122, 7344, 9620, + 5762, 2940, 6890, 5706, 4675, 1376, 4625, 5803, 9790, + 8162, 563, 3992, 4806, 1869, 419, 4808, 6093, 8645, + 7788, 7684, 43, 5821, 4794, 4820, 4004, 9919, 4616, + 3804, 9946, 5014, 1305, 7851, 5946, 4091, 2485, 728, + 6971, 3549, 5585, 2412, 5888, 4457, 2799, 9818, 9618, + 2750, 2922, 131, 8551, 3306, 311, 2869, 5197, 1613, + 4841, 1155, 9841, 5868, 9266, 5281, 1337, 8753, 4768, + 9229, 9711, 9344, 3792, 7689, 9243, 3075, 8565, 7580, + 5999, 5247, 6273, 1505, 5520, 7307, 2076, 4532, 2693, + 6116, 7270, 3433, 8912, 5182, 9345, 8104, 6702, 450, + 9103, 8031, 6959, 6210, 5641, 9318, 4784, 626, 6063, + 6803, 5977, 7007, 8105, 2597, 6168, 9640, 5148, 1688, + 1007, 3856, 2440, 1610, 46, 3388, 8832, 9487, 77, + 9414, 713, 2259, 3015, 2404, 4677, 2375, 9203, 6270, + 6686, 5362, 8403, 149, 4599, 8799, 3044, 9352, 6320, + 9182, 3494, 5862, 697, 9208, 5897, 6456, 742, 9438, + 8241, 7310, 2664, 
1140, 2638, 6331, 2836, 2339, 5416, + 6386, 5007, 3424, 4140, 8418, 2768, 7025, 8519, 4071, + 2786, 7259, 6812, 4479, 9408, 3790, 6230, 7656, 9695, + 7286, 975, 8931, 3543, 222, 7839, 9094, 3012, 9084, + 5657, 4120, 1509, 8871, 3510, 6988, 7621, 3914, 9254, + 7142, 2284, 5420, 893, 5854, 8200, 2689, 257, 9233, + 367, 2764, 2893, 1881, 3564, 7955, 9774, 2290, 4849, + 2776, 7436, 5689, 7041, 6283, 9905, 9032, 4403, 634, + 1762, 6101, 1433, 5426, 5594, 1114, 903, 7462, 6667, + 5106, 7360, 5373, 7882, 4664, 8370, 3731, 173, 2530, + 2913, 8577, 9326, 1993, 372, 3877, 3134, 5685, 5591, + 3164, 8523, 4532, 1524, 5876, 2337, 1910, 6825, 1701, + 3345, 9908, 1129, 356, 162, 6909, 8511, 9623, 5538, + 2227, 7108, 5801, 5376, 9922, 4394, 3505, 955, 7447, + 1821, 2264, 2665, 4362, 6017, 10, 1864, 589, 9835, + 2738, 3409, 7929, 445, 7809, 8506, 4478, 5175, 1642, + 6654, 4156, 4637, 493, 8641, 4849, 9459, 1260, 1722, + 3709, 7871, 294, 9014, 3592, 2234, 1761, 3148, 2059, + 5761, 4292, 2390, 8145, 3925, 4523, 4323, 5192, 2668, + 5700, 4434, 4088, 1368, 2304, 5701, 1648, 4535, 7667, + 1044, 9357, 9493, 3733, 2278, 133, 5541, 1244, 1991, + 6410, 8224, 9359, 3118, 3172, 8411, 5956, 6213, 2834, + 5982, 6131, 5315, 9976, 9217, 3251, 7863, 191, 6442, + 949, 3581, 6996, 1148, 1590, 3581, 8669, 6526, 7295, + 8556, 631, 6698, 4352, 244, 9874, 5590, 8576, 2401, + 1573, 3011, 2458, 9030, 4245, 4065, 2172, 6903, 7079, + 180, 6734, 6853, 2898, 2302, 7261, 5268, 6537, 9604, + 2335, 8773, 923, 6612, 3305, 4601, 6384, 7525, 2043, + 5654, 6990, 6475, 3112, 7425, 5617, 5040, 5416, 1325, + 6773, 9569, 5311, 6897, 8039, 8302, 1711, 1443, 196, + 4009, 1601, 330, 2436, 6513, 5857, 7676, 6398, 2101, + 1528, 4145, 5694, 8965, 6555, 727, 1560, 7665, 4841, + 9592, 6158, 320, 8749, 6157, 8725, 3024, 8319, 4003, + 8281, 4317, 3464, 6228, 1404, 1578, 4680, 708, 1351, + 5822, 8145, 2056, 581, 3036, 7910, 1186, 9116, 6024, + 1434, 1121, 2457, 6394, 749, 7915, 803, 8518, 7707, + 4500, 2148, 2294, 5223, 1435, 4429, 6437, 2976, 3334, + 8071, 6080, 8489, 4989, 9346, 5095, 7683, 5920, 8664, + 8772, 9150, 8104, 7035, 3166, 4753, 5176, 7494, 3694, + 9055, 8323, 5783, 9581, 6133, 7181, 4449, 117, 1431, + 1310, 266, 1137, 641, 4783, 4011, 9704, 8990, 9913, + 2411, 6941, 1035, 6630, 99, 4087, 4073, 98, 8673, + 9180, 1336, 2464, 8321, 3258, 7885, 1201, 4488, 1290, + 3441, 3470, 3653, 1653, 1475, 9906, 7698, 6670, 6752, + 2289, 4337, 3018, 852, 1912, 1673, 4537, 1138, 4596, + 7841, 7168, 8678, 7894, 9826, 4471, 2161, 7831, 2817, + 1699, 5434, 614, 6337, 9902, 5326, 7325, 5049, 225, + 8836, 5786, 4286, 4756, 4125, 2729, 2426, 1236, 6028, + 1462, 9172, 3237, 1796, 628, 5307, 403, 6318, 2926, + 4691, 4352, 6358, 7028, 7697, 1313, 5381, 6924, 8207, + 3263, 6004, 9719, 5541, 3521, 162, 72, 8395, 7836, + 2764, 8356, 2955, 3340, 3636, 5608, 97, 858, 6715, + 7963, 8977, 8904, 1286, 2929, 52, 9172, 966, 8016, + 7012]), + values=tensor([0.0046, 0.9711, 0.5783, 0.3964, 0.2053, 0.1726, 0.9437, + 0.5542, 0.1093, 0.4547, 0.1845, 0.3445, 0.8403, 0.4506, + 0.0902, 0.5378, 0.5966, 0.3221, 0.2087, 0.6069, 0.9250, + 0.3018, 0.8874, 0.5907, 0.8418, 0.6404, 0.9959, 0.3059, + 0.6226, 0.0368, 0.8190, 0.1303, 0.5256, 0.5555, 0.1522, + 0.6237, 0.0781, 0.5043, 0.0689, 0.0302, 0.2280, 0.2510, + 0.9391, 0.7562, 0.6143, 0.6495, 0.3458, 0.6716, 0.1218, + 0.4951, 0.5813, 0.4377, 0.3803, 0.5588, 0.7278, 0.7107, + 0.7077, 0.3647, 0.7869, 0.9654, 0.7183, 0.8252, 0.4085, + 0.6874, 0.5162, 0.6114, 0.2250, 0.1783, 0.2852, 0.3965, + 0.8459, 0.5182, 0.7181, 0.4470, 0.6154, 0.2685, 
0.7435, + 0.8775, 0.0173, 0.8189, 0.0651, 0.4964, 0.7706, 0.1775, + 0.6594, 0.4169, 0.0757, 0.1719, 0.6911, 0.1189, 0.3832, + 0.8399, 0.3009, 0.9319, 0.3858, 0.4031, 0.3018, 0.8705, + 0.5963, 0.7087, 0.4444, 0.2108, 0.7106, 0.9488, 0.1619, + 0.3416, 0.0156, 0.2551, 0.5023, 0.9762, 0.6355, 0.9340, + 0.8159, 0.3185, 0.7462, 0.9256, 0.7851, 0.0782, 0.8295, + 0.8717, 0.6926, 0.4574, 0.6799, 0.4534, 0.6691, 0.9648, + 0.8736, 0.5727, 0.3189, 0.4649, 0.9263, 0.7281, 0.0818, + 0.5704, 0.3615, 0.0346, 0.8665, 0.5480, 0.2733, 0.6432, + 0.8981, 0.9119, 0.2568, 0.5919, 0.8320, 0.2875, 0.1674, + 0.5919, 0.1054, 0.6833, 0.1399, 0.6362, 0.3561, 0.7467, + 0.9977, 0.1907, 0.4833, 0.7635, 0.1246, 0.8287, 0.5890, + 0.3699, 0.5934, 0.4119, 0.6383, 0.0205, 0.6354, 0.0025, + 0.3055, 0.5465, 0.8335, 0.9974, 0.4798, 0.9157, 0.2277, + 0.9812, 0.4215, 0.6840, 0.5241, 0.0527, 0.5703, 0.9916, + 0.4607, 0.4300, 0.6671, 0.7892, 0.1737, 0.1225, 0.0598, + 0.8796, 0.4030, 0.2456, 0.1589, 0.8107, 0.8549, 0.7206, + 0.2242, 0.4514, 0.2530, 0.9423, 0.8787, 0.4399, 0.3810, + 0.5901, 0.7345, 0.4668, 0.3478, 0.9963, 0.2961, 0.9270, + 0.0301, 0.4210, 0.5019, 0.9625, 0.5041, 0.7790, 0.5782, + 0.6252, 0.3890, 0.3274, 0.9770, 0.8231, 0.4966, 0.5155, + 0.5583, 0.1196, 0.3282, 0.5482, 0.7136, 0.0064, 0.9655, + 0.6750, 0.3247, 0.2048, 0.1243, 0.1485, 0.7152, 0.4737, + 0.3270, 0.2946, 0.1922, 0.2685, 0.5926, 0.8430, 0.0407, + 0.4774, 0.0717, 0.6093, 0.7594, 0.9792, 0.5158, 0.2854, + 0.0632, 0.5014, 0.9048, 0.8406, 0.1082, 0.0133, 0.0800, + 0.2800, 0.2617, 0.2001, 0.9723, 0.1130, 0.9439, 0.4990, + 0.9070, 0.6093, 0.2296, 0.6950, 0.7193, 0.1734, 0.8450, + 0.0447, 0.7501, 0.4548, 0.6695, 0.9652, 0.8606, 0.7535, + 0.0773, 0.7629, 0.0103, 0.0948, 0.7605, 0.9538, 0.4662, + 0.4806, 0.8728, 0.0392, 0.7303, 0.8236, 0.3805, 0.4985, + 0.1028, 0.6346, 0.5210, 0.1024, 0.8731, 0.5873, 0.7105, + 0.2108, 0.4969, 0.9031, 0.1387, 0.8343, 0.4993, 0.5455, + 0.3538, 0.1324, 0.9783, 0.5165, 0.4133, 0.9455, 0.6371, + 0.7301, 0.2826, 0.6823, 0.7616, 0.3904, 0.8366, 0.6816, + 0.9409, 0.7877, 0.5158, 0.1541, 0.1117, 0.4563, 0.1217, + 0.9525, 0.4021, 0.7397, 0.2559, 0.7187, 0.5250, 0.4043, + 0.2738, 0.9184, 0.0736, 0.1076, 0.2850, 0.5552, 0.1645, + 0.2585, 0.5505, 0.6136, 0.2864, 0.1428, 0.3411, 0.7240, + 0.2285, 0.7929, 0.6109, 0.9222, 0.1483, 0.5863, 0.2524, + 0.4750, 0.1637, 0.5790, 0.5447, 0.3454, 0.7649, 0.1908, + 0.3288, 0.5701, 0.5499, 0.0369, 0.9922, 0.3383, 0.4741, + 0.1304, 0.9901, 0.7400, 0.9626, 0.4575, 0.2628, 0.5791, + 0.5795, 0.8287, 0.9675, 0.5851, 0.5863, 0.4631, 0.4520, + 0.6908, 0.8302, 0.4709, 0.1620, 0.2509, 0.4523, 0.9606, + 0.8422, 0.9930, 0.3544, 0.9618, 0.4633, 0.8424, 0.3012, + 0.5074, 0.7673, 0.9314, 0.0486, 0.6586, 0.7796, 0.8682, + 0.1246, 0.1399, 0.3038, 0.0213, 0.2975, 0.1825, 0.5585, + 0.5935, 0.0441, 0.6723, 0.6869, 0.1223, 0.3975, 0.4510, + 0.4005, 0.3733, 0.5220, 0.4286, 0.5743, 0.5693, 0.9693, + 0.4086, 0.3833, 0.3952, 0.3540, 0.0647, 0.9855, 0.3131, + 0.1430, 0.2211, 0.2883, 0.2379, 0.8994, 0.5349, 0.6071, + 0.0012, 0.7795, 0.9085, 0.8059, 0.8457, 0.2047, 0.3553, + 0.4264, 0.4073, 0.7708, 0.1265, 0.5393, 0.9354, 0.1862, + 0.8447, 0.1265, 0.0465, 0.9024, 0.1589, 0.4910, 0.2636, + 0.3445, 0.1545, 0.4366, 0.0582, 0.3173, 0.5045, 0.7525, + 0.2332, 0.9149, 0.7252, 0.7029, 0.1697, 0.1214, 0.8720, + 0.7803, 0.4878, 0.7906, 0.7439, 0.1899, 0.9880, 0.0016, + 0.8939, 0.2914, 0.7113, 0.9239, 0.8221, 0.8266, 0.3684, + 0.5005, 0.8793, 0.5734, 0.2862, 0.4043, 0.7599, 0.4516, + 0.4459, 0.5388, 0.8147, 0.3911, 0.4150, 0.0336, 0.3314, + 
0.5237, 0.5194, 0.3540, 0.6803, 0.0259, 0.3157, 0.1146, + 0.7491, 0.8857, 0.2876, 0.6475, 0.0696, 0.1554, 0.9070, + 0.5700, 0.0259, 0.9973, 0.2760, 0.1917, 0.3491, 0.2604, + 0.2647, 0.7207, 0.8895, 0.6272, 0.5734, 0.1372, 0.5916, + 0.1230, 0.6040, 0.2042, 0.4307, 0.6738, 0.3775, 0.2795, + 0.7792, 0.4760, 0.5589, 0.1711, 0.0766, 0.4005, 0.1397, + 0.4940, 0.6769, 0.4257, 0.6752, 0.0242, 0.5821, 0.7391, + 0.1474, 0.0300, 0.9215, 0.5654, 0.7654, 0.6347, 0.1786, + 0.5402, 0.8466, 0.6369, 0.9853, 0.3023, 0.7607, 0.1105, + 0.3121, 0.2599, 0.7204, 0.9168, 0.3226, 0.5478, 0.0268, + 0.5417, 0.0114, 0.3890, 0.7421, 0.8261, 0.6148, 0.3356, + 0.9823, 0.4833, 0.8964, 0.0377, 0.3211, 0.7509, 0.8021, + 0.7647, 0.4392, 0.6333, 0.7826, 0.0068, 0.0507, 0.8981, + 0.1710, 0.5027, 0.0944, 0.1727, 0.3980, 0.1663, 0.4815, + 0.5099, 0.0482, 0.4204, 0.5662, 0.4082, 0.0087, 0.3299, + 0.7731, 0.2476, 0.4990, 0.2030, 0.3567, 0.4913, 0.1398, + 0.7595, 0.5107, 0.5222, 0.1537, 0.7700, 0.5986, 0.2341, + 0.1186, 0.9382, 0.4366, 0.4841, 0.6114, 0.1605, 0.4472, + 0.8581, 0.4342, 0.2576, 0.0087, 0.5861, 0.5826, 0.6503, + 0.7493, 0.7005, 0.0734, 0.4947, 0.2408, 0.7256, 0.1668, + 0.0344, 0.0192, 0.7524, 0.4331, 0.7951, 0.0257, 0.8771, + 0.8072, 0.0260, 0.4420, 0.0808, 0.6151, 0.0167, 0.8132, + 0.7864, 0.9344, 0.3589, 0.8588, 0.5190, 0.0941, 0.3937, + 0.8457, 0.0223, 0.2472, 0.0820, 0.6375, 0.0538, 0.4322, + 0.7601, 0.6627, 0.0062, 0.0971, 0.7569, 0.9276, 0.3232, + 0.8488, 0.9506, 0.0535, 0.2724, 0.3163, 0.3730, 0.1016, + 0.7062, 0.4016, 0.3210, 0.3024, 0.4988, 0.2671, 0.4939, + 0.4041, 0.2020, 0.5827, 0.9863, 0.2068, 0.6956, 0.4398, + 0.8649, 0.3693, 0.0382, 0.5895, 0.1246, 0.3779, 0.4021, + 0.5374, 0.0896, 0.5865, 0.0911, 0.1552, 0.7309, 0.7310, + 0.2105, 0.1533, 0.2641, 0.5318, 0.8937, 0.7014, 0.2487, + 0.1566, 0.4421, 0.8097, 0.3049, 0.0983, 0.7323, 0.3418, + 0.2636, 0.9905, 0.7297, 0.1602, 0.5620, 0.7455, 0.1122, + 0.2778, 0.8976, 0.4302, 0.8190, 0.8163, 0.5642, 0.1086, + 0.1870, 0.4408, 0.7743, 0.8301, 0.3106, 0.4607, 0.1685, + 0.0543, 0.2486, 0.7269, 0.7754, 0.8640, 0.8996, 0.1225, + 0.2326, 0.0306, 0.0742, 0.7702, 0.5675, 0.7799, 0.9378, + 0.5182, 0.8150, 0.0442, 0.1759, 0.3578, 0.2557, 0.3991, + 0.6127, 0.7081, 0.3617, 0.4428, 0.3630, 0.7568, 0.7306, + 0.3583, 0.4751, 0.1731, 0.4017, 0.8439, 0.5007, 0.7755, + 0.1364, 0.1487, 0.0013, 0.4861, 0.3305, 0.4277, 0.8493, + 0.7152, 0.9337, 0.8933, 0.3178, 0.0571, 0.4652, 0.1484, + 0.1146, 0.5186, 0.0231, 0.7337, 0.2879, 0.2599, 0.0899, + 0.0770, 0.6050, 0.7205, 0.0087, 0.4341, 0.8062, 0.5612, + 0.1963, 0.5542, 0.4445, 0.5707, 0.2487, 0.0387, 0.5727, + 0.6643, 0.9010, 0.1369, 0.6983, 0.6861, 0.3137, 0.4893, + 0.4623, 0.0331, 0.5923, 0.7494, 0.8065, 0.6913, 0.0840, + 0.0472, 0.5097, 0.9016, 0.0515, 0.3195, 0.9293, 0.7461, + 0.1002, 0.7266, 0.1789, 0.8944, 0.7784, 0.2704, 0.3854, + 0.4940, 0.9700, 0.7281, 0.8176, 0.6416, 0.3232, 0.8090, + 0.8313, 0.2872, 0.7163, 0.7261, 0.2980, 0.0117, 0.1334, + 0.9487, 0.8411, 0.1255, 0.1021, 0.4857, 0.9363, 0.6866, + 0.0451, 0.9201, 0.6535, 0.4626, 0.0913, 0.1620, 0.4656, + 0.4797, 0.5295, 0.5314, 0.4883, 0.4602, 0.8743, 0.5607, + 0.2065, 0.7862, 0.3051, 0.8028, 0.0092, 0.2581, 0.3033, + 0.5664, 0.6606, 0.8288, 0.0928, 0.1772, 0.1866, 0.3586, + 0.1155, 0.4118, 0.0623, 0.7029, 0.5408, 0.9844, 0.7491, + 0.3391, 0.6861, 0.5463, 0.9448, 0.4787, 0.4255, 0.1814, + 0.5560, 0.4709, 0.7970, 0.6626, 0.4346, 0.7159, 0.4083, + 0.2911, 0.8691, 0.8557, 0.2129, 0.4209, 0.7430, 0.6979, + 0.9694, 0.7088, 0.4947, 0.8038, 0.6058, 0.9515, 0.7892, + 0.7707, 0.4757, 
0.4024, 0.6333, 0.3033, 0.1606, 0.0297, + 0.7386, 0.5267, 0.5079, 0.4638, 0.7216, 0.8308, 0.8888, + 0.3272, 0.2089, 0.9250, 0.8630, 0.4374, 0.2899, 0.1274, + 0.6033, 0.2213, 0.0608, 0.3072, 0.5266, 0.1377, 0.2166, + 0.9680, 0.0325, 0.7625, 0.3333, 0.5851, 0.2286, 0.8361, + 0.7176, 0.8044, 0.5438, 0.9445, 0.8563, 0.5310, 0.1555, + 0.9713, 0.4299, 0.7669, 0.0890, 0.3225, 0.6580, 0.0724, + 0.4834, 0.7834, 0.9243, 0.1867, 0.6811, 0.0298, 0.4897, + 0.4802, 0.4453, 0.3359, 0.1067, 0.8065, 0.3394, 0.3666, + 0.4986, 0.5179, 0.7221, 0.2831, 0.8118, 0.0090]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.8851, 0.6493, 0.1793, ..., 0.2200, 0.7121, 0.0570]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 0.019997835159301758 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52505', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.9391217231750488} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5864, 6409, 3268, 9372, 8851, 1349, 7924, 3157, 113, + 9640, 7379, 4185, 6988, 7306, 9094, 3198, 1145, 6328, + 5627, 8866, 1416, 350, 9287, 2849, 3030, 612, 443, + 8128, 5292, 2326, 3640, 1427, 2108, 7969, 9609, 7236, + 449, 1646, 9388, 8979, 3518, 778, 6273, 9500, 148, + 8296, 6022, 1119, 7603, 6748, 5170, 7685, 82, 4394, + 1107, 6222, 8327, 9701, 1319, 6392, 2965, 9029, 77, + 7995, 2221, 994, 2914, 8588, 327, 853, 3415, 919, + 9810, 4408, 2821, 7082, 5424, 5361, 480, 245, 7586, + 5204, 9870, 329, 4522, 6404, 2012, 935, 9448, 4082, + 3761, 8854, 4617, 2954, 1700, 2460, 8360, 6564, 5100, + 3903, 5238, 470, 9756, 8808, 586, 4317, 2554, 4487, + 2746, 991, 8986, 854, 1896, 4114, 4630, 2853, 8275, + 2623, 6046, 9508, 843, 3243, 5691, 444, 3056, 2850, + 3932, 3623, 259, 8407, 4684, 5172, 642, 1359, 2285, + 474, 5560, 5107, 8311, 8207, 4317, 4153, 5714, 4003, + 4520, 3174, 6645, 1349, 9932, 49, 4730, 4119, 9928, + 924, 9713, 1216, 50, 1671, 1300, 8418, 6864, 8358, + 4746, 4247, 7492, 9246, 7928, 4332, 8375, 9814, 9129, + 6833, 517, 824, 8642, 884, 7236, 7434, 7005, 4885, + 1510, 7918, 3471, 5546, 6237, 6012, 5086, 1592, 1134, + 9459, 4579, 5292, 366, 2218, 1740, 6379, 1799, 5994, + 759, 9988, 1345, 1070, 3745, 943, 8461, 5082, 1933, + 443, 7004, 3136, 7242, 6373, 1139, 5779, 3999, 5632, + 9482, 7292, 5796, 4340, 662, 2787, 1120, 4116, 3039, + 1174, 4242, 5490, 1535, 6250, 1065, 51, 3412, 460, + 9840, 5194, 4843, 7403, 7445, 4382, 8727, 8774, 6609, + 3247, 8201, 755, 8291, 3275, 8064, 4388, 6768, 2576, + 7034, 7091, 7595, 8900, 6407, 6040, 2696, 6321, 4834, + 1840, 3927, 864, 6511, 7817, 7242, 3035, 6998, 4134, + 9868, 182, 4140, 6908, 717, 2392, 8434, 3505, 8667, + 4427, 1108, 8007, 8132, 9145, 9203, 8310, 7166, 6141, + 9370, 4104, 9680, 212, 8669, 6370, 4838, 4629, 457, + 90, 2059, 4579, 7993, 5081, 3590, 42, 8026, 1256, + 9078, 2167, 
2789, 4777, 2980, 8000, 7728, 4259, 788, + 9381, 4203, 4616, 6598, 6785, 9230, 3073, 7215, 4492, + 5558, 6560, 4239, 6465, 3254, 9443, 396, 9087, 9830, + 2837, 8876, 5671, 8685, 1134, 9981, 4038, 2521, 8217, + 3054, 3580, 3011, 2548, 4358, 9379, 9530, 6078, 4586, + 2961, 7702, 9843, 1093, 5655, 4332, 7551, 7559, 7229, + 9775, 8733, 7492, 4725, 2683, 5859, 6974, 5769, 413, + 7759, 3920, 4343, 8482, 2185, 3683, 5666, 5694, 9484, + 6805, 8125, 6011, 7452, 4625, 1996, 5674, 8346, 9829, + 5348, 6153, 4294, 7632, 675, 4714, 7735, 690, 8377, + 8366, 8956, 6093, 9462, 520, 8335, 6971, 9049, 3198, + 2406, 1525, 3742, 9835, 2100, 6818, 61, 7216, 7574, + 6125, 8245, 226, 3951, 8198, 6726, 1159, 3284, 4090, + 6559, 2427, 2922, 1917, 2184, 8419, 8320, 6172, 7937, + 3032, 5477, 1970, 2511, 9106, 3545, 5702, 8387, 6240, + 4527, 5936, 2989, 4798, 44, 5593, 8588, 9801, 6339, + 270, 4836, 4663, 7863, 5753, 5138, 1468, 2135, 8533, + 8874, 7550, 9948, 511, 8382, 5075, 5125, 621, 8037, + 2970, 7317, 955, 5187, 9266, 8012, 9116, 8287, 5819, + 6798, 6108, 9485, 7294, 6292, 3050, 9766, 3371, 6775, + 8624, 323, 9518, 993, 713, 4232, 4555, 9512, 4219, + 172, 7436, 1908, 9551, 5355, 2034, 7444, 6222, 5358, + 415, 4523, 8497, 1159, 6546, 2982, 9657, 7284, 2823, + 9948, 1307, 293, 9936, 9908, 9836, 9326, 6563, 2566, + 6336, 7704, 5608, 6306, 9283, 5085, 1829, 8868, 5796, + 2330, 5352, 2073, 2907, 4221, 2317, 4045, 1974, 6528, + 4428, 9165, 9570, 1902, 7934, 3584, 5642, 5226, 6064, + 2401, 627, 4348, 5433, 7193, 9010, 3873, 1074, 3547, + 2036, 1502, 8524, 1516, 8038, 6483, 5421, 9090, 1282, + 6461, 3107, 4786, 5722, 2040, 3948, 1805, 1340, 2167, + 8524, 873, 7912, 9277, 615, 2598, 3584, 5232, 6528, + 4041, 1307, 4984, 7493, 7893, 2441, 8985, 9241, 545, + 8462, 5218, 1131, 1603, 1467, 916, 391, 1880, 6303, + 236, 88, 6641, 9343, 1938, 644, 6243, 6324, 3102, + 4046, 4842, 9256, 877, 1050, 7198, 6510, 675, 7712, + 3992, 5524, 5461, 8463, 2767, 8024, 9124, 3831, 3301, + 5238, 839, 822, 8427, 8192, 7249, 6075, 7734, 8472, + 8865, 4636, 8397, 393, 3339, 9477, 1559, 9233, 1895, + 3219, 9244, 2768, 1620, 5867, 3834, 6459, 6900, 8402, + 4353, 4101, 1060, 3930, 3593, 9426, 100, 2578, 4861, + 5687, 6175, 9073, 9873, 7765, 4191, 8697, 5284, 4715, + 9219, 7611, 5627, 3329, 9064, 6928, 3037, 7722, 7680, + 8059, 8098, 5508, 1795, 6236, 2332, 9755, 2995, 556, + 7522, 7740, 4501, 6741, 6961, 3755, 958, 7285, 897, + 2682, 3633, 9522, 6519, 7381, 4140, 6325, 9071, 7470, + 6696, 5233, 3651, 7869, 774, 543, 8750, 4426, 5429, + 485, 730, 3337, 2506, 4162, 4944, 9298, 3907, 2447, + 8819, 8506, 6050, 3923, 8654, 5401, 6376, 181, 7284, + 5906, 8062, 5293, 5482, 1078, 6794, 7475, 2224, 7260, + 3627, 9570, 135, 9578, 8302, 8006, 6725, 2538, 6437, + 594, 9238, 4645, 7390, 6678, 2155, 7680, 9552, 1670, + 2472, 2676, 5726, 3946, 527, 7726, 2240, 9083, 9592, + 3444, 3546, 376, 9469, 8897, 9577, 5076, 4801, 9727, + 566, 4934, 7459, 7690, 5216, 4131, 7019, 8688, 2923, + 2438, 4950, 4206, 1087, 5226, 3034, 6574, 6662, 9559, + 5466, 9185, 1863, 2990, 6618, 392, 2496, 9534, 5645, + 2208, 4474, 5079, 8377, 5730, 1470, 1051, 9292, 67, + 8958, 5488, 3313, 2560, 3838, 7969, 4761, 7727, 4225, + 3905, 3354, 8114, 5546, 4451, 1620, 530, 3536, 5417, + 292, 5393, 7449, 3344, 2366, 832, 2586, 1114, 1164, + 7745, 1601, 5885, 6819, 9950, 7296, 956, 7785, 751, + 9927, 4784, 6024, 1885, 597, 3327, 6636, 4759, 2636, + 1681, 5642, 9192, 251, 2937, 9361, 5124, 5779, 3731, + 998, 1458, 3334, 3423, 3618, 2367, 2989, 6484, 6596, + 3275, 3629, 1246, 5835, 3014, 480, 3197, 
9504, 4739, + 7848, 5807, 5889, 6941, 5807, 5884, 9995, 7302, 9589, + 3118, 8179, 1634, 7475, 493, 2924, 4475, 3955, 9825, + 4774, 5869, 8402, 5728, 9453, 2751, 6979, 8042, 1938, + 255, 5871, 8287, 8146, 4117, 6879, 8378, 1539, 4897, + 6347, 5086, 8422, 8986, 4509, 9294, 2883, 9942, 8480, + 6013, 8174, 977, 1721, 2494, 5501, 2010, 8785, 4090, + 3984, 7019, 8716, 3571, 3228, 7733, 6351, 2624, 2714, + 8580, 9476, 747, 7962, 8294, 3951, 2439, 8475, 4886, + 941, 8870, 8639, 4412, 7879, 4719, 1308, 6188, 3204, + 5487, 2922, 9803, 6526, 1340, 8108, 1809, 5276, 4996, + 2606, 5863, 2844, 128, 5891, 2949, 2522, 2371, 374, + 9131, 1337, 2279, 3177, 3776, 5283, 9934, 312, 8545, + 8088]), + values=tensor([0.8990, 0.9202, 0.0544, 0.3278, 0.1036, 0.9834, 0.5405, + 0.8387, 0.1303, 0.8299, 0.9099, 0.8006, 0.3383, 0.3639, + 0.2173, 0.0563, 0.7372, 0.1968, 0.4499, 0.6369, 0.1211, + 0.5305, 0.7026, 0.0869, 0.4015, 0.0938, 0.8149, 0.6744, + 0.4215, 0.0815, 0.4822, 0.0786, 0.4204, 0.6460, 0.6276, + 0.2883, 0.4330, 0.3983, 0.2719, 0.4502, 0.7063, 0.0086, + 0.6751, 0.5856, 0.5977, 0.3533, 0.7195, 0.3114, 0.7411, + 0.5056, 0.2852, 0.3437, 0.7335, 0.9652, 0.3676, 0.1371, + 0.6895, 0.6801, 0.1441, 0.6072, 0.2913, 0.9589, 0.4646, + 0.7267, 0.3881, 0.6695, 0.4149, 0.4926, 0.6183, 0.3275, + 0.6292, 0.3672, 0.7396, 0.0629, 0.7870, 0.8240, 0.0651, + 0.7638, 0.1125, 0.7864, 0.0952, 0.8313, 0.9512, 0.1631, + 0.0751, 0.7252, 0.2013, 0.8188, 0.2883, 0.7422, 0.6655, + 0.8235, 0.3703, 0.8712, 0.1832, 0.1385, 0.5564, 0.7498, + 0.7736, 0.6575, 0.7905, 0.1372, 0.1047, 0.2516, 0.5275, + 0.0779, 0.8681, 0.3296, 0.6114, 0.4478, 0.1522, 0.3051, + 0.0405, 0.3528, 0.7471, 0.2608, 0.1704, 0.9466, 0.6985, + 0.0406, 0.9413, 0.2522, 0.5107, 0.2012, 0.1769, 0.8491, + 0.1199, 0.4970, 0.5967, 0.3353, 0.0061, 0.9854, 0.8058, + 0.2915, 0.5621, 0.6851, 0.9985, 0.0085, 0.7012, 0.0603, + 0.8973, 0.4404, 0.2983, 0.8140, 0.0895, 0.2012, 0.7432, + 0.9272, 0.9516, 0.8375, 0.3237, 0.4042, 0.2740, 0.8401, + 0.1138, 0.0969, 0.5800, 0.4304, 0.8447, 0.6543, 0.9229, + 0.0170, 0.9115, 0.8422, 0.3169, 0.2633, 0.5759, 0.5969, + 0.1088, 0.3461, 0.7896, 0.4253, 0.4344, 0.4992, 0.6827, + 0.6879, 0.8758, 0.6928, 0.8708, 0.9897, 0.8788, 0.0817, + 0.7722, 0.0931, 0.3481, 0.9886, 0.8853, 0.2278, 0.8570, + 0.9045, 0.1065, 0.2685, 0.6322, 0.7042, 0.6308, 0.2807, + 0.6931, 0.2248, 0.5412, 0.1164, 0.3317, 0.6726, 0.8543, + 0.5206, 0.9983, 0.7612, 0.9817, 0.2254, 0.2596, 0.3813, + 0.6946, 0.1231, 0.1329, 0.8727, 0.5792, 0.5145, 0.9953, + 0.8468, 0.6893, 0.7309, 0.5070, 0.6686, 0.8055, 0.1055, + 0.9254, 0.4455, 0.4767, 0.6225, 0.0209, 0.2930, 0.1811, + 0.8374, 0.8892, 0.9045, 0.1674, 0.8744, 0.7283, 0.0811, + 0.5372, 0.2055, 0.4944, 0.7620, 0.6061, 0.2733, 0.4358, + 0.1363, 0.3064, 0.0238, 0.7458, 0.8337, 0.8253, 0.5030, + 0.0973, 0.5618, 0.2875, 0.7789, 0.8665, 0.9501, 0.5176, + 0.4426, 0.1897, 0.1486, 0.1669, 0.2509, 0.1659, 0.8053, + 0.4105, 0.5846, 0.9191, 0.5151, 0.0551, 0.8020, 0.3620, + 0.9344, 0.3530, 0.0386, 0.0335, 0.8446, 0.1984, 0.7097, + 0.7277, 0.6649, 0.9046, 0.0203, 0.5170, 0.0550, 0.6834, + 0.5615, 0.5645, 0.5249, 0.9453, 0.7207, 0.2810, 0.7493, + 0.2947, 0.9369, 0.6771, 0.7460, 0.1467, 0.5385, 0.9790, + 0.7366, 0.3428, 0.7251, 0.2418, 0.1147, 0.6978, 0.6208, + 0.9677, 0.2034, 0.6015, 0.7335, 0.4291, 0.0051, 0.0440, + 0.2729, 0.3203, 0.0797, 0.2242, 0.1081, 0.2293, 0.8883, + 0.6170, 0.7219, 0.1224, 0.4373, 0.3181, 0.8085, 0.7996, + 0.6508, 0.7263, 0.9121, 0.5064, 0.0502, 0.9917, 0.9834, + 0.3031, 0.8748, 0.6856, 0.5292, 0.6315, 0.8787, 0.4765, + 0.8102, 
0.6878, 0.1287, 0.8970, 0.7946, 0.8583, 0.9049, + 0.0389, 0.0623, 0.5568, 0.6879, 0.2721, 0.1463, 0.8727, + 0.9772, 0.1081, 0.1611, 0.2843, 0.0447, 0.6310, 0.1932, + 0.2621, 0.8047, 0.8238, 0.9649, 0.2488, 0.4610, 0.2602, + 0.8419, 0.7266, 0.3002, 0.7494, 0.6113, 0.1487, 0.3589, + 0.4282, 0.7405, 0.9373, 0.2091, 0.9161, 0.1005, 0.8514, + 0.7238, 0.1078, 0.6520, 0.6311, 0.1891, 0.8272, 0.1963, + 0.8590, 0.8189, 0.0257, 0.2833, 0.3513, 0.3634, 0.3576, + 0.8904, 0.2484, 0.4918, 0.2908, 0.1508, 0.5806, 0.9406, + 0.7294, 0.0950, 0.5064, 0.0349, 0.6437, 0.4083, 0.5026, + 0.6560, 0.0148, 0.4276, 0.3401, 0.2383, 0.0904, 0.7044, + 0.9844, 0.8310, 0.9388, 0.3867, 0.2827, 0.1664, 0.3305, + 0.1891, 0.7274, 0.3593, 0.5517, 0.2275, 0.0974, 0.5619, + 0.4200, 0.3096, 0.0034, 0.0516, 0.4134, 0.2690, 0.2115, + 0.6454, 0.2865, 0.6179, 0.9913, 0.0141, 0.5942, 0.3621, + 0.2100, 0.3445, 0.3904, 0.9909, 0.6731, 0.4570, 0.4820, + 0.5848, 0.2093, 0.4957, 0.8327, 0.2632, 0.6657, 0.2324, + 0.0994, 0.6140, 0.4480, 0.9800, 0.2896, 0.1520, 0.2608, + 0.3853, 0.2094, 0.0521, 0.1625, 0.2360, 0.0706, 0.0520, + 0.9422, 0.5291, 0.5359, 0.8697, 0.5739, 0.8183, 0.5758, + 0.4869, 0.9594, 0.7777, 0.8218, 0.1785, 0.0020, 0.3345, + 0.9444, 0.5328, 0.4805, 0.9232, 0.6862, 0.9030, 0.9146, + 0.6421, 0.5457, 0.0643, 0.6817, 0.9655, 0.1845, 0.7938, + 0.4551, 0.4198, 0.9446, 0.9759, 0.6259, 0.5381, 0.3308, + 0.3232, 0.5498, 0.8269, 0.1628, 0.3341, 0.1440, 0.8802, + 0.7811, 0.5460, 0.9304, 0.6129, 0.5889, 0.0395, 0.1012, + 0.5055, 0.2108, 0.0596, 0.8797, 0.0924, 0.0840, 0.9035, + 0.4740, 0.3740, 0.2634, 0.4030, 0.2026, 0.3835, 0.9804, + 0.3664, 0.2344, 0.2632, 0.9989, 0.1712, 0.0177, 0.7018, + 0.1117, 0.0373, 0.6412, 0.6706, 0.9767, 0.2936, 0.5259, + 0.5797, 0.5149, 0.1616, 0.5650, 0.5356, 0.0065, 0.9212, + 0.1415, 0.5431, 0.0097, 0.9858, 0.4122, 0.1358, 0.1911, + 0.7329, 0.8335, 0.4353, 0.3789, 0.9018, 0.6853, 0.2444, + 0.9519, 0.7522, 0.6207, 0.8681, 0.9259, 0.8747, 0.9193, + 0.4093, 0.6975, 0.2182, 0.9647, 0.1252, 0.2843, 0.9046, + 0.3990, 0.1488, 0.1784, 0.6775, 0.5082, 0.9612, 0.5896, + 0.0701, 0.8792, 0.5361, 0.2316, 0.0926, 0.9316, 0.9904, + 0.4062, 0.1734, 0.2068, 0.7329, 0.0588, 0.2116, 0.2498, + 0.2860, 0.6263, 0.2277, 0.3363, 0.0558, 0.6334, 0.9261, + 0.3782, 0.1726, 0.3657, 0.2001, 0.2643, 0.5676, 0.4875, + 0.2652, 0.7263, 0.4490, 0.1802, 0.8678, 0.5205, 0.9599, + 0.7949, 0.8153, 0.1755, 0.6089, 0.8090, 0.1965, 0.7501, + 0.8519, 0.9054, 0.1044, 0.7000, 0.2903, 0.5652, 0.3256, + 0.2532, 0.3074, 0.2884, 0.7156, 0.1406, 0.5555, 0.2160, + 0.4498, 0.0747, 0.9689, 0.5720, 0.9390, 0.9625, 0.3014, + 0.2693, 0.5947, 0.5986, 0.9539, 0.5617, 0.3646, 0.1056, + 0.6700, 0.6793, 0.1324, 0.7287, 0.1481, 0.3299, 0.7685, + 0.3760, 0.6705, 0.5083, 0.8665, 0.5329, 0.5057, 0.7230, + 0.2213, 0.3819, 0.8994, 0.9305, 0.0147, 0.6384, 0.9557, + 0.4297, 0.3363, 0.5610, 0.1485, 0.7226, 0.7840, 0.2842, + 0.6194, 0.3137, 0.3575, 0.5156, 0.8823, 0.6332, 0.6090, + 0.2125, 0.2540, 0.8502, 0.5188, 0.0320, 0.4363, 0.2213, + 0.1104, 0.5745, 0.3976, 0.8852, 0.6841, 0.1436, 0.8128, + 0.0664, 0.8122, 0.0275, 0.5139, 0.0401, 0.8774, 0.4004, + 0.6001, 0.5350, 0.9205, 0.6614, 0.3780, 0.2849, 0.3091, + 0.9873, 0.8828, 0.8702, 0.8818, 0.6909, 0.9257, 0.8168, + 0.3636, 0.3185, 0.8926, 0.4749, 0.0620, 0.4189, 0.6713, + 0.8811, 0.0223, 0.4110, 0.0053, 0.9365, 0.2006, 0.8273, + 0.4083, 0.1751, 0.9186, 0.3824, 0.9992, 0.8941, 0.4645, + 0.1171, 0.3220, 0.6118, 0.7887, 0.2217, 0.1825, 0.7475, + 0.9667, 0.5530, 0.3419, 0.9475, 0.0701, 0.5839, 0.3842, + 0.1233, 0.6971, 0.4857, 
0.1287, 0.5722, 0.0250, 0.8936, + 0.8207, 0.3129, 0.2199, 0.6123, 0.1672, 0.5414, 0.7945, + 0.0311, 0.1705, 0.9516, 0.1397, 0.2348, 0.9404, 0.3368, + 0.1816, 0.7521, 0.1326, 0.1366, 0.8702, 0.3360, 0.7067, + 0.1910, 0.8902, 0.9378, 0.2610, 0.2939, 0.3489, 0.7916, + 0.4565, 0.4221, 0.7669, 0.2915, 0.7403, 0.1889, 0.8303, + 0.4423, 0.9543, 0.7012, 0.6502, 0.1369, 0.0177, 0.6082, + 0.2162, 0.2686, 0.0179, 0.4480, 0.6701, 0.3740, 0.8426, + 0.1687, 0.0416, 0.5328, 0.3704, 0.6536, 0.4452, 0.6981, + 0.5511, 0.1110, 0.1241, 0.1011, 0.5345, 0.6973, 0.0525, + 0.1417, 0.8094, 0.0874, 0.8494, 0.7134, 0.8426, 0.7518, + 0.9024, 0.4875, 0.9175, 0.0907, 0.4753, 0.1481, 0.7654, + 0.7412, 0.2930, 0.8077, 0.0519, 0.4826, 0.4705, 0.4910, + 0.0307, 0.1733, 0.7764, 0.7803, 0.3800, 0.6955, 0.7178, + 0.6864, 0.9434, 0.4690, 0.9390, 0.5393, 0.6128, 0.4020, + 0.0722, 0.0742, 0.1685, 0.1343, 0.7205, 0.3661, 0.0503, + 0.2629, 0.7318, 0.6532, 0.6350, 0.4666, 0.8599, 0.1969, + 0.7470, 0.5813, 0.2403, 0.3313, 0.8673, 0.0911, 0.1399, + 0.7504, 0.8548, 0.7052, 0.4678, 0.6190, 0.1905, 0.2523, + 0.4053, 0.0730, 0.1745, 0.6615, 0.1279, 0.0102, 0.5054, + 0.2090, 0.7368, 0.7129, 0.6557, 0.6802, 0.2242, 0.6301, + 0.7717, 0.4921, 0.4267, 0.4852, 0.1080, 0.7094, 0.6823, + 0.5678, 0.2979, 0.7735, 0.4171, 0.3043, 0.4562, 0.9694, + 0.7066, 0.4892, 0.2640, 0.1546, 0.4359, 0.3991, 0.4200, + 0.9092, 0.0011, 0.6541, 0.1216, 0.2866, 0.3196, 0.1896, + 0.5043, 0.0425, 0.9999, 0.4499, 0.5582, 0.8404, 0.9463, + 0.7216, 0.6425, 0.3931, 0.8890, 0.7122, 0.3751, 0.8529, + 0.0063, 0.4366, 0.9377, 0.1575, 0.5576, 0.6903, 0.3497, + 0.5692, 0.9612, 0.7095, 0.9042, 0.6678, 0.8446, 0.3919, + 0.7942, 0.4563, 0.7095, 0.7390, 0.5213, 0.8669, 0.1933, + 0.8827, 0.3576, 0.3715, 0.3966, 0.7670, 0.8625, 0.0249, + 0.4165, 0.2028, 0.9277, 0.8840, 0.7235, 0.4226, 0.0014, + 0.6919, 0.7665, 0.1665, 0.5380, 0.1084, 0.7142]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.5397, 0.8345, 0.2583, ..., 0.2923, 0.3741, 0.4815]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 1.9391217231750488 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '284305', '-ss', '10000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 1000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.350545883178711} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5455, 5592, 4109, 222, 9693, 3577, 9334, 9406, 2137, + 1799, 7682, 5442, 9556, 3008, 6623, 6748, 1968, 1806, + 8890, 8613, 4077, 4885, 7962, 302, 1013, 8955, 2558, + 3711, 9681, 9593, 3121, 9869, 3177, 5155, 4286, 6364, + 2440, 3983, 2281, 3730, 740, 3324, 1186, 4197, 4183, + 3787, 1237, 618, 3728, 4305, 7249, 7969, 8150, 5074, + 336, 432, 1883, 5788, 1998, 9882, 8337, 8410, 6941, + 4025, 3080, 9432, 7607, 2625, 8188, 4417, 71, 2070, + 1396, 3989, 9431, 872, 4755, 1177, 5104, 5, 1671, + 5203, 8385, 3215, 9836, 5654, 6123, 9005, 9573, 3860, + 3248, 2894, 9896, 9991, 4949, 9, 1784, 5266, 2022, + 6095, 1517, 7556, 4941, 521, 2910, 5575, 7555, 1842, + 2590, 9377, 4113, 4907, 8259, 6209, 7070, 152, 6147, + 6757, 41, 774, 7469, 8152, 8924, 5911, 4839, 3386, + 4880, 5093, 7504, 578, 6341, 2413, 9223, 2943, 8245, + 3762, 1885, 3988, 7760, 6659, 2327, 8319, 4889, 2086, + 6582, 2999, 7225, 5620, 5397, 6166, 1192, 9054, 1944, + 8586, 7428, 4653, 1578, 8066, 4063, 6909, 8387, 9373, + 1779, 2964, 5753, 4702, 6011, 649, 4878, 6031, 3620, + 4368, 8038, 9222, 6809, 4045, 7946, 9720, 2447, 8709, + 5211, 3070, 483, 7264, 6025, 5336, 6583, 193, 3061, + 7753, 6937, 7842, 2405, 5951, 5344, 4719, 6714, 6154, + 7382, 3202, 1630, 3013, 8617, 6912, 3313, 4097, 2564, + 7916, 1190, 6815, 3429, 4803, 8583, 6446, 9482, 7512, + 5802, 3394, 6637, 5002, 8800, 9545, 1806, 7825, 5873, + 6547, 42, 3341, 4336, 2945, 8309, 6317, 8694, 7310, + 861, 6529, 9657, 2013, 7583, 4059, 8819, 8573, 3062, + 4530, 3219, 6965, 7043, 4000, 1751, 3453, 7507, 9158, + 8456, 5641, 2323, 894, 6849, 2354, 8414, 9263, 340, + 7205, 5325, 7515, 2661, 4262, 8481, 6503, 8559, 631, + 4284, 892, 3568, 2478, 5313, 8074, 1189, 3638, 1570, + 2900, 1226, 7729, 8931, 3902, 2531, 9721, 4451, 7093, + 6369, 3584, 8858, 4594, 9711, 2895, 8279, 6635, 5468, + 1625, 2878, 183, 9364, 8049, 8688, 3754, 9500, 5156, + 8141, 1233, 6270, 9152, 9609, 8833, 9689, 6804, 6879, + 8482, 5351, 2008, 5339, 174, 2631, 2752, 9101, 4250, + 1959, 1601, 8705, 8321, 4156, 2212, 1758, 6719, 390, + 8271, 8873, 6936, 6913, 6183, 6416, 6740, 9148, 7993, + 6077, 7025, 1434, 5450, 4696, 8201, 4111, 5108, 3263, + 4376, 992, 3001, 3573, 2771, 1330, 3404, 1985, 1213, + 7105, 2969, 1853, 4254, 2574, 8578, 3726, 9824, 8756, + 9790, 4725, 6571, 1525, 4226, 397, 3148, 8143, 1081, + 9319, 954, 8043, 2951, 6714, 1546, 7498, 5572, 6847, + 373, 6338, 6704, 6136, 6912, 8385, 6773, 2897, 1857, + 7837, 5293, 883, 3465, 2333, 2706, 2793, 8339, 9030, + 170, 6025, 238, 5536, 326, 7382, 5214, 9691, 1226, + 6312, 9099, 7743, 9256, 7826, 3961, 7289, 2810, 2889, + 5530, 8793, 1729, 9330, 6285, 259, 2418, 2723, 7786, + 9499, 1444, 3444, 9403, 4786, 2961, 3253, 210, 3033, + 4287, 1941, 2567, 5596, 8952, 1045, 8409, 1209, 2289, + 4009, 2954, 8195, 2521, 9908, 6497, 2176, 8245, 1878, + 2833, 2015, 1191, 8989, 9949, 1659, 7998, 6449, 3287, + 4617, 3730, 4250, 2845, 7685, 2587, 570, 2759, 5601, + 1122, 4194, 6085, 3538, 2068, 3553, 7147, 4353, 8619, + 741, 5644, 3102, 8593, 5741, 717, 5287, 9540, 3750, + 9476, 1080, 2354, 3239, 9391, 2983, 3615, 7545, 9316, + 3217, 546, 2878, 3385, 4369, 7750, 7048, 7638, 5003, + 1954, 2932, 9977, 2825, 6294, 7412, 4947, 5551, 4606, + 6527, 8202, 9874, 3630, 9332, 3043, 9614, 4366, 4415, + 5789, 5831, 6056, 8765, 7112, 3900, 5169, 8287, 5622, + 6492, 3446, 6222, 3934, 6761, 6496, 7921, 3767, 4657, + 6468, 9740, 3922, 1388, 791, 996, 5882, 8931, 
4041, + 1804, 3408, 873, 8854, 8857, 5197, 3222, 7923, 1837, + 6637, 5267, 8748, 7386, 9946, 9090, 7284, 9796, 1916, + 1676, 6417, 478, 4042, 9158, 4562, 6428, 4901, 9841, + 4156, 3811, 6129, 8100, 2410, 9667, 1059, 6471, 9086, + 7783, 436, 8399, 2026, 7724, 9135, 6834, 2224, 8238, + 4091, 7721, 9973, 6278, 3251, 4004, 2804, 6760, 1348, + 3105, 8920, 3080, 8489, 6412, 1945, 5275, 9361, 5992, + 4252, 5985, 14, 8664, 6186, 8914, 4367, 8021, 3430, + 7586, 569, 9236, 221, 6330, 387, 7069, 4231, 8633, + 1760, 5743, 1734, 3474, 4921, 5437, 5342, 1188, 3716, + 7244, 3921, 3117, 1899, 6123, 1541, 2426, 3487, 1543, + 8966, 4322, 9621, 5499, 9634, 197, 2016, 1179, 1742, + 603, 4446, 2127, 5502, 5419, 1800, 8843, 3410, 9280, + 6183, 5336, 3557, 1301, 2924, 2685, 4789, 2681, 8075, + 9074, 779, 9722, 2242, 1422, 9799, 190, 8813, 4955, + 9269, 8432, 360, 3105, 1117, 2106, 6923, 340, 3913, + 6338, 2743, 3141, 5508, 8419, 4462, 3804, 4854, 8563, + 27, 56, 4099, 9429, 7128, 3170, 1423, 1480, 4108, + 9263, 248, 6315, 5394, 2111, 8815, 9468, 1653, 6525, + 7824, 7918, 7021, 7163, 2918, 4892, 3181, 2452, 2941, + 8090, 124, 6939, 1996, 9469, 8469, 3564, 8562, 416, + 7462, 4270, 2521, 6664, 1132, 2808, 1882, 6008, 4243, + 8264, 72, 188, 8612, 5177, 8716, 575, 8437, 8572, + 272, 3041, 762, 93, 8887, 453, 2740, 5642, 1483, + 387, 8650, 4556, 7071, 2833, 2163, 2519, 8518, 2921, + 1296, 6818, 2707, 3507, 8598, 8802, 112, 2742, 5974, + 6565, 5489, 7784, 469, 1046, 8118, 2916, 1384, 4596, + 8660, 1524, 2862, 8341, 9259, 3914, 7327, 3943, 1127, + 6398, 6612, 8113, 2461, 8714, 9729, 8226, 6354, 9494, + 9498, 6160, 330, 9056, 6769, 4637, 540, 5583, 6515, + 9235, 1832, 1756, 9622, 3128, 5815, 6161, 5166, 2180, + 2553, 9617, 5271, 540, 669, 8109, 2118, 7870, 9305, + 5197, 8512, 8704, 2565, 8570, 3358, 8597, 6817, 1442, + 7822, 9580, 2286, 877, 5934, 4989, 9155, 9087, 5891, + 4023, 8446, 2014, 2362, 990, 1376, 5099, 6917, 1513, + 4755, 9921, 9633, 586, 9793, 2424, 7385, 2711, 6971, + 8476, 3945, 2785, 4359, 7402, 6094, 3054, 5997, 6264, + 4973, 9403, 224, 8540, 1749, 1440, 9039, 4450, 6560, + 7985, 2950, 8212, 6558, 2305, 2992, 3067, 7181, 688, + 557, 3139, 1085, 2535, 8708, 4783, 1637, 2401, 5400, + 8152, 9595, 2332, 5036, 9866, 6137, 9544, 4606, 3463, + 5129, 4600, 2840, 4681, 5620, 3070, 9929, 3817, 6586, + 1810, 9677, 8838, 5997, 8061, 2182, 2092, 5426, 2012, + 4695, 4335, 5207, 18, 6447, 8196, 4896, 1724, 8190, + 6513, 7255, 7873, 632, 350, 9671, 2671, 6415, 9769, + 5192, 2244, 5805, 3331, 1110, 1188, 9979, 5220, 7760, + 7927, 860, 6526, 7297, 2539, 220, 4541, 1369, 5557, + 6832, 3456, 2993, 2123, 4095, 2625, 8888, 4611, 5854, + 494, 6448, 3694, 6940, 6717, 6857, 7774, 2832, 3690, + 2621]), + values=tensor([3.0392e-01, 3.2415e-01, 6.4353e-02, 7.3274e-01, + 3.2946e-01, 1.5879e-01, 9.2352e-02, 2.3222e-01, + 2.7476e-01, 5.5662e-01, 2.0841e-01, 4.3983e-01, + 4.6932e-01, 4.7844e-01, 6.0685e-01, 9.7693e-01, + 3.5238e-01, 4.6964e-01, 7.5140e-01, 3.1413e-01, + 6.6449e-02, 8.3856e-01, 5.5909e-01, 2.9668e-01, + 6.8550e-01, 8.4874e-01, 2.7284e-01, 6.4469e-01, + 5.5500e-01, 5.1334e-01, 1.1239e-01, 3.2908e-02, + 6.3958e-01, 9.5935e-01, 2.7353e-01, 6.6292e-01, + 3.1922e-01, 6.9750e-01, 5.5048e-01, 6.8061e-01, + 4.3532e-01, 7.7149e-01, 7.8764e-01, 6.0497e-01, + 9.6987e-02, 3.7830e-01, 8.7905e-01, 7.0427e-02, + 1.6845e-01, 8.8919e-01, 8.9750e-01, 1.9794e-01, + 5.5784e-01, 5.4874e-01, 9.3778e-02, 6.5393e-01, + 3.7119e-01, 2.3349e-01, 6.0309e-01, 4.8361e-01, + 5.1730e-01, 5.3303e-01, 8.8849e-01, 7.5067e-03, + 6.5848e-01, 7.7182e-01, 
2.5538e-01, 9.6187e-01, + 3.6024e-01, 5.1765e-01, 2.1626e-02, 5.8628e-01, + 6.4821e-01, 2.7907e-01, 5.4479e-01, 9.4676e-01, + 2.6434e-01, 4.1497e-01, 1.2576e-01, 7.4574e-01, + 6.0185e-01, 6.4194e-01, 2.8693e-01, 3.0484e-01, + 6.4746e-01, 8.6023e-01, 7.7437e-01, 8.2817e-01, + 8.2911e-01, 6.5601e-01, 3.6870e-01, 3.0474e-01, + 9.7824e-01, 7.0873e-01, 7.5584e-01, 4.7182e-01, + 3.3010e-01, 1.0185e-02, 8.5565e-01, 6.5803e-01, + 2.1163e-01, 9.6445e-01, 3.5526e-01, 9.0210e-01, + 6.1257e-01, 3.5304e-01, 3.5164e-01, 8.1901e-01, + 9.3322e-01, 2.9058e-01, 5.5850e-01, 4.1175e-01, + 7.5611e-01, 5.2276e-01, 8.1503e-01, 9.5294e-01, + 5.9539e-01, 9.9769e-01, 2.3382e-01, 5.8700e-01, + 3.9790e-01, 1.0685e-01, 1.3325e-01, 1.5247e-02, + 4.9237e-01, 5.8495e-01, 6.7974e-01, 6.5205e-01, + 2.4978e-01, 1.5540e-01, 6.9466e-01, 9.8909e-01, + 6.7400e-01, 4.4045e-01, 8.0887e-01, 8.7366e-01, + 6.1470e-01, 6.6878e-01, 2.0722e-01, 5.6730e-01, + 9.6699e-01, 2.1420e-01, 5.0036e-01, 4.3882e-02, + 2.4509e-01, 9.6699e-01, 6.2712e-02, 8.0118e-01, + 7.0259e-01, 4.9349e-01, 2.3668e-01, 6.4690e-01, + 3.3297e-01, 8.1392e-01, 3.3370e-01, 3.6099e-01, + 6.9785e-01, 5.8653e-01, 2.3494e-01, 4.2606e-01, + 5.3776e-02, 2.9098e-01, 3.5190e-01, 8.5533e-01, + 3.9164e-01, 4.5423e-01, 3.2810e-02, 7.6592e-01, + 5.1452e-01, 5.8263e-01, 3.0590e-01, 4.6225e-01, + 8.9127e-01, 3.8718e-04, 3.6956e-01, 1.0716e-01, + 8.9555e-01, 5.1526e-01, 2.6090e-01, 5.4827e-01, + 7.6613e-01, 9.6451e-02, 1.6855e-01, 2.7123e-01, + 7.9078e-01, 7.0227e-01, 9.2769e-01, 3.2768e-01, + 7.4133e-02, 8.0175e-01, 8.9212e-01, 1.4596e-01, + 5.2250e-02, 7.1920e-01, 2.8915e-01, 7.1399e-01, + 3.9989e-01, 8.4612e-01, 7.6692e-01, 7.6603e-01, + 9.7955e-01, 8.5926e-01, 4.7268e-01, 3.0567e-01, + 3.5521e-01, 4.5346e-01, 7.0907e-01, 1.0000e-01, + 3.8556e-01, 7.4063e-01, 1.2875e-01, 5.0308e-01, + 6.8759e-01, 5.4430e-01, 9.2335e-01, 5.9479e-01, + 3.5215e-01, 2.5979e-01, 3.6195e-01, 1.1209e-01, + 6.7558e-01, 3.6084e-01, 2.9372e-01, 7.9418e-02, + 8.0128e-01, 5.5807e-01, 6.3595e-01, 3.1372e-01, + 2.3848e-01, 1.4012e-02, 2.1033e-01, 5.1052e-01, + 6.6708e-01, 3.8104e-01, 6.2857e-01, 2.5671e-01, + 6.7301e-01, 6.4080e-01, 7.3818e-01, 6.5250e-01, + 7.2748e-01, 2.8088e-01, 4.3795e-01, 7.6139e-01, + 3.2002e-01, 1.0962e-01, 1.1736e-01, 1.1390e-01, + 8.8693e-01, 5.6804e-01, 5.4451e-01, 4.7759e-01, + 9.7875e-02, 8.1348e-02, 1.2472e-01, 6.8343e-01, + 7.6072e-01, 4.3782e-01, 9.4758e-01, 9.8629e-01, + 1.3619e-01, 8.9717e-01, 8.0717e-01, 5.1829e-01, + 6.6901e-01, 7.6695e-01, 3.5278e-01, 7.6203e-02, + 8.1739e-01, 2.6432e-02, 7.8358e-02, 7.6105e-01, + 6.0698e-01, 9.7534e-01, 1.0290e-01, 6.2350e-03, + 3.6916e-02, 9.6921e-01, 9.2309e-01, 3.6705e-01, + 4.1131e-01, 7.1992e-01, 4.8131e-01, 3.8551e-02, + 8.7653e-01, 4.2984e-01, 7.4999e-01, 5.9486e-01, + 2.0777e-01, 2.4797e-02, 7.2719e-01, 8.6476e-01, + 9.2557e-02, 6.6099e-01, 8.8421e-01, 9.9344e-01, + 5.9213e-01, 8.8296e-01, 4.4506e-01, 6.1979e-01, + 2.4620e-01, 6.4475e-01, 9.4222e-01, 4.3135e-01, + 6.9601e-01, 7.7456e-01, 9.3620e-01, 4.9096e-01, + 7.2207e-01, 6.4022e-01, 5.2574e-01, 8.2484e-01, + 5.7041e-01, 6.9043e-01, 2.4631e-02, 9.5777e-02, + 5.9238e-01, 3.0126e-01, 9.4882e-01, 3.7736e-01, + 4.4950e-01, 2.8003e-01, 1.1028e-01, 4.2071e-01, + 9.9009e-01, 5.0994e-01, 4.9474e-01, 7.2898e-01, + 4.3563e-01, 2.0331e-01, 6.0930e-01, 3.4882e-01, + 2.9900e-01, 6.1199e-01, 2.0308e-01, 1.3459e-01, + 5.6701e-01, 4.8437e-01, 6.0606e-01, 4.1922e-01, + 4.5665e-01, 4.1795e-02, 2.1442e-01, 8.5784e-03, + 1.2383e-01, 6.8451e-01, 8.2903e-01, 9.3818e-01, + 8.6183e-01, 9.2220e-01, 
1.2146e-02, 9.4702e-01, + 7.2689e-01, 7.0124e-01, 5.2058e-01, 7.6183e-01, + 2.7320e-01, 6.4457e-01, 1.3569e-01, 3.2953e-01, + 1.9373e-01, 1.1614e-01, 6.8419e-01, 1.1889e-01, + 1.5054e-01, 6.8449e-01, 2.2163e-02, 3.3239e-01, + 5.3542e-01, 4.6539e-01, 5.7549e-01, 6.0063e-01, + 7.3725e-01, 7.7272e-01, 7.1549e-01, 8.3333e-02, + 2.5724e-01, 4.8954e-01, 5.4990e-01, 5.5515e-01, + 6.6187e-01, 1.4302e-01, 2.6241e-01, 1.3082e-01, + 1.0201e-01, 4.7238e-01, 8.0345e-01, 3.5296e-01, + 4.3307e-02, 5.1890e-01, 8.2623e-01, 7.8766e-01, + 9.6443e-01, 5.7328e-01, 9.6623e-01, 5.1756e-01, + 8.4229e-01, 8.6955e-01, 5.2500e-01, 2.4364e-01, + 4.6531e-04, 7.7566e-01, 2.3278e-01, 5.0290e-01, + 1.2741e-01, 7.4793e-01, 6.6397e-01, 8.4389e-01, + 7.1603e-01, 1.3434e-01, 9.1897e-01, 1.6605e-01, + 3.1924e-01, 8.7903e-01, 1.4216e-02, 1.4696e-01, + 7.2418e-01, 1.2448e-01, 1.1574e-01, 8.0022e-01, + 3.2231e-01, 5.5328e-01, 2.4152e-01, 4.0399e-01, + 1.2053e-01, 1.5238e-01, 5.0061e-01, 8.8357e-01, + 2.6656e-01, 3.4203e-01, 5.0313e-01, 9.2105e-01, + 5.4412e-01, 7.6757e-01, 1.4392e-01, 9.2549e-01, + 4.4630e-02, 1.0189e-01, 2.1381e-01, 6.3179e-01, + 1.4210e-01, 6.4822e-01, 4.4733e-02, 7.0778e-02, + 5.3670e-01, 6.7468e-01, 9.6249e-02, 1.4701e-01, + 6.7904e-01, 6.2977e-01, 4.8222e-01, 5.6410e-01, + 6.6069e-01, 8.2291e-01, 2.7086e-01, 1.3385e-02, + 3.8370e-02, 7.6000e-01, 1.2836e-01, 4.2271e-01, + 3.7971e-01, 4.0221e-01, 1.9058e-01, 1.3246e-02, + 9.7472e-01, 8.1468e-01, 9.5465e-01, 5.0494e-01, + 4.8024e-01, 8.6375e-01, 2.1211e-01, 7.4747e-01, + 8.8496e-01, 2.3040e-01, 2.1539e-01, 6.6296e-01, + 4.6006e-01, 7.6222e-01, 6.9519e-01, 2.5685e-01, + 1.2762e-01, 4.8623e-01, 8.5541e-01, 1.9816e-01, + 6.4360e-01, 5.6243e-01, 6.0436e-01, 9.6360e-01, + 7.3027e-01, 8.0053e-01, 9.3960e-02, 8.9196e-01, + 6.5344e-01, 5.7618e-01, 3.8071e-02, 9.8561e-01, + 3.9902e-01, 2.0152e-02, 8.4945e-01, 1.0773e-01, + 5.1144e-01, 7.1844e-01, 8.5285e-02, 4.5100e-01, + 1.5098e-01, 2.6810e-01, 5.1885e-02, 6.6289e-01, + 5.9605e-01, 5.1952e-01, 3.2494e-01, 4.2823e-01, + 8.5842e-01, 6.0189e-01, 2.0347e-01, 9.8130e-01, + 1.8163e-01, 2.5564e-02, 1.8724e-02, 5.1201e-01, + 4.5720e-01, 7.9371e-01, 4.8374e-01, 4.3205e-01, + 7.7302e-01, 7.5530e-01, 9.7319e-01, 2.8166e-01, + 6.8553e-01, 9.0165e-01, 1.3726e-01, 6.1107e-01, + 9.6470e-01, 9.3457e-01, 1.6750e-01, 5.7026e-02, + 9.7853e-01, 4.8808e-01, 5.2986e-01, 4.4763e-01, + 2.1220e-01, 1.8968e-01, 6.6682e-01, 2.7978e-01, + 6.2518e-02, 1.7188e-01, 6.6203e-01, 9.5117e-01, + 2.6765e-01, 7.5161e-01, 4.2205e-01, 8.3213e-01, + 3.4401e-02, 5.5833e-01, 2.2728e-01, 8.6978e-01, + 5.0487e-01, 6.8315e-01, 8.8845e-01, 3.7450e-01, + 4.7156e-01, 2.0297e-01, 5.9919e-01, 8.7437e-01, + 1.8376e-01, 3.8162e-01, 3.8759e-01, 2.8332e-01, + 7.3703e-01, 8.2598e-01, 5.7822e-01, 8.1318e-01, + 6.0403e-01, 3.0036e-01, 8.6647e-01, 6.3278e-01, + 3.3144e-02, 8.3655e-02, 2.0403e-01, 6.3295e-01, + 5.5983e-01, 1.7687e-01, 7.9099e-01, 6.5995e-01, + 1.6364e-01, 8.0576e-01, 5.9218e-01, 3.7825e-01, + 7.3625e-01, 6.8133e-01, 1.5251e-01, 3.4541e-01, + 4.3200e-01, 4.7561e-01, 6.5115e-01, 4.9609e-01, + 4.9830e-01, 1.7116e-01, 8.1782e-01, 5.6982e-02, + 7.8582e-01, 4.1358e-01, 6.8337e-01, 8.8174e-01, + 2.7893e-02, 6.1252e-01, 6.8428e-01, 4.7886e-04, + 4.5579e-01, 2.6597e-01, 8.9291e-01, 4.7913e-01, + 5.1187e-02, 9.1252e-01, 5.2623e-01, 9.2323e-01, + 2.5712e-02, 7.4165e-01, 8.7147e-01, 1.1067e-01, + 3.3337e-01, 6.6053e-01, 8.1395e-02, 3.0634e-01, + 5.9633e-01, 6.1441e-01, 5.2337e-01, 9.4467e-01, + 1.6455e-01, 6.0027e-01, 9.9735e-01, 5.4964e-01, + 4.2544e-01, 5.2938e-01, 
2.7487e-01, 5.2740e-01, + 7.2458e-01, 2.1872e-01, 2.3165e-02, 8.5565e-01, + 3.4327e-02, 8.8372e-01, 3.6038e-01, 5.2099e-01, + 7.8544e-01, 8.5584e-01, 2.0450e-01, 7.0439e-01, + 1.8946e-01, 5.2352e-01, 1.3840e-01, 7.9757e-01, + 6.4771e-01, 4.2226e-01, 3.3701e-01, 5.7625e-01, + 7.5119e-01, 6.0090e-01, 5.5169e-01, 2.6335e-01, + 2.5175e-01, 1.3511e-01, 7.5301e-01, 3.6857e-01, + 2.4815e-01, 9.7900e-01, 5.3734e-01, 2.5181e-01, + 9.6212e-01, 3.0052e-01, 2.3817e-01, 5.4727e-01, + 7.4985e-01, 4.0814e-01, 2.3324e-01, 9.0258e-01, + 9.2453e-01, 5.1688e-01, 6.8008e-01, 8.4417e-01, + 2.9048e-01, 3.5275e-01, 9.1446e-01, 3.1166e-01, + 9.3812e-02, 3.2061e-01, 3.8420e-01, 8.3599e-01, + 2.2761e-01, 7.4261e-01, 2.4781e-01, 8.3249e-01, + 6.2992e-01, 3.7271e-01, 6.4205e-01, 8.0600e-01, + 5.2952e-01, 8.6497e-03, 2.0012e-01, 6.7808e-01, + 7.6509e-01, 3.6618e-01, 3.6418e-01, 7.0343e-01, + 6.0658e-01, 1.8231e-01, 4.0747e-02, 7.2457e-01, + 7.5662e-01, 3.1029e-02, 1.9408e-01, 5.8483e-03, + 2.5497e-01, 2.7861e-01, 6.7215e-01, 9.8377e-01, + 3.9461e-01, 5.7729e-01, 7.4282e-01, 7.8487e-01, + 6.6966e-01, 8.0111e-01, 2.7436e-01, 9.1071e-01, + 7.5479e-01, 9.6961e-02, 5.9253e-01, 4.3539e-01, + 8.1993e-02, 1.4436e-01, 5.4192e-02, 4.5414e-01, + 2.2083e-01, 6.8883e-01, 3.0813e-03, 5.7122e-01, + 6.4824e-01, 6.1088e-01, 9.9103e-01, 3.0128e-01, + 2.5519e-01, 5.5098e-01, 7.7501e-01, 8.2747e-01, + 2.7382e-01, 1.6131e-01, 3.8473e-01, 1.8858e-01, + 7.6480e-01, 5.7925e-01, 3.7285e-01, 4.1565e-01, + 6.6174e-01, 7.9534e-01, 3.9512e-01, 3.1463e-02, + 2.2917e-01, 3.0596e-01, 2.7861e-03, 2.0807e-01, + 3.7628e-01, 6.8980e-01, 2.7128e-02, 6.5713e-01, + 4.9102e-01, 7.0889e-01, 4.5564e-02, 9.8297e-01, + 6.8338e-01, 2.7678e-03, 5.2548e-01, 9.3723e-01, + 6.9210e-01, 8.4505e-01, 5.3302e-01, 5.5013e-01, + 2.2889e-02, 8.7608e-02, 1.4028e-01, 4.6686e-01, + 6.4919e-01, 9.9436e-01, 7.3888e-01, 4.1538e-01, + 9.4718e-01, 9.8412e-01, 4.2306e-01, 1.3232e-01, + 6.8812e-01, 2.0943e-01, 7.5932e-01, 1.2380e-01, + 6.2774e-01, 4.9894e-01, 3.0889e-01, 1.1004e-01, + 7.4553e-01, 8.5544e-01, 2.0482e-01, 7.2767e-01, + 2.0936e-02, 5.0479e-01, 5.7524e-01, 5.4774e-01, + 8.4315e-01, 5.2362e-01, 2.8163e-01, 5.1186e-01, + 8.0800e-01, 7.9337e-01, 3.6713e-01, 6.2638e-01, + 6.5095e-01, 4.0210e-01, 6.3034e-01, 5.3272e-01, + 3.4358e-01, 5.9924e-01, 1.9674e-01, 6.4351e-01, + 8.5855e-02, 2.3977e-01, 1.2607e-01, 7.8134e-01, + 9.6526e-02, 6.0975e-01, 4.6583e-01, 1.7783e-01, + 7.3707e-02, 8.4537e-01, 7.9093e-01, 4.1040e-02, + 2.5473e-01, 3.9781e-01, 1.3302e-01, 4.8382e-01, + 6.0215e-02, 7.9066e-01, 9.9902e-01, 9.0029e-01, + 5.6279e-01, 9.9788e-01, 7.8531e-01, 2.8619e-01, + 6.3489e-01, 1.5229e-01, 6.9891e-01, 7.3891e-01, + 9.9196e-01, 1.2942e-02, 1.0882e-01, 1.6395e-01, + 9.7799e-01, 9.8840e-01, 9.3287e-01, 9.7669e-01, + 9.3534e-01, 3.8769e-01, 5.8205e-01, 7.1933e-01, + 8.1893e-01, 2.3647e-02, 7.8599e-01, 3.5138e-01, + 8.8097e-01, 5.9339e-01, 8.1908e-01, 2.1259e-01, + 1.7916e-01, 8.3775e-01, 4.2342e-02, 1.7547e-01, + 4.9728e-01, 7.5815e-01, 6.1732e-01, 7.7818e-01, + 3.0544e-01, 7.8172e-01, 1.0312e-01, 9.2880e-01, + 4.3926e-01, 8.6426e-02, 2.2251e-01, 4.2217e-01, + 8.8595e-01, 8.2722e-02, 7.3624e-01, 5.9382e-02, + 4.9932e-01, 1.4760e-01, 4.7458e-02, 6.0080e-01, + 1.4431e-01, 2.9584e-01, 7.1655e-01, 8.9494e-01, + 7.0292e-01, 6.4015e-01, 3.6954e-01, 4.8047e-01, + 2.1881e-01, 1.9305e-01, 5.3317e-01, 1.5076e-01, + 4.3873e-01, 9.4135e-01, 1.0942e-01, 8.7236e-01, + 5.3086e-01, 1.7910e-01, 2.4322e-01, 2.2052e-02, + 4.1013e-01, 1.4191e-01, 3.4491e-01, 5.5417e-01, + 3.3288e-01, 3.1302e-01, 
2.3566e-01, 1.8459e-01, + 3.6159e-01, 4.8536e-01, 9.0908e-01, 6.3893e-02, + 7.7289e-02, 8.0569e-01, 8.2725e-01, 8.5490e-01, + 4.1791e-02, 4.0255e-01, 2.0672e-01, 9.7864e-01, + 3.6149e-01, 8.5159e-01, 9.6188e-01, 4.1123e-01, + 2.3421e-01, 6.1193e-01, 2.1097e-01, 2.7882e-01, + 5.5135e-01, 8.0230e-01, 3.1907e-01, 1.8198e-01, + 3.6314e-02, 6.2212e-01, 5.1175e-01, 6.7357e-01, + 3.7007e-02, 1.2024e-01, 9.1569e-01, 8.0422e-01, + 4.8728e-01, 3.3584e-01, 5.0960e-01, 3.6273e-01, + 6.1914e-01, 3.8996e-01, 5.9564e-01, 4.2090e-01, + 8.0087e-01, 6.1589e-01, 5.8173e-01, 6.2723e-01, + 6.9793e-01, 1.5997e-01, 1.5756e-01, 9.2695e-02, + 7.7600e-01, 5.7205e-01, 5.7808e-01, 5.9198e-02, + 8.2480e-01, 2.8009e-01, 8.4499e-02, 3.5912e-01, + 7.7211e-01, 1.0716e-01, 7.1486e-01, 5.0595e-01, + 6.5373e-02, 7.7498e-01, 9.1086e-03, 4.0288e-01, + 6.5420e-01, 2.4228e-01, 8.2790e-01, 1.6179e-02, + 4.5517e-01, 2.3154e-01, 4.0528e-01, 9.9680e-01, + 1.9648e-01, 8.9156e-01, 4.9073e-01, 6.1991e-01, + 5.2047e-01, 2.0881e-01, 9.4192e-01, 4.9260e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4825, 0.6787, 0.3927, ..., 0.9418, 0.0965, 0.0788]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.350545883178711 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1000, 1000, 1000]), + col_indices=tensor([5455, 5592, 4109, 222, 9693, 3577, 9334, 9406, 2137, + 1799, 7682, 5442, 9556, 3008, 6623, 6748, 1968, 1806, + 8890, 8613, 4077, 4885, 7962, 302, 1013, 8955, 2558, + 3711, 9681, 9593, 3121, 9869, 3177, 5155, 4286, 6364, + 2440, 3983, 2281, 3730, 740, 3324, 1186, 4197, 4183, + 3787, 1237, 618, 3728, 4305, 7249, 7969, 8150, 5074, + 336, 432, 1883, 5788, 1998, 9882, 8337, 8410, 6941, + 4025, 3080, 9432, 7607, 2625, 8188, 4417, 71, 2070, + 1396, 3989, 9431, 872, 4755, 1177, 5104, 5, 1671, + 5203, 8385, 3215, 9836, 5654, 6123, 9005, 9573, 3860, + 3248, 2894, 9896, 9991, 4949, 9, 1784, 5266, 2022, + 6095, 1517, 7556, 4941, 521, 2910, 5575, 7555, 1842, + 2590, 9377, 4113, 4907, 8259, 6209, 7070, 152, 6147, + 6757, 41, 774, 7469, 8152, 8924, 5911, 4839, 3386, + 4880, 5093, 7504, 578, 6341, 2413, 9223, 2943, 8245, + 3762, 1885, 3988, 7760, 6659, 2327, 8319, 4889, 2086, + 6582, 2999, 7225, 5620, 5397, 6166, 1192, 9054, 1944, + 8586, 7428, 4653, 1578, 8066, 4063, 6909, 8387, 9373, + 1779, 2964, 5753, 4702, 6011, 649, 4878, 6031, 3620, + 4368, 8038, 9222, 6809, 4045, 7946, 9720, 2447, 8709, + 5211, 3070, 483, 7264, 6025, 5336, 6583, 193, 3061, + 7753, 6937, 7842, 2405, 5951, 5344, 4719, 6714, 6154, + 7382, 3202, 1630, 3013, 8617, 6912, 3313, 4097, 2564, + 7916, 1190, 6815, 3429, 4803, 8583, 6446, 9482, 7512, + 5802, 3394, 6637, 5002, 8800, 9545, 1806, 7825, 5873, + 6547, 42, 3341, 4336, 2945, 8309, 6317, 8694, 7310, + 861, 6529, 9657, 2013, 7583, 4059, 8819, 8573, 3062, + 4530, 3219, 6965, 7043, 4000, 1751, 3453, 7507, 9158, + 8456, 5641, 2323, 894, 6849, 2354, 8414, 9263, 340, + 7205, 5325, 7515, 2661, 4262, 8481, 6503, 8559, 631, + 4284, 892, 3568, 2478, 5313, 8074, 1189, 3638, 1570, + 2900, 1226, 7729, 8931, 3902, 2531, 9721, 4451, 7093, + 6369, 3584, 8858, 
4594, 9711, 2895, 8279, 6635, 5468, + 1625, 2878, 183, 9364, 8049, 8688, 3754, 9500, 5156, + 8141, 1233, 6270, 9152, 9609, 8833, 9689, 6804, 6879, + 8482, 5351, 2008, 5339, 174, 2631, 2752, 9101, 4250, + 1959, 1601, 8705, 8321, 4156, 2212, 1758, 6719, 390, + 8271, 8873, 6936, 6913, 6183, 6416, 6740, 9148, 7993, + 6077, 7025, 1434, 5450, 4696, 8201, 4111, 5108, 3263, + 4376, 992, 3001, 3573, 2771, 1330, 3404, 1985, 1213, + 7105, 2969, 1853, 4254, 2574, 8578, 3726, 9824, 8756, + 9790, 4725, 6571, 1525, 4226, 397, 3148, 8143, 1081, + 9319, 954, 8043, 2951, 6714, 1546, 7498, 5572, 6847, + 373, 6338, 6704, 6136, 6912, 8385, 6773, 2897, 1857, + 7837, 5293, 883, 3465, 2333, 2706, 2793, 8339, 9030, + 170, 6025, 238, 5536, 326, 7382, 5214, 9691, 1226, + 6312, 9099, 7743, 9256, 7826, 3961, 7289, 2810, 2889, + 5530, 8793, 1729, 9330, 6285, 259, 2418, 2723, 7786, + 9499, 1444, 3444, 9403, 4786, 2961, 3253, 210, 3033, + 4287, 1941, 2567, 5596, 8952, 1045, 8409, 1209, 2289, + 4009, 2954, 8195, 2521, 9908, 6497, 2176, 8245, 1878, + 2833, 2015, 1191, 8989, 9949, 1659, 7998, 6449, 3287, + 4617, 3730, 4250, 2845, 7685, 2587, 570, 2759, 5601, + 1122, 4194, 6085, 3538, 2068, 3553, 7147, 4353, 8619, + 741, 5644, 3102, 8593, 5741, 717, 5287, 9540, 3750, + 9476, 1080, 2354, 3239, 9391, 2983, 3615, 7545, 9316, + 3217, 546, 2878, 3385, 4369, 7750, 7048, 7638, 5003, + 1954, 2932, 9977, 2825, 6294, 7412, 4947, 5551, 4606, + 6527, 8202, 9874, 3630, 9332, 3043, 9614, 4366, 4415, + 5789, 5831, 6056, 8765, 7112, 3900, 5169, 8287, 5622, + 6492, 3446, 6222, 3934, 6761, 6496, 7921, 3767, 4657, + 6468, 9740, 3922, 1388, 791, 996, 5882, 8931, 4041, + 1804, 3408, 873, 8854, 8857, 5197, 3222, 7923, 1837, + 6637, 5267, 8748, 7386, 9946, 9090, 7284, 9796, 1916, + 1676, 6417, 478, 4042, 9158, 4562, 6428, 4901, 9841, + 4156, 3811, 6129, 8100, 2410, 9667, 1059, 6471, 9086, + 7783, 436, 8399, 2026, 7724, 9135, 6834, 2224, 8238, + 4091, 7721, 9973, 6278, 3251, 4004, 2804, 6760, 1348, + 3105, 8920, 3080, 8489, 6412, 1945, 5275, 9361, 5992, + 4252, 5985, 14, 8664, 6186, 8914, 4367, 8021, 3430, + 7586, 569, 9236, 221, 6330, 387, 7069, 4231, 8633, + 1760, 5743, 1734, 3474, 4921, 5437, 5342, 1188, 3716, + 7244, 3921, 3117, 1899, 6123, 1541, 2426, 3487, 1543, + 8966, 4322, 9621, 5499, 9634, 197, 2016, 1179, 1742, + 603, 4446, 2127, 5502, 5419, 1800, 8843, 3410, 9280, + 6183, 5336, 3557, 1301, 2924, 2685, 4789, 2681, 8075, + 9074, 779, 9722, 2242, 1422, 9799, 190, 8813, 4955, + 9269, 8432, 360, 3105, 1117, 2106, 6923, 340, 3913, + 6338, 2743, 3141, 5508, 8419, 4462, 3804, 4854, 8563, + 27, 56, 4099, 9429, 7128, 3170, 1423, 1480, 4108, + 9263, 248, 6315, 5394, 2111, 8815, 9468, 1653, 6525, + 7824, 7918, 7021, 7163, 2918, 4892, 3181, 2452, 2941, + 8090, 124, 6939, 1996, 9469, 8469, 3564, 8562, 416, + 7462, 4270, 2521, 6664, 1132, 2808, 1882, 6008, 4243, + 8264, 72, 188, 8612, 5177, 8716, 575, 8437, 8572, + 272, 3041, 762, 93, 8887, 453, 2740, 5642, 1483, + 387, 8650, 4556, 7071, 2833, 2163, 2519, 8518, 2921, + 1296, 6818, 2707, 3507, 8598, 8802, 112, 2742, 5974, + 6565, 5489, 7784, 469, 1046, 8118, 2916, 1384, 4596, + 8660, 1524, 2862, 8341, 9259, 3914, 7327, 3943, 1127, + 6398, 6612, 8113, 2461, 8714, 9729, 8226, 6354, 9494, + 9498, 6160, 330, 9056, 6769, 4637, 540, 5583, 6515, + 9235, 1832, 1756, 9622, 3128, 5815, 6161, 5166, 2180, + 2553, 9617, 5271, 540, 669, 8109, 2118, 7870, 9305, + 5197, 8512, 8704, 2565, 8570, 3358, 8597, 6817, 1442, + 7822, 9580, 2286, 877, 5934, 4989, 9155, 9087, 5891, + 4023, 8446, 2014, 2362, 990, 1376, 5099, 
6917, 1513, + 4755, 9921, 9633, 586, 9793, 2424, 7385, 2711, 6971, + 8476, 3945, 2785, 4359, 7402, 6094, 3054, 5997, 6264, + 4973, 9403, 224, 8540, 1749, 1440, 9039, 4450, 6560, + 7985, 2950, 8212, 6558, 2305, 2992, 3067, 7181, 688, + 557, 3139, 1085, 2535, 8708, 4783, 1637, 2401, 5400, + 8152, 9595, 2332, 5036, 9866, 6137, 9544, 4606, 3463, + 5129, 4600, 2840, 4681, 5620, 3070, 9929, 3817, 6586, + 1810, 9677, 8838, 5997, 8061, 2182, 2092, 5426, 2012, + 4695, 4335, 5207, 18, 6447, 8196, 4896, 1724, 8190, + 6513, 7255, 7873, 632, 350, 9671, 2671, 6415, 9769, + 5192, 2244, 5805, 3331, 1110, 1188, 9979, 5220, 7760, + 7927, 860, 6526, 7297, 2539, 220, 4541, 1369, 5557, + 6832, 3456, 2993, 2123, 4095, 2625, 8888, 4611, 5854, + 494, 6448, 3694, 6940, 6717, 6857, 7774, 2832, 3690, + 2621]), + values=tensor([3.0392e-01, 3.2415e-01, 6.4353e-02, 7.3274e-01, + 3.2946e-01, 1.5879e-01, 9.2352e-02, 2.3222e-01, + 2.7476e-01, 5.5662e-01, 2.0841e-01, 4.3983e-01, + 4.6932e-01, 4.7844e-01, 6.0685e-01, 9.7693e-01, + 3.5238e-01, 4.6964e-01, 7.5140e-01, 3.1413e-01, + 6.6449e-02, 8.3856e-01, 5.5909e-01, 2.9668e-01, + 6.8550e-01, 8.4874e-01, 2.7284e-01, 6.4469e-01, + 5.5500e-01, 5.1334e-01, 1.1239e-01, 3.2908e-02, + 6.3958e-01, 9.5935e-01, 2.7353e-01, 6.6292e-01, + 3.1922e-01, 6.9750e-01, 5.5048e-01, 6.8061e-01, + 4.3532e-01, 7.7149e-01, 7.8764e-01, 6.0497e-01, + 9.6987e-02, 3.7830e-01, 8.7905e-01, 7.0427e-02, + 1.6845e-01, 8.8919e-01, 8.9750e-01, 1.9794e-01, + 5.5784e-01, 5.4874e-01, 9.3778e-02, 6.5393e-01, + 3.7119e-01, 2.3349e-01, 6.0309e-01, 4.8361e-01, + 5.1730e-01, 5.3303e-01, 8.8849e-01, 7.5067e-03, + 6.5848e-01, 7.7182e-01, 2.5538e-01, 9.6187e-01, + 3.6024e-01, 5.1765e-01, 2.1626e-02, 5.8628e-01, + 6.4821e-01, 2.7907e-01, 5.4479e-01, 9.4676e-01, + 2.6434e-01, 4.1497e-01, 1.2576e-01, 7.4574e-01, + 6.0185e-01, 6.4194e-01, 2.8693e-01, 3.0484e-01, + 6.4746e-01, 8.6023e-01, 7.7437e-01, 8.2817e-01, + 8.2911e-01, 6.5601e-01, 3.6870e-01, 3.0474e-01, + 9.7824e-01, 7.0873e-01, 7.5584e-01, 4.7182e-01, + 3.3010e-01, 1.0185e-02, 8.5565e-01, 6.5803e-01, + 2.1163e-01, 9.6445e-01, 3.5526e-01, 9.0210e-01, + 6.1257e-01, 3.5304e-01, 3.5164e-01, 8.1901e-01, + 9.3322e-01, 2.9058e-01, 5.5850e-01, 4.1175e-01, + 7.5611e-01, 5.2276e-01, 8.1503e-01, 9.5294e-01, + 5.9539e-01, 9.9769e-01, 2.3382e-01, 5.8700e-01, + 3.9790e-01, 1.0685e-01, 1.3325e-01, 1.5247e-02, + 4.9237e-01, 5.8495e-01, 6.7974e-01, 6.5205e-01, + 2.4978e-01, 1.5540e-01, 6.9466e-01, 9.8909e-01, + 6.7400e-01, 4.4045e-01, 8.0887e-01, 8.7366e-01, + 6.1470e-01, 6.6878e-01, 2.0722e-01, 5.6730e-01, + 9.6699e-01, 2.1420e-01, 5.0036e-01, 4.3882e-02, + 2.4509e-01, 9.6699e-01, 6.2712e-02, 8.0118e-01, + 7.0259e-01, 4.9349e-01, 2.3668e-01, 6.4690e-01, + 3.3297e-01, 8.1392e-01, 3.3370e-01, 3.6099e-01, + 6.9785e-01, 5.8653e-01, 2.3494e-01, 4.2606e-01, + 5.3776e-02, 2.9098e-01, 3.5190e-01, 8.5533e-01, + 3.9164e-01, 4.5423e-01, 3.2810e-02, 7.6592e-01, + 5.1452e-01, 5.8263e-01, 3.0590e-01, 4.6225e-01, + 8.9127e-01, 3.8718e-04, 3.6956e-01, 1.0716e-01, + 8.9555e-01, 5.1526e-01, 2.6090e-01, 5.4827e-01, + 7.6613e-01, 9.6451e-02, 1.6855e-01, 2.7123e-01, + 7.9078e-01, 7.0227e-01, 9.2769e-01, 3.2768e-01, + 7.4133e-02, 8.0175e-01, 8.9212e-01, 1.4596e-01, + 5.2250e-02, 7.1920e-01, 2.8915e-01, 7.1399e-01, + 3.9989e-01, 8.4612e-01, 7.6692e-01, 7.6603e-01, + 9.7955e-01, 8.5926e-01, 4.7268e-01, 3.0567e-01, + 3.5521e-01, 4.5346e-01, 7.0907e-01, 1.0000e-01, + 3.8556e-01, 7.4063e-01, 1.2875e-01, 5.0308e-01, + 6.8759e-01, 5.4430e-01, 9.2335e-01, 5.9479e-01, + 3.5215e-01, 2.5979e-01, 3.6195e-01, 
1.1209e-01, + 6.7558e-01, 3.6084e-01, 2.9372e-01, 7.9418e-02, + 8.0128e-01, 5.5807e-01, 6.3595e-01, 3.1372e-01, + 2.3848e-01, 1.4012e-02, 2.1033e-01, 5.1052e-01, + 6.6708e-01, 3.8104e-01, 6.2857e-01, 2.5671e-01, + 6.7301e-01, 6.4080e-01, 7.3818e-01, 6.5250e-01, + 7.2748e-01, 2.8088e-01, 4.3795e-01, 7.6139e-01, + 3.2002e-01, 1.0962e-01, 1.1736e-01, 1.1390e-01, + 8.8693e-01, 5.6804e-01, 5.4451e-01, 4.7759e-01, + 9.7875e-02, 8.1348e-02, 1.2472e-01, 6.8343e-01, + 7.6072e-01, 4.3782e-01, 9.4758e-01, 9.8629e-01, + 1.3619e-01, 8.9717e-01, 8.0717e-01, 5.1829e-01, + 6.6901e-01, 7.6695e-01, 3.5278e-01, 7.6203e-02, + 8.1739e-01, 2.6432e-02, 7.8358e-02, 7.6105e-01, + 6.0698e-01, 9.7534e-01, 1.0290e-01, 6.2350e-03, + 3.6916e-02, 9.6921e-01, 9.2309e-01, 3.6705e-01, + 4.1131e-01, 7.1992e-01, 4.8131e-01, 3.8551e-02, + 8.7653e-01, 4.2984e-01, 7.4999e-01, 5.9486e-01, + 2.0777e-01, 2.4797e-02, 7.2719e-01, 8.6476e-01, + 9.2557e-02, 6.6099e-01, 8.8421e-01, 9.9344e-01, + 5.9213e-01, 8.8296e-01, 4.4506e-01, 6.1979e-01, + 2.4620e-01, 6.4475e-01, 9.4222e-01, 4.3135e-01, + 6.9601e-01, 7.7456e-01, 9.3620e-01, 4.9096e-01, + 7.2207e-01, 6.4022e-01, 5.2574e-01, 8.2484e-01, + 5.7041e-01, 6.9043e-01, 2.4631e-02, 9.5777e-02, + 5.9238e-01, 3.0126e-01, 9.4882e-01, 3.7736e-01, + 4.4950e-01, 2.8003e-01, 1.1028e-01, 4.2071e-01, + 9.9009e-01, 5.0994e-01, 4.9474e-01, 7.2898e-01, + 4.3563e-01, 2.0331e-01, 6.0930e-01, 3.4882e-01, + 2.9900e-01, 6.1199e-01, 2.0308e-01, 1.3459e-01, + 5.6701e-01, 4.8437e-01, 6.0606e-01, 4.1922e-01, + 4.5665e-01, 4.1795e-02, 2.1442e-01, 8.5784e-03, + 1.2383e-01, 6.8451e-01, 8.2903e-01, 9.3818e-01, + 8.6183e-01, 9.2220e-01, 1.2146e-02, 9.4702e-01, + 7.2689e-01, 7.0124e-01, 5.2058e-01, 7.6183e-01, + 2.7320e-01, 6.4457e-01, 1.3569e-01, 3.2953e-01, + 1.9373e-01, 1.1614e-01, 6.8419e-01, 1.1889e-01, + 1.5054e-01, 6.8449e-01, 2.2163e-02, 3.3239e-01, + 5.3542e-01, 4.6539e-01, 5.7549e-01, 6.0063e-01, + 7.3725e-01, 7.7272e-01, 7.1549e-01, 8.3333e-02, + 2.5724e-01, 4.8954e-01, 5.4990e-01, 5.5515e-01, + 6.6187e-01, 1.4302e-01, 2.6241e-01, 1.3082e-01, + 1.0201e-01, 4.7238e-01, 8.0345e-01, 3.5296e-01, + 4.3307e-02, 5.1890e-01, 8.2623e-01, 7.8766e-01, + 9.6443e-01, 5.7328e-01, 9.6623e-01, 5.1756e-01, + 8.4229e-01, 8.6955e-01, 5.2500e-01, 2.4364e-01, + 4.6531e-04, 7.7566e-01, 2.3278e-01, 5.0290e-01, + 1.2741e-01, 7.4793e-01, 6.6397e-01, 8.4389e-01, + 7.1603e-01, 1.3434e-01, 9.1897e-01, 1.6605e-01, + 3.1924e-01, 8.7903e-01, 1.4216e-02, 1.4696e-01, + 7.2418e-01, 1.2448e-01, 1.1574e-01, 8.0022e-01, + 3.2231e-01, 5.5328e-01, 2.4152e-01, 4.0399e-01, + 1.2053e-01, 1.5238e-01, 5.0061e-01, 8.8357e-01, + 2.6656e-01, 3.4203e-01, 5.0313e-01, 9.2105e-01, + 5.4412e-01, 7.6757e-01, 1.4392e-01, 9.2549e-01, + 4.4630e-02, 1.0189e-01, 2.1381e-01, 6.3179e-01, + 1.4210e-01, 6.4822e-01, 4.4733e-02, 7.0778e-02, + 5.3670e-01, 6.7468e-01, 9.6249e-02, 1.4701e-01, + 6.7904e-01, 6.2977e-01, 4.8222e-01, 5.6410e-01, + 6.6069e-01, 8.2291e-01, 2.7086e-01, 1.3385e-02, + 3.8370e-02, 7.6000e-01, 1.2836e-01, 4.2271e-01, + 3.7971e-01, 4.0221e-01, 1.9058e-01, 1.3246e-02, + 9.7472e-01, 8.1468e-01, 9.5465e-01, 5.0494e-01, + 4.8024e-01, 8.6375e-01, 2.1211e-01, 7.4747e-01, + 8.8496e-01, 2.3040e-01, 2.1539e-01, 6.6296e-01, + 4.6006e-01, 7.6222e-01, 6.9519e-01, 2.5685e-01, + 1.2762e-01, 4.8623e-01, 8.5541e-01, 1.9816e-01, + 6.4360e-01, 5.6243e-01, 6.0436e-01, 9.6360e-01, + 7.3027e-01, 8.0053e-01, 9.3960e-02, 8.9196e-01, + 6.5344e-01, 5.7618e-01, 3.8071e-02, 9.8561e-01, + 3.9902e-01, 2.0152e-02, 8.4945e-01, 1.0773e-01, + 5.1144e-01, 7.1844e-01, 8.5285e-02, 
4.5100e-01, + 1.5098e-01, 2.6810e-01, 5.1885e-02, 6.6289e-01, + 5.9605e-01, 5.1952e-01, 3.2494e-01, 4.2823e-01, + 8.5842e-01, 6.0189e-01, 2.0347e-01, 9.8130e-01, + 1.8163e-01, 2.5564e-02, 1.8724e-02, 5.1201e-01, + 4.5720e-01, 7.9371e-01, 4.8374e-01, 4.3205e-01, + 7.7302e-01, 7.5530e-01, 9.7319e-01, 2.8166e-01, + 6.8553e-01, 9.0165e-01, 1.3726e-01, 6.1107e-01, + 9.6470e-01, 9.3457e-01, 1.6750e-01, 5.7026e-02, + 9.7853e-01, 4.8808e-01, 5.2986e-01, 4.4763e-01, + 2.1220e-01, 1.8968e-01, 6.6682e-01, 2.7978e-01, + 6.2518e-02, 1.7188e-01, 6.6203e-01, 9.5117e-01, + 2.6765e-01, 7.5161e-01, 4.2205e-01, 8.3213e-01, + 3.4401e-02, 5.5833e-01, 2.2728e-01, 8.6978e-01, + 5.0487e-01, 6.8315e-01, 8.8845e-01, 3.7450e-01, + 4.7156e-01, 2.0297e-01, 5.9919e-01, 8.7437e-01, + 1.8376e-01, 3.8162e-01, 3.8759e-01, 2.8332e-01, + 7.3703e-01, 8.2598e-01, 5.7822e-01, 8.1318e-01, + 6.0403e-01, 3.0036e-01, 8.6647e-01, 6.3278e-01, + 3.3144e-02, 8.3655e-02, 2.0403e-01, 6.3295e-01, + 5.5983e-01, 1.7687e-01, 7.9099e-01, 6.5995e-01, + 1.6364e-01, 8.0576e-01, 5.9218e-01, 3.7825e-01, + 7.3625e-01, 6.8133e-01, 1.5251e-01, 3.4541e-01, + 4.3200e-01, 4.7561e-01, 6.5115e-01, 4.9609e-01, + 4.9830e-01, 1.7116e-01, 8.1782e-01, 5.6982e-02, + 7.8582e-01, 4.1358e-01, 6.8337e-01, 8.8174e-01, + 2.7893e-02, 6.1252e-01, 6.8428e-01, 4.7886e-04, + 4.5579e-01, 2.6597e-01, 8.9291e-01, 4.7913e-01, + 5.1187e-02, 9.1252e-01, 5.2623e-01, 9.2323e-01, + 2.5712e-02, 7.4165e-01, 8.7147e-01, 1.1067e-01, + 3.3337e-01, 6.6053e-01, 8.1395e-02, 3.0634e-01, + 5.9633e-01, 6.1441e-01, 5.2337e-01, 9.4467e-01, + 1.6455e-01, 6.0027e-01, 9.9735e-01, 5.4964e-01, + 4.2544e-01, 5.2938e-01, 2.7487e-01, 5.2740e-01, + 7.2458e-01, 2.1872e-01, 2.3165e-02, 8.5565e-01, + 3.4327e-02, 8.8372e-01, 3.6038e-01, 5.2099e-01, + 7.8544e-01, 8.5584e-01, 2.0450e-01, 7.0439e-01, + 1.8946e-01, 5.2352e-01, 1.3840e-01, 7.9757e-01, + 6.4771e-01, 4.2226e-01, 3.3701e-01, 5.7625e-01, + 7.5119e-01, 6.0090e-01, 5.5169e-01, 2.6335e-01, + 2.5175e-01, 1.3511e-01, 7.5301e-01, 3.6857e-01, + 2.4815e-01, 9.7900e-01, 5.3734e-01, 2.5181e-01, + 9.6212e-01, 3.0052e-01, 2.3817e-01, 5.4727e-01, + 7.4985e-01, 4.0814e-01, 2.3324e-01, 9.0258e-01, + 9.2453e-01, 5.1688e-01, 6.8008e-01, 8.4417e-01, + 2.9048e-01, 3.5275e-01, 9.1446e-01, 3.1166e-01, + 9.3812e-02, 3.2061e-01, 3.8420e-01, 8.3599e-01, + 2.2761e-01, 7.4261e-01, 2.4781e-01, 8.3249e-01, + 6.2992e-01, 3.7271e-01, 6.4205e-01, 8.0600e-01, + 5.2952e-01, 8.6497e-03, 2.0012e-01, 6.7808e-01, + 7.6509e-01, 3.6618e-01, 3.6418e-01, 7.0343e-01, + 6.0658e-01, 1.8231e-01, 4.0747e-02, 7.2457e-01, + 7.5662e-01, 3.1029e-02, 1.9408e-01, 5.8483e-03, + 2.5497e-01, 2.7861e-01, 6.7215e-01, 9.8377e-01, + 3.9461e-01, 5.7729e-01, 7.4282e-01, 7.8487e-01, + 6.6966e-01, 8.0111e-01, 2.7436e-01, 9.1071e-01, + 7.5479e-01, 9.6961e-02, 5.9253e-01, 4.3539e-01, + 8.1993e-02, 1.4436e-01, 5.4192e-02, 4.5414e-01, + 2.2083e-01, 6.8883e-01, 3.0813e-03, 5.7122e-01, + 6.4824e-01, 6.1088e-01, 9.9103e-01, 3.0128e-01, + 2.5519e-01, 5.5098e-01, 7.7501e-01, 8.2747e-01, + 2.7382e-01, 1.6131e-01, 3.8473e-01, 1.8858e-01, + 7.6480e-01, 5.7925e-01, 3.7285e-01, 4.1565e-01, + 6.6174e-01, 7.9534e-01, 3.9512e-01, 3.1463e-02, + 2.2917e-01, 3.0596e-01, 2.7861e-03, 2.0807e-01, + 3.7628e-01, 6.8980e-01, 2.7128e-02, 6.5713e-01, + 4.9102e-01, 7.0889e-01, 4.5564e-02, 9.8297e-01, + 6.8338e-01, 2.7678e-03, 5.2548e-01, 9.3723e-01, + 6.9210e-01, 8.4505e-01, 5.3302e-01, 5.5013e-01, + 2.2889e-02, 8.7608e-02, 1.4028e-01, 4.6686e-01, + 6.4919e-01, 9.9436e-01, 7.3888e-01, 4.1538e-01, + 9.4718e-01, 9.8412e-01, 4.2306e-01, 
1.3232e-01, + 6.8812e-01, 2.0943e-01, 7.5932e-01, 1.2380e-01, + 6.2774e-01, 4.9894e-01, 3.0889e-01, 1.1004e-01, + 7.4553e-01, 8.5544e-01, 2.0482e-01, 7.2767e-01, + 2.0936e-02, 5.0479e-01, 5.7524e-01, 5.4774e-01, + 8.4315e-01, 5.2362e-01, 2.8163e-01, 5.1186e-01, + 8.0800e-01, 7.9337e-01, 3.6713e-01, 6.2638e-01, + 6.5095e-01, 4.0210e-01, 6.3034e-01, 5.3272e-01, + 3.4358e-01, 5.9924e-01, 1.9674e-01, 6.4351e-01, + 8.5855e-02, 2.3977e-01, 1.2607e-01, 7.8134e-01, + 9.6526e-02, 6.0975e-01, 4.6583e-01, 1.7783e-01, + 7.3707e-02, 8.4537e-01, 7.9093e-01, 4.1040e-02, + 2.5473e-01, 3.9781e-01, 1.3302e-01, 4.8382e-01, + 6.0215e-02, 7.9066e-01, 9.9902e-01, 9.0029e-01, + 5.6279e-01, 9.9788e-01, 7.8531e-01, 2.8619e-01, + 6.3489e-01, 1.5229e-01, 6.9891e-01, 7.3891e-01, + 9.9196e-01, 1.2942e-02, 1.0882e-01, 1.6395e-01, + 9.7799e-01, 9.8840e-01, 9.3287e-01, 9.7669e-01, + 9.3534e-01, 3.8769e-01, 5.8205e-01, 7.1933e-01, + 8.1893e-01, 2.3647e-02, 7.8599e-01, 3.5138e-01, + 8.8097e-01, 5.9339e-01, 8.1908e-01, 2.1259e-01, + 1.7916e-01, 8.3775e-01, 4.2342e-02, 1.7547e-01, + 4.9728e-01, 7.5815e-01, 6.1732e-01, 7.7818e-01, + 3.0544e-01, 7.8172e-01, 1.0312e-01, 9.2880e-01, + 4.3926e-01, 8.6426e-02, 2.2251e-01, 4.2217e-01, + 8.8595e-01, 8.2722e-02, 7.3624e-01, 5.9382e-02, + 4.9932e-01, 1.4760e-01, 4.7458e-02, 6.0080e-01, + 1.4431e-01, 2.9584e-01, 7.1655e-01, 8.9494e-01, + 7.0292e-01, 6.4015e-01, 3.6954e-01, 4.8047e-01, + 2.1881e-01, 1.9305e-01, 5.3317e-01, 1.5076e-01, + 4.3873e-01, 9.4135e-01, 1.0942e-01, 8.7236e-01, + 5.3086e-01, 1.7910e-01, 2.4322e-01, 2.2052e-02, + 4.1013e-01, 1.4191e-01, 3.4491e-01, 5.5417e-01, + 3.3288e-01, 3.1302e-01, 2.3566e-01, 1.8459e-01, + 3.6159e-01, 4.8536e-01, 9.0908e-01, 6.3893e-02, + 7.7289e-02, 8.0569e-01, 8.2725e-01, 8.5490e-01, + 4.1791e-02, 4.0255e-01, 2.0672e-01, 9.7864e-01, + 3.6149e-01, 8.5159e-01, 9.6188e-01, 4.1123e-01, + 2.3421e-01, 6.1193e-01, 2.1097e-01, 2.7882e-01, + 5.5135e-01, 8.0230e-01, 3.1907e-01, 1.8198e-01, + 3.6314e-02, 6.2212e-01, 5.1175e-01, 6.7357e-01, + 3.7007e-02, 1.2024e-01, 9.1569e-01, 8.0422e-01, + 4.8728e-01, 3.3584e-01, 5.0960e-01, 3.6273e-01, + 6.1914e-01, 3.8996e-01, 5.9564e-01, 4.2090e-01, + 8.0087e-01, 6.1589e-01, 5.8173e-01, 6.2723e-01, + 6.9793e-01, 1.5997e-01, 1.5756e-01, 9.2695e-02, + 7.7600e-01, 5.7205e-01, 5.7808e-01, 5.9198e-02, + 8.2480e-01, 2.8009e-01, 8.4499e-02, 3.5912e-01, + 7.7211e-01, 1.0716e-01, 7.1486e-01, 5.0595e-01, + 6.5373e-02, 7.7498e-01, 9.1086e-03, 4.0288e-01, + 6.5420e-01, 2.4228e-01, 8.2790e-01, 1.6179e-02, + 4.5517e-01, 2.3154e-01, 4.0528e-01, 9.9680e-01, + 1.9648e-01, 8.9156e-01, 4.9073e-01, 6.1991e-01, + 5.2047e-01, 2.0881e-01, 9.4192e-01, 4.9260e-01]), + size=(10000, 10000), nnz=1000, layout=torch.sparse_csr) +tensor([0.4825, 0.6787, 0.3927, ..., 0.9418, 0.0965, 0.0788]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 1000 +Density: 1e-05 +Time: 10.350545883178711 seconds + +[18.41, 22.21, 18.1, 18.25, 17.88, 18.06, 18.18, 18.5, 18.02, 17.84] +[73.05] +13.74599313735962 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 284305, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.350545883178711, 'TIME_S_1KI': 0.03640648558125503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1004.1447986841201, 'W': 73.05} +[18.41, 22.21, 18.1, 18.25, 17.88, 18.06, 18.18, 18.5, 18.02, 17.84, 18.25, 17.97, 18.0, 17.86, 18.03, 17.9, 18.06, 
17.99, 17.95, 18.62] +329.52 +16.476 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 284305, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 1000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.350545883178711, 'TIME_S_1KI': 0.03640648558125503, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1004.1447986841201, 'W': 73.05, 'J_1KI': 3.5319280304043903, 'W_1KI': 0.2569423682313009, 'W_D': 56.574, 'J_D': 777.6658157529831, 'W_D_1KI': 0.19899052074356766, 'J_D_1KI': 0.0006999191739278861} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.json new file mode 100644 index 0000000..3fe7967 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 264429, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.455932378768921, "TIME_S_1KI": 0.03954154944718212, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1008.905144047737, "W": 73.49, "J_1KI": 3.8154103522977323, "W_1KI": 0.2779195927829398, "W_D": 57.09475, "J_D": 783.8234722155332, "W_D_1KI": 0.21591712709271677, "J_D_1KI": 0.0008165410264861901} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.output new file mode 100644 index 0000000..39a6146 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_10000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.01993107795715332} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 4997, 4998, 5000]), + col_indices=tensor([9328, 8573, 6400, ..., 9443, 3853, 6322]), + values=tensor([0.9995, 0.8210, 0.4187, ..., 0.1342, 0.0596, 0.9033]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.6106, 0.0696, 0.9188, ..., 0.7595, 0.3313, 0.0671]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 0.01993107795715332 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52681', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 2.0918641090393066} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 4998, 4998, 5000]), + col_indices=tensor([2599, 3812, 3885, ..., 206, 4492, 5501]), + values=tensor([0.4008, 0.6783, 0.6051, ..., 0.8606, 0.8114, 0.5557]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.4634, 0.0164, 0.5073, ..., 0.3776, 0.2676, 0.0715]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 2.0918641090393066 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '264429', '-ss', '10000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [10000, 10000], "MATRIX_ROWS": 10000, "MATRIX_SIZE": 100000000, "MATRIX_NNZ": 5000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.455932378768921} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), + col_indices=tensor([3612, 1318, 1874, ..., 9308, 8111, 3978]), + values=tensor([0.0263, 0.8701, 0.1587, ..., 0.1521, 0.0294, 0.0367]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.4791, 0.9785, 0.1725, ..., 0.5898, 0.0968, 0.4149]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.455932378768921 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 5000, 5000, 5000]), + col_indices=tensor([3612, 1318, 1874, ..., 9308, 8111, 3978]), + values=tensor([0.0263, 0.8701, 0.1587, ..., 0.1521, 0.0294, 0.0367]), + size=(10000, 10000), nnz=5000, layout=torch.sparse_csr) +tensor([0.4791, 0.9785, 0.1725, ..., 0.5898, 0.0968, 0.4149]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([10000, 10000]) +Rows: 10000 +Size: 100000000 +NNZ: 5000 +Density: 5e-05 +Time: 10.455932378768921 seconds + +[19.02, 17.97, 18.1, 18.25, 17.93, 18.7, 18.16, 18.15, 18.22, 17.98] +[73.49] +13.728468418121338 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 264429, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.455932378768921, 'TIME_S_1KI': 0.03954154944718212, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1008.905144047737, 'W': 73.49} +[19.02, 17.97, 18.1, 18.25, 17.93, 18.7, 18.16, 18.15, 18.22, 17.98, 18.41, 18.7, 18.23, 17.8, 17.94, 18.94, 18.01, 18.04, 18.02, 18.08] +327.905 +16.395249999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 264429, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [10000, 10000], 'MATRIX_ROWS': 10000, 'MATRIX_SIZE': 100000000, 'MATRIX_NNZ': 5000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.455932378768921, 'TIME_S_1KI': 0.03954154944718212, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1008.905144047737, 'W': 73.49, 'J_1KI': 3.8154103522977323, 'W_1KI': 0.2779195927829398, 'W_D': 57.09475, 'J_D': 783.8234722155332, 'W_D_1KI': 0.21591712709271677, 'J_D_1KI': 0.0008165410264861901} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.json new file mode 100644 index 0000000..35c6334 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 693, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.433570146560669, "TIME_S_1KI": 15.055656777143822, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2886.4447816610336, "W": 54.5, "J_1KI": 4165.1439850808565, "W_1KI": 78.64357864357864, "W_D": 38.16675, "J_D": 2021.3984655130505, "W_D_1KI": 55.074675324675326, "J_D_1KI": 79.47283596634246} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.output new file mode 100644 index 0000000..f2efbf0 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_0.0001.output @@ -0,0 +1,89 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.8274271488189697} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta 
state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 57, 98, ..., 24999897, + 24999949, 25000000]), + col_indices=tensor([ 2496, 8518, 12544, ..., 449306, 467869, + 486714]), + values=tensor([0.2667, 0.8213, 0.8309, ..., 0.8074, 0.6926, 0.7796]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.0179, 0.7693, 0.5874, ..., 0.4128, 0.7472, 0.6195]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 1.8274271488189697 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '574', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 8.696394205093384} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 45, 88, ..., 24999889, + 24999948, 25000000]), + col_indices=tensor([ 21942, 37292, 56785, ..., 479111, 486535, + 489318]), + values=tensor([0.4753, 0.7614, 0.3285, ..., 0.3162, 0.0061, 0.9591]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.2582, 0.0329, 0.5597, ..., 0.0495, 0.5298, 0.4237]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 8.696394205093384 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '693', '-ss', '500000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.433570146560669} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 38, 84, ..., 24999899, + 24999948, 25000000]), + col_indices=tensor([ 30660, 43953, 94811, ..., 484319, 487924, + 499108]), + values=tensor([0.4696, 0.5982, 0.2681, ..., 0.1240, 0.7008, 0.8579]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3176, 0.2592, 0.6200, ..., 0.0886, 0.2852, 0.1534]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.433570146560669 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 38, 84, ..., 24999899, + 24999948, 25000000]), + col_indices=tensor([ 30660, 43953, 94811, ..., 484319, 487924, + 499108]), + values=tensor([0.4696, 0.5982, 0.2681, ..., 0.1240, 0.7008, 0.8579]), + size=(500000, 500000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.3176, 0.2592, 0.6200, ..., 0.0886, 0.2852, 0.1534]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 25000000 +Density: 0.0001 +Time: 10.433570146560669 seconds + +[18.16, 18.09, 17.95, 18.13, 17.83, 18.01, 18.2, 17.99, 18.23, 17.93] +[54.5] +52.962289571762085 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.433570146560669, 'TIME_S_1KI': 15.055656777143822, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2886.4447816610336, 'W': 54.5} +[18.16, 18.09, 17.95, 18.13, 17.83, 18.01, 18.2, 17.99, 18.23, 17.93, 18.48, 17.91, 18.03, 18.08, 18.03, 18.09, 18.34, 19.26, 18.16, 18.1] +326.66499999999996 +16.33325 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 693, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.433570146560669, 'TIME_S_1KI': 15.055656777143822, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2886.4447816610336, 'W': 54.5, 'J_1KI': 4165.1439850808565, 'W_1KI': 78.64357864357864, 'W_D': 38.16675, 'J_D': 2021.3984655130505, 'W_D_1KI': 55.074675324675326, 'J_D_1KI': 79.47283596634246} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json new file mode 100644 index 0000000..c977413 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 8054, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.420526504516602, "TIME_S_1KI": 1.293832444067122, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1257.315396785736, "W": 87.34, "J_1KI": 156.11067752492377, "W_1KI": 10.844300968462875, "W_D": 70.58375000000001, "J_D": 1016.0984158217908, "W_D_1KI": 8.763813012167867, "J_D_1KI": 1.0881317372942474} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output new file mode 100644 index 0000000..2324509 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_1e-05.output @@ -0,0 +1,89 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", 
"MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.1425309181213379} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 8, ..., 2499994, + 2499996, 2500000]), + col_indices=tensor([ 55676, 267462, 335220, ..., 59414, 387658, + 467981]), + values=tensor([0.3669, 0.3244, 0.1572, ..., 0.0615, 0.1330, 0.1317]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.2232, 0.7011, 0.4607, ..., 0.0263, 0.5751, 0.5574]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 0.1425309181213379 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '7366', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.601978540420532} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 17, ..., 2499992, + 2499998, 2500000]), + col_indices=tensor([ 78904, 197792, 264056, ..., 387862, 102468, + 277870]), + values=tensor([0.6219, 0.1671, 0.9871, ..., 0.4185, 0.9305, 0.2778]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0210, 0.3595, 0.6743, ..., 0.0036, 0.8741, 0.0347]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 9.601978540420532 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8054', '-ss', '500000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.420526504516602} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 7, ..., 2499986, + 2499993, 2500000]), + col_indices=tensor([136514, 185390, 204506, ..., 365577, 371722, + 449843]), + values=tensor([0.2143, 0.0864, 0.9943, ..., 0.6371, 0.6570, 0.8441]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0733, 0.2994, 0.4999, ..., 0.8006, 0.1699, 0.8850]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.420526504516602 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 7, ..., 2499986, + 2499993, 2500000]), + col_indices=tensor([136514, 185390, 204506, ..., 365577, 371722, + 449843]), + values=tensor([0.2143, 0.0864, 0.9943, ..., 0.6371, 0.6570, 0.8441]), + size=(500000, 500000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0733, 0.2994, 0.4999, ..., 0.8006, 0.1699, 0.8850]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 2500000 +Density: 1e-05 +Time: 10.420526504516602 seconds + +[18.47, 17.91, 18.18, 18.64, 18.09, 18.11, 18.17, 18.18, 22.06, 17.96] +[87.34] +14.395642280578613 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8054, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.420526504516602, 'TIME_S_1KI': 1.293832444067122, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1257.315396785736, 'W': 87.34} +[18.47, 17.91, 18.18, 18.64, 18.09, 18.11, 18.17, 18.18, 22.06, 17.96, 18.46, 18.6, 18.06, 19.85, 20.28, 18.18, 18.02, 17.82, 18.61, 17.84] +335.125 +16.75625 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 8054, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.420526504516602, 'TIME_S_1KI': 1.293832444067122, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1257.315396785736, 'W': 87.34, 'J_1KI': 156.11067752492377, 'W_1KI': 10.844300968462875, 'W_D': 70.58375000000001, 'J_D': 1016.0984158217908, 'W_D_1KI': 8.763813012167867, 'J_D_1KI': 1.0881317372942474} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.json new file mode 100644 index 0000000..72c367a --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1363, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.215147256851196, "TIME_S_1KI": 7.494605470910636, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1461.7694299888612, "W": 77.68, "J_1KI": 1072.4647322001917, "W_1KI": 56.99192956713133, "W_D": 61.057, "J_D": 1148.960557245493, 
"W_D_1KI": 44.796038151137196, "J_D_1KI": 32.865765334656786} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.output new file mode 100644 index 0000000..097beff --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_500000_5e-05.output @@ -0,0 +1,68 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '500000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.7700090408325195} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 24, 55, ..., 12499948, + 12499975, 12500000]), + col_indices=tensor([ 4417, 27723, 55822, ..., 442008, 448310, + 496598]), + values=tensor([0.6543, 0.3065, 0.8363, ..., 0.0104, 0.9892, 0.0257]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.5546, 0.9568, 0.6697, ..., 0.4050, 0.9738, 0.6596]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 0.7700090408325195 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1363', '-ss', '500000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [500000, 500000], "MATRIX_ROWS": 500000, "MATRIX_SIZE": 250000000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.215147256851196} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 32, 58, ..., 12499956, + 12499981, 12500000]), + col_indices=tensor([ 7736, 12237, 33305, ..., 443222, 470958, + 475326]), + values=tensor([0.9583, 0.2636, 0.0225, ..., 0.4084, 0.8296, 0.8114]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4848, 0.9833, 0.6868, ..., 0.4430, 0.1817, 0.0586]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.215147256851196 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 32, 58, ..., 12499956, + 12499981, 12500000]), + col_indices=tensor([ 7736, 12237, 33305, ..., 443222, 470958, + 475326]), + values=tensor([0.9583, 0.2636, 0.0225, ..., 0.4084, 0.8296, 0.8114]), + size=(500000, 500000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.4848, 0.9833, 0.6868, ..., 0.4430, 0.1817, 0.0586]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([500000, 500000]) +Rows: 500000 +Size: 250000000000 +NNZ: 12500000 +Density: 5e-05 +Time: 10.215147256851196 seconds + +[18.64, 18.68, 18.32, 18.11, 18.55, 17.92, 17.92, 19.0, 18.5, 17.99] +[77.68] +18.817835092544556 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.215147256851196, 'TIME_S_1KI': 7.494605470910636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1461.7694299888612, 'W': 77.68} +[18.64, 18.68, 18.32, 18.11, 18.55, 17.92, 17.92, 19.0, 18.5, 17.99, 18.42, 18.14, 18.35, 18.41, 18.1, 18.07, 18.11, 21.75, 17.97, 18.07] +332.46000000000004 +16.623 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1363, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [500000, 500000], 'MATRIX_ROWS': 500000, 'MATRIX_SIZE': 250000000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.215147256851196, 'TIME_S_1KI': 7.494605470910636, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1461.7694299888612, 'W': 77.68, 'J_1KI': 1072.4647322001917, 'W_1KI': 56.99192956713133, 'W_D': 61.057, 'J_D': 1148.960557245493, 'W_D_1KI': 44.796038151137196, 'J_D_1KI': 32.865765334656786} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json new file mode 100644 index 0000000..393ceed --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 80365, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.785846710205078, "TIME_S_1KI": 0.1342107473428119, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1142.0150223541261, "W": 83.68, "J_1KI": 14.210353043664854, "W_1KI": 1.041249300068438, "W_D": 67.33675000000001, "J_D": 918.9720370041133, "W_D_1KI": 0.8378865177627077, "J_D_1KI": 0.01042601278868547} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output new file mode 100644 index 0000000..ace565c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.0001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.031140804290771484} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: 
UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 10, ..., 249988, 249996, + 250000]), + col_indices=tensor([ 3938, 11827, 18410, ..., 25331, 39292, 43613]), + values=tensor([0.0696, 0.4480, 0.5528, ..., 0.5051, 0.2445, 0.3952]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.1610, 0.2872, 0.6159, ..., 0.3941, 0.7048, 0.2210]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 0.031140804290771484 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '33717', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 4.405243873596191} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 8, 16, ..., 249992, 249993, + 250000]), + col_indices=tensor([ 4798, 5191, 8748, ..., 31389, 41207, 45142]), + values=tensor([0.4595, 0.5274, 0.3770, ..., 0.1787, 0.1620, 0.3761]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.7848, 0.4939, 0.9259, ..., 0.5597, 0.9743, 0.2454]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 4.405243873596191 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '80365', '-ss', '50000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.785846710205078} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 6, ..., 249987, 249993, + 250000]), + col_indices=tensor([ 3036, 18920, 36950, ..., 33008, 35825, 42083]), + values=tensor([0.1216, 0.1218, 0.6996, ..., 0.2614, 0.2596, 0.0475]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0022, 0.5368, 0.2941, ..., 0.2951, 0.8515, 0.2587]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.785846710205078 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 6, ..., 249987, 249993, + 250000]), + col_indices=tensor([ 3036, 18920, 36950, ..., 33008, 35825, 42083]), + values=tensor([0.1216, 0.1218, 0.6996, ..., 0.2614, 0.2596, 0.0475]), + size=(50000, 50000), nnz=250000, layout=torch.sparse_csr) +tensor([0.0022, 0.5368, 0.2941, ..., 0.2951, 0.8515, 0.2587]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 250000 +Density: 0.0001 +Time: 10.785846710205078 seconds + +[18.57, 17.87, 18.3, 17.99, 18.1, 18.03, 18.33, 18.16, 18.09, 17.89] +[83.68] +13.647407054901123 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 80365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.785846710205078, 'TIME_S_1KI': 0.1342107473428119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1142.0150223541261, 'W': 83.68} +[18.57, 17.87, 18.3, 17.99, 18.1, 18.03, 18.33, 18.16, 18.09, 17.89, 18.23, 17.91, 18.25, 19.1, 18.2, 17.88, 18.14, 18.11, 18.11, 17.9] +326.865 +16.34325 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 80365, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.785846710205078, 'TIME_S_1KI': 0.1342107473428119, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1142.0150223541261, 'W': 83.68, 'J_1KI': 14.210353043664854, 'W_1KI': 1.041249300068438, 'W_D': 67.33675000000001, 'J_D': 918.9720370041133, 'W_D_1KI': 0.8378865177627077, 'J_D_1KI': 0.01042601278868547} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json new file mode 100644 index 0000000..3d88fb8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 17258, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.52219271659851, "TIME_S_1KI": 0.6096994273147822, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1290.1128985500336, "W": 87.73, "J_1KI": 74.75448479256191, "W_1KI": 5.083439564260054, "W_D": 71.3875, "J_D": 1049.7883796334268, "W_D_1KI": 4.136487426121219, "J_D_1KI": 0.23968521416857222} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output new file mode 100644 index 0000000..c067392 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.001.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, 
"MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.07522106170654297} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 54, 95, ..., 2499899, + 2499951, 2500000]), + col_indices=tensor([ 245, 316, 650, ..., 46425, 47933, 49262]), + values=tensor([0.3598, 0.8732, 0.2112, ..., 0.2076, 0.2855, 0.8514]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6227, 0.9680, 0.6855, ..., 0.8971, 0.9917, 0.2060]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 0.07522106170654297 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '13958', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 8.492224931716919} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 97, ..., 2499905, + 2499950, 2500000]), + col_indices=tensor([ 3061, 5035, 6476, ..., 48999, 49661, 49813]), + values=tensor([0.5243, 0.0379, 0.3507, ..., 0.2954, 0.9764, 0.8519]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.8088, 0.8530, 0.3688, ..., 0.5327, 0.1148, 0.7333]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 8.492224931716919 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '17258', '-ss', '50000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.52219271659851} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 60, 98, ..., 2499898, + 2499955, 2500000]), + col_indices=tensor([ 336, 813, 2467, ..., 44805, 45101, 46338]), + values=tensor([0.1989, 0.3364, 0.1097, ..., 0.3897, 0.4637, 0.0665]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6060, 0.7426, 0.0681, ..., 0.1484, 0.6293, 0.7345]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.52219271659851 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 60, 98, ..., 2499898, + 2499955, 2500000]), + col_indices=tensor([ 336, 813, 2467, ..., 44805, 45101, 46338]), + values=tensor([0.1989, 0.3364, 0.1097, ..., 0.3897, 0.4637, 0.0665]), + size=(50000, 50000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.6060, 0.7426, 0.0681, ..., 0.1484, 0.6293, 0.7345]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 2500000 +Density: 0.001 +Time: 10.52219271659851 seconds + +[18.24, 17.95, 18.03, 17.9, 18.31, 18.2, 18.96, 17.99, 17.74, 18.24] +[87.73] +14.705492973327637 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17258, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.52219271659851, 'TIME_S_1KI': 0.6096994273147822, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1290.1128985500336, 'W': 87.73} +[18.24, 17.95, 18.03, 17.9, 18.31, 18.2, 18.96, 17.99, 17.74, 18.24, 18.79, 18.06, 18.38, 18.13, 18.12, 18.05, 18.08, 18.09, 18.27, 17.91] +326.85 +16.3425 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 17258, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.52219271659851, 'TIME_S_1KI': 0.6096994273147822, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1290.1128985500336, 'W': 87.73, 'J_1KI': 74.75448479256191, 'W_1KI': 5.083439564260054, 'W_D': 71.3875, 'J_D': 1049.7883796334268, 'W_D_1KI': 4.136487426121219, 'J_D_1KI': 0.23968521416857222} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.json new file mode 100644 index 0000000..00dd221 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 1116, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.347081184387207, "TIME_S_1KI": 9.27157812221076, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 2825.247347984314, "W": 53.89, "J_1KI": 2531.5836451472346, "W_1KI": 48.288530465949826, "W_D": 37.37625, "J_D": 1959.4943624067305, "W_D_1KI": 33.491263440860216, "J_D_1KI": 30.010092688942844} diff 
--git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.output new file mode 100644 index 0000000..1b20dcd --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_0.01.output @@ -0,0 +1,86 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 1.2946875095367432} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 488, 979, ..., 24999008, + 24999504, 25000000]), + col_indices=tensor([ 111, 301, 401, ..., 49766, 49891, 49958]), + values=tensor([2.3418e-01, 9.8131e-01, 8.1298e-03, ..., + 6.0106e-01, 4.2789e-04, 8.6966e-01]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.1063, 0.9255, 0.6653, ..., 0.3278, 0.7920, 0.4701]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 1.2946875095367432 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '811', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 7.6258299350738525} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 521, 1028, ..., 24999005, + 24999496, 25000000]), + col_indices=tensor([ 169, 333, 382, ..., 49620, 49646, 49746]), + values=tensor([0.1336, 0.2367, 0.6093, ..., 0.7411, 0.2218, 0.9154]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.7321, 0.9416, 0.5259, ..., 0.9099, 0.7583, 0.2580]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 7.6258299350738525 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '1116', '-ss', '50000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.347081184387207} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 467, 956, ..., 24998994, + 24999500, 25000000]), + col_indices=tensor([ 21, 163, 165, ..., 49855, 49860, 49938]), + values=tensor([0.3050, 0.9077, 0.0930, ..., 0.0680, 0.0415, 0.3010]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.5141, 0.6637, 0.6626, ..., 0.2332, 0.8993, 0.6899]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.347081184387207 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 467, 956, ..., 24998994, + 24999500, 25000000]), + col_indices=tensor([ 21, 163, 165, ..., 49855, 49860, 49938]), + values=tensor([0.3050, 0.9077, 0.0930, ..., 0.0680, 0.0415, 0.3010]), + size=(50000, 50000), nnz=25000000, layout=torch.sparse_csr) +tensor([0.5141, 0.6637, 0.6626, ..., 0.2332, 0.8993, 0.6899]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000000 +Density: 0.01 +Time: 10.347081184387207 seconds + +[18.41, 17.99, 18.33, 18.02, 17.96, 17.91, 21.56, 18.23, 18.08, 17.76] +[53.89] +52.42618942260742 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.347081184387207, 'TIME_S_1KI': 9.27157812221076, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2825.247347984314, 'W': 53.89} +[18.41, 17.99, 18.33, 18.02, 17.96, 17.91, 21.56, 18.23, 18.08, 17.76, 18.54, 18.29, 18.11, 17.98, 18.09, 18.71, 18.38, 18.01, 18.26, 18.02] +330.27500000000003 +16.51375 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 1116, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.347081184387207, 'TIME_S_1KI': 9.27157812221076, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 2825.247347984314, 'W': 53.89, 'J_1KI': 2531.5836451472346, 'W_1KI': 48.288530465949826, 'W_D': 37.37625, 'J_D': 1959.4943624067305, 'W_D_1KI': 33.491263440860216, 'J_D_1KI': 30.010092688942844} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json new file mode 100644 index 0000000..3827350 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 114513, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.690773725509644, "TIME_S_1KI": 0.09335860317614283, 
"BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1104.0986349916457, "W": 76.13, "J_1KI": 9.641688148870832, "W_1KI": 0.6648153484757189, "W_D": 59.82149999999999, "J_D": 867.5796202962398, "W_D_1KI": 0.5223992035838725, "J_D_1KI": 0.004561920511940762} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output new file mode 100644 index 0000000..1524e8c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_1e-05.output @@ -0,0 +1,100 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.025936126708984375} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 25000, 25000, 25000]), + col_indices=tensor([20193, 9953, 42880, ..., 12429, 7497, 42914]), + values=tensor([0.2197, 0.8269, 0.1857, ..., 0.3505, 0.1509, 0.9771]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.1918, 0.1249, 0.1152, ..., 0.8034, 0.5794, 0.7689]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 0.025936126708984375 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '40484', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 3.938389301300049} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 2, ..., 24997, 24998, 25000]), + col_indices=tensor([10460, 10153, 3528, ..., 24271, 7757, 10191]), + values=tensor([0.9506, 0.4415, 0.5851, ..., 0.4538, 0.6997, 0.3353]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8879, 0.8298, 0.7702, ..., 0.5204, 0.0041, 0.9281]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 3.938389301300049 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '107932', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 9.896521091461182} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1, 1, ..., 24999, 24999, 25000]), + col_indices=tensor([28383, 18616, 44948, ..., 18982, 36427, 31817]), + values=tensor([0.7119, 0.6692, 0.1695, ..., 0.9276, 0.2360, 0.8672]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.3408, 0.5698, 0.4224, ..., 0.7704, 0.6104, 0.7559]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 9.896521091461182 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '114513', '-ss', '50000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.690773725509644} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([ 6936, 24119, 42629, ..., 33437, 2193, 4338]), + values=tensor([0.2031, 0.4334, 0.3517, ..., 0.1037, 0.1982, 0.4022]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0121, 0.3083, 0.6972, ..., 0.1499, 0.8195, 0.8871]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.690773725509644 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 25000, 25000, 25000]), + col_indices=tensor([ 6936, 24119, 42629, ..., 33437, 2193, 4338]), + values=tensor([0.2031, 0.4334, 0.3517, ..., 0.1037, 0.1982, 0.4022]), + size=(50000, 50000), nnz=25000, layout=torch.sparse_csr) +tensor([0.0121, 0.3083, 0.6972, ..., 0.1499, 0.8195, 0.8871]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 25000 +Density: 1e-05 +Time: 10.690773725509644 seconds + +[18.43, 18.05, 18.18, 17.94, 17.98, 18.17, 18.18, 17.96, 18.08, 18.28] +[76.13] +14.502806186676025 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 114513, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.690773725509644, 'TIME_S_1KI': 0.09335860317614283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1104.0986349916457, 'W': 76.13} +[18.43, 18.05, 18.18, 17.94, 17.98, 18.17, 18.18, 17.96, 18.08, 18.28, 18.27, 17.98, 18.37, 18.47, 18.2, 17.9, 18.1, 18.12, 18.02, 17.96] +326.17 +16.308500000000002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 114513, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.690773725509644, 'TIME_S_1KI': 0.09335860317614283, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1104.0986349916457, 'W': 76.13, 'J_1KI': 9.641688148870832, 'W_1KI': 0.6648153484757189, 'W_D': 59.82149999999999, 'J_D': 867.5796202962398, 'W_D_1KI': 0.5223992035838725, 'J_D_1KI': 0.004561920511940762} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.json new file mode 100644 index 0000000..7635079 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 87123, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.505861759185791, "TIME_S_1KI": 0.12058654728585783, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1083.1826461410521, "W": 80.24, "J_1KI": 12.43279783915903, "W_1KI": 0.9209967517188342, "W_D": 63.674249999999994, "J_D": 859.5568619896172, "W_D_1KI": 0.7308546537653662, "J_D_1KI": 0.00838876822154157} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.output new file mode 100644 index 0000000..089ad96 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_50000_5e-05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.029273509979248047} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support 
is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 5, 7, ..., 124995, 124998, + 125000]), + col_indices=tensor([ 551, 18742, 22548, ..., 32794, 16422, 37041]), + values=tensor([0.7421, 0.1633, 0.4685, ..., 0.4955, 0.1690, 0.9373]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.5671, 0.1718, 0.7006, ..., 0.0603, 0.4651, 0.4190]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 0.029273509979248047 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '35868', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 4.322763681411743} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 6, 7, ..., 124993, 124996, + 125000]), + col_indices=tensor([10292, 17103, 19384, ..., 21480, 22459, 30474]), + values=tensor([0.3992, 0.1013, 0.2691, ..., 0.1460, 0.2288, 0.5075]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.9479, 0.9531, 0.4260, ..., 0.3198, 0.2541, 0.8697]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 4.322763681411743 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '87123', '-ss', '50000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [50000, 50000], "MATRIX_ROWS": 50000, "MATRIX_SIZE": 2500000000, "MATRIX_NNZ": 125000, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.505861759185791} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 124993, 124998, + 125000]), + col_indices=tensor([ 2935, 11122, 10966, ..., 49613, 4331, 15007]), + values=tensor([0.3574, 0.4392, 0.6710, ..., 0.1033, 0.1015, 0.5102]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.0826, 0.0461, 0.6873, ..., 0.1063, 0.7746, 0.5118]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.505861759185791 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 2, ..., 124993, 124998, + 125000]), + col_indices=tensor([ 2935, 11122, 10966, ..., 49613, 4331, 15007]), + values=tensor([0.3574, 0.4392, 0.6710, ..., 0.1033, 0.1015, 0.5102]), + size=(50000, 50000), nnz=125000, layout=torch.sparse_csr) +tensor([0.0826, 0.0461, 0.6873, ..., 0.1063, 0.7746, 0.5118]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([50000, 50000]) +Rows: 50000 +Size: 2500000000 +NNZ: 125000 +Density: 5e-05 +Time: 10.505861759185791 seconds + +[18.58, 18.04, 18.0, 17.91, 17.9, 20.59, 18.05, 18.07, 17.88, 18.18] +[80.24] +13.499285221099854 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 87123, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.505861759185791, 'TIME_S_1KI': 0.12058654728585783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1083.1826461410521, 'W': 80.24} +[18.58, 18.04, 18.0, 17.91, 17.9, 20.59, 18.05, 18.07, 17.88, 18.18, 18.74, 21.37, 18.26, 18.13, 18.12, 17.84, 18.06, 17.91, 18.35, 18.17] +331.315 +16.56575 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 87123, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [50000, 50000], 'MATRIX_ROWS': 50000, 'MATRIX_SIZE': 2500000000, 'MATRIX_NNZ': 125000, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.505861759185791, 'TIME_S_1KI': 0.12058654728585783, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1083.1826461410521, 'W': 80.24, 'J_1KI': 12.43279783915903, 'W_1KI': 0.9209967517188342, 'W_D': 63.674249999999994, 'J_D': 859.5568619896172, 'W_D_1KI': 0.7308546537653662, 'J_D_1KI': 0.00838876822154157} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json new file mode 100644 index 0000000..c76a143 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 334626, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.109118700027466, "TIME_S_1KI": 0.030210200940833844, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1025.1804789018631, "W": 73.29, "J_1KI": 3.063660561049838, "W_1KI": 0.219020637965968, "W_D": 56.847750000000005, "J_D": 795.1862951220274, "W_D_1KI": 0.16988443814885876, "J_D_1KI": 0.0005076845139016656} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output new file mode 100644 index 0000000..81b5928 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.0001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, 
"MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 0.020055532455444336} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2499, 2500, 2500]), + col_indices=tensor([2122, 3396, 4900, ..., 3006, 1251, 2017]), + values=tensor([0.1868, 0.0259, 0.3673, ..., 0.0909, 0.5358, 0.3608]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.5194, 0.1996, 0.4802, ..., 0.7962, 0.4168, 0.5561]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 0.020055532455444336 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '52354', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 1.6427783966064453} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2498, 2499, 2500]), + col_indices=tensor([2755, 4785, 642, ..., 761, 1671, 4009]), + values=tensor([0.6216, 0.3711, 0.5927, ..., 0.4412, 0.8122, 0.2675]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.4766, 0.5997, 0.2696, ..., 0.3490, 0.2681, 0.0383]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 1.6427783966064453 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '334626', '-ss', '5000', '-sd', '0.0001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500, "MATRIX_DENSITY": 0.0001, "TIME_S": 10.109118700027466} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([3166, 2984, 4242, ..., 1801, 191, 2968]), + values=tensor([0.8061, 0.6429, 0.9344, ..., 0.0259, 0.3545, 0.7535]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.9083, 0.2521, 0.7196, ..., 0.0784, 0.7825, 0.2390]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.109118700027466 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 2500, 2500, 2500]), + col_indices=tensor([3166, 2984, 4242, ..., 1801, 191, 2968]), + values=tensor([0.8061, 0.6429, 0.9344, ..., 0.0259, 0.3545, 0.7535]), + size=(5000, 5000), nnz=2500, layout=torch.sparse_csr) +tensor([0.9083, 0.2521, 0.7196, ..., 0.0784, 0.7825, 0.2390]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500 +Density: 0.0001 +Time: 10.109118700027466 seconds + +[20.29, 18.13, 18.23, 18.27, 18.36, 17.99, 18.37, 18.01, 18.07, 17.87] +[73.29] +13.987999439239502 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 334626, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.109118700027466, 'TIME_S_1KI': 0.030210200940833844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1025.1804789018631, 'W': 73.29} +[20.29, 18.13, 18.23, 18.27, 18.36, 17.99, 18.37, 18.01, 18.07, 17.87, 18.97, 18.06, 18.34, 18.0, 18.27, 18.07, 18.01, 18.54, 18.55, 18.02] +328.845 +16.44225 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 334626, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500, 'MATRIX_DENSITY': 0.0001, 'TIME_S': 10.109118700027466, 'TIME_S_1KI': 0.030210200940833844, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1025.1804789018631, 'W': 73.29, 'J_1KI': 3.063660561049838, 'W_1KI': 0.219020637965968, 'W_D': 56.847750000000005, 'J_D': 795.1862951220274, 'W_D_1KI': 0.16988443814885876, 'J_D_1KI': 0.0005076845139016656} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json new file mode 100644 index 0000000..417052f --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 248882, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.432960033416748, "TIME_S_1KI": 0.041919303257836035, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1049.6097160863876, "W": 75.02, "J_1KI": 4.2172986237911445, "W_1KI": 0.3014279859531826, "W_D": 58.4725, "J_D": 818.0925636345148, "W_D_1KI": 0.2349406546074043, "J_D_1KI": 0.0009439841153936576} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output new file mode 100644 index 0000000..121e30b --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.001.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 
25000, "MATRIX_DENSITY": 0.001, "TIME_S": 0.018964052200317383} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 14, ..., 24990, 24995, 25000]), + col_indices=tensor([ 484, 2538, 3016, ..., 2694, 4380, 4909]), + values=tensor([0.3483, 0.1743, 0.1939, ..., 0.2265, 0.8602, 0.7977]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.6164, 0.4905, 0.7241, ..., 0.3672, 0.4239, 0.9077]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 0.018964052200317383 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '55367', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 2.335857391357422} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 4, 9, ..., 24986, 24994, 25000]), + col_indices=tensor([1163, 1240, 1422, ..., 4522, 4571, 4830]), + values=tensor([0.2729, 0.7937, 0.6768, ..., 0.2019, 0.8649, 0.0759]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.9266, 0.1779, 0.1408, ..., 0.8566, 0.7762, 0.0695]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 2.335857391357422 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '248882', '-ss', '5000', '-sd', '0.001'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 25000, "MATRIX_DENSITY": 0.001, "TIME_S": 10.432960033416748} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 15, ..., 24986, 24991, 25000]), + col_indices=tensor([ 131, 267, 600, ..., 3068, 3643, 3839]), + values=tensor([0.7605, 0.7816, 0.2401, ..., 0.7557, 0.2099, 0.5290]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8976, 0.4401, 0.6879, ..., 0.0741, 0.3573, 0.5052]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.432960033416748 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 10, 15, ..., 24986, 24991, 25000]), + col_indices=tensor([ 131, 267, 600, ..., 3068, 3643, 3839]), + values=tensor([0.7605, 0.7816, 0.2401, ..., 0.7557, 0.2099, 0.5290]), + size=(5000, 5000), nnz=25000, layout=torch.sparse_csr) +tensor([0.8976, 0.4401, 0.6879, ..., 0.0741, 0.3573, 0.5052]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 25000 +Density: 0.001 +Time: 10.432960033416748 seconds + +[18.55, 18.1, 18.04, 18.77, 18.18, 18.14, 18.08, 18.33, 18.19, 18.07] +[75.02] +13.991065263748169 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 248882, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.432960033416748, 'TIME_S_1KI': 0.041919303257836035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.6097160863876, 'W': 75.02} +[18.55, 18.1, 18.04, 18.77, 18.18, 18.14, 18.08, 18.33, 18.19, 18.07, 18.74, 17.95, 18.05, 18.34, 18.14, 19.14, 18.15, 18.14, 20.44, 18.18] +330.95 +16.5475 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 248882, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 25000, 'MATRIX_DENSITY': 0.001, 'TIME_S': 10.432960033416748, 'TIME_S_1KI': 0.041919303257836035, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1049.6097160863876, 'W': 75.02, 'J_1KI': 4.2172986237911445, 'W_1KI': 0.3014279859531826, 'W_D': 58.4725, 'J_D': 818.0925636345148, 'W_D_1KI': 0.2349406546074043, 'J_D_1KI': 0.0009439841153936576} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json new file mode 100644 index 0000000..e80d2db --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 163647, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.69955587387085, "TIME_S_1KI": 0.06538192495964393, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1158.7786533069611, "W": 84.04, "J_1KI": 7.080964840827887, "W_1KI": 0.5135443973919473, "W_D": 67.71475000000001, "J_D": 933.6792814614178, "W_D_1KI": 0.41378546505588254, "J_D_1KI": 0.002528524599020346} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output new file mode 100644 index 0000000..483c400 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.01.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, 
"MATRIX_DENSITY": 0.01, "TIME_S": 0.021794557571411133} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 52, 109, ..., 249903, 249954, + 250000]), + col_indices=tensor([ 37, 94, 126, ..., 4726, 4735, 4938]), + values=tensor([0.6014, 0.9404, 0.8499, ..., 0.7854, 0.8553, 0.6608]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8748, 0.2409, 0.9137, ..., 0.2396, 0.5569, 0.4924]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 0.021794557571411133 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '48177', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 3.0911431312561035} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 42, 103, ..., 249895, 249947, + 250000]), + col_indices=tensor([ 128, 146, 225, ..., 4883, 4900, 4933]), + values=tensor([0.9316, 0.8049, 0.8923, ..., 0.5892, 0.9266, 0.8295]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.8118, 0.4201, 0.8184, ..., 0.0833, 0.7283, 0.6165]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 3.0911431312561035 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '163647', '-ss', '5000', '-sd', '0.01'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250000, "MATRIX_DENSITY": 0.01, "TIME_S": 10.69955587387085} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 65, 110, ..., 249906, 249948, + 250000]), + col_indices=tensor([ 32, 165, 212, ..., 4365, 4391, 4539]), + values=tensor([0.5399, 0.4522, 0.1183, ..., 0.3103, 0.6929, 0.7632]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3241, 0.6966, 0.4101, ..., 0.4425, 0.6108, 0.0322]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.69955587387085 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 65, 110, ..., 249906, 249948, + 250000]), + col_indices=tensor([ 32, 165, 212, ..., 4365, 4391, 4539]), + values=tensor([0.5399, 0.4522, 0.1183, ..., 0.3103, 0.6929, 0.7632]), + size=(5000, 5000), nnz=250000, layout=torch.sparse_csr) +tensor([0.3241, 0.6966, 0.4101, ..., 0.4425, 0.6108, 0.0322]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250000 +Density: 0.01 +Time: 10.69955587387085 seconds + +[18.19, 18.02, 18.18, 18.28, 17.94, 18.08, 18.33, 18.44, 17.82, 18.2] +[84.04] +13.788418054580688 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 163647, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.69955587387085, 'TIME_S_1KI': 0.06538192495964393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1158.7786533069611, 'W': 84.04} +[18.19, 18.02, 18.18, 18.28, 17.94, 18.08, 18.33, 18.44, 17.82, 18.2, 18.17, 18.08, 18.2, 17.87, 17.92, 18.23, 18.75, 17.8, 18.09, 18.39] +326.505 +16.32525 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 163647, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250000, 'MATRIX_DENSITY': 0.01, 'TIME_S': 10.69955587387085, 'TIME_S_1KI': 0.06538192495964393, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1158.7786533069611, 'W': 84.04, 'J_1KI': 7.080964840827887, 'W_1KI': 0.5135443973919473, 'W_D': 67.71475000000001, 'J_D': 933.6792814614178, 'W_D_1KI': 0.41378546505588254, 'J_D_1KI': 0.002528524599020346} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json new file mode 100644 index 0000000..868cce2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 46354, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.460933923721313, "TIME_S_1KI": 0.22567489156753062, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1249.411584854126, "W": 87.62, "J_1KI": 26.95369514721763, "W_1KI": 1.8902360098373387, "W_D": 71.2095, "J_D": 1015.4071473598481, "W_D_1KI": 1.5362104672735903, "J_D_1KI": 0.03314083935094254} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output new file mode 100644 index 0000000..056b89d --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.05.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, 
"MATRIX_DENSITY": 0.05, "TIME_S": 0.03854489326477051} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 268, 547, ..., 1249522, + 1249752, 1250000]), + col_indices=tensor([ 55, 88, 92, ..., 4943, 4993, 4995]), + values=tensor([0.0205, 0.9162, 0.9855, ..., 0.3784, 0.4892, 0.4396]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.2348, 0.0514, 0.2847, ..., 0.2791, 0.0660, 0.1214]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 0.03854489326477051 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '27240', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 6.170231342315674} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 248, 482, ..., 1249549, + 1249769, 1250000]), + col_indices=tensor([ 15, 21, 37, ..., 4863, 4919, 4974]), + values=tensor([0.5137, 0.1549, 0.4159, ..., 0.2935, 0.1856, 0.8128]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.7661, 0.7592, 0.8422, ..., 0.1260, 0.8520, 0.6179]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 6.170231342315674 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '46354', '-ss', '5000', '-sd', '0.05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250000, "MATRIX_DENSITY": 0.05, "TIME_S": 10.460933923721313} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 262, 517, ..., 1249525, + 1249760, 1250000]), + col_indices=tensor([ 1, 93, 112, ..., 4957, 4968, 4975]), + values=tensor([0.1837, 0.8744, 0.5620, ..., 0.5820, 0.1645, 0.1343]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.3700, 0.8319, 0.5287, ..., 0.8208, 0.8249, 0.9051]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.460933923721313 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 262, 517, ..., 1249525, + 1249760, 1250000]), + col_indices=tensor([ 1, 93, 112, ..., 4957, 4968, 4975]), + values=tensor([0.1837, 0.8744, 0.5620, ..., 0.5820, 0.1645, 0.1343]), + size=(5000, 5000), nnz=1250000, layout=torch.sparse_csr) +tensor([0.3700, 0.8319, 0.5287, ..., 0.8208, 0.8249, 0.9051]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250000 +Density: 0.05 +Time: 10.460933923721313 seconds + +[18.2, 17.97, 18.75, 20.89, 18.05, 18.04, 17.92, 18.15, 18.09, 18.27] +[87.62] +14.25943374633789 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46354, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.460933923721313, 'TIME_S_1KI': 0.22567489156753062, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1249.411584854126, 'W': 87.62} +[18.2, 17.97, 18.75, 20.89, 18.05, 18.04, 17.92, 18.15, 18.09, 18.27, 18.35, 17.98, 17.96, 18.31, 17.83, 17.89, 18.02, 18.18, 17.85, 17.84] +328.21000000000004 +16.410500000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 46354, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250000, 'MATRIX_DENSITY': 0.05, 'TIME_S': 10.460933923721313, 'TIME_S_1KI': 0.22567489156753062, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1249.411584854126, 'W': 87.62, 'J_1KI': 26.95369514721763, 'W_1KI': 1.8902360098373387, 'W_D': 71.2095, 'J_D': 1015.4071473598481, 'W_D_1KI': 1.5362104672735903, 'J_D_1KI': 0.03314083935094254} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json new file mode 100644 index 0000000..30ee0bd --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 19580, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.499455213546753, "TIME_S_1KI": 0.5362336676990169, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1272.879629366398, "W": 87.53, "J_1KI": 65.00917412494371, "W_1KI": 4.470377936670071, "W_D": 70.8715, "J_D": 1030.6282263525725, "W_D_1KI": 3.6195863125638406, "J_D_1KI": 0.18486140513604907} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output new file mode 100644 index 0000000..e7e4d14 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.1.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 
2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 0.06696105003356934} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 497, 967, ..., 2499018, + 2499482, 2500000]), + col_indices=tensor([ 1, 5, 13, ..., 4954, 4967, 4978]), + values=tensor([0.9405, 0.9423, 0.1495, ..., 0.5665, 0.9976, 0.7425]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.7464, 0.9379, 0.0503, ..., 0.7608, 0.2384, 0.8928]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 0.06696105003356934 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '15680', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 8.408252954483032} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 497, 993, ..., 2499009, + 2499527, 2500000]), + col_indices=tensor([ 20, 30, 45, ..., 4969, 4978, 4985]), + values=tensor([0.7997, 0.6173, 0.7759, ..., 0.4558, 0.1544, 0.5880]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.0537, 0.7107, 0.3297, ..., 0.5168, 0.3739, 0.3386]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 8.408252954483032 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '19580', '-ss', '5000', '-sd', '0.1'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 2500000, "MATRIX_DENSITY": 0.1, "TIME_S": 10.499455213546753} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 479, 1002, ..., 2498942, + 2499462, 2500000]), + col_indices=tensor([ 2, 9, 12, ..., 4986, 4993, 4996]), + values=tensor([0.9703, 0.1385, 0.8259, ..., 0.8503, 0.3399, 0.6161]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4198, 0.0425, 0.0591, ..., 0.3064, 0.5860, 0.9864]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.499455213546753 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 479, 1002, ..., 2498942, + 2499462, 2500000]), + col_indices=tensor([ 2, 9, 12, ..., 4986, 4993, 4996]), + values=tensor([0.9703, 0.1385, 0.8259, ..., 0.8503, 0.3399, 0.6161]), + size=(5000, 5000), nnz=2500000, layout=torch.sparse_csr) +tensor([0.4198, 0.0425, 0.0591, ..., 0.3064, 0.5860, 0.9864]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 2500000 +Density: 0.1 +Time: 10.499455213546753 seconds + +[18.41, 17.85, 18.5, 18.03, 18.16, 17.87, 17.95, 21.49, 18.34, 18.02] +[87.53] +14.54220986366272 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19580, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.499455213546753, 'TIME_S_1KI': 0.5362336676990169, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1272.879629366398, 'W': 87.53} +[18.41, 17.85, 18.5, 18.03, 18.16, 17.87, 17.95, 21.49, 18.34, 18.02, 18.22, 18.11, 19.31, 20.46, 18.5, 18.28, 17.98, 18.24, 17.73, 18.09] +333.17 +16.6585 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 19580, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 2500000, 'MATRIX_DENSITY': 0.1, 'TIME_S': 10.499455213546753, 'TIME_S_1KI': 0.5362336676990169, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1272.879629366398, 'W': 87.53, 'J_1KI': 65.00917412494371, 'W_1KI': 4.470377936670071, 'W_D': 70.8715, 'J_D': 1030.6282263525725, 'W_D_1KI': 3.6195863125638406, 'J_D_1KI': 0.18486140513604907} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.json new file mode 100644 index 0000000..e35fdc8 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 9170, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.682870388031006, "TIME_S_1KI": 1.1649804130895316, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1294.0732465410233, "W": 85.83, "J_1KI": 141.1203104188684, "W_1KI": 9.359869138495092, "W_D": 69.25649999999999, "J_D": 1044.1918187005517, "W_D_1KI": 7.552508178844056, "J_D_1KI": 0.8236104884235611} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.output new file mode 100644 index 0000000..5537faf --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.2.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 
0.2, "TIME_S": 0.13022398948669434} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1019, 1995, ..., 4997994, + 4998986, 5000000]), + col_indices=tensor([ 3, 6, 16, ..., 4985, 4988, 4992]), + values=tensor([0.7011, 0.4191, 0.0262, ..., 0.8760, 0.6600, 0.8030]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.6407, 0.2177, 0.8923, ..., 0.2240, 0.7772, 0.6410]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 0.13022398948669434 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '8063', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 9.23195219039917} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 967, 1963, ..., 4997972, + 4999023, 5000000]), + col_indices=tensor([ 1, 8, 12, ..., 4992, 4997, 4998]), + values=tensor([0.9256, 0.0587, 0.0398, ..., 0.7563, 0.3156, 0.8023]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.2454, 0.6913, 0.0165, ..., 0.7228, 0.0396, 0.9796]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 9.23195219039917 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '9170', '-ss', '5000', '-sd', '0.2'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 5000000, "MATRIX_DENSITY": 0.2, "TIME_S": 10.682870388031006} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1039, 2045, ..., 4997951, + 4999001, 5000000]), + col_indices=tensor([ 1, 3, 21, ..., 4981, 4993, 4997]), + values=tensor([0.7269, 0.5102, 0.5857, ..., 0.9126, 0.4820, 0.1281]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7504, 0.3556, 0.6036, ..., 0.9776, 0.8116, 0.9002]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.682870388031006 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1039, 2045, ..., 4997951, + 4999001, 5000000]), + col_indices=tensor([ 1, 3, 21, ..., 4981, 4993, 4997]), + values=tensor([0.7269, 0.5102, 0.5857, ..., 0.9126, 0.4820, 0.1281]), + size=(5000, 5000), nnz=5000000, layout=torch.sparse_csr) +tensor([0.7504, 0.3556, 0.6036, ..., 0.9776, 0.8116, 0.9002]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 5000000 +Density: 0.2 +Time: 10.682870388031006 seconds + +[18.71, 17.74, 17.83, 18.37, 18.26, 18.61, 17.98, 17.99, 18.09, 22.02] +[85.83] +15.07716703414917 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.682870388031006, 'TIME_S_1KI': 1.1649804130895316, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1294.0732465410233, 'W': 85.83} +[18.71, 17.74, 17.83, 18.37, 18.26, 18.61, 17.98, 17.99, 18.09, 22.02, 18.37, 17.98, 18.13, 19.94, 19.96, 18.01, 18.17, 17.86, 18.08, 17.84] +331.47 +16.573500000000003 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 9170, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 5000000, 'MATRIX_DENSITY': 0.2, 'TIME_S': 10.682870388031006, 'TIME_S_1KI': 1.1649804130895316, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1294.0732465410233, 'W': 85.83, 'J_1KI': 141.1203104188684, 'W_1KI': 9.359869138495092, 'W_D': 69.25649999999999, 'J_D': 1044.1918187005517, 'W_D_1KI': 7.552508178844056, 'J_D_1KI': 0.8236104884235611} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.json new file mode 100644 index 0000000..b09a9ac --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 5638, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.486673831939697, "TIME_S_1KI": 1.859998905984338, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1324.2176235437394, "W": 84.45, "J_1KI": 234.87364731176646, "W_1KI": 14.978715856686769, "W_D": 67.81275, "J_D": 1063.3373434098958, "W_D_1KI": 12.027802412202908, "J_D_1KI": 2.1333455857046664} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.output new file mode 100644 index 0000000..933e0af --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.3.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, 
"MATRIX_DENSITY": 0.3, "TIME_S": 0.19828367233276367} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1476, 3005, ..., 7497107, + 7498540, 7500000]), + col_indices=tensor([ 1, 2, 5, ..., 4997, 4998, 4999]), + values=tensor([0.6696, 0.7406, 0.6295, ..., 0.1006, 0.8443, 0.3530]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.2303, 0.2102, 0.7986, ..., 0.7152, 0.6703, 0.2250]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 0.19828367233276367 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5295', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 9.860368490219116} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1522, 3011, ..., 7496960, + 7498495, 7500000]), + col_indices=tensor([ 0, 6, 7, ..., 4993, 4995, 4996]), + values=tensor([0.9810, 0.8874, 0.6033, ..., 0.0373, 0.4053, 0.1135]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.3541, 0.8542, 0.0162, ..., 0.1880, 0.1632, 0.2816]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 9.860368490219116 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '5638', '-ss', '5000', '-sd', '0.3'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 7500000, "MATRIX_DENSITY": 0.3, "TIME_S": 10.486673831939697} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1590, 3134, ..., 7496893, + 7498424, 7500000]), + col_indices=tensor([ 1, 9, 13, ..., 4990, 4995, 4996]), + values=tensor([0.2379, 0.8284, 0.2265, ..., 0.1431, 0.4495, 0.9988]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.8528, 0.9292, 0.5816, ..., 0.4785, 0.5785, 0.0165]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.486673831939697 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 1590, 3134, ..., 7496893, + 7498424, 7500000]), + col_indices=tensor([ 1, 9, 13, ..., 4990, 4995, 4996]), + values=tensor([0.2379, 0.8284, 0.2265, ..., 0.1431, 0.4495, 0.9988]), + size=(5000, 5000), nnz=7500000, layout=torch.sparse_csr) +tensor([0.8528, 0.9292, 0.5816, ..., 0.4785, 0.5785, 0.0165]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 7500000 +Density: 0.3 +Time: 10.486673831939697 seconds + +[18.49, 17.91, 18.44, 17.9, 18.58, 17.81, 17.92, 21.43, 18.39, 17.95] +[84.45] +15.680492877960205 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5638, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.486673831939697, 'TIME_S_1KI': 1.859998905984338, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1324.2176235437394, 'W': 84.45} +[18.49, 17.91, 18.44, 17.9, 18.58, 17.81, 17.92, 21.43, 18.39, 17.95, 18.43, 20.84, 19.1, 18.06, 18.27, 17.75, 18.15, 17.72, 18.16, 17.76] +332.745 +16.63725 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 5638, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 7500000, 'MATRIX_DENSITY': 0.3, 'TIME_S': 10.486673831939697, 'TIME_S_1KI': 1.859998905984338, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1324.2176235437394, 'W': 84.45, 'J_1KI': 234.87364731176646, 'W_1KI': 14.978715856686769, 'W_D': 67.81275, 'J_D': 1063.3373434098958, 'W_D_1KI': 12.027802412202908, 'J_D_1KI': 2.1333455857046664} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.json new file mode 100644 index 0000000..6bb4210 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2997, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.852682113647461, "TIME_S_1KI": 3.6211818864355894, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1423.3116033363342, "W": 83.38, "J_1KI": 474.9121132253367, "W_1KI": 27.82115448782115, "W_D": 66.859, "J_D": 1141.295160559654, "W_D_1KI": 22.30864197530864, "J_D_1KI": 7.443657649418965} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.output new file mode 100644 index 0000000..7229c6c --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.4.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, 
"TIME_S": 0.37653517723083496} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2029, 4093, ..., 9995991, + 9998004, 10000000]), + col_indices=tensor([ 2, 3, 6, ..., 4991, 4993, 4995]), + values=tensor([0.7344, 0.2463, 0.3008, ..., 0.7679, 0.9534, 0.0176]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.9829, 0.9205, 0.8754, ..., 0.7887, 0.3539, 0.3362]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 0.37653517723083496 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2788', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 9.764836072921753} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2035, 4051, ..., 9995975, + 9998000, 10000000]), + col_indices=tensor([ 1, 7, 13, ..., 4993, 4995, 4999]), + values=tensor([0.5646, 0.0016, 0.0540, ..., 0.1360, 0.4218, 0.1397]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2261, 0.1429, 0.2182, ..., 0.9969, 0.6483, 0.4023]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 9.764836072921753 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2997', '-ss', '5000', '-sd', '0.4'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 10000000, "MATRIX_DENSITY": 0.4, "TIME_S": 10.852682113647461} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2037, 4021, ..., 9995904, + 9997943, 10000000]), + col_indices=tensor([ 4, 9, 10, ..., 4993, 4994, 4995]), + values=tensor([0.7234, 0.7434, 0.9107, ..., 0.4914, 0.1939, 0.0446]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2935, 0.8834, 0.9590, ..., 0.1952, 0.7236, 0.3428]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.852682113647461 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2037, 4021, ..., 9995904, + 9997943, 10000000]), + col_indices=tensor([ 4, 9, 10, ..., 4993, 4994, 4995]), + values=tensor([0.7234, 0.7434, 0.9107, ..., 0.4914, 0.1939, 0.0446]), + size=(5000, 5000), nnz=10000000, layout=torch.sparse_csr) +tensor([0.2935, 0.8834, 0.9590, ..., 0.1952, 0.7236, 0.3428]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 10000000 +Density: 0.4 +Time: 10.852682113647461 seconds + +[18.3, 18.02, 18.13, 22.02, 18.21, 18.26, 17.95, 17.94, 17.94, 18.03] +[83.38] +17.07017993927002 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2997, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.852682113647461, 'TIME_S_1KI': 3.6211818864355894, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1423.3116033363342, 'W': 83.38} +[18.3, 18.02, 18.13, 22.02, 18.21, 18.26, 17.95, 17.94, 17.94, 18.03, 18.77, 18.01, 18.52, 18.59, 18.17, 17.81, 18.04, 18.12, 18.22, 17.84] +330.41999999999996 +16.520999999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2997, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 10000000, 'MATRIX_DENSITY': 0.4, 'TIME_S': 10.852682113647461, 'TIME_S_1KI': 3.6211818864355894, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1423.3116033363342, 'W': 83.38, 'J_1KI': 474.9121132253367, 'W_1KI': 27.82115448782115, 'W_D': 66.859, 'J_D': 1141.295160559654, 'W_D_1KI': 22.30864197530864, 'J_D_1KI': 7.443657649418965} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.json new file mode 100644 index 0000000..65b89f9 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 2368, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.176312685012817, "TIME_S_1KI": 4.297429343333116, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1437.3391367673873, "W": 78.7, "J_1KI": 606.984432756498, "W_1KI": 33.2347972972973, "W_D": 62.306000000000004, "J_D": 1137.9269663968087, "W_D_1KI": 26.311655405405407, "J_D_1KI": 11.111340965120526} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.output new file mode 100644 index 0000000..4c78e30 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_0.5.output @@ -0,0 +1,85 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 
12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 0.48082637786865234} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2493, 4988, ..., 12494973, + 12497489, 12500000]), + col_indices=tensor([ 0, 3, 4, ..., 4995, 4996, 4997]), + values=tensor([0.7019, 0.9323, 0.5533, ..., 0.8475, 0.3948, 0.0670]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.2037, 0.7210, 0.2412, ..., 0.0549, 0.3207, 0.0757]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 0.48082637786865234 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2183', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 9.679495573043823} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2494, 4974, ..., 12494974, + 12497479, 12500000]), + col_indices=tensor([ 2, 3, 5, ..., 4990, 4992, 4993]), + values=tensor([0.3550, 0.2209, 0.9878, ..., 0.1100, 0.7010, 0.8735]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.0897, 0.5505, 0.7451, ..., 0.8823, 0.0649, 0.5912]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 9.679495573043823 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '2368', '-ss', '5000', '-sd', '0.5'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 12500000, "MATRIX_DENSITY": 0.5, "TIME_S": 10.176312685012817} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2500, 5025, ..., 12495060, + 12497549, 12500000]), + col_indices=tensor([ 3, 7, 9, ..., 4994, 4997, 4998]), + values=tensor([0.3762, 0.0915, 0.3071, ..., 0.1948, 0.2052, 0.2572]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.2717, 0.6125, 0.0994, ..., 0.4315, 0.1911, 0.5863]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.176312685012817 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 2500, 5025, ..., 12495060, + 12497549, 12500000]), + col_indices=tensor([ 3, 7, 9, ..., 4994, 4997, 4998]), + values=tensor([0.3762, 0.0915, 0.3071, ..., 0.1948, 0.2052, 0.2572]), + size=(5000, 5000), nnz=12500000, layout=torch.sparse_csr) +tensor([0.2717, 0.6125, 0.0994, ..., 0.4315, 0.1911, 0.5863]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 12500000 +Density: 0.5 +Time: 10.176312685012817 seconds + +[18.38, 18.14, 18.13, 18.09, 18.53, 18.05, 18.22, 17.96, 18.23, 17.96] +[78.7] +18.263521432876587 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2368, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.176312685012817, 'TIME_S_1KI': 4.297429343333116, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1437.3391367673873, 'W': 78.7} +[18.38, 18.14, 18.13, 18.09, 18.53, 18.05, 18.22, 17.96, 18.23, 17.96, 18.81, 17.96, 18.41, 17.98, 18.95, 18.19, 18.23, 17.96, 18.21, 18.13] +327.88 +16.394 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 2368, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 12500000, 'MATRIX_DENSITY': 0.5, 'TIME_S': 10.176312685012817, 'TIME_S_1KI': 4.297429343333116, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1437.3391367673873, 'W': 78.7, 'J_1KI': 606.984432756498, 'W_1KI': 33.2347972972973, 'W_D': 62.306000000000004, 'J_D': 1137.9269663968087, 'W_D_1KI': 26.311655405405407, 'J_D_1KI': 11.111340965120526} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json new file mode 100644 index 0000000..2ac6398 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 357325, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.499107599258423, "TIME_S_1KI": 0.029382516194664303, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 1011.0901748657226, "W": 72.8, "J_1KI": 2.829609388835717, "W_1KI": 0.20373609459175818, "W_D": 46.4985, "J_D": 645.7991276922226, "W_D_1KI": 0.1301294339886658, "J_D_1KI": 0.0003641766850588842} diff --git 
a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output new file mode 100644 index 0000000..109e8d2 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_1e-05.output @@ -0,0 +1,329 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 0.020415544509887695} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([1333, 4323, 980, 4513, 212, 4800, 425, 2650, 4181, + 969, 725, 4708, 2978, 4829, 2835, 2475, 4298, 490, + 871, 3121, 4762, 4479, 218, 2461, 4825, 2919, 512, + 1363, 1944, 2772, 493, 2448, 4663, 4533, 3227, 2251, + 3206, 1036, 4160, 3108, 4112, 520, 4884, 1695, 3846, + 3433, 4341, 314, 4963, 4557, 1460, 2013, 3131, 1087, + 1051, 4820, 1214, 2275, 33, 4231, 755, 1677, 4993, + 6, 698, 1317, 4059, 4178, 3988, 3341, 1529, 1327, + 4807, 1459, 3985, 2360, 1563, 1141, 2987, 4820, 3117, + 3988, 665, 4532, 3163, 3252, 4621, 4938, 2411, 4883, + 2036, 4582, 143, 3182, 722, 2609, 1076, 3554, 197, + 34, 2097, 1008, 2860, 1655, 2265, 3241, 2350, 1493, + 3432, 135, 3780, 2490, 3232, 1376, 4188, 3874, 3039, + 211, 1650, 4645, 4026, 3400, 1898, 591, 1094, 2347, + 3740, 1237, 925, 2787, 2157, 2054, 2757, 4014, 2493, + 4673, 4093, 396, 3012, 1306, 4796, 91, 1743, 543, + 4293, 1561, 1048, 3859, 1955, 3884, 1858, 722, 4225, + 3, 2890, 2664, 3170, 385, 1272, 188, 920, 1323, + 3875, 304, 4410, 2754, 3377, 4890, 4523, 4463, 2061, + 4108, 1554, 1408, 436, 1757, 483, 3973, 1483, 4992, + 3373, 401, 3150, 4902, 982, 3824, 3035, 683, 1843, + 1258, 996, 3656, 3918, 3943, 3598, 214, 724, 3021, + 312, 2986, 2034, 3929, 4868, 4093, 391, 1333, 1344, + 94, 1127, 616, 21, 1697, 646, 658, 1404, 4972, + 1106, 2493, 822, 2442, 3750, 3893, 297, 3607, 4990, + 1137, 2122, 3346, 36, 515, 2447, 1921, 2437, 4525, + 918, 1225, 2348, 1665, 3413, 132, 2821, 111, 225, + 4735, 1677, 1746, 1399, 3492, 1994, 2191]), + values=tensor([0.3602, 0.2302, 0.1896, 0.7717, 0.8956, 0.2078, 0.9487, + 0.0390, 0.3051, 0.4281, 0.9325, 0.8375, 0.6844, 0.7644, + 0.9804, 0.7332, 0.5227, 0.6093, 0.2260, 0.2957, 0.5954, + 0.5378, 0.5320, 0.4643, 0.3270, 0.8112, 0.8555, 0.5438, + 0.7934, 0.2222, 0.8906, 0.2729, 0.9780, 0.8597, 0.4400, + 0.4858, 0.2262, 0.4282, 0.8462, 0.2562, 0.8815, 0.7446, + 0.8391, 0.9365, 0.8781, 0.6986, 0.6943, 0.7579, 0.7636, + 0.9590, 0.7739, 0.5121, 0.8335, 0.2962, 0.5344, 0.4995, + 0.3316, 0.3046, 0.9720, 0.3429, 0.6108, 0.9040, 0.4735, + 0.4252, 0.9794, 0.6492, 0.0484, 0.2038, 0.5032, 0.1575, + 0.2125, 0.6202, 0.9042, 0.6689, 0.5066, 0.9113, 0.9195, + 0.2614, 0.9604, 0.9164, 0.9297, 0.6647, 0.8507, 0.2096, + 0.8186, 0.7548, 0.5297, 0.5479, 0.9328, 0.6830, 0.1651, + 0.9770, 0.1478, 0.1631, 0.6486, 0.7340, 0.9314, 0.8298, + 0.0625, 0.5779, 0.9267, 0.8341, 0.6247, 
0.5959, 0.2706, + 0.5015, 0.0222, 0.8356, 0.2739, 0.1607, 0.6428, 0.2275, + 0.4531, 0.9656, 0.4285, 0.6913, 0.6388, 0.2488, 0.5417, + 0.6641, 0.6693, 0.7008, 0.2798, 0.3263, 0.3780, 0.6106, + 0.1001, 0.9342, 0.1338, 0.9089, 0.1127, 0.8287, 0.1281, + 0.0152, 0.2533, 0.8228, 0.6184, 0.5915, 0.5316, 0.8294, + 0.0163, 0.4275, 0.8868, 0.8268, 0.7667, 0.0040, 0.2374, + 0.5011, 0.5032, 0.5731, 0.6046, 0.1518, 0.5693, 0.3308, + 0.3440, 0.5026, 0.8207, 0.2844, 0.6651, 0.1261, 0.7301, + 0.3788, 0.3102, 0.2854, 0.7833, 0.8403, 0.4733, 0.1726, + 0.2895, 0.2693, 0.2450, 0.4577, 0.3046, 0.8813, 0.0902, + 0.8416, 0.8712, 0.6652, 0.0828, 0.2244, 0.6326, 0.9806, + 0.2561, 0.7608, 0.6760, 0.2091, 0.8004, 0.6240, 0.1978, + 0.2246, 0.8102, 0.6207, 0.2204, 0.2354, 0.0013, 0.2440, + 0.6407, 0.4005, 0.5265, 0.2470, 0.8146, 0.4334, 0.7311, + 0.4576, 0.4028, 0.8969, 0.8062, 0.4521, 0.2212, 0.0569, + 0.5145, 0.3716, 0.8367, 0.2637, 0.4458, 0.4981, 0.9509, + 0.3003, 0.2789, 0.8010, 0.0297, 0.6055, 0.0149, 0.5383, + 0.2695, 0.1287, 0.0767, 0.7664, 0.3198, 0.2386, 0.2384, + 0.1616, 0.1390, 0.9600, 0.6032, 0.7446, 0.5942, 0.1408, + 0.5912, 0.3909, 0.1909, 0.4245, 0.3596, 0.3316, 0.0552, + 0.1715, 0.2483, 0.6015, 0.5276, 0.6350]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.0146, 0.8199, 0.1846, ..., 0.5715, 0.8048, 0.4041]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 0.020415544509887695 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '51431', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 1.5112977027893066} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([4110, 4882, 2528, 560, 2850, 1135, 2189, 267, 591, + 4248, 392, 4389, 952, 4426, 675, 791, 4134, 2003, + 330, 4266, 1795, 3038, 2537, 1414, 4513, 1875, 1420, + 3040, 4172, 4236, 4029, 37, 2565, 1038, 1280, 832, + 4901, 4729, 13, 26, 702, 4420, 4734, 1556, 899, + 2280, 4293, 3965, 1974, 4811, 4611, 1009, 692, 1950, + 707, 1172, 1767, 2120, 1996, 634, 3311, 4113, 1903, + 2733, 3685, 3363, 3449, 2814, 1438, 4520, 218, 740, + 498, 655, 1313, 1745, 4116, 95, 357, 4580, 2628, + 4483, 1655, 4732, 165, 2596, 205, 1356, 2811, 2330, + 4604, 2547, 3455, 1624, 3457, 1604, 4751, 1859, 1095, + 4469, 2397, 2809, 2539, 2371, 2342, 4452, 4342, 4591, + 2097, 1276, 1250, 2060, 1578, 4677, 4803, 4831, 2724, + 2018, 2284, 2775, 4691, 199, 4210, 2309, 943, 2881, + 1649, 3410, 3567, 3333, 2952, 4866, 2146, 2388, 3555, + 3049, 2189, 2470, 1262, 1368, 686, 4746, 2521, 3366, + 3586, 4367, 3371, 2520, 3654, 86, 810, 3504, 2051, + 4871, 4860, 841, 544, 3974, 2005, 981, 3244, 488, + 3372, 1091, 2635, 1589, 2299, 733, 4674, 2817, 2103, + 1209, 3577, 3623, 4300, 2443, 1295, 4756, 2336, 3550, + 2095, 4292, 2842, 1971, 4691, 4042, 3354, 592, 2063, + 1832, 1378, 3596, 4385, 745, 2532, 1883, 1739, 1247, + 2943, 1826, 3219, 4178, 3486, 2469, 1068, 4370, 1539, + 326, 2628, 3947, 4717, 3828, 4255, 4155, 4188, 3551, + 3390, 3231, 2307, 2529, 2288, 367, 4198, 4896, 743, + 1579, 1013, 645, 2200, 4435, 4882, 1640, 288, 3098, + 3761, 2384, 847, 509, 4067, 1126, 4356, 2690, 2684, + 3064, 181, 819, 409, 3928, 4195, 3862]), + values=tensor([0.6256, 0.4971, 0.1060, 0.7139, 0.1823, 0.2509, 0.2673, + 0.8663, 0.0835, 0.0808, 0.2049, 0.4420, 0.4395, 0.8635, + 0.0169, 0.4133, 0.6820, 0.9890, 0.7762, 0.6220, 0.1534, + 0.5506, 0.6723, 0.8443, 0.3974, 0.8265, 0.0110, 0.1617, + 0.9872, 0.3380, 0.0737, 0.5011, 0.8812, 0.5991, 0.0522, + 0.5730, 0.8120, 0.7506, 0.2397, 0.8742, 0.8477, 0.3557, + 0.0116, 0.0076, 0.2125, 0.8492, 0.7414, 0.8939, 0.9161, + 0.6369, 0.7960, 0.8359, 0.4748, 0.7205, 0.4515, 0.6595, + 0.2492, 0.2025, 0.4674, 0.4437, 0.5543, 0.2380, 0.4617, + 0.5720, 0.4895, 0.6114, 0.1214, 0.9049, 0.4914, 0.2397, + 0.6242, 0.3103, 0.8200, 0.7931, 0.9387, 0.1219, 0.7863, + 0.3172, 0.8665, 0.4111, 0.3328, 0.3737, 0.2933, 0.4526, + 0.3424, 0.8586, 0.4153, 0.8629, 0.3744, 0.5686, 0.0746, + 0.0221, 0.4632, 0.2182, 0.7406, 0.2698, 0.1778, 0.5649, + 0.4080, 0.6883, 0.2942, 0.9953, 0.6518, 0.2163, 0.7535, + 0.6975, 0.2221, 0.2275, 0.5405, 0.0798, 0.5573, 0.6302, + 0.4985, 0.1104, 0.5174, 0.9401, 0.1389, 0.1581, 0.0424, + 0.6895, 0.3613, 0.0928, 0.0065, 0.9081, 0.0044, 0.4960, + 0.9823, 0.5854, 0.2610, 0.0038, 0.7425, 0.4802, 0.1516, + 0.8368, 0.1838, 0.8782, 0.0715, 0.4223, 0.7005, 0.4430, + 0.9462, 0.7502, 0.6645, 0.2891, 0.9383, 0.3987, 0.8215, + 0.5606, 0.2261, 0.8011, 0.4375, 0.3961, 0.7279, 0.2806, + 0.0713, 0.4871, 0.3038, 0.5371, 0.2216, 0.6841, 0.2878, + 0.3646, 0.4532, 0.8491, 0.7578, 0.6510, 0.7523, 0.4340, + 0.7007, 0.4708, 0.9965, 0.8203, 0.2246, 0.8996, 0.3303, + 0.0148, 0.3760, 0.7093, 0.5329, 0.2574, 0.3450, 0.1580, + 0.7186, 0.2033, 0.7798, 0.4593, 0.7075, 0.5720, 0.1499, + 0.1156, 0.4260, 0.0288, 0.5145, 0.3668, 0.5589, 0.8589, + 0.3216, 0.4607, 0.7258, 0.8841, 0.3750, 0.0423, 0.5771, + 0.4191, 0.6488, 0.7104, 0.5144, 0.6011, 0.7757, 0.0524, + 0.4180, 0.1533, 0.5397, 0.3671, 0.1765, 0.9044, 0.7096, + 0.5951, 0.8654, 0.2226, 0.3541, 0.2302, 0.8815, 0.7479, + 0.9847, 0.5335, 0.2697, 0.6788, 0.5998, 
0.9345, 0.5600, + 0.8733, 0.1996, 0.0591, 0.2713, 0.9857, 0.2291, 0.3974, + 0.8274, 0.9419, 0.8414, 0.0360, 0.8374, 0.9999, 0.3537, + 0.3299, 0.5865, 0.7469, 0.2686, 0.3364]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.4917, 0.3630, 0.9281, ..., 0.8752, 0.5291, 0.1405]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 1.5112977027893066 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '357325', '-ss', '5000', '-sd', '1e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 250, "MATRIX_DENSITY": 1e-05, "TIME_S": 10.499107599258423} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([2104, 1861, 7, 1520, 3728, 1941, 2107, 138, 2161, + 3365, 332, 4735, 2493, 4284, 393, 1314, 1302, 4705, + 1583, 2354, 365, 1361, 3891, 149, 1170, 2523, 1316, + 504, 3112, 2441, 3025, 3794, 4286, 3194, 1606, 1584, + 3408, 2741, 1246, 4491, 4352, 753, 1486, 3301, 2391, + 2673, 3251, 2341, 1657, 2899, 1405, 28, 3720, 4641, + 155, 2571, 1960, 1838, 3742, 1460, 3050, 1966, 3313, + 1854, 4564, 1529, 1889, 4664, 3289, 4098, 3070, 1858, + 1104, 4802, 1430, 2787, 4743, 1421, 1813, 2073, 2691, + 3256, 821, 4666, 4791, 494, 2847, 2089, 295, 92, + 3053, 2874, 4675, 1142, 2097, 3430, 3192, 3228, 4790, + 4424, 4658, 1164, 1384, 2389, 731, 3926, 526, 3782, + 4373, 3966, 3264, 2145, 1214, 2000, 245, 4102, 2011, + 66, 3256, 4976, 3641, 1843, 2314, 3228, 1928, 847, + 3368, 1129, 1702, 2867, 4161, 4680, 2563, 195, 417, + 4789, 399, 2588, 3130, 324, 4572, 3283, 4937, 216, + 3937, 29, 3425, 1846, 776, 2604, 3452, 1647, 2368, + 423, 57, 1474, 4006, 1987, 4359, 1194, 867, 4968, + 2616, 1438, 4125, 4484, 4543, 3714, 1301, 2231, 4571, + 1275, 4305, 3280, 4945, 2663, 1185, 1842, 622, 3361, + 2569, 1402, 2348, 1829, 182, 3336, 570, 3323, 562, + 1935, 4764, 3304, 2399, 1649, 1406, 4405, 2794, 3186, + 4464, 4180, 1762, 4406, 310, 3727, 661, 2438, 4818, + 1167, 2299, 3410, 2102, 990, 2923, 1131, 3164, 4442, + 2744, 348, 2673, 3077, 2766, 827, 2340, 2888, 4315, + 1191, 4533, 1425, 1473, 1269, 2758, 4677, 1018, 4467, + 4891, 1237, 820, 1622, 44, 4185, 1279, 3969, 4975, + 4333, 1750, 1936, 243, 4652, 1623, 3975]), + values=tensor([0.9387, 0.4700, 0.2447, 0.6228, 0.4503, 0.1997, 0.0053, + 0.5157, 0.4796, 0.4250, 0.8220, 0.3582, 0.4251, 0.4646, + 0.9478, 0.0325, 0.0848, 0.5917, 0.2071, 0.4572, 0.9167, + 0.3871, 0.6967, 0.0513, 0.7695, 0.7027, 0.3866, 0.0150, + 0.1037, 0.9121, 0.5340, 0.5642, 0.8146, 0.9553, 0.8607, + 0.1413, 0.3229, 0.0119, 0.0552, 0.9205, 0.4848, 0.8673, + 0.6922, 0.3249, 0.7973, 0.5503, 0.3256, 0.5167, 0.2558, + 0.6122, 0.0546, 0.7903, 0.3759, 0.7968, 0.2510, 0.1679, + 0.4338, 0.6740, 0.2626, 0.4105, 0.4029, 0.3869, 0.1119, + 0.9939, 0.6154, 0.1891, 0.7886, 0.8106, 0.2588, 0.8530, + 0.1960, 0.5522, 0.9502, 0.2036, 0.8479, 0.5893, 0.2336, + 0.5492, 0.4318, 0.6485, 0.5562, 0.9481, 0.9451, 0.8583, + 0.7556, 0.1458, 0.7503, 0.1231, 0.2746, 
0.0172, 0.6993, + 0.4164, 0.7854, 0.6995, 0.8084, 0.5901, 0.9482, 0.9912, + 0.8222, 0.8265, 0.6019, 0.5462, 0.6062, 0.4916, 0.3704, + 0.9894, 0.1946, 0.9052, 0.1356, 0.8292, 0.4460, 0.3066, + 0.5424, 0.9972, 0.4829, 0.2518, 0.5680, 0.4922, 0.3950, + 0.5510, 0.1881, 0.6119, 0.3857, 0.5521, 0.8319, 0.6787, + 0.8327, 0.0041, 0.6533, 0.9035, 0.3529, 0.2045, 0.8781, + 0.7715, 0.7148, 0.2012, 0.4429, 0.6908, 0.0217, 0.9409, + 0.2398, 0.9282, 0.8675, 0.2060, 0.8887, 0.5702, 0.9940, + 0.9652, 0.4202, 0.5062, 0.1515, 0.6839, 0.1289, 0.7773, + 0.8039, 0.1706, 0.5382, 0.6842, 0.4665, 0.8705, 0.9740, + 0.3731, 0.3796, 0.0945, 0.8938, 0.9546, 0.7375, 0.4347, + 0.0317, 0.0560, 0.1482, 0.2781, 0.1532, 0.0170, 0.3948, + 0.1619, 0.3596, 0.1826, 0.6650, 0.4684, 0.2191, 0.9123, + 0.0088, 0.2608, 0.7519, 0.3594, 0.8499, 0.1793, 0.6810, + 0.4007, 0.7171, 0.6382, 0.5741, 0.6774, 0.9314, 0.4111, + 0.3073, 0.3304, 0.1048, 0.8381, 0.8387, 0.6626, 0.9350, + 0.3559, 0.0898, 0.9007, 0.8371, 0.0933, 0.7505, 0.1760, + 0.8200, 0.6000, 0.8535, 0.8461, 0.8576, 0.1788, 0.4860, + 0.8224, 0.8204, 0.6502, 0.3745, 0.9926, 0.3104, 0.0651, + 0.2962, 0.9752, 0.3565, 0.2747, 0.0907, 0.8216, 0.2197, + 0.9709, 0.0139, 0.1557, 0.9115, 0.1512, 0.0977, 0.5834, + 0.2007, 0.9483, 0.8228, 0.2172, 0.6709, 0.2237, 0.7140, + 0.3427, 0.4328, 0.7857, 0.0871, 0.8851]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.5944, 0.7034, 0.9760, ..., 0.8304, 0.5842, 0.4500]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.499107599258423 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 250, 250, 250]), + col_indices=tensor([2104, 1861, 7, 1520, 3728, 1941, 2107, 138, 2161, + 3365, 332, 4735, 2493, 4284, 393, 1314, 1302, 4705, + 1583, 2354, 365, 1361, 3891, 149, 1170, 2523, 1316, + 504, 3112, 2441, 3025, 3794, 4286, 3194, 1606, 1584, + 3408, 2741, 1246, 4491, 4352, 753, 1486, 3301, 2391, + 2673, 3251, 2341, 1657, 2899, 1405, 28, 3720, 4641, + 155, 2571, 1960, 1838, 3742, 1460, 3050, 1966, 3313, + 1854, 4564, 1529, 1889, 4664, 3289, 4098, 3070, 1858, + 1104, 4802, 1430, 2787, 4743, 1421, 1813, 2073, 2691, + 3256, 821, 4666, 4791, 494, 2847, 2089, 295, 92, + 3053, 2874, 4675, 1142, 2097, 3430, 3192, 3228, 4790, + 4424, 4658, 1164, 1384, 2389, 731, 3926, 526, 3782, + 4373, 3966, 3264, 2145, 1214, 2000, 245, 4102, 2011, + 66, 3256, 4976, 3641, 1843, 2314, 3228, 1928, 847, + 3368, 1129, 1702, 2867, 4161, 4680, 2563, 195, 417, + 4789, 399, 2588, 3130, 324, 4572, 3283, 4937, 216, + 3937, 29, 3425, 1846, 776, 2604, 3452, 1647, 2368, + 423, 57, 1474, 4006, 1987, 4359, 1194, 867, 4968, + 2616, 1438, 4125, 4484, 4543, 3714, 1301, 2231, 4571, + 1275, 4305, 3280, 4945, 2663, 1185, 1842, 622, 3361, + 2569, 1402, 2348, 1829, 182, 3336, 570, 3323, 562, + 1935, 4764, 3304, 2399, 1649, 1406, 4405, 2794, 3186, + 4464, 4180, 1762, 4406, 310, 3727, 661, 2438, 4818, + 1167, 2299, 3410, 2102, 990, 2923, 1131, 3164, 4442, + 2744, 348, 2673, 3077, 2766, 827, 2340, 2888, 4315, + 1191, 4533, 1425, 1473, 1269, 2758, 4677, 1018, 4467, + 4891, 1237, 820, 1622, 44, 4185, 1279, 3969, 4975, + 4333, 1750, 1936, 243, 4652, 1623, 3975]), + values=tensor([0.9387, 0.4700, 0.2447, 0.6228, 0.4503, 0.1997, 0.0053, + 0.5157, 0.4796, 0.4250, 0.8220, 0.3582, 0.4251, 0.4646, + 0.9478, 0.0325, 0.0848, 0.5917, 0.2071, 0.4572, 0.9167, + 0.3871, 0.6967, 0.0513, 0.7695, 0.7027, 0.3866, 0.0150, + 0.1037, 0.9121, 0.5340, 0.5642, 0.8146, 0.9553, 0.8607, + 0.1413, 0.3229, 0.0119, 0.0552, 0.9205, 0.4848, 0.8673, + 0.6922, 0.3249, 0.7973, 0.5503, 0.3256, 0.5167, 0.2558, + 0.6122, 0.0546, 0.7903, 0.3759, 0.7968, 0.2510, 0.1679, + 0.4338, 0.6740, 0.2626, 0.4105, 0.4029, 0.3869, 0.1119, + 0.9939, 0.6154, 0.1891, 0.7886, 0.8106, 0.2588, 0.8530, + 0.1960, 0.5522, 0.9502, 0.2036, 0.8479, 0.5893, 0.2336, + 0.5492, 0.4318, 0.6485, 0.5562, 0.9481, 0.9451, 0.8583, + 0.7556, 0.1458, 0.7503, 0.1231, 0.2746, 0.0172, 0.6993, + 0.4164, 0.7854, 0.6995, 0.8084, 0.5901, 0.9482, 0.9912, + 0.8222, 0.8265, 0.6019, 0.5462, 0.6062, 0.4916, 0.3704, + 0.9894, 0.1946, 0.9052, 0.1356, 0.8292, 0.4460, 0.3066, + 0.5424, 0.9972, 0.4829, 0.2518, 0.5680, 0.4922, 0.3950, + 0.5510, 0.1881, 0.6119, 0.3857, 0.5521, 0.8319, 0.6787, + 0.8327, 0.0041, 0.6533, 0.9035, 0.3529, 0.2045, 0.8781, + 0.7715, 0.7148, 0.2012, 0.4429, 0.6908, 0.0217, 0.9409, + 0.2398, 0.9282, 0.8675, 0.2060, 0.8887, 0.5702, 0.9940, + 0.9652, 0.4202, 0.5062, 0.1515, 0.6839, 0.1289, 0.7773, + 0.8039, 0.1706, 0.5382, 0.6842, 0.4665, 0.8705, 0.9740, + 0.3731, 0.3796, 0.0945, 0.8938, 0.9546, 0.7375, 0.4347, + 0.0317, 0.0560, 0.1482, 0.2781, 0.1532, 0.0170, 0.3948, + 0.1619, 0.3596, 0.1826, 0.6650, 0.4684, 0.2191, 0.9123, + 0.0088, 0.2608, 0.7519, 0.3594, 0.8499, 0.1793, 0.6810, + 0.4007, 0.7171, 0.6382, 0.5741, 0.6774, 0.9314, 0.4111, + 0.3073, 0.3304, 0.1048, 0.8381, 0.8387, 0.6626, 0.9350, + 0.3559, 0.0898, 0.9007, 0.8371, 0.0933, 0.7505, 0.1760, + 0.8200, 0.6000, 0.8535, 0.8461, 0.8576, 0.1788, 0.4860, + 0.8224, 0.8204, 0.6502, 0.3745, 0.9926, 0.3104, 0.0651, + 0.2962, 0.9752, 0.3565, 0.2747, 
0.0907, 0.8216, 0.2197, + 0.9709, 0.0139, 0.1557, 0.9115, 0.1512, 0.0977, 0.5834, + 0.2007, 0.9483, 0.8228, 0.2172, 0.6709, 0.2237, 0.7140, + 0.3427, 0.4328, 0.7857, 0.0871, 0.8851]), + size=(5000, 5000), nnz=250, layout=torch.sparse_csr) +tensor([0.5944, 0.7034, 0.9760, ..., 0.8304, 0.5842, 0.4500]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 250 +Density: 1e-05 +Time: 10.499107599258423 seconds + +[18.49, 21.77, 18.79, 18.12, 18.06, 18.03, 18.09, 17.74, 18.25, 17.89] +[72.8] +13.888601303100586 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 357325, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.499107599258423, 'TIME_S_1KI': 0.029382516194664303, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1011.0901748657226, 'W': 72.8} +[18.49, 21.77, 18.79, 18.12, 18.06, 18.03, 18.09, 17.74, 18.25, 17.89, 37.95, 41.51, 40.96, 43.46, 47.23, 47.25, 47.57, 33.36, 24.72, 27.91] +526.03 +26.301499999999997 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 357325, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 250, 'MATRIX_DENSITY': 1e-05, 'TIME_S': 10.499107599258423, 'TIME_S_1KI': 0.029382516194664303, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 1011.0901748657226, 'W': 72.8, 'J_1KI': 2.829609388835717, 'W_1KI': 0.20373609459175818, 'W_D': 46.4985, 'J_D': 645.7991276922226, 'W_D_1KI': 0.1301294339886658, 'J_D_1KI': 0.0003641766850588842} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.json b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.json new file mode 100644 index 0000000..e1d6586 --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.json @@ -0,0 +1 @@ +{"CPU": "Xeon 4216", "CORES": 16, "ITERATIONS": 344337, "MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.209988117218018, "TIME_S_1KI": 0.029651150231366417, "BASELINE_TIME_S": 10, "BASELINE_DELAY_S": 10, "J": 996.8449161434174, "W": 73.42, "J_1KI": 2.894968929111357, "W_1KI": 0.21322135001466586, "W_D": 47.158, "J_D": 640.2780244550705, "W_D_1KI": 0.13695304309441042, "J_D_1KI": 0.0003977296749823877} diff --git a/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.output b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.output new file mode 100644 index 0000000..395a17f --- /dev/null +++ b/pytorch/output_synthetic_maxcore_old2/xeon_4216_max_csr_10_10_10_synthetic_5000_5e-05.output @@ -0,0 +1,81 @@ +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '100', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 0.019530296325683594} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. 
If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 1, ..., 1249, 1250, 1250]), + col_indices=tensor([2184, 3902, 2442, ..., 1965, 3430, 1316]), + values=tensor([0.1298, 0.8618, 0.5661, ..., 0.6807, 0.5041, 0.2985]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.7563, 0.3154, 0.3739, ..., 0.6493, 0.1597, 0.7226]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 0.019530296325683594 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '53762', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 1.6393845081329346} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1248, 1249, 1250]), + col_indices=tensor([ 543, 3603, 176, ..., 1860, 3976, 394]), + values=tensor([0.3999, 0.3952, 0.5467, ..., 0.7650, 0.4806, 0.1331]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.3582, 0.6649, 0.9739, ..., 0.1792, 0.8232, 0.7194]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 1.6393845081329346 seconds + +['apptainer', 'run', 'pytorch-xeon_4216.sif', 'numactl', '--cpunodebind=0', '--membind=0', 'python3', 'spmv.py', 'synthetic', 'csr', '344337', '-ss', '5000', '-sd', '5e-05'] +{"MATRIX_TYPE": "synthetic", "MATRIX_FORMAT": "csr", "MATRIX_SHAPE": [5000, 5000], "MATRIX_ROWS": 5000, "MATRIX_SIZE": 25000000, "MATRIX_NNZ": 1250, "MATRIX_DENSITY": 5e-05, "TIME_S": 10.209988117218018} + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) + matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1250, 1250]), + col_indices=tensor([2469, 1957, 4805, ..., 986, 4084, 3397]), + values=tensor([0.1203, 0.9340, 0.5005, ..., 0.1600, 0.9840, 0.6585]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.7772, 0.5114, 0.7654, ..., 0.7246, 0.7942, 0.6121]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.209988117218018 seconds + +/nfshomes/vut/ampere_research/pytorch/spmv.py:75: UserWarning: Sparse CSR tensor support is in beta state. If you miss a functionality in the sparse tensor support, please submit a feature request to https://github.com/pytorch/pytorch/issues. (Triggered internally at ../aten/src/ATen/SparseCsrTensorImpl.cpp:53.) 
+ matrix = matrix.to_sparse_csr().type(torch.float32) +tensor(crow_indices=tensor([ 0, 0, 0, ..., 1249, 1250, 1250]), + col_indices=tensor([2469, 1957, 4805, ..., 986, 4084, 3397]), + values=tensor([0.1203, 0.9340, 0.5005, ..., 0.1600, 0.9840, 0.6585]), + size=(5000, 5000), nnz=1250, layout=torch.sparse_csr) +tensor([0.7772, 0.5114, 0.7654, ..., 0.7246, 0.7942, 0.6121]) +Matrix Type: synthetic +Matrix Format: csr +Shape: torch.Size([5000, 5000]) +Rows: 5000 +Size: 25000000 +NNZ: 1250 +Density: 5e-05 +Time: 10.209988117218018 seconds + +[40.04, 39.64, 39.87, 39.84, 41.87, 42.36, 39.49, 37.87, 39.91, 39.97] +[73.42] +13.577293872833252 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 344337, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.209988117218018, 'TIME_S_1KI': 0.029651150231366417, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 996.8449161434174, 'W': 73.42} +[40.04, 39.64, 39.87, 39.84, 41.87, 42.36, 39.49, 37.87, 39.91, 39.97, 18.46, 17.91, 18.96, 18.18, 18.6, 18.04, 18.09, 18.2, 18.22, 17.91] +525.24 +26.262 +{'CPU': 'Xeon 4216', 'CORES': 16, 'ITERATIONS': 344337, 'MATRIX_TYPE': 'synthetic', 'MATRIX_FORMAT': 'csr', 'MATRIX_SHAPE': [5000, 5000], 'MATRIX_ROWS': 5000, 'MATRIX_SIZE': 25000000, 'MATRIX_NNZ': 1250, 'MATRIX_DENSITY': 5e-05, 'TIME_S': 10.209988117218018, 'TIME_S_1KI': 0.029651150231366417, 'BASELINE_TIME_S': 10, 'BASELINE_DELAY_S': 10, 'J': 996.8449161434174, 'W': 73.42, 'J_1KI': 2.894968929111357, 'W_1KI': 0.21322135001466586, 'W_D': 47.158, 'J_D': 640.2780244550705, 'W_D_1KI': 0.13695304309441042, 'J_D_1KI': 0.0003977296749823877}
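
Editor's note (hedged sketch): the derived fields in the summary dicts of these .output dumps (J, W_D, J_D and the *_1KI values) are consistent with a few simple relations between the raw numbers printed just before them: the scalar after the single-element power list is the elapsed time of the measured window in seconds, [W] is the average power over that window, and the averaged idle figure (e.g. 525.24 / 20 = 26.262 above) is subtracted to form the delta metrics. The Python below reproduces the synthetic_5000_5e-05 summary from those printed values; it is an illustration of the arithmetic only, not the repository's batch.py or spmv.py, and the function and argument names are invented for the example.

# Hedged reconstruction of the derived metrics in these .output dumps.
# Not the repository's code; names below are illustrative only.
def derive_summary(iterations, time_s, avg_power_w, elapsed_s, idle_power_w):
    per_1k = iterations / 1000.0              # normalise "per 1000 iterations"
    j = avg_power_w * elapsed_s               # total energy over the measured window
    w_d = avg_power_w - idle_power_w          # power above the idle baseline
    j_d = w_d * elapsed_s                     # energy above the idle baseline
    return {
        "TIME_S": time_s,
        "TIME_S_1KI": time_s / per_1k,
        "J": j, "W": avg_power_w,
        "J_1KI": j / per_1k, "W_1KI": avg_power_w / per_1k,
        "W_D": w_d, "J_D": j_d,
        "W_D_1KI": w_d / per_1k,
        # In these dumps J_D_1KI equals W_D_1KI / per_1k (not J_D / per_1k);
        # reproduced here as observed in the logged values.
        "J_D_1KI": (w_d / per_1k) / per_1k,
    }

# Raw numbers printed above for synthetic_5000_5e-05:
#   ITERATIONS=344337, TIME_S=10.209988..., W=73.42, elapsed=13.577293...,
#   idle baseline 525.24 / 20 = 26.262
print(derive_summary(344337, 10.209988117218018, 73.42,
                     13.577293872833252, 525.24 / 20))
# -> W_D ~= 47.158, J ~= 996.84, J_D ~= 640.28, matching the final dict above.

The same relations hold for the other files in this section, e.g. the synthetic_5000_0.4 run (W=83.38, elapsed 17.0702 s, baseline 330.42 / 20 = 16.521 gives W_D = 66.859 and J_D ~= 1141.30). Note that the printed 20-sample lists do not sum exactly to the printed baseline totals, so the idle average is taken here directly from the logged scalar rather than recomputed from the list.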